| id | content |
|---|---|
11483930
|
import requests
url = "http://localhost:8501/v1/models/simple_model:predict"
payload = {'instances': [1.0]}
response = requests.post(url, json=payload)
print(response.text)
print(response.status_code, response.reason)
|
11483977
|
import conftest
from qemu import QemuVm
def run_ioctl_test(command: str, vm: QemuVm) -> None:
conftest.Helpers.run_vmsh_command(
[command, str(vm.pid)], cargo_executable="examples/test_ioctls"
)
def spawn_ioctl_test(command: str, vm: QemuVm) -> conftest.VmshPopen:
return conftest.Helpers.spawn_vmsh_command(
[command, str(vm.pid)], cargo_executable="examples/test_ioctls"
)
def test_injection(helpers: conftest.Helpers) -> None:
with helpers.spawn_qemu(helpers.notos_image()) as vm:
run_ioctl_test("inject", vm)
def test_alloc_mem(helpers: conftest.Helpers) -> None:
with helpers.spawn_qemu(helpers.notos_image()) as vm:
run_ioctl_test("alloc_mem", vm)
def test_ioctl_cpuid2(helpers: conftest.Helpers) -> None:
with helpers.spawn_qemu(helpers.notos_image()) as vm:
run_ioctl_test("cpuid2", vm)
def test_ioctl_guest_add_mem(helpers: conftest.Helpers) -> None:
with helpers.spawn_qemu(helpers.notos_image()) as vm:
run_ioctl_test("guest_add_mem", vm)
# add mem and try to get maps afterwards again
def test_ioctl_guest_add_mem_get_maps(helpers: conftest.Helpers) -> None:
with helpers.spawn_qemu(helpers.notos_image()) as vm:
vm.wait_for_ssh() # to be sure qemu won't add any memory we didn't expect
run_ioctl_test("guest_add_mem_get_maps", vm)
def test_fd_transfer(helpers: conftest.Helpers) -> None:
with helpers.spawn_qemu(helpers.notos_image()) as vm:
run_ioctl_test("fd_transfer", vm)
def test_get_vcpu_maps(helpers: conftest.Helpers) -> None:
with helpers.spawn_qemu(helpers.notos_image()) as vm:
run_ioctl_test("vcpu_maps", vm)
# def test_userfaultfd_completes(helpers: conftest.Helpers) -> None:
# with helpers.spawn_qemu(helpers.notos_image()) as vm:
# vm.wait_for_ssh()
# vmsh = spawn_ioctl_test("guest_userfaultfd", vm)
#
# with vmsh:
# vmsh.wait_until_line("pause", lambda l: "pause" in l)
# print("ssh available")
#
# res = vm.ssh_cmd(
# [
# "devmem2",
# "0xd0000000",
# "ww",
# "0x1337",
# ]
# )
# print("stdout:\n", res.stdout)
# print("stderr:\n", res.stderr)
def test_wrap_syscall(helpers: conftest.Helpers) -> None:
with helpers.spawn_qemu(helpers.notos_image()) as vm:
vm.wait_for_ssh()
print("ssh available")
# attach vmsh after boot, because it slows the vm down a lot.
vmsh = spawn_ioctl_test("guest_kvm_exits", vm)
with vmsh:
vmsh.wait_until_line("attached", lambda l: "attached" in l)
res = vm.ssh_cmd(["devmem2", "0xc0000000", "h"])
print("read:\n", res.stdout)
print("stderr:\n", res.stderr)
assert "0xDEAD" in res.stdout
res = vm.ssh_cmd(["devmem2", "0xc0000000", "h", "0xBEEF"])
print("write 0xBEEF:\n", res.stdout)
print("stderr:\n", res.stderr)
res = vm.ssh_cmd(["devmem2", "0xc0000000", "h"])
print("read:\n", res.stdout)
print("stderr:\n", res.stderr)
assert "0xDEAD" in res.stdout
# check that vm is still responsive
res = vm.ssh_cmd(["ls"])
assert res.returncode == 0
|
11483992
|
import numpy as np
from periodicity.core import TSeries
from periodicity.data import SustainedPlusGappedPureTones
from periodicity.decomposition import CEEMDAN
def test_two_tones_two_imfs():
# Test if nothing but the two tones are recovered by CEEMDAN
x = TSeries(values=SustainedPlusGappedPureTones())
imfs = CEEMDAN(ensemble_size=50, random_seed=42)(x)
assert len(imfs) == 2
# Test if the residual noise in the first mode is close to zero
left_mse = np.mean(np.square(imfs[0][11:490]))
right_mse = np.mean(np.square(imfs[0][761:990]))
assert left_mse < 1e-4
assert right_mse < 1e-4
# Test the closeness between the original tones and the recovered IMFs
s2 = np.sin(2 * np.pi * 0.065 * np.arange(1000))
s1 = np.zeros_like(s2)
s1[500:750] += np.sin(2 * np.pi * 0.255 * np.arange(250))
err1 = (imfs[0] - s1).values[3:-3]
err2 = (imfs[1] - s2).values[3:-3]
err = (sum(imfs) - x).values
rrse_1 = np.linalg.norm(err1) / np.linalg.norm(s1[3:-3])
rrse_2 = np.linalg.norm(err2) / np.linalg.norm(s2[3:-3])
rrse_x = np.linalg.norm(err) / np.linalg.norm(x.values)
assert rrse_1 < 0.10
assert rrse_2 < 0.05
assert rrse_x < 1e-16
|
11484084
|
import enum
#I might be doing something wrong, but I saw an "02" value come through
#which doesn't seem to be in any of the documentation I saw... it registered
#as low on the interface, so I'll just assume there's two codes
@enum.unique
class ErdRinseAgentRaw(enum.Enum):
RINSE_AGENT_GOOD = "00"
RINSE_AGENT_LOW1 = "01"
RINSE_AGENT_LOW2 = "02"
@enum.unique
class ErdRinseAgent(enum.Enum):
NA = "FF"
RINSE_AGENT_GOOD = "00"
RINSE_AGENT_LOW = "01"
def stringify(self, **kwargs):
return self.name.replace("RINSE_AGENT_","").replace("_"," ").title()
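# Hypothetical usage sketch (not part of the original module): stringify() turns the
# enum member name into a display label by stripping the RINSE_AGENT_ prefix and title-casing.
assert ErdRinseAgent.RINSE_AGENT_GOOD.stringify() == "Good"
assert ErdRinseAgent.RINSE_AGENT_LOW.stringify() == "Low"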
|
11484088
|
from flask import Blueprint
from api.controllers.logscontroller import logs_controller
logs_blueprint = Blueprint("logs", __name__)
logs_blueprint.add_url_rule(
'/logs/<int:project_id>',
view_func=logs_controller['fetch_project_logs'],
methods=['GET'],
)
logs_blueprint.add_url_rule(
'/logs/<int:project_id>/category/<string:category>',
view_func=logs_controller['fetch_category_logs'],
methods=['GET'],
)
logs_blueprint.add_url_rule(
'/logs/<int:project_id>/user/<string:user_email>',
view_func=logs_controller['fetch_user_logs'],
methods=['GET'],
)
logs_blueprint.add_url_rule(
'/logs/<int:project_id>/entity/<string:entity_type>/<int:entity_id>',
view_func=logs_controller['fetch_entity_logs'],
methods=['GET'],
)
|
11484094
|
import os
import numpy as np
import pandas as pd
from pyuplift.utils import download_file
def download_hillstrom_email_marketing(
data_home=None,
url='http://www.minethatdata.com/Kevin_Hillstrom_MineThatData_E-MailAnalytics_DataMiningChallenge_2008.03.20.csv'
):
"""Downloading the Hillstrom Email Marketing dataset.
****************
Data description
****************
This dataset contains 64,000 customers who last purchased within twelve months.
The customers were involved in an e-mail test.
* 1/3 were randomly chosen to receive an e-mail campaign featuring Mens merchandise.
* 1/3 were randomly chosen to receive an e-mail campaign featuring Womens merchandise.
* 1/3 were randomly chosen to not receive an e-mail campaign.
During a period of two weeks following the e-mail campaign, results were tracked.
Your job is to tell the world if the Mens or Womens e-mail campaign was successful.
+--------------------------+------------+
|Features | 8 |
+--------------------------+------------+
|Treatment | 3 |
+--------------------------+------------+
|Samples total | 64,000 |
+--------------------------+------------+
|Average spend rate | 1.05091 |
+--------------------------+------------+
|Average visit rate | 0.14678 |
+--------------------------+------------+
|Average conversion rate | 0.00903 |
+--------------------------+------------+
More information about the dataset can be found in
the `official paper <http://minethatdata.com/Stochastic_Solutions_E-Mail_Challenge_2008.04.30.pdf>`_.
+-----------------+----------------------------------------------------------------------------------+
| **Parameters** | | **data_home: string** |
| | | Specify another download and cache folder for the dataset. |
| | | By default the dataset will be stored in the data folder in the same folder. |
| | | **url: string** |
| | | The URL to file with data. |
+-----------------+----------------------------------------------------------------------------------+
| **Returns** | **None** |
+-----------------+----------------------------------------------------------------------------------+
"""
data_home, dataset_path = __get_data_home_dataset_file_paths(data_home)
if not os.path.isdir(data_home):
os.makedirs(data_home)
if not os.path.exists(dataset_path):
download_file(url, dataset_path)
def load_hillstrom_email_marketing(
data_home=None,
load_raw_data=False,
download_if_missing=True
):
"""Loading the Hillstrom Email Marketing dataset from the local file.
****************
Data description
****************
This dataset contains 64,000 customers who last purchased within twelve months.
The customers were involved in an e-mail test.
* 1/3 were randomly chosen to receive an e-mail campaign featuring Mens merchandise.
* 1/3 were randomly chosen to receive an e-mail campaign featuring Womens merchandise.
* 1/3 were randomly chosen to not receive an e-mail campaign.
During a period of two weeks following the e-mail campaign, results were tracked.
Your job is to tell the world if the Mens or Womens e-mail campaign was successful.
+--------------------------+------------+
|Features | 8 |
+--------------------------+------------+
|Treatment | 3 |
+--------------------------+------------+
|Samples total | 64,000 |
+--------------------------+------------+
|Average spend rate | 1.05091 |
+--------------------------+------------+
|Average visit rate | 0.14678 |
+--------------------------+------------+
|Average conversion rate | 0.00903 |
+--------------------------+------------+
More information about the dataset can be found in
the `official paper <http://minethatdata.com/Stochastic_Solutions_E-Mail_Challenge_2008.04.30.pdf>`_.
Parameters
----------
load_raw_data : bool, default: False
Whether to load the raw data instead of the preprocessed data.
data_home : str, optional (default=None)
Specify another download and cache folder for the dataset.
By default the dataset is stored in the data folder alongside this module.
download_if_missing : bool, optional (default=True)
Download the dataset if it is not downloaded.
Returns
-------
dataset : dict object with the following attributes:
dataset.description : str
Description of the Hillstrom email marketing dataset.
dataset.data : ndarray, shape (64000, 8)
Each row corresponding to the 8 feature values in order.
dataset.feature_names : list, size 8
List of feature names.
dataset.treatment : ndarray, shape (64000,)
Each value corresponds to the treatment.
dataset.target : numpy array of shape (64000,)
Each value corresponds to one of the outcomes. By default, this is the `spend` outcome (see `target_spend` below).
dataset.target_spend : numpy array of shape (64000,)
Each value corresponds to how much customers spent during a two-week outcome period.
dataset.target_visit : numpy array of shape (64000,)
Each value corresponds to whether people visited the site during a two-week outcome period.
dataset.target_conversion : numpy array of shape (64000,)
Each value corresponds to whether they purchased at the site (“conversion”) during a two-week outcome period.
"""
data_home, dataset_path = __get_data_home_dataset_file_paths(data_home)
if not os.path.exists(dataset_path):
if download_if_missing:
download_hillstrom_email_marketing(data_home)
else:
raise FileNotFoundError(
'The dataset does not exist. '
'Use `download_hillstrom_email_marketing` function to download the dataset.'
)
df = pd.read_csv(dataset_path)
if not load_raw_data:
df = __encode_data(df)
description = 'This dataset contains 64,000 customers who last purchased within twelve months. ' \
'The customers were involved in an e-mail test. ' \
'1/3 were randomly chosen to receive an e-mail campaign featuring Mens merchandise. ' \
'1/3 were randomly chosen to receive an e-mail campaign featuring Womens merchandise. ' \
'1/3 were randomly chosen to not receive an e-mail campaign. ' \
'During a period of two weeks following the e-mail campaign, results were tracked. ' \
'Your job is to tell the world if the Mens or Womens e-mail campaign was successful.'
drop_fields = ['spend', 'visit', 'conversion', 'segment']
data = {
'description': description,
'data': df.drop(drop_fields, axis=1).values,
'feature_names': np.array(list(filter(lambda x: x not in drop_fields, df.columns))),
'treatment': df['segment'].values,
'target': df['spend'].values,
'target_spend': df['spend'].values,
'target_visit': df['visit'].values,
'target_conversion': df['conversion'].values,
}
return data
def __encode_data(df):
df['history_segment'] = df['history_segment'].apply(lambda s: s.split(') ')[1])
col_name = 'zip_code'
df = pd.get_dummies(df, columns=[col_name], prefix=col_name)
col_name = 'history_segment'
df = pd.get_dummies(df, columns=[col_name], prefix=col_name)
col_name = 'channel'
df = pd.get_dummies(df, columns=[col_name], prefix=col_name)
encoder = {'No E-Mail': 0, 'Mens E-Mail': 1, 'Womens E-Mail': 2}
df['segment'] = df['segment'].apply(lambda k: encoder[k])
return df
def __get_data_home_dataset_file_paths(data_home_path):
if data_home_path is None:
data_home_path = os.path.join(os.sep.join(__file__.split(os.sep)[:-1]), 'data')
dataset_path = os.path.join(data_home_path, 'hillstrom_email_marketing.csv')
return data_home_path, dataset_path
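# Hypothetical usage sketch (commented out because it downloads the CSV on first run):
# load the preprocessed data and split out features, treatment assignment and target.
# hillstrom = load_hillstrom_email_marketing()
# X = hillstrom['data']               # one-hot encoded features
# treatment = hillstrom['treatment']  # 0 = no e-mail, 1 = mens, 2 = womens
# y = hillstrom['target']             # `spend` outcome by default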
|
11484121
|
import aiotfm
import pytest
from aiotfm.utils import Date, get_keys, shakikoo
from aiotfm.utils import Locale
def test_date():
date = Date(2020, 6, 25)
assert date == Date.fromtimestamp(date.timestamp())
async def test_keys():
with pytest.raises(aiotfm.errors.EndpointError):
await get_keys('', '')
def test_shakikoo():
assert shakikoo('<PASSWORD>') == b'<KEY>'
async def test_locale():
locale = Locale()
await locale.load()
assert locale.locale == 'en'
assert str(locale['T_0']) == str(locale['$T_0']) == 'Little Mouse'
assert str(locale['shouldnotexists']) == 'shouldnotexists'
assert repr(locale['shouldnotexists']) == 'shouldnotexists=shouldnotexists'
assert locale['texte.version.valeur'].format() == 'Version %1'
assert locale['texte.version.valeur'].format(125) == 'Version 125'
with pytest.raises(aiotfm.errors.InvalidLocale):
await locale.load('something invalid')
|
11484164
|
import types
def get_as_num(value):
"""Return the JS numeric equivalent for a value."""
if hasattr(value, 'get_literal_value'):
value = value.get_literal_value()
if value is None:
return 0
try:
if isinstance(value, types.StringTypes):
if value.startswith("0x"):
return int(value, 16)
else:
return float(value)
elif isinstance(value, (int, float, long)):
return value
else:
return int(value)
except (ValueError, TypeError):
return 0
def get_as_str(value):
"""Return the JS string equivalent for a literal value."""
if hasattr(value, 'get_literal_value'):
value = value.get_literal_value()
if value is None:
return ""
if isinstance(value, bool):
return u"true" if value else u"false"
elif isinstance(value, (int, float, long)):
if value == float('inf'):
return u"Infinity"
elif value == float('-inf'):
return u"-Infinity"
# Try to see if we can shave off some trailing significant figures.
try:
if int(value) == value:
return unicode(int(value))
except (ValueError, TypeError):
pass
return unicode(value)
def get_NaN(traverser):
# If we've cached the traverser's NaN instance, just use that.
ncache = getattr(traverser, "NAN_CACHE", None)
if ncache is not None:
return ncache
# Otherwise, we need to import GLOBAL_ENTITIES and build a raw copy.
from predefinedentities import GLOBAL_ENTITIES
ncache = traverser._build_global("NaN", GLOBAL_ENTITIES[u"NaN"])
# Cache it so we don't need to do this again.
traverser.NAN_CACHE = ncache
return ncache
def evaluate_lambdas(traverser, node):
if callable(node):
return evaluate_lambdas(traverser, node(traverser))
else:
return node
|
11484179
|
import torch
from torch.utils.data import DataLoader
from poutyne.framework import Model
from torch_enhance.datasets import BSDS300, Set14, Set5
from torch_enhance.models import SRCNN
from torch_enhance import metrics
scale_factor = 2
train_dataset = BSDS300(scale_factor=scale_factor)
val_dataset = Set14(scale_factor=scale_factor)
train_dataloader = DataLoader(train_dataset, batch_size=8)
val_dataloader = DataLoader(val_dataset, batch_size=2)
channels = 3 if train_dataset.color_space == "RGB" else 1
pytorch_network = SRCNN(scale_factor, channels)
model = Model(
pytorch_network,
"sgd",
"mse"
)
model.fit_generator(
train_dataloader,
val_dataloader,
epochs=1
)
|
11484216
|
from .GenericProduct import (open_product,
get_hdf5_file_product_type,
GenericProduct)
|
11484261
|
import json
import os
import requests
from config import *
from common import sdkIPAddress
headers = {
"Content-Type": "application/json",
"Accept": "application/json",
}
if oltest == "1":
url_fullnode = "http://{}/jsonrpc".format(sdkIPAddress(fullnode_dev))
else:
url_fullnode = "http://{}/jsonrpc".format(sdkIPAddress(fullnode_prod))
def rpc_call(method, params, url=url_fullnode):
payload = {
"method": method,
"params": params,
"id": 123,
"jsonrpc": "2.0"
}
response = requests.request("POST", url, data=json.dumps(payload), headers=headers)
if response.status_code != 200:
return ""
resp = json.loads(response.text)
return resp
def tx_by_hash(hash):
params = {"hash": hash}
response = requests.get(url_tmTx, params=params)
if response.status_code != 200:
return ""
resp = json.loads(response.text)
return resp
def convertBigInt(value):
return str(value)
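# Hypothetical usage sketch: the method name below is purely illustrative;
# substitute whatever JSON-RPC method the full node actually exposes.
# resp = rpc_call("some_rpc_method", {})
# print(resp)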
|
11484301
|
import torch
import torch.nn as nn
from Model.Classifier import Classifier
from Model.Cross_TE_Module import Cross_TE_Module
class TE_Cross_Connector(nn.Module):
def __init__(self, _num_stages=3, _use_avg_on_conv3=True,
indim=256, transform_classes=6,num_class=10,
nchannels=256*8*8,
cls_type='MultLayerFC2',run_type=0):
"""
:param _num_stages: block combination
:param _use_avg_on_conv3: finally use avg or not
:param indim:
:param transform_classes: number of transformation classes
"""
self.run_type = run_type
super(TE_Cross_Connector, self).__init__()
self.Cross_TE = Cross_TE_Module(_num_stages=_num_stages, _use_avg_on_conv3=_use_avg_on_conv3, run_type=run_type)
self.fc = nn.Linear(indim, transform_classes)
for m in self.modules():
if isinstance(m, nn.Linear):
m.bias.data.zero_()
self.clf = Classifier(_nChannels=nchannels, _num_classes=num_class, _cls_type=cls_type)
def forward(self, x1, x2, out_feat_keys=None):
first_stage_list = ['conv1'] + ['conv2'] + ['Attention']
check_flag = True
if out_feat_keys!=None:
for item in out_feat_keys:
if item not in first_stage_list:
check_flag = False
else:
check_flag=False
if check_flag:
# only use first stage: cross-attention trained result directly applied to itself
x1, attention_matrix1 = self.Cross_TE(x1, out_feat_keys=out_feat_keys)
x2, attention_matrix2 = self.Cross_TE(x2, out_feat_keys=out_feat_keys)
elif out_feat_keys==None:
out_feat_key1 = 'Attention'
specific_key = 'conv2'
x1, attention_matrix1 = self.Cross_TE(x1, out_feat_keys=[specific_key, out_feat_key1])
x2, attention_matrix2 = self.Cross_TE(x2, out_feat_keys=[specific_key, out_feat_key1])
feat1 = x1[0] # output from conv without attention applying
feat2 = x2[0]
feat_clf1=x1[1]
new_key=['Cross','classifier']
x1, cross_matrix1 = self.Cross_TE(feat1, input_attention=attention_matrix2,out_feat_keys=new_key)
x2, cross_matrix2 = self.Cross_TE(feat2, input_attention=attention_matrix1,out_feat_keys=new_key)
classify_input = feat_clf1
classify_output, _ = self.clf(classify_input, False)
# in this semi-supervised, we do not use attention in the classifier part
transform_input1 = x1[1]
transform_input2 = x2[1]
x = torch.cat((transform_input1, transform_input2), dim=1)
transform_output = torch.tanh(self.fc(x))
else:
x1, attention_matrix1 = self.TE(x1, out_feat_keys)
x2, attention_matrix2 = self.TE(x2, out_feat_keys)
#x1,attention_matrix1=self.attention(x1)
#x2,attention_matrix2=self.attention(x2)
if out_feat_keys == None:
return x1, x2, transform_output,classify_output,attention_matrix1,attention_matrix2
else:
return x1, x2,attention_matrix1,attention_matrix2
|
11484303
|
from a10sdk.common.A10BaseClass import A10BaseClass
class StaticDestMapping(A10BaseClass):
"""Class Description::
Stateless NAT46 mapping (IPv4 <-> IPv6).
Class static-dest-mapping supports CRUD Operations and inherits from `common/A10BaseClass`.
This class is the `"PARENT"` class for this module.`
:param count: {"platform-specific-range": 1, "platform-specific-default": 1, "description": "Set number of consecutive mappings (Number of mappings)", "format": "number", "optional": true, "modify-not-allowed": 1, "type": "number"}
:param v6_address: {"optional": false, "type": "string", "description": "IPv6 address", "format": "ipv6-address"}
:param v4_address: {"optional": false, "type": "string", "description": "IPv4 address", "format": "ipv4-address"}
:param vrid: {"description": "VRRP-A vrid (Specify ha VRRP-A vrid)", "format": "number", "optional": true, "maximum": 31, "minimum": 1, "modify-not-allowed": 1, "type": "number"}
:param uuid: {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/cgnv6/nat46-stateless/static-dest-mapping/{v4_address}+{v6_address}`.
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.required = [ "v4_address","v6_address"]
self.b_key = "static-dest-mapping"
self.a10_url="/axapi/v3/cgnv6/nat46-stateless/static-dest-mapping/{v4_address}+{v6_address}"
self.DeviceProxy = ""
self.count = ""
self.v6_address = ""
self.v4_address = ""
self.vrid = ""
self.uuid = ""
for keys, value in kwargs.items():
setattr(self,keys, value)
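# Hypothetical usage sketch (addresses are RFC 5737/3849 documentation examples):
# build the mapping object locally with its two required keys; performing CRUD
# operations against a device additionally requires an a10sdk DeviceProxy session.
# mapping = StaticDestMapping(v4_address="192.0.2.10", v6_address="2001:db8::10")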
|
11484325
|
from setuptools import setup, find_packages
DISTNAME = "octopy"
VERSION = "1.0.0"
DESCRIPTION = """Octopy is a companion Python library for octosport.io. The library offers tools for football (soccer) analytics."""
LONG_DESCRIPTION = """Octopy is a companion Python library for octosport.io. The library offers tools for football (soccer) analytics."""
AUTHOR = "octosport.io"
AUTHOR_EMAIL = "<EMAIL>"
URL = "https://github.com/octosport/octopy"
LICENSE = "Apache License, Version 2.0"
REQUIREMENTS = ["pandas>=1.1.3", "scipy>=1.5.2", "scikit-learn>=0.23.2", "jax>=0.2.17"]
if __name__ == "__main__":
setup(
name=DISTNAME,
version=VERSION,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
license=LICENSE,
packages=find_packages(),
package_data={"docs": ["*"]},
include_package_data=True,
zip_safe=False,
install_requires=REQUIREMENTS,
classifiers=["Programming Language :: Python :: 3.4"],
)
|
11484336
|
import logging
import pytest
from kazoo.client import KazooClient
from kazoo.handlers.threading import SequentialThreadingHandler
from kazoo.retry import KazooRetry
from kazoo.security import ACL, ANYONE_ID_UNSAFE, Permissions
@pytest.fixture(scope="session")
def zk_conn():
conn_retry_policy = KazooRetry(max_tries=-1,
delay=0.1,
backoff=2,
max_delay=3600)
handler = SequentialThreadingHandler()
conn = KazooClient(hosts="127.0.0.1:2181",
timeout=60,
handler=handler,
connection_retry=conn_retry_policy,
command_retry=conn_retry_policy)
conn.start()
yield conn
conn.stop()
@pytest.fixture(scope="function")
def zk(zk_conn):
# FIXME(prozlach): Let's narrow down these permissions to better reflect
# what bootstrap script is doing. This requires some research wrt the IP
# we are connecting from to the container
# acl = LOCALHOST_ALL + [self.make_service_acl('dcos_etcd', all=True)]
anyone_all = [ACL(Permissions.ALL, ANYONE_ID_UNSAFE)]
zk_conn.ensure_path('/etcd', acl=anyone_all)
zk_conn.ensure_path('/etcd/nodes', acl=anyone_all)
zk_conn.set('/etcd/nodes', b"")
zk_conn.ensure_path('/etcd/locking', acl=anyone_all)
zk_conn.set('/etcd/locking', b"")
yield zk_conn
zk_conn.delete('/etcd', recursive=True)
def pytest_configure(config):
logging.basicConfig(format='[%(levelname)s] %(message)s', level='INFO')
|
11484338
|
import numpy as np
from fireworks import Workflow
from mpmorph.fireworks import powerups
from mpmorph.fireworks.core import StaticFW, MDFW, OptimizeFW
from mpmorph.util import recursive_update
__author__ = '<NAME> and <NAME>'
__maintainer__ = '<NAME>'
__email__ = '<EMAIL>'
def get_quench_wf(structures, priority=None, quench_type="slow_quench",
descriptor="", **kwargs):
"""
Args:
structure: Starting structure for the run
priority: Priority of all fireworks in the workflows
quench_type: use "slow_quench" for a gradual decrease in temperature or
"mp_quench" for a instantaneous DFT relaxation
target_steps: Target number of steps for production MD run
descriptor: Extra description to add to the name of the firework
**kwargs: Arguments such as cool_args, hold_args, quench_args, etc. cool_args and hold_args are only applicable
when using "slow_quench"
Returns: Workflow object
"""
fw_list = []
temperatures = kwargs.get('temperatures', {"start_temp": 3000, "end_temp": 500, "temp_step": 500})
cool_args = kwargs.get('cool_args', {"md_params": {"nsteps": 200}})
hold_args = kwargs.get('hold_args', {"md_params": {"nsteps": 500}})
quench_args = kwargs.get('quench_args', {})
for (i, structure) in enumerate(structures):
_fw_list = []
if quench_type == "slow_quench":
for temp in np.arange(temperatures["start_temp"], temperatures["end_temp"], -temperatures["temp_step"]):
# get fw for cool step
use_prev_structure = False
if len(_fw_list) > 0:
use_prev_structure = True
_fw = get_MDFW(structure, temp, temp - temperatures["temp_step"],
name="snap_" + str(i) + "_cool_" + str(temp - temperatures["temp_step"]),
args=cool_args, parents=[_fw_list[-1]] if len(_fw_list) > 0 else [],
priority=priority, previous_structure=use_prev_structure,
insert_db=True, **kwargs)
_fw_list.append(_fw)
# get fw for hold step
_fw = get_MDFW(structure, temp - temperatures["temp_step"], temp - temperatures["temp_step"],
name="snap_" + str(i) + "_hold_" + str(temp - temperatures["temp_step"]),
args=hold_args, parents=[_fw_list[-1]], priority=priority,
previous_structure=True, insert_db=True, **kwargs)
_fw_list.append(_fw)
if quench_type in ["slow_quench", "mp_quench"]:
# Relax OptimizeFW and StaticFW
run_args = {"run_specs": {"vasp_input_set": None, "vasp_cmd": ">>vasp_cmd<<",
"db_file": ">>db_file<<",
"spec": {"_priority": priority}
},
"optional_fw_params": {"override_default_vasp_params": {}}
}
run_args = recursive_update(run_args, quench_args)
_name = "snap_" + str(i)
use_prev_structure = True if len(_fw_list) > 0 else False
fw1 = OptimizeFW(structure=structure, name=f'{_name}{descriptor}_optimize',
parents=[_fw_list[-1]] if len(_fw_list) > 0 else [],
previous_structure=use_prev_structure,
**run_args["run_specs"], **run_args["optional_fw_params"],
max_force_threshold=None)
fw2 = StaticFW(structure=structure, name=f'{_name}{descriptor}_static',
parents=[fw1], previous_structure=True,
**run_args["run_specs"],
**run_args["optional_fw_params"])
_fw_list.extend([fw1, fw2])
fw_list.extend(_fw_list)
name = structure.composition.reduced_formula + descriptor + "_quench"
wf = Workflow(fw_list, name=name)
return wf
def get_MDFW(structure, start_temp, end_temp, name="molecular dynamics", priority=None, args={},
**kwargs):
"""
Helper function to get molecular dynamics firework for quench workflow
Args:
structure: Initial structure for molecular dynamics run
start_temp: Starting Temperature
end_temp: Ending Temperature
name: name of firework
priority: priority of job in database
args: custom arguments dictionary for molecular dynamics run
kwargs: kwargs for MDFW
Returns: Molecular Dynamics Firework
"""
# Get customized firework
run_args = {"md_params": {"nsteps": 500, "start_temp": start_temp, "end_temp": end_temp},
"run_specs": {"vasp_input_set": None, "vasp_cmd": ">>vasp_cmd<<", "db_file": ">>db_file<<",
"wall_time": 40000},
"optional_fw_params": {"override_default_vasp_params": {},
"spec": {'_priority': priority}}}
run_args["optional_fw_params"]["override_default_vasp_params"].update(
{'user_incar_settings': {'ISIF': 1, 'LWAVE': False, 'PREC': 'Low'}})
run_args = recursive_update(run_args, args)
_mdfw = MDFW(structure=structure, name=name, **run_args["md_params"],
**run_args["run_specs"], **run_args["optional_fw_params"], **kwargs)
return _mdfw
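# Hypothetical usage sketch: `snapshots` stands in for a list of pymatgen Structure
# objects taken from a production MD run; the resulting workflow would then be added
# to a FireWorks LaunchPad for execution.
# quench_wf = get_quench_wf(snapshots, priority=10, quench_type="slow_quench")
# launchpad.add_wf(quench_wf)  # `launchpad` is an assumed, pre-configured LaunchPad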
|
11484394
|
from SPARQLWrapper import SPARQLWrapper, CSV, JSON, POST
import json
from tqdm import tqdm
import time
import numpy as np
import pandas as pd
import io
from tabulate import tabulate
import os
import bs4 as bs
import datetime
import uuid
from datetime import timezone
data = pd.read_excel('data/2021-sub-regional-fuel-poverty-tables.xlsx',sheet_name='Table 3',skiprows=2,skipfooter=9)
print(data)
LSOA_codes = data['LSOA Code'].values
house_num = data['Number of households1'].values
poor_num = data['Number of households in fuel poverty1'].values
start_time = "2019-01-01T12:00:00"
end_time = "2019-12-31T12:00:00"
total = len(house_num)
n_compile = total / 10
remainder = total % 10
n_compile = int(n_compile)
len_query = np.zeros(n_compile+2)
for i in range(1,len(len_query)-1):
len_query[i] = len_query[i-1] + 10
len_query[-1] = len_query[-2] + remainder
for g in tqdm(range(len(len_query)-1)):
i = int(len_query[g])
region = LSOA_codes[i]
houses = house_num[i]
poor = poor_num[i]
house_uuid = uuid.uuid1()
query = '''
PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
PREFIX ofp: <http://www.theworldavatar.com/ontology/ontofuelpoverty/ontofuelpoverty.owl#>
PREFIX ofpt: <http://www.theworldavatar.com/kb/ontofuelpoverty/abox/>
PREFIX ons: <http://statistics.data.gov.uk/id/statistical-geography/>
PREFIX ons_t: <http://statistics.data.gov.uk/def/statistical-geography#>
INSERT DATA
{
ons:%s ofp:hasHouseholds ofpt:%s.
ofpt:%s ofp:validFrom "%s"^^xsd:dateTime;
ofp:validTo "%s"^^xsd:dateTime;
rdf:type ofp:households;
ofp:numberofhouseholds %s;
ofp:fuelpoorhouseholds %s.
'''%(region,
house_uuid,
house_uuid,
start_time,
end_time,
houses,
poor)
middle_num = int(len_query[g+1]-len_query[g])-2
for j in range(middle_num):
region = LSOA_codes[i+j+1]
houses = house_num[i+j+1]
poor = poor_num[i+j+1]
house_uuid = uuid.uuid1()
query += '''
ons:%s ofp:hasHouseholds ofpt:%s.
ofpt:%s ofp:validFrom "%s"^^xsd:dateTime;
ofp:validTo "%s"^^xsd:dateTime;
rdf:type ofp:households;
ofp:numberofhouseholds %s;
ofp:fuelpoorhouseholds %s.
'''%(region,
house_uuid,
house_uuid,
start_time,
end_time,
houses,
poor)
region = LSOA_codes[int(len_query[g+1])-1]
houses = house_num[int(len_query[g+1])-1]
poor = poor_num[int(len_query[g+1])-1]
house_uuid = uuid.uuid1()
query += '''
ons:%s ofp:hasHouseholds ofpt:%s.
ofpt:%s ofp:validFrom "%s"^^xsd:dateTime;
ofp:validTo "%s"^^xsd:dateTime;
rdf:type ofp:households;
ofp:numberofhouseholds %s;
ofp:fuelpoorhouseholds %s.}
'''%(region,
house_uuid,
house_uuid,
start_time,
end_time,
houses,
poor)
DEF_NAMESPACE = 'ontogasgrid'
LOCAL_KG = "http://localhost:9999/blazegraph"
LOCAL_KG_SPARQL = LOCAL_KG + '/namespace/'+DEF_NAMESPACE+'/sparql'
sparql = SPARQLWrapper(LOCAL_KG_SPARQL)
sparql.setMethod(POST) # POST query, not GET
sparql.setQuery(query)
ret = sparql.query()
|
11484397
|
import sys
sys.path.append('../py')
from iroha import *
from iroha.iroha import *
d = IDesign()
mod = IModule(d, "M_top")
tab = ITable(mod)
st1 = IState(tab)
st2 = IState(tab)
st3 = IState(tab)
tab.states.append(st1)
tab.states.append(st2)
tab.states.append(st3)
design_tool.AddNextState(st1, st2)
design_tool.AddNextState(st2, st3)
tab.initialSt = st1
# task_call = design_tool.CreateExtTaskCall(tab, "fn")
task_call = design_tool.CreateEmbeddedExtTaskCall(tab, "mod_task", "mod_task.v", "clk", "rst_n")
task_wait = design_tool.CreateExtTaskWait(tab, task_call)
task_call.input_types.append(IValueType(False, 32))
task_wait.output_types.append(IValueType(False, 32))
call_insn = IInsn(task_call)
arg0 = IRegister(tab, "arg0")
call_insn.inputs.append(arg0)
wait_insn = IInsn(task_wait)
ret0 = IRegister(tab, "ret0")
wait_insn.outputs.append(ret0)
st1.insns.append(call_insn)
st2.insns.append(wait_insn)
design_tool.ValidateIds(d)
DesignWriter(d).Write()
|
11484400
|
from .sbml import load_cbmodel
class ModelCache:
def __init__(self, ids, paths, load_args=None, post_processing=None):
self.paths = dict(zip(ids, paths))
self.cache = dict()
self.load_args = load_args if load_args is not None else {}
self.post_processing = post_processing
def get_ids(self):
return list(self.paths.keys())
def get_model(self, model_id, reset_id=False):
if model_id not in self.paths:
raise RuntimeError("Model not in list: " + model_id)
if model_id in self.cache:
return self.cache[model_id]
model = load_cbmodel(self.paths[model_id], **self.load_args)
self.cache[model_id] = model
if self.post_processing is not None:
self.post_processing(model)
if reset_id:
model.id = model_id
return model
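# Hypothetical usage sketch (ids and paths are placeholders): the model is parsed
# with load_cbmodel on first access and served straight from the cache afterwards.
# cache = ModelCache(['ecoli'], ['models/ecoli.xml'])
# model = cache.get_model('ecoli', reset_id=True)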
|
11484405
|
import gitlab
import logging
from typing import Dict, List
from hypermodel.platform.gcp.config import GooglePlatformConfig
from hypermodel.platform.gcp.data_lake import DataLake
from hypermodel.platform.gcp.data_warehouse import DataWarehouse
from hypermodel.platform.gitlab.git_host import GitLabHost
from hypermodel.platform.abstract.services import PlatformServicesBase
class GooglePlatformServices(PlatformServicesBase):
"""
Services related to our Google Platform / Gitlab technology stack,
including:
Attributes:
config (GooglePlatformConfig): An object containing configuration information
lake (DataLake): A reference to DataLake functionality, implemented through Google Cloud Storage
warehouse (DataWarehouse): A reference to DataWarehouse functionality implemented through BigQuery
"""
def __init__(self):
logging.info("GooglePlatformServices.__init__()")
pass
def initialize(self):
logging.info("GooglePlatformServices.initialize()")
self._config: GooglePlatformConfig = GooglePlatformConfig()
self._lake: DataLake = DataLake(self.config)
self._warehouse: DataWarehouse = DataWarehouse(self.config)
self._git: GitLabHost = GitLabHost(self.config)
@property
def config(self) -> GooglePlatformConfig:
return self._config
@property
def lake(self) -> DataLake:
return self._lake
@property
def warehouse(self) -> DataWarehouse:
return self._warehouse
@property
def git(self) -> GitLabHost:
return self._git
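# Hypothetical usage sketch: the container is inert until initialize() is called,
# since the config, lake, warehouse and git host are only constructed there.
# services = GooglePlatformServices()
# services.initialize()
# warehouse = services.warehouse  # BigQuery-backed DataWarehouse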
|
11484414
|
from brownie import interface
from rich.console import Console
from helpers.utils import snapBalancesMatchForToken
from .StrategyCoreResolver import StrategyCoreResolver
console = Console()
class StrategyDiggLpMetaFarmResolver(StrategyCoreResolver):
def confirm_rebase(self, before, after, value):
"""
Lp token balance should stay the same.
"""
super().confirm_rebase(before, after, value)
assert snapBalancesMatchForToken(before, after, "want")
def confirm_harvest(self, before, after, tx):
console.print("=== Compare Harvest ===")
super().confirm_harvest(before, after, tx)
# No staking position, strategy want should increase irrespective of
# current balance.
# TODO: Add more specific check that the correct reward amount was deposited.
assert after.get("strategy.balanceOf") >= before.get("strategy.balanceOf")
# PPFS should not decrease
assert after.get("sett.pricePerFullShare") >= before.get(
"sett.pricePerFullShare"
)
def add_balances_snap(self, calls, entities):
super().add_balances_snap(calls, entities)
strategy = self.manager.strategy
digg = interface.IERC20(strategy.digg())
calls = self.add_entity_balances_for_tokens(calls, "digg", digg, entities)
calls = self.add_entity_shares_for_tokens(calls, "digg", digg, entities)
return calls
def get_strategy_destinations(self):
# No strategy destinations, uni lp stays in contract.
return {}
|
11484420
|
from django.urls import path
from . import views
app_name = 'books'
urlpatterns = [
path('',
views.HomeView.as_view(),
name='home'),
path('publishers/',
views.PublisherListView.as_view(),
name='publisher_list'),
path('publishers/<int:pk>/',
views.PublisherDetailView.as_view(),
name='publisher_detail'),
path('publishers/add/',
views.PublisherCreateView.as_view(),
name='publisher_create'),
path('publishers/<int:pk>/books/edit/',
views.PublisherBooksUpdateView.as_view(),
name='publisher_books_update'),
]
|
11484449
|
import os.path as op
import cooler
import cooltools.coverage
from numpy import testing
import numpy as np
def test_coverage_symmetric_upper(request):
# perform test:
clr = cooler.Cooler(op.join(request.fspath.dirname, "data/CN.mm9.1000kb.cool"))
cov = cooltools.coverage.get_coverage(clr, ignore_diags=2, chunksize=int(1e7))
# Test that the minimal nonzero coverage is at least 1
assert cov[cov > 0].min() >= 1
# Test that dense matrix marginal is the same:
mtx = clr.matrix(balance=False, as_pixels=False)[:]
np.fill_diagonal(mtx, 0)
np.fill_diagonal(mtx[1:, :], 0)
np.fill_diagonal(mtx[:, 1:], 0)
cov_dense = np.sum(mtx, axis=1)
testing.assert_allclose(
actual=cov[1],
desired=cov_dense,
equal_nan=True,
)
|
11484458
|
from authzed.api.v1 import Client, ReadSchemaRequest
from grpcutil import bearer_token_credentials
client = Client(
"grpc.authzed.com:443",
bearer_token_credentials("<PASSWORD>"),
)
resp = client.ReadSchema(ReadSchemaRequest())
print(resp.schema_text)
|
11484460
|
bl_info = {
"name": "Dagon Asset Export",
"author": "<NAME>",
"version": (1, 0),
"blender": (2, 7, 0),
"location": "File > Export > Dagon Asset (.asset)",
"description": "Export Dagon engine asset file",
"warning": "",
"wiki_url": "",
"tracker_url": "",
"category": "Import-Export"}
import os
import shutil
import struct
from pathlib import Path
from math import pi
import bpy
import bpy_extras
from bpy.props import StringProperty
from bpy_extras.io_utils import ExportHelper
import mathutils
def packVector4f(v):
return struct.pack('<ffff', v[0], v[1], v[2], v[3])
def packVector3f(v):
return struct.pack('<fff', v[0], v[1], v[2])
def packVector2f(v):
return struct.pack('<ff', v[0], v[1])
def saveMesh(scene, ob, absPath, localPath):
mw = ob.matrix_world.copy()
ob.matrix_world.identity()
scene.update()
meshAbsPath = absPath + "/" + ob.data.name + ".obj"
bpy.ops.object.select_all(action='DESELECT')
bpy.ops.object.select_pattern(pattern = ob.name)
bpy.ops.export_scene.obj(
filepath = meshAbsPath,
use_selection = True,
use_materials = False,
use_triangles = True,
use_uvs = True,
use_mesh_modifiers = True)
bpy.ops.object.select_all(action='DESELECT')
for sob in bpy.context.selected_objects:
sob.select = False
ob.matrix_world = mw.copy();
scene.update()
def saveMeshEntity(scene, ob, absPath, localPath):
entityAbsPath = absPath + "/" + ob.name + ".entity"
global_matrix = bpy_extras.io_utils.axis_conversion(to_forward="-Z", to_up="Y").to_4x4()
absTrans = global_matrix * ob.matrix_local * global_matrix.transposed()
objPosition = absTrans.to_translation()
objRotation = absTrans.to_quaternion()
objScale = absTrans.to_scale()
f = open(entityAbsPath, 'wb')
name = 'name: \"%s\";\n' % (ob.name)
f.write(bytearray(name.encode('ascii')))
props = ob.dagonProps
if ob.parent:
parentFilename = localPath + ob.parent.name + ".entity"
parentStr = 'parent: \"%s\";\n' % (parentFilename)
f.write(bytearray(parentStr.encode('ascii')))
pos = 'position: [%s, %s, %s];\n' % (objPosition.x, objPosition.y, objPosition.z)
f.write(bytearray(pos.encode('ascii')))
rot = 'rotation: [%s, %s, %s, %s];\n' % (objRotation.x, objRotation.y, objRotation.z, objRotation.w)
f.write(bytearray(rot.encode('ascii')))
scale = 'scale: [%s, %s, %s];\n' % (objScale.x, objScale.y, objScale.z)
f.write(bytearray(scale.encode('ascii')))
meshLocalPath = localPath + ob.data.name + ".obj"
mesh = 'mesh: \"%s\";\n' % (meshLocalPath)
f.write(bytearray(mesh.encode('ascii')))
if len(ob.data.materials) > 0:
mat = ob.data.materials[0]
materialName = mat.name
materialLocalPath = localPath + mat.name + ".mat"
materialStr = 'material: \"%s\";\n' % (materialLocalPath)
f.write(bytearray(materialStr.encode('ascii')))
visible = 'visible: %s;\n' % (int(props.dagonVisible))
f.write(bytearray(visible.encode('ascii')))
castShadow = 'castShadow: %s;\n' % (int(props.dagonCastShadow))
f.write(bytearray(castShadow.encode('ascii')))
useMotionBlur = 'useMotionBlur: %s;\n' % (int(props.dagonUseMotionBlur))
f.write(bytearray(useMotionBlur.encode('ascii')))
solid = 'solid: %s;\n' % (int(props.dagonSolid))
f.write(bytearray(solid.encode('ascii')))
layer = 'layer: %s;\n' % (props.dagonLayer)
f.write(bytearray(layer.encode('ascii')))
f.close()
def saveEmptyEntity(scene, ob, absPath, localPath):
entityAbsPath = absPath + "/" + ob.name + ".entity"
global_matrix = bpy_extras.io_utils.axis_conversion(to_forward="-Z", to_up="Y").to_4x4()
absTrans = global_matrix * ob.matrix_world * global_matrix.transposed()
objPosition = absTrans.to_translation()
objRotation = absTrans.to_quaternion()
objScale = absTrans.to_scale()
f = open(entityAbsPath, 'wb')
name = 'name: \"%s\";\n' % (ob.name)
f.write(bytearray(name.encode('ascii')))
props = ob.dagonProps
if ob.parent:
parentFilename = localPath + ob.parent.name + ".entity"
parentStr = 'parent: \"%s\";\n' % (parentFilename)
f.write(bytearray(parentStr.encode('ascii')))
pos = 'position: [%s, %s, %s];\n' % (objPosition.x, objPosition.y, objPosition.z)
f.write(bytearray(pos.encode('ascii')))
rot = 'rotation: [%s, %s, %s, %s];\n' % (objRotation.x, objRotation.y, objRotation.z, objRotation.w)
f.write(bytearray(rot.encode('ascii')))
scale = 'scale: [%s, %s, %s];\n' % (objScale.x, objScale.y, objScale.z)
f.write(bytearray(scale.encode('ascii')))
visible = 'visible: %s;\n' % (int(props.dagonVisible))
f.write(bytearray(visible.encode('ascii')))
castShadow = 'castShadow: %s;\n' % (int(props.dagonCastShadow))
f.write(bytearray(castShadow.encode('ascii')))
useMotionBlur = 'useMotionBlur: %s;\n' % (int(props.dagonUseMotionBlur))
f.write(bytearray(useMotionBlur.encode('ascii')))
solid = 'solid: %s;\n' % (int(props.dagonSolid))
f.write(bytearray(solid.encode('ascii')))
layer = 'layer: %s;\n' % (props.dagonLayer)
f.write(bytearray(layer.encode('ascii')))
f.close()
def copyFile(fileSrc, destDir):
destFile = destDir + "/" + os.path.basename(fileSrc)
if not os.path.exists(destFile):
shutil.copy2(fileSrc, destDir + "/")
def saveMaterial(scene, mat, absPath, localPath):
matAbsPath = absPath + "/" + mat.name + ".mat"
f = open(matAbsPath, 'wb')
name = 'name: \"%s\";\n' % (mat.name)
f.write(bytearray(name.encode('ascii')))
props = mat.dagonProps
# diffuse
diffuse = ''
if len(props.dagonDiffuseTexture):
imageName = props.dagonDiffuseTexture
imgAbsPath = bpy.path.abspath(props.dagonDiffuseTexture)
if props.dagonDiffuseTexture in bpy.data.images:
imgAbsPath = bpy.path.abspath(bpy.data.images[props.dagonDiffuseTexture].filepath)
imageName = os.path.basename(imgAbsPath)
imgPath = localPath + imageName
diffuse = 'diffuse: \"%s\";\n' % (imgPath)
copyFile(imgAbsPath, absPath)
else:
diffuse = 'diffuse: [%s, %s, %s];\n' % (props.dagonDiffuse.r, props.dagonDiffuse.g, props.dagonDiffuse.b)
f.write(bytearray(diffuse.encode('ascii')))
# roughness
roughness = ''
if len(props.dagonRoughnessTexture):
imageName = props.dagonRoughnessTexture
imgAbsPath = bpy.path.abspath(props.dagonRoughnessTexture)
if props.dagonRoughnessTexture in bpy.data.images:
imgAbsPath = bpy.path.abspath(bpy.data.images[props.dagonRoughnessTexture].filepath)
imageName = os.path.basename(imgAbsPath)
imgPath = localPath + imageName
roughness = 'roughness: \"%s\";\n' % (imgPath)
copyFile(imgAbsPath, absPath)
else:
roughness = 'roughness: %s;\n' % (props.dagonRoughness)
f.write(bytearray(roughness.encode('ascii')))
# metallic
metallic = ''
if len(props.dagonMetallicTexture):
imageName = props.dagonMetallicTexture
imgAbsPath = bpy.path.abspath(props.dagonMetallicTexture)
if props.dagonMetallicTexture in bpy.data.images:
imgAbsPath = bpy.path.abspath(bpy.data.images[props.dagonMetallicTexture].filepath)
imageName = os.path.basename(imgAbsPath)
imgPath = localPath + imageName
metallic = 'metallic: \"%s\";\n' % (imgPath)
copyFile(imgAbsPath, absPath)
else:
metallic = 'metallic: %s;\n' % (props.dagonMetallic)
f.write(bytearray(metallic.encode('ascii')))
# emission
emission = ''
if len(props.dagonEmissionTexture):
imageName = props.dagonEmissionTexture
imgAbsPath = bpy.path.abspath(props.dagonEmissionTexture)
if props.dagonEmissionTexture in bpy.data.images:
imgAbsPath = bpy.path.abspath(bpy.data.images[props.dagonEmissionTexture].filepath)
imageName = os.path.basename(imgAbsPath)
imgPath = localPath + imageName
emission = 'emission: \"%s\";\n' % (imgPath)
copyFile(imgAbsPath, absPath)
else:
emission = 'emission: [%s, %s, %s];\n' % (props.dagonEmission.r, props.dagonEmission.g, props.dagonEmission.b)
f.write(bytearray(emission.encode('ascii')))
# energy
energy = 'energy: %s;\n' % (props.dagonEnergy)
f.write(bytearray(energy.encode('ascii')))
# normal
if len(props.dagonNormalTexture):
imageName = props.dagonNormalTexture
imgAbsPath = bpy.path.abspath(props.dagonNormalTexture)
if props.dagonNormalTexture in bpy.data.images:
imgAbsPath = bpy.path.abspath(bpy.data.images[props.dagonNormalTexture].filepath)
imageName = os.path.basename(imgAbsPath)
imgPath = localPath + imageName
normal = 'normal: \"%s\";\n' % (imgPath)
copyFile(imgAbsPath, absPath)
f.write(bytearray(normal.encode('ascii')))
# height
if len(props.dagonHeightTexture):
imageName = props.dagonHeightTexture
imgAbsPath = bpy.path.abspath(props.dagonHeightTexture)
if props.dagonHeightTexture in bpy.data.images:
imgAbsPath = bpy.path.abspath(bpy.data.images[props.dagonHeightTexture].filepath)
imageName = os.path.basename(imgAbsPath)
imgPath = localPath + imageName
height = 'height: \"%s\";\n' % (imgPath)
copyFile(imgAbsPath, absPath)
f.write(bytearray(height.encode('ascii')))
# parallaxMode
parallaxMode = {
'ParallaxNone': 0,
'ParallaxSimple': 1,
'ParallaxOcclusionMapping': 2,
}[props.dagonParallaxMode];
parallax = 'parallax: %s;\n' % (parallaxMode)
f.write(bytearray(parallax.encode('ascii')))
# parallaxScale
parallaxScale = 'parallaxScale: %s;\n' % (props.dagonParallaxScale)
f.write(bytearray(parallaxScale.encode('ascii')))
# parallaxBias
parallaxBias = 'parallaxBias: %s;\n' % (props.dagonParallaxBias)
f.write(bytearray(parallaxBias.encode('ascii')))
# shadeless
shadeless = 'shadeless: %s;\n' % (int(props.dagonShadeless))
f.write(bytearray(shadeless.encode('ascii')))
# culling
culling = 'culling: %s;\n' % (int(props.dagonCulling))
f.write(bytearray(culling.encode('ascii')))
# colorWrite
colorWrite = 'colorWrite: %s;\n' % (int(props.dagonColorWrite))
f.write(bytearray(colorWrite.encode('ascii')))
# depthWrite
depthWrite = 'depthWrite: %s;\n' % (int(props.dagonDepthWrite))
f.write(bytearray(depthWrite.encode('ascii')))
# useShadows
useShadows = 'useShadows: %s;\n' % (int(props.dagonReceiveShadows))
f.write(bytearray(useShadows.encode('ascii')))
# useFog
useFog = 'useFog: %s;\n' % (int(props.dagonFog))
f.write(bytearray(useFog.encode('ascii')))
# shadowFilter
shadowFilter = {
'ShadowFilterNone': 0,
'ShadowFilterPCF': 1
}[props.dagonShadowFilter];
shadowFilterStr = 'shadowFilter: %s;\n' % (shadowFilter)
f.write(bytearray(shadowFilterStr.encode('ascii')))
# blendingMode
blendingMode = {
'BlendingModeOpaque': 0,
'BlendingModeTransparent': 1,
'BlendingModeAdditive': 2
}[props.dagonBlendingMode];
blendingModeStr = 'blendingMode: %s;\n' % (blendingMode)
f.write(bytearray(blendingModeStr.encode('ascii')))
# transparency
transparency = 'transparency: %s;\n' % (props.dagonTransparency)
f.write(bytearray(transparency.encode('ascii')))
f.close()
def saveIndexFile(entities, absPath, dirLocal):
indexAbsPath = absPath + "/INDEX"
f = open(indexAbsPath, 'wb')
for e in entities:
estr = '%s\n' % (e)
f.write(bytearray(estr.encode('ascii')))
f.close()
def doExport(context, filepath = ""):
scene = context.scene
dirName = Path(filepath).stem
dirParent = os.path.dirname(filepath)
dirAbs = dirParent + "/" + dirName + "_root"
if os.path.exists(dirAbs):
shutil.rmtree(dirAbs)
os.makedirs(dirAbs)
dirLocal = '' #dirName + "/"
entities = []
meshes = []
localFilenames = []
absFilenames = []
# Save *.obj and *.entity files
for ob in scene.objects:
if ob.type == 'MESH':
meshName = ob.data.name
if not meshName in meshes:
saveMesh(scene, ob, dirAbs, dirLocal)
meshLocalPath = dirLocal + meshName + ".obj"
localFilenames.append(meshLocalPath)
meshAbsPath = dirAbs + "/" + meshName + ".obj"
absFilenames.append(meshAbsPath)
meshes.append(meshName)
saveMeshEntity(scene, ob, dirAbs, dirLocal)
entityFileLocalPath = dirLocal + ob.name + ".entity"
localFilenames.append(entityFileLocalPath)
entityFileAbsPath = dirAbs + "/" + ob.name + ".entity"
absFilenames.append(entityFileAbsPath)
entities.append(entityFileLocalPath)
#TODO: lamps
else:
saveEmptyEntity(scene, ob, dirAbs, dirLocal)
entityFileLocalPath = dirLocal + ob.name + ".entity"
localFilenames.append(entityFileLocalPath)
entityFileAbsPath = dirAbs + "/" + ob.name + ".entity"
absFilenames.append(entityFileAbsPath)
entities.append(entityFileLocalPath)
for mat in bpy.data.materials:
saveMaterial(scene, mat, dirAbs, dirLocal)
matLocalPath = dirLocal + mat.name + ".mat"
localFilenames.append(matLocalPath)
matAbsPath = dirAbs + "/" + mat.name + ".mat"
absFilenames.append(matAbsPath)
for filename in os.listdir(dirAbs + "/"):
if filename.endswith(".png") or filename.endswith(".jpg") or filename.endswith(".bmp") or filename.endswith(".tga") or filename.endswith(".hdr"):
texLocalPath = dirLocal + os.path.basename(filename)
localFilenames.append(texLocalPath)
texAbsPath = dirAbs + "/" + os.path.basename(filename)
absFilenames.append(texAbsPath)
saveIndexFile(entities, dirAbs, dirLocal)
indexLocalPath = "INDEX"
localFilenames.append(indexLocalPath)
indexAbsPath = dirAbs + "/INDEX"
absFilenames.append(indexAbsPath)
fileDataOffset = 12; #initial offset
for i, filename in enumerate(localFilenames):
fileDataOffset = fileDataOffset + 4; # filename size
fileDataOffset = fileDataOffset + len(filename.encode('ascii'))
fileDataOffset = fileDataOffset + 8 # data offset
fileDataOffset = fileDataOffset + 8 # data size
# Save *.asset file (Box archive)
# Write header
f = open(filepath, 'wb')
f.write(bytearray('BOXF'.encode('ascii')))
f.write(struct.pack('<Q', len(localFilenames)))
# Write index
for i, filename in enumerate(localFilenames):
filenameData = bytearray(filename.encode('ascii'))
filePathSize = len(filenameData)
fileDataSize = os.path.getsize(absFilenames[i])
f.write(struct.pack('<I', filePathSize))
f.write(filenameData)
f.write(struct.pack('<Q', fileDataOffset))
f.write(struct.pack('<Q', fileDataSize))
fileDataOffset = fileDataOffset + fileDataSize
# Write data
for i, filename in enumerate(absFilenames):
f2 = open(filename, 'rb')
fileData = bytearray(f2.read())
f.write(fileData)
f2.close()
f.close()
return {'FINISHED'}
class ExportDagonAsset(bpy.types.Operator, ExportHelper):
bl_idname = "scene.asset"
bl_label = "Export Dagon Asset"
filename_ext = ".asset"
filter_glob = StringProperty(default = "unknown.asset", options = {"HIDDEN"})
@classmethod
def poll(cls, context):
return True
def execute(self, context):
filepath = self.filepath
filepath = bpy.path.ensure_ext(filepath, self.filename_ext)
return doExport(context, filepath)
def invoke(self, context, event):
wm = context.window_manager
if True:
wm.fileselect_add(self)
return {"RUNNING_MODAL"}
elif True:
wm.invoke_search_popup(self)
return {"RUNNING_MODAL"}
elif False:
return wm.invoke_props_popup(self, event)
elif False:
return self.execute(context)
def menu_func_export_dagon_asset(self, context):
self.layout.operator(ExportDagonAsset.bl_idname, text = "Dagon Asset (.asset)")
class DagonObjectProps(bpy.types.PropertyGroup):
dagonVisible = bpy.props.BoolProperty(name="Visible", default=True)
dagonSolid = bpy.props.BoolProperty(name="Solid", default=False)
dagonCastShadow = bpy.props.BoolProperty(name="Cast Shadow", default=True)
dagonUseMotionBlur = bpy.props.BoolProperty(name="Motion Blur", default=True)
dagonLayer = bpy.props.IntProperty(name="Layer", default=1)
class DagonObjectPropsPanel(bpy.types.Panel):
bl_label = "Dagon Properties"
bl_idname = "dagon_object_props"
bl_space_type = 'PROPERTIES'
bl_region_type = 'WINDOW'
bl_context = "object"
def draw(self, context):
obj = context.object
props = obj.dagonProps
row = self.layout.row()
split = row.split(percentage=0.5)
col = split.column()
col.prop(props, 'dagonVisible')
col = split.column()
col.prop(props, 'dagonSolid')
row = self.layout.row()
split = row.split(percentage=0.5)
col = split.column()
col.prop(props, 'dagonCastShadow')
col = split.column()
col.prop(props, 'dagonUseMotionBlur')
col = self.layout.column(align=True)
col.prop(props, 'dagonLayer')
ParallaxModeEnum = [
('ParallaxNone', "None", "", 0),
('ParallaxSimple', "Simple", "", 1),
('ParallaxOcclusionMapping', "Occlusion Mapping", "", 2)
]
ShadowFilterEnum = [
('ShadowFilterNone', "None", "", 0),
('ShadowFilterPCF', "PCF", "", 1)
]
BlendingModeEnum = [
('BlendingModeOpaque', "Opaque", "", 0),
('BlendingModeTransparent', "Transparent", "", 1),
('BlendingModeAdditive', "Additive", "", 2)
]
class DagonMaterialProps(bpy.types.PropertyGroup):
dagonDiffuse = bpy.props.FloatVectorProperty(name="Diffuse", default=(0.8, 0.8, 0.8), min=0.0, max=1.0, subtype='COLOR')
dagonRoughness = bpy.props.FloatProperty(name="Roughness", default=0.5, min=0.0, max=1.0, subtype='FACTOR')
dagonMetallic = bpy.props.FloatProperty(name="Metallic", default=0.0, min=0.0, max=1.0, subtype='FACTOR')
dagonEmission = bpy.props.FloatVectorProperty(name="Emission", default=(0.0, 0.0, 0.0), min=0.0, max=1.0, subtype='COLOR')
dagonEnergy = bpy.props.FloatProperty(name="Energy", default=0.0, min=0.0)
dagonDiffuseTexture = bpy.props.StringProperty(name="Diffuse Texture", subtype='FILE_PATH')
dagonRoughnessTexture = bpy.props.StringProperty(name="Roughness Texture", subtype='FILE_PATH')
dagonMetallicTexture = bpy.props.StringProperty(name="Metallic Texture", subtype='FILE_PATH')
dagonNormalTexture = bpy.props.StringProperty(name="Normal Texture", subtype='FILE_PATH')
dagonHeightTexture = bpy.props.StringProperty(name="Height Texture", subtype='FILE_PATH')
dagonEmissionTexture = bpy.props.StringProperty(name="Emission Texture", subtype='FILE_PATH')
dagonParallaxMode = bpy.props.EnumProperty(name="Parallax Mode", items=ParallaxModeEnum)
dagonParallaxScale = bpy.props.FloatProperty(name="Parallax Scale", default=0.03, min=0.0)
dagonParallaxBias = bpy.props.FloatProperty(name="Parallax Bias", default=-0.01)
dagonShadeless = bpy.props.BoolProperty(name="Shadeless", default=False)
dagonCulling = bpy.props.BoolProperty(name="Culling", default=True)
dagonColorWrite = bpy.props.BoolProperty(name="Color Write", default=True)
dagonDepthWrite = bpy.props.BoolProperty(name="Depth Write", default=True)
dagonReceiveShadows = bpy.props.BoolProperty(name="Receive Shadows", default=True)
dagonFog = bpy.props.BoolProperty(name="Fog", default=True)
dagonShadowFilter = bpy.props.EnumProperty(name="Shadow Filter", items=ShadowFilterEnum)
dagonBlendingMode = bpy.props.EnumProperty(name="Blending Mode", items=BlendingModeEnum)
dagonTransparency = bpy.props.FloatProperty(name="Transparency", default=1.0, min=0.0, max=1.0, subtype='FACTOR')
class DagonMaterialPropsPanel(bpy.types.Panel):
bl_label = "Dagon Properties"
bl_idname = "dagon_material_props"
bl_space_type = 'PROPERTIES'
bl_region_type = 'WINDOW'
bl_context = "material"
def draw(self, context):
obj = context.object
mat = obj.active_material
col = self.layout.column(align=True)
props = mat.dagonProps
col.prop(props, 'dagonDiffuse')
col = self.layout.column(align=True)
col.prop(props, 'dagonRoughness')
col.prop(props, 'dagonMetallic')
col = self.layout.column(align=True)
col.prop(props, 'dagonEmission')
col.prop(props, 'dagonEnergy')
col = self.layout.column(align=True)
col.prop_search(props, "dagonDiffuseTexture", bpy.data, "images")
col = self.layout.column(align=True)
col.prop_search(props, "dagonRoughnessTexture", bpy.data, "images")
col = self.layout.column(align=True)
col.prop_search(props, "dagonMetallicTexture", bpy.data, "images")
col = self.layout.column(align=True)
col.prop_search(props, "dagonNormalTexture", bpy.data, "images")
col = self.layout.column(align=True)
col.prop_search(props, "dagonHeightTexture", bpy.data, "images")
col = self.layout.column(align=True)
col.prop_search(props, "dagonEmissionTexture", bpy.data, "images")
col = self.layout.column(align=True)
col.prop(props, 'dagonParallaxMode')
col = self.layout.column(align=True)
col.prop(props, 'dagonParallaxScale')
col.prop(props, 'dagonParallaxBias')
row = self.layout.row()
split = row.split(percentage=0.5)
col = split.column()
col.prop(props, 'dagonShadeless')
col = split.column()
col.prop(props, 'dagonCulling')
row = self.layout.row()
split = row.split(percentage=0.5)
col = split.column()
col.prop(props, 'dagonColorWrite')
col = split.column()
col.prop(props, 'dagonDepthWrite')
row = self.layout.row()
split = row.split(percentage=0.5)
col = split.column()
col.prop(props, 'dagonReceiveShadows')
col = split.column()
col.prop(props, 'dagonFog')
col = self.layout.column(align=True)
col.prop(props, 'dagonShadowFilter')
col = self.layout.column(align=True)
col.prop(props, 'dagonBlendingMode')
col = self.layout.column(align=True)
col.prop(props, 'dagonTransparency')
def register():
bpy.utils.register_module(__name__)
bpy.types.INFO_MT_file_export.append(menu_func_export_dagon_asset)
bpy.types.Material.dagonProps = bpy.props.PointerProperty(type=DagonMaterialProps)
bpy.types.Object.dagonProps = bpy.props.PointerProperty(type=DagonObjectProps)
def unregister():
del bpy.types.Object.dagonProps
del bpy.types.Material.dagonProps
bpy.types.INFO_MT_file_export.remove(menu_func_export_dagon_asset)
bpy.utils.unregister_module(__name__)
if __name__ == "__main__":
register()
|
11484499
|
import isce3
def file_to_rdr_grid(ref_grid_path: str) -> isce3.product.RadarGridParameters:
    '''read the parameters needed to create a radar grid object from a text file'''
with open(ref_grid_path, 'r') as f_rdr_grid:
sensing_start = float(f_rdr_grid.readline())
wavelength = float(f_rdr_grid.readline())
prf = float(f_rdr_grid.readline())
starting_range = float(f_rdr_grid.readline())
range_pixel_spacing = float(f_rdr_grid.readline())
length = int(f_rdr_grid.readline())
width = int(f_rdr_grid.readline())
# read date string and remove newline
date_str = f_rdr_grid.readline()
ref_epoch = isce3.core.DateTime(date_str[:-1])
rdr_grid = isce3.product.RadarGridParameters(
sensing_start, wavelength, prf, starting_range,
range_pixel_spacing, "right", length, width,
ref_epoch)
return rdr_grid
def rdr_grid_to_file(ref_grid_path: str,
rdr_grid: isce3.product.RadarGridParameters) -> None:
'''save parameters needed to create a new radar grid object'''
with open(ref_grid_path, "w") as f_rdr_grid:
f_rdr_grid.write(str(rdr_grid.sensing_start) + '\n')
f_rdr_grid.write(str(rdr_grid.wavelength) + '\n')
f_rdr_grid.write(str(rdr_grid.prf) + '\n')
f_rdr_grid.write(str(rdr_grid.starting_range) + '\n')
f_rdr_grid.write(str(rdr_grid.range_pixel_spacing) + '\n')
f_rdr_grid.write(str(rdr_grid.length) + '\n')
f_rdr_grid.write(str(rdr_grid.width) + '\n')
f_rdr_grid.write(str(rdr_grid.ref_epoch) + '\n')
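# A hypothetical round-trip sketch (requires an isce3 installation; the file
# name "ref_radar_grid.txt" and the existing grid object `rdr_grid` are
# assumptions): persist a grid with rdr_grid_to_file, then rebuild it.
#
#     rdr_grid_to_file("ref_radar_grid.txt", rdr_grid)
#     same_grid = file_to_rdr_grid("ref_radar_grid.txt")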
|
11484503
|
import logging
from functools import lru_cache
from typing import Collection, Union, Set, Callable, Tuple
from talkgenerator.datastructures.image_data import ImageData
logger = logging.getLogger("talkgenerator")
class PeakedWeight(object):
def __init__(
self, peak_values: Tuple[int, ...], weight: float, other_weight: float
):
self._peak_values = peak_values
self._weight = weight
self._other_weight = other_weight
def __call__(self, slide_nr: int, num_slides: int):
actual_peak_values = fix_indices(self._peak_values, num_slides)
if slide_nr in actual_peak_values:
return self._weight
return self._other_weight
@lru_cache(maxsize=30)
def fix_indices(values: Collection[int], num_slides: int):
return [value % num_slides if value < 0 else value for value in values]
class ConstantWeightFunction(object):
def __init__(self, weight):
self._weight = weight
def __call__(self, slide_nr, total_slides):
return self._weight
# Classes that are abstractly responsible for generating powerpoints
class SlideGeneratorData:
""" Responsible for providing the slide generator and other attributes, such as its name and weight"""
def __init__(
self,
generator,
weight_function: Callable[[int, int], float] = ConstantWeightFunction(1),
retries: int = 5,
allowed_repeated_elements: int = 0,
tags=None,
name=None,
):
self._generator = generator
self._weight_function = weight_function
self._retries = retries
self._name = name
self._allowed_repeated_elements = allowed_repeated_elements
if not tags:
tags = set()
self._tags = tags
def generate(self, presentation_context, used_elements):
"""Generate a slide for a given presentation using the given seed."""
logger.debug('slide_generator_data.generate()')
logger.debug('presentation_context: {}'.format(presentation_context))
logger.debug('used_elements: {}'.format(used_elements))
logger.debug('self._allowed_repeated_elements: {}'.format(self._allowed_repeated_elements))
# Try a certain amount of times
for i in range(self._retries):
logger.debug('retry: {}'.format(i))
logger.debug('self._generator: {}'.format(self._generator))
slide_results = self._generator.generate_slide(
presentation_context, (used_elements, self._allowed_repeated_elements)
)
logger.debug('slide_results: {}'.format(slide_results))
if slide_results:
(slide, generated_elements) = slide_results
logger.debug('slide: {}'.format(slide))
logger.debug('generated_elements: {}'.format(generated_elements))
# If the generated content is nothing, don't try again
if _has_not_generated_something(generated_elements):
return None
if slide:
# Add notes about the generation
slide.set_note(
"Seed: "
+ presentation_context["seed"]
+ "\nGenerator: "
+ str(self)
+ " \n Context: "
+ str(presentation_context)
+ " \n Generated Elements: "
+ str(generated_elements)
)
# Add all sources of generated elements
for generated_element in generated_elements:
if isinstance(generated_element, ImageData):
slide.add_source(generated_element.get_source())
return slide, generated_elements
def get_weight_for(self, slide_nr: int, total_slides: int) -> float:
"""The weight of the generator for a particular slide.
Determines how much chance it has being picked for a particular slide number"""
return self._weight_function(slide_nr, total_slides)
def get_allowed_repeated_elements(self) -> int:
return self._allowed_repeated_elements
def get_tags(self) -> Set[str]:
return self._tags
def __str__(self):
if bool(self._name):
return str(self._name)
name = str(self._generator.__name__)
if name == "<lambda>":
name = "Unnamed Generator"
return "SlideGenerator[" + name + "]"
def _has_not_generated_something(generated_elements) -> bool:
generated_elements = set(generated_elements)
_filter_generated_elements(generated_elements)
return len(generated_elements) == 0
def _filter_generated_elements(generated_elements: Set[Union[str, bool, None]]):
if "" in generated_elements:
generated_elements.remove("")
if None in generated_elements:
generated_elements.remove(None)
if True in generated_elements:
generated_elements.remove(True)
if False in generated_elements:
generated_elements.remove(False)
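# A minimal usage sketch (hypothetical values): a weight function that strongly
# favours the first and last slide; negative peak indices wrap around through
# fix_indices above.
#
#     title_weight = PeakedWeight(peak_values=(0, -1), weight=10.0, other_weight=0.1)
#     title_weight(0, num_slides=12)   # -> 10.0
#     title_weight(11, num_slides=12)  # -> 10.0 (peak index -1 wraps to 11)
#     title_weight(5, num_slides=12)   # -> 0.1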
|
11484506
|
import asyncio
from asyncio import AbstractEventLoop
from concurrent.futures import ThreadPoolExecutor
from typing import TypeVar, Callable, Any, Awaitable, Optional
import logging
logger = logging.getLogger(__name__)
T = TypeVar("T")
executor = ThreadPoolExecutor(max_workers=2)
def _loop() -> AbstractEventLoop:
"""Return the event loop"""
asyncio.set_event_loop(asyncio.SelectorEventLoop())
return asyncio.get_event_loop()
def run_async(task: Callable[[Any], T], *args: Any) -> Awaitable[T]:
"""Run a callable async"""
if not callable(task):
raise TypeError("Task must be a callable")
return _loop().run_in_executor(executor, task, *args)
def wait_for(t: Awaitable[T]) -> Optional[T]:
"""Wait for the return of a callable. This is mostly intended
to be used for testing async tasks."""
return asyncio.get_event_loop().run_until_complete(t)
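# A minimal usage sketch (hypothetical callable): run a blocking function on
# the thread pool executor and block until its result is available.
#
#     def slow_add(a, b):
#         return a + b
#
#     future = run_async(slow_add, 1, 2)
#     assert wait_for(future) == 3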
|
11484529
|
import argparse
import numpy as np
import os
import torch
import torch.nn as nn
from trixi.util import Config, GridSearch
class ConvModule(nn.Module):
"""Utility Module for more convenient weight initialization"""
conv_types = (nn.Conv1d,
nn.Conv2d,
nn.Conv3d,
nn.ConvTranspose1d,
nn.ConvTranspose2d,
nn.ConvTranspose3d)
@classmethod
def is_conv(cls, op):
if type(op) == type and issubclass(op, cls.conv_types):
return True
elif type(op) in cls.conv_types:
return True
else:
return False
def __init__(self, *args, **kwargs):
super(ConvModule, self).__init__(*args, **kwargs)
def init_weights(self, init_fn, *args, **kwargs):
class init_(object):
def __init__(self):
self.fn = init_fn
self.args = args
self.kwargs = kwargs
def __call__(self, module):
if ConvModule.is_conv(type(module)):
module.weight = self.fn(module.weight, *self.args, **self.kwargs)
_init_ = init_()
self.apply(_init_)
def init_bias(self, init_fn, *args, **kwargs):
class init_(object):
def __init__(self):
self.fn = init_fn
self.args = args
self.kwargs = kwargs
def __call__(self, module):
if ConvModule.is_conv(type(module)) and module.bias is not None:
module.bias = self.fn(module.bias, *self.args, **self.kwargs)
_init_ = init_()
self.apply(_init_)
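# A minimal usage sketch (hypothetical subclass): initialise the conv weights
# and biases of a ConvModule with torch.nn.init functions, which modify the
# parameter in place and return it, matching the assignment pattern above.
#
#     class TinyNet(ConvModule):
#         def __init__(self):
#             super().__init__()
#             self.conv = nn.Conv2d(3, 8, kernel_size=3)
#
#     net = TinyNet()
#     net.init_weights(nn.init.xavier_uniform_)
#     net.init_bias(nn.init.zeros_)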
def get_default_experiment_parser():
parser = argparse.ArgumentParser()
parser.add_argument("base_dir", type=str, help="Working directory for experiment.")
parser.add_argument("-c", "--config", type=str, default=None, help="Path to a config file.")
parser.add_argument("-v", "--visdomlogger", action="store_true", help="Use visdomlogger.")
parser.add_argument("-dc", "--default_config", type=str, default="DEFAULTS", help="Select a default Config")
parser.add_argument("--resume", type=str, default=None, help="Path to resume from")
parser.add_argument("-ir", "--ignore_resume_config", action="store_true", help="Ignore Config in experiment we resume from.")
parser.add_argument("--test", action="store_true", help="Run test instead of training")
parser.add_argument("--grid", type=str, help="Path to a config for grid search")
parser.add_argument("-s", "--skip_existing", action="store_true", help="Skip configs fpr which an experiment exists")
parser.add_argument("-m", "--mods", type=str, nargs="+", default=None, help="Mods are Config stubs to update only relevant parts for a certain setup.")
return parser
def run_experiment(experiment, configs, args, mods=None, **kwargs):
config = Config(file_=args.config) if args.config is not None else Config()
config.update_missing(configs[args.default_config])
if args.mods is not None:
for mod in args.mods:
config.update(mods[mod])
config = Config(config=config, update_from_argv=True)
# GET EXISTING EXPERIMENTS TO BE ABLE TO SKIP CERTAIN CONFIGS
if args.skip_existing:
existing_configs = []
for exp in os.listdir(args.base_dir):
try:
existing_configs.append(Config(file_=os.path.join(args.base_dir, exp, "config", "config.json")))
except Exception as e:
pass
if args.grid is not None:
grid = GridSearch().read(args.grid)
else:
grid = [{}]
for combi in grid:
config.update(combi)
if args.skip_existing:
skip_this = False
for existing_config in existing_configs:
if existing_config.contains(config):
skip_this = True
break
if skip_this:
continue
loggers = {}
if args.visdomlogger:
loggers["visdom"] = ("visdom", {}, 1)
exp = experiment(config=config,
base_dir=args.base_dir,
resume=args.resume,
ignore_resume_config=args.ignore_resume_config,
loggers=loggers,
**kwargs)
if not args.test:
exp.run()
else:
exp.run_test()
def set_seeds(seed, cuda=True):
if not hasattr(seed, "__iter__"):
seed = (seed, seed, seed)
np.random.seed(seed[0])
torch.manual_seed(seed[1])
if cuda: torch.cuda.manual_seed_all(seed[2])
|
11484530
|
import unittest
from synthetic.stats import create_stat, StatType, DistanceType
from synthetic.tests.graphs import *
class TestStats(unittest.TestCase):
def test_degrees_undir(self):
g = full_graph(directed=False)
s = create_stat(g, StatType.DEGREES, bins=10)
self.assertListEqual(list(s.data), [0, 0, 0, 0, 0, 0, 0, 0, 0, 100])
self.assertEqual(s.bin_edges[-1], 99.)
g = star_graph()
s = create_stat(g, StatType.DEGREES, bins=10)
self.assertListEqual(list(s.data), [99, 0, 0, 0, 0, 0, 0, 0, 0, 1])
self.assertEqual(s.bin_edges[-1], 99.)
g = ring_graph(directed=False)
s = create_stat(g, StatType.DEGREES, bins=10)
self.assertListEqual(list(s.data), [0, 0, 0, 0, 0, 0, 0, 0, 0, 100])
self.assertEqual(s.bin_edges[-1], 2.)
def test_degrees_dir(self):
g = full_graph(directed=True)
s = create_stat(g, StatType.DEGREES, bins=10)
self.assertListEqual(list(s.data), [0, 0, 0, 0, 0, 0, 0, 0, 0, 100])
self.assertEqual(s.bin_edges[-1], 198.)
g = in_star_graph()
s = create_stat(g, StatType.DEGREES, bins=10)
self.assertListEqual(list(s.data), [99, 0, 0, 0, 0, 0, 0, 0, 0, 1])
self.assertEqual(s.bin_edges[-1], 99.)
g = out_star_graph()
s = create_stat(g, StatType.DEGREES, bins=10)
self.assertListEqual(list(s.data), [99, 0, 0, 0, 0, 0, 0, 0, 0, 1])
self.assertEqual(s.bin_edges[-1], 99.)
g = ring_graph(directed=True)
s = create_stat(g, StatType.DEGREES, bins=10)
self.assertListEqual(list(s.data), [0, 0, 0, 0, 0, 0, 0, 0, 0, 100])
self.assertEqual(s.bin_edges[-1], 2.)
def test_in_degrees(self):
g = full_graph(directed=True)
s = create_stat(g, StatType.IN_DEGREES, bins=10)
self.assertListEqual(list(s.data), [0, 0, 0, 0, 0, 0, 0, 0, 0, 100])
self.assertEqual(s.bin_edges[-1], 99.)
g = in_star_graph()
s = create_stat(g, StatType.IN_DEGREES, bins=10)
self.assertListEqual(list(s.data), [99, 0, 0, 0, 0, 0, 0, 0, 0, 1])
self.assertEqual(s.bin_edges[-1], 99.)
g = out_star_graph()
s = create_stat(g, StatType.IN_DEGREES, bins=10)
self.assertListEqual(list(s.data), [1, 0, 0, 0, 0, 0, 0, 0, 0, 99])
self.assertEqual(s.bin_edges[-1], 1.)
g = ring_graph(directed=True)
s = create_stat(g, StatType.IN_DEGREES, bins=10)
self.assertListEqual(list(s.data), [0, 0, 0, 0, 0, 0, 0, 0, 0, 100])
self.assertEqual(s.bin_edges[-1], 1.)
def test_out_degrees(self):
g = full_graph(directed=True)
s = create_stat(g, StatType.OUT_DEGREES, bins=10)
self.assertListEqual(list(s.data), [0, 0, 0, 0, 0, 0, 0, 0, 0, 100])
self.assertEqual(s.bin_edges[-1], 99.)
g = in_star_graph()
s = create_stat(g, StatType.OUT_DEGREES, bins=10)
self.assertListEqual(list(s.data), [1, 0, 0, 0, 0, 0, 0, 0, 0, 99])
self.assertEqual(s.bin_edges[-1], 1.)
g = out_star_graph()
s = create_stat(g, StatType.OUT_DEGREES, bins=10)
self.assertListEqual(list(s.data), [99, 0, 0, 0, 0, 0, 0, 0, 0, 1])
self.assertEqual(s.bin_edges[-1], 99.)
g = ring_graph(directed=True)
s = create_stat(g, StatType.OUT_DEGREES, bins=10)
self.assertListEqual(list(s.data), [0, 0, 0, 0, 0, 0, 0, 0, 0, 100])
self.assertEqual(s.bin_edges[-1], 1.)
def test_u_page_ranks_undir(self):
g = full_graph(directed=False)
s = create_stat(g, StatType.U_PAGERANKS, bins=10)
self.assertListEqual(list(s.data), [0, 0, 0, 0, 0, 0, 0, 0, 0, 100])
self.assertAlmostEqual(s.bin_edges[-1], .01, places=2)
g = star_graph()
s = create_stat(g, StatType.U_PAGERANKS, bins=10)
self.assertListEqual(list(s.data), [99, 0, 0, 0, 0, 0, 0, 0, 0, 1])
self.assertAlmostEqual(s.bin_edges[-1], .46, places=2)
g = ring_graph(directed=False)
s = create_stat(g, StatType.U_PAGERANKS, bins=10)
self.assertListEqual(list(s.data), [0, 0, 0, 0, 0, 0, 0, 0, 0, 100])
self.assertAlmostEqual(s.bin_edges[-1], .01, places=2)
def test_u_page_ranks_dir(self):
g = full_graph(directed=True)
s = create_stat(g, StatType.U_PAGERANKS, bins=10)
self.assertListEqual(list(s.data), [0, 0, 0, 0, 0, 0, 0, 0, 0, 100])
self.assertAlmostEqual(s.bin_edges[-1], .01, places=2)
g = in_star_graph()
s = create_stat(g, StatType.U_PAGERANKS, bins=10)
self.assertListEqual(list(s.data), [99, 0, 0, 0, 0, 0, 0, 0, 0, 1])
self.assertAlmostEqual(s.bin_edges[-1], .46, places=2)
g = out_star_graph()
s = create_stat(g, StatType.U_PAGERANKS, bins=10)
self.assertListEqual(list(s.data), [99, 0, 0, 0, 0, 0, 0, 0, 0, 1])
self.assertAlmostEqual(s.bin_edges[-1], .46, places=2)
g = ring_graph(directed=True)
s = create_stat(g, StatType.U_PAGERANKS, bins=10)
self.assertListEqual(list(s.data), [0, 0, 0, 0, 0, 0, 0, 0, 0, 100])
self.assertAlmostEqual(s.bin_edges[-1], .01, places=2)
def test_d_page_ranks(self):
g = full_graph(directed=True)
s = create_stat(g, StatType.D_PAGERANKS, bins=10)
self.assertListEqual(list(s.data), [0, 0, 0, 0, 0, 0, 0, 0, 0, 100])
self.assertAlmostEqual(s.bin_edges[-1], .01, places=2)
g = in_star_graph()
s = create_stat(g, StatType.D_PAGERANKS, bins=10)
self.assertListEqual(list(s.data), [99, 0, 0, 0, 0, 0, 0, 0, 0, 1])
self.assertAlmostEqual(s.bin_edges[-1], .46, places=2)
g = out_star_graph()
s = create_stat(g, StatType.D_PAGERANKS, bins=10)
self.assertListEqual(list(s.data), [0, 0, 0, 0, 0, 0, 0, 0, 0, 100])
self.assertAlmostEqual(s.bin_edges[-1], .01, places=2)
g = ring_graph(directed=True)
s = create_stat(g, StatType.D_PAGERANKS, bins=10)
self.assertListEqual(list(s.data), [0, 0, 0, 0, 0, 0, 0, 0, 0, 100])
self.assertAlmostEqual(s.bin_edges[-1], .01, places=2)
def test_triad_census_undir(self):
g = full_graph(directed=False)
s = create_stat(g, StatType.TRIAD_CENSUS)
self.assertListEqual(list(s.data), [0, 0, 0, 161700])
g = star_graph()
s = create_stat(g, StatType.TRIAD_CENSUS)
self.assertListEqual(list(s.data), [0, 0, 4851, 0])
g = ring_graph(directed=False)
s = create_stat(g, StatType.TRIAD_CENSUS)
self.assertListEqual(list(s.data), [0, 0, 100, 0])
def test_triad_census_dir(self):
g = full_graph(directed=True)
s = create_stat(g, StatType.TRIAD_CENSUS)
self.assertListEqual(list(s.data), [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 161700])
g = in_star_graph()
s = create_stat(g, StatType.TRIAD_CENSUS)
self.assertListEqual(list(s.data), [0, 0, 4851, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0])
g = out_star_graph()
s = create_stat(g, StatType.TRIAD_CENSUS)
self.assertListEqual(list(s.data), [0, 0, 0, 0, 0, 0, 4851, 0, 0, 0, 0,
0, 0, 0, 0, 0])
g = ring_graph(directed=True)
s = create_stat(g, StatType.TRIAD_CENSUS)
self.assertListEqual(list(s.data), [0, 0, 0, 0, 100, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0])
def test_u_dists_undir(self):
g = full_graph(directed=False)
s = create_stat(g, StatType.U_DISTS, max_dist=10)
self.assertListEqual(list(s.data), [9900, 0, 0, 0, 0, 0, 0, 0, 0, 0])
self.assertAlmostEqual(s.bin_edges[-1], 10., places=2)
g = star_graph()
s = create_stat(g, StatType.U_DISTS, max_dist=10)
self.assertListEqual(list(s.data), [198, 9702, 0, 0, 0, 0, 0, 0, 0, 0])
self.assertAlmostEqual(s.bin_edges[-1], 10., places=2)
g = ring_graph(directed=False)
s = create_stat(g, StatType.U_DISTS, max_dist=10)
self.assertListEqual(list(s.data), [200, 200, 200, 200, 200, 200, 200,
200, 200, 8100])
self.assertAlmostEqual(s.bin_edges[-1], 10., places=2)
def test_d_dists_dir(self):
g = full_graph(directed=True)
s = create_stat(g, StatType.D_DISTS, max_dist=10)
self.assertListEqual(list(s.data), [9900, 0, 0, 0, 0, 0, 0, 0, 0, 0])
self.assertAlmostEqual(s.bin_edges[-1], 10., places=2)
g = in_star_graph()
s = create_stat(g, StatType.D_DISTS, max_dist=10)
self.assertListEqual(list(s.data), [99, 0, 0, 0, 0, 0, 0, 0, 0, 9801])
self.assertAlmostEqual(s.bin_edges[-1], 10., places=2)
g = out_star_graph()
s = create_stat(g, StatType.D_DISTS, max_dist=10)
self.assertListEqual(list(s.data), [99, 0, 0, 0, 0, 0, 0, 0, 0, 9801])
self.assertAlmostEqual(s.bin_edges[-1], 10., places=2)
g = ring_graph(directed=True)
s = create_stat(g, StatType.D_DISTS, max_dist=10)
self.assertListEqual(list(s.data), [100, 100, 100, 100, 100, 100, 100,
100, 100, 9000])
self.assertAlmostEqual(s.bin_edges[-1], 10., places=2)
def test_unknown_stat_throws_exception(self):
g = star_graph()
with self.assertRaises(ValueError):
create_stat(g, 999999999)
def test_unsupported_distance_throws_exception(self):
g = star_graph()
s = create_stat(g, StatType.TRIAD_CENSUS)
with self.assertRaises(NotImplementedError):
s.distance(s, DistanceType.EARTH_MOVER)
def test_normalized_manhattan_distance_undir(self):
g1 = full_graph(directed=False)
s1 = create_stat(g1, StatType.TRIAD_CENSUS)
g2 = star_graph()
s2 = create_stat(g2, StatType.TRIAD_CENSUS)
g3 = ring_graph(directed=False)
s3 = create_stat(g3, StatType.TRIAD_CENSUS)
g4 = random_graph_sparse(directed=False)
s4 = create_stat(g4, StatType.TRIAD_CENSUS)
g5 = random_graph_sparse(directed=False)
s5 = create_stat(g5, StatType.TRIAD_CENSUS)
self.assertEqual(
s1.distance(s1, DistanceType.NORMALIZED_MANHATTAN), 0.)
self.assertEqual(
s2.distance(s2, DistanceType.NORMALIZED_MANHATTAN), 0.)
self.assertEqual(
s3.distance(s3, DistanceType.NORMALIZED_MANHATTAN), 0.)
self.assertEqual(
s4.distance(s4, DistanceType.NORMALIZED_MANHATTAN), 0.)
self.assertEqual(
s5.distance(s5, DistanceType.NORMALIZED_MANHATTAN), 0.)
self.assertAlmostEqual(
s1.distance(s2, DistanceType.NORMALIZED_MANHATTAN), 2., places=2)
self.assertAlmostEqual(
s1.distance(s3, DistanceType.NORMALIZED_MANHATTAN), 2., places=2)
self.assertAlmostEqual(
s2.distance(
s3, DistanceType.NORMALIZED_MANHATTAN), 0.979, places=2)
self.assertLess(s4.distance(s5, DistanceType.NORMALIZED_MANHATTAN), 2.)
self.assertGreaterEqual(
s1.distance(s3, DistanceType.NORMALIZED_MANHATTAN),
s4.distance(s5, DistanceType.NORMALIZED_MANHATTAN))
def test_normalized_manhattan_distance_dir(self):
g1 = full_graph(directed=True)
s1 = create_stat(g1, StatType.TRIAD_CENSUS)
g2 = in_star_graph()
s2 = create_stat(g2, StatType.TRIAD_CENSUS)
g3 = out_star_graph()
s3 = create_stat(g3, StatType.TRIAD_CENSUS)
g4 = ring_graph(directed=True)
s4 = create_stat(g4, StatType.TRIAD_CENSUS)
g5 = random_graph_sparse(directed=True)
s5 = create_stat(g5, StatType.TRIAD_CENSUS)
g6 = random_graph_sparse(directed=True)
s6 = create_stat(g6, StatType.TRIAD_CENSUS)
self.assertEqual(
s1.distance(s1, DistanceType.NORMALIZED_MANHATTAN), 0.)
self.assertEqual(
s2.distance(s2, DistanceType.NORMALIZED_MANHATTAN), 0.)
self.assertEqual(
s3.distance(s3, DistanceType.NORMALIZED_MANHATTAN), 0.)
self.assertEqual(
s4.distance(s4, DistanceType.NORMALIZED_MANHATTAN), 0.)
self.assertEqual(
s5.distance(s5, DistanceType.NORMALIZED_MANHATTAN), 0.)
self.assertEqual(
s6.distance(s6, DistanceType.NORMALIZED_MANHATTAN), 0.)
self.assertAlmostEqual(
s1.distance(s2, DistanceType.NORMALIZED_MANHATTAN), 2., places=2)
self.assertAlmostEqual(
s1.distance(s3, DistanceType.NORMALIZED_MANHATTAN), 2., places=2)
self.assertAlmostEqual(
s1.distance(s4, DistanceType.NORMALIZED_MANHATTAN), 2., places=2)
self.assertAlmostEqual(
s2.distance(s3, DistanceType.NORMALIZED_MANHATTAN), 2., places=2)
self.assertAlmostEqual(
s2.distance(s4, DistanceType.NORMALIZED_MANHATTAN), 2., places=2)
self.assertAlmostEqual(
s3.distance(s4, DistanceType.NORMALIZED_MANHATTAN), 2., places=2)
self.assertLess(
s5.distance(s6, DistanceType.NORMALIZED_MANHATTAN), 10.)
def test_earth_mover_distance_undir(self):
g1 = full_graph(directed=False)
s1 = create_stat(g1, StatType.DEGREES, bins=10)
g2 = star_graph()
s2 = create_stat(g2, StatType.DEGREES, bins=10)
g3 = ring_graph(directed=False)
s3 = create_stat(g3, StatType.DEGREES, bins=10)
g4 = random_graph_sparse(directed=False)
s4 = create_stat(g4, StatType.DEGREES, bins=10)
g5 = random_graph_sparse(directed=False)
s5 = create_stat(g5, StatType.DEGREES, bins=10)
self.assertEqual(s1.distance(s1, DistanceType.EARTH_MOVER), 0.)
self.assertEqual(s2.distance(s2, DistanceType.EARTH_MOVER), 0.)
self.assertEqual(s3.distance(s3, DistanceType.EARTH_MOVER), 0.)
self.assertEqual(s4.distance(s4, DistanceType.EARTH_MOVER), 0.)
self.assertEqual(s5.distance(s5, DistanceType.EARTH_MOVER), 0.)
self.assertAlmostEqual(
s1.distance(s2, DistanceType.EARTH_MOVER), 8820.9, places=2)
self.assertAlmostEqual(
s1.distance(s3, DistanceType.EARTH_MOVER), 0., places=2)
self.assertAlmostEqual(
s2.distance(s3, DistanceType.EARTH_MOVER), 8820.9, places=2)
self.assertLess(
s4.distance(s5, DistanceType.EARTH_MOVER), 1000.)
self.assertLessEqual(
s1.distance(s3, DistanceType.EARTH_MOVER),
s4.distance(s5, DistanceType.EARTH_MOVER))
def test_earth_mover_distance_undir_rel(self):
g1 = full_graph(directed=False)
s1 = create_stat(g1, StatType.DEGREES, bins=10)
g2 = star_graph()
s2 = create_stat(g2, StatType.DEGREES, bins=10, ref_stat=s1)
g3 = ring_graph(directed=False)
s3 = create_stat(g3, StatType.DEGREES, bins=10, ref_stat=s1)
g4 = random_graph_sparse(directed=False)
s4 = create_stat(g4, StatType.DEGREES, bins=10, ref_stat=s1)
g5 = random_graph_sparse(directed=False)
s5 = create_stat(g5, StatType.DEGREES, bins=10, ref_stat=s1)
self.assertEqual(s1.distance(s1, DistanceType.EARTH_MOVER), 0.)
self.assertEqual(s2.distance(s2, DistanceType.EARTH_MOVER), 0.)
self.assertEqual(s3.distance(s3, DistanceType.EARTH_MOVER), 0.)
self.assertEqual(s4.distance(s4, DistanceType.EARTH_MOVER), 0.)
self.assertEqual(s5.distance(s5, DistanceType.EARTH_MOVER), 0.)
self.assertAlmostEqual(
s1.distance(s2, DistanceType.EARTH_MOVER), 8820.9, places=2)
self.assertAlmostEqual(
s1.distance(s3, DistanceType.EARTH_MOVER), 8910., places=2)
self.assertAlmostEqual(
s2.distance(s3, DistanceType.EARTH_MOVER), 89.1, places=2)
self.assertLess(s4.distance(s5, DistanceType.EARTH_MOVER), 1000.)
self.assertGreater(
s1.distance(s3, DistanceType.EARTH_MOVER),
s4.distance(s5, DistanceType.EARTH_MOVER))
def test_earth_mover_distance_dir(self):
g1 = full_graph(directed=True)
s1 = create_stat(g1, StatType.IN_DEGREES, bins=10)
g2 = in_star_graph()
s2 = create_stat(g2, StatType.IN_DEGREES, bins=10)
g3 = out_star_graph()
s3 = create_stat(g3, StatType.IN_DEGREES, bins=10)
g4 = ring_graph(directed=True)
s4 = create_stat(g4, StatType.IN_DEGREES, bins=10)
g5 = random_graph_sparse(directed=True)
s5 = create_stat(g5, StatType.IN_DEGREES, bins=10)
g6 = random_graph_sparse(directed=True)
s6 = create_stat(g6, StatType.IN_DEGREES, bins=10)
self.assertEqual(s1.distance(s1, DistanceType.EARTH_MOVER), 0.)
self.assertEqual(s2.distance(s2, DistanceType.EARTH_MOVER), 0.)
self.assertEqual(s3.distance(s3, DistanceType.EARTH_MOVER), 0.)
self.assertEqual(s4.distance(s4, DistanceType.EARTH_MOVER), 0.)
self.assertEqual(s5.distance(s5, DistanceType.EARTH_MOVER), 0.)
self.assertEqual(s6.distance(s6, DistanceType.EARTH_MOVER), 0.)
self.assertAlmostEqual(
s1.distance(s2, DistanceType.EARTH_MOVER), 8820.9, places=2)
self.assertAlmostEqual(
s1.distance(s3, DistanceType.EARTH_MOVER), 89.1, places=2)
self.assertAlmostEqual(
s1.distance(s4, DistanceType.EARTH_MOVER), 0., places=2)
self.assertAlmostEqual(
s2.distance(s3, DistanceType.EARTH_MOVER), 8731.8, places=2)
self.assertAlmostEqual(
s2.distance(s4, DistanceType.EARTH_MOVER), 8820.9, places=2)
self.assertAlmostEqual(
s3.distance(s4, DistanceType.EARTH_MOVER), 0.9, places=2)
self.assertLess(s5.distance(s6, DistanceType.EARTH_MOVER), 1000.)
def test_earth_mover_distance_dir_rel(self):
g1 = full_graph(directed=True)
s1 = create_stat(g1, StatType.IN_DEGREES, bins=10)
g2 = in_star_graph()
s2 = create_stat(g2, StatType.IN_DEGREES, bins=10, ref_stat=s1)
g3 = out_star_graph()
s3 = create_stat(g3, StatType.IN_DEGREES, bins=10, ref_stat=s1)
g4 = ring_graph(directed=True)
s4 = create_stat(g4, StatType.IN_DEGREES, bins=10, ref_stat=s1)
g5 = random_graph_sparse(directed=True)
s5 = create_stat(g5, StatType.IN_DEGREES, bins=10, ref_stat=s1)
g6 = random_graph_sparse(directed=True)
s6 = create_stat(g6, StatType.IN_DEGREES, bins=10, ref_stat=s1)
self.assertEqual(s1.distance(s1, DistanceType.EARTH_MOVER), 0.)
self.assertEqual(s2.distance(s2, DistanceType.EARTH_MOVER), 0.)
self.assertEqual(s3.distance(s3, DistanceType.EARTH_MOVER), 0.)
self.assertEqual(s4.distance(s4, DistanceType.EARTH_MOVER), 0.)
self.assertEqual(s5.distance(s5, DistanceType.EARTH_MOVER), 0.)
self.assertEqual(s6.distance(s6, DistanceType.EARTH_MOVER), 0.)
self.assertAlmostEqual(
s1.distance(s2, DistanceType.EARTH_MOVER), 8820.9, places=2)
self.assertAlmostEqual(
s1.distance(s3, DistanceType.EARTH_MOVER), 8910., places=2)
self.assertAlmostEqual(
s1.distance(s4, DistanceType.EARTH_MOVER), 8910., places=2)
self.assertAlmostEqual(
s2.distance(s3, DistanceType.EARTH_MOVER), 89.1, places=2)
self.assertAlmostEqual(
s2.distance(s4, DistanceType.EARTH_MOVER), 89.1, places=2)
self.assertAlmostEqual(
s3.distance(s4, DistanceType.EARTH_MOVER), 0., places=2)
self.assertLess(s5.distance(s6, DistanceType.EARTH_MOVER), 1000.)
if __name__ == '__main__':
unittest.main()
|
11484550
|
import argparse, sys
parser = argparse.ArgumentParser()
parser.add_argument('file', nargs='?',
type=argparse.FileType('r'), default=sys.stdin,
help="input file to be sorted")
parser.add_argument('-u', '--unique', action='store_true',
help="sort uniquely")
args = parser.parse_args()
ip_lines = args.file.readlines()
if args.unique:
ip_lines = set(ip_lines)
op_lines = sorted(ip_lines, key=lambda s: (s.rsplit('.', 1)[-1], s))
for line in op_lines:
print(line, end='')
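# Example behaviour (hypothetical input): given the lines "a.2", "b.1", "b.1"
# and the -u flag, the output is "b.1" followed by "a.2" -- lines are sorted by
# the text after the last '.' and ties are broken by the whole line.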
|
11484642
|
from __future__ import absolute_import
import warnings
from functools import wraps
__all__ = ('Undef', 'UndefType', 'is_list', 'is_allowed_attr', 'opt_checker', 'same', 'warn', )
class UndefType(object):
def __repr__(self):
return "Undef"
def __reduce__(self):
return "Undef"
Undef = UndefType()
class AutoName(object):
def __init__(self, prefix="_auto_"):
self.counter = 0
self.prefix = prefix
def __iter__(self):
return self
def __next__(self):
self.counter += 1
return "{0}{1}".format(self.prefix, self.counter)
next = __next__
auto_name = AutoName()
def is_allowed_attr(instance, key):
if key.startswith('__'):
return False
if key in dir(instance.__class__): # type(instance)?
# It's a descriptor, like 'sql' defined in slots
return False
return True
def is_list(value):
return isinstance(value, (list, tuple))
def same(name):
def f(self, *a, **kw):
return getattr(self, name)(*a, **kw)
return f
def opt_checker(k_list):
def new_deco(f):
@wraps(f)
def new_func(self, *args, **opt):
for k, v in list(opt.items()):
if k not in k_list:
raise TypeError("Not implemented option: {0}".format(k))
return f(self, *args, **opt)
return new_func
return new_deco
def warn(old, new, stacklevel=3):
warnings.warn("{0} is deprecated. Use {1} instead".format(old, new), PendingDeprecationWarning, stacklevel=stacklevel)
|
11484670
|
from instauto.api.client import ApiClient
from instauto.helpers.friendships import follow_user
client = ApiClient.initiate_from_file('.instauto.save')
follow_user(client, user_id="user_id")
# or
follow_user(client, username="instagram")
|
11484688
|
import logging
import unittest
from unittest import TestCase
from supplychainpy._helpers import _data_cleansing
from supplychainpy.model_demand import holts_trend_corrected_exponential_smoothing_forecast, \
simple_exponential_smoothing_forecast
from supplychainpy.model_demand import holts_trend_corrected_exponential_smoothing_forecast_from_file
from supplychainpy.model_demand import simple_exponential_smoothing_forecast_from_file
from supplychainpy.sample_data.config import ABS_FILE_PATH
logging.basicConfig(level=logging.WARNING, format='%(asctime)s - %(levelname)s - %(message)s')
class TestModelDemand(TestCase):
def setUp(self):
self._data_set = {'jan': 25, 'feb': 25, 'mar': 25, 'apr': 25, 'may': 25, 'jun': 25, 'jul': 75,
'aug': 75, 'sep': 75, 'oct': 75, 'nov': 75, 'dec': 75}
self._orders = [165, 171, 147, 143, 164, 160, 152, 150, 159, 169, 173, 203, 169, 166, 162, 147, 188, 161, 162,
169, 185, 188, 200, 229, 189, 218, 185, 199, 210, 193, 211, 208, 216, 218, 264, 304]
self.ses_components = [
'mape',
'regression',
'forecast',
'forecast_breakdown',
'alpha',
'statistics',
'standard_error',
'optimal_alpha'
]
with open(ABS_FILE_PATH['COMPLETE_CSV_XSM'], 'r') as raw_data:
self.item_list = _data_cleansing.clean_orders_data_row_csv(raw_data, length=12)
self.sku_id = []
for sku in self.item_list:
self.sku_id.append(sku.get("sku_id"))
def test_simple_exponential_smoothing_forecast_trend(self):
self.ses_forecast = [i for i in
simple_exponential_smoothing_forecast_from_file(
file_path=ABS_FILE_PATH['COMPLETE_CSV_XSM'],
file_type='csv',
length=12,
smoothing_level_constant=0.5,
optimise=True)]
self.keys = [list(i.keys()) for i in self.ses_forecast]
self.unpack_keys = [i[0] for i in self.keys]
for key in self.sku_id:
self.assertIn(key, self.unpack_keys)
trending = [list(i.values()) for i in self.ses_forecast]
unpack_trending = [i[0] for i in trending]
stats = []
for i in unpack_trending:
stats.append(i.get('statistics'))
for stat in stats:
if stat.get('trend'):
self.assertTrue(stat.get('pvalue') < 0.05)
def test_holts_trend_corrected_exponential_smoothing(self):
self.htces_forecast = [i for i in
holts_trend_corrected_exponential_smoothing_forecast_from_file(
file_path=ABS_FILE_PATH['COMPLETE_CSV_XSM'],
file_type='csv',
length=12,
alpha=0.5,
gamma=0.5,
smoothing_level_constant=0.5,
optimise=True)]
holts_trend_corrected_esf = holts_trend_corrected_exponential_smoothing_forecast(demand=self._orders,
alpha=0.5,
gamma=0.5,
forecast_length=6,
initial_period=18,
optimise=False)
self.assertEqual(281, round(holts_trend_corrected_esf.get('forecast')[0]))
self.assertEqual(308, round(holts_trend_corrected_esf.get('forecast')[1]))
self.assertEqual(334, round(holts_trend_corrected_esf.get('forecast')[2]))
self.keys = [list(i.keys()) for i in self.htces_forecast]
self.unpack_keys_htces = [i[0] for i in self.keys]
for key in self.sku_id:
self.assertIn(key, self.unpack_keys_htces)
for i in self.htces_forecast:
for k in i.values():
self.assertGreater(k.get('original_standard_error'), k.get('standard_error'))
def test_simple_exponential_smoothing_key(self):
ses = simple_exponential_smoothing_forecast(demand=self._orders, alpha=0.5, forecast_length=6, initial_period=18)
for k in ses:
self.assertIn(k, self.ses_components)
if __name__ == "__main__":
unittest.main()
|
11484715
|
import pandas as pd
import numpy as np
import operator
import xgboost as xgb
import lightgbm as lgb
from catboost import CatBoostClassifier, cv, Pool
import seaborn as sns
import matplotlib.pyplot as plt
import math
color = sns.color_palette()
class BesXGboost:
"""
XGBoost model. https://github.com/dmlc/xgboost/blob/master/doc/parameter.md
params = {
'silent': 1 if self.silent else 0,
'use_buffer': int(self.use_buffer),
'num_round': self.num_round,
'ntree_limit': self.ntree_limit,
'nthread': self.nthread,
'booster': self.booster,
'eta': self.eta,
'gamma': self.gamma,
'max_depth': self.max_depth,
'min_child_weight': self.min_child_weight,
'subsample': self.subsample,
'colsample_bytree': self.colsample_bytree,
'max_delta_step': self.max_delta_step,
'l': self.l,
'alpha': self.alpha,
'lambda_bias': self.lambda_bias,
'objective': self.objective,
'eval_metric': self.eval_metric,
'seed': self.seed,
'num_class': self.num_class,
}
xgb_params = {
'booster': 'gbtree',
'eta': .1,
'colsample_bytree': 0.8,
'subsample': 0.8,
'seed': 123,
'nthread': 3,
'max_depth': 6,
'min_child_weight': .1,
'objective': 'binary:logistic',
'eval_metric': 'auc',
'silent': 1
}
"""
def __init__(
self,
params,
metric="auc",
maximize=True,
verbose=True,
features=None,
model=None,
):
assert params["booster"] in ["gbtree", "gblinear"]
assert params["objective"] in [
"reg:linear",
"reg:logistic",
"binary:logistic",
"binary:logitraw",
"multi:softmax",
"multi:softprob",
"rank:pairwise",
]
assert params["eval_metric"] in [
None,
"rmse",
"mlogloss",
"logloss",
"error",
"merror",
"auc",
"ndcg",
"map",
"ndcg@n",
"map@n",
]
self.params = params
self.metric = metric
self.maximize = maximize
self.verbose = verbose
self.features = features
self.model = model
def fit(self, X_train, y_train):
self.features = X_train.columns
dtrain = xgb.DMatrix(data=X_train, label=y_train)
if self.verbose:
bst = xgb.cv(
self.params,
dtrain,
num_boost_round=1000,
nfold=3,
early_stopping_rounds=100,
verbose_eval=50,
)
else:
bst = xgb.cv(
self.params,
dtrain,
num_boost_round=1000,
nfold=3,
early_stopping_rounds=100,
)
if self.maximize:
best_rounds = int(
np.argmax(
bst["test-" + self.metric + "-mean"]
- bst["test-" + self.metric + "-std"]
)
* 1.5
)
else:
best_rounds = int(
np.argmin(
bst["test-" + self.metric + "-mean"]
+ bst["test-" + self.metric + "-std"]
)
* 1.5
)
if self.verbose:
print("Best Iteration: {}".format(best_rounds))
self.model = xgb.train(self.params, dtrain, best_rounds)
def predict(self, X_test):
dtest = xgb.DMatrix(data=X_test)
pred_prob = self.model.predict(dtest)
return pred_prob
def feature_importance(self):
outfile = open("xgb.fmap", "w")
i = 0
for feat in self.features:
outfile.write("{0}\t{1}\tq\n".format(i, feat))
i = i + 1
outfile.close()
importance = self.model.get_fscore(fmap="xgb.fmap")
importance = sorted(importance.items(), key=operator.itemgetter(1))
imp = pd.DataFrame(importance, columns=["feature", "fscore"])
imp = imp.sort_values(["fscore"], ascending=False)
# import xgbfir
# xgbfir.saveXgbFI(model, feature_names=X_train.columns, OutputXlsxFile='irisFI.xlsx',TopK=300)
return imp
def _optimize_single_param(self):
pass
@staticmethod
def find_best_params(kag):
"""
lambda [default=1]
L2 regularization term on weights (analogous to Ridge regression)
            This is used to handle the regularization part of XGBoost. Though many data scientists don't use it often, it should be explored to reduce overfitting.
alpha [default=0]
L1 regularization term on weight (analogous to Lasso regression)
Can be used in case of very high dimensionality so that the algorithm runs faster when implemented
scale_pos_weight [default=1]
A value greater than 0 should be used in case of high class imbalance as it helps in faster convergence.
"""
nthread = 3
lr = 0.3
bst = -math.inf if kag.maximize else math.inf
seed = 123
params = {
"booster": "gbtree",
"eta": lr,
"colsample_bytree": 0.8,
"subsample": 0.8,
"seed": seed,
"nthread": nthread,
"max_depth": 6,
"min_child_weight": 1,
"objective": "binary:logistic",
"eval_metric": kag.metric,
"lambda": 1,
"alpha": 0,
"gamma": 0,
}
for depth in [2, 4, 6, 8, 10]:
for mcw in [1, 3, 5, 7, 9]:
print(depth, mcw)
params["max_depth"] = depth
params["min_child_weight"] = mcw
met = kag.run_single_model_validation(
model_name="xgboost", params=params
)[0]
cond = met > bst if kag.maximize else met < bst
if cond:
bst = met
depth_bst = depth
mcw_bst = mcw
print("Best Depth: {}. Best MCW: {}".format(depth_bst, mcw_bst))
print("Score:", bst)
depth_bst_prev = depth_bst
mcw_bst_prev = mcw_bst
for depth in [depth_bst_prev - 1, depth_bst_prev, depth_bst_prev + 1]:
for mcw in [mcw_bst_prev - 1, mcw_bst_prev, mcw_bst_prev + 1]:
print(depth, mcw)
params["max_depth"] = depth
params["min_child_weight"] = mcw
met = kag.run_single_model_validation(
model_name="xgboost", params=params
)[0]
cond = met > bst if kag.maximize else met < bst
if cond:
bst = met
depth_bst = depth
mcw_bst = mcw
print("Best Depth: {}. Best MCW: {}".format(depth_bst, mcw_bst))
print("Score:", bst)
params["max_depth"] = depth_bst
params["min_child_weight"] = mcw_bst
colsample_bytree_bst = 0.8
subsample_bst = 0.8
for colsample_bytree in [0.4, 0.6, 0.8]:
for subsample in [0.4, 0.6, 0.8]:
print(colsample_bytree, subsample)
params["colsample_bytree"] = colsample_bytree
params["subsample"] = subsample
met = kag.run_single_model_validation(
model_name="xgboost", params=params
)[0]
cond = met > bst if kag.maximize else met < bst
if cond:
bst = met
colsample_bytree_bst = colsample_bytree
subsample_bst = subsample
print(
"Best Colsample: {}. Best Subsample: {}".format(
colsample_bytree_bst, subsample_bst
)
)
print("Score:", bst)
colsample_bytree_bst_prev = colsample_bytree_bst
subsample_bst_prev = subsample_bst
for colsample_bytree in [
colsample_bytree_bst_prev - 0.1,
colsample_bytree_bst_prev,
colsample_bytree_bst_prev + 0.1,
]:
for subsample in [
subsample_bst_prev - 0.1,
subsample_bst_prev,
subsample_bst_prev + 0.1,
]:
print(colsample_bytree, subsample)
params["colsample_bytree"] = colsample_bytree
params["subsample"] = subsample
met = kag.run_single_model_validation(
model_name="xgboost", params=params
)[0]
cond = met > bst if kag.maximize else met < bst
if cond:
bst = met
colsample_bytree_bst = colsample_bytree
subsample_bst = subsample
print(
"Best Colsample: {}. Best Subsample: {}".format(
colsample_bytree_bst, subsample_bst
)
)
print("Score:", bst)
params["colsample_bytree"] = colsample_bytree_bst
params["subsample"] = subsample_bst
# alpha_bst = 0
# lamb_bst = 1
# for alpha in [0, 0.1, 0.5, 1]:
# for lamb in [0, 0.1, 0.5, 1]:
# print(alpha, lamb)
# params['alpha'] = alpha
# params['lambda'] = lamb
#
# met = kag.run_single_model_validation(model_name='xgboost', params=params)[2]
# cond = met > bst if kag.maximize else met < bst
#
# if cond:
# bst = met
# alpha_bst = alpha
# lamb_bst = lamb
# print('Best Alpha: {}. Best Lambda: {}'.format(alpha_bst, lamb_bst))
# print('Score:', bst)
# params['alpha'] = alpha_bst
# params['lambda'] = lamb_bst
lamb_bst = 1
for lamb in [0, 0.1, 0.5, 1, 5, 10]:
print(lamb)
params["lambda"] = lamb
met = kag.run_single_model_validation(model_name="xgboost", params=params)[
0
]
cond = met > bst if kag.maximize else met < bst
if cond:
bst = met
lamb_bst = lamb
print("Best Lambda: {}".format(lamb_bst))
print("Score:", bst)
params["lambda"] = lamb_bst
gamma_bst = 0
for gamma in [0, 0.1, 0.5, 1, 10]:
print(gamma)
params["gamma"] = gamma
met = kag.run_single_model_validation(model_name="xgboost", params=params)[
0
]
cond = met > bst if kag.maximize else met < bst
if cond:
bst = met
gamma_bst = gamma
print("Best Gamma: {}".format(gamma_bst))
print("Score:", bst)
params["gamma"] = gamma_bst
print(params)
print("Score:", bst)
return params
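# A hypothetical usage sketch for BesXGboost (X_train, y_train and X_test are
# placeholder DataFrames, not defined in this module): fit the wrapper with the
# example xgb_params shown in the class docstring and score a hold-out set.
#
#     model = BesXGboost(params=xgb_params, metric="auc", maximize=True)
#     model.fit(X_train, y_train)
#     pred_prob = model.predict(X_test)
#     importances = model.feature_importance()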
class BesLightGBM:
"""
lgb_params = {
'boosting_type': 'gbdt',
'objective': 'binary',
'metric': 'auc',
'num_leaves': 31,
'learning_rate': 0.05,
'feature_fraction': 0.9,
'bagging_fraction': 0.8,
'bagging_freq': 5,
'verbose': 0
}
"""
def __init__(self, params, metric="auc", maximize=True, verbose=True, model=None):
self.params = params
self.metric = metric
self.maximize = maximize
self.verbose = verbose
self.model = model
def fit(self, X_train, y_train):
dtrain = lgb.Dataset(data=X_train, label=y_train)
if self.verbose:
bst = lgb.cv(
self.params,
dtrain,
num_boost_round=10000,
nfold=3,
early_stopping_rounds=50,
verbose_eval=50,
)
else:
bst = lgb.cv(
self.params,
dtrain,
num_boost_round=10000,
nfold=3,
early_stopping_rounds=50,
)
if self.maximize:
best_rounds = int(
np.argmax(
np.array(bst[self.metric + "-mean"])
- np.array(bst[self.metric + "-stdv"])
)
* 1.5
)
else:
best_rounds = int(
np.argmin(
np.array(bst[self.metric + "-mean"])
+ np.array(bst[self.metric + "-stdv"])
)
* 1.5
)
if self.verbose:
print("Best Iteration: {}".format(best_rounds))
self.model = lgb.train(self.params, dtrain, best_rounds)
def predict(self, X_test):
pred_prob = self.model.predict(X_test)
return pred_prob
def feature_importance(self):
lgb.plot_importance(self.model, max_num_features=10)
plt.show()
return self.model.feature_importance()
@staticmethod
def find_best_params(kag):
pass
class BesCatBoost:
"""
catboost_params = {
'iterations': 500,
'depth': 3,
'learning_rate': 0.1,
'eval_metric': 'AUC',
'random_seed': 42,
'logging_level': 'Verbose',
'l2_leaf_reg': 15.0,
'bagging_temperature': 0.75,
'allow_writing_files': False,
'metric_period': 50
}
"""
def __init__(self, params, metric="AUC", maximize=True, verbose=True, model=None):
self.params = params
self.metric = metric
self.maximize = maximize
self.verbose = verbose
self.model = model
def fit(self, X_train, y_train):
bst = cv(Pool(X_train, y_train), self.params)
best_rounds = int(bst["test-{}-mean".format(self.metric)].idxmax() * 1.5) + 1
print("Best Iteration: {}".format(best_rounds))
self.params["iterations"] = best_rounds
self.model = CatBoostClassifier(**self.params)
self.model.fit(X_train, y_train)
def predict(self, X_test):
pred_prob = self.model.predict_proba(X_test)[:, -1]
return pred_prob
def feature_importance(self):
pass
@staticmethod
def find_best_params(kag):
pass
|
11484784
|
import logging
import os
import sys
from skyline.initialization import (
check_skyline_preconditions,
initialize_skyline,
)
from skyline.error_printing import print_analysis_error
logger = logging.getLogger(__name__)
def register_command(subparsers):
parser = subparsers.add_parser(
"time",
help="Generate an iteration run time breakdown report.",
)
parser.add_argument(
"entry_point",
help="The entry point file in this project that contains the Skyline "
"provider functions.",
)
parser.add_argument(
"-o", "--output",
help="The location where the iteration run time breakdown report "
"should be stored.",
required=True,
)
parser.add_argument(
"--log-file",
help="The location of the log file.",
)
parser.add_argument(
"--debug", action="store_true", help="Log debug messages.")
parser.set_defaults(func=main)
def actual_main(args):
from skyline.analysis.session import AnalysisSession
from skyline.config import Config
from skyline.exceptions import AnalysisError
if os.path.exists(args.output):
print(
"ERROR: The specified output file already exists.",
file=sys.stderr,
)
sys.exit(1)
try:
session = AnalysisSession.new_from(
Config.project_root, Config.entry_point)
session.generate_run_time_breakdown_report(
save_report_to=args.output,
)
except AnalysisError as ex:
print_analysis_error(ex)
sys.exit(1)
def main(args):
check_skyline_preconditions(args)
initialize_skyline(args)
actual_main(args)
|
11484821
|
import os
import logging
try:
import jsonlogger
except ImportError:
# python-json-logger version 0.1.0 has changed the import structure
from pythonjsonlogger import jsonlogger
class TaskAdapter(logging.LoggerAdapter):
""" Enhance any log messages with extra information about the
current context of the scraper. """
def __init__(self, logger, scraper):
super(TaskAdapter, self).__init__(logger, {})
self.scraper = scraper
def process(self, msg, kwargs):
extra = kwargs.get('extra', {})
extra['scraperName'] = self.scraper.name
extra['scraperId'] = self.scraper.id
if hasattr(self.scraper.task_ctx, 'name'):
extra['taskName'] = self.scraper.task_ctx.name
if hasattr(self.scraper.task_ctx, 'id'):
extra['taskId'] = self.scraper.task_ctx.id
extra['scraperStartTime'] = self.scraper.start_time
kwargs['extra'] = extra
return (msg, kwargs)
def make_json_format():
supported_keys = ['asctime', 'created', 'filename', 'funcName',
'levelname', 'levelno', 'lineno', 'module',
'msecs', 'message', 'name', 'pathname',
'process', 'processName', 'relativeCreated',
'thread', 'threadName']
log_format = lambda x: ['%({0:s})'.format(i) for i in x]
return ' '.join(log_format(supported_keys))
def log_path(scraper):
""" Determine the file name for the JSON log. """
return os.path.join(scraper.config.data_path,
'%s.jsonlog' % scraper.name)
def make_logger(scraper):
""" Create two log handlers, one to output info-level ouput to the
console, the other to store all logging in a JSON file which will
later be used to generate reports. """
logger = logging.getLogger('')
logger.setLevel(logging.DEBUG)
requests_log = logging.getLogger("requests")
requests_log.setLevel(logging.WARNING)
json_handler = logging.FileHandler(log_path(scraper))
json_handler.setLevel(logging.DEBUG)
json_formatter = jsonlogger.JsonFormatter(make_json_format())
json_handler.setFormatter(json_formatter)
logger.addHandler(json_handler)
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.INFO)
fmt = '%(name)s [%(levelname)-8s]: %(message)s'
formatter = logging.Formatter(fmt)
console_handler.setFormatter(formatter)
logger.addHandler(console_handler)
logger = logging.getLogger(scraper.name)
logger = TaskAdapter(logger, scraper)
return logger
|
11484847
|
import cv2 as cv
import numpy as np
from matplotlib import pyplot as plt
def hough():
'''
    The Hough Transform is a popular technique for
    detecting any shape that can be represented in
    a mathematical form. It can detect the shape
    even if it is broken or slightly distorted.
    # Θ (theta) denotes the line angle below
    A line in image space can be expressed
    with two variables.
    For example:
        - In the Cartesian coordinate system:
            y = mx + c
        - In the polar coordinate system:
            x cosΘ + y sinΘ = r
Hough Transformation Algorithm
-------------------------------
1. Edge detection, e.g Using the Canny Edge
Detector.
2. Mapping of the edge points to the Hough
space and storage in an accumulator
3. Interpretation of the accumulator to
yield lines of infinite length. The
interpretation is done by thresholding
and possibly other constraints.
4. Conversion of infinite lines to finite lines.
OpenCV's Two kinds of Hough line Transforms
-------------------------------------------
o The Standard Hough Transform (HoughLines method)
o The Probabilistic Hough Line Transform (HoughLinesP)
'''
img = cv.imread("./img/sudoku.jpg")
img = cv.resize(img, (500, 500))
gray_img = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
edges = cv.Canny(gray_img, 50, 150, apertureSize=3)
lines = cv.HoughLines(edges, 1, np.pi/180, 200)
for line in lines:
rho, theta = line[0]
a = np.cos(theta)
b = np.sin(theta)
x0 = a * rho
y0 = b * rho
# x1 stores the rounded off value of (r* cosΘ - 1000 * sinΘ)
x1 = int(x0 + 1000 * (-b))
# y1 stores the rounded off value of (r * sinΘ + 1000 * cosΘ)
y1 = int(y0 + 1000 * (a))
# x2 stores the rounded off value of (r * cosΘ + 1000 * sinΘ)
x2 = int(x0 - 1000 * (-b))
# y2 stores the rounded off value of (r * sinΘ - 1000 * cosΘ)
y2 = int(y0 - 1000 * (a))
cv.line(img, (x1, y1), (x2, y2), (0, 0, 255), 2)
cv.imshow("Image", img)
cv.imshow("Canny", edges)
cv.waitKey()
cv.destroyAllWindows()
def houghP():
img = cv.imread("./img/road.jpg")
img = cv.resize(img, (500, 500))
gray_img = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
edges = cv.Canny(gray_img, 170, 200, apertureSize=3)
lines = cv.HoughLinesP(edges, 1, np.pi/180, 100,
minLineLength=100, maxLineGap=10)
for line in lines:
x1, y1, x2, y2 = line[0]
cv.line(img, (x1, y1), (x2, y2), (0, 255, 0), 2)
cv.imshow("Image", img)
cv.imshow("Canny", edges)
cv.waitKey()
cv.destroyAllWindows()
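if __name__ == "__main__":
    # Minimal demo sketch, assuming the sample images referenced above
    # ("./img/sudoku.jpg" and "./img/road.jpg") are available: run the
    # standard and the probabilistic Hough line detectors back to back.
    hough()
    houghP()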
|
11484885
|
from databases import Database
from sqlalchemy import MetaData
from sqlalchemy import event
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.pool import Pool
from feeder import settings
@event.listens_for(Pool, "checkout")
def _fk_pragma_on_connect(dbapi_con, con_record, con_proxy):
dbapi_con.execute("pragma foreign_keys=ON")
db = Database(f"sqlite:///{settings.database_path}")
Base = declarative_base()
metadata = MetaData()
|
11484911
|
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
class BenchmarkSoftsortBackwardsResultsParser:
r'''
Parses an individual results (i.e. log) file and stores the results.
'''
def __init__(self):
self.epochs = None
self.loss = None
self.spearmanr = None
self.total_time = None
self.time_per_epoch = None
self.oom = False
def parse(self, file_path, expected_length=None):
r'''
:param file_path: path to the results (i.e. log) file
'''
with open(file_path) as file:
for line in file:
line_tokens = line.replace(',', '').replace('\n', '').split(' ')
if line.startswith("Epochs"):
assert self.epochs is None
self.epochs = line_tokens[1]
elif line.startswith("Loss"):
assert self.loss is None
self.loss = line_tokens[1]
elif line.startswith("Spearmanr"):
assert self.spearmanr is None
self.spearmanr = line_tokens[1]
elif line.startswith("Total time"):
assert self.total_time is None
self.total_time = line_tokens[2]
elif line.startswith("Time per epoch"):
assert self.time_per_epoch is None
self.time_per_epoch = line_tokens[3]
if line.startswith("RuntimeError:"):
self.oom = True
return
if expected_length:
assert(int(self.epochs) == expected_length)
assert self.epochs is not None
assert self.loss is not None
assert self.spearmanr is not None
assert self.total_time is not None
assert self.time_per_epoch is not None
def get_epochs(self):
return self.epochs if not self.oom else '-'
def get_loss(self):
return self.loss if not self.oom else '-'
def get_spearmanr(self):
return self.spearmanr if not self.oom else '-'
def get_total_time(self):
return self.total_time if not self.oom else '-'
def get_time_per_epoch(self):
r'''
Returns the time per epoch in ms
'''
return ("%.5f" % (1000.0 * float(self.time_per_epoch))) if not self.oom else '-'
num_epochs = 100
frameworks = ['pytorch', 'pytorch', 'tf', 'tf']
devices = ['cpu', 'cuda', 'cpu', 'cuda']
ns_lists = \
[[str(i) for i in range(100, 4001, 100)]] * 4
methods = ['neuralsort', 'softsort']
res = dict()
for framework, device, ns in zip(frameworks, devices, ns_lists):
for n in ns:
for method in methods:
filename = "./benchmark_results_%s/N_%s_%s/N_%s_%s_DEVICE_%s.txt" %\
(framework, n, method, n, method, device)
print("Processing " + str(filename))
results_parser = BenchmarkSoftsortBackwardsResultsParser()
results_parser.parse(filename, expected_length=int(num_epochs))
epochs = results_parser.get_epochs()
loss = results_parser.get_loss()
spearmanr = results_parser.get_spearmanr()
total_time = results_parser.get_total_time()
time_per_epoch = results_parser.get_time_per_epoch()
res[(framework, device, n, method, 'epochs')] = epochs
res[(framework, device, n, method, 'loss')] = loss
res[(framework, device, n, method, 'spearmanr')] = spearmanr
res[(framework, device, n, method, 'total_time')] = total_time
res[(framework, device, n, method, 'time_per_epoch')] = time_per_epoch
def get_times_for_device_framework_and_method(device, framework, method):
times = []
for n in ns:
time = res[(framework, device, n, method, 'time_per_epoch')]
if time == '-':
break
times.append(time)
times = np.array(times)
return times
ns = np.array([str(i) for i in range(100, 4001, 100)])
for device in ['cpu', 'cuda']:
time_normalization = 1000 if device == 'cpu' else 1
for framework in ['pytorch', 'tf']:
times_neuralsort = get_times_for_device_framework_and_method(
device=device,
framework=framework,
method='neuralsort')
times_softsort = get_times_for_device_framework_and_method(
device=device,
framework=framework,
method='softsort')
fig1, ax1 = plt.subplots(figsize=(7, 5))
fontsize = 16
ax1.plot(ns[:len(times_neuralsort)].astype('int'), times_neuralsort.astype('float') / time_normalization,
color='red', linestyle='--')
ax1.plot(ns[:len(times_softsort)].astype('int'), times_softsort.astype('float') / time_normalization,
color='blue', linestyle='-')
plt.xticks(rotation=70, fontsize=fontsize)
ax1.set_xticks(ns.astype('int'))
ax1.get_xaxis().set_major_formatter(matplotlib.ticker.ScalarFormatter())
plt.xlabel(r'$n$', fontsize=fontsize)
plt.xticks(range(200, 4001, 200), fontsize=fontsize)
plt.yticks(fontsize=fontsize)
if device == 'cuda':
plt.ylim(0, 150)
plt.ylabel('time per epoch (ms)', fontsize=fontsize)
else:
plt.ylim(0, 30)
plt.ylabel('time per epoch (s)', fontsize=fontsize)
title = ""
if framework == 'pytorch':
title += 'Pytorch'
elif framework == 'tf':
title += 'TensorFlow'
if device == 'cuda':
title += ' GPU'
elif device == 'cpu':
title += ' CPU'
# plt.title(title) # Title should go in the figure latex caption
plt.legend(['NeuralSort', 'SoftSort'], fontsize=fontsize)
plt.tight_layout()
plt.savefig('images/' + title.replace(' ', '_') + '_softsort')
|
11484933
|
from __future__ import print_function
import subprocess
infile = open("alignment_export_2018_12_07.1.xml","rt")
xmllines = infile.readlines()
infile.close()
#tree = ET.parse(infilename)
#root = tree.getroot()
outfile =open("test.xml","wt")
iov = 0
firstline = xmllines.pop(0)
secondline = xmllines.pop(0)
lastline = xmllines.pop(-1)
for line in xmllines:
if "iov" not in line:
outfile.write(line)
else:
if "</iov>" in line:
outfile.write(line)
outfile.write(lastline)
outfile.close()
outfilename = "real_alignment_iov"+str(iov)+".xml"
output = subprocess.run("mv test.xml "+outfilename, shell=True, check=True)
outfile=open("test.xml","wt")
else:
outfile.write(firstline)
outfile.write(secondline)
outfile.write(line)
iovfirstfield = line.split()[1].split("=\"")[1].split(":")[0]
iov = int(iovfirstfield)
print (iov)
if not outfile.closed:
outfile.close()
|
11484960
|
from server.crud.base import CRUDBase
from server.db.models import ProductTypesTable
from server.schemas.product_type import ProductTypeCreate, ProductTypeUpdate
class CRUDProductType(CRUDBase[ProductTypesTable, ProductTypeCreate, ProductTypeUpdate]):
pass
product_type_crud = CRUDProductType(ProductTypesTable)
|
11484978
|
import datetime
import pandas as pd
from pkg_resources import resource_filename
sample = resource_filename(__name__, 'sample_data/survey_ibema_faxinal_Cartesian.csv')
from .coordinates import (
CartesianCoordinate,
CoordinateSystem,
)
from .geometry import (
Line2D,
Triangle,
TriangularMesh,
TriangularMesh as terrain_mesh,
)
from .survey import (
Survey,
Survey as load_survey,
)
from .plots import SurveyPlot as terrain_plots
def demo():
"""
A quick demo of this package. It loads a dataset available internally,
displays information about it and related graphs: 3D Scatter, Contour,
Histogram and 2D Scatter and finally the Volume Curves.
"""
print("Loading sample survey data...")
survey = load_survey(sample,
'Ibema Faxinal')
if survey is not None:
print('Sample survey data loaded successfully.')
print("---")
print("Sample survey information:")
print("Maximum elevation: {:.2f} meters.".format(
survey.data['elevation'].max()))
print("Elevation delta: {:.2f} meters.".format(
survey.data['elevation'].max() - survey.data['elevation'].min()))
print("Survey data count: {} points.".format(
survey.data['elevation'].count()))
print("---")
print("Generating survey plots...")
plots = terrain_plots(survey)
plots.scatter3d()
plots.contour()
plots.profile()
plots.mesh_plot()
print('---')
print('Calculating total terrain volume...')
mesh = terrain_mesh(survey.data)
volume = mesh.get_volume()
print("Total volume: {:.2f} cubic meters".format(volume))
print('---')
print("Generating volume curves...")
start = datetime.datetime.now()
print("Trianglular areas generated: {}".format(mesh.triangular_areas))
curves = mesh.get_volume_curves(step=1.0)
finish = datetime.datetime.now()
cputime = finish - start
print("Computing time: {}".format(cputime))
print("Volume curves DataFrame:")
print(curves)
mesh.plot_curves(curves)
else:
print('Error while loading sample survey data.')
|
11485012
|
from geotrek.common.forms import CommonForm
from .models import Report
class ReportForm(CommonForm):
geomfields = ["geom"]
class Meta:
fields = [
"geom",
"email",
"comment",
"activity",
"category",
"problem_magnitude",
"related_trek",
"status",
"locked",
"uid",
"origin"
]
model = Report
|
11485014
|
import warnings
from eventlet import Timeout # noqa
from eventlet import event
from eventlet import greenthread
from eventlet import queue
from greenlet import GreenletExit
from kombu import syn
blocking = syn.blocking
spawn = greenthread.spawn
Queue = queue.LightQueue
Event = event.Event
class Entry(object):
g = None
def __init__(self, interval, fun, *args, **kwargs):
self.interval = interval
self.fun = fun
self.args = args
self.kwargs = kwargs
self.cancelled = False
self._spawn()
def _spawn(self):
self.g = greenthread.spawn_after_local(self.interval, self)
self.g.link(self._exit)
def __call__(self):
try:
return blocking(self.fun, *self.args, **self.kwargs)
except Exception as exc:
warnings.warn('Periodic timer %r raised: %r' % (self.fun, exc))
finally:
self._spawn()
def _exit(self, g):
try:
self.g.wait()
except GreenletExit:
self.cancel()
def cancel(self):
if self.g and not self.cancelled:
self.g.cancel()
self.cancelled = True
def kill(self):
if self.g:
try:
self.g.kill()
except GreenletExit:
pass
def __repr__(self):
return '<Entry: %r (%s)>' % (
self.fun, 'cancelled' if self.cancelled else 'alive')
def timer(interval, fun, *args, **kwargs):
return Entry(interval, fun, *args, **kwargs)
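# Hedged usage sketch (the callback name below is hypothetical, not part of this module):
#     entry = timer(30.0, refresh_cache)   # run refresh_cache roughly every 30 seconds
#     entry.cancel()                       # stop rescheduling future runs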
|
11485028
|
import sys
import jax
import torch
import numpy as np
import torch.nn as nn
import jax.numpy as jnp
sys.path.append('..')
from train_tools import to_string
def jax2tor(array, tor2jax=False):
"""Convert between JAX arrays and Pytorch tensors"""
if tor2jax:
return jnp.array(array.numpy())
else:
return torch.tensor(np.array(array))
def strset2tor(strset, in_dim):
"""
Convert a StrSet object to Pytorch onehot tensor and str_lens vector
"""
index_mat = jax2tor(strset.index_mat).T.long()
str_lens = jax2tor(strset.str_lens).long()
index_tens = torch.zeros(index_mat.shape + (in_dim,))
index_tens = index_tens.scatter(2, index_mat[:, :, None], 1)
return index_tens, str_lens
class ProbLSTM(nn.Module):
def __init__(self, input_size, hidden_size, num_layers, bi_dir=False,
pos_enc=False, pe_dim=6, **kwargs):
"""
LSTM model trained to predict characters given surrounding context
Note that the model in question can take in StrSet data, to be
compatible with the framework I've already built up
Some parts based on github.com/salesforce/awd-lstm-lm
"""
super().__init__()
        # Define a base LSTM, initial hidden and cell states, and a decoder
self.bi_dir = bi_dir
self.num_dir = 2 if bi_dir else 1
assert 'dropout' in kwargs
assert pe_dim % 2 == 0 # Only dealing with even-dim encodings
self.iesize = input_size + (pe_dim if pos_enc else 0)
self.lstm = torch.nn.LSTM(input_size=self.iesize,
hidden_size=hidden_size,
num_layers=num_layers,
bidirectional=bi_dir, **kwargs)
self.init_hidden = nn.Parameter(torch.empty(num_layers*self.num_dir, 1, hidden_size))
self.init_cell = nn.Parameter(torch.empty(num_layers*self.num_dir, 1, hidden_size))
# Whether or not to use a concatenated positional encoding
assert not (bi_dir and pos_enc)
self.pos_enc = pos_enc
self.pe_dim = pe_dim
# Define a decoder for the model
self.decoder = nn.Sequential(nn.Linear(hidden_size*self.num_dir, input_size),
nn.Softmax(dim=2))
# Initialize our initial hidden and cell states randomly
initrange = 0.1
self.init_hidden.data.uniform_(-initrange, initrange)
self.init_cell.data.uniform_(-initrange, initrange)
# Hyperparameters to remember
self.hsize = hidden_size
self.isize = input_size
def forward(self, inp_strset, log_format=True):
"""
Obtain log probabilities for each character in StrSet of strings
Note that this assumes the inputs are prepended/appended with
BOS and EOS tokens, and the returned probabilities are only for
the chars after the initial BOS. The EOS probability isn't included
for the bidirectional model, but is for the unidirectional model
"""
# Convert input from StrSet to onehot data tensor and str_lens vec
in_dim = self.isize
inp_data, str_lens = strset2tor(inp_strset, in_dim)
max_len, batch_size, inp_data_dim = inp_data.shape
# Concatenate positional encoding to input strings
if self.pos_enc:
inp_data = self.concat_posenc(inp_data, str_lens)
# Evaluate LSTM on input sequence
init_h = self.init_hidden.expand(-1, batch_size, -1)
init_c = self.init_cell.expand(-1, batch_size, -1)
seq_out, _ = self.lstm.forward(inp_data, (init_h, init_c))
# Rearrange our output, depending on our directionality. This
# ensures the output at each site doesn't directly contain
# information from the site itself
if self.bi_dir:
seq_out = seq_out.view(max_len, batch_size, 2, self.hsize)
forw_seq, back_seq = seq_out[:, :, 0], seq_out[:, :, 1]
seq_out = torch.stack((forw_seq[:-2], back_seq[2:]), dim=2)
seq_out = seq_out.view(max_len-2, batch_size, 2*self.hsize)
else:
seq_out = seq_out[:-1]
# Use our decoder to convert hidden states to probabilities
all_probs = self.decoder(seq_out)
if log_format:
probs_obj, fill_val = torch.log(all_probs), 0.
else:
probs_obj, fill_val = all_probs, 1.
# Fill all entries beyond the end of sequence with 0 or 1
len_diff = 2 if self.bi_dir else 1
str_lens, max_len = str_lens - len_diff, max_len - len_diff
too_long = torch.arange(max_len)[:, None] >= str_lens[None, :]
probs_obj = torch.where(too_long[..., None].expand(-1, -1, in_dim),
torch.full_like(probs_obj, fill_val), probs_obj)
return probs_obj
def get_loss(self, inp_strset, rescale_probs=False):
"""
Evaluate sum of NLL loss for each character in inp_strset
This assumes input StrSet begins and ends with BOS and EOS chars,
with the prob of the BOS char not included in output loss. The
prob of the EOS char is only included for the unidirectional model
"""
# Get log probabilities over all characters
str_lens = jax2tor(inp_strset.str_lens)
indices = jax2tor(inp_strset.index_mat).T.long()
max_len, batch_size = indices.shape
all_probs = self.forward(inp_strset, log_format=False)
# Remove initial chars (and maybe final chars) for evaluation, and
# rescale probabilities if needed to disallow BOS/EOS chars
if self.bi_dir:
# str_lens = str_lens - 2
indices = indices[1:-1]
max_len = max_len - 2
if rescale_probs:
all_probs[:, :, -2:] = 0
all_probs = all_probs / torch.sum(all_probs, dim=2, keepdim=True)
else:
str_lens = str_lens - 1
indices = indices[1:]
max_len = max_len - 1
if rescale_probs:
cond_vec = torch.arange(self.isize) < (self.isize - 2)
new_probs = torch.where(cond_vec[None, None], all_probs,
torch.zeros(()))
new_probs = new_probs / torch.sum(new_probs, dim=2, keepdim=True)
cond_mat = (torch.arange(len(all_probs))[:, None] ==
(str_lens-1)[None].long())
all_probs = torch.where(cond_mat[..., None], all_probs, new_probs)
# Get log probabilities of correct chars
log_probs = torch.log(all_probs)
seq, batch = np.mgrid[0:max_len, 0:batch_size]
scores = log_probs[seq, batch, indices]
# Sum log probs over characters in each sentence
return -torch.sum(scores, dim=0)
@torch.no_grad()
def sample(self, rng_key, alphabet, samp_mode='variable', num_samps=1,
samp_len=None, ref_strset=None):
"""
Produce a list of strings from the LSTM model using sampling mode
For a unidirectional model, available modes are 'variable' and
'fixed', which respectively sample until the EOS token, or sample
a fixed length while forbidding sampling of the EOS token. The
'fixed' mode requires samp_len to be set, and both require num_samps.
        For a bidirectional model, the only available mode is 'completion',
        which takes in a StrSet corresponding to a list of strings and returns
        a larger list of strings containing the model's completion of each
        string when each of its characters has been masked out.
Args:
rng_key: JAX PRNGKey for randomization
alphabet: List of characters defining our sampling alphabet
samp_mode: String specifying particular means of sampling,
either 'variable', 'fixed', or 'completion'
num_samps: Number of samples to generate, required for
'variable' and 'fixed' modes
samp_len: Target length of samples, required for 'fixed', and
for 'variable' when positional encoding is used.
This length doesn't include BOS/EOS chars, which
aren't returned by the sampler
ref_strset: StrSet encoding the strings we wish to fill in,
required for 'completion'. All encoded strings are
assumed to begin/end with a BOS/EOS character
Returns:
samples: List of strings without BOS/EOS chars, which:
* For 'variable', list of variable-length strings.
* For 'fixed', list of fixed-length strings.
* For 'completion', long list of strings where
each string in ref_strset is repeated many times
with each character sampled from model using
surrounding characters as context
"""
if self.bi_dir:
            # Check input, get strings with BOS/EOS chars stripped
assert samp_mode == 'completion'
assert ref_strset is not None
# TODO: Deal with BOS/EOS issues
ref_strs = [s[1:-1] for s in to_string(ref_strset, alphabet)]
# ref_strs = to_string(ref_strset, alphabet)
char_probs = self.forward(ref_strset, log_format=False)
assert len(char_probs.shape) == 3
# Condition on no BOS or EOS char
char_probs[:, :, -2:] = 0
char_probs = char_probs / torch.sum(char_probs, 2, keepdim=True)
cum_probs = torch.cumsum(char_probs, axis=2)
# TODO: Account for variable length ref_strset
num_strs, str_len = ref_strset.index_mat.shape
str_len = str_len - 2 # Don't make predictions for BOS or EOS
# Sample chars to fill in each space of each sequence in ref_strset
rand_floats = jax.random.uniform(rng_key, shape=(str_len, num_strs))
# rand_floats = jax2tor(rand_floats)
samp_ints = np.argmax(jax2tor(cum_probs, tor2jax=True) > rand_floats[..., None], axis=2).T
samp_ints = jax2tor(samp_ints)
samp_chars = [[alphabet[i] for i in seq] for seq in samp_ints]
# TODO: Fix to ensure we return big list of filled-in strings
samples = [s[:i] + c + s[i+1:] for s, cs in zip(ref_strs, samp_chars)
for i, c in enumerate(cs)]
# Check that we're not making predictions for BOS and EOS
assert all(len(s) == str_len for s in samples)
return samples
else:
# Unpack args and state, build character lookup
assert num_samps is not None
assert samp_mode in ['variable', 'fixed']
fixed = samp_mode == 'fixed'
if fixed: assert samp_len >= 0
char2ind = {c: i for i, c in enumerate(alphabet)}
bos, eos = [alphabet[i] for i in (-2, -1)]
h = self.init_hidden.expand(-1, num_samps, -1)
c = self.init_cell.expand(-1, num_samps, -1)
pos_enc = self.pos_enc
if pos_enc: assert samp_len is not None
hidden = (h, c)
# Function which determines if we're finished sampling
if fixed:
stop_cond = lambda samps: all(len(s) == samp_len + 2
for s in samps)
else:
stop_cond = lambda samps: all(eos in s for s in samps)
# Sample characters one by one until stopping condition is hit
n = 0
samples = ["^"] * num_samps
while not stop_cond(samples):
# Build onehot LSTM input from last chars of samples
last_chars = torch.tensor([char2ind[s[-1]] for s in samples])
char_vecs = torch.zeros(num_samps, self.isize)
char_vecs.scatter_(1, last_chars[:, None], 1)
char_vecs = char_vecs[None] # LSTM needs length index
# Add on positional encoding
if pos_enc:
char_vecs = self.concat_posenc(char_vecs, samp_len, n)
# Call LSTM on our single character, get char probs
h_out, hidden = self.lstm.forward(char_vecs, hidden)
char_probs = self.decoder(h_out)[0]
# Condition on no BOS char
char_probs[:, -2] = 0
char_probs = char_probs / torch.sum(
char_probs, 1, keepdim=True)
# For fixed mode, either add a EOS or condition on no EOS
if fixed:
if n == samp_len:
samples = [s + eos for s in samples]
continue
else:
char_probs[:, -1] = 0
char_probs = char_probs / torch.sum(char_probs,
1, keepdim=True)
n += 1
cum_probs = np.cumsum(char_probs.numpy(), axis=1)
# Sample a new char for each sample string
rng_key, key = jax.random.split(rng_key)
rand_floats = jax.random.uniform(key, shape=(num_samps,))
samp_ints = np.argmax(cum_probs > rand_floats[:, None], axis=1)
samp_chars = [alphabet[i] for i in samp_ints]
samples = [s + alphabet[i] for s, i in zip(samples, samp_ints)]
# Remove initial BOS and trim to first EOS, check samples
if fixed:
assert all(s[0] == bos and s[-1] == eos for s in samples)
samples = [s[1:-1] for s in samples]
assert set(len(s) for s in samples) == {samp_len}
else:
assert all(s[0] == bos for s in samples)
samples = [s.split(eos)[0][1:] for s in samples]
assert all(bos not in s and eos not in s for s in samples)
return samples
def concat_posenc(self, inp_data, str_lens, start=0):
"""
Concatenate data with positional encoding of desired dimension,
starting from the position `start`
"""
pe_dim = self.pe_dim
half_dim = pe_dim // 2
assert 2 * half_dim == pe_dim
max_len, batch_size, inp_dim = inp_data.shape
if not isinstance(str_lens, torch.Tensor):
str_lens = torch.tensor(str_lens).expand(batch_size)
# Holds the position of each input
counter = torch.arange(start, start+max_len).float()
counter = counter[:, None, None].expand(-1, batch_size, half_dim)
# Frequencies associated with each entry of positional encoding
# Using similar encoding as the one in Attention is All You Need
freqs = torch.arange(half_dim).float() * (-2 / pe_dim)
str_lens = str_lens[None, :, None].float()
freqs = torch.pow(str_lens, freqs[None, None])
trig_args = (3.14159 / 2) * counter * freqs
# The positional encoding itself
enc_data = torch.zeros(max_len, batch_size, pe_dim)
enc_data[..., 0::2] = torch.sin(trig_args)
enc_data[..., 1::2] = torch.cos(trig_args)
# Return input concatenated with positional encodings
return torch.cat((inp_data, enc_data), dim=2)
def eval(self):
self.lstm = self.lstm.eval()
return self
def train(self):
self.lstm = self.lstm.train()
return self
# if __name__ == '__main__':
# input_dim = 1
# batch_dim = 1
# bond_dim = 2
# pe_dim = 6
# max_len = 10
# my_lstm = ProbLSTM(input_dim, bond_dim, 1,
# bi_dir=False, dropout=0,
# pos_enc=True, pe_dim=pe_dim)
# inp_data = torch.zeros(max_len, batch_dim, input_dim)
# str_lens = torch.full((batch_dim,), max_len)
# print(my_lstm.concat_posenc(inp_data, str_lens, start=0))
|
11485073
|
import cv2
import numpy as np
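# Build an 800x800 image holding an 8x8 black-and-white checkerboard (100-pixel squares)
# and save it to result_1.png.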
img_arr = np.arange(0, 640000, 1, np.uint8)
img_arr = np.reshape(img_arr, (800, 800))
height, width = img_arr.shape
for i in range(height):
for j in range(width):
if ((i//(width/8))%2+(j//(width/8))%2)%2==0:
img_arr[i][j]=255
else:
img_arr[i][j]=0
cv2.imwrite('result_1.png', img_arr)
|
11485110
|
import sqlite3, sys
from PyQt5.QtWidgets import QDialog, QApplication
from sqlite3 import Error
from demoDeleteUser import *
class MyForm(QDialog):
def __init__(self):
super().__init__()
self.ui = Ui_Dialog()
self.ui.setupUi(self)
self.ui.pushButtonDelete.clicked.connect(self.DeleteUser)
self.ui.pushButtonYes.clicked.connect(self.ConfirmDelete)
self.ui.labelSure.hide()
self.ui.pushButtonYes.hide()
self.ui.pushButtonNo.hide()
self.show()
def DeleteUser(self):
selectStatement="SELECT * FROM Users where EmailAddress like '"+self.ui.lineEditEmailAddress.text()+"' and Password like '"+ self.ui.lineEditPassword.text()+"'"
try:
conn = sqlite3.connect("ECommerce.db")
cur = conn.cursor()
cur.execute(selectStatement)
row = cur.fetchone()
if row==None:
self.ui.labelSure.hide()
self.ui.pushButtonYes.hide()
self.ui.pushButtonNo.hide()
self.ui.labelResponse.setText("Sorry, Incorrect email address or password ")
else:
self.ui.labelSure.show()
self.ui.pushButtonYes.show()
self.ui.pushButtonNo.show()
self.ui.labelResponse.setText("")
except Error as e:
self.ui.labelResponse.setText("Error in accessing user account")
finally:
conn.close()
def ConfirmDelete(self):
deleteStatement="DELETE FROM Users where EmailAddress like '"+self.ui.lineEditEmailAddress.text()+"' and Password like '"+ self.ui.lineEditPassword.text()+"'"
try:
conn = sqlite3.connect("ECommerce.db")
cur = conn.cursor()
with conn:
cur.execute(deleteStatement)
self.ui.labelResponse.setText("User successfully deleted")
except Error as e:
self.ui.labelResponse.setText("Error in deleting user account")
finally:
conn.close()
if __name__=="__main__":
app = QApplication(sys.argv)
w = MyForm()
w.show()
sys.exit(app.exec_())
|
11485111
|
from abc import ABC, abstractmethod
from typing import Dict, Optional
from great_expectations.core.expectation_configuration import ExpectationConfiguration
from great_expectations.rule_based_profiler.domain_builder import Domain
from great_expectations.rule_based_profiler.parameter_builder import ParameterContainer
class ExpectationConfigurationBuilder(ABC):
def build_expectation_configuration(
self,
domain: Domain,
variables: Optional[ParameterContainer] = None,
parameters: Optional[Dict[str, ParameterContainer]] = None,
) -> ExpectationConfiguration:
return self._build_expectation_configuration(
domain=domain, variables=variables, parameters=parameters
)
@abstractmethod
def _build_expectation_configuration(
self,
domain: Domain,
variables: Optional[ParameterContainer] = None,
parameters: Optional[Dict[str, ParameterContainer]] = None,
) -> ExpectationConfiguration:
pass
|
11485186
|
def aliquot_sum(input_num: int) -> int:
"""
    Finds the aliquot sum of the input integer, where the aliquot sum of a
    number n is defined as the sum of all natural numbers less than n that
    divide n evenly. For example, the aliquot sum of 15 is 1 + 3 + 5 = 9.
    This is a simple O(n) implementation.
    param input_num:
        a positive integer whose aliquot sum is to be found
    return:
        the aliquot sum of input_num, if input_num is positive.
        Otherwise, raise a ValueError
    Wikipedia explanation: https://en.wikipedia.org/wiki/Aliquot_sum
>>> aliquot_sum(19)
1
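    >>> aliquot_sum(15)
    9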
"""
    if not isinstance(input_num, int):
        raise ValueError("input must be an integer")
    if input_num <= 0:
        raise ValueError("input must be positive")
return sum(
divisor for divisor in range(1, input_num // 2 + 1) if input_num % divisor == 0
)
if __name__ == "__main__":
import doctest
doctest.testmod()
|
11485190
|
import unittest, json
from backend.kubernetes.k8sclient import KubeClient
class KubeClientTestCase(unittest.TestCase):
def setUp(self):
self.client = KubeClient("http://192.168.0.10:8080/api/v1/")
def tearDown(self):
self.client = None
def test_add_slash(self):
url = "http://192.168.0.10:8080"
self.assertEqual(KubeClient.add_slash(url), "http://192.168.0.10:8080/")
def test_create_instance(self):
url = "http://192.168.0.10:8080"
client = KubeClient(url)
self.assertEqual(client.base_url, "http://192.168.0.10:8080/")
def test_send_request(self):
res = self.client.send_request("get", "namespaces",
labels={'a': 1, 'name': 'wangtao'})
self.assertEqual(isinstance(res, dict), True)
def test_list_namespaces(self):
namespaces = self.client.list_namespces()
print(namespaces)
self.assertEqual(True, True)
def test_list_nodes(self):
nodes = self.client.list_nodes()
print(nodes)
self.assertEqual(True, True)
def test_create_namespace(self):
self.client.create_namespace('user')
def test_delete_namespace(self):
self.client.delete_namespace('abcd')
def test_list_controllers(self):
controllers = self.client.list_controllers('user')
print(controllers)
def test_create_controller_1(self):
image_name = '192.168.0.15:5000/user/nginx:1.9.9'
res = self.client.create_controller('user', 'test-nginx', image_name,
replicas=2, tcp_ports={"http": 80})
print(res)
def test_create_controller_2(self):
image_name = '192.168.0.15:5000/admin/ubuntu:14.04'
self.client.create_controller('test-space', 'test-nginx', image_name,
replicas=1,
commands=['sleep', '3600'],
envs={"MYSQL": "192.168.0.100"}
)
def test_create_controller_3(self):
image_name = '192.168.0.15:5000/admin/nginx:1.9.9'
self.client.create_controller('test-space', 'test-nginx', image_name,
replicas=1, tcp_ports={"http": 80, "https": 443})
def test_create_controller_volume(self):
image_name = '192.168.0.15:5000/user/nginx:1.9.9'
self.client.create_controller('user', 'test-nginx', image_name,
cpu="100m", memory="64Mi", replicas=1, tcp_ports={"http": 80},
volumes={"project0-volume0": "/var/www/html"})
def test_delete_controller(self):
self.client.delete_controller('test-space', 'test-nginx')
def test_list_services(self):
services = self.client.list_services('test-space')
print(services)
def test_create_service_internal(self):
res = self.client.create_service('user', 'test-nginx',
tcp_ports={"http": 80},
is_public=False
)
print(res)
def test_create_service_external(self):
res = self.client.create_service('test-space', 'test-nginx',
tcp_ports={"http": 80},
is_public=True
)
print(res)
def test_create_service_session(self):
self.client.create_service('test-space', 'nginx',
tcp_ports={"http": 80},
is_public=True,
session_affinity=True
)
def test_delete_service(self):
self.client.delete_service('test-space', 'nginx')
def test_get_service_details(self):
res = self.client.get_service_details('test-space', 'test-nginx')
print(res)
def test_create_persistentvolume(self):
res = self.client.create_persistentvolume('default', 'project0-volume0', '10Mi',
'/hummer/user/project0/volume0', '192.168.0.15')
print(res)
def test_delete_persistentvolume(self):
res = self.client.delete_persistentvolume('default', 'project0-volume0')
print(res)
def test_create_persistentvolumeclaim(self):
res = self.client.create_persistentvolumeclaim('default', 'project0-volume0',
'10Mi')
print(res)
def test_delete_persistentvolumeclaim(self):
res = self.client.delete_persistentvolumeclaim('default', 'project0-volume0')
print(res)
def test_list_pods(self):
res = self.client.list_pods('user', label="app=project0-nginx-test")
print(res)
def test_get_logs_of_pod(self):
res = self.client.get_logs_of_pod('user', 'project0-nginx-test-3uhej', 20)
lines = res.split('\n')
print(lines)
def test_create_autoscaler(self):
beta_client = KubeClient("http://192.168.0.10:8080/apis/extensions/v1beta1/")
res = beta_client.create_autoscaler('user', 'project0-nginx-test', 1, 5, 50)
print(res)
def test_delete_autoscaler(self):
beta_client = KubeClient("http://192.168.0.10:8080/apis/extensions/v1beta1/")
res = beta_client.delete_autoscaler('user', 'project0-nginx-test')
print(res)
def test_list_host_ips(self):
hosts = self.client.list_host_ips('user', "app=project0-2048")
print(hosts)
|
11485195
|
import os
import numpy as np
from agent import BaseAgent
try:
import tensorflow as tf
except ImportError:
tf = None
from keras.backend import tensorflow_backend, image_data_format
from keras.optimizers import Adam
from rl.callbacks import ModelIntervalCheckpoint
from rl.core import Processor
from rl.memory import SequentialMemory
from rl.policy import LinearAnnealedPolicy
from rl.policy import BiasedEpsGreedyQPolicy
from rl.agents.dqn import DQNAgent
from malmo_rl.model import Minecraft, Minecraft_LSTM
class QLearner(BaseAgent):
def __init__(self, name, env, grayscale, width, height):
super(QLearner, self).__init__(name=name, env=env)
self.nb_actions = env.available_actions
self.abs_max_reward = env.abs_max_reward
self.mission_name = env.mission_name
self.grayscale = grayscale
self.width = width
self.height = height
self.recurrent = False # Use LSTM
self.batch_size = 32
self.window_length = 4
if tf:
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
sess = tf.Session(config=config)
tensorflow_backend.set_session(session=sess)
if not self.recurrent:
self.model = Minecraft(self.window_length, self.grayscale, self.width, self.height, self.nb_actions)
else:
self.model = Minecraft_LSTM(self.window_length, self.grayscale, self.width, self.height, self.nb_actions)
# Replay memory
self.memory = SequentialMemory(limit=1000000, window_length=self.window_length)
'''
Select a policy. We use eps-greedy action selection, which means that a random action is selected
with probability eps. We can specify a custom biased probability distribution p for selecting random action,
so that the agent is more likely to choose some actions when exploring over others. For example,
if the possible actions are [move forward, move backward, turn right, turn left] and p = [0.6, 0.0, 0.2, 0.2]
the agent will go 60% forward, 0% backward, 20% left and 20% right when exploring.
If p == None, the default uniform distribution is used.
'''
self.policy = LinearAnnealedPolicy(BiasedEpsGreedyQPolicy(nb_actions=self.nb_actions, p=None),
attr='eps', value_max=1., value_min=.05, value_test=.005, nb_steps=1000000)
self.processor = MalmoProcessor(self.grayscale, self.window_length, self.recurrent, self.abs_max_reward)
self.agent = DQNAgent(model=self.model, nb_actions=self.nb_actions, policy=self.policy, test_policy=self.policy,
memory=self.memory, batch_size=self.batch_size, processor=self.processor,
nb_steps_warmup=50000, gamma=.99, target_model_update=10000, enable_double_dqn=True,
enable_dueling_network=True)
self.agent.compile(Adam(lr=.00025), metrics=['mae'])
def fit(self, env, nb_steps):
weights_dir = 'weights/{}'.format(self.mission_name)
if not os.path.exists(weights_dir):
os.makedirs(weights_dir)
weights_path = os.path.join(weights_dir, '{}'.format(self.name))
callbacks = [ModelIntervalCheckpoint(weights_path, interval=10000, verbose=1)]
self.agent.fit(env, nb_steps, action_repetition=4, callbacks=callbacks, verbose=1, log_interval=10000,
test_interval=10000, test_nb_episodes=10, test_action_repetition=4, test_visualize=False)
def test(self, env, nb_episodes):
self.agent.test(env, nb_episodes, action_repetition=4, verbose=1, visualize=False)
def save(self, out_dir):
self.agent.save_weights(out_dir, overwrite=True)
def load(self, out_dir):
self.agent.load_weights(out_dir)
class MalmoProcessor(Processor):
def __init__(self, grayscale, window_length, recurrent, abs_max_reward):
self.grayscale = grayscale
self.window_length = window_length
self.recurrent = recurrent
self.abs_max_reward = abs_max_reward
def process_state_batch(self, batch):
if not self.grayscale:
if not self.recurrent:
states = []
# Get each state in the batch
for i in range(self.window_length):
states.append(batch[:, i, :, :, :])
# Concatenate states in the batch along the channel axis
if image_data_format() == 'channels_last':
processed_batch = np.concatenate(states, axis=3)
else:
processed_batch = np.concatenate(states, axis=3).transpose((0, 3, 1, 2)) # Channels-first order
else:
if image_data_format() == 'channels_last':
processed_batch = batch
else:
processed_batch = batch.transpose((0, 1, 4, 2, 3)) # Channels-first order
else:
if not self.recurrent:
if image_data_format() == 'channels_last':
processed_batch = batch.transpose((0, 2, 3, 1))
else:
processed_batch = batch
else:
if image_data_format() == 'channels_last':
processed_batch = np.expand_dims(batch, axis=4)
else:
processed_batch = np.expand_dims(batch, axis=2)
processed_batch = processed_batch.astype('float32') / 255.
return processed_batch
def process_reward(self, reward):
if self.abs_max_reward:
return reward / self.abs_max_reward
else:
return reward
|
11485209
|
from __future__ import absolute_import
# flake8: noqa
# import apis into api package
from proton_api.api.annuities_api import AnnuitiesApi
from proton_api.api.business_financial_management_api import BusinessFinancialManagementApi
from proton_api.api.cards_api import CardsApi
from proton_api.api.financial_health_api import FinancialHealthApi
from proton_api.api.financial_planning_api import FinancialPlanningApi
from proton_api.api.goals_api import GoalsApi
from proton_api.api.life_insurance_api import LifeInsuranceApi
from proton_api.api.personal_financial_management_api import PersonalFinancialManagementApi
from proton_api.api.portfolio_construction_api import PortfolioConstructionApi
from proton_api.api.portfolio_management_api import PortfolioManagementApi
from proton_api.api.risk_scoring_api import RiskScoringApi
from proton_api.api.simulations_api import SimulationsApi
from proton_api.api.util_api import UtilApi
|
11485217
|
import torch
import torch.nn.functional as F
import math
def softmax_focalloss(y_pred, y_true, ignore_index=255, gamma=2.0, normalize=False):
"""
Args:
y_pred: [N, #class, H, W]
        y_true: [N, H, W], integer class labels from 0 to #class - 1 (or ignore_index)
        gamma: scalar
    Returns:
        a scalar loss tensor
"""
losses = F.cross_entropy(y_pred, y_true, ignore_index=ignore_index, reduction='none')
with torch.no_grad():
p = y_pred.softmax(dim=1)
modulating_factor = (1 - p).pow(gamma)
valid_mask = ~ y_true.eq(ignore_index)
masked_y_true = torch.where(valid_mask, y_true, torch.zeros_like(y_true))
modulating_factor = torch.gather(modulating_factor, dim=1, index=masked_y_true.unsqueeze(dim=1)).squeeze_(dim=1)
scale = 1.
if normalize:
scale = losses.sum() / (losses * modulating_factor).sum()
losses = scale * (losses * modulating_factor).sum() / (valid_mask.sum() + p.size(0))
return losses
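# Shape sketch (hypothetical tensors): y_pred of shape [2, 4, 8, 8] (4 classes) together with
# y_true of shape [2, 8, 8] holding integer labels in [0, 4) yields a single scalar loss.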
def cosine_annealing(lower_bound, upper_bound, _t, _t_max):
return upper_bound + 0.5 * (lower_bound - upper_bound) * (math.cos(math.pi * _t / _t_max) + 1)
def poly_annealing(lower_bound, upper_bound, _t, _t_max):
factor = (1 - _t / _t_max) ** 0.9
return upper_bound + factor * (lower_bound - upper_bound)
def linear_annealing(lower_bound, upper_bound, _t, _t_max):
factor = 1 - _t / _t_max
return upper_bound + factor * (lower_bound - upper_bound)
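# All three schedules move from lower_bound at _t == 0 to upper_bound at _t == _t_max,
# e.g. cosine_annealing(0.0, 1.0, 0, 100) == 0.0 and cosine_annealing(0.0, 1.0, 100, 100) == 1.0.
# annealing_softmax_focalloss below uses this to fade from plain cross-entropy (scale 1)
# into the focal scaling over the first t_max steps.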
def annealing_softmax_focalloss(y_pred, y_true, t, t_max, ignore_index=255, gamma=2.0,
annealing_function=cosine_annealing):
losses = F.cross_entropy(y_pred, y_true, ignore_index=ignore_index, reduction='none')
with torch.no_grad():
p = y_pred.softmax(dim=1)
modulating_factor = (1 - p).pow(gamma)
valid_mask = ~ y_true.eq(ignore_index)
masked_y_true = torch.where(valid_mask, y_true, torch.zeros_like(y_true))
modulating_factor = torch.gather(modulating_factor, dim=1, index=masked_y_true.unsqueeze(dim=1)).squeeze_(dim=1)
normalizer = losses.sum() / (losses * modulating_factor).sum()
scales = modulating_factor * normalizer
if t > t_max:
scale = scales
else:
scale = annealing_function(1, scales, t, t_max)
losses = (losses * scale).sum() / (valid_mask.sum() + p.size(0))
return losses
|
11485239
|
import math
def get_chunks(xs, chunk_count=3):
"""
Helper function to split a list into roughly equally sized chunks.
"""
chunk_width = math.ceil(len(xs) / chunk_count)
ranges = range(0, len(xs), chunk_width)
return [xs[x:x + chunk_width] for x in ranges]
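# Worked example: get_chunks(list(range(7)), chunk_count=3) uses chunk_width = ceil(7/3) = 3
# and returns [[0, 1, 2], [3, 4, 5], [6]]; the final chunk may be shorter than the others.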
|
11485242
|
import os
import datetime
from flask import Flask
app = Flask(__name__)
@app.route('/')
def current_time():
ct = datetime.datetime.now()
return 'The current time is : {}!\n'.format(ct)
if __name__ == "__main__":
app.run(debug=True,host='0.0.0.0')
|
11485306
|
import unittest
import numpy as np
from pysprint.core._preprocess import *
class TestEdit(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_savgol(self):
x, y, v, w = np.loadtxt("test_arms.txt", delimiter=",", unpack=True)
a, b = savgol(x, y, v, w, window=10, order=3)
c, d = savgol(x, y, v, w, window=11, order=3)
assert len(a) == len(b)
        assert len(c) == len(d)
with self.assertRaises(ValueError):
savgol(x, y, v, w, window=1, order=3)
def test_peak(self):
x, y = np.loadtxt("test_peak.txt", delimiter=",", unpack=True)
a, b, c, d = find_peak(x, y, [], [], threshold=0.01, pro_min=0.5, pro_max=0.5)
assert len(a) == len(b)
assert len(c) == len(d)
for val in b:
assert abs(val) > 0.01
for val in d:
assert abs(val) > 0.01
def test_interpolate(self):
x, y, v, w = np.loadtxt("test_arms.txt", delimiter=",", unpack=True)
a, b = interpolate_data(x, y, v, w)
assert len(a) == len(b)
np.random.seed(1000)
idx1 = np.random.randint(0, len(a))
idx2 = np.random.randint(0, len(a))
i = abs(a[idx1] - a[idx1 - 1])
j = abs(a[idx2] - a[idx2 - 1])
np.testing.assert_almost_equal(i, j)
with self.assertRaises(TypeError):
interpolate_data({"a": "1", "b": "2"}, x, y, v, window=1, order=3)
def test_cut(self):
x, y, v, w = np.loadtxt("test_arms.txt", delimiter=",", unpack=True)
a, b = cut_data(x, y, v, w, start=2.2, stop=2.8)
assert len(a) == len(b)
np.testing.assert_almost_equal(min(a), 2.2, decimal=2)
np.testing.assert_almost_equal(max(a), 2.8, decimal=2)
with self.assertRaises(ValueError):
cut_data(x, [], v, w)
def test_convolution(self):
x, y, v, w = np.loadtxt("test_arms.txt", delimiter=",", unpack=True)
a, b = convolution(x, y, v, w, len(x), standev=200)
assert len(a) == len(b)
with self.assertRaises(ValueError):
convolution(x, [], v, w, 10, standev=-541)
if __name__ == "__main__":
unittest.main()
|
11485312
|
from stepist.flow.steps.step import StepData
import ujson
def get_connection(sqs_engine):
queues = sqs_engine._queues.keys()
return SQSConnection(sqs_engine.session,
queues,
sqs_engine.message_retention_period,
sqs_engine.visibility_timeout)
class SQSConnection:
def __init__(self, sqs_session, queues_keys, message_retention_period,
visibility_timeout):
self.sqs_session = sqs_session
self.queues_keys = queues_keys
self.message_retention_period = message_retention_period
self.visibility_timeout = visibility_timeout
self.connection = None
self.channel = None
        self.queues = {}  # queue_name -> SQS Queue resource, populated by register_worker
def init_connection(self):
self.sqs_client = self.sqs_session.client('sqs')
self.sqs_resource = self.sqs_session.resource('sqs')
for q in self.queues_keys:
self.register_worker(q)
def register_worker(self, queue_name):
attrs = {}
kwargs = {
'QueueName': queue_name,
'Attributes': attrs,
}
if self.message_retention_period is not None:
attrs['MessageRetentionPeriod'] = str(self.message_retention_period)
if self.visibility_timeout is not None:
attrs['VisibilityTimeout'] = str(self.visibility_timeout)
self.sqs_client.create_queue(**kwargs)
queue = self.sqs_resource.get_queue_by_name(QueueName=queue_name)
self.queues[queue_name] = queue
def add_job(self, step_key: str, data):
queue = self.queues.get(step_key, None)
if not queue:
raise RuntimeError("Queue %s not found" % step_key)
step_data = StepData(data)
step_data_str = ujson.dumps({'data': step_data.get_dict()})
kwargs = {
'MessageBody': step_data_str,
'MessageAttributes': {},
'DelaySeconds': 0
}
ret = queue.send_message(**kwargs)
return ret['MessageId']
|
11485337
|
from django.db import models
from dictionary.views import get_def_for_tooltip
import json
from search.models import TableNames
import settings
from opus_support import (display_result_unit,
get_default_unit,
get_unit_display_name,
parse_form_type)
class ParamInfo(models.Model):
"""
This model describes every searchable param in the database.
Each has attributes like display, display order, query type, slug, etc.
"""
category_name = models.CharField(max_length=150)
name = models.CharField(max_length=87)
form_type = models.CharField(max_length=100, blank=True, null=True)
display = models.CharField(max_length=1)
display_results = models.IntegerField()
disp_order = models.IntegerField()
label = models.CharField(max_length=240, blank=True, null=True)
label_results = models.CharField(max_length=240, blank=True, null=True)
slug = models.CharField(max_length=255, blank=True, null=True)
old_slug = models.CharField(max_length=255, blank=True, null=True)
referred_slug = models.CharField(max_length=255, blank=True, null=True)
ranges = models.TextField()
field_hints1 = models.CharField(max_length=255, blank=True, null=True)
field_hints2 = models.CharField(max_length=255, blank=True, null=True)
intro = models.CharField(max_length=1023, blank=True, null=True)
tooltip = models.CharField(max_length=255, blank=True, null=True)
dict_context = models.CharField(max_length=255, blank=True, null=True)
dict_name = models.CharField(max_length=255, blank=True, null=True)
dict_context_results = models.CharField(max_length=255, blank=True, null=True)
dict_name_results = models.CharField(max_length=255, blank=True, null=True)
sub_heading = models.CharField(max_length=150, blank=True, null=True)
timestamp = models.DateTimeField()
class Meta:
        db_table = 'param_info'
ordering = ('category_name', 'sub_heading', 'disp_order')
def __unicode__(self):
return u"%s" % self.name
def param_qualified_name(self):
return self.category_name + '.' + self.name
def get_tooltip(self):
definition = get_def_for_tooltip(self.dict_name, self.dict_context)
return definition
def get_tooltip_results(self):
if self.dict_name_results:
definition = get_def_for_tooltip(self.dict_name_results,
self.dict_context_results)
else:
definition = get_def_for_tooltip(self.dict_name, self.dict_context)
return definition
def get_link_tooltip(self):
table_label = (TableNames.objects
.get(table_name=self.category_name).label)
return (f'This field is a link to one available under {table_label}. '+
'It is provided here for your convenience.')
def body_qualified_label(self):
# Append "[Ring]" or "[<Surface Body>]" or "[Mission]" or "[Instrument]"
if self.label is None: # pragma: no cover
return None
pretty_name = (TableNames.objects
.get(table_name=self.category_name).label)
pretty_name = pretty_name.replace(' Surface Geometry Constraints', '')
pretty_name = pretty_name.replace(' Geometry Constraints', '')
pretty_name = pretty_name.replace(' Mission Constraints', '')
pretty_name = pretty_name.replace(' Constraints', '')
if (pretty_name == 'Surface' or
f'[{pretty_name}]' in self.label):
return self.label
return self.label + ' [' + pretty_name + ']'
def body_qualified_label_results(self, referred=False):
# Append "[Ring]" or "[<Surface Body>]" or "[Mission]" or "[Instrument]"
if self.label_results is None:
return None
pretty_name = (TableNames.objects
.get(table_name=self.category_name).label)
pretty_name = pretty_name.replace(' Surface Geometry Constraints', '')
pretty_name = pretty_name.replace(' Geometry Constraints', '')
pretty_name = pretty_name.replace(' Mission Constraints', '')
pretty_name = pretty_name.replace(' Constraints', '')
if (pretty_name in ['General', 'PDS', 'Wavelength', 'Image',
'Occultation/Reflectance Profiles', 'Surface']
and not referred):
return self.label_results
# Make sure "[Ring]", "[<Surface Body>]", etc is not duplicated in the
# label for referred slug.
if f'[{pretty_name}]' in self.label_results:
return self.label_results
return self.label_results + ' [' + pretty_name + ']'
def get_units(self):
# Put parentheses around units (units)
(form_type, form_type_format,
form_type_unit_id) = parse_form_type(self.form_type)
if form_type_unit_id and display_result_unit(form_type_unit_id):
default_unit = get_default_unit(form_type_unit_id)
display_name = get_unit_display_name(form_type_unit_id,
default_unit)
return ('(' + display_name + ')')
else:
return ''
def fully_qualified_label_results(self):
ret = self.body_qualified_label_results()
if ret is None: # pragma: no cover
return None
units = self.get_units()
if units != '':
ret += ' '+units
return ret
def is_string(self):
(form_type, form_type_format,
form_type_unit_id) = parse_form_type(self.form_type)
return form_type == 'STRING'
def is_string_or_mult(self):
(form_type, form_type_format,
form_type_unit_id) = parse_form_type(self.form_type)
return form_type == 'STRING' or form_type in settings.MULT_FORM_TYPES
def get_ranges_info(self):
"""
Get the ranges info except units & qtype
"""
ranges = {}
if self.ranges:
ranges = json.loads(self.ranges)
return ranges
|
11485438
|
import torch
import torch.nn as nn
import torchvision
from python_developer_tools.cv.bases.channels.channels import ChannelShuffle
def Conv3x3BNReLU(in_channels,out_channels,stride,groups):
return nn.Sequential(
nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=3, stride=stride, padding=1,groups=groups),
nn.BatchNorm2d(out_channels),
nn.ReLU6(inplace=True)
)
def Conv3x3BN(in_channels,out_channels,stride,groups):
return nn.Sequential(
nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=3, stride=stride, padding=1,groups=groups),
nn.BatchNorm2d(out_channels)
)
def Conv1x1BNReLU(in_channels,out_channels):
return nn.Sequential(
nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=1, stride=1),
nn.BatchNorm2d(out_channels),
nn.ReLU6(inplace=True)
)
def Conv1x1BN(in_channels,out_channels):
return nn.Sequential(
nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=1, stride=1),
nn.BatchNorm2d(out_channels)
)
class HalfSplit(nn.Module):
def __init__(self, dim=0, first_half=True):
super(HalfSplit, self).__init__()
self.first_half = first_half
self.dim = dim
def forward(self, input):
splits = torch.chunk(input, 2, dim=self.dim)
return splits[0] if self.first_half else splits[1]
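# For an [N, C, H, W] tensor, HalfSplit(dim=1, first_half=True) keeps the first of the two
# channel chunks produced by torch.chunk, and first_half=False keeps the second.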
class ShuffleNetUnits(nn.Module):
def __init__(self, in_channels, out_channels, stride, groups):
super(ShuffleNetUnits, self).__init__()
self.stride = stride
if self.stride > 1:
mid_channels = out_channels - in_channels
else:
mid_channels = out_channels // 2
in_channels = mid_channels
self.first_half = HalfSplit(dim=1, first_half=True)
self.second_split = HalfSplit(dim=1, first_half=False)
self.bottleneck = nn.Sequential(
Conv1x1BNReLU(in_channels, in_channels),
Conv3x3BN(in_channels, mid_channels, stride, groups),
Conv1x1BNReLU(mid_channels, mid_channels)
)
if self.stride > 1:
self.shortcut = nn.Sequential(
Conv3x3BN(in_channels=in_channels, out_channels=in_channels, stride=stride, groups=groups),
Conv1x1BNReLU(in_channels, in_channels)
)
self.channel_shuffle = ChannelShuffle(groups)
def forward(self, x):
if self.stride > 1:
x1 = self.bottleneck(x)
x2 = self.shortcut(x)
else:
x1 = self.first_half(x)
x2 = self.second_split(x)
x1 = self.bottleneck(x1)
out = torch.cat([x1, x2], dim=1)
out = self.channel_shuffle(out)
return out
class ShuffleNetV2(nn.Module):
def __init__(self, planes, layers, groups, num_classes=1000):
super(ShuffleNetV2, self).__init__()
self.groups = groups
self.stage1 = nn.Sequential(
Conv3x3BNReLU(in_channels=3, out_channels=24, stride=2, groups=1),
nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
)
self.stage2 = self._make_layer(24, planes[0], layers[0], True)
self.stage3 = self._make_layer(planes[0], planes[1], layers[1], False)
self.stage4 = self._make_layer(planes[1], planes[2], layers[2], False)
self.global_pool = nn.AdaptiveAvgPool2d(1)
self.dropout = nn.Dropout(p=0.2)
self.linear = nn.Linear(in_features=planes[2], out_features=num_classes)
self.init_params()
def _make_layer(self, in_channels, out_channels, block_num, is_stage2):
layers = []
layers.append(ShuffleNetUnits(in_channels=in_channels, out_channels=out_channels, stride= 2, groups=1 if is_stage2 else self.groups))
for idx in range(1, block_num):
layers.append(ShuffleNetUnits(in_channels=out_channels, out_channels=out_channels, stride=1, groups=self.groups))
return nn.Sequential(*layers)
def init_params(self):
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight)
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.Linear):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
def forward(self, x):
x = self.stage1(x)
x = self.stage2(x)
x = self.stage3(x)
x = self.stage4(x)
x = self.global_pool(x)
x = x.view(x.size(0), -1)
x = self.dropout(x)
out = self.linear(x)
return out
def shufflenet_v2_x2_0(**kwargs):
planes = [244, 488, 976]
layers = [4, 8, 4]
model = ShuffleNetV2(planes, layers, 1)
return model
def shufflenet_v2_x1_5(**kwargs):
planes = [176, 352, 704]
layers = [4, 8, 4]
model = ShuffleNetV2(planes, layers, 1)
return model
def shufflenet_v2_x1_0(**kwargs):
planes = [116, 232, 464]
layers = [4, 8, 4]
model = ShuffleNetV2(planes, layers, 1)
return model
def shufflenet_v2_x0_5(**kwargs):
planes = [48, 96, 192]
layers = [4, 8, 4]
model = ShuffleNetV2(planes, layers, 1)
return model
if __name__ == '__main__':
model = shufflenet_v2_x2_0()
print(model)
input = torch.randn(1, 3, 224, 224)
out = model(input)
print(out.shape)
|
11485449
|
import time
import numpy as np
from lcc.entities.star import Star
from lcc.stars_processing.deciders import LDADec, QDADec, GradBoostDec
from lcc.stars_processing.descriptors import AbbeValueDescr
from lcc.stars_processing.stars_filter import StarsFilter
from lcc.stars_processing.systematic_search.stars_searcher import StarsSearcherRedis
from lcc.stars_processing.systematic_search.worker import run_workers
def test():
descriptors = [AbbeValueDescr()]
# deciders = [NeuronDecider(maxEpochs=800)]
deciders = [LDADec(), QDADec(), GradBoostDec()]
s_stars = [Star(name="Searched_{}".format(i)) for i in range(100)]
c_stars = [Star(name="Contam_{}".format(i)) for i in range(100)]
x = np.linspace(0, 10, 100)
for st in s_stars:
st.putLightCurve([x, np.cos(x) - 0.5 + np.random.random_sample(100)])
for st in c_stars:
st.putLightCurve([x, np.exp(x*np.random.random_sample(100))])
filt = StarsFilter(descriptors, deciders)
filt.learn(s_stars, c_stars)
searcher = StarsSearcherRedis([filt], db_connector="OgleII", save_path="/tmp/test_stars")
queries = [{"field": "LMC_SC3", "starid": i} for i in range(1, 10)]
searcher.queryStars(queries)
run_workers(n_workers=1)
assert len(searcher.getPassedStars()) > 0
time.sleep(2)
assert len(searcher.getStatus()) == len(queries)
|
11485477
|
from tincan.documents.document import Document
from tincan.documents.state_document import StateDocument
from tincan.documents.activity_profile_document import ActivityProfileDocument
from tincan.documents.agent_profile_document import AgentProfileDocument
|
11485488
|
import networkx as nx
from utils.graph import ExceptionDataNode, Graph
from utils.pageRank import PRIterator
from utils.pcalg import construct_service_dependency_diagram
class DiagnosisFaultService:
@staticmethod
def get_servcie_fault_causes(serviceNode,data):
"""
        Perform fine-grained diagnosis for a single faulty service
        :param serviceNode:
        :param data: an input_data instance
:return:
"""
        # Determine the anomalous metrics related to the faulty service
serviceId = serviceNode.serviceId
hostId = serviceNode.hostId
containerId = serviceNode.containerId
exception_metrics, exception_logs = data.get_fault_service_related_log_metric_data(serviceId,containerId,hostId)
        # Preprocess the raw data into the input format expected by the PC algorithm
pc_input = data.get_PC_input_data(exception_metrics, exception_logs)
        # Build the graph with the PC algorithm; the nodes of g are indices into the input data
g, columns_mapping = construct_service_dependency_diagram(pc_input)
        # In the generated g, replace the node indices with the corresponding metricId / logId
g_new = DiagnosisFaultService.get_g_dataId(g, columns_mapping)
        # Identify the root-cause nodes in the graph
falut_root_dict = DiagnosisFaultService.get_root_cause(g_new)
        # Build the graph structure to return
final_exception_data_graph = DiagnosisFaultService.geneate_final_return_graph(g_new,exception_metrics, exception_logs)
return falut_root_dict,final_exception_data_graph
@staticmethod
def get_g_dataId(g,columns_mapping):
g_new = nx.DiGraph()
for node in g.nodes:
g_new.add_node(columns_mapping[node])
for edge in g.edges:
g_new.add_edge(columns_mapping[edge[0]],columns_mapping[edge[1]])
return g_new
@staticmethod
def get_root_cause(g):
"""
        Get the list of root causes for a faulty service from the relation graph
        Args:
            g: the relation graph
        Returns: the list of root causes
"""
result = list()
        # Find the node with the highest PageRank value
begin_node_id, begin_node_pr = None, 0
# for node_id in node_ids:
# if len(list(g.predecessors(node_id))) > max_pre_size:
# max_pre_node = node_id
# max_pre_size = len(list(g.predecessors(node_id)))
pr = PRIterator(g)
page_ranks = pr.page_rank()
node_pr_sorted = sorted(page_ranks.items(), key=lambda x: x[1], reverse=True)
begin_node_id = node_pr_sorted[0][0]
        # Breadth-first (level-order) traversal
node_filter, node_queue = {begin_node_id}, list([begin_node_id])
while node_queue:
node_now = node_queue.pop(0)
if not g.predecessors(node_now):
if node_now not in result:
result.append(node_now)
continue
is_pre_not_filter = False
for k in g.predecessors(node_now):
if k not in node_filter:
is_pre_not_filter = True
node_filter.add(k)
node_queue.append(k)
            # If all upstream nodes are already in the filter, add the current node to result
            # to avoid returning an empty result
if not is_pre_not_filter:
for k in g.predecessors(node_now):
if k not in result:
result.append(k)
if node_now not in result:
result.append(node_now)
g_reverse = g.reverse(copy=True)
pr_reverse = PRIterator(g_reverse)
page_ranks_reverse = pr_reverse.page_rank()
        for key in page_ranks_reverse:
            if key in result:
                page_ranks_reverse[key] += 0.5
        node_pr_reverse_sorted = sorted(page_ranks_reverse.items(), key=lambda x: x[1], reverse=True)
result_final = {}
for index, i in enumerate(node_pr_reverse_sorted):
if index < 3:
result_final[i[0]]= i[1]
return result_final
@staticmethod
def geneate_final_return_graph(g_new,exception_metrics, exception_logs):
"""
        Build the graph structure to return
        :param g_new:
        :param exception_metrics: anomalous metrics related to the service
        :param exception_logs: anomalous logs related to the service
:return:
"""
nodes = {}
for node_id in g_new.nodes:
id = node_id
if id in exception_metrics:
nodeType = "metric"
tmpExceptionDataNode = ExceptionDataNode(id, nodeType)
tmpExceptionDataNode.name = exception_metrics[id][0].metricName
tmpExceptionDataNode.detail = exception_metrics[id][0].value
tmpExceptionDataNode.units = exception_metrics[id][0].units
tmpExceptionDataNode.belongTo = exception_metrics[id][0].metricBelongTo
tmpExceptionDataNode.exceptionTime = exception_metrics[id][0].startTime
nodes[id] = tmpExceptionDataNode
elif id in exception_logs:
nodeType = "log"
tmpExceptionDataNode = ExceptionDataNode(id, nodeType)
tmpExceptionDataNode.belongTo = exception_logs[id][0].logBelongTo
tmpExceptionDataNode.exceptionTime = exception_logs[id][0].startTime
tmpExceptionDataNode.detail = exception_logs[id][0].logExceptionSegment
nodes[id] = tmpExceptionDataNode
else:
continue
edges = g_new.edges()
final_return_graph = Graph(nodes,edges)
return final_return_graph
|
11485509
|
import os
import sys
import tempfile
from pathlib import Path
from shutil import rmtree
import pytest
try:
from h5pyd._apps.hstouch import main as hstouch
from hsds.hsds_app import HsdsApp
with_reqd_pkgs = True
except ImportError:
with_reqd_pkgs = False
def set_hsds_root():
"""Make required HSDS root directory."""
hsds_root = Path(os.environ["ROOT_DIR"]) / os.environ["BUCKET_NAME"] / "home"
if hsds_root.exists():
rmtree(hsds_root)
old_sysargv = sys.argv
sys.argv = [""]
sys.argv.extend(["-e", os.environ["HS_ENDPOINT"]])
sys.argv.extend(["-u", "admin"])
sys.argv.extend(["-p", "admin"])
sys.argv.extend(["-b", os.environ["BUCKET_NAME"]])
sys.argv.append("/home/")
hstouch()
sys.argv = [""]
sys.argv.extend(["-e", os.environ["HS_ENDPOINT"]])
sys.argv.extend(["-u", "admin"])
sys.argv.extend(["-p", "admin"])
sys.argv.extend(["-b", os.environ["BUCKET_NAME"]])
sys.argv.extend(["-o", os.environ["HS_USERNAME"]])
sys.argv.append(f'/home/{os.environ["HS_USERNAME"]}/')
hstouch()
sys.argv = old_sysargv
@pytest.fixture(scope="session")
def hsds_up():
"""Provide HDF Highly Scalabale Data Service (HSDS) for h5pyd testing."""
if with_reqd_pkgs:
root_dir = Path(tempfile.mkdtemp(prefix="tmp-hsds-root-"))
os.environ["BUCKET_NAME"] = "data"
(root_dir / os.getenv("BUCKET_NAME")).mkdir(parents=True, exist_ok=True)
os.environ["ROOT_DIR"] = str(root_dir)
os.environ["HS_USERNAME"] = "h5netcdf-<PASSWORD>"
os.environ["HS_PASSWORD"] = "<PASSWORD>"
config = """allow_noauth: true
auth_expiration: -1
default_public: False
aws_access_key_id: xxx
aws_secret_access_key: xxx
aws_iam_role: hsds_role
aws_region: us-east-1
hsds_endpoint: http://hsds.hdf.test
aws_s3_gateway: null
aws_dynamodb_gateway: null
aws_dynamodb_users_table: null
azure_connection_string: null
azure_resource_id: null
azure_storage_account: null
azure_resource_group: null
root_dir: null
password_salt: <PASSWORD>
bucket_name: hsdstest
head_port: 5100
head_ram: 512m
dn_port: 6101
dn_ram: 3g
sn_port: 5101
sn_ram: 1g
rangeget_port: 6900
rangeget_ram: 2g
target_sn_count: 0
target_dn_count: 0
log_level: INFO
log_timestamps: false
log_prefix: null
max_tcp_connections: 100
head_sleep_time: 10
node_sleep_time: 10
async_sleep_time: 10
s3_sync_interval: 1
s3_sync_task_timeout: 10
store_read_timeout: 1
store_read_sleep_interval: 0.1
max_pending_write_requests: 20
flush_sleep_interval: 1
max_chunks_per_request: 1000
min_chunk_size: 1m
max_chunk_size: 4m
max_request_size: 100m
max_chunks_per_folder: 0
max_task_count: 100
max_tasks_per_node_per_request: 16
aio_max_pool_connections: 64
metadata_mem_cache_size: 128m
metadata_mem_cache_expire: 3600
chunk_mem_cache_size: 128m
chunk_mem_cache_expire: 3600
data_cache_size: 128m
data_cache_max_req_size: 128k
data_cache_expire_time: 3600
data_cache_page_size: 4m
data_cache_max_concurrent_read: 16
timeout: 30
password_file: /config/passwd.txt
groups_file: /config/groups.txt
server_name: Highly Scalable Data Service (HSDS)
greeting: Welcome to HSDS!
about: HSDS is a webservice for HDF data
top_level_domains: []
cors_domain: "*"
admin_user: admin
admin_group: null
openid_provider: azure
openid_url: null
openid_audience: null
openid_claims: unique_name,appid,roles
chaos_die: 0
standalone_app: false
blosc_nthreads: 2
http_compression: false
http_max_url_length: 512
k8s_app_label: hsds
k8s_namespace: null
restart_policy: on-failure
domain_req_max_objects_limit: 500
"""
tmp_dir = Path(tempfile.mkdtemp(prefix="tmp-hsds-"))
config_file = tmp_dir / "config.yml"
config_file.write_text(config)
passwd_file = tmp_dir / "passwd.txt"
passwd_file.write_text(
f'admin:admin\n{os.environ["HS_USERNAME"]}:{os.environ["HS_PASSWORD"]}\n'
)
log_file = str(tmp_dir / "hsds.log")
tmp_dir = str(tmp_dir)
if sys.platform == "darwin":
# macOS temp directory paths can be very long and break low-level
# socket comms code...
socket_dir = "/tmp/hsds"
else:
socket_dir = tmp_dir
try:
hsds = HsdsApp(
username=os.environ["HS_USERNAME"],
password=os.environ["HS_PASSWORD"],
password_file=str(passwd_file),
log_level=os.getenv("LOG_LEVEL", "DEBUG"),
logfile=log_file,
socket_dir=socket_dir,
config_dir=tmp_dir,
dn_count=2,
)
hsds.run()
is_up = hsds.ready
if is_up:
os.environ["HS_ENDPOINT"] = hsds.endpoint
set_hsds_root()
except Exception:
is_up = False
yield is_up
hsds.stop()
rmtree(tmp_dir, ignore_errors=True)
rmtree(socket_dir, ignore_errors=True)
rmtree(root_dir, ignore_errors=True)
else:
yield False
|
11485516
|
import click
from .tools.es import es_cli, purge as purge_es
from .tools.s3 import s3_cli, reset as reset_s3
from .tools.pg import pg_cli, reset as reset_pg
from .tools.neo import neo4j_cli, purge as purge_neo4j
from .config import Config
from pathlib import Path
import shutil
@click.group()
def cli():
"""Pipeline testing tools"""
pass
cli.add_command(es_cli)
cli.add_command(s3_cli)
cli.add_command(pg_cli)
cli.add_command(neo4j_cli)
@cli.command(name='setup')
@click.option(
'-t',
'--test-dir',
type=click.Path(
dir_okay=True,
file_okay=False,
resolve_path=True
),
help="Base directory for hierarchy of job test subdirectories",
default=Config.DEFAULT_LOCAL_TEST_PATH
)
@click.option(
'-f',
'--force',
is_flag=True,
help="Recreate test directory if already exists"
)
@click.pass_context
def setup(ctx: click.Context, test_dir: str, force: bool):
"""Configure backends and local directories for testing"""
ctx.invoke(purge_es)
ctx.invoke(purge_neo4j)
ctx.invoke(reset_pg)
ctx.invoke(reset_s3)
test_dir_p = Path(test_dir)
if test_dir_p.is_dir() and force:
shutil.rmtree(str(test_dir_p))
print(f"[INFO] Populating local test dirs under: {test_dir_p!s}")
raw_dir_p = Path(test_dir_p, 'raw')
parsed_dir_p = Path(test_dir_p, 'parsed')
thumbnail_dir_p = Path(test_dir_p, 'thumbnails')
job_dir_p = Path(test_dir_p, 'job')
test_dir_p.mkdir(exist_ok=True)
raw_dir_p.mkdir(exist_ok=True)
parsed_dir_p.mkdir(exist_ok=True)
thumbnail_dir_p.mkdir(exist_ok=True)
job_dir_p.mkdir(exist_ok=True)
for d in [p for p in Path(Config.CRAWLER_OUTPUT_PATH).iterdir() if p.is_dir()]:
shutil.copytree(str(d), str(Path(raw_dir_p, d.name)))
Path(parsed_dir_p, d.name).mkdir(exist_ok=True)
Path(thumbnail_dir_p, d.name).mkdir(exist_ok=True)
|
11485546
|
import os
class Note:
def __init__(self, path: str):
self.path = path
@classmethod
def from_file(cls, path: str):
        if os.path.exists(path):
return cls(path)
|
11485564
|
import docker
import hashlib
import os
import os.path as op
import slugid
import sys
CONTAINER_PREFIX = "higlass-manage-container"
NETWORK_PREFIX = "higlass-manage-network"
REDIS_PREFIX = "higlass-manage-redis"
REDIS_CONF = "/usr/local/etc/redis/redis.conf"
SQLITEDB = "db.sqlite3"
def md5(fname):
hash_md5 = hashlib.md5()
with open(fname, "rb") as f:
for chunk in iter(lambda: f.read(4096), b""):
hash_md5.update(chunk)
return hash_md5.hexdigest()
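# Hashes the file in 4 KiB chunks so large files are digested without being read fully
# into memory; returns the hexadecimal MD5 digest of the file contents.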
def hg_name_to_container_name(hg_name):
return "{}-{}".format(CONTAINER_PREFIX, hg_name)
def hg_name_to_network_name(hg_name):
return "{}-{}".format(NETWORK_PREFIX, hg_name)
def hg_name_to_redis_name(hg_name):
return "{}-{}".format(REDIS_PREFIX, hg_name)
def get_port(hg_name):
client = docker.from_env()
container_name = hg_name_to_container_name(hg_name)
config = client.api.inspect_container(container_name)
port = config["HostConfig"]["PortBindings"]["80/tcp"][0]["HostPort"]
return port
def get_site_url(hg_name, _SITE_URL="SITE_URL"):
"""
    Get the SITE_URL for a given container from its docker config, assuming
    there is at most one SITE_URL entry in the container's env.
    Returns "http://localhost" when no SITE_URL entry is detected.
"""
client = docker.from_env()
container_name = hg_name_to_container_name(hg_name)
config = client.api.inspect_container(container_name)
site_url_entries = [s for s in config["Config"]["Env"] if _SITE_URL in s]
# if there is no SITE_URL entry yield "localhost"
if not site_url_entries:
return "http://localhost"
# otherwise there has to be only one SITE_URL entry in the Env:
elif len(site_url_entries) > 1:
raise ValueError(
"There are multiple SITE_URL entry in {} env".format(container_name)
)
else:
(site_url,) = site_url_entries
# parse "SITE_URL=http://hostname":
_, site_url = site_url.split("=")
return site_url
def fill_filetype_and_datatype(filename, filetype, datatype):
"""
If no filetype or datatype are provided, add them
based on the given filename.
Parameters:
----------
filename: str
The name of the file
filetype: str
The type of the file (can be None)
datatype: str
The datatype for the data in the file (can be None)
Returns:
--------
(filetype, datatype): (str, str)
Filled in filetype and datatype based on the given filename
"""
if filetype is None:
# no filetype provided, try a few common filetypes
filetype = infer_filetype(filename)
print("Inferred filetype:", filetype)
if filetype is None:
recommended_filetype = recommend_filetype(filename)
print(
"Unknown filetype, please specify using the --filetype option",
file=sys.stderr,
)
if recommended_filetype is not None:
print(
"Based on the filename, you may want to try the filetype: {}".format(
recommended_filetype
)
)
return (None, None)
if datatype is None:
datatype = infer_datatype(filetype)
print("Inferred datatype:", datatype)
if datatype is None:
recommended_datatype = recommend_datatype(filetype)
print(
"Unknown datatype, please specify using the --datatype option",
file=sys.stderr,
)
if recommended_datatype is not None:
print(
"Based on the filetype, you may want to try the datatype: {}".format(
recommended_datatype
)
)
return (filetype, datatype)
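# Illustrative calls (filenames are hypothetical; behaviour follows
# infer_filetype/infer_datatype defined below):
#
#   fill_filetype_and_datatype("signal.bigwig", None, None)    -> ("bigwig", "vector")
#   fill_filetype_and_datatype("matrix.mcool", None, "matrix") -> ("cooler", "matrix")
#   fill_filetype_and_datatype("unknown.xyz", None, None)      -> (None, None), with a hint printed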
def recommend_filetype(filename):
ext = op.splitext(filename)
if op.splitext(filename)[1] == ".bed":
return "bedfile"
if op.splitext(filename)[1] == ".bedpe":
return "bedpe"
def recommend_datatype(filetype):
if filetype == "bedfile":
return "bedlike"
def datatype_to_tracktype(datatype):
if datatype == "matrix":
return ("heatmap", "center")
elif datatype == "vector":
return ("horizontal-bar", "top")
elif datatype == "gene-annotations":
return ("horizontal-gene-annotations", "top")
elif datatype == "chromsizes":
return ("horizontal-chromosome-labels", "top")
elif datatype == "2d-rectangle-domains":
return ("2d-rectangle-domains", "center")
elif datatype == "bedlike":
return ("bedlike", "top")
return (None, None)
def infer_filetype(filename):
_, ext = op.splitext(filename)
if ext.lower() == ".bw" or ext.lower() == ".bigwig":
return "bigwig"
elif ext.lower() == ".mcool" or ext.lower() == ".cool":
return "cooler"
elif ext.lower() == ".htime":
return "time-interval-json"
elif ext.lower() == ".hitile":
return "hitile"
elif ext.lower() == ".beddb":
return "beddb"
return None
def infer_datatype(filetype):
if filetype == "cooler":
return "matrix"
if filetype == "bigwig":
return "vector"
if filetype == "time-interval-json":
return "time-interval"
if filetype == "hitile":
return "vector"
if filetype == "beddb":
return "bedlike"
def import_file(
hg_name, filepath, filetype, datatype, assembly, name, uid, no_upload, project_name
):
# get this container's temporary directory
if not no_upload:
temp_dir = get_temp_dir(hg_name)
if not op.exists(temp_dir):
os.makedirs(temp_dir)
filename = op.split(filepath)[1]
to_import_path = op.join(temp_dir, filename)
if to_import_path != filepath:
# if this file already exists in the temporary dir
# remove it
if op.exists(to_import_path):
print("Removing existing file in temporary dir:", to_import_path)
os.remove(to_import_path)
os.link(filepath, to_import_path)
else:
filename = filepath
coordSystem = "--coordSystem {}".format(assembly) if assembly is not None else ""
name_text = '--name "{}"'.format(name) if name is not None else ""
project_name_text = (
'--project-name "{}"'.format(project_name) if project_name is not None else ""
)
print("name_text: {}".format(name_text))
client = docker.from_env()
print("hg_name:", hg_name)
container_name = hg_name_to_container_name(hg_name)
container = client.containers.get(container_name)
if no_upload:
command = (
"python higlass-server/manage.py ingest_tileset --filename"
            + " {}".format(filename.replace(" ", "\\ "))
+ " --filetype {} --datatype {} {} {} {} --no-upload".format(
filetype, datatype, name_text, project_name_text, coordSystem
)
)
else:
command = (
"python higlass-server/manage.py ingest_tileset --filename"
            + " /tmp/{}".format(filename.replace(" ", "\\ "))
+ " --filetype {} --datatype {} {} {} {}".format(
filetype, datatype, name_text, project_name_text, coordSystem
)
)
if uid is not None:
command += " --uid {}".format(uid)
else:
uid = slugid.nice()
command += " --uid {}".format(uid)
print("command:", command)
(exit_code, output) = container.exec_run(command)
if exit_code != 0:
print("ERROR:", output.decode("utf8"), file=sys.stderr)
return None
return uid
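# Usage sketch (hypothetical names; requires the docker daemon and a running
# higlass container for the given hg_name):
#
#   uid = import_file(
#       hg_name="default",
#       filepath="/data/sample.mcool",
#       filetype="cooler",
#       datatype="matrix",
#       assembly="hg19",
#       name="sample matrix",
#       uid=None,               # a slugid is generated when omitted
#       no_upload=False,
#       project_name=None,
#   )
#   # returns the tileset uid on success, or None if ingest_tileset failed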
def get_temp_dir(hg_name):
client = docker.from_env()
container_name = hg_name_to_container_name(hg_name)
config = client.api.inspect_container(container_name)
print("state", config["State"]["Running"])
    if not config["State"]["Running"]:
raise HiGlassNotRunningException()
for mount in config["Mounts"]:
if mount["Destination"] == "/tmp":
return mount["Source"]
def get_data_dir(hg_name):
client = docker.from_env()
container_name = hg_name_to_container_name(hg_name)
config = client.api.inspect_container(container_name)
for mount in config["Mounts"]:
if mount["Destination"] == "/data":
return mount["Source"]
class HiGlassNotRunningException(Exception):
pass
|
11485575
|
import asyncio
import json
import os
import re
import zipfile
from os import path, walk
from aioconsole import aprint
from rich.progress import Progress
from fz_manager.factorio_zone_api import FZClient, ServerStatus
from fz_manager.menu import ActionMenu, SelectMenu, CheckboxMenu, MenuEntry, PathMenu, AlertMenu, InputMenu
from fz_manager.shell import Shell
from fz_manager.storage import Storage
from fz_manager.titlebar import create_titlebar
from fz_manager.utils import String, Term, Colors
from fz_manager.utils import run_on_thread
class Main:
def __init__(self) -> None:
self.storage = Storage()
self.client: (FZClient | None) = None
self.shell: (Shell | None) = None
self.titlebar = None
async def main(self):
token = await self.choose_token()
if token is None:
return
self.client = FZClient(token if not String.isblank(token) else None)
self.shell = Shell(self.client, self.storage)
self.titlebar = create_titlebar(self.client)
asyncio.get_event_loop_policy().get_event_loop().create_task(self.client.connect())
await self.client.wait_sync()
self.storage.store('userToken', self.client.user_token)
if token == '':
self.storage.token_history.append_string(self.client.user_token)
await self.main_menu()
# Main Menu
async def main_menu(self):
while True:
action = await ActionMenu(
message='Main menu',
entries=[
MenuEntry('Start server', self.start_server, condition=lambda: not self.client.running),
MenuEntry('Attach to server', self.attach_to_server, condition=lambda: self.client.running),
MenuEntry('Stop server', self.stop_server,
condition=lambda: self.client.running and self.client.server_status == ServerStatus.RUNNING),
MenuEntry('Manage mods', self.manage_mods_menu, condition=lambda: not self.client.running),
MenuEntry('Manage saves', self.manage_saves_menu, condition=lambda: not self.client.running),
MenuEntry('Exit')
],
titlebar=self.titlebar,
clear_screen=True
).show()
Term.cls()
if action is None or action.name == 'Exit':
break
async def manage_mods_menu(self):
while True:
action = await ActionMenu(
message='Manage mods',
entries=[
MenuEntry('Create mod-settings.zip', self.create_mod_settings),
MenuEntry('Upload mods', self.upload_mods_menu),
MenuEntry('Enable/Disable uploaded mods', self.disable_mods_menu),
MenuEntry('Delete uploaded mods', self.delete_mods_menu),
MenuEntry('Back')
],
titlebar=self.titlebar,
clear_screen=True
).show()
Term.cls()
if action is None or action.name == 'Back':
break
async def manage_saves_menu(self):
while True:
action = await ActionMenu(
                message='Manage saves',
entries=[
MenuEntry('Upload save', self.upload_save_menu),
MenuEntry('Delete save', self.delete_save_menu),
MenuEntry('Download save', self.download_save_menu),
MenuEntry('Back')
],
titlebar=self.titlebar,
clear_screen=True
).show()
Term.cls()
if action is None or action.name == 'Back':
break
async def create_mod_settings(self):
dat = 'mod-settings.dat'
def validator(p: str) -> bool:
return path.exists(path.join(p, dat))
mods_folder_path = await PathMenu(
'Insert path to mods folder: ',
only_directories=True,
validator=validator,
load_last_value=True,
titlebar=self.titlebar,
history=self.storage.mods_path_history
).show()
if mods_folder_path is None:
return
mod_settings_dat_path = path.join(mods_folder_path, dat)
info_json_path = path.join(mods_folder_path, 'info.json')
mod_settings_zip_path = path.join(mods_folder_path, "mod-settings.zip")
if not path.exists(mod_settings_dat_path):
return await AlertMenu(f'Unable to find {dat}').show()
with open(info_json_path, 'w') as fp:
json.dump({
'name': dat,
'version': '0.1.0',
'title': dat,
'description': 'Mod settings for factorio.zone created with FZ-Manager tool by @michelsciortino'
}, fp)
zf = zipfile.ZipFile(mod_settings_zip_path, "w")
zf.write(mod_settings_dat_path)
zf.write(info_json_path)
zf.close()
os.remove(info_json_path)
await AlertMenu(f'{mod_settings_zip_path} created').show()
async def upload_mods_menu(self):
mods_folder_path = await PathMenu(
'Insert path to mods folder: ',
only_directories=True,
validator=lambda p: path.exists(p),
titlebar=self.titlebar,
load_last_value=True,
history=self.storage.mods_path_history
).show()
if mods_folder_path is None:
return
root, _, filenames = next(walk(mods_folder_path), (None, None, []))
zip_files = list(filter(lambda n: n.endswith('.zip'), filenames))
if len(zip_files) == 0:
return await AlertMenu('No mod found in folder').show()
selected, _, _ = await CheckboxMenu(
message='Choose mods to upload',
entries=[MenuEntry(f, pre_selected=True) for f in zip_files],
).show()
if not selected or not len(selected):
return
mods: list[FZClient.Mod] = []
for entry in selected:
name = entry.name
file_path = path.join(root, name)
size = path.getsize(file_path)
mods.append(FZClient.Mod(name, file_path, size))
def callback(monitor):
progress.update(mod_task, completed=min(monitor.bytes_read, mod.size))
try:
with Progress() as progress:
main_task = progress.add_task('Uploading mods', total=len(mods))
for mod in mods:
mod_task = progress.add_task(f'Uploading {mod.name}', total=mod.size)
await self.client.upload_mod(mod, callback)
progress.update(main_task, advance=1)
except BaseException as ex:
return await AlertMenu(str(ex) or ex.__class__.__name__).show()
async def disable_mods_menu(self):
if not self.client.mods or not len(self.client.mods):
return await AlertMenu('No uploaded mods found', titlebar=self.titlebar).show()
_, added, deselected = await CheckboxMenu(
message='Enable/Disable mods',
entries=[MenuEntry(m['text'], pre_selected=m['enabled'], ext_index=m['id']) for m in self.client.mods],
titlebar=self.titlebar
).show()
if added is None or deselected is None:
return
with Progress() as progress:
bar = progress.add_task('Applying changes', total=len(added) + len(deselected))
for e in added:
progress.print(f'Enabling {e.name}')
await self.client.toggle_mod(e.ext_index, True)
progress.update(bar, advance=1)
for e in deselected:
progress.print(f'Disabling {e.name}')
await self.client.toggle_mod(e.ext_index, False)
progress.update(bar, advance=1)
progress.remove_task(bar)
async def delete_mods_menu(self):
if not self.client.mods or not len(self.client.mods):
return await AlertMenu('No uploaded mods found', titlebar=self.titlebar).show()
selected, _, _ = await CheckboxMenu(
message='Delete mods',
entries=[MenuEntry(m['text'], ext_index=m['id']) for m in self.client.mods],
titlebar=self.titlebar
).show()
        if not selected:
            return
        with Progress() as progress:
bar = progress.add_task('Deleting mods', total=len(selected))
for e in selected:
progress.print(f'Deleting {e.name}')
await self.client.delete_mod(e.ext_index)
progress.update(bar, advance=1)
progress.remove_task(bar)
async def upload_save_menu(self):
file_path = await PathMenu(
'Insert path to save file: ',
validator=lambda p: path.exists(p) and path.splitext(p)[1] == '.zip',
load_last_value=True,
titlebar=self.titlebar,
history=self.storage.saves_path_history
).show()
if file_path is None:
return
if not path.exists(file_path):
return await AlertMenu(f'{file_path} save file does not exist.').show()
file_extension = path.splitext(file_path)[1]
filename = path.basename(file_path)
if file_extension != '.zip':
return await AlertMenu('Save file must be a zip archive.').show()
slot = await SelectMenu(
'Select save slot:',
[MenuEntry(f'slot {i}', ext_index=i) for i in range(1, 10)]
).show()
if not slot:
return
slot_name = f'slot{slot.ext_index}'
if self.client.saves[slot_name] != f'slot {slot.ext_index} (empty)':
choice = await SelectMenu(
f'Slot {slot.ext_index} is already used, do you want to replace it?',
[
MenuEntry('Yes', ext_index=0),
MenuEntry('No', ext_index=1)
]
).show()
if not choice or choice.ext_index == 1:
return
else:
await self.client.delete_save_slot(slot_name)
size = path.getsize(file_path)
save = FZClient.Save(filename, file_path, size, slot_name)
with Progress() as progress:
upload_task = progress.add_task(f'Uploading {filename}', total=size)
def callback(monitor):
progress.update(upload_task, completed=min(monitor.bytes_read, size))
try:
await self.client.upload_save(save, callback)
progress.remove_task(upload_task)
except Exception as ex:
progress.remove_task(upload_task)
await AlertMenu(str(ex)).show()
async def delete_save_menu(self):
entries = await self.get_remote_slots()
if len(entries) == 0:
return await AlertMenu('All the slots are empty', titlebar=self.titlebar).show()
selected, _, _ = await CheckboxMenu(
message='Select slots to delete:',
entries=entries,
titlebar=self.titlebar
).show()
if not selected:
return
with Progress() as progress:
            delete_task = progress.add_task('Deleting slots', total=len(selected))
for slot in selected:
slot_name = f'slot{slot.ext_index}'
try:
progress.print(f'Deleting slot {slot.ext_index}')
await self.client.delete_save_slot(slot_name)
except Exception as ex:
await AlertMenu(str(ex)).show()
progress.update(delete_task, advance=1)
async def download_save_menu(self):
entries = await self.get_remote_slots()
if len(entries) == 0:
return await AlertMenu('All the slots are empty', titlebar=self.titlebar).show()
selected, _, _ = await CheckboxMenu(
message='Select slots to download:',
entries=entries,
titlebar=self.titlebar
).show()
if not selected:
return
directory = await PathMenu(
'Insert download directory path: ',
only_directories=True,
load_last_value=True,
history=self.storage.saves_path_history
).show()
if directory is None:
return
if not path.exists(directory):
return await AlertMenu('Directory not found').show()
if not path.isdir(directory):
return await AlertMenu(f'{directory} is not a directory').show()
with Progress() as progress:
            download_task = progress.add_task('Downloading slots', total=len(selected))
for slot in selected:
                expected_size = float(re.search(r'(\d+\.\d+)MB', slot.name)[1]) * 1048576
slot_task = progress.add_task(f'Slot {slot.ext_index}', total=expected_size)
def update(n_bytes):
progress.update(slot_task, completed=n_bytes)
slot_name = f'slot{slot.ext_index}'
try:
await self.client.download_save_slot(slot_name, path.join(directory, f'slot{slot.ext_index}.zip'), update)
progress.update(slot_task, completed=expected_size)
except Exception as ex:
progress.print(ex)
progress.update(download_task, advance=1)
async def start_server(self):
if (region := await self.choose_region(show_titlebar=True)) is None:
return
if (version := await self.choose_factorio_version()) is None:
return
if (slot := await self.choose_slot()) is None:
return
await aprint('Starting instance...')
self.client.add_logs_listener(aprint)
await run_on_thread(FZClient.start_instance, self.client, region, version, f'slot{slot}')
while not self.client.running and not self.client.server_address:
await asyncio.sleep(1)
self.client.remove_logs_listener(aprint)
self.storage.persist()
async def attach_to_server(self):
await self.shell.show()
async def stop_server(self):
await aprint('Stopping instance...')
self.client.add_logs_listener(aprint)
await run_on_thread(FZClient.stop_instance, self.client)
while self.client.running:
await asyncio.sleep(1)
self.client.remove_logs_listener(aprint)
async def get_remote_slots(self):
        slots: list[str] = list(self.client.saves.values())
return [MenuEntry(v, ext_index=i + 1) for i, v in enumerate(slots) if not v.endswith('(empty)')]
# AWS Region
async def choose_region(self, show_titlebar=False):
regions = sorted(self.client.regions.items())
region = self.storage.get('region')
region = await SelectMenu(
message='Choose a region:',
entries=[MenuEntry(f'{r[0]} - {r[1]}', ext_index=r[0]) for r in regions],
default=region,
titlebar=self.titlebar if show_titlebar else None
).show()
if region is None:
return
self.storage.store('region', region.ext_index)
return region.ext_index
# Factorio Version
# noinspection PyProtectedMember
async def choose_token(self):
menu = InputMenu(
message='Insert userToken:',
titlebar=create_titlebar(),
clear_screen=True,
load_last_value=True,
history=self.storage.token_history,
)
return await menu.show()
async def choose_factorio_version(self):
version = self.storage.get('version')
version = await SelectMenu(
message='Choose a Factorio version:',
entries=[MenuEntry(v, ext_index=v) for v in self.client.versions],
default=version
).show()
if version is None:
return
self.storage.store('version', version.ext_index)
return version.ext_index
async def choose_slot(self):
slot = self.storage.get('slot')
        slots: list[str] = list(self.client.saves.values())
        slot = await SelectMenu(
            message='Choose a save slot:',
entries=[MenuEntry(v, ext_index=i + 1) for i, v in enumerate(slots)],
default=slot
).show()
if slot is None:
return None
self.storage.store('slot', slot.ext_index)
return slot.ext_index
def create_header(self):
if self.client and self.client.server_address:
run = f'Server running at: {self.client.server_address}'
status = f'status: {self.client.server_status}'
else:
run = ''
status = ''
return Term.colorize(Colors.FACTORIO_FG, Colors.FACTORIO_BG,
'Factorio Zone Manager', ' ', run, status,
end=Term.ENDL + Term.RESET)
def main():
Term.cls()
program = Main()
asyncio.get_event_loop_policy().get_event_loop().run_until_complete(program.main()) # pragma: no cover
Term.cls()
|
11485583
|
import os
from ament_index_python.packages import get_package_share_directory
from launch import LaunchDescription
from launch.actions import IncludeLaunchDescription, DeclareLaunchArgument
from launch.launch_description_sources import PythonLaunchDescriptionSource
from launch.substitutions import LaunchConfiguration, PythonExpression
from launch_ros.actions import Node
from launch.conditions import IfCondition
def generate_launch_description():
pkg_path = get_package_share_directory("rover_gazebo")
pkg_gazebo_ros = get_package_share_directory("gazebo_ros")
pkg_rover_localization = get_package_share_directory("rover_localization")
### ARGS ###
world = LaunchConfiguration("world")
world_cmd = DeclareLaunchArgument(
"world",
default_value=os.path.join(pkg_path, "worlds", "empty.world"),
description="Gazebo world")
launch_gui = LaunchConfiguration("launch_gui")
launch_gui_cmd = DeclareLaunchArgument(
"launch_gui",
default_value="True",
description="Whether launch gzclient")
pause_gz = LaunchConfiguration("pause_gz")
pause_gz_cmd = DeclareLaunchArgument(
"pause_gz",
default_value="False",
description="Whether to pause gazebo")
launch_rviz = LaunchConfiguration("launch_rviz")
launch_rviz_cmd = DeclareLaunchArgument(
"launch_rviz",
default_value="True",
description="Whether launch rviz2")
initial_pose_x = LaunchConfiguration("initial_pose_x")
initial_pose_x_cmd = DeclareLaunchArgument(
"initial_pose_x",
default_value="0.0",
description="Initial pose x")
initial_pose_y = LaunchConfiguration("initial_pose_y")
initial_pose_y_cmd = DeclareLaunchArgument(
"initial_pose_y",
default_value="0.0",
description="Initial pose y")
initial_pose_z = LaunchConfiguration("initial_pose_z")
initial_pose_z_cmd = DeclareLaunchArgument(
"initial_pose_z",
default_value="0.22",
description="Initial pose z")
initial_pose_yaw = LaunchConfiguration("initial_pose_yaw")
initial_pose_yaw_cmd = DeclareLaunchArgument(
"initial_pose_yaw",
default_value="0.0",
description="Initial pose yaw")
### NODES ###
rviz_cmd = Node(
name="rviz",
package="rviz2",
executable="rviz2",
# output="log",
parameters=[{"use_sim_time": True}],
condition=IfCondition(PythonExpression([launch_rviz])),
)
### LAUNCHS ###
gazebo_client_cmd = IncludeLaunchDescription(
PythonLaunchDescriptionSource(
os.path.join(pkg_gazebo_ros, "launch", "gzclient.launch.py")
),
condition=IfCondition(PythonExpression([launch_gui]))
)
gazebo_server_cmd = IncludeLaunchDescription(
PythonLaunchDescriptionSource(
os.path.join(pkg_gazebo_ros, "launch", "gzserver.launch.py")
),
launch_arguments={"world": world, "pause": pause_gz, }.items()
)
localization_cmd = IncludeLaunchDescription(
PythonLaunchDescriptionSource(
os.path.join(pkg_rover_localization, "launch",
"localization.launch.py")
),
)
cmd_vel_cmd = IncludeLaunchDescription(
PythonLaunchDescriptionSource(
os.path.join(pkg_path, "launch/include",
"cmd_vel.launch.py")
),
)
spawn_cmd = IncludeLaunchDescription(
PythonLaunchDescriptionSource(
os.path.join(pkg_path, "launch/include", "spawn.launch.py")
),
launch_arguments={
"initial_pose_x": initial_pose_x,
"initial_pose_y": initial_pose_y,
"initial_pose_z": initial_pose_z,
"initial_pose_yaw": initial_pose_yaw
}.items()
)
ld = LaunchDescription()
ld.add_action(launch_gui_cmd)
ld.add_action(pause_gz_cmd)
ld.add_action(launch_rviz_cmd)
ld.add_action(world_cmd)
ld.add_action(initial_pose_x_cmd)
ld.add_action(initial_pose_y_cmd)
ld.add_action(initial_pose_z_cmd)
ld.add_action(initial_pose_yaw_cmd)
ld.add_action(gazebo_client_cmd)
ld.add_action(gazebo_server_cmd)
ld.add_action(localization_cmd)
ld.add_action(cmd_vel_cmd)
ld.add_action(spawn_cmd)
ld.add_action(rviz_cmd)
return ld
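# Example invocation (the launch-file name is an assumption; the package name
# follows the rover_gazebo share directory used above):
#
#   ros2 launch rover_gazebo gazebo.launch.py \
#       world:=/path/to/custom.world launch_gui:=False initial_pose_z:=0.22
#
# Every DeclareLaunchArgument declared above can be overridden as name:=value.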
|
11485615
|
from ceph_medic import metadata
from ceph_medic.checks import mons
class TestGetSecret(object):
def setup(self):
self.data = {
'paths': {
'/var/lib/ceph': {
'files': {
'/var/lib/ceph/mon/ceph-mon-0/keyring': {
'contents': '',
}
}
}
}
}
def set_contents(self, string, file_path=None):
file_path = file_path or '/var/lib/ceph/mon/ceph-mon-0/keyring'
self.data['paths']['/var/lib/ceph']['files'][file_path]['contents'] = string
def test_get_secret(self):
contents = """
[mon.]
key = <KEY>
caps mon = "allow *"
"""
self.set_contents(contents)
result = mons.get_secret(self.data)
assert result == 'AQBvaBFZAAAAABAA9VHgwCg3rWn8fMaX8KL01A=='
def test_get_no_secret_empty_file(self):
result = mons.get_secret(self.data)
assert result == ''
def test_get_no_secret_wrong_file(self):
contents = """
[mon.]
caps mon = "allow *"
"""
self.set_contents(contents)
result = mons.get_secret(self.data)
assert result == ''
class TestGetMonitorDirs(object):
def test_get_monitor_dirs(self):
result = mons.get_monitor_dirs([
'/var/lib/ceph/mon/ceph-mon-1',
'/var/lib/ceph/something'])
assert result == set(['ceph-mon-1'])
def test_cannot_get_monitor_dirs(self):
result = mons.get_monitor_dirs([
'/var/lib/ceph/osd/ceph-osd-1',
'/var/lib/ceph/something'])
assert result == set([])
def test_get_monitor_dirs_multiple(self):
result = mons.get_monitor_dirs([
'/var/lib/ceph/mon/ceph-mon-1',
'/var/lib/ceph/mon/ceph-mon-3',
'/var/lib/ceph/mon/ceph-mon-2',
'/var/lib/ceph/something'])
assert result == set(['ceph-mon-1', 'ceph-mon-2', 'ceph-mon-3'])
def test_get_monitor_dirs_nested_multiple(self):
result = mons.get_monitor_dirs([
'/var/lib/ceph/mon/ceph-mon-1',
'/var/lib/ceph/mon/ceph-mon-1/nested/dir/',
'/var/lib/ceph/mon/ceph-mon-1/other/nested',
'/var/lib/ceph/mon/ceph-mon-2',
'/var/lib/ceph/something'])
assert result == set(['ceph-mon-1', 'ceph-mon-2'])
class TestOsdDirs(object):
def test_get_osd_dirs_nested_multiple(self):
result = mons.get_osd_dirs([
'/var/lib/ceph/osd/ceph-1',
'/var/lib/ceph/osd/ceph-1/nested/dir/',
'/var/lib/ceph/osd/ceph-1/other/nested',
'/var/lib/ceph/osd/ceph-2',
'/var/lib/ceph/something'])
assert result == set(['ceph-1', 'ceph-2'])
class TestMonRecommendedCount(object):
def test_recommended_count_is_met(self, data):
metadata['mons'] = dict(('mon%s' % count, []) for count in range(6))
metadata['cluster_name'] = 'ceph'
osd_data = data()
result = mons.check_mon_recommended_count(None, osd_data)
assert result is None
def test_recommended_count_is_unmet(self, data):
metadata['mons'] = dict(('mon%s' % count, []) for count in range(1))
metadata['cluster_name'] = 'ceph'
osd_data = data()
code, message = mons.check_mon_recommended_count(None, osd_data)
assert code == 'WMON3'
assert message == 'Recommended number of MONs (3) not met: 1'
class TestMonCountIsOdd(object):
def test_count_is_odd(self, data):
metadata['mons'] = dict(('mon%s' % count, []) for count in range(3))
metadata['cluster_name'] = 'ceph'
osd_data = data()
result = mons.check_mon_count_is_odd(None, osd_data)
assert result is None
def test_recommended_count_is_unmet(self, data):
metadata['mons'] = dict(('mon%s' % count, []) for count in range(2))
metadata['cluster_name'] = 'ceph'
osd_data = data()
code, message = mons.check_mon_count_is_odd(None, osd_data)
assert code == 'WMON4'
assert message == 'Number of MONs is not an odd number: 2'
class TestSingleMon(object):
def test_is_single(self, data):
metadata['mons'] = {'mon.0': []}
metadata['cluster_name'] = 'ceph'
code, message = mons.check_for_single_mon(None, data())
assert code == 'WMON5'
assert message == 'A single monitor was detected: mon.0'
def test_is_not_single(self, data):
metadata['mons'] = dict(('mon%s' % count, []) for count in range(2))
metadata['cluster_name'] = 'ceph'
result = mons.check_for_single_mon(None, data())
assert result is None
|
11485622
|
import os
import matplotlib as mpl
import torch
import torchvision
from data_management import Permute, load_ct_data
from networks import RadonNet
# ----- load configuration -----
import config # isort:skip
# ----- global configuration -----
mpl.use("agg")
device = torch.device("cuda:0")
torch.cuda.set_device(0)
# ----- network configuration -----
radon_params = {
"n": [512, 512],
"n_detect": 1024,
"angles": torch.linspace(0, 360, 129, requires_grad=False)[:-1],
"d_source": torch.tensor(1000.00, requires_grad=False),
"s_detect": torch.tensor(-1.0, requires_grad=False),
"scale": torch.tensor(0.01, requires_grad=False),
"flat": True,
"mode": "fwd",
}
radon_net = RadonNet
# specify block coordinate descent order
def _specify_param(radon_net, train_phase):
if train_phase % 3 == 0:
radon_net.OpR.angles.requires_grad = False
radon_net.OpR.d_source.requires_grad = False
radon_net.OpR.scale.requires_grad = True
elif train_phase % 3 == 1:
radon_net.OpR.angles.requires_grad = False
radon_net.OpR.d_source.requires_grad = True
radon_net.OpR.scale.requires_grad = False
elif train_phase % 3 == 2:
radon_net.OpR.angles.requires_grad = True
radon_net.OpR.d_source.requires_grad = False
radon_net.OpR.scale.requires_grad = False
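# Resulting block coordinate descent schedule: phases 0, 3, 6, ... train only
# `scale`, phases 1, 4, 7, ... only `d_source`, and phases 2, 5, 8, ... only
# `angles`, so exactly one parameter group is updated per training phase.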
# ----- training configuration -----
mseloss = torch.nn.MSELoss(reduction="sum")
def loss_func(pred, tar):
return mseloss(pred, tar) / pred.shape[0]
train_phases = 3 * 10
train_params = {
"num_epochs": int(train_phases / 3) * [3, 2, 1],
"batch_size": train_phases * [10],
"loss_func": loss_func,
"save_path": [
os.path.join(
config.RESULTS_PATH,
"operator_radon_{}_"
"train_phase_{}".format(
radon_params["mode"], (i + 1) % (train_phases + 1),
),
)
for i in range(train_phases + 1)
],
"save_epochs": 1,
"optimizer": torch.optim.Adam,
"optimizer_params": int(train_phases / 3)
* [
{"lr": 1e-4, "eps": 1e-5},
{"lr": 1e-0, "eps": 1e-5},
{"lr": 1e-1, "eps": 1e-5},
],
"scheduler": torch.optim.lr_scheduler.StepLR,
"scheduler_params": {"step_size": 50, "gamma": 0.75},
"acc_steps": train_phases * [1],
"train_transform": torchvision.transforms.Compose([Permute([2, 1])]),
"val_transform": torchvision.transforms.Compose([Permute([2, 1])]),
"train_loader_params": {"shuffle": True, "num_workers": 0},
"val_loader_params": {"shuffle": False, "num_workers": 0},
}
# ----- data configuration -----
# always use same folds, num_fold for both train and val
# always use leave_out=True on train and leave_out=False on val data
train_data_params = {
"folds": 400,
"num_fold": 0,
"leave_out": True,
}
val_data_params = {
"folds": 400,
"num_fold": 0,
"leave_out": False,
}
train_data = load_ct_data("train", **train_data_params)
val_data = load_ct_data("train", **val_data_params)
# ------ save hyperparameters -------
os.makedirs(train_params["save_path"][-1], exist_ok=True)
with open(
os.path.join(train_params["save_path"][-1], "hyperparameters.txt"), "w"
) as file:
for key, value in radon_params.items():
file.write(key + ": " + str(value) + "\n")
for key, value in train_params.items():
file.write(key + ": " + str(value) + "\n")
for key, value in train_data_params.items():
file.write(key + ": " + str(value) + "\n")
for key, value in val_data_params.items():
file.write(key + ": " + str(value) + "\n")
file.write("train_phases" + ": " + str(train_phases) + "\n")
# ------ construct network and train -----
radon_net = radon_net(**radon_params).to(device)
print(list(radon_net.parameters()))
for i in range(train_phases):
train_params_cur = {}
for key, value in train_params.items():
train_params_cur[key] = (
value[i] if isinstance(value, (tuple, list)) else value
)
print("Phase {}:".format(i + 1))
for key, value in train_params_cur.items():
print(key + ": " + str(value))
_specify_param(radon_net, i)
radon_net.train_on(train_data, val_data, **train_params_cur)
# ------ bias correction -----
# save biased operator
os.makedirs(train_params["save_path"][-1], exist_ok=True)
torch.save(
radon_net.state_dict(),
os.path.join(train_params["save_path"][-1], "model_weights_biased.pt"),
)
# compute and assign mean sinogram error
sino_diff_mean = torch.zeros(128, 1024, device=device)
data_load_train = torch.utils.data.DataLoader(train_data, 10, shuffle=False)
with torch.no_grad():
for i, v_batch in enumerate(reversed(list(data_load_train))):
sino_diff_mean += (
v_batch[1].to(device).mean(dim=0).squeeze()
- radon_net.OpR(v_batch[0].to(device)).mean(dim=0).squeeze()
)
sino_diff_mean = sino_diff_mean / (i + 1)
radon_net.OpR.fwd_offset.data = sino_diff_mean
# save final operator
os.makedirs(train_params["save_path"][-1], exist_ok=True)
torch.save(
radon_net.state_dict(),
os.path.join(train_params["save_path"][-1], "model_weights_final.pt"),
)
|
11485633
|
import tensorflow as tf
import numpy as np
import math
class Detector:
def __init__(self,sess,state_dim,obj_num,fea_size,state_input,postfix):
self.sess = sess;
self.state_dim = state_dim;
self.fea_size=fea_size;
self.obj_num=obj_num;
self.postfix=postfix;
self.state_input=state_input;
# create detector
self.Theta,self.net = self.create_network();
self.params_num=len(self.net);
def create_network(self):
state_input2 = tf.reshape(self.state_input,[-1,self.fea_size]);
# 6 Detectors
output=np.zeros(6,dtype=object);
params_list=[];
for i in range(6):
with tf.variable_scope('detector_'+str(i+1)+"_"+self.postfix):
w=tf.get_variable('w',shape=[self.fea_size,1]);
b=tf.get_variable('b',shape=[1]);
output[i]=tf.sigmoid(tf.matmul(state_input2,w)+b);
params_list+=tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='detector_'+str(i+1)+"_"+self.postfix);
output=tf.concat(list(output),1);
"""
state_input3 = tf.reshape(self.state_input,[-1,self.obj_num,self.fea_size]);
state_input3 = tf.unstack(state_input3,self.fea_size,2);
output = tf.stack(state_input3[5:14],2);
"""
output=tf.reshape(output,[-1,self.obj_num*6]);
return output,params_list;
def run_target_nets(self,state_input,params_list):
state_input2 = tf.reshape(state_input,[-1,self.fea_size]);
output=np.zeros(6,dtype=object);
idx=0;
for i in range(6):
output[i]=tf.sigmoid(tf.matmul(state_input2,params_list[idx])+params_list[idx+1]);
idx+=2;
output=tf.concat(list(output),1);
"""
state_input3 = tf.reshape(state_input,[-1,self.obj_num,self.fea_size]);
state_input3 = tf.unstack(state_input3,self.fea_size,2);
output = tf.stack(state_input3[5:14],2);
"""
output=tf.reshape(output,[-1,self.obj_num*6]);
return output;
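# Construction sketch (TF1 style; the placeholder shape and names are assumptions):
#
#   sess = tf.Session()
#   state_input = tf.placeholder(tf.float32, [None, obj_num * fea_size])
#   det = Detector(sess, state_dim=fea_size, obj_num=obj_num,
#                  fea_size=fea_size, state_input=state_input, postfix="main")
#   # As written above, det.Theta holds the [batch, obj_num * 6] sigmoid detector
#   # output and det.net the list of trainable (w, b) variables.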
|
11485667
|
import torch
import sys
MOCO_WEIGHTS_PATH = sys.argv[1]
CONVERTED_WEIGHTS_OUT_PATH = sys.argv[2]
moco_weights = torch.load(MOCO_WEIGHTS_PATH, map_location='cpu')
moco_weights = {
k.replace("module.encoder_q.", "model."): v
for k, v in moco_weights['state_dict'].items()
if "module.encoder_q." in k
}
torch.save(moco_weights, CONVERTED_WEIGHTS_OUT_PATH)
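# Key mapping illustration (the layer name is only an example): an entry such as
#   "module.encoder_q.layer1.0.conv1.weight"
# is kept and renamed to
#   "model.layer1.0.conv1.weight",
# while keys without the "module.encoder_q." prefix (momentum encoder, queue,
# optimizer state) are dropped.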
|
11485686
|
import jsonobject
import requests
from requests import HTTPError, RequestException
from dimagi.utils.logging import notify_exception
from corehq.apps.formplayer_api import const
from corehq.apps.formplayer_api.exceptions import (
FormplayerAPIException,
FormplayerRequestException,
)
from corehq.apps.formplayer_api.utils import get_formplayer_url
from corehq.util.hmac_request import get_hmac_digest
from django.conf import settings
class ValidationAPIProblem(jsonobject.JsonObject):
type = jsonobject.StringProperty(choices=[
"error", "markup", "invalid-structure", "dangerous", "technical"])
message = jsonobject.StringProperty()
fatal = jsonobject.BooleanProperty()
class ValidationAPIResult(jsonobject.JsonObject):
validated = jsonobject.BooleanProperty()
fatal_error = jsonobject.StringProperty()
fatal_error_expected = jsonobject.BooleanProperty()
problems = jsonobject.ListProperty(ValidationAPIProblem)
class FormValidationResult(object):
def __init__(self, problems, success, fatal_error):
self.problems = problems
self.success = success
self.fatal_error = fatal_error
def to_json(self):
return {
'problems': self.problems,
'success': self.success,
'fatal_error': self.fatal_error,
}
def validate_form(form_xml):
try:
response = requests.post(
get_formplayer_url() + const.ENDPOINT_VALIDATE_FORM,
data=form_xml,
headers={
'Content-Type': 'application/xml',
'X-MAC-DIGEST': get_hmac_digest(settings.FORMPLAYER_INTERNAL_AUTH_KEY, form_xml),
}
)
except RequestException as e:
notify_exception(None, "Error calling Formplayer form validation endpoint")
raise FormplayerAPIException(e)
try:
response.raise_for_status()
except HTTPError:
notify_exception(None, "Error calling Formplayer form validation endpoint", details={
'status_code': response.status_code
})
raise FormplayerRequestException(response.status_code)
api_result = ValidationAPIResult(response.json())
result = FormValidationResult(
problems=[problem.to_json() for problem in api_result.problems],
success=api_result.validated,
fatal_error=api_result.fatal_error,
)
return result
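# Usage sketch (requires a reachable Formplayer instance and the
# FORMPLAYER_INTERNAL_AUTH_KEY setting; form_xml is the form's XML source):
#
#   result = validate_form(form_xml)
#   if not result.success:
#       for problem in result.problems:
#           print(problem["type"], problem["message"])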
|
11485709
|
from ..model import ik,types,config
from ..math import vectorops
from ..robotsim import IKSolver,IKObjective,RobotModelLink
from ..io import loader
import time
import random
from ..math import optimize,symbolic,symbolic_klampt,so3,se3
import numpy as np
class KlamptVariable:
"""
Attributes:
name (str): the Klamp't item's name
type (str): the Klamp't item's type
encoding (str): the way in which the item is encoded in the optimization
variables (list of Variable): the list of Variables encoding this Klamp't item
expr (Expression): the Expression that will be used to replace the symbolic mainVariable via
appropriate variables
constraints, encoder, decoder: internally used
"""
def __init__(self,name,type):
self.name = name
self.type = type
self.encoding = None
self.variables = None
self.expr = None
self.constraints = []
self.encoder = None
self.decoder = None
def bind(self,obj):
"""Binds all Variables associated with this to the value of Klamp't object obj"""
if self.type in ['Config','Vector','Vector3','Point']:
self.variables[0].bind(obj)
elif self.type == 'Configs':
assert len(obj) == len(self.variables),"Invalid number of configs in Configs object"
for i,v in enumerate(obj):
self.variables[i].bind(v)
elif self.type == 'Rotation':
if self.encoder is None:
self.variables[0].bind(obj)
else:
self.variables[0].bind(self.encoder(obj))
elif self.type == 'RigidTransform':
if self.encoder is None:
self.variables[0].bind(obj[0])
self.variables[1].bind(obj[1])
else:
T = self.encoder(obj)
self.variables[0].bind(T[0])
self.variables[1].bind(T[1])
else:
raise ValueError("Unsupported object type "+self.type)
def getParams(self):
"""Returns the list of current parameters bound to the symbolic Variables."""
if len(self.variables) > 1:
return [v.value for v in self.variables]
else:
return self.variables[0].value
def getValue(self):
"""Returns the Klamp't value corresponding to the current bound parameters."""
return self.decode(self.getParams())
def unbind(self):
"""Unbinds all Variables associated with this."""
for v in self.variables:
v.unbind()
def encode(self,obj):
"""Returns the parameters giving the encoding of the Klamp't object obj"""
if self.encoder is None:
return obj
else:
return self.encoder(obj)
def decode(self,params):
"""Returns the Klamp't object given a parameters encoding it"""
if self.decoder is None:
return params
else:
return self.decoder(params)
class RobotOptimizationProblem(optimize.OptimizationProblemBuilder):
"""Defines a generalized optimization problem for a robot, which is a subclass of
OptimizationProblemBuilder. This may easily incorporate IK constraints, and may
have additional specifications of active DOF.
Attributes:
robot (RobotModel) the robot whose configuration is being optimized
world (WorldModel, optional): the world containing possible obstacles
context (KlamptContext, inherited): a symbolic.KlamptContext that stores the variable q
denoting the robot configuration, as well as any user data. User data "robot" and "world"
are available by default.
q (Variable): the primary optimization variable.
activeDofs (list): the list of active robot DOFs.
autoLoad (dict): a dictionary of (userDataName:fileName) pairs that are stored so that user data
is automatically loaded from files. I.e., upon self.loadJson(), for each pair in autoLoad
the command self.context.userData[userDataName] = loader.load(fileName) is executed.
managedVariables (dict of KlamptVariable): a dictionary of KlamptVariables like rotations and
rigid transforms.
Managed variables should be referred to in parsed expressions with the prefix @name,
and are encoded into optimization form and decoded from optimization form
using KlamptVariable.bind / KlamptVariable.unbind. You can also retrieve the Klampt value
by KlamptVariable.getValue().
If you would like to find the configuration *closest* to solving the
IK constraints, either add the IK constraints one by one with weight=1 (or some other
numeric value), or call enableSoftIK() after the constraints have been added. In this
case, solve will always return a solution, as long as it finds a configuration that
passes the feasibility tests. The optimization method changes so that it 1) optimizes
the residual norm, and then 2) optimizes the cost function to maintain the residual
norm at its current value. In other words, minimizing error is the first priority and
minimizing cost is the second priority.
"""
def __init__(self,robot=None,world=None,*ikgoals):
self.robot = robot
self.world = world
if self.world is not None and robot is None and self.world.numRobots() > 0:
robot = self.world.robot(0)
self.robot = robot
context = symbolic_klampt.KlamptContext()
context.addUserData("robot",self.robot)
if self.world:
context.addUserData("world",self.world)
optimize.OptimizationProblemBuilder.__init__(self,context)
self.activeDofs = None
self.autoLoad = dict()
nlinks = robot.numLinks() if robot is not None else None
self.q = self.context.addVar('q','V',nlinks)
self.managedVariables = dict()
self.optimizationVariables = [self.q]
self.setJointLimits()
for goal in ikgoals:
self.addIKObjective(goal)
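    # Usage sketch (the robot, link name, and target point are assumptions):
    #
    #   opt = RobotOptimizationProblem(robot=robot, world=world)
    #   goal = ik.objective(robot.link("tool"), local=[0, 0, 0], world=[0.5, 0.0, 1.0])
    #   opt.addIKObjective(goal)      # pass weight=1.0 to treat it as a soft constraint
    #   params = optimize.OptimizerParams()
    #   params.numRestarts = 20
    #   q = opt.solve(params)         # None if no feasible configuration was found
    #   if q is not None:
    #       robot.setConfig(q)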
def isIKObjective(self,index):
"""Returns True if the indexed constraint is an IKObjective"""
if self.objectives[index].type != "eq":
return False
return symbolic.is_op(self.objectives[index].expr,'ik.residual')
def getIKObjective(self,index):
"""Returns the IKObjective the indexed constraint is an IKObjective"""
res = self.objectives[index].expr.args[0]
assert isinstance(res,symbolic.ConstantExpression) and isinstance(res.value,IKObjective),"Not an IK objective: "+str(self.objectives[index].expr)
return res.value
def enableSoftIK(self,enabled=True):
"""Turns on soft IK solving. This is the same as hard IK solving if all
constraints can be reached, but if the constraints cannot be reached, it will
try to optimize the error.
"""
        for i,o in enumerate(self.objectives):
            if self.isIKObjective(i):
                o.soft = enabled
def addIKObjective(self,obj,weight=None):
"""Adds a new IKObjective to the problem. If weight is not None, it is
added as a soft constraint."""
assert isinstance(obj,IKObjective)
self.addEquality(self.context.ik.residual(obj,self.context.setConfig("robot",self.q)),weight)
if hasattr(obj,'robot'):
if self.robot is None:
self.robot = obj.robot
else:
assert self.robot.index == obj.robot.index,"All objectives must be on the same robot"
def addUserData(self,name,fn):
"""Adds an auto-loaded userData. Raises an exception if fn cannot be loaded.
Arguments:
- name: the name of the userData.
- fn: the file from which it is loaded. It must be loadable with loader.load.
"""
assert isinstance(fn,str)
obj = loader.load(fn)
self.context.addUserData(name,obj)
self.autoLoad[name] = fn
def addKlamptVar(self,name,type=None,initialValue=None,encoding='auto',constraints=True,optimize=True):
"""Adds one or more variables of a given Klamp't type (e.g., "Config", "Rotation", "RigidTransform").
If necessary, constraints on the object will also be added, e.g., joint limits, or a quaternion unit
norm constraint.
At least one of type / initialValue must be provided.
Args:
name (str): a name for the variable.
type (str, optional): a supported variable type (default None determines the type by initialValue).
Supported types include "Config", "Configs", Rotation", "RigidTransform", "Vector3". Future
work may support Trajectory and other types.
initialValue (optional): the configuration of the variable. If it's a float, the type will be set to
numeric, if it's a list it will be set to a vector, or if its a supported object, the type will
be set appropriately and config.getConfig(initialValue) will be used for its parameter setting.
encoding (str, optional): only supported for Rotation and RigidTransform types, and defines how the
variable will be parameterized in optimization. Can be:
- 'rotation_vector' (default) for rotation vector, 3 parameters
- 'quaternion' for quaternion encoding, 4 parameters + 1 constraint
- 'rpy' for roll-pitch-yaw euler angles, 3 parameters
- None for full rotation matrix (9 parameters, 6 constraints)
- 'auto' (equivalent to to 'rotation_vector')
constraints (bool, optional): True if all default constraints are to be added. For Config / Configs
types, bound constraints at the robot's joint limits are added.
optimize (bool, optional): If True, adds the parameterized variables to the list of optimization
variables.
Returns:
KlamptVariable: an object containing information about the encoding of the variable.
Note that extra symbolic Variable names may be decorated with extensions in the form of "_ext" if
the encoding is not direct.
"""
if type is None:
assert initialValue is not None,"Either type or initialValue must be provided"
type = types.objectToTypes(initialValue)
if type in ['Vector3','Point']:
if initialValue is None:
initialValue = [0.0]*3
else:
assert len(initialValue)==3
type = 'Vector'
def default(name,value):
v = self.context.addVar(name,"V",len(value))
v.value = value[:]
return v
if name in self.managedVariables:
raise ValueError("Klamp't variable name "+name+" already defined")
kv = KlamptVariable(name,type)
if type == 'Config':
if initialValue is None:
initialValue = self.robot.getConfig()
else:
assert len(initialValue) == self.robot.numLinks()
v = default(name,initialValue)
if constraints:
self.setBounds(v.name,*self.robot.getJointLimits())
kv.constraints = [self.robot.getJointLimits()]
elif type == 'Vector':
assert initialValue is not None,"Need to provide initialValue for "+type+" type variables"
v = default(name,initialValue)
            kv.expr = symbolic.VariableExpression(v)
elif type == 'Configs':
assert initialValue is not None,"Need to provide initialValue for "+type+" type variables"
vals = []
for i,v in enumerate(initialValue):
vals.append(default(name+"_"+str(i),v))
if constraints:
self.setBounds(vals[-1].name,*self.robot.getJointLimits())
kv.constraints.append(self.robot.getJointLimits())
kv.variables = vals
kv.expr = symbolic.list_(*vals)
elif type == 'Rotation':
if encoding == 'auto': encoding='rotation_vector'
if encoding == 'rotation_vector':
if initialValue is not None:
initialValue2 = so3.rotation_vector(initialValue)
else:
initialValue = so3.identity()
initialValue2 = [0.0]*3
v = default(name+"_rv",initialValue2)
kv.expr = self.context.so3.from_rotation_vector(v)
kv.decoder = so3.from_rotation_vector
kv.encoder = so3.rotation_vector
elif encoding == 'quaternion':
if initialValue is not None:
initialValue2 = so3.quaternion(initialValue)
else:
initialValue = so3.identity()
initialValue2 = [1,0,0,0]
v = default(name+"_q",initialValue2)
kv.expr = self.context.so3.from_quaternion(v)
kv.decoder = so3.from_quaternion
kv.encoder = so3.quaternion
if constraints:
f = self.addEquality(self.context.so3.quaternion_constraint(v))
f.name = name+"_q_constraint"
kv.constraints = [f]
elif encoding == 'rpy':
if initialValue is not None:
initialValue2 = so3.rpy(initialValue)
else:
initialValue = so3.identity()
initialValue2 = [0.0]*3
v = default(name+"_rpy",initialValue2)
kv.expr = self.context.so3.from_rpy(v)
kv.decoder = so3.from_rpy
kv.encoder = so3.rpy
elif encoding is None:
if initialValue is None:
initialValue = so3.identity()
v = self.addVar(name,"Vector",initialValue)
if constraints:
f = self.addEquality(self.context.so3.eq_constraint(v))
f.name = name+"_constraint"
kv.constraints = [f]
else:
raise ValueError("Invalid encoding "+str(encoding))
kv.encoding = encoding
elif type == 'RigidTransform':
if initialValue is None:
Ri,ti = None,[0.0]*3
else:
Ri,ti = initialValue
kR = self.addKlamptVar(name+'_R','Rotation',Ri,constraints=constraints,encoding=encoding)
t = default(name+'_t',ti)
kv.variables = kR.variables+[t]
kv.constraints = kR.constraints
kv.expr = symbolic.list_(kR.expr,t)
kv.encoding = encoding
if kR.encoder is not None:
kv.encoder = lambda T:(kR.encoder(T[0]),T[1])
kv.decoder = lambda T:(kR.decoder(T[0]),T[1])
del self.managedVariables[kR.name]
else:
raise ValueError("Unsupported object type "+type)
if kv.variables is None:
kv.variables = [v]
if kv.expr is None:
kv.expr = symbolic.VariableExpression(v)
self.context.addExpr(name,kv.expr)
if optimize:
for v in kv.variables:
self.optimizationVariables.append(v)
self.managedVariables[name] = kv
return kv
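    # Managed-variable sketch (names and values are illustrative; opt is a
    # RobotOptimizationProblem):
    #
    #   kT = opt.addKlamptVar("T_goal", "RigidTransform",
    #                         initialValue=se3.identity(),
    #                         encoding="rotation_vector")
    #   # "@T_goal" may now appear in parsed expressions; after solving,
    #   # kT.getValue() decodes the optimized parameters back into an (R,t) pair.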
def get(self,name,defaultValue=None):
"""Returns a Variable or UserData in the context, or a managed KlamptVariable. If the item
does not exist, defaultValue is returned.
"""
if name in self.managedVariables:
return self.managedVariables[name]
else:
return self.context.get(name,defaultValue)
def rename(self,itemname,newname):
"""Renames a Variable, UserData, or managed KlamptVariable."""
if itemname in self.managedVariables:
item = self.managedVariables[itemname]
del self.managedVariables[itemname]
item.name = newname
print "Renaming KlamptVariable",itemname
self.context.expressions[newname] = self.context.expressions[itemname]
del self.context.expressions[itemname]
for var in item.variables:
varnewname = newname + var.name[len(itemname):]
print " Renaming internal variable",var.name,"to",varnewname
if var.name in self.variableBounds:
self.variableBounds[varnewname] = self.variableBounds[var.name]
del self.variableBounds[var.name]
self.context.renameVar(var,varnewname)
self.managedVariables[newname] = item
elif itemname in self.context.userData:
self.context.renameUserData(itemname,newname)
else:
var = self.context.variableDict[itemname]
if var.name in self.variableBounds:
self.variableBounds[newname] = self.variableBounds[var.name]
del self.variableBounds[var.name]
self.context.renameVar(var,newname)
def setActiveDofs(self,links):
"""Sets the list of active DOFs. These may be indices, RobotModelLinks, or strings."""
self.activeDofs = []
for v in links:
if isinstance(v,str):
self.activeDofs.append(self.robot.link(v).index)
elif isinstance(v,RobotModelLink):
self.activeDofs.append(v.index)
else:
assert isinstance(v,int)
self.activeDofs.append(v)
def enableDof(self,link):
"""Enables an active DOF. If this is the first time enableDof is called,
this initializes the list of active DOFs to the single link. Otherwise
it appends it to the list. (By default, all DOFs are enabled)"""
if isinstance(link,str):
link = self.robot.link(link).index
        elif isinstance(link,RobotModelLink):
            link = link.index
else:
assert isinstance(link,int)
if self.activeDofs is None:
self.activeDofs = [link]
else:
if link not in self.activeDofs:
self.activeDofs.append(link)
def disableJointLimits(self):
"""Disables joint limits. By default, the robot's joint limits are
used."""
self.setBounds("q",None,None)
def setJointLimits(self,qmin=None,qmax=None):
"""Sets the joint limits to the given lists qmin,qmax. By default,
the robot's joint limits are used."""
if qmin is None:
self.setBounds("q",*self.robot.getJointLimits())
return
#error checking
assert(len(qmin)==len(qmax))
if len(qmin)==0:
#disabled bounds
self.setBounds("q",None,None)
else:
if self.activeDofs is not None:
assert(len(qmin)==len(self.activeDofs))
raise NotImplementedError("What to do when you set joint limits on a subset of DOFS?")
else:
if self.robot is not None:
assert(len(qmin) == self.robot.numLinks())
self.setBounds("q",qmin,qmax)
def inJointLimits(self,q):
"""Returns True if config q is in the currently set joint limits."""
qmin,qmax = self.variableBounds.get('q',self.robot.getJointLimits())
if len(qmin) == 0:
return True
if len(qmin) > 0:
for v,a,b in zip(q,qmin,qmax):
if v < a or v > b:
return False
return True
def toJson(self,saveContextFunctions=False,prettyPrintExprs=False):
res = optimize.OptimizationProblemBuilder.toJson(self,saveContextFunctions,prettyPrintExprs)
if self.activeDofs is not None:
res['activeDofs'] = self.activeDofs
if len(self.managedVariables) > 0:
varobjs = []
for (k,v) in self.managedVariables.iteritems():
varobj = dict()
assert k == v.name
varobj['name'] = v.name
varobj['type'] = v.type
varobj['encoding'] = v.encoding
varobjs.append(varobj)
res['managedVariables'] = varobjs
if len(self.autoLoad) > 0:
res['autoLoad'] = self.autoLoad
return res
def fromJson(self,obj,doAutoLoad=True):
"""Loads from a JSON-compatible object.
Args:
obj: the JSON-compatible object
doAutoLoad (bool, optional): if True, performs the auto-loading step. An IOError is raised if any
item can't be loaded.
"""
optimize.OptimizationProblemBuilder.fromJson(self,obj)
if 'activeDofs' in obj:
self.activeDofs = obj['activeDofs']
else:
self.activeDofs = None
        assert 'q' in self.context.variableDict,'Strange, the loaded JSON file does not have a configuration q variable?'
self.q = self.context.variableDict['q']
if 'managedVariables' in obj:
self.managedVariables = dict()
for v in obj['managedVariables']:
name = v['name']
type = v['type']
encoding = v['encoding']
raise NotImplementedError("TODO: load managed variables from disk properly")
self.managedVariables[name] = self.addKlamptVar(name,type,encoding)
if doAutoLoad:
self.autoLoad = obj.get('autoLoad',dict())
for (name,fn) in self.autoLoad.iteritems():
try:
obj = loader.load(fn)
except Exception:
raise IOError("Auto-load item "+name+": "+fn+" could not be loaded")
self.context.addUserData(name,obj)
def solve(self,params=optimize.OptimizerParams()):
"""Locally or globally solves the given problem (using the robot's current configuration
as a seed if params.startRandom=False). Returns the solution configuration or
None if failed.
Args:
params (OptimizerParams, optional): configures the optimizer.
"""
if len(self.objectives) == 0:
print "Warning, calling solve without setting any constraints?"
return self.robot.getConfig()
robot = self.robot
solver = IKSolver(robot)
for i,obj in enumerate(self.objectives):
if self.isIKObjective(i):
ikobj = self.getIKObjective(i)
ikobj.robot = self.robot
solver.add(ikobj)
if self.activeDofs is not None:
solver.setActiveDofs(self.activeDofs)
ikActiveDofs = self.activeDofs
if 'q' in self.variableBounds:
solver.setJointLimits(*self.variableBounds['q'])
qmin,qmax = solver.getJointLimits()
if len(qmin)==0:
qmin,qmax = self.robot.getJointLimits()
backupJointLimits = None
if self.activeDofs is None:
#need to distinguish between dofs that affect feasibility vs IK
ikActiveDofs = solver.getActiveDofs()
if any(obj.type != 'ik' for obj in self.objectives):
activeDofs = [i for i in range(len(qmin)) if qmin[i] != qmax[i]]
activeNonIKDofs = [i for i in activeDofs if i not in ikActiveDofs]
ikToActive = [activeDofs.index(i) for i in ikActiveDofs]
else:
activeDofs = ikActiveDofs
                activeNonIKDofs = []
ikToActive = range(len(activeDofs))
else:
activeDofs = ikActiveDofs
activeNonIKDofs = []
ikToActive = range(len(ikActiveDofs))
anyIKProblems = False
anyCosts = False
softIK = False
for obj in self.objectives:
if obj.type == 'ik':
anyIKProblems = True
if obj.soft:
softIK = True
elif obj.type == 'cost' or obj.soft:
anyCosts = True
#sample random start point
if params.startRandom:
self.randomVarBinding()
solver.sampleInitial()
if len(activeNonIKDofs)>0:
q = robot.getConfig()
for i in activeNonIKDofs:
q[i] = random.uniform(qmin[i],qmax[i])
robot.setConfig(q)
if params.localMethod is not None or params.globalMethod is not None or (anyCosts or not anyIKProblems):
#set up optProblem, an instance of optimize.Problem
assert self.optimizationVariables[0] is self.q
if len(activeDofs) < self.robot.numLinks():
#freeze those inactive DOFs
q = self.robot.getConfig()
backupJointLimits = qmin[:],qmax[:]
inactiveDofs = set(range(len(q))) - set(activeDofs)
for i in inactiveDofs:
qmin[i] = q[i]
qmax[i] = q[i]
self.setBounds("q",qmin,qmax)
reducedProblem,reducedToFullMapping,fullToReducedMapping = self.preprocess()
optq = reducedProblem.context.variableDict['q']
print "Preprocessed problem:"
reducedProblem.pprint()
optProblem = reducedProblem.getProblem()
assert backupJointLimits is not None
self.setBounds("q",*backupJointLimits)
else:
optq = self.q
optProblem = self.getProblem()
reducedToFullMapping = fullToReducedMapping = None
#optProblem is now ready to use
if params.globalMethod is not None:
#set seed = robot configuration
if self.q.value is None:
self.q.bind(robot.getConfig())
if reducedToFullMapping is None:
x0 = self.getVarVector()
else:
for var,vexpr in zip(reducedProblem.optimizationVariables,fullToReducedMapping):
var.bind(vexpr.eval(self.context))
x0 = reducedProblem.getVarVector()
#do global optimization of the cost function and return
(succ,res) = params.solve(optProblem,x0)
if not succ:
print "Global optimize returned failure"
return None
if reducedToFullMapping is not None:
reducedProblem.setVarVector(res)
for var,vexpr in zip(self.optimizationVariables,reducedToFullMapping):
var.bind(vexpr.eval(reducedProblem.context))
else:
self.setVarVector(res)
#check feasibility if desired
if not self.inJointLimits(self.q.value):
print "Result from global optimize is out of joint limits"
return None
if not self.feasibilityTestsPass():
print "Result from global optimize isn't feasible"
return None
if not self.satisfiesEqualities(params.tol):
print "Result from global optimize doesn't satisfy tolerance."
return None
#passed
print "Global optimize succeeded! Cost",self.cost()
q = self.q.value
return q
if anyIKProblems:
print "Performing random-restart newton raphson"
#random-restart newton-raphson
solver.setMaxIters(params.numIters)
solver.setTolerance(params.tol)
best = None
bestQuality = float('inf')
if softIK:
#quality is a tuple
bestQuality = bestQuality,bestQuality
quality = None
            t0 = time.time()
            for restart in xrange(params.numRestarts):
if time.time() - t0 > params.timeout:
return best
t0 = time.time()
res = solver.solve()
if res or self.softObjectives:
q = robot.getConfig()
print "Got a solve, checking feasibility..."
#check feasibility if desired
t0 = time.time()
self.q.bind(q)
if not self.feasibilityTestsPass():
print "Failed feasibility"
#TODO: resample other non-robot optimization variables
                        if len(activeNonIKDofs) > 0:
u = float(restart+0.5)/params.numRestarts
q = robot.getConfig()
#perturbation sampling for non-IK dofs
                            for i in activeNonIKDofs:
delta = u*(qmax[i]-qmin[i])*0.5
q[i] = random.uniform(max(q[i]-delta,qmin[i]),min(q[i]+delta,qmax[i]))
robot.setConfig(q)
self.q.bind(q)
if not self.feasibilityTestsPass():
solver.sampleInitial()
continue
else:
solver.sampleInitial()
continue
print "Found a feasible config"
if softIK:
residual = solver.getResidual()
ikerr = max(abs(v) for v in residual)
if ikerr < params.tol:
ikerr = 0
else:
#minimize squared error
ikerr = vectorops.normSquared(residual)
if not anyCosts:
cost = 0
if ikerr == 0:
#feasible and no cost
return q
else:
cost = self.cost()
quality = ikerr,cost
else:
if not anyCosts:
#feasible, no costs, so we're done
print "Feasible and no costs, we're done"
return q
else:
#optimize
quality = self.cost(q)
print "Quality of solution",quality
if quality < bestQuality:
best = self.getVarValues()
bestQuality = quality
#sample a new ik seed
solver.sampleInitial()
            if best is None:
                #no feasible solution found within the restart budget
                return None
            if params.localMethod is None:
                return best[0]
print "Performing post-optimization"
#post-optimize using local optimizer
self.setVarValues(best)
if softIK:
if not self.satisfiesEqualities(params.tol):
raise NotImplementedError("TODO: add soft IK inequality constraint |ik residual| <= |current ik residual|")
optSolver = optimize.LocalOptimizer(method=params.localMethod)
if reducedToFullMapping is not None:
for var,vexpr in zip(reducedProblem.optimizationVariables,fullToReducedMapping):
var.bind(vexpr.eval(self.context))
x0 = reducedProblem.getVarVector()
else:
x0 = self.getVarVector()
optSolver.setSeed(x0)
res = optSolver.solve(optProblem,params.numIters,params.tol)
if res[0]:
if reducedToFullMapping is not None:
reducedProblem.setVarVector(res[1])
for var,vexpr in zip(self.optimizationVariables,reducedToFullMapping):
var.bind(vexpr.eval(reducedProblem.context))
else:
self.setVarVector(res[1])
#check feasibility if desired
if not self.feasibilityTestsPass():
pass
elif not anyCosts:
#feasible
best = self.getVarValues()
else:
#optimize
quality = self.cost()
if quality < bestQuality:
#print "Optimization improvement",bestQuality,"->",quality
best = self.getVarValues()
bestQuality = quality
elif quality > bestQuality + 1e-2:
print "Got worse solution by local optimizing?",bestQuality,"->",quality
                    #restore the best values found before the local optimization
                    self.setVarValues(best)
print "Resulting quality",bestQuality
return best[0]
else:
#no IK problems, no global method set -- for now, just perform random restarts
#
#set seed = robot configuration
if self.q.value is None:
self.q.bind(robot.getConfig())
if reducedToFullMapping is None:
x0 = self.getVarVector()
else:
for var,vexpr in zip(reducedProblem.optimizationVariables,fullToReducedMapping):
var.bind(vexpr.eval(self.context))
x0 = reducedProblem.getVarVector()
#do global optimization of the cost function and return
print "Current optimization variable vector is",x0
(succ,res) = params.solve(optProblem,x0)
if not succ:
print "Global optimize returned failure"
return None
if reducedToFullMapping is not None:
reducedProblem.setVarVector(res)
for var,vexpr in zip(self.optimizationVariables,reducedToFullMapping):
var.bind(vexpr.eval(reducedProblem.context))
else:
self.setVarVector(res)
#check feasibility if desired
if not self.inJointLimits(self.q.value):
print "Result from global optimize is out of joint limits"
return None
if not self.feasibilityTestsPass():
print "Result from global optimize isn't feasible"
return None
if not self.satisfiesEqualities(params.tol):
print "Result from global optimize doesn't satisfy tolerance: result %s"%(str(self.equalityResidual()),)
for obj in self.objectives:
if obj.type == 'eq':
print " ",obj.expr,":",obj.expr.eval(self.context)
return None
#passed
print "Global optimize succeeded! Cost",self.cost()
q = self.q.value
return q
|
11485733
|
import argparse
import os.path
from transformers import ViTFeatureExtractor, ViTForImageClassification
from hugsvision.inference.VisionClassifierInference import VisionClassifierInference
parser = argparse.ArgumentParser(description='Image classifier')
parser.add_argument('--path', type=str, default="./out/MYVITMODEL/1_2021-08-10-00-53-58/model/", help='The model path')
parser.add_argument('--img', type=str, default="1.jpg", help='The input image')
args = parser.parse_args()
print("Process the image: " + args.img)
try:
classifier = VisionClassifierInference(
feature_extractor = ViTFeatureExtractor.from_pretrained(args.path),
model = ViTForImageClassification.from_pretrained(args.path),
)
label = classifier.predict(img_path=args.img)
print("Predicted class:", label)
except Exception as e:
if "containing a preprocessor_config.json file" in str(e) and os.path.isfile(args.path + "config.json") == True:
print("\033[91m\033[4mError:\033[0m")
print("\033[91mRename the config.json file into \033[4mpreprocessor_config.json\033[0m")
else:
print(str(e))
|
11485743
|
import os
import json
import torch
import numpy as np
import scipy.sparse
from torchvision.datasets import VisionDataset
from sklearn.preprocessing import MinMaxScaler
class ContinuumLS(VisionDataset):
def __init__(self, root='/data/', name='reddit', data_type='train', task_type = 0, download=None, k_hop=1, thres_nodes = 50):
super(ContinuumLS, self).__init__(root)
self.name = name
        self.k_hop = k_hop
        self.thres_nodes = thres_nodes
adj_full, adj_train, feats, class_map, role = self.load_data(os.path.join(root,name))
self.adj_train = adj_train
self.adj_full = adj_full
self.features = torch.FloatTensor(feats)
self.feat_len = feats.shape[1]
self.labels = torch.LongTensor(list(class_map.values()))
if name in ["amazon"]:
self.num_class = self.labels.shape[1]
_, self.labels = self.labels.max(dim = 1)
else:
self.num_class = int(torch.max(self.labels) - torch.min(self.labels))+1
print("num_class", self.num_class)
if data_type == 'train':
self.mask = role["tr"]
elif data_type == 'mini':
self.mask = role["tr"][:100]
elif data_type == 'incremental':
self.mask = role["tr"]
self.mask = list((np.array(self.labels)[self.mask]==task_type).nonzero()[0])
elif data_type == 'valid':
self.mask = role["va"]
elif data_type == 'test':
self.mask = role["te"]
else:
raise RuntimeError('data type {} wrong'.format(data_type))
print('{} Dataset for {} Loaded with featlen {} and size {}.'.format(self.name, data_type, self.feat_len, len( self.mask)))
def __len__(self):
return len(self.labels[self.mask])
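    # Return (features, label, neighbors) for the index-th masked node: its features with an
    # extra leading dimension, its label, and per-hop neighbor features capped at thres_nodes
    # per hop (a single tensor instead of a list when k_hop is None).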
def __getitem__(self, index):
        if self.k_hop is None:
            k_hop = 1
        else:
            k_hop = self.k_hop
neighbors_khop = list()
ids_khop = [self.mask[index]]
for k in range(k_hop):
ids = torch.LongTensor()
neighbor = torch.FloatTensor()
for i in ids_khop:
ids = torch.cat((ids, self.get_neighborId(i)),0)
neighbor = torch.cat((neighbor, self.get_neighbor(i)),0)
## TODO random selection in pytorch is tricky
if ids.shape[0]>self.thres_nodes:
indices = torch.randperm(ids.shape[0])[:self.thres_nodes]
ids = ids[indices]
neighbor = neighbor[indices]
ids_khop = ids ## temp ids for next level
neighbors_khop.append(neighbor) ## cat different level neighbor
        if self.k_hop is None:
            neighbors_khop = neighbors_khop[0]
return torch.FloatTensor(self.features[self.mask[index]]).unsqueeze(-2), self.labels[self.mask[index]], neighbors_khop
def get_neighbor(self, i):
return self.features[self.get_neighborId(i)].unsqueeze(-2)
def get_neighborId(self, i):
return torch.LongTensor(self.adj_full[i].nonzero()[1])
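    # Load GraphSAINT-style preprocessed files (adj_full.npz, adj_train.npz, feats.npy,
    # class_map.json, role.json) from the given prefix and min-max scale the features
    # using statistics fit on the training nodes only.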
def load_data(self, prefix, normalize=True):
        adj_full = scipy.sparse.load_npz('{}/adj_full.npz'.format(prefix)).astype(bool)
        adj_train = scipy.sparse.load_npz('{}/adj_train.npz'.format(prefix)).astype(bool)
role = json.load(open('{}/role.json'.format(prefix)))
feats = np.load('{}/feats.npy'.format(prefix))
class_map = json.load(open('{}/class_map.json'.format(prefix)))
class_map = {int(k):v for k,v in class_map.items()}
assert len(class_map) == feats.shape[0]
train_nodes = np.array(list(set(adj_train.nonzero()[0])))
train_feats = feats[train_nodes]
scaler = MinMaxScaler()
scaler.fit(train_feats)
feats = scaler.transform(feats)
return adj_full, adj_train, feats, class_map, role
|
11485792
|
import torch.nn as nn
import torchquantum as tq
from abc import ABCMeta
__all__ = [
'QuantumModule',
'QuantumModuleList',
'QuantumModuleDict',
]
class QuantumModule(nn.Module):
def __init__(self) -> None:
super().__init__()
self.static_mode = False
self.graph = None
self.parent_graph = None
self.is_graph_top = False
self.unitary = None
self.wires = None
self.n_wires = None
self.q_device = None
# this is for gpu or cpu, not q device
self.device = None
# for the static tensor network simulation optimizations
self.wires_per_block = None
self.qiskit_processor = None
self.noise_model_tq = None
def set_noise_model_tq(self, noise_model_tq):
for module in self.modules():
module.noise_model_tq = noise_model_tq
def set_qiskit_processor(self, processor):
for module in self.modules():
module.qiskit_processor = processor
def set_wires_per_block(self, wires_per_block):
self.wires_per_block = wires_per_block
def static_on(self, is_graph_top=True, wires_per_block=3):
self.wires_per_block = wires_per_block
# register graph of itself and parent
self.static_mode = True
self.is_graph_top = is_graph_top
if self.graph is None:
self.graph = tq.QuantumGraph()
for module in self.children():
            if isinstance(module, (nn.ModuleList, nn.ModuleDict)):
# if QuantumModuleList or QuantumModuleDict, its graph will
# be the same as the parent graph because ModuleList and
# ModuleDict do not call the forward function
module.graph = self.graph
module.parent_graph = self.graph
if not isinstance(module, tq.QuantumDevice):
module.static_on(is_graph_top=False)
def static_off(self):
self.static_mode = False
self.graph = None
for module in self.children():
if not isinstance(module, tq.QuantumDevice):
module.static_off()
def set_graph_build_finish(self):
self.graph.is_list_finish = True
for module in self.graph.module_list:
if not isinstance(module, tq.QuantumDevice):
module.set_graph_build_finish()
def static_forward(self, q_device: tq.QuantumDevice):
self.q_device = q_device
self.device = q_device.states.device
self.graph.q_device = q_device
self.graph.device = q_device.states.device
# self.unitary, self.wires, self.n_wires = \
self.graph.forward(wires_per_block=self.wires_per_block)
# tqf.qubitunitary(
# q_device=self.q_device,
# wires=self.wires,
# params=self.unitary
# )
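    # Compute the full unitary of this module by temporarily switching to static mode over
    # all wires, building the static graph and matrix, and reading off its unitary; the
    # previous static mode and wires_per_block are restored afterwards.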
def get_unitary(self, q_device: tq.QuantumDevice, x=None):
original_wires_per_block = self.wires_per_block
original_static_mode = self.static_mode
self.static_off()
self.static_on(wires_per_block=q_device.n_wires)
self.q_device = q_device
self.device = q_device.state.device
self.graph.q_device = q_device
self.graph.device = q_device.state.device
self.is_graph_top = False
# forward to register all modules to the module list, but do not
# apply the unitary to the state vector
if x is None:
self.forward(q_device)
else:
self.forward(q_device, x)
self.is_graph_top = True
self.graph.build(wires_per_block=q_device.n_wires)
self.graph.build_static_matrix()
unitary = self.graph.get_unitary()
self.static_off()
if original_static_mode:
            self.static_on(wires_per_block=original_wires_per_block)
return unitary
class QuantumModuleList(nn.ModuleList, QuantumModule, metaclass=ABCMeta):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
class QuantumModuleDict(nn.ModuleDict, QuantumModule, metaclass=ABCMeta):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def test():
pass
if __name__ == '__main__':
test()
|
11485793
|
import torch
from ezflow.modules import MODULE_REGISTRY
def test_ConvGRU():
inp_x = torch.rand(2, 8, 32, 32)
inp_h = torch.rand(2, 8, 32, 32)
module = MODULE_REGISTRY.get("ConvGRU")(hidden_dim=8, input_dim=8)
_ = module(inp_h, inp_x)
def test_BasicBlock():
inp = torch.randn(2, 3, 256, 256)
module = MODULE_REGISTRY.get("BasicBlock")(
inp.shape[1], 32, norm="group", activation="relu", stride=3
)
_ = module(inp)
del module
module = MODULE_REGISTRY.get("BasicBlock")(
inp.shape[1], 32, norm="batch", activation="leakyrelu", stride=3
)
_ = module(inp)
del module
module = MODULE_REGISTRY.get("BasicBlock")(
inp.shape[1], 32, norm="instance", activation="relu", stride=3
)
_ = module(inp)
del module
module = MODULE_REGISTRY.get("BasicBlock")(
inp.shape[1], 32, norm="none", activation="relu", stride=3
)
_ = module(inp)
del module
module = MODULE_REGISTRY.get("BasicBlock")(
inp.shape[1], 32, norm=None, activation="relu", stride=3
)
_ = module(inp)
del module
def test_BottleneckBlock():
inp = torch.randn(2, 3, 256, 256)
module = MODULE_REGISTRY.get("BottleneckBlock")(
inp.shape[1], 32, norm="group", activation="relu", stride=3
)
_ = module(inp)
del module
module = MODULE_REGISTRY.get("BottleneckBlock")(
inp.shape[1], 32, norm="batch", activation="leakyrelu", stride=3
)
_ = module(inp)
del module
module = MODULE_REGISTRY.get("BottleneckBlock")(
inp.shape[1], 32, norm="instance", activation="relu", stride=3
)
_ = module(inp)
del module
module = MODULE_REGISTRY.get("BottleneckBlock")(
inp.shape[1], 32, norm="none", activation="relu", stride=3
)
_ = module(inp)
del module
module = MODULE_REGISTRY.get("BottleneckBlock")(
inp.shape[1], 32, norm=None, activation="relu", stride=3
)
_ = module(inp)
del module
def test_DAP():
inp = torch.randn(2, 1, 7, 7, 16, 16)
module = MODULE_REGISTRY.get("DisplacementAwareProjection")(temperature=False)
_ = module(inp)
module = MODULE_REGISTRY.get("DisplacementAwareProjection")(temperature=True)
_ = module(inp)
|
11485817
|
import requests
import asyncio
import json
import urllib3
from walkoff_app_sdk.app_base import AppBase
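# WALKOFF app exposing a subset of the CrowdStrike Falcon REST API as async actions.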
class Crowdstrike_Falcon(AppBase):
__version__ = "1.0"
app_name = "Crowdstrike_Falcon"
def __init__(self, redis, logger, console_logger=None):
self.verify = False
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
super().__init__(redis, logger, console_logger)
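    # Parse newline-separated "key=value" or "key: value" lines into a header dict.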
def setup_headers(self, headers):
request_headers={}
if len(headers) > 0:
for header in headers.split("\n"):
if '=' in header:
headersplit=header.split('=')
request_headers[headersplit[0].strip()] = headersplit[1].strip()
elif ':' in header:
headersplit=header.split(':')
request_headers[headersplit[0].strip()] = headersplit[1].strip()
return request_headers
    def setup_params(self, queries):
        # Parse "&"-separated "key=value" pairs into a query parameter dict.
        params={}
        if len(queries) > 0:
            for query in queries.split("&"):
                if '=' in query:
                    querysplit=query.split('=')
                    params[querysplit[0].strip()] = querysplit[1].strip()
        return params
async def generate_oauth2_access_token(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/oauth2/token"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
body={'client_id': client_id, 'client_secret': client_secret}
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def revoke_oauth2_access_token(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/oauth2/revoke"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
body={'client_id': client_id, 'client_secret': client_secret}
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def download_analysis_artifacts(self, url, client_id, client_secret, id, headers="", queries="", name=""):
params={}
request_headers={}
url=f"{url}/falconx/entities/artifacts/v1?id={id}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if name:
params["name"] = name
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_detect_aggregates(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/json"}
url=f"{url}/detects/aggregates/detects/GET/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def view_information_about_detections(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/json"}
url=f"{url}/detects/entities/summaries/GET/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def modify_detections(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/json"}
url=f"{url}/detects/entities/detects/v2"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.patch(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_sandbox_reports(self, url, client_id, client_secret, headers="", queries="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/falconx/queries/reports/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_rules_by_id(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/ioarules/entities/rules/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def delete_rules_from_a_rule_group_by_id(self, url, client_id, client_secret, rule_group_id, ids, headers="", queries="", comment=""):
params={}
request_headers={}
url=f"{url}/ioarules/entities/rules/v1?rule_group_id={rule_group_id}&ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.delete(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def create_a_rule_within_a_rule_group(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/json"}
url=f"{url}/ioarules/entities/rules/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def update_rules_within_a_rule_group(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/json"}
url=f"{url}/ioarules/entities/rules/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.patch(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_prevention_policy_members(self, url, client_id, client_secret, headers="", queries="", id="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/policy/combined/prevention-members/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if filter:
params["filter"] = filter
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def set_precedence_of_device_control_policies(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/device-control-precedence/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def retrieve_hidden_hosts(self, url, client_id, client_secret, headers="", queries="", offset="", limit="", sort="", filter=""):
params={}
request_headers={}
url=f"{url}/devices/queries/devices-hidden/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
if filter:
params["filter"] = filter
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_rule_types_by_id(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/ioarules/entities/rule-types/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_all_platform_ids(self, url, client_id, client_secret, headers="", queries="", offset="", limit=""):
params={}
request_headers={}
url=f"{url}/ioarules/queries/platforms/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if limit:
params["limit"] = limit
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_combined_for_indicators(self, url, client_id, client_secret, headers="", queries="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/iocs/combined/indicator/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def set_precedence_of_response_policies(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/response-precedence/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_a_set_of_sensor_visibility_exclusions(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/policy/entities/sv-exclusions/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def delete_the_sensor_visibility_exclusions_by_id(self, url, client_id, client_secret, ids, headers="", queries="", comment=""):
params={}
request_headers={}
url=f"{url}/policy/entities/sv-exclusions/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.delete(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def create_the_sensor_visibility_exclusions(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/sv-exclusions/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def update_the_sensor_visibility_exclusions(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/sv-exclusions/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.patch(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_prevention_policy_ids(self, url, client_id, client_secret, headers="", queries="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/policy/queries/prevention/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_notifications_based_on_their_ids(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/recon/entities/notifications/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def delete_notifications_based_on_ids_notifications(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/recon/entities/notifications/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.delete(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def update_notification_status_or_assignee(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/json"}
url=f"{url}/recon/entities/notifications/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.patch(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_sensor_installer_ids_by_provided_query(self, url, client_id, client_secret, headers="", queries="", offset="", limit="", sort="", filter=""):
params={}
request_headers={}
url=f"{url}/sensors/queries/installers/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
if filter:
params["filter"] = filter
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_info_about_indicators(self, url, client_id, client_secret, headers="", queries="", offset="", limit="", sort="", filter="", q="", include_deleted=""):
params={}
request_headers={}
url=f"{url}/intel/combined/indicators/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
if filter:
params["filter"] = filter
if q:
params["q"] = q
if include_deleted:
params["include_deleted"] = include_deleted
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def download_earlier_rule_sets(self, url, client_id, client_secret, id, headers="", queries="", format=""):
params={}
request_headers={"Accept": "undefined"}
url=f"{url}/intel/entities/rules-files/v1?id={id}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_report_ids(self, url, client_id, client_secret, headers="", queries="", offset="", limit="", sort="", filter="", q=""):
params={}
request_headers={}
url=f"{url}/intel/queries/reports/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
if filter:
params["filter"] = filter
if q:
params["q"] = q
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_rule_ids(self, url, client_id, client_secret, type, headers="", queries="", offset="", limit="", sort="", name="", description="", tags="", min_created_date="", max_created_date="", q=""):
params={}
request_headers={}
url=f"{url}/intel/queries/rules/v1?type={type}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
if name:
params["name"] = name
if description:
params["description"] = description
if tags:
params["tags"] = tags
if min_created_date:
params["min_created_date"] = min_created_date
if max_created_date:
params["max_created_date"] = max_created_date
if q:
params["q"] = q
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_sensor_update_policies(self, url, client_id, client_secret, headers="", queries="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/policy/combined/sensor-update/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_a_set_of_ioa_exclusions(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/policy/entities/ioa-exclusions/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def delete_the_ioa_exclusions_by_id(self, url, client_id, client_secret, ids, headers="", queries="", comment=""):
params={}
request_headers={}
url=f"{url}/policy/entities/ioa-exclusions/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.delete(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def create_the_ioa_exclusions(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/ioa-exclusions/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def update_the_ioa_exclusions(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/ioa-exclusions/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.patch(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_sensor_update_policy_member_ids(self, url, client_id, client_secret, headers="", queries="", id="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/policy/queries/sensor-update-members/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if filter:
params["filter"] = filter
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_sensor_visibility_exclusions(self, url, client_id, client_secret, headers="", queries="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/policy/queries/sv-exclusions/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def find_ids_for_submitted_scans(self, url, client_id, client_secret, headers="", queries="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/scanner/queries/scans/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_sensor_installer_details_by_provided_query(self, url, client_id, client_secret, headers="", queries="", offset="", limit="", sort="", filter=""):
params={}
request_headers={}
url=f"{url}/sensors/combined/installers/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
if filter:
params["filter"] = filter
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_hosts(self, url, client_id, client_secret, headers="", queries="", offset="", limit="", sort="", filter=""):
params={}
request_headers={}
url=f"{url}/devices/queries/devices-scroll/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
if filter:
params["filter"] = filter
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_info_about_reports(self, url, client_id, client_secret, headers="", queries="", offset="", limit="", sort="", filter="", q="", fields=""):
params={}
request_headers={}
url=f"{url}/intel/combined/reports/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
if filter:
params["filter"] = filter
if q:
params["q"] = q
if fields:
params["fields"] = fields
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_a_zipped_sample(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/malquery/entities/samples-fetch/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def schedule_samples_for_download(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/json"}
url=f"{url}/malquery/entities/samples-multidownload/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def perform_action_on_the_sensor_update_policies(self, url, client_id, client_secret, action_name, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/sensor-update-actions/v1?action_name={action_name}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def query_notifications(self, url, client_id, client_secret, headers="", queries="", offset="", limit="", sort="", filter="", q=""):
params={}
request_headers={}
url=f"{url}/recon/queries/notifications/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
if filter:
params["filter"] = filter
if q:
params["q"] = q
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_prevention_policies(self, url, client_id, client_secret, headers="", queries="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/policy/combined/prevention/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_status_of_an_executed_active_responder_command_on_a_single_host(self, url, client_id, client_secret, cloud_request_id, sequence_id, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/real-time-response/entities/active-responder-command/v1?cloud_request_id={cloud_request_id}&sequence_id={sequence_id}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def execute_an_active_responder_command_on_a_single_host(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/real-time-response/entities/active-responder-command/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def find_all_rule_ids(self, url, client_id, client_secret, headers="", queries="", sort="", filter="", q="", offset="", limit=""):
params={}
request_headers={}
url=f"{url}/ioarules/queries/rules/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if filter:
params["filter"] = filter
if q:
params["q"] = q
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def set_precedence_of_prevention_policies(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/prevention-precedence/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_indicators_ids(self, url, client_id, client_secret, headers="", queries="", offset="", limit="", sort="", filter="", q="", include_deleted=""):
params={}
request_headers={}
url=f"{url}/intel/queries/indicators/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
if filter:
params["filter"] = filter
if q:
params["q"] = q
if include_deleted:
params["include_deleted"] = include_deleted
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_sensor_update_policy_members(self, url, client_id, client_secret, headers="", queries="", id="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/policy/combined/sensor-update-members/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if filter:
params["filter"] = filter
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def batch_refresh_a_rtr_session_on_multiple_hosts_rtr_sessions_will_expire_after_10_minutes_unless_refreshed(self, url, client_id, client_secret, headers="", queries="", timeout="", timeout_duration="", body=""):
params={}
request_headers={}
url=f"{url}/real-time-response/combined/batch-refresh-session/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if timeout_duration:
params["timeout_duration"] = timeout_duration
body = " ".join(body.strip().split()).encode("utf-8")
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_queued_session_metadata_by_session_id(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/real-time-response/entities/queued-sessions/GET/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def perform_action_on_the_device_control_policies(self, url, client_id, client_secret, action_name, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/device-control-actions/v1?action_name={action_name}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_scans_aggregations(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/json"}
url=f"{url}/scanner/aggregates/scans/GET/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_detailed_notifications_based_on_their_ids(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/recon/entities/notifications-detailed-translated/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def retrieve_specific_indicators_using_their_indicator_ids(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/json"}
url=f"{url}/intel/entities/indicators/GET/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def find_all_rule_group_ids(self, url, client_id, client_secret, headers="", queries="", sort="", filter="", q="", offset="", limit=""):
params={}
request_headers={}
url=f"{url}/ioarules/queries/rule-groups/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if filter:
params["filter"] = filter
if q:
params["q"] = q
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_falcon_malquery(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/json"}
url=f"{url}/malquery/queries/exact-search/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def retrieve_available_builds_for_use_with_sensor_update_policies(self, url, client_id, client_secret, headers="", queries="", platform=""):
params={}
request_headers={}
url=f"{url}/policy/combined/sensor-update-builds/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_firewall_policies(self, url, client_id, client_secret, headers="", queries="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/policy/queries/firewall/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def retrieve_set_of_host_groups(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/devices/entities/host-groups/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def delete_set_of_host_groups(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/devices/entities/host-groups/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.delete(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def create_host_groups(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/devices/entities/host-groups/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def update_host_groups(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/devices/entities/host-groups/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.patch(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_behaviors(self, url, client_id, client_secret, headers="", queries="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/incidents/queries/behaviors/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_incidents(self, url, client_id, client_secret, headers="", queries="", sort="", filter="", offset="", limit=""):
params={}
request_headers={}
url=f"{url}/incidents/queries/incidents/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if filter:
params["filter"] = filter
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_rule_groups_by_id(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/ioarules/entities/rule-groups/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def delete_rule_groups_by_id(self, url, client_id, client_secret, ids, headers="", queries="", comment=""):
params={}
request_headers={}
url=f"{url}/ioarules/entities/rule-groups/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.delete(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def create_a_rule_group(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/json"}
url=f"{url}/ioarules/entities/rule-groups/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def update_a_rule_group(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/json"}
url=f"{url}/ioarules/entities/rule-groups/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.patch(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_all_rule_type_ids(self, url, client_id, client_secret, headers="", queries="", offset="", limit=""):
params={}
request_headers={}
url=f"{url}/ioarules/queries/rule-types/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if limit:
params["limit"] = limit
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_information_about_search_and_download_quotas(self, url, client_id, client_secret, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/malquery/aggregates/quotas/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def refresh_a_session_timeout_on_a_single_host(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/real-time-response/entities/refresh-session/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def query_crowdscore(self, url, client_id, client_secret, headers="", queries="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/incidents/combined/crowdscores/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def perform_actions_on_incidents(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/json"}
url=f"{url}/incidents/entities/incident-actions/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_info_about_actors(self, url, client_id, client_secret, headers="", queries="", offset="", limit="", sort="", filter="", q="", fields=""):
params={}
request_headers={}
url=f"{url}/intel/combined/actors/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
if filter:
params["filter"] = filter
if q:
params["q"] = q
if fields:
params["fields"] = fields
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_response_policy_members(self, url, client_id, client_secret, headers="", queries="", id="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/policy/combined/response-members/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if filter:
params["filter"] = filter
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def batch_initialize_a_rtr_session_on_multiple_hosts__before_any_rtr_commands_can_be_used_an_active_session_is_needed_on_the_host(self, url, client_id, client_secret, headers="", queries="", timeout="", timeout_duration="", body=""):
params={}
request_headers={}
url=f"{url}/real-time-response/combined/batch-init-session/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if timeout_duration:
params["timeout_duration"] = timeout_duration
body = " ".join(body.strip().split()).encode("utf-8")
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_rtr_extracted_file_contents_for_specified_session_and_sha256(self, url, client_id, client_secret, session_id, sha256, headers="", queries="", filename=""):
params={}
request_headers={}
url=f"{url}/real-time-response/entities/extracted-file-contents/v1?session_id={session_id}&sha256={sha256}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_host_groups(self, url, client_id, client_secret, headers="", queries="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/devices/combined/host-groups/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_all_pattern_severity_ids(self, url, client_id, client_secret, headers="", queries="", offset="", limit=""):
params={}
request_headers={}
url=f"{url}/ioarules/queries/pattern-severities/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if limit:
params["limit"] = limit
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_indicators_by_ids(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/iocs/entities/indicators/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def delete_indicators_by_ids(self, url, client_id, client_secret, headers="", queries="", filter="", ids="", comment=""):
params={}
request_headers={}
url=f"{url}/iocs/entities/indicators/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if ids:
params["ids"] = ids
if comment:
params["comment"] = comment
ret = requests.delete(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def create_indicators(self, url, client_id, client_secret, headers="", queries="", retrodetects="", ignore_warnings="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/jsonX-CS-USERNAME"}
url=f"{url}/iocs/entities/indicators/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if ignore_warnings:
params["ignore_warnings"] = ignore_warnings
body = " ".join(body.strip().split()).encode("utf-8")
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def update_indicators(self, url, client_id, client_secret, headers="", queries="", retrodetects="", ignore_warnings="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/jsonX-CS-USERNAME"}
url=f"{url}/iocs/entities/indicators/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if ignore_warnings:
params["ignore_warnings"] = ignore_warnings
body = " ".join(body.strip().split()).encode("utf-8")
ret = requests.patch(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def retrieve_a_set_of_device_control_policies(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/policy/entities/device-control/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def delete_a_set_of_device_control_policies(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/policy/entities/device-control/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.delete(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def create_device_control_policies(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/device-control/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def update_device_control_policies(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/device-control/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.patch(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_ioa_exclusions(self, url, client_id, client_secret, headers="", queries="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/policy/queries/ioa-exclusions/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
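# Aggregates RTR session data. As with several other endpoints in this API, the
# "GET"-style query is sent as a POST to an .../GET/v1 path with the query in
# the request body.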
async def get_aggregates_on_session_data(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/real-time-response/aggregates/sessions/GET/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def delete_a_session(self, url, client_id, client_secret, session_id, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/real-time-response/entities/sessions/v1?session_id={session_id}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.delete(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
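# Opens a new Real Time Response session by POSTing the caller-supplied `body`
# (typically the target device details) to /real-time-response/entities/sessions/v1.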
async def initialize_a_new_session_with_the_rtr_cloud(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/real-time-response/entities/sessions/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_a_full_sandbox_report(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/falconx/entities/reports/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def delete_report(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/falconx/entities/reports/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.delete(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_ml_exclusions(self, url, client_id, client_secret, headers="", queries="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/policy/queries/ml-exclusions/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_sensor_update_policy_ids(self, url, client_id, client_secret, headers="", queries="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/policy/queries/sensor-update/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def delete_a_queued_session_command(self, url, client_id, client_secret, session_id, cloud_request_id, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/real-time-response/entities/queued-sessions/command/v1?session_id={session_id}&cloud_request_id={cloud_request_id}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.delete(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def preview_rules_notification_count_and_distribution(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"X-CS-USERUUID": "undefined"}
url=f"{url}/recon/aggregates/rules-preview/GET/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_a_report_pdf_attachment(self, url, client_id, client_secret, id, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/intel/entities/report-files/v1?id={id}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def retrieve_a_set_of_prevention_policies(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/policy/entities/prevention/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def delete_a_set_of_prevention_policies(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/policy/entities/prevention/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.delete(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def create_prevention_policies(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/prevention/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def update_prevention_policies(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/prevention/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.patch(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_putfiles_based_on_the_ids_given(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/real-time-response/entities/put-files/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def delete_a_putfile_based_on_the_ids_given(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/real-time-response/entities/put-files/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.delete(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
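# Uploads a new put-file for use with the RTR `put` command. The `body` is sent
# as-is, so the caller must supply it (and any content-type header) in whatever
# form the endpoint expects.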
async def upload_a_new_putfile_to_use_for_the_rtr_put_command(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/real-time-response/entities/put-files/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_a_list_of_session_ids(self, url, client_id, client_secret, headers="", queries="", offset="", limit="", sort="", filter=""):
params={}
request_headers={}
url=f"{url}/real-time-response/queries/sessions/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
if filter:
params["filter"] = filter
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def retrieve_list_of_samples(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/jsonX-CS-USERUUID"}
url=f"{url}/samples/queries/samples/GET/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def check_status_of_sandbox_analysis(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/falconx/entities/submissions/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def submit_upload_for_sandbox_analysis(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/falconx/entities/submissions/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
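# Returns the number of hosts that have observed the given custom IOC,
# identified by the `type` and `value` query parameters.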
async def get_number_of_hosts_that_have_observed_a_given_custom_ioc(self, url, client_id, client_secret, type, value, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/indicators/aggregates/devices-count/v1?type={type}&value={value}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def set_precedence_of_firewall_policies(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/firewall-precedence/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_notification_aggregates(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/json"}
url=f"{url}/recon/aggregates/notifications/GET/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_actions_based_on_their_ids(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/recon/entities/actions/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def delete_an_action_from_a_monitoring_rule_based_on_the_action_id(self, url, client_id, client_secret, id, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/recon/entities/actions/v1?id={id}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.delete(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def create_actions_for_a_monitoring_rule(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/json"}
url=f"{url}/recon/entities/actions/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def update_an_action_for_a_monitoring_rule(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/json"}
url=f"{url}/recon/entities/actions/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.patch(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def query_actions(self, url, client_id, client_secret, headers="", queries="", offset="", limit="", sort="", filter="", q=""):
params={}
request_headers={}
url=f"{url}/recon/queries/actions/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
if filter:
params["filter"] = filter
if q:
params["q"] = q
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_host_group_ids(self, url, client_id, client_secret, headers="", queries="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/devices/queries/host-groups/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def retrieve_indexed_files_metadata_by_their_hash(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/malquery/entities/metadata/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_sensor_update_policies_with_additional_support_for_uninstall_protection(self, url, client_id, client_secret, headers="", queries="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/policy/combined/sensor-update/v2"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def perform_action_on_the_firewall_policies(self, url, client_id, client_secret, action_name, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/firewall-actions/v1?action_name={action_name}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_process_details(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/processes/entities/processes/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_a_short_summary_version_of_a_sandbox_report(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/falconx/entities/report-summaries/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def schedule_a_yara_based_search_for_execution(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/json"}
url=f"{url}/malquery/queries/hunt/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
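# Polls the status of a previously issued batch `get` command using the
# batch_get_cmd_req_id returned by the batch-get-command call below; an
# optional timeout_duration is forwarded as a query parameter when provided.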
async def retrieve_the_status_of_batch_get_command__will_return_successful_files_when_they_are_finished_processing(self, url, client_id, client_secret, batch_get_cmd_req_id, headers="", queries="", timeout="", timeout_duration=""):
params={}
request_headers={}
url=f"{url}/real-time-response/combined/batch-get-command/v1?batch_get_cmd_req_id={batch_get_cmd_req_id}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if timeout_duration:
params["timeout_duration"] = timeout_duration
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
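# Issues a batch `get` command across hosts. Results are not returned directly:
# the batch_get_cmd_req_id from this call is passed to the status method above
# (GET /real-time-response/combined/batch-get-command/v1) to collect the files.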
async def batch_executes_get_command_across_hosts_to_retrieve_files_after_this_call_is_made_get_realtimeresponsecombinedbatchgetcommandv1_is_used_to_query_for_the_results(self, url, client_id, client_secret, headers="", queries="", timeout="", timeout_duration="", body=""):
params={}
request_headers={}
url=f"{url}/real-time-response/combined/batch-get-command/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if timeout_duration:
params["timeout_duration"] = timeout_duration
body = " ".join(body.strip().split()).encode("utf-8")
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def query_monitoring_rules(self, url, client_id, client_secret, headers="", queries="", offset="", limit="", sort="", filter="", q=""):
params={}
request_headers={"X-CS-USERUUID": "undefined"}
url=f"{url}/recon/queries/rules/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
if filter:
params["filter"] = filter
if q:
params["q"] = q
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_sensor_installer_details_by_provided_sha256_ids(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/sensors/entities/installers/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def modify_host_tags(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/json"}
url=f"{url}/devices/entities/devices/tags/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.patch(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_response_policy_member_ids(self, url, client_id, client_secret, headers="", queries="", id="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/policy/queries/response-members/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if filter:
params["filter"] = filter
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_status_of_an_executed_rtr_administrator_command_on_a_single_host(self, url, client_id, client_secret, cloud_request_id, sequence_id, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/real-time-response/entities/admin-command/v1?cloud_request_id={cloud_request_id}&sequence_id={sequence_id}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def execute_a_rtr_administrator_command_on_a_single_host(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/real-time-response/entities/admin-command/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
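# Refreshes an active event stream. The stream partition is part of the URL
# path, while action_name and appId are embedded as query parameters in the
# URL string.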
async def refresh_an_active_event_stream(self, url, client_id, client_secret, action_name, appId, partition, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/sensors/entities/datafeed-actions/v1/{partition}?action_name={action_name}&appId={appId}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def validates_field_values_and_checks_for_string_matches(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/json"}
url=f"{url}/ioarules/entities/rules/validate/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def check_the_status_of_a_volume_scan(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/scanner/entities/scans/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def submit_a_volume_of_files_for_ml_scanning(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/scanner/entities/scans/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def download_the_latest_rule_set(self, url, client_id, client_secret, type, headers="", queries="", format=""):
params={}
request_headers={"Accept": "undefined"}
url=f"{url}/intel/entities/rules-latest-files/v1?type={type}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_rules_by_id(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/json"}
url=f"{url}/ioarules/entities/rules/GET/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def find_all_rule_groups(self, url, client_id, client_secret, headers="", queries="", sort="", filter="", q="", offset="", limit=""):
params={}
request_headers={}
url=f"{url}/ioarules/queries/rule-groups-full/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if filter:
params["filter"] = filter
if q:
params["q"] = q
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def check_the_status_and_results_of_an_asynchronous_request(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/malquery/entities/requests/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_a_set_of_ml_exclusions(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/policy/entities/ml-exclusions/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def delete_the_ml_exclusions_by_id(self, url, client_id, client_secret, ids, headers="", queries="", comment=""):
params={}
request_headers={}
url=f"{url}/policy/entities/ml-exclusions/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.delete(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def create_the_ml_exclusions(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/ml-exclusions/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def update_the_ml_exclusions(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/ml-exclusions/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.patch(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_device_control_policy_ids(self, url, client_id, client_secret, headers="", queries="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/policy/queries/device-control/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_firewall_policy_member_ids(self, url, client_id, client_secret, headers="", queries="", id="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/policy/queries/firewall-members/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if filter:
params["filter"] = filter
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_notifications_based_on_their_ids(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/recon/entities/notifications-translated/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_host_group_members(self, url, client_id, client_secret, headers="", queries="", id="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/devices/combined/host-group-members/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if filter:
params["filter"] = filter
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_platforms_by_id(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/ioarules/entities/platforms/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def perform_action_on_the_response_policies(self, url, client_id, client_secret, action_name, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/response-actions/v1?action_name={action_name}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def retrieve_a_set_of_response_policies(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/policy/entities/response/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def delete_a_set_of_response_policies(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/policy/entities/response/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.delete(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def create_response_policies(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/response/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def update_response_policies(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/response/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.patch(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
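# Executes a read-only RTR command across a batch of hosts; the caller-supplied
# JSON `body` has its whitespace collapsed and is UTF-8 encoded before posting.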
async def batch_executes_a_rtr_readonly_command(self, url, client_id, client_secret, headers="", queries="", timeout="", timeout_duration="", body=""):
params={}
request_headers={}
url=f"{url}/real-time-response/combined/batch-command/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if timeout_duration:
params["timeout_duration"] = timeout_duration
body = " ".join(body.strip().split()).encode("utf-8")
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_session_metadata_by_session_id(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/real-time-response/entities/sessions/GET/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
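# Performs the named action on a host group. Unlike most wrappers here it
# builds the JSON body itself, wrapping `hostnames` in an FQL hostname filter
# and targeting `host_group_id`; any caller-supplied `body` is ignored, and
# requests' json= keyword handles serialization and the Content-Type header.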
async def perform_action_on_host_group(self, url, client_id, client_secret, action_name, host_group_id, hostnames, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/devices/entities/host-group-actions/v1?action_name={action_name}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
body = {"action_parameters": [{"name": "filter", "value": "(hostname:['" + hostnames + "'])" } ], "ids": [ host_group_id ]}
ret = requests.post(url, headers=request_headers, params=params, json=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_device_control_policy_members(self, url, client_id, client_secret, headers="", queries="", id="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/policy/combined/device-control-members/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if filter:
params["filter"] = filter
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_firewall_policies(self, url, client_id, client_secret, headers="", queries="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/policy/combined/firewall/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def retrieve_a_set_of_sensor_update_policies_with_additional_support_for_uninstall_protection(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/policy/entities/sensor-update/v2?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def create_sensor_update_policies(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/sensor-update/v2"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def update_sensor_update_policies(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/sensor-update/v2"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.patch(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_a_list_of_putfile_ids(self, url, client_id, client_secret, headers="", queries="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/real-time-response/queries/put-files/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_a_list_of_custom_script_ids(self, url, client_id, client_secret, headers="", queries="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/real-time-response/queries/scripts/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_detailed_notifications_based_on_their_ids_with_raw_intelligence_content_that_generated_the_match(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/recon/entities/notifications-detailed/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_all_event_streams(self, url, client_id, client_secret, appId, headers="", queries="", format=""):
params={}
request_headers={}
url=f"{url}/sensors/entities/datafeed/v2?appId={appId}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def download_sensor_installer_by_sha256_id(self, url, client_id, client_secret, id, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/sensors/entities/download-installer/v1?id={id}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_hosts_that_have_observed_a_given_custom_ioc(self, url, client_id, client_secret, type, value, headers="", queries="", limit="", offset=""):
params={}
request_headers={}
url=f"{url}/indicators/queries/devices/v1?type={type}&value={value}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if limit:
params["limit"] = limit
if offset:
params["offset"] = offset
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def retrieve_details_for_rule_sets_for_ids(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/intel/entities/rules/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def download_a_file_indexed_by_malquery(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/malquery/entities/download-files/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
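# Reveals the uninstall token via POST /policy/combined/reveal-uninstall-token/v1;
# the target device is expected to be identified in the caller-supplied `body`.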
async def retrieve_an_uninstall_token_for_a_specific_device(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/combined/reveal-uninstall-token/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_response_policy_ids(self, url, client_id, client_secret, headers="", queries="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/policy/queries/response/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_a_list_of_files_for_rtr_session(self, url, client_id, client_secret, session_id, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/real-time-response/entities/file/v1?session_id={session_id}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def delete_a_rtr_session_file(self, url, client_id, client_secret, ids, session_id, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/real-time-response/entities/file/v1?ids={ids}&session_id={session_id}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.delete(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_custom_scripts_based_on_the_ids_given(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/real-time-response/entities/scripts/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def delete_a_custom_script_based_on_the_id_given(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/real-time-response/entities/scripts/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.delete(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def upload_a_new_custom_script_to_use(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/real-time-response/entities/scripts/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def upload_a_new_scripts_to_replace_an_existing_one(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/real-time-response/entities/scripts/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.patch(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_details_on_hosts(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/devices/entities/devices/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_actor_ids(self, url, client_id, client_secret, headers="", queries="", offset="", limit="", sort="", filter="", q=""):
params={}
request_headers={}
url=f"{url}/intel/queries/actors/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
if filter:
params["filter"] = filter
if q:
params["q"] = q
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_ccid_to_use_with_sensor_installers(self, url, client_id, client_secret, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/sensors/queries/installers/ccid/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def find_submission_ids_for_uploaded_files(self, url, client_id, client_secret, headers="", queries="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/falconx/queries/submissions/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_details_on_behaviors(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/json"}
url=f"{url}/incidents/entities/behaviors/GET/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_device_control_policies(self, url, client_id, client_secret, headers="", queries="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/policy/combined/device-control/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_prevention_policy_member_ids(self, url, client_id, client_secret, headers="", queries="", id="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/policy/queries/prevention-members/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if filter:
params["filter"] = filter
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_status_of_an_executed_command_on_a_single_host(self, url, client_id, client_secret, cloud_request_id, sequence_id, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/real-time-response/entities/command/v1?cloud_request_id={cloud_request_id}&sequence_id={sequence_id}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def execute_a_command_on_a_single_host(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/real-time-response/entities/command/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
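# Downloads a sample file by its SHA256 ID from the v3 samples endpoint; the
# `password_protected` argument is accepted but not forwarded.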
async def retrieve_the_file_associated_with_the_given_id_sha256(self, url, client_id, client_secret, ids, headers="", queries="", password_protected=""):
params={}
request_headers={"X-CS-USERUUID": "undefined"}
url=f"{url}/samples/entities/samples/v3?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def delete_sample_from_the_collection(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={"X-CS-USERUUID": "undefined"}
url=f"{url}/samples/entities/samples/v3?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.delete(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def upload_a_file_for_further_cloud_analysis(self, url, client_id, client_secret, file_name, headers="", queries="", comment="", is_confidential="", body=""):
params={}
request_headers={"X-CS-USERUUID": "undefined"}
url=f"{url}/samples/entities/samples/v3?file_name={file_name}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if is_confidential:
params["is_confidential"] = is_confidential
body = " ".join(body.strip().split()).encode("utf-8")
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_response_policies(self, url, client_id, client_secret, headers="", queries="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/policy/combined/response/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def retrieve_a_set_of_firewall_policies(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/policy/entities/firewall/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def delete_a_set_of_firewall_policies(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/policy/entities/firewall/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.delete(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def create_firewall_policies(self, url, client_id, client_secret, headers="", queries="", clone_id="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/firewall/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
body = " ".join(body.strip().split()).encode("utf-8")
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def update_firewall_policies(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/firewall/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.patch(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def set_precedence_of_sensor_update_policies(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/sensor-update-precedence/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_device_control_policy_member_ids(self, url, client_id, client_secret, headers="", queries="", id="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/policy/queries/device-control-members/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if filter:
params["filter"] = filter
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def batch_executes_a_rtr_active_responder_command(self, url, client_id, client_secret, headers="", queries="", timeout="", timeout_duration="", body=""):
params={}
request_headers={}
url=f"{url}/real-time-response/combined/batch-active-responder-command/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if timeout_duration:
params["timeout_duration"] = timeout_duration
body = " ".join(body.strip().split()).encode("utf-8")
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def batch_executes_a_rtr_administrator_command(self, url, client_id, client_secret, headers="", queries="", timeout="", timeout_duration="", body=""):
params={}
request_headers={}
url=f"{url}/real-time-response/combined/batch-admin-command/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if timeout_duration:
params["timeout_duration"] = timeout_duration
body = " ".join(body.strip().split()).encode("utf-8")
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_monitoring_rules_rules_by_provided_ids(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={"X-CS-USERUUID": "undefined"}
url=f"{url}/recon/entities/rules/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def delete_monitoring_rules(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={"X-CS-USERUUID": "undefined"}
url=f"{url}/recon/entities/rules/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.delete(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def create_monitoring_rules(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"X-CS-USERUUID": "undefined"}
url=f"{url}/recon/entities/rules/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def update_monitoring_rules(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"X-CS-USERUUID": "undefined"}
url=f"{url}/recon/entities/rules/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.patch(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_detection_ids(self, url, client_id, client_secret, headers="", queries="", offset="", limit="", sort="", filter="", q=""):
params={}
request_headers={}
url=f"{url}/detects/queries/detects/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
if filter:
params["filter"] = filter
if q:
params["q"] = q
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def retrieve_the_file_associated_with_the_given_id_sha256(self, url, client_id, client_secret, ids, headers="", queries="", password_protected=""):
params={}
request_headers={"X-CS-USERUUID": "undefined"}
url=f"{url}/samples/entities/samples/v2?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def upload_for_sandbox_analysis(self, url, client_id, client_secret, file_name, headers="", queries="", comment="", is_confidential="", body=""):
params={}
request_headers={"X-CS-USERUUID": "undefined"}
url=f"{url}/samples/entities/samples/v2?file_name={file_name}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if is_confidential:
params["is_confidential"] = is_confidential
body = " ".join(body.strip().split()).encode("utf-8")
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_host_group_member_ids(self, url, client_id, client_secret, headers="", queries="", id="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/devices/queries/host-group-members/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if filter:
params["filter"] = filter
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_details_on_incidents(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/json"}
url=f"{url}/incidents/entities/incidents/GET/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_processes_associated_with_a_custom_ioc(self, url, client_id, client_secret, type, value, device_id, headers="", queries="", limit="", offset=""):
params={}
request_headers={}
url=f"{url}/indicators/queries/processes/v1?type={type}&value={value}&device_id={device_id}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def retrieve_specific_reports_using_their_report_ids(self, url, client_id, client_secret, ids, headers="", queries="", fields=""):
params={}
request_headers={}
url=f"{url}/intel/entities/reports/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_indicators(self, url, client_id, client_secret, headers="", queries="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/iocs/queries/indicators/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_firewall_policy_members(self, url, client_id, client_secret, headers="", queries="", id="", filter="", offset="", limit="", sort=""):
params={}
request_headers={}
url=f"{url}/policy/combined/firewall-members/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if filter:
params["filter"] = filter
if offset:
params["offset"] = offset
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def perform_action_on_the_prevention_policies(self, url, client_id, client_secret, action_name, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/prevention-actions/v1?action_name={action_name}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def retrieve_a_set_of_sensor_update_policies(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/policy/entities/sensor-update/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def delete_a_set_of_sensor_update_policies(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/policy/entities/sensor-update/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.delete(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def create_sensor_update_policies(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/sensor-update/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def update_sensor_update_policies(self, url, client_id, client_secret, headers="", queries="", body=""):
params={}
request_headers={}
url=f"{url}/policy/entities/sensor-update/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.patch(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def take_action_on_hosts(self, url, client_id, client_secret, action_name, headers="", queries="", body=""):
params={}
request_headers={"Content-Type": "application/json","Accept": "application/json"}
url=f"{url}/devices/entities/devices-actions/v2?action_name={action_name}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.post(url, headers=request_headers, params=params, data=body)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def search_for_hosts(self, url, client_id, client_secret, headers="", queries="", offset="", limit="", sort="", filter=""):
params={}
request_headers={}
url=f"{url}/devices/queries/devices/v1"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
if limit:
params["limit"] = limit
if sort:
params["sort"] = sort
if filter:
params["filter"] = filter
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def retrieve_specific_actors_using_their_actor_ids(self, url, client_id, client_secret, ids, headers="", queries="", fields=""):
params={}
request_headers={}
url=f"{url}/intel/entities/actors/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
async def get_pattern_severities_by_id(self, url, client_id, client_secret, ids, headers="", queries=""):
params={}
request_headers={}
url=f"{url}/ioarules/entities/pattern-severities/v1?ids={ids}"
request_headers=self.setup_headers(headers)
params=self.setup_params(queries)
ret = requests.get(url, headers=request_headers, params=params)
try:
return ret.json()
except json.decoder.JSONDecodeError:
return ret.text
if __name__ == "__main__":
asyncio.run(Crowdstrike_Falcon.run(), debug=True)
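# Illustrative call pattern for the wrappers above (a sketch only: the `falcon`
# instance, the credential variables and the base URL are assumptions, not part
# of this module; `search_for_hosts` is defined above):
#
#     result = await falcon.search_for_hosts(
#         url="https://api.crowdstrike.com",
#         client_id=CLIENT_ID,
#         client_secret=CLIENT_SECRET,
#         filter="platform_name:'Windows'",
#         limit="100",
#     )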
|
11485825
|
import numpy as np
import pandas as pd
from filter import movingaverage
import math
from trendy import segtrends
#!pip install mlboost
from mlboost.core.pphisto import SortHistogram
# little hack to make it work inside the heroku twp submodule
import os, sys
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../'))
def orders_from_trends(x, segments=2, charts=True, window=7, momentum=False):
''' generate orders from segtrends '''
x_maxima, maxima, x_minima, minima = segtrends(x, segments, charts, window)
n = len(x)
y = np.array(x)
movy = movingaverage(y, window)
# generate order strategy
orders = np.zeros(n)
last_buy = y[0]
last_sale = y[0]
for i in range(1,n):
# get 2 latest support point y values prior to x
pmin = list(minima[np.where(x_minima<=i)][-2:])
pmax = list(maxima[np.where(x_maxima<=i)][-2:])
        # sell if the support slope is negative
min_sell = True if ((len(pmin)==2) and (pmin[1]-pmin[0])<0) else False
max_sell = True if ((len(pmax)==2) and (pmax[1]-pmax[0])<0) else False
# if support down, sell
buy = -1 if (min_sell and max_sell) else 0
        # buy only if the price is below the moving average, else sell
buy = 1 if ((buy == 0) and (y[i]<movy[i])) else -1
# sell only if ...
buy= -1 if ((buy == -1) and y[i]>last_buy) else 1
buy_price_dec = y[i]<last_buy
sale_price_dec = y[i]<last_sale
orders[i] = buy
last_buy = y[i] if (buy==1) else last_buy
last_sale = y[i] if (buy==-1) else last_sale
if momentum:
# add momentum for buy
if (buy==1) and (orders[i-1]>=1):
#if buy_price_dec:
orders[i]=orders[i-1]*2#round(math.log(2*orders[i-1])+1)
#else:
# orders[i]=max(1, round(orders[i-1]/2))
# add momentum for sale
elif (buy==-1) and (orders[i-1]<=-1):
#if sale_price_dec:
orders[i]*=round(math.log(abs(orders[i-1]*2))+1)
#else:
# orders[i]=max(1, round(orders[i-1]/2))
# OUTPUT
return orders
def orders2strategy(orders, price, min_stocks=1):
strategy = pd.Series(index=price.index)
orders=[el*min_stocks for el in orders]
    # create a strategy from the orders
for i, idx in enumerate(price.index):
if orders[i]!=0:
strategy[idx] = orders[i]
return strategy
def eval(stockname='TSLA', field='open', months=12, initialCash=20000,
min_stocks=30, charts=True, verbose=False, debug=False,
signalType='shares'):
if verbose:
print "Evaluation ", stockname
import lib.yahooFinance as yahoo
import lib.backtest as bt
from pylab import title, figure, savefig, subplot
n = (5*4)*months
price = yahoo.getHistoricData(stockname)[field][-n:]
if (charts and debug):
figure()
title('automatic strategy base %s' %stockname)
orders = orders_from_trends(price, segments=n/5, charts=(charts and debug),
momentum=True);
strategy = orders2strategy(orders, price, min_stocks)
# do the backtest
btr = bt.Backtest(price, strategy, initialCash=initialCash, signalType=signalType)
if charts:
print "#1) Automatic buy/sales visualisation of the current strategy (buy=long, short=sale)"
subplot(211)
#figure()
btr.plotTrades(stockname)
subplot(212)
print "#2) Evaluation of the strategy (PnL (Profit & Log) = Value today - Value yesterday)"
#figure()
btr.pnl.plot()
title('pnl '+stockname)
savefig('eval.png')
print "#3) big picture: Price, shares, value, cash & PnL"
btr.data.plot()
title('all strategy data %s' %stockname)
return btr.data
def eval_best(stocks=["TSLA", "GS", "SCTY", "AMZN", "CSCO", 'UTX','JCI',"GOOGL",'AAPL','BP','MSFT'],
field='open', months=12,
initialCash=20000, min_stocks=30,
charts=True, verbose=False, debug=False):
# try current strategy on different stock
trademap = {}
tradedetails = {}
for i, stock in enumerate(stocks):
trade = eval(stock, field=field, months=months, initialCash=initialCash,
min_stocks=min_stocks, charts=charts, verbose=False, debug=debug)
if False:
print i, stock, trade.ix[-1:,'cash':]
trademap[stock] = trade[-1:]['pnl'][-1]
tradedetails[stock] = trade[-1:]
st = SortHistogram(trademap, False, True)
if verbose:
print "Here are the Stocks sorted by PnL"
for i,el in enumerate(st):
stock, value = el
print "#", i+1, stock, tradedetails[stock]
return st
if __name__ == "__main__":
#eval(charts=True)
pass
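# Usage sketch (assumes the lib.yahooFinance / lib.backtest helpers imported
# inside eval() are importable; the ticker and parameter values are illustrative):
#
#     eval('TSLA', months=6, charts=False)           # backtest one ticker
#     ranking = eval_best(months=6, charts=False)    # rank tickers by final PnL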
|
11485838
|
from __future__ import absolute_import
import six
import unittest
from mock import Mock, patch
import urllib
class TestUrllibPatch(unittest.TestCase):
@unittest.skipIf(six.PY2, "urllib not compatible with python2")
def test_request_fn_injects_headers_and_returns(self):
from beeline.patch.urllib import _urllibopen # pylint: disable=bad-option-value,import-outside-toplevel
with patch('beeline.get_beeline') as m_bl:
bl = Mock()
m_bl.return_value = bl
trace_context = "1;trace_id=foo,parent_id=bar,context=base64value=="
bl.tracer_impl.http_trace_propagation_hook.return_value = {
'X-Honeycomb-Trace': trace_context
}
# this is our request call that's being wrapped
m_urlopen = Mock()
m_urlopen.return_value = Mock(
headers={'content-type': 'application/json', 'content-length': 23}, status_code=500)
args = ('https://example.com',)
kwargs = {}
ret = _urllibopen(m_urlopen, None, args, kwargs)
# ensure our arg gets modified and header set before the real function is called
self.assertEqual(
type(m_urlopen.call_args.args[0]), urllib.request.Request)
self.assertEqual(
m_urlopen.call_args.args[0].headers['X-Honeycomb-Trace'], trace_context)
            m_urlopen.assert_called_once()
m_urlopen.reset_mock()
# ensure we return a response
self.assertEqual(ret, m_urlopen.return_value)
# test case with Request object
m_urlopen.return_value = Mock(
headers={'content-type': 'application/json', 'content-length': 23}, status_code=500)
req = urllib.request.Request('https://example.com/2')
args = [req]
ret = _urllibopen(m_urlopen, None, args, kwargs)
self.assertEqual(type(args[0]), urllib.request.Request)
self.assertEqual(args[0].full_url, 'https://example.com/2')
self.assertEqual(
args[0].headers['X-Honeycomb-Trace'], trace_context)
self.assertEqual(ret, m_urlopen.return_value)
m_urlopen.assert_called_once_with(*args, **kwargs)
|
11485845
|
from datetime import date
from ..core import WesternCalendar, MON, TUE, WED, THU, FRI, SAT
from ..astronomy import solar_term
from ..registry_tools import iso_register
# https://www.feriados.cl
# http://www.feriadoschilenos.cl
@iso_register('CL')
class Chile(WesternCalendar):
"Chile"
FIXED_HOLIDAYS = WesternCalendar.FIXED_HOLIDAYS + (
(5, 21, "Navy Day"),
(9, 18, "National holiday"),
(9, 19, "Army holiday"),
(12, 31, "Banking Holiday"),
)
# Civil holidays
# Labor day (Law 2.200 and Law 18.018)
include_labour_day = True
# Christian holidays
# Holy Week (Law 2.977)
include_good_friday = True
include_easter_saturday = True
# Assumption (Law 2.977)
include_assumption = True
# All Saints (Law 2.977)
include_all_saints = True
# Immaculate Conception (Law 2.977)
include_immaculate_conception = True
def get_variable_days(self, year):
days = super().get_variable_days(year)
# Indigenous Peoples (Law 21.357)
indigenous_peoples_day = date(year, 6, 21)
if year == 2021:
days.append((indigenous_peoples_day, 'Indigenous Peoples Day'))
elif year > 2021:
june_solstice = solar_term(year, 90, 'America/Santiago')
days.append((june_solstice, 'Indigenous Peoples Day'))
# Saint Peter and Saint Paul (Law 18.432)
peter_paul = date(year, 6, 29)
if year < 2000:
days.append((peter_paul, 'Saint Peter and Saint Paul'))
else:
# floating monday (Law 19.668)
if peter_paul.weekday() in [TUE, WED, THU]:
days.append((Chile.get_nth_weekday_in_month(year, 6, MON, 4),
'Saint Peter and Saint Paul'))
elif peter_paul.weekday() == FRI:
days.append((date(year, 7, 2), 'Saint Peter and Saint Paul'))
else:
                days.append((peter_paul, 'Saint Peter and Saint Paul'))
# Our Lady of Mount Carmel (Law 20.148)
if year >= 2007:
days.append((date(year, 7, 16), "Our Lady of Mount Carmel"))
# National Bridge Days (Law 20.215)
if year >= 2007:
september_17 = date(year, 9, 17)
if september_17.weekday() == MON:
days.append((september_17, '"Bridge" holiday'))
september_20 = date(year, 9, 20)
if september_20.weekday() == FRI:
days.append((september_20, '"Bridge" holiday'))
# Additional Friday (Law 20.983)
if year >= 2017:
september_18 = date(year, 9, 18)
if (september_18.weekday() == SAT):
days.append((september_17, '"Bridge" holiday'))
# Meeting of the two Worlds (Law 3.810)
columbus_day = date(year, 10, 12)
if year < 2000:
days.append((columbus_day, 'Columbus Day'))
else:
# floating monday (Law 19.668)
if columbus_day.weekday() in [TUE, WED, THU]:
days.append((Chile.get_nth_weekday_in_month(year, 10, MON, 2),
'Columbus Day'))
elif columbus_day.weekday() == FRI:
days.append((date(year, 10, 15), 'Columbus Day'))
else:
days.append((columbus_day, 'Columbus Day'))
# Reformation Day (Law 20.299)
if year >= 2008:
reformation_day = date(year, 10, 31)
if reformation_day.weekday() == WED:
reformation_day = date(year, 11, 2)
elif reformation_day.weekday() == TUE:
reformation_day = date(year, 10, 27)
days.append((reformation_day, "Reformation Day"))
return days
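# Usage sketch (relies on the generic workalendar Calendar API inherited from
# WesternCalendar; the accessor names below are assumptions based on that API):
#
#     cal = Chile()
#     cal.holidays(2021)                      # list of (date, label) pairs
#     cal.is_working_day(date(2021, 9, 17))   # False when a "Bridge" holiday applies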
|
11485862
|
from __future__ import division, print_function, absolute_import
import numpy as np
from ipsolver._constraints import (BoxConstraint,
LinearConstraint,
NonlinearConstraint,
_check_kind,
_check_enforce_feasibility,
_reinforce_box_constraint)
from numpy.testing import (TestCase, assert_array_almost_equal,
assert_array_equal, assert_array_less,
assert_equal, assert_,
run_module_suite, assert_allclose, assert_warns,
dec)
import warnings
import pytest
class TestCheckKind(TestCase):
def test_kind_wrong_type(self):
with pytest.raises(ValueError):
_check_kind(1, "bla")
def test_kind_empty(self):
with pytest.raises(ValueError):
_check_kind(1, [])
def test_kind_invalid_format(self):
with pytest.raises(ValueError):
_check_kind(3, ["interval", [1, 2, 3]])
def test_kind_mismatching_ub_lb(self):
with pytest.raises(ValueError):
_check_kind(3, ["interval", [1, 2, 3], [1, 2]])
def test_kind_ub_smaller_than_lb(self):
with pytest.raises(ValueError):
_check_kind(3, ["interval", [1, 2, 3], [1, 2, 1]])
def test_string(self):
keyword, lb = _check_kind("greater", 3)
assert_equal(keyword, "greater")
assert_equal(lb, [0, 0, 0])
def test_broadcast(self):
keyword, lb = _check_kind(("greater", 1), 3)
assert_equal(keyword, "greater")
assert_equal(lb, [1, 1, 1])
class TestCheckEnforceFeasibility(TestCase):
def test_wrong_size(self):
with pytest.raises(ValueError):
_check_enforce_feasibility([True, True], 3)
def test_single_value(self):
f = _check_enforce_feasibility(True, 3)
assert_array_equal(f, [True, True, True])
class TestReinforceBoxConstraints(TestCase):
def test_reinforce_box_constraints(self):
lb = np.array([0, 20, 30])
ub = np.array([0.5, np.inf, 70])
enforce_feasibility = np.array([True, False, True],
dtype=bool)
kind = ("interval", lb, ub)
x0 = [1, 2, 3]
x0 = _reinforce_box_constraint(kind, enforce_feasibility, x0)
assert_array_less(lb[enforce_feasibility], x0[enforce_feasibility])
assert_array_less(x0[enforce_feasibility], ub[enforce_feasibility])
class TestBoxConstraint(TestCase):
def test_unfeasible_initial_point(self):
lb = np.array([0, 20, 30])
ub = np.array([0.5, np.inf, 70])
x0 = np.array([1, 2, 3])
enforce_feasibility = np.array([False, True, True],
dtype=bool)
kind = ("interval", lb, ub)
box = BoxConstraint(kind, enforce_feasibility)
with warnings.catch_warnings():
warnings.simplefilter("ignore")
x0_new = box.evaluate_and_initialize(x0)
assert_((lb[enforce_feasibility] <= x0_new[enforce_feasibility]).all())
assert_((x0_new[enforce_feasibility] <= ub[enforce_feasibility]).all())
def test_box_to_linear_conversion(self):
box = BoxConstraint(("interval", [10, 20, 30], [50, np.inf, 70]))
x0 = np.array([1, 2, 3])
x0 = box.evaluate_and_initialize(x0)
linear = box.to_linear()
assert_array_equal(linear.A.todense(), np.eye(3))
def test_box_to_nonlinear_conversion(self):
box = BoxConstraint(("interval", [10, 20, 30], [50, np.inf, 70]))
x0 = np.array([1, 2, 3])
x0 = box.evaluate_and_initialize(x0)
nonlinear = box.to_nonlinear()
assert_array_equal(nonlinear.fun(x0), x0)
assert_array_equal(nonlinear.jac(x0).todense(), np.eye(3))
class TestLinearConstraint(TestCase):
def test_unfeasible_initial_point(self):
x0 = np.array([1, 2, 3, 4])
A = np.array([[1, 2, 3, 4], [5, 0, 0, 6], [7, 0, 8, 0]])
enforce_feasibility = np.array([True, True, True],
dtype=bool)
kind = ("less",)
linear = LinearConstraint(A, kind, enforce_feasibility)
with pytest.raises(ValueError):
linear.evaluate_and_initialize(x0)
def test_linear_to_nonlinear_conversion(self):
x0 = np.array([1, 2, 3, 4])
A = np.array([[1, 2, 3, 4], [5, 0, 0, 6], [7, 0, 8, 0]])
enforce_feasibility = np.array([False, False, False],
dtype=bool)
kind = ("less",)
linear = LinearConstraint(A, kind, enforce_feasibility)
x0 = linear.evaluate_and_initialize(x0)
nonlinear = linear.to_nonlinear()
assert_array_equal(nonlinear.fun(x0), A.dot(x0))
assert_array_equal(nonlinear.jac(x0), A)
class TestNonlinearConstraint(TestCase):
def test_unfeasible_initial_point(self):
x0 = np.array([1, 2, 3, 4])
A = np.array([[1, 2, 3, 4], [5, 0, 0, 6], [7, 0, 8, 0]])
def fun(x):
return A.dot(x)
def jac(x):
return A
enforce_feasibility = np.array([True, True, True],
dtype=bool)
kind = ("less",)
nonlinear = NonlinearConstraint(fun, kind, jac, None, enforce_feasibility)
with pytest.raises(ValueError):
nonlinear.evaluate_and_initialize(x0)
def test_approximated_hessian(self):
def fun(x):
return [x[0]**2 + x[1]**3,
2/x[0] + x[0]*x[1]**2]
def jac(x):
return [[2*x[0], 3*x[1]**2],
[-2/x[0]**2 + x[1]**2, 2*x[0]*x[1]]]
def hess(x, v):
return (v[0]*np.array([[2, 0],
[0, 6*x[1]]]) +
v[1]*np.array([[4/x[0]**3, 2*x[1]],
[2*x[1], 2*x[0]]]))
x0 = [1, 2]
nonlinear_exact = NonlinearConstraint(fun, ("equals"), jac, hess)
nonlinear_exact.evaluate_and_initialize(x0)
nonlinear_approx = NonlinearConstraint(fun, ("equals"), jac, "2-point")
nonlinear_approx.evaluate_and_initialize(x0)
np.random.seed(1)
for i in range(10):
v = np.random.uniform(-5, 5, 2)
for j in range(5):
x = np.random.uniform(-5, 5, 2)
H_exact = nonlinear_exact.hess(x, v)
H_approx = nonlinear_approx.hess(x, v)
for l in range(5):
p = np.random.uniform(-5, 5, 2)
print(H_approx, H_exact)
assert_array_almost_equal(H_approx.dot(p)/H_exact.dot(p),
np.ones(2), 5)
|
11485900
|
import numpy as np
import torch
"""
Generate mm-spaces and their Euclidean metric
"""
def euclid_dist(x, y):
return np.linalg.norm(x[:, None, :] - y[None, :, :], axis=2)
def dist_matrix(x_i, y_j, p=2):
if p == 1:
return (x_i[:, :, None, :] - y_j[:, None, :, :]).norm(p=2, dim=3)
elif p == 2:
return (x_i[:, :, None, :] - y_j[:, None, :, :]).norm(p=2, dim=3) ** 2
else:
c_e = (x_i[:, :, None, :] - y_j[:, None, :, :]).norm(p=2, dim=3)
return c_e ** p
def generate_measure(n_batch, n_sample, n_dim, equal=False):
"""
Generate a batch of probability measures in R^d sampled over the unit
square.
:param n_batch: Number of batches
:param n_sample: Number of sampling points in R^d
:param n_dim: Dimension of the feature space
:param equal: Weights equal to 1
:return: A (Nbatch, Nsample, Ndim) torch.Tensor
"""
m = torch.distributions.exponential.Exponential(1.0)
a = m.sample(torch.Size([n_batch, n_sample]))
a = a / a.sum(dim=1)[:, None]
m = torch.distributions.uniform.Uniform(0.0, 1.0)
x = m.sample(torch.Size([n_batch, n_sample, n_dim]))
Cx = dist_matrix(x, x, 2)
if equal:
a = torch.ones_like(a)
return a, Cx, x
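# Minimal usage sketch (illustrative, not part of the original module): sample
# a small batch of measures and check the shapes of the tensors returned above.
def _example_generate_measure():
    a, Cx, x = generate_measure(n_batch=4, n_sample=10, n_dim=3, equal=False)
    assert a.shape == (4, 10)       # one weight vector (on the simplex) per batch
    assert Cx.shape == (4, 10, 10)  # squared-Euclidean cost matrix per batch
    assert x.shape == (4, 10, 3)    # support points sampled in the unit cube
    return a, Cx, x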
|
11485916
|
import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
from functools import reduce
def get_by_incident_id(incident_id, get_key, set_key=""):
set_key = get_key if not set_key else set_key
keys = get_key.split('.')
try:
context = demisto.executeCommand("getContext", {"id": incident_id})[0]['Contents']['context']
res = reduce(lambda x, y: x[y], keys, context)
except KeyError:
error_msg = "Cannot find {} in incident #{}".format(get_key, incident_id)
return_error(message=error_msg,
error='GetByIncidentId: ' + error_msg,
outputs={set_key: error_msg})
entry_context = {set_key: res}
return_outputs(
readable_output="Key '{}' successfully retrieved and set into current incident at '{}'.".format(get_key,
set_key),
outputs=entry_context)
def main():
'''Get arguments and call primary function'''
inc_id = demisto.args().get('incident_id', demisto.incidents()[0]['id'])
get_k = demisto.args()['get_key']
set_k = demisto.args().get('set_key', get_k)
get_by_incident_id(inc_id, get_k, set_k)
if __name__ == "__builtin__" or __name__ == "builtins":
main()
|
11485936
|
from typing import Literal, Optional, Union
import torch
from torch import nn
from buglab.models.layers.relational_multihead_attention import RelationalMultiheadAttention
def _get_activation_fn(activation):
if activation == "relu":
return nn.functional.relu
elif activation == "gelu":
return nn.functional.gelu
raise RuntimeError("activation should be relu/gelu, not {}".format(activation))
class RelationalTransformerEncoderLayer(nn.Module):
def __init__(
self,
d_model: int,
key_query_dimension: int,
value_dimension: int,
nhead: int,
num_edge_types: int,
dim_feedforward: int = 2048,
dropout: float = 0.1,
activation="relu",
use_edge_value_biases: bool = False,
edge_attention_bias_is_scalar: bool = False,
rezero_mode: Literal["off", "scalar", "vector"] = "off",
normalisation_mode: Literal["off", "prenorm", "postnorm"] = "postnorm",
):
"""
Args:
- rezero_mode: Three different modes are supported:
* "off": No ReZero use.
* "scalar": Sublayers (attention / fully connected) are scaled by a single scalar, i.e.,
\alpha is a scalar in the following:
x' = x + \alpha * SelfAtt(x)
x'' = x' + \alpha * Boom(x')
return x''
See https://arxiv.org/pdf/2003.04887.pdf.
* "vector": Sublayers (attention / fully connected) are scaled by one value per dim, i.e.,
\alpha is a vector in the following:
x' = x + \alpha * SelfAtt(x)
x'' = x' + \alpha * Boom(x')
return x''
See https://arxiv.org/pdf/2103.17239.pdf.
- normalisation_mode: Three different modes are supported:
* "off": use no layer norm at all. Likely to diverge without using rezero as well.
* "prenorm": Normalise values before each sublayer (attention / fully connected):
x' = x + SelfAtt(LN(x))
x'' = x' + Boom(LN(x'))
return x''
* "postnorm": Normalise values after each sublayer:
x' = LN(x + SelfAtt(x))
                x'' = LN(x' + Boom(x'))
return x''
"""
super(RelationalTransformerEncoderLayer, self).__init__()
self.self_attn = RelationalMultiheadAttention(
input_state_dimension=d_model,
num_heads=nhead,
output_dimension=d_model,
dropout_rate=dropout,
num_edge_types=num_edge_types,
key_query_dimension=key_query_dimension,
value_dimension=value_dimension,
use_edge_value_biases=use_edge_value_biases,
edge_attention_bias_is_scalar=edge_attention_bias_is_scalar,
)
# Implementation of Feedforward model
self.linear1 = nn.Linear(d_model, dim_feedforward)
self.dropout = nn.Dropout(dropout)
self.linear2 = nn.Linear(dim_feedforward, d_model)
self._normalisation_mode = normalisation_mode
if normalisation_mode in ("prenorm", "postnorm"):
self.norm1: Optional[nn.LayerNorm] = nn.LayerNorm(d_model)
self.norm2: Optional[nn.LayerNorm] = nn.LayerNorm(d_model)
elif normalisation_mode == "off":
self.norm1 = None
self.norm2 = None
else:
raise ValueError(f"Unrecognized normalization mode `{normalisation_mode}`.")
self.dropout1 = nn.Dropout(dropout)
self.dropout2 = nn.Dropout(dropout)
self.activation = _get_activation_fn(activation)
self._rezero_mode = rezero_mode
if rezero_mode == "off":
self._alpha1: Union[float, torch.Tensor] = 1.0
self._alpha2: Union[float, torch.Tensor] = 1.0
elif rezero_mode == "scalar":
self._alpha1 = nn.Parameter(torch.tensor(0.0))
self._alpha2 = nn.Parameter(torch.tensor(0.0))
elif rezero_mode == "vector":
self._alpha1 = nn.Parameter(torch.zeros(size=(d_model,)))
self._alpha2 = nn.Parameter(torch.zeros(size=(d_model,)))
else:
raise ValueError(f"Unrecognized rezero mode `{rezero_mode}`.")
def forward(self, src, src_mask, edges, edge_types):
# --- Sublayer 1: Self-Attention:
attn_input = src
if self._normalisation_mode == "prenorm":
attn_input = self.norm1(src)
src2 = self.self_attn(attn_input, src_mask, edges, edge_types)
src2 = self._alpha1 * src2
src = src + self.dropout1(src2)
if self._normalisation_mode == "postnorm":
src = self.norm1(src)
fc_input = src
if self._normalisation_mode == "prenorm":
fc_input = self.norm2(fc_input)
src2 = self.linear2(self.dropout(self.activation(self.linear1(fc_input))))
src2 = self._alpha2 * src2
src = src + self.dropout2(src2)
if self._normalisation_mode == "postnorm":
            src = self.norm2(src)
return src
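# Construction sketch (the hyper-parameter values are illustrative, not taken
# from the original configuration; a forward pass additionally needs the
# src_mask / edges / edge_types tensors whose layout is defined by
# RelationalMultiheadAttention and is not shown here):
def _example_layer() -> RelationalTransformerEncoderLayer:
    return RelationalTransformerEncoderLayer(
        d_model=256,
        key_query_dimension=32,
        value_dimension=32,
        nhead=8,
        num_edge_types=4,
        dim_feedforward=512,
        dropout=0.1,
        activation="gelu",
        rezero_mode="scalar",
        normalisation_mode="off",
    )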
|
11485945
|
from gevent import monkey
monkey.patch_all()
from typing import Optional
from apis.scaffold import mining, install
class Scaffold:
@staticmethod
def install(cdn: Optional[bool] = None):
"""
下载项目运行所需的配置。
## Basic Usage
Usage: python main.py install
_________________________________________________________________
or: python main.py install --cdn |使用CDN下载模型
_________________________________________________________________
## Intro
本指令不拉取 `requirements.txt`,需要手动操作。
:return:
"""
install.run(cdn=cdn)
@staticmethod
def mining(
env: Optional[str] = "development",
silence: Optional[bool] = True,
power: Optional[int] = 16,
collector: Optional[bool] = False,
classifier: Optional[bool] = False,
source: Optional[str] = "local",
batch: Optional[int] = 1,
):
"""
运行 Collector 以及 Classifier 采集并过滤基层数据
Usage: python main.py mining --silence=False |显式启动,在 linux 中运行时无效
or: python main.py mining --power=4 |指定分类器运行功率
or: python main.py mining --classifier --source=local |启动分类器,指定数据源为本地缓存
or: python main.py mining --classifier --source=remote --batch=1 |启动分类器,指定远程数据源
or: python main.py mining --collector |启动采集器
GitHub Actions Production
-------------------------
python main.py mining --env=production --collector --classifier --source=local
:param source: within [local remote] 指定数据源,仅对分类器生效
- local:使用本地 Collector 采集的数据进行分类
- remote:使用 SSPanel-Mining 母仓库数据进行分类(需要下载数据集)
:param batch: batch 应是自然数,仅在 source==remote 时生效,用于指定拉取的数据范围。
- batch=1 表示拉取昨天的数据(默认),batch=2 表示拉取昨天+前天的数据,以此类推往前堆叠
- 显然,当设置的 batch 大于母仓库存储量时会自动调整运行了逻辑,防止溢出。
:param env: within [development production]
:param silence: 采集器是否静默启动,默认静默。
:param power: 分类器运行功率。
:param collector: 采集器开启权限,默认关闭。
:param classifier: 分类器控制权限,默认关闭。
:return:
"""
if collector:
mining.run_collector(env=env, silence=silence)
if classifier:
mining.run_classifier(power=power, source=source, batch=batch)
|
11485981
|
from aws_cdk import (
aws_ecs as ecs,
aws_iam as iam,
aws_logs,
NestedStack,
Resource,
RemovalPolicy,
)
from constructs import Construct
from stacks.vpc_stack import VpcStack
class EcsStack(NestedStack):
def __init__(self, scope: Construct, id: str, vpc: VpcStack, **kwargs) -> None:
super().__init__(scope, id, **kwargs)
self.task_execution_role = iam.Role(
self,
"FargateTaskExecutionRole",
role_name="FargateTaskExecutionRole",
assumed_by=iam.ServicePrincipal("ecs-tasks.amazonaws.com"),
managed_policies=[
iam.ManagedPolicy.from_aws_managed_policy_name(
"service-role/AmazonECSTaskExecutionRolePolicy"
)
],
)
self._instance = ecs.Cluster(
self,
"SimulatorCluster",
cluster_name="SimulatorCluster",
vpc=vpc.instance,
container_insights=True,
)
self.simulator_log_group = aws_logs.LogGroup(
self,
"simulatorLogGroup",
log_group_name="/ecs/simulator",
retention=aws_logs.RetentionDays.ONE_MONTH,
removal_policy=RemovalPolicy.DESTROY,
)
@property
def instance(self) -> Resource:
return self._instance
|
11486007
|
from __future__ import absolute_import
import os
try:
# for Python2
import ConfigParser as cp
except ImportError:
# for Python3
import configparser as cp
import utils2to3
class EDRUserConfig(object):
def __init__(self, config_file='config/user_config.ini'):
self.config = cp.ConfigParser()
try:
self.config.read(utils2to3.abspathmaker(__file__, config_file))
except:
self.config = None
def discord_webhook(self, channel, incoming=True):
if self.config:
section = "discord_incoming" if incoming else "discord_outgoing"
key = "{}_webhook".format(channel)
try:
return self.config.get(section, key)
except:
return None
return None
class EDRConfig(object):
def __init__(self, config_file='config/config.ini'):
self.config = cp.ConfigParser()
self.config.read(utils2to3.abspathmaker(__file__, config_file))
def edr_version(self):
return self.config.get('general', 'version')
def edr_api_key(self):
return self.config.get('edr', 'edr_api_key')
def edr_server(self):
return self.config.get('edr', 'edr_server')
def edr_needs_u_novelty_threshold(self):
return int(self.config.get('edr', 'edr_needs_u_novelty_threshold'))
def edr_heartbeat(self):
return int(self.config.get('edr', 'edr_heartbeat'))
def inara_api_key(self):
return self.config.get('inara', 'inara_api_key')
def inara_endpoint(self):
return self.config.get('inara', 'inara_endpoint')
def edsm_api_key(self):
return self.config.get('edsm', 'edsm_api_key')
def edsm_server(self):
return self.config.get('edsm', 'edsm_server')
def intel_even_if_clean(self):
return self.config.getboolean('scans', 'intel_even_if_clean')
def intel_bounty_threshold(self):
return self.config.getint('scans', 'intel_bounty_threshold')
def legal_records_recent_threshold(self):
return int(self.config.get('scans', 'legal_records_recent_threshold'))
def legal_records_check_interval(self):
return int(self.config.get('scans', 'legal_records_check_interval'))
def legal_records_max_age(self):
return int(self.config.get('scans', 'legal_records_max_age'))
def system_novelty_threshold(self):
return int(self.config.get('novelty', 'system_novelty_threshold'))
def place_novelty_threshold(self):
return int(self.config.get('novelty', 'place_novelty_threshold'))
def ship_novelty_threshold(self):
return int(self.config.get('novelty', 'ship_novelty_threshold'))
def cognitive_novelty_threshold(self):
return int(self.config.get('novelty', 'cognitive_novelty_threshold'))
def enemy_alerts_pledge_threshold(self):
return int(self.config.get('enemies', 'enemy_alerts_pledge_threshold'))
def noteworthy_pledge_threshold(self):
return int(self.config.get('powerplay', 'noteworthy_pledge_threshold'))
def systems_max_age(self):
return int(self.config.get('lrucaches', 'systems_max_age'))
def cmdrs_max_age(self):
return int(self.config.get('lrucaches', 'cmdrs_max_age'))
def cmdrsdex_max_age(self):
return int(self.config.get('lrucaches', 'cmdrsdex_max_age'))
def sqdrdex_max_age(self):
return int(self.config.get('lrucaches', 'sqdrdex_max_age'))
def inara_max_age(self):
return int(self.config.get('lrucaches', 'inara_max_age'))
def blips_max_age(self):
return int(self.config.get('lrucaches', 'blips_max_age'))
def scans_max_age(self):
return int(self.config.get('lrucaches', 'scans_max_age'))
def traffic_max_age(self):
return int(self.config.get('lrucaches', 'traffic_max_age'))
def crimes_max_age(self):
return int(self.config.get('lrucaches', 'crimes_max_age'))
def alerts_max_age(self):
return int(self.config.get('lrucaches', 'alerts_max_age'))
def fights_max_age(self):
return int(self.config.get('lrucaches', 'fights_max_age'))
def materials_max_age(self):
return int(self.config.get('lrucaches', 'materials_max_age'))
def factions_max_age(self):
return int(self.config.get('lrucaches', 'factions_max_age'))
def edsm_systems_max_age(self):
return int(self.config.get('lrucaches', 'edsm_systems_max_age'))
def edsm_bodies_max_age(self):
return int(self.config.get('lrucaches', 'edsm_bodies_max_age'))
def edsm_stations_max_age(self):
return int(self.config.get('lrucaches', 'edsm_stations_max_age'))
def edsm_factions_max_age(self):
return int(self.config.get('lrucaches', 'edsm_factions_max_age'))
def edsm_markets_max_age(self):
return int(self.config.get('lrucaches', 'edsm_markets_max_age'))
def edsm_shipyards_max_age(self):
return int(self.config.get('lrucaches', 'edsm_shipyards_max_age'))
def edsm_outfitting_max_age(self):
return int(self.config.get('lrucaches', 'edsm_outfitting_max_age'))
def edsm_traffic_max_age(self):
return int(self.config.get('lrucaches', 'edsm_traffic_max_age'))
def edsm_deaths_max_age(self):
return int(self.config.get('lrucaches', 'edsm_deaths_max_age'))
def lru_max_size(self):
return int(self.config.get('lrucaches', 'lru_max_size'))
def edsm_within_radius_max_size(self):
return int(self.config.get('lrucaches', 'edsm_within_radius_max_size'))
def opponents_max_age(self, kind):
ckind = kind.lower()
return int(self.config.get(ckind, '{}_max_age'.format(ckind)))
def opponents_max_recents(self, kind):
ckind = kind.lower()
return int(self.config.get(ckind, '{}_max_recents'.format(ckind)))
def logging_level(self):
return self.config.get('general', 'logging_level')
def sitreps_timespan(self):
return int(self.config.get('sitreps', 'sitreps_timespan'))
def notams_timespan(self):
return int(self.config.get('notams', 'notams_timespan'))
def sitreps_max_age(self):
return int(self.config.get('sitreps', 'sitreps_max_age'))
def reports_check_interval(self):
return int(self.config.get('sitreps', 'reports_check_interval'))
def notams_check_interval(self):
return int(self.config.get('notams', 'notams_check_interval'))
def notams_max_age(self):
return int(self.config.get('notams', 'notams_max_age'))
def recon_recent_threshold(self):
return int(self.config.get('sitreps', 'recon_recent_threshold'))
def opponents_recent_threshold(self, kind):
ckind = kind.lower()
return int(self.config.get('sitreps', '{}_recent_threshold'.format(ckind)))
def crimes_recent_threshold(self):
return int(self.config.get('sitreps', 'crimes_recent_threshold'))
def traffic_recent_threshold(self):
return int(self.config.get('sitreps', 'traffic_recent_threshold'))
def instance_fight_staleness_threshold(self):
return int(self.config.get('instance', 'fight_staleness_threshold'))
def instance_danger_staleness_threshold(self):
return int(self.config.get('instance', 'danger_staleness_threshold'))
def hpp_trend_span(self):
return int(self.config.get('hpp', 'trend_span'))
def hpp_history_max_points(self):
return int(self.config.get('hpp', 'history_max_points'))
def hpp_history_max_span(self):
return int(self.config.get('hpp', 'history_max_span'))
EDR_CONFIG = EDRConfig()
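# Illustrative fragment of config/config.ini inferred from the accessors above
# (section and option names come from the getters; the values are placeholders):
#
#     [general]
#     version = 0.0.0
#     logging_level = INFO
#
#     [edr]
#     edr_api_key = <api key>
#     edr_server = https://example.invalid
#     edr_heartbeat = 60
#     edr_needs_u_novelty_threshold = 5
#
#     [lrucaches]
#     lru_max_size = 1000
#     systems_max_age = 86400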
|
11486008
|
from __future__ import division
from OpenGL.GL import *
from OpenGL.GLUT import *
from OpenGL.GLU import *
from model_helpers import *
from data_3d import *
# ----------------------------------------------
def render_polyface( polyface ):
if len(polyface) == 3:
_draw_polyface( polyface, face_type=GL_TRIANGLES )
elif len(polyface) == 4:
_draw_polyface( polyface, face_type=GL_QUADS )
elif len(polyface) > 4:
_draw_polyface( polyface, face_type=GL_POLYGON )
def make_triangle_face( tri = [( 0.0, 0.0, 0.0), (-1.0,-1.0,-1.0), (1.0,1.0,-1.0)] ):
render_polyface( tri )
#draw_normal_line( tri )
def _draw_polyface( face, face_type=GL_TRIANGLES ):
glPointSize(1.0)
glBegin(face_type)
glColor3f(1.0,1.0,1.0)
for v in face:
glNormal3f( v[0],v[1],v[2] )
glVertex3f( v[0],v[1],v[2] )
glEnd()
def draw_triangle_face( face ):
""" Wrapper for render_polyface()
"""
render_polyface(face)
def draw_normal_line( tri ):
c = find_center_of_triangle( tri )
n = find_perpendicular_of_triangle( tri )
n = np.add(c, n)
draw_line( c, n)
def draw_line( start=[0.0, 0.0, 0.0], end=[-2.0,-2.0,-2.0] , thickness=1.0 ):
s = start
e = end
glLineWidth( thickness )
glBegin(GL_LINES)
glVertex3f( s[0],s[1],s[2] )
glVertex3f( e[0],e[1],e[2] )
glEnd()
# ----------------------------------------------
def draw_text(text, x, y, DISABLE_LIGHTING=False, translate_point=[0,0,0]):
""" Display 2d text, orthogonally.
"""
windowWidth = glutGet(GLUT_WINDOW_WIDTH)
windowHeight = glutGet(GLUT_WINDOW_HEIGHT)
t = translate_point
# The Projection Matrix
glMatrixMode(GL_PROJECTION)
matrix = glGetDoublev(GL_PROJECTION_MATRIX)
glLoadIdentity()
glOrtho(0.0, windowWidth, windowHeight, 0.0, 0.0, 1.0)
# The Model Matrix
glMatrixMode (GL_MODELVIEW)
glLoadIdentity()
glPushMatrix()
glColor3f(1.0,1.0,1.0)
glRasterPos2i(x,y)
if DISABLE_LIGHTING:
glDisable(GL_LIGHTING)
for c in text:
glutBitmapCharacter(GLUT_BITMAP_HELVETICA_10, ord(c))
glPopMatrix()
# Revert the Projection Matrix
glMatrixMode(GL_PROJECTION)
glLoadMatrixd(matrix)
    # Restore the model-view matrix mode
glMatrixMode (GL_MODELVIEW)
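# Usage sketch (called from within a GLUT display callback, after the 3D scene
# has been drawn; the text and coordinates are illustrative):
#
#     draw_text("fps: 60", 10, 20, DISABLE_LIGHTING=True)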
# def get2dPoint(point3D, viewMatrix, projectionMatrix, width, height):
# projectionMatrix = glGetDoublev(GL_PROJECTION_MATRIX)
# viewProjectionMatrix = projectionMatrix * viewMatrix;
# # transform world to clipping coordinates
# point3D = viewProjectionMatrix.multiply(point3D);
# winX = int( round((( point3D[0] + 1 ) / 2.0) * width ))
# # we calculate -point3D.getY() because the screen Y axis is
# # oriented top->down
# winY = int( round((( 1 - point3D[1] ) / 2.0) *height ) )
# return (winX, winY)
def draw_sphere():
c=M_PI/180.0; # degrees to radians
interval = 25.0;
phiStart = 100.0; # Default 100
thetaStart = 180.0; # Default 180
# ---- for number of
phi = -phiStart
while phi <= (phiStart-interval):
#cout<<"phi "<< phi<<endl;
phir=c*phi;
phir20=c*(phi+interval); # Next phi, that is why phi<=(phiStart-interval)
#glBegin(GL_TRIANGLE_STRIP)
glBegin(GL_TRIANGLES)
#glBegin(GL_QUAD_STRIP);
#glBegin(GL_LINE_STRIP);
theta = -thetaStart
while theta <= thetaStart:
thetar=c*theta
x=sin(thetar)*cos(phir)
y=cos(thetar)*cos(phir)
z=sin(phir)
glVertex3d(x, y, z)
glNormal3f(x, y, z)
x=sin(phir20)
x=sin(thetar)*cos(phir20)
y=cos(thetar)*cos(phir20)
z=sin(phir20)
glNormal3f(x, y, z)
glVertex3d(x, y, z)
theta += interval
phi += interval
glEnd()
def draw_point(point3d=(1,1,1), size=1):
s = point3d
glPointSize(size)
glBegin(GL_POINTS)
glColor3f(1.0,1.0,1.0)
glVertex3f( s[0], s[1],s[2])
glEnd()
def draw_ground():
glBegin(GL_LINES)
glColor3f(1.0,1.0,1.0)
for i in range(16):
for j in range(16):
glVertex3f( i,-10,-j)
glVertex3f(-i,-10,-j)
glVertex3f(-i,-10, j)
glVertex3f( i,-10, j)
glEnd()
def draw_cube():
# Draw Cube (multiple quads)
glBegin(GL_QUADS)
glColor3f(0.0,1.0,0.0)
glVertex3f( 1.0, 1.0,-1.0)
glVertex3f(-1.0, 1.0,-1.0)
glVertex3f(-1.0, 1.0, 1.0)
glVertex3f( 1.0, 1.0, 1.0)
glColor3f(1.0,0.0,0.0)
glVertex3f( 1.0,-1.0, 1.0)
glVertex3f(-1.0,-1.0, 1.0)
glVertex3f(-1.0,-1.0,-1.0)
glVertex3f( 1.0,-1.0,-1.0)
glColor3f(0.0,1.0,0.0)
glVertex3f( 1.0, 1.0, 1.0)
glVertex3f(-1.0, 1.0, 1.0)
glVertex3f(-1.0,-1.0, 1.0)
glVertex3f( 1.0,-1.0, 1.0)
glColor3f(1.0,1.0,0.0)
glVertex3f( 1.0,-1.0,-1.0)
glVertex3f(-1.0,-1.0,-1.0)
glVertex3f(-1.0, 1.0,-1.0)
glVertex3f( 1.0, 1.0,-1.0)
glColor3f(0.0,0.0,1.0)
glVertex3f(-1.0, 1.0, 1.0)
glVertex3f(-1.0, 1.0,-1.0)
glVertex3f(-1.0,-1.0,-1.0)
glVertex3f(-1.0,-1.0, 1.0)
glColor3f(1.0,0.0,1.0)
glVertex3f( 1.0, 1.0,-1.0)
glVertex3f( 1.0, 1.0, 1.0)
glVertex3f( 1.0,-1.0, 1.0)
glVertex3f( 1.0,-1.0,-1.0)
glEnd()
def draw_axes(length=5, width_scale=3):
l = int(round(length))
thickness = 1
point_size = 1.667
# z
glColor3f(0.0,0.0,1.0)
draw_line((0,0,0), (0,0,l), thickness=thickness)
for i in range(l+1):
draw_point((0,0,i), point_size)
draw_line((0,0,0), (0,0,-l), thickness=thickness)
for i in range(-l,0,1):
draw_point((0,0,i), point_size)
# y
glColor3f(0.0,1.0,1.0)
draw_line((0,0,0), (0,l,0), thickness=thickness)
for i in range(l+1):
draw_point((0,i,0), point_size)
draw_line((0,0,0), (0,-l,0), thickness=thickness)
for i in range(-l,0,1):
draw_point((0,i,0), point_size)
# x
glColor3f(1.0,0.0,0.0)
draw_line((0,0,0), (l,0,0), thickness=thickness)
for i in range(l+1):
draw_point((i,0,0), point_size)
draw_line((0,0,0), (-l,0,0), thickness=thickness)
for i in range(-l,0,1):
draw_point((i,0,0), point_size)
def draw_reflection_rays(c, l, r):
draw_reflection_ray(c, r)
draw_incident_ray(c, l)
def draw_reflection_ray(c, r):
Origin_Vector = (0,0,0)
glTranslatef( c[0],c[1],c[2] )
draw_line(r, Origin_Vector, 5)
glTranslatef( -c[0],-c[1],-c[2] )
def draw_incident_ray(c, l):
Origin_Vector = (0,0,0)
glTranslatef( c[0],c[1],c[2] )
draw_line(l, Origin_Vector, 1)
glTranslatef( -c[0],-c[1],-c[2] )
def draw_reflection_to_camera( cameras, tri ):
for c in cameras:
face_center = find_center_of_triangle( tri )
draw_line( c, face_center )
def draw_cameras( cameraVertices ):
size = 0.8
for c in cameraVertices:
glPushMatrix()
glTranslatef( c[0],c[1],c[2] )
glutSolidCube( size )
glPopMatrix()
def draw_wire_sphere( vertex=(0,0,0), size=1, scale=1 ):
glPushMatrix()
# no_mat = [0.0, 0.0, 0.0, 1.0]
# mat_ambient = [0.7, 0.7, 0.7, 1.0]
# mat_ambient_color = [0.8, 0.8, 0.2, 1.0]
# mat_diffuse = [0.1, 0.5, 0.8, 1.0]
# mat_specular = [1.0, 1.0, 1.0, 1.0]
# no_shininess = 0.0
# low_shininess = 5.0
# high_shininess = 100.0
# mat_emission = [0.3, 0.2, 0.2, 0.0]
# from OpenGL.GL import GL_FRONT, GL_AMBIENT, GL_DIFFUSE, GL_SPECULAR, GL_SHININESS, GL_EMISSION, glMaterialfv, glMaterialf
# glMaterialfv(GL_FRONT, GL_AMBIENT, mat_ambient);
# glMaterialfv(GL_FRONT, GL_DIFFUSE, mat_diffuse);
# glMaterialfv(GL_FRONT, GL_SPECULAR, no_mat);
# glMaterialf(GL_FRONT, GL_SHININESS, no_shininess);
# glMaterialfv(GL_FRONT, GL_EMISSION, mat_emission);
glTranslatef( vertex[0]*scale, vertex[1]*scale, vertex[2]*scale )
glutWireSphere(size, 9, 9)
glPopMatrix()
def draw_solid_sphere( vertex=(0,0,0), size=1, scale=1 ):
glPushMatrix()
# no_mat = [0.0, 0.0, 0.0, 1.0]
# mat_ambient = [0.7, 0.7, 0.7, 1.0]
# mat_ambient_color = [0.8, 0.8, 0.2, 1.0]
# mat_diffuse = [0.1, 0.5, 0.8, 1.0]
# mat_specular = [1.0, 1.0, 1.0, 1.0]
# no_shininess = 0.0
# low_shininess = 5.0
# high_shininess = 100.0
# mat_emission = [0.3, 0.2, 0.2, 0.0]
# from OpenGL.GL import GL_FRONT, GL_AMBIENT, GL_DIFFUSE, GL_SPECULAR, GL_SHININESS, GL_EMISSION
# glMaterialfv(GL_FRONT, GL_AMBIENT, mat_ambient)
# glMaterialfv(GL_FRONT, GL_DIFFUSE, mat_diffuse)
# glMaterialfv(GL_FRONT, GL_SPECULAR, no_mat)
# glMaterialf(GL_FRONT, GL_SHININESS, no_shininess)
# glMaterialfv(GL_FRONT, GL_EMISSION, mat_emission)
glTranslatef( vertex[0]*scale, vertex[1]*scale, vertex[2]*scale )
# glColor3f(0.0,0.0,0.0)
glutSolidSphere(size, 9, 9)
glPopMatrix()
def draw_wire_frame_of_obj_from_filename(filename, scale=8):
faces = WaveFront.get_shape( filename, scale )
glLineWidth( 3.5 )
glBegin(GL_LINES)
glColor3f(0.0,0.0,0.0)
# faces = [Xn*faces][Xn*vertices][3xfloats]
for f in faces:
# Make Edges based on the quantity of vertices in a face; if 3, then (1,2),(2,3),(3,1); but if 4, then (1,2),(2,3),(3,4),(4,1).
for i in range(len(f)):
v1 = f[i]
j = i+1 if i < len(f)-1 else 0 # Wrap around to point0 if already at pointEND
v2 = f[j]
glVertex3f( v1[0], v1[1], v1[2] )
glVertex3f( v2[0], v2[1], v2[2] )
glEnd()
glLineWidth( 1.0 )
# Deprecated.
# def draw_dome( scale_multiplier =2,
# show_points = False,
# show_led_spheres= True,
# show_tris = False,
# show_lines = False,
# get_not_show_tris = False,
# show_selected_leds = None ):
# """
# Draw the dome into the current OpenGL view using old style opengl. And/Or return its point coordinates.
# -- Ready for refactoring... nasty code resides within --
# """
# scale = scale_multiplier
# edges = WaveFront.get_hardcoded_frame_faces() # dome_obj_data.get_dome_faces()
# vertices = dome_obj_data.get_dome_vertices()
# r = [x[1:] for x in vertices]
# #Nastily apply scaling to vertices for return variable.
# r2 = []
# for i in range(len(r)):
# r2.append([])
# for j in range(len(r[i])):
# r2[i].append( r[i][j]* scale )
# r = r2
# glColor3f(1.0,1.0,1.0)
# if show_selected_leds != None and type(show_selected_leds) == list:
# # Add the unselected LEDs as points.
# glPointSize(2.0)
# glBegin(GL_POINTS)
# for i in range(len(vertices)):
# v = vertices[i]
# if i not in show_selected_leds:
# glVertex3f( v[1]*scale, v[2]*scale, v[3]*scale )
# glEnd()
# # Add the selected LEDs as spheres.
# size = 0.3
# for i in range(len(vertices)):
# v = vertices[i]
# if i in show_selected_leds:
# glPushMatrix()
# glTranslatef( v[1]*scale, v[2]*scale, v[3]*scale )
# glutWireSphere(size, 10, 10)
# glPopMatrix()
# else:
# if show_points:
# glPointSize(4.0)
# glBegin(GL_POINTS)
# for v in vertices:
# glVertex3f( v[1]*scale, v[2]*scale, v[3]*scale )
# glEnd()
# if show_led_spheres:
# size = 0.1
# for v in vertices:
# glPushMatrix()
# glTranslatef( v[1]*scale, v[2]*scale, v[3]*scale )
# glutWireSphere(size, 10, 10)
# glPopMatrix()
# if show_tris and not get_not_show_tris:
# glPointSize(1.0)
# glBegin(GL_TRIANGLES)
# r = []
# r.append([])
# c = 0
# j = 0
# for e in edges:
# for i in e[1:]:
# c+=1
# v = vertices[i-1]
# tmpv = [v[1]*scale, v[2]*scale, v[3]*scale]
# glVertex3f( tmpv[0], tmpv[1], tmpv[2] )
# r[j].append(tmpv)
# if c % 3 == 0:
# j+=1
# r.append([])
# glEnd()
# """ Returns a list of lists of lists. The final list has 3 values. The mid-list has 3 vertices.
# The first list contains all the triangles.
# """
# if get_not_show_tris:
# r = []
# r.append([])
# c = 0
# j = 0
# for e in edges:
# for i in e[1:]:
# c+=1
# v = vertices[i-1]
# tmpv = [v[1]*scale, v[2]*scale, v[3]*scale]
# r[j].append(tmpv)
# if c % 3 == 0:
# j+=1
# r.append([])
# r = r[:len(r)-1] # remove the final empty list.
# """ Returns a list of lists of lists. The final list has 3 values. The mid-list has 3 vertices.
# The first list contains all the triangles.
# """
# if show_lines:
# glPointSize(1.0)
# glBegin(GL_LINES)
# qty_e = 0
# for e in edges:
# p1 = vertices[e[1]-1]
# p2 = vertices[e[2]-1]
# p3 = vertices[e[3]-1]
# #edge 1-2
# glVertex3f( p1[1]*scale, p1[2]*scale, p1[3]*scale )
# glVertex3f( p2[1]*scale, p2[2]*scale, p2[3]*scale )
# #edge 2-3
# glVertex3f( p2[1]*scale, p2[2]*scale, p2[3]*scale )
# glVertex3f( p3[1]*scale, p3[2]*scale, p3[3]*scale )
# qty_e += 2
# glEnd()
# checkShapeValidity( r )
# return r
|
11486012
|
from office365.runtime.client_value import ClientValue
class Video(ClientValue):
"""The Video resource groups video-related data items into a single structure."""
pass
|
11486063
|
import hashlib
import logging
print_warning = False
logger = logging.getLogger(__name__)
try:
from bcrypt import hashpw, gensalt
except ImportError:
print_warning = True
def hashpw(password, *args, **kwargs):
return hashlib.sha512(password).hexdigest().encode('utf8')
def gensalt(*args, **kwargs):
return b''
try:
from flask_bcrypt import Bcrypt
except ImportError:
try:
from flask.ext.bcrypt import Bcrypt
except ImportError:
print_warning = True
def generate_password_hash(password):
return hashlib.sha512(password.encode('utf8')).hexdigest()
def check_password_hash(reference, check):
check_hash = hashlib.sha512(check.encode('utf8')).hexdigest()
return check_hash == reference
class SHA512Fallback(object):
is_fallback_bcrypt = True
def __init__(self, app=None):
self.generate_password_hash = generate_password_hash
                self.check_password_hash = check_password_hash
def init_app(self, app):
app.bcrypt = self
Bcrypt = SHA512Fallback
if print_warning:
logging.warning("Bcrypt not available... falling back to SHA512.")
logging.warning("Run `pip install Flask-Bcrypt` for more secure "
"password hashing.")
|
11486077
|
from django.conf.urls import url
from django.contrib import admin
from django_graph_api.views import GraphQLView
from test_app.schema import schema
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^graphql$', GraphQLView.as_view(schema=schema)),
]
|
11486091
|
import math
import time
import torch
from nitorch import io
from nitorch import spatial
from nitorch.core import utils, py
from nitorch.core.optionals import try_import
from .volumes import show_orthogonal_slices
# from .menu import Menu, MenuItem
# optional imports
plt = try_import('matplotlib.pyplot', _as=True)
gridspec = try_import('matplotlib.gridspec', _as=True)
__all__ = ['ImageViewer']
def ordered_set(*values):
return tuple({v: None for v in values}.keys())
class ImageArtist:
def __init__(self, image, parent=None, **kwargs):
self.parent = parent
self.show_cursor = kwargs.pop('show_cursor', getattr(self.parent,'show_cursor', True))
self.equalize = kwargs.pop('equalize', getattr(self.parent, 'equalize', False))
self.mode = kwargs.pop('mode', getattr(self.parent, 'mode', 'intensity'))
self.interpolation = kwargs.pop('interpolation', getattr(self.parent, 'interpolation', 1))
self.colormap = kwargs.pop('colormap', getattr(self.parent, 'colormap', None))
self.mmap = kwargs.pop('mmap', getattr(self.parent, 'mmap', False))
self.layout = kwargs.pop('layout', getattr(self.parent, 'layout', 'orth'))
for k, v in kwargs.items():
setattr(self, k, v)
self.image = image
@property
def image(self):
return self._image
@image.setter
def image(self, value):
self._image = value
self._map_image()
@property
def affine(self):
return getattr(self, '_affine', None)
@affine.setter
def affine(self, value):
        if value is None:
            value = spatial.affine_default(self.shape)
        self._affine = value
@property
def shape(self):
return getattr(self, '_shape', None)
@property
def map(self):
return getattr(self, '_map', None)
@property
def fdata(self):
if getattr(self, '_fdata', None) is None:
if self.map is not None:
if self.mmap:
return self.map.fdata()
else:
self._fdata = self.map.fdata()
return getattr(self, '_fdata', None)
def _map_image(self):
image = self.image
self._map = None
self._fdata = None
self._affine = None
self._shape = None
if isinstance(image, str):
self._map = io.map(image)
else:
self._map = None
if self._map is None:
if not isinstance(image, (list, tuple)):
image = [image]
if len(image) < 2:
image = [*image, None, None]
dat, aff, *_ = image
dat = torch.as_tensor(dat)
if aff is None:
aff = spatial.affine_default(dat.shape[-3:])
self._fdata = dat
self._affine = aff
self._shape = tuple(dat.shape[-3:])
else:
self._affine = self._map.affine
self._shape = tuple(self._map.shape[-3:])
def load(self):
if self.fdata is None and self.map is not None:
self._fdata = self.map.fdata(device=self.device)
return self.fdata
def clear(self):
if self.fdata is not None and self.map is not None:
self._fdata = None
@property
def mmap(self):
return self._mmap
@mmap.setter
def mmap(self, value):
self._mmap = bool(value)
if self.mmap and self._fdata is not None:
self._fdata = None
@property
def device(self):
return getattr(self, '_device', None)
@device.setter
def device(self, value):
if isinstance(value, int):
value = f'cuda:{value}'
self._device = torch.device(value)
if self._fdata is not None:
self._fdata = self._fdata.to(self.device)
@property
def clim(self):
return getattr(self, '_cmin', None), getattr(self, '_cmax', None)
@clim.setter
def clim(self, value):
if torch.is_tensor(value):
value = value.flatten().unbind()
cmin, cmax = py.make_list(value, 2)
self._cmin = cmin
self._cmax = cmax
def set_show_cursor(self, value, all=False):
self.show_cursor = value
if all:
self.propagate('show_cursor')
def set_equalize(self, value, all=False):
self.equalize = value
if all:
self.propagate('equalize')
def set_mode(self, value, all=False):
self.mode = value
if all:
self.propagate('mode')
def set_interpolation(self, value, all=False):
self.interpolation = value
if all:
self.propagate('interpolation')
def set_colormap(self, value, all=False):
self.colormap = value
if all:
self.propagate('colormap')
def set_mmap(self, value, all=False):
self.mmap = value
if all:
self.propagate('mmap')
def set_layout(self, value, all=False):
self.layout = value
if all:
self.propagate('layout')
def set_device(self, value, all=False):
self.device = value
if all:
self.propagate('device')
def set_clim(self, value, all=False):
self.clim = value
if all:
self.propagate('clim')
def propagate(self, key):
for image in getattr(self.parent, 'images', []):
if image is not self:
setattr(image, key, getattr(self, key))
def draw(self, index=None, space=None, fov=None, fig=None, gs=None):
fig = fig or getattr(self.parent, 'fig')
fig = (fig, gs)
_, self._axes, self._mats = show_orthogonal_slices(
self.fdata, index, self.affine,
fig=fig,
layout=self.layout,
interpolation=self.interpolation,
colormap=self.colormap,
eq=self.equalize,
clim=self.clim,
show_cursor=self.show_cursor,
space=space,
bbox=fov,
return_mat=True)
class ImageViewer:
"""Interactive viewer for volumetric images.
Attributes
----------
auto_redraw : bool, default=False
Automatically update the viewer when an attribute changes.
dpi : int, default=72
Resolution of the viewer (pixels per inch)
size : (width: float, height: float),
Size of the figure
aspect : float
Width/Height
fig : int or plt.Figure
Figure object
images : tuple[str or (tensor, tensor) or ImageArtist]
List of filenames or (data, affine).
At assignment, they are converted to ImageArtist objects.
grid : (nrow: int or None, ncol: int or None) or None, default=None
Grid shape used to display images.
If None, an optimal shape is found based on the figure size,
image layout and number of images.
space : (4, 4) tensor
Orientation matrix of the visualisation space.
        By default, standard RAS space.
index : tuple[float]
Cursor position in millimetric visualisation space.
fov : (min: tuple[float], max: tuple[float])
Minimum and maximum coordinates of the field of view in
millimetric visualisation space.
If None, use the maximum bounding box of all images.
fov_size : float or tuple[float]
Size of the field of view.
If None, use the maximum bounding box of all images.
        After assignment, the center of the FOV is the current
        cursor position.
layout : {'row', 'col', 'orth'}, default='orth'
Layout of the three views.
show_cursor : bool, default=True
Show cross-hair.
equalize : float or {'lin', 'quad', 'log'}, default=None
Histogram equalization method.
interpolation : int, default=1
Interpolation order.
colormap : str or (N, 3) tensor
Mapping from intensity to color.
mmap : bool, default=False
If True, do not keep the full images in memory but reload
them each time the figure is redrawn. Slower but saves memory.
scroll_step : float, default=100
Amount of zoom corresponding to one scroll unit.
draw_freq : float, default=1/25
Minimum amount of time, in sec, between two calls to `redraw`.
        If `redraw` is called more than once within `draw_freq` sec, only
        the first call is executed.
"""
def __init__(self, images, **kwargs):
"""
Parameters
----------
images : list[str or (tensor, tensor)]
            Input images.
Other Parameters
----------------
All attributes can be set on build.
"""
if plt is None:
raise ImportError('Matplotlib not available')
# defaults
dpi = kwargs.pop('dpi', None)
size = kwargs.pop('size', None)
fig = kwargs.pop('fig', None) or plt.figure(figsize=size, dpi=dpi)
if isinstance(fig, int):
fig = plt.figure(fig)
self.fig = fig
self.images = images
self.grid = kwargs.pop('grid', None)
self.space = kwargs.pop('space', None)
self.fov = kwargs.pop('fov', None)
self.index = kwargs.pop('index', None)
# user-defined
for k, v in kwargs.items():
setattr(self, k, v)
fig.canvas.mpl_connect('button_press_event', self.on_press)
fig.canvas.mpl_connect('motion_notify_event', self.on_move)
fig.canvas.mpl_connect('button_release_event', self.on_release)
fig.canvas.mpl_connect('scroll_event', self.on_scroll)
fig.canvas.mpl_connect('resize_event', self.on_resize)
self.redraw(show=True)
# self.menu = Menu(self.fig,
# [MenuItem(self.fig, 'field of view'),
# MenuItem(self.fig, 'show cursor'),
# MenuItem(self.fig, 'equalize'),
# MenuItem(self.fig, 'interpolation'),])
# self.fig.add_artist(self.menu)
def __setattr__(self, key, value):
do_redraw = key[0] != '_' and self.auto_redraw
super().__setattr__(key, value)
if do_redraw:
self.redraw()
def _image_from_ax(self, ax):
for image in self.images:
for d, ax0 in enumerate(image._axes):
if ax is ax0:
return image, d
return None, None
def _index_from_cursor(self, x, y, image, n_ax):
p = utils.as_tensor([x, y, 0])
mat = image._mats[n_ax]
self.index = spatial.affine_matvec(mat, p)
def on_release(self, event):
if event.button == 1: # LEFT
self._is_pressed = False
def on_press(self, event):
if event.button == 1: # LEFT
self._is_pressed = True
if event.inaxes:
x, y = (event.xdata, event.ydata)
image, n_ax = self._image_from_ax(event.inaxes)
self._index_from_cursor(x, y, image, n_ax)
self.redraw(show=True)
def on_move(self, event):
is_pressed = getattr(self, '_is_pressed', False)
if is_pressed and event.inaxes:
x, y = (event.xdata, event.ydata)
image, n_ax = self._image_from_ax(event.inaxes)
self._index_from_cursor(x, y, image, n_ax)
self.redraw(show=True)
def on_resize(self, event):
self.redraw(show=True)
def on_scroll(self, event):
auto, self.auto_redraw = (self.auto_redraw, False)
index0 = self.index
if event.inaxes:
x, y = (event.xdata, event.ydata)
image, n_ax = self._image_from_ax(event.inaxes)
if image:
self._index_from_cursor(x, y, image, n_ax)
steps = event.step
step_size = self.scroll_step ** (-steps)
min, max = self.fov
new_min = [i - step_size * (i - mn0) for i, mn0 in zip(self.index, min)]
new_max = [mn + step_size * (mx0 - mn0)
for i, mn, mn0, mx0 in zip(self.index, new_min, min, max)]
self.fov = (new_min, new_max)
self.index = index0
self.redraw(show=True)
self.auto_redraw = auto
@property
def auto_redraw(self):
return getattr(self, '_auto_redraw', False)
@auto_redraw.setter
def auto_redraw(self, value):
self._auto_redraw = bool(value)
@property
def dpi(self):
"""pixels per inch"""
return self.fig.get_dpi()
@dpi.setter
def dpi(self, value):
if not isinstance(value, (int, float)):
raise TypeError('Expected a number')
self.fig.set_dpi(float(value))
@property
def aspect(self):
"""Width/Height"""
return self.size[0]/self.size[1]
@aspect.setter
def aspect(self, value):
"""Change aspect while keeping area untouched"""
if not isinstance(value, (int, float)):
raise TypeError('Expected a number')
s0, s1 = self.size
        area = s0 * s1  # preserve the total area, not its square root
s0 = math.sqrt(area * value)
s1 = math.sqrt(area / value)
self.size = (s0, s1)
@property
def scroll_step(self):
"""(Width, Height)"""
return getattr(self, '_scroll_step', 100)
@scroll_step.setter
def scroll_step(self, value):
if not isinstance(value, (float, int)):
raise ValueError('Expected a float')
self._scroll_step = value
@property
def draw_freq(self):
"""(Width, Height)"""
return getattr(self, '_draw_freq', 1/25)
@draw_freq.setter
def draw_freq(self, value):
if not isinstance(value, (float, int)):
raise ValueError('Expected a float')
self._draw_freq = value
@property
def size(self):
"""(Width, Height)"""
return tuple(self.fig.get_size_inches())
@size.setter
def size(self, value):
if not isinstance(value, (list, tuple)) or len(value) != 2:
raise ValueError('Expected a tuple of two values')
        self.fig.set_size_inches(*value)
@property
def images(self):
return tuple(self._images)
@images.setter
def images(self, value):
self._images = [ImageArtist(image, parent=self) for image in value]
@property
def mmap(self):
mmap = [image.mmap for image in self.images]
return ordered_set(*mmap)[0]
@mmap.setter
def mmap(self, value):
for image in self.images:
image.mmap = bool(value)
@property
def device(self):
device = [image.device for image in self.images]
return ordered_set(*device)[0]
@device.setter
def device(self, value):
for image in self.images:
image.device = value
@property
def layout(self):
layout = [image.layout for image in self.images]
return ordered_set(*layout)[0]
@layout.setter
def layout(self, value):
value = value.lower()
if value not in ('row', 'col', 'orth'):
raise ValueError(f"Expected on of 'row', 'col', 'orth' but "
f"got {value}")
for image in self.images:
image.layout = value
@property
def show_cursor(self):
show_cursor = [image.show_cursor for image in self.images]
return ordered_set(*show_cursor)[0]
@show_cursor.setter
def show_cursor(self, value):
if not isinstance(value, bool):
value = float(value)
for image in self.images:
image.show_cursor = value
@property
def equalize(self):
equalize = [image.equalize for image in self.images]
return ordered_set(*equalize)[0]
@equalize.setter
def equalize(self, value):
if not isinstance(value, str):
value = float(value)
else:
value = value.lower()
if value not in ('lin', 'linear',
'quad', 'quadratic',
'log', 'logarithmic'):
raise ValueError(f'Unknown equalization {value}')
for image in self.images:
image.equalize = value
@property
def clim(self):
clim = [image.clim for image in self.images]
return (ordered_set(*[c[0] for c in clim])[0],
ordered_set(*[c[1] for c in clim])[0])
@clim.setter
def clim(self, value):
for image in self.images:
image.clim = value
@property
def mode(self):
mode = [image.mode for image in self.images]
return ordered_set(*mode)[0]
@mode.setter
def mode(self, value):
value = value.lower()
if value not in ('int', 'intensity',
'cat', 'categorical',
'disp', 'displacement'):
raise ValueError(f'Unknown mode {value}')
for image in self.images:
image.mode = value
@property
def interpolation(self):
interpolation = [image.interpolation for image in self.images]
return ordered_set(*interpolation)[0]
@interpolation.setter
def interpolation(self, value):
if not isinstance(value, str):
value = int(value)
else:
value = value.lower()
for image in self.images:
image.interpolation = value
@property
def colormap(self):
colormap = [image.colormap for image in self.images
if not torch.is_tensor(image.colormap)]
if not colormap:
return self.images[0].colormap
else:
return ordered_set(*colormap)[0]
@colormap.setter
def colormap(self, value):
if not isinstance(value, str):
value = torch.as_tensor(value)
for image in self.images:
image.colormap = value
@property
def index(self):
return self._index
@index.setter
def index(self, value):
if torch.is_tensor(value):
value = value.flatten().tolist()
value = py.make_list(value, 3)
value = [(mx+mn)/2 if v is None else v
for v, mn, mx in zip(value, *self.fov)]
self._index = tuple(value)
@property
def fov(self):
return self._fov
@fov.setter
def fov(self, value):
if value is None:
value = (None, None)
min, max = value
if torch.is_tensor(min):
min = min.flatten().tolist()
min = py.make_list(min, 3)
if torch.is_tensor(max):
max = max.flatten().tolist()
max = py.make_list(max, 3)
if any(mn is None for mn in min) or any(mx is None for mx in max):
min0, max0 = self._max_fov()
            min = [mn if mn is not None else mn0 for mn, mn0 in zip(min, min0)]
            max = [mx if mx is not None else mx0 for mx, mx0 in zip(max, max0)]
self._fov = (tuple(min), tuple(max))
def _max_fov(self):
affines = [image.affine for image in self.images]
shapes = [image.shape for image in self.images]
affines = utils.as_tensor(affines)
shapes = utils.as_tensor(shapes)
return spatial.compute_fov(self._space_matrix, affines, shapes)
@property
def fov_size(self):
min, max = self.fov
return tuple(mx-mn for mx, mn in zip(max, min))
@fov_size.setter
def fov_size(self, value):
if torch.is_tensor(value):
value = value.flatten().tolist()
value = py.make_list(value, 3)
min = [i - v/2 if v else None for i, v in zip(self._index, value)]
max = [i + v/2 if v else None for i, v in zip(self._index, value)]
self.fov = [min, max]
@property
def space(self):
return self._space
@space.setter
def space(self, value):
self._space = value
if torch.is_tensor(value):
if value.shape != (4, 4):
raise ValueError('Expected 4x4 matrix')
self._space_matrix = value
elif isinstance(value, int):
affines = [image.affine for image in self.images]
self._space_matrix = affines[value]
else:
if value is not None:
raise ValueError('Expected a 4x4 matrix or an int or None')
affines = [image.affine for image in self.images]
voxel_size = spatial.voxel_size(utils.as_tensor(affines))
voxel_size = voxel_size.min()
self._space_matrix = torch.eye(4)
self._space_matrix[:-1, :-1] *= voxel_size
@property
def grid(self):
return self._grid
@grid.setter
def grid(self, value):
if value is None:
self._grid = (None, None)
else:
if not isinstance(value, (list, tuple)) or len(value) != 2:
raise ValueError('Expected a tuple of two integers')
gx, gy = value
if gx is not None and gy is not None and gx*gy != len(self.images):
                raise ValueError('Grid size not consistent with number '
                                 'of images')
self._grid = tuple(value)
def _grid_auto(self, nb_image):
gx, gy = self._grid
if gx is not None and gy is not None:
return gx, gy
if gx is None and gy is not None:
gx = int(math.ceil(nb_image / gy))
return gx, gy
if gy is None and gx is not None:
gy = int(math.ceil(nb_image / gx))
return gx, gy
# heuristic
if self.layout == 'row':
ratio = [1, 3]
elif self.layout == 'col':
ratio = [3, 1]
else:
assert self.layout == 'orth', self.layout
ratio = [2, 2]
rh, rw = ratio
aspect = self.aspect # width / height
best_empty = None
best_size = (None, None)
for n_rows in range(1, nb_image + 1):
n_cols = math.ceil(nb_image / n_rows)
nr = n_rows * rh
nc = n_cols * rw
inner_aspect = nc / nr
inner_area = nc * nr
if inner_aspect < aspect:
outer_area = nr * nr * aspect
else:
outer_area = nc * nc / aspect
empty_ratio = (outer_area - inner_area) / outer_area
empty_box = (n_rows*n_cols - nb_image) * (rh * rw) / outer_area
compactness = 0.01 * (max(nr, nc)/min(nr, nc) - 1)
empty = empty_box + empty_ratio + compactness
# print(f'{n_rows} {n_cols} | {empty_box:6.3f} {empty_ratio:6.3f} {compactness:6.3f} {inner_aspect:6.3f}')
if best_empty is None or empty < best_empty:
best_size = (n_rows, n_cols)
best_empty = empty
return best_size
def redraw(self, show=False):
last_draw = getattr(self, '_last_draw', 0)
if (time.time() - last_draw) < self.draw_freq:
return
self.fig.clear()
grid = self._grid_auto(len(self.images))
gs = gridspec.GridSpec(*grid)
for d, image in enumerate(self.images):
image.draw(index=self.index, space=self._space_matrix,
fov=self.fov, fig=self.fig, gs=gs[d])
if show:
self.fig.show()
self._last_draw = time.time()
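

# Usage sketch (added for illustration; not part of the original module).
# Assumes 'mri.nii.gz' is a readable volume on disk; every attribute used
# below is documented in the ImageViewer docstring above.
if __name__ == '__main__':
    viewer = ImageViewer(['mri.nii.gz'], layout='orth', show_cursor=True)
    viewer.auto_redraw = True   # redraw whenever an attribute changes
    viewer.equalize = 'lin'     # linear histogram equalization
    viewer.fov_size = 128.      # zoom to a 128 mm field of view around the cursor
    plt.show()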
|
11486123
|
import os

# Build a training split for Waymo data converted to the KITTI layout:
# keep every `interval`-th frame of every segment and write one
# "<segment_dir> <frame_stem>" line per kept frame.
root_dir = 'data/waymo2kitti/training/'
seg_list = sorted(os.listdir(root_dir))
interval = 3

with open('train_org.txt', 'w') as f:
    for seg in seg_list:
        if 'segment' not in seg:
            continue
        image_list = os.listdir(os.path.join(root_dir, seg, 'label_0'))
        img_list = sorted(image_list, key=lambda c: int(c.split('.')[0]))
        for image in img_list:
            frame = image.split('.')[0]
            if int(frame) % interval == 0:
                f.write(seg + ' ' + frame + '\n')
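
# Illustrative note (added): each emitted line pairs a segment directory with a
# frame stem, i.e. "<segment_dir> <frame_stem>". A consumer might read it back as:
# with open('train_org.txt') as fh:
#     samples = [line.split() for line in fh if line.strip()]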
|
11486197
|
import time
import calendar
import unittest
from .. import cron
class TestCron(unittest.TestCase):
def test_init_coercion(self):
self.assertEqual(cron.Cron({
"weekday": cron.value("sunday")
}), cron.Cron({
"weekday": "sunday"
}))
self.assertEqual(cron.Cron({
"weekday": cron.value(0)
}), cron.Cron({
"weekday": 0
}))
def test_case_insensitivity(self):
self.assertEqual(cron.Cron({
"weekday": "SUNDAY",
}), cron.Cron({
"weekday": "sunday"
}))
def test_next(self):
c = cron.Cron({
"weekday": "sunday"
})
next_sunday = c.next(time.gmtime(0))
self.assertEqual(calendar.timegm(next_sunday), 259200)
def test_next_always_advances(self):
c = cron.Cron({
"weekday": "sunday"
})
sunday_after_that = c.next(time.gmtime(259200))
self.assertEqual(calendar.timegm(sunday_after_that), 864000)
def test_matches(self):
c = cron.Cron({
"weekday": "sunday"
})
self.assertFalse(c.matches(time.gmtime(0)))
self.assertTrue(c.matches(time.gmtime(259200)))
def test_day_or_weekday(self):
c = cron.Cron({
"day": 2,
"weekday": "sunday"
})
next_day = c.next(time.gmtime(0))
self.assertEqual(calendar.timegm(next_day), 86400)
next_sunday = c.next(next_day)
self.assertEqual(calendar.timegm(next_sunday), 259200)
def test_year_change(self):
c = cron.Cron(weekday="friday")
next_year = c.next(time.gmtime(calendar.timegm((1970, 12, 31, 23, 59, 0))))
self.assertEqual(calendar.timegm(next_year), calendar.timegm((1971, 1, 1, 0, 0, 0)))
class TestCronParsing(unittest.TestCase):
def test_parsing_value(self):
self.assertEqual(cron.parse("0 * * * *"), cron.Cron({
"minute": cron.value(0)
}))
def test_parsing_range(self):
self.assertEqual(cron.parse("10-20 * * * *"), cron.Cron({
"minute": cron.range(10, 20)
}))
self.assertEqual(cron.parse("10-20/5 * * * *"), cron.Cron({
"minute": cron.range(10, 20, step=5)
}))
def test_parsing_any(self):
self.assertEqual(cron.parse("* * * * *"), cron.Cron({
"minute": cron.any()
}))
self.assertEqual(cron.parse("*/5 * * * *"), cron.Cron({
"minute": cron.any(step=5)
}))
def test_parsing_multiple(self):
self.assertEqual(cron.parse("0,10-20/5,*/5 * * * *"), cron.Cron({
"minute": cron.value(0).or_range(10, 20, step=5).or_any(step=5)
}))
def test_parsing_day_or_weekday(self):
self.assertEqual(cron.parse("0 0 2 * *"), cron.Cron({
"day": cron.value(2),
"weekday": cron.any()
}))
self.assertEqual(cron.parse("0 0 * * sun"), cron.Cron({
"day": cron.any(),
"weekday": cron.value("sun")
}))
self.assertEqual(cron.parse("0 0 2 * sun"), cron.Cron({
"day": cron.value(2),
"weekday": cron.value("sun")
}))
def test_parsing_extra_spaces(self):
self.assertEqual(cron.parse("1,2 3 4 * *"), cron.parse("1,2 3 4 * *"))
self.assertEqual(cron.parse(" 1,2 3 4 * * "), cron.parse("1,2 3 4 * *"))
self.assertRaises(ValueError, cron.parse, "0, 1 * * *")
self.assertRaises(ValueError, cron.parse, "0 ,1 * * *")
self.assertRaises(ValueError, cron.parse, "0 , 1 * * *")
def test_parsing_month_names(self):
self.assertEqual(cron.parse("* * * jan *"), cron.parse("* * * 1 *"))
self.assertEqual(cron.parse("* * * january *"), cron.parse("* * * 1 *"))
self.assertEqual(cron.parse("* * * JANUARY *"), cron.parse("* * * 1 *"))
def test_parsing_day_names(self):
self.assertEqual(cron.parse("* * * * sun"), cron.parse("* * * * 0"))
self.assertEqual(cron.parse("* * * * sunday"), cron.parse("* * * * 0"))
self.assertEqual(cron.parse("* * * * SUNDAY"), cron.parse("* * * * 0"))
def test_parsing_unknown_names(self):
self.assertRaises(ValueError, cron.parse, "xyz * * * *")
self.assertRaises(ValueError, cron.parse, "* xyz * * *")
self.assertRaises(ValueError, cron.parse, "* * xyz * *")
self.assertRaises(ValueError, cron.parse, "* * * xyz *")
self.assertRaises(ValueError, cron.parse, "* * * * xyz")
def test_parsing_out_of_bounds_values(self):
self.assertRaises(ValueError, cron.parse, "61 * * * *")
self.assertRaises(ValueError, cron.parse, "* 24 * * *")
self.assertRaises(ValueError, cron.parse, "* * 0 * *")
self.assertRaises(ValueError, cron.parse, "* * 32 * *")
self.assertRaises(ValueError, cron.parse, "* * * 0 *")
self.assertRaises(ValueError, cron.parse, "* * * 13 *")
self.assertRaises(ValueError, cron.parse, "* * * * 7")
def test_parsing_out_of_bounds_steps(self):
self.assertRaises(ValueError, cron.parse, "*/0 * * * *")
self.assertRaises(ValueError, cron.parse, "*/61 * * * *")
self.assertRaises(ValueError, cron.parse, "*/0 * * *")
self.assertRaises(ValueError, cron.parse, "* */25 * * *")
self.assertRaises(ValueError, cron.parse, "* * */0 * *")
self.assertRaises(ValueError, cron.parse, "* */32 * *")
self.assertRaises(ValueError, cron.parse, "* * * */0 *")
self.assertRaises(ValueError, cron.parse, "* * * */13 *")
self.assertRaises(ValueError, cron.parse, "* * * * */0")
self.assertRaises(ValueError, cron.parse, "* * * * */8")
|
11486224
|
import os, re, sys
class Blob:
def append(self, *_):
pass
def finish(self, *_):
pass
class Formula(Blob):
def __init__(self):
self.lines = []
def append(self, line):
self.lines.append(line)
def finish(self, *_):
for i, line in enumerate(self.lines):
assert(len(line[:3].strip()) == 0)
line = re.sub(r"\s+", " ", line[3:])
line = re.sub(r"\(\s+", "(", line)
line = re.sub(r"\s+\)", ")", line)
self.lines[i] = line
def format(self, output):
output.write("/// ```text\n")
for line in self.lines:
output.write("/// {}\n".format(line))
output.write("/// ```\n")
class Space(Blob):
def format(self, output):
output.write("///\n")
class Text(Blob):
def __init__(self):
self.lines = []
def append(self, line):
self.lines.append(line)
def finish(self, index, total, f):
lines = self.lines
if index == 0:
first = re.sub(r"(?i)\s*{}\s+".format(f.name), "", lines[0])
lines[0] = first
line = " ".join(lines)
line = re.sub(r"\s+", " ", line)
line = re.sub(r"\(\s+", "(", line)
line = re.sub(r"\s+\)", ")", line)
if index == total - 1 and line[-1] != ".":
line = "{}.".format(line)
lines = line.split(". ")
lowercase = ["alpha", "or", "where"]
for i, line in enumerate(lines):
if all([not line.startswith(word) for word in lowercase]):
lines[i] = "{}{}".format(line[0].upper(), line[1:])
line = ". ".join(lines)
substitutes = {
"Compute": "Computes",
"equal to 1": "equal to one",
}
for key, value in substitutes.items():
line = re.sub(r"\b{}\b".format(key), value, line)
chunks = line.split(" ")
lines = []
count = 0
while len(chunks) > 0:
current = " ".join(chunks[:count])
if count == len(chunks) or 4 + len(current) + len(chunks[count]) >= 80:
lines.append(current)
chunks = chunks[count:]
count = 0
else:
count += 1
self.lines = lines
def format(self, output):
for line in self.lines:
output.write("/// {}\n".format(line))
def clean(lines):
lines = [re.sub(r"^\*> ?", "", line.rstrip()) for line in lines]
while len(lines) > 0 and lines[0].strip() == "":
lines = lines[1:]
while len(lines) > 0 and lines[-1].strip() == "":
lines = lines[:-1]
margin = 42
for line in lines:
if len(line.strip()) > 0:
margin = min(margin, len(line) - len(line.strip()))
for i, line in enumerate(lines):
if len(line.strip()) > 0:
lines[i] = lines[i][margin:]
return lines
def extract(filename):
SEARCHING = 0
IN_PURPOSE = 1
IN_DESCRIPTION = 2
lines = []
state = SEARCHING
with open(filename) as file:
for line in file:
if state == SEARCHING:
if "par Purpose" in line:
state = IN_PURPOSE
elif state == IN_PURPOSE:
if "\\verbatim" in line:
state = IN_DESCRIPTION
elif state == IN_DESCRIPTION:
if "\\endverbatim" in line:
break
lines.append(line.replace("\n", ""))
return lines
def partition(lines):
paragraphs = []
current = None
for line in lines:
if line.startswith(" "):
klass = Formula
elif len(line) == 0:
klass = Space
else:
klass = Text
if not isinstance(current, klass):
if current is not None:
paragraphs.append(current)
current = klass()
current.append(line)
if current is not None:
paragraphs.append(current)
return paragraphs
def print_documentation(f, reference):
filename = os.path.join(reference, "BLAS", "SRC", "{}.f".format(f.name))
if not os.path.exists(filename):
return
paragraphs = partition(clean(extract(filename)))
for i, paragraph in enumerate(paragraphs):
paragraph.finish(i, len(paragraphs), f)
paragraph.format(sys.stdout)
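

# Hypothetical driver (added for illustration): the original file defines no
# entry point, and print_documentation() only assumes that `f` exposes a
# `.name` attribute naming a BLAS routine. The argument order and paths below
# are assumptions, not part of the original script.
if __name__ == "__main__":
    from collections import namedtuple
    Routine = namedtuple("Routine", "name")
    # argv[1]: path to a LAPACK reference checkout, argv[2]: routine name (e.g. "dgemm")
    print_documentation(Routine(sys.argv[2]), sys.argv[1])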
|
11486245
|
import argparse
from curses import wrapper
import logging
import sys
import pprint
from _version import __version__, name
try:
    from .core import bluelyze, term, get_logo, wifilyze, allyze
except ImportError:
    from core import bluelyze, term, get_logo, wifilyze, allyze
def main(args):
"""
Executes logic from parsed arguments
"""
logging.debug(args)
if args['wifi']:
wifilyze(show_extra_info=args["show_extra_info"],
graph=args["show_graph"],
analyze_all=False)
elif args['bluetooth']:
bluelyze(
graph=args["show_graph"],
show_name=args["show_name"],
show_extra_info=args["show_extra_info"],
analyze_all=False)
elif args['analyze_all']:
allyze(
show_name=args["show_name"],
show_extra_info=args["show_extra_info"],
analyze_all=True
)
else:
        sys.exit('No protocol selected: use one of --bluetooth, --wifi or --analyze-all')
def cli_usage(name=None):
"""
custom usage message to override `cli.py`
"""
return """
{logo}
usage: signalyze [-h] [-o OUTPUT] [--show-name] [-b | -w | -all] [--show-graph | --show-extra-info]
""".format(logo=get_logo())
def runner():
"""
runner that handles parsing logic
"""
parser = argparse.ArgumentParser(description='Signalyze', usage=cli_usage())
graph_or_verbose = parser.add_mutually_exclusive_group()
parser.add_argument('--version', action='version',
version='{package} - {version}'.format(package=name, version=__version__))
parser.add_argument('-o', '--output', help='save to an output csv file')
parser.add_argument('--show-name', action="store_true", help='Show Device name and mac address')
protocol = parser.add_mutually_exclusive_group()
protocol.add_argument('-b','--bluetooth', action="store_true", help='Analyze only bluetooth')
protocol.add_argument('-w','--wifi', action="store_true", help='Analyze only wifi')
protocol.add_argument('-all','--analyze-all', action="store_true", help='Analyze both wifi and bluetooth')
graph_or_verbose.add_argument('--show-graph', action="store_true", help='Show Realtime graph of nearby devices')
graph_or_verbose.add_argument('--show-extra-info', action="store_true", help='Show extra information like services and device classification')
args = vars(parser.parse_args())
main(args)
if __name__ == '__main__':
logging.basicConfig(stream=sys.stderr, level=logging.INFO)
runner()
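

# Example invocations (added for illustration; flag names as defined in runner(),
# and "signalyze" taken from the usage string above -- the installed command
# name is an assumption):
#   signalyze -w --show-graph            # wifi only, realtime graph
#   signalyze -b --show-name             # bluetooth only, with device names
#   signalyze -all --show-extra-info     # both protocols, extra details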
|
11486248
|
def test_issue_455(data: list[i32]) -> f64:
sum: f64
sum = 0.0
i: i32
for i in range(len(data)):
sum += data[i]
return sum
|