id stringlengths 1 265 | text stringlengths 6 5.19M | dataset_id stringclasses 7
values |
|---|---|---|
3242596 | <gh_stars>1-10
import numpy as np

# Training inputs: six 4-dimensional sample vectors.
input_vectors = np.array([
    [0.1, 0.1, 0.1, 0.1],
    [0.01, 0.001, 0.6, 0.8],
    [0.3, 0.3, 0.3, 0.3],
    [0.0, 0.8, 0.0, 0.0],
    [1.0, 0.9, 0.95, 0.82],
    [0.35, 0.95, 0.24, 0.76],
])

# Hyper-parameters: learning rate, neighbourhood spread, and the number /
# dimensionality of the input samples.
rate = 0.4
spread = 0.2
size = len(input_vectors)
input_size = len(input_vectors[0])
| StarcoderdataPython |
1693656 | from .base import *
from .config import *
from .pipelines import *
| StarcoderdataPython |
3276913 | #!/usr/bin/python
def logAndPrint(logf, message):
    """Print *message* to stdout and append it to the log file *logf*.

    A trailing newline is added to the file entry when *message* does not
    already end with one, so successive log entries land on separate lines
    (mirroring the behaviour of ``print``). The original appended the raw
    message, causing consecutive entries to run together on one line.
    """
    print(message)
    with open(logf, 'a+') as f:
        f.write(message if message.endswith('\n') else message + '\n')
| StarcoderdataPython |
3201154 | from torchseq.utils.tokenizer import Tokenizer
class ParaphrasePair:
    """A tokenised sentence pair (plus optional template) for paraphrase tasks.

    Both sentences are tokenised with the project Tokenizer and clipped to
    ``tok_window`` tokens; the template, when given, is tokenised as well.
    """

    def __init__(self, sent1_text, sent2_text, template=None, is_paraphrase=True, tok_window=64):
        # Temporary debug trace for a known problematic example.
        if "artist appear below the euro symbol" in sent2_text:
            print("Found the dodgy pair", sent1_text, sent2_text)
        self._s1_doc = Tokenizer().tokenise(sent1_text)
        self._s2_doc = Tokenizer().tokenise(sent2_text)
        # BUG FIX: the template text itself must be tokenised — the original
        # tokenised sent2_text a second time, so templates were never used.
        self._template_doc = Tokenizer().tokenise(template) if template is not None else None
        self.is_paraphrase = is_paraphrase
        if "artist appear below the euro symbol" in sent2_text:
            print("Dodgy pair cleared tokenising")
        # Clip both sentences to the token window.
        if len(self._s1_doc) > tok_window:
            self._s1_doc = self._s1_doc[:tok_window]
        if len(self._s2_doc) > tok_window:
            self._s2_doc = self._s2_doc[:tok_window]

    def s1_as_ids(self):
        """Return the token ids of sentence 1."""
        return [tok["id"] for tok in self._s1_doc]

    def s2_as_ids(self):
        """Return the token ids of sentence 2."""
        return [tok["id"] for tok in self._s2_doc]

    def template_as_ids(self):
        """Return the token ids of the template.

        NOTE(review): raises TypeError when no template was supplied
        (``_template_doc`` is None) — same contract as the original.
        """
        return [tok["id"] for tok in self._template_doc]
| StarcoderdataPython |
3369750 | <filename>archive/p/python/roman-numeral.py
# Python program to convert Roman Numerals
# to Numbers
import sys
# This function returns value of each Roman symbol
def value(r):
    """Return the integer value of a single Roman numeral symbol.

    Unknown symbols map to -1, matching the original fall-through return.
    """
    symbol_values = {
        'I': 1,
        'V': 5,
        'X': 10,
        'L': 50,
        'C': 100,
        'D': 500,
        'M': 1000,
    }
    return symbol_values.get(r, -1)
def romanToDecimal(str):
    """Convert a Roman numeral string to its integer value.

    Implements the standard subtractive rule: a symbol smaller than its
    successor is subtracted (e.g. "IV" -> 4, "XC" -> 90); otherwise each
    symbol is simply added. Symbol values come from value().
    """
    total = 0
    pos = 0
    length = len(str)
    while pos < length:
        current = value(str[pos])
        # Look one symbol ahead to detect subtractive pairs such as "IX".
        if pos + 1 < length and current < value(str[pos + 1]):
            total += value(str[pos + 1]) - current
            pos += 2
        else:
            total += current
            pos += 1
    return total
def main():
    """Read a Roman numeral from argv and print its decimal value.

    Prints a usage hint when no argument is supplied.
    """
    if len(sys.argv) > 1:
        roman_numeral = sys.argv[1]
        # BUG FIX: the original used a Python-2 style trailing comma after
        # print(), which built and discarded a (None,) tuple and split the
        # label and value across two lines; print them together instead.
        print("Integer form of Roman Numeral is", romanToDecimal(roman_numeral))
    else:
        print("Argument Required")


if __name__ == "__main__":
    main()
| StarcoderdataPython |
9039 | import sys, os
sys.path.append("C:/Users/Delgado/Documents/Research/rheology-data-toolkit/rheodata/extractors")
import h5py
import pandas as pd
from antonpaar import AntonPaarExtractor as APE
from ARES_G2 import ARES_G2Extractor
# %%
sys.path.append("C:/Users/Delgado/Documents/Research/rheology-data-toolkit/rheodata")
from data_converter import rheo_data_transformer
import unittest
extractor = APE()
#converter = data_converter()
class TestAntonPaar(unittest.TestCase):
    """Tests for the Anton Paar extractor and the HDF5 data converter.

    NOTE(review): setUp reads a hard-coded local Excel fixture and writes
    "test.hdf5" into the working directory — as written these tests only run
    on the original author's machine; confirm fixture paths before reuse.
    """

    def setUp(self):
        # Multi-test Excel workbook used as the extraction fixture.
        self.multi_file_test = "C:/Users/Delgado/Documents/Research/rheology-data-toolkit/tests/test_data/Anton_Paar/excel_test_data/two_tests_Steady State Viscosity Curve-LO50C_excel.xlsx"
        # import_rheo_data returns (modified data, raw data, columns, units).
        self.modified_dict, self.raw_data_dict, self.cols, self.units = extractor.import_rheo_data(self.multi_file_test)
        # Initialize the class to convert
        self.converter = rheo_data_transformer(self.modified_dict, self.raw_data_dict, self.cols, self.units)
        self.converter.load_to_hdf("test")

    def test_modified_output_isdictionary(self):
        self.assertIsInstance(self.modified_dict, dict)

    def test_modified_output_dictionary_contains_pandas(self):
        """ Test if the output is a dictonary of pandas dataframes'"""
        for value in self.modified_dict.values():
            self.assertIsInstance(value, pd.DataFrame)

    def test_raw_output_isdictionary(self):
        self.assertIsInstance(self.raw_data_dict, dict)

    def test_raw_output_dictionary_contains_pandas(self):
        """ Test if the output is a dictonary of pandas dataframes'"""
        for value in self.raw_data_dict.values():
            self.assertIsInstance(value, pd.DataFrame)

    def test_project_name_added_raw_data(self):
        """ Test if the output is a dictonary of pandas dataframes'"""
        # Raw sheets are expected to keep the "Project:" header cell intact.
        for df in self.raw_data_dict.values():
            self.assertEqual(df.iloc[0,0], "Project:")

    def test_hdf5_created(self):
        # NOTE(review): this only checks the extension string, not that the
        # file exists on disk — presumably intentional, but worth confirming.
        name, ext = os.path.splitext("test.hdf5")
        self.assertEqual(ext, ".hdf5")

    def test_project_subfolders_added(self):
        # The converter should create one HDF5 subgroup per test in the file.
        f = h5py.File('test.hdf5', "r")
        project_keys = list(f['Project'].keys())
        f.close()
        self.assertListEqual(project_keys, ['Steady State Viscosity Curve-75C','Steady State Viscosity Curve-LO80C', ])

    def test_analyze_cols(self):
        # Column names parsed by the extractor must match those of the
        # analysis dataframes built straight from the workbook.
        temp_df = extractor.make_analyze_dataframes(self.multi_file_test)
        for test_key in temp_df.keys():
            test_cols = list(temp_df[test_key].columns)
            parsed_cols = list(self.cols[test_key])
            self.assertListEqual(test_cols, parsed_cols)

    # TODO Write test for saving a file

if __name__ == '__main__':
    unittest.main()
| StarcoderdataPython |
1758175 | <gh_stars>1000+
import datetime
import json
import logging
import requests
from core.analytics import OneShotAnalytics
from core.errors import ObservableValidationError
from core.observables import Hostname, Email, Ip, Hash
class ThreatCrowdAPI(object):
    """Base class for querying the ThreatCrowd API."""

    @staticmethod
    def fetch(observable):
        """Query the ThreatCrowd report endpoint matching the observable type.

        Returns the decoded JSON report on success, or None when the
        observable type is unsupported, the HTTP status is not OK, or the
        request raises.
        """
        base_url_api = "https://www.threatcrowd.org/searchApi/v2"
        # Map each supported observable type to its endpoint and query field.
        # This replaces four copy-pasted request branches.
        if isinstance(observable, Hostname):
            url = base_url_api + "/domain/report/"
            params = {"domain": observable.value}
        elif isinstance(observable, Email):
            url = base_url_api + "/email/report/"
            params = {"email": observable.value}
        elif isinstance(observable, Ip):
            url = base_url_api + "/ip/report/"
            params = {"ip": observable.value}
        elif isinstance(observable, Hash):
            url = base_url_api + "/file/report/"
            params = {"resource": observable.value}
        else:
            return None
        try:
            res = requests.get(url, params)
            if res.ok:
                return res.json()
        except Exception as e:
            # BUG FIX: Exception.message does not exist on Python 3, so the
            # original handler itself raised AttributeError; the Ip and Hash
            # branches also misreported "email report".
            print("Exception while getting report from {}: {}".format(url, e))
        return None
class ThreatCrowdQuery(ThreatCrowdAPI, OneShotAnalytics):
    """One-shot analytics that enriches an observable with ThreatCrowd data."""

    default_values = {
        "name": "ThreatCrowd",
        "description": "Perform a ThreatCrowd query.",
    }

    ACTS_ON = ["Ip", "Hostname", "Hash", "Email"]

    @staticmethod
    def analyze(observable, results):
        """Query ThreatCrowd for *observable*, create links, record context.

        Returns the list of links created between the observable and the
        related indicators found in the ThreatCrowd report.
        """
        links = set()
        json_result = ThreatCrowdAPI.fetch(observable)
        # BUG FIX: fetch() returns None on failure; the original then crashed
        # on membership tests ("resolutions" in None) further down.
        if json_result is None:
            return []
        json_string = json.dumps(
            json_result, sort_keys=True, indent=4, separators=(",", ": ")
        )
        results.update(raw=json_string)
        result = {}

        if isinstance(observable, Hostname):
            if "resolutions" in json_result:
                result["ip on this domains"] = 0
                for ip in json_result["resolutions"]:
                    if ip["ip_address"].strip() != observable.value:
                        # Only resolutions carrying a real date are linked.
                        if ip["last_resolved"] != "0000-00-00":
                            last_resolved = datetime.datetime.strptime(
                                ip["last_resolved"], "%Y-%m-%d"
                            )
                            try:
                                new_ip = Ip.get_or_create(
                                    value=ip["ip_address"].strip()
                                )
                                links.update(
                                    new_ip.active_link_to(
                                        observable, "IP", "ThreatCrowd", last_resolved
                                    )
                                )
                                result["ip on this domains"] += 1
                            except ObservableValidationError:
                                logging.error(
                                    "An error occurred when trying to add subdomain {} to the database".format(
                                        ip["ip_address"]
                                    )
                                )
            if "emails" in json_result:
                result["nb emails"] = 0
                for email in json_result["emails"]:
                    try:
                        new_email = Email.get_or_create(value=email)
                        links.update(
                            new_email.active_link_to(
                                observable, "Used by", "ThreatCrowd"
                            )
                        )
                        result["nb emails"] += 1
                    except ObservableValidationError:
                        logging.error(
                            "An error occurred when trying to add email {} to the database".format(
                                email
                            )
                        )
            if "subdomains" in json_result:
                result["nb subdomains"] = 0
                for subdomain in json_result["subdomains"]:
                    try:
                        new_domain = Hostname.get_or_create(value=subdomain)
                        links.update(
                            observable.active_link_to(
                                new_domain, "subdomain", "ThreatCrowd"
                            )
                        )
                        result["nb subdomains"] += 1
                    except ObservableValidationError:
                        logging.error(
                            "An error occurred when trying to add subdomain {} to the database".format(
                                subdomain
                            )
                        )

        if isinstance(observable, Ip):
            if "resolutions" in json_result:
                result["domains resolved"] = 0
                for domain in json_result["resolutions"]:
                    if domain["domain"].strip() != observable.value:
                        try:
                            last_resolved = datetime.datetime.strptime(
                                domain["last_resolved"], "%Y-%m-%d"
                            )
                            new_domain = Hostname.get_or_create(
                                value=domain["domain"].strip()
                            )
                            links.update(
                                new_domain.active_link_to(
                                    observable, "A Record", "ThreatCrowd", last_resolved
                                )
                            )
                            result["domains resolved"] += 1
                        except ObservableValidationError:
                            logging.error(
                                "An error occurred when trying to add domain {} to the database".format(
                                    domain["domain"]
                                )
                            )
            if "hashes" in json_result and len(json_result["hashes"]) > 0:
                result["malwares"] = 0
                for h in json_result["hashes"]:
                    new_hash = Hash.get_or_create(value=h)
                    links.update(
                        new_hash.active_link_to(observable, "hash", "ThreatCrowd")
                    )
                    result["malwares"] += 1

        if isinstance(observable, Email):
            # BUG FIX: the original guard tested len(json_result) instead of
            # len(json_result["domains"]).
            if "domains" in json_result and len(json_result["domains"]) > 0:
                result["domains recorded by email"] = 0
                for domain in json_result["domains"]:
                    new_domain = Hostname.get_or_create(value=domain)
                    links.update(
                        new_domain.active_link_to(
                            observable, "recorded by", "ThreatCrowd"
                        )
                    )
                    result["domains recorded by email"] += 1

        if isinstance(observable, Hash):
            result["nb c2"] = 0
            # BUG FIX: the md5/sha1/sha256 source labels below were
            # misspelled "ThreadCrowd" in the original.
            if "md5" in json_result:
                new_hash = Hash.get_or_create(value=json_result["md5"])
                links.update(new_hash.active_link_to(observable, "md5", "ThreatCrowd"))
            if "sha1" in json_result:
                new_hash = Hash.get_or_create(value=json_result["sha1"])
                links.update(new_hash.active_link_to(observable, "sha1", "ThreatCrowd"))
            if "sha256" in json_result:
                new_hash = Hash.get_or_create(value=json_result["sha256"])
                links.update(
                    new_hash.active_link_to(observable, "sha256", "ThreatCrowd")
                )
            if "domains" in json_result and len(json_result["domains"]):
                for domain in json_result["domains"]:
                    new_domain = Hostname.get_or_create(value=domain)
                    links.update(
                        observable.active_link_to(new_domain, "c2", "ThreatCrowd")
                    )
                    result["nb c2"] += 1
            if "ips" in json_result and len(json_result["ips"]):
                for ip in json_result["ips"]:
                    new_ip = Ip.get_or_create(value=ip.strip())
                    links.update(observable.active_link_to(new_ip, "c2", "ThreatCrowd"))
                    result["nb c2"] += 1

        if "permalink" in json_result:
            result["permalink"] = json_result["permalink"]
        result["source"] = "threatcrowd_query"
        result["raw"] = json_string
        observable.add_context(result)
        return list(links)
| StarcoderdataPython |
3295768 | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
from rlkit.exploration_strategies.base import RawExplorationStrategy
import numpy as np
class GaussianAndEpsilonStrategy(RawExplorationStrategy):
    """
    With probability epsilon, take a completely random action.
    with probability 1-epsilon, add Gaussian noise to the action taken by a
    deterministic policy.
    """

    def __init__(self, action_space, epsilon, max_sigma=1.0, min_sigma=None,
                 decay_period=1000000):
        assert len(action_space.shape) == 1
        if min_sigma is None:
            # No decay: keep sigma constant at max_sigma.
            min_sigma = max_sigma
        self._max_sigma = max_sigma
        self._epsilon = epsilon
        self._min_sigma = min_sigma
        self._decay_period = decay_period
        self._action_space = action_space

    def get_action_from_raw_action(self, action, t=None, **kwargs):
        """Return either a random action or the input action plus noise.

        Noise scale decays linearly from max_sigma to min_sigma over
        decay_period timesteps.
        """
        if random.random() < self._epsilon:
            return self._action_space.sample()
        # BUG FIX: t defaults to None but was used in arithmetic, raising a
        # TypeError whenever the epsilon branch was not taken and no t was
        # passed. Treat a missing timestep as t == 0 (no decay yet).
        if t is None:
            t = 0
        sigma = self._max_sigma - (self._max_sigma - self._min_sigma) * min(
            1.0, t * 1.0 / self._decay_period
        )
        return np.clip(
            action + np.random.normal(size=len(action)) * sigma,
            self._action_space.low,
            self._action_space.high,
        )
| StarcoderdataPython |
105506 | import pathlib
import tempfile
import cbor2
from retry import retry
from pycardano import *
from .base import TestBase
class TestMint(TestBase):
    """Integration tests that mint NFTs on a Cardano test network.

    NOTE(review): both tests submit real transactions via
    ``self.chain_context`` and depend on funded keys supplied by TestBase —
    they require a live node/backend to pass.
    """

    @retry(tries=4, delay=6, backoff=2, jitter=(1, 3))
    def test_mint(self):
        """Mint two NFTs under a native-script policy, then transfer one."""
        address = Address(self.payment_vkey.hash(), network=self.NETWORK)

        # Load payment keys or create them if they don't exist
        def load_or_create_key_pair(base_dir, base_name):
            skey_path = base_dir / f"{base_name}.skey"
            vkey_path = base_dir / f"{base_name}.vkey"
            if skey_path.exists():
                skey = PaymentSigningKey.load(str(skey_path))
                vkey = PaymentVerificationKey.from_signing_key(skey)
            else:
                key_pair = PaymentKeyPair.generate()
                key_pair.signing_key.save(str(skey_path))
                key_pair.verification_key.save(str(vkey_path))
                skey = key_pair.signing_key
                vkey = key_pair.verification_key
            return skey, vkey

        # Keys are written into a throwaway temporary directory.
        tempdir = tempfile.TemporaryDirectory()
        PROJECT_ROOT = tempdir.name
        root = pathlib.Path(PROJECT_ROOT)
        # Create the directory if it doesn't exist
        root.mkdir(parents=True, exist_ok=True)
        """Generate keys"""
        key_dir = root / "keys"
        key_dir.mkdir(exist_ok=True)
        # Generate policy keys, which will be used when minting NFT
        policy_skey, policy_vkey = load_or_create_key_pair(key_dir, "policy")
        """Create policy"""
        # A policy that requires a signature from the policy key we generated above
        pub_key_policy_1 = ScriptPubkey(policy_vkey.hash())
        # A policy that requires a signature from the extended payment key
        pub_key_policy_2 = ScriptPubkey(self.extended_payment_vkey.hash())
        # A time policy that disallows token minting after 10000 seconds from last block
        must_before_slot = InvalidHereAfter(self.chain_context.last_block_slot + 10000)
        # Combine two policies using ScriptAll policy
        policy = ScriptAll([pub_key_policy_1, pub_key_policy_2, must_before_slot])
        # Calculate policy ID, which is the hash of the policy
        policy_id = policy.hash()
        """Define NFT"""
        my_nft = MultiAsset.from_primitive(
            {
                policy_id.payload: {
                    b"MY_NFT_1": 1,  # Name of our NFT1 # Quantity of this NFT
                    b"MY_NFT_2": 1,  # Name of our NFT2 # Quantity of this NFT
                }
            }
        )
        native_scripts = [policy]
        """Create metadata"""
        # We need to create a metadata for our NFTs, so they could be displayed correctly by blockchain explorer
        metadata = {
            721: {  # 721 refers to the metadata label registered for NFT standard here:
                # https://github.com/cardano-foundation/CIPs/blob/master/CIP-0010/registry.json#L14-L17
                policy_id.payload.hex(): {
                    "MY_NFT_1": {
                        "description": "This is my first NFT thanks to PyCardano",
                        "name": "PyCardano NFT example token 1",
                        "id": 1,
                        "image": "ipfs://QmRhTTbUrPYEw3mJGGhQqQST9k86v1DPBiTTWJGKDJsVFw",
                    },
                    "MY_NFT_2": {
                        "description": "This is my second NFT thanks to PyCardano",
                        "name": "PyCardano NFT example token 2",
                        "id": 2,
                        "image": "ipfs://QmRhTTbUrPYEw3mJGGhQqQST9k86v1DPBiTTWJGKDJsVFw",
                    },
                }
            }
        }
        # Place metadata in AuxiliaryData, the format acceptable by a transaction.
        auxiliary_data = AuxiliaryData(AlonzoMetadata(metadata=Metadata(metadata)))
        """Build transaction"""
        # Create a transaction builder
        builder = TransactionBuilder(self.chain_context)
        # Add our own address as the input address
        builder.add_input_address(address)
        # Since an InvalidHereAfter rule is included in the policy, we must specify time to live (ttl) for this transaction
        builder.ttl = must_before_slot.after
        # Set nft we want to mint
        builder.mint = my_nft
        # Set native script
        builder.native_scripts = native_scripts
        # Set transaction metadata
        builder.auxiliary_data = auxiliary_data
        # Calculate the minimum amount of lovelace that need to hold the NFT we are going to mint
        min_val = min_lovelace(Value(0, my_nft), self.chain_context)
        # Send the NFT to our own address
        nft_output = TransactionOutput(address, Value(min_val, my_nft))
        builder.add_output(nft_output)
        # Build and sign transaction
        signed_tx = builder.build_and_sign(
            [self.payment_skey, self.extended_payment_skey, policy_skey], address
        )
        print("############### Transaction created ###############")
        print(signed_tx)
        print(signed_tx.to_cbor())
        # Submit signed transaction to the network
        print("############### Submitting transaction ###############")
        self.chain_context.submit_tx(signed_tx.to_cbor())
        self.assert_output(address, nft_output)
        # Second transaction: send one of the freshly minted NFTs back to
        # our own address along with 20 ADA.
        nft_to_send = TransactionOutput(
            address,
            Value(
                20000000,
                MultiAsset.from_primitive({policy_id.payload: {b"MY_NFT_1": 1}}),
            ),
        )
        builder = TransactionBuilder(self.chain_context)
        builder.add_input_address(address)
        builder.add_output(nft_to_send)
        # Create final signed transaction
        signed_tx = builder.build_and_sign([self.payment_skey], address)
        print("############### Transaction created ###############")
        print(signed_tx)
        print(signed_tx.to_cbor())
        # Submit signed transaction to the network
        print("############### Submitting transaction ###############")
        self.chain_context.submit_tx(signed_tx.to_cbor())
        self.assert_output(address, nft_to_send)

    @retry(tries=4, delay=6, backoff=2, jitter=(1, 3))
    def test_mint_nft_with_script(self):
        """Mint two NFTs whose policy is a Plutus script (the '42' example)."""
        address = Address(self.payment_vkey.hash(), network=self.NETWORK)
        # The compiled Plutus script is stored hex-encoded on disk.
        with open("./plutus_scripts/fortytwo.plutus", "r") as f:
            script_hex = f.read()
        forty_two_script = cbor2.loads(bytes.fromhex(script_hex))
        policy_id = plutus_script_hash(forty_two_script)
        my_nft = MultiAsset.from_primitive(
            {
                policy_id.payload: {
                    b"MY_SCRIPT_NFT_1": 1,  # Name of our NFT1 # Quantity of this NFT
                    b"MY_SCRIPT_NFT_2": 1,  # Name of our NFT2 # Quantity of this NFT
                }
            }
        )
        metadata = {
            721: {
                policy_id.payload.hex(): {
                    "MY_SCRIPT_NFT_1": {
                        "description": "This is my first NFT thanks to PyCardano",
                        "name": "PyCardano NFT example token 1",
                        "id": 1,
                        "image": "ipfs://QmRhTTbUrPYEw3mJGGhQqQST9k86v1DPBiTTWJGKDJsVFw",
                    },
                    "MY_SCRIPT_NFT_2": {
                        "description": "This is my second NFT thanks to PyCardano",
                        "name": "PyCardano NFT example token 2",
                        "id": 2,
                        "image": "ipfs://QmRhTTbUrPYEw3mJGGhQqQST9k86v1DPBiTTWJGKDJsVFw",
                    },
                }
            }
        }
        # Place metadata in AuxiliaryData, the format acceptable by a transaction.
        auxiliary_data = AuxiliaryData(AlonzoMetadata(metadata=Metadata(metadata)))
        # Create a transaction builder
        builder = TransactionBuilder(self.chain_context)
        # Add our own address as the input address
        builder.add_input_address(address)
        # Add minting script with an empty datum and a minting redeemer
        builder.add_minting_script(
            forty_two_script, redeemer=Redeemer(RedeemerTag.MINT, 42)
        )
        # Set nft we want to mint
        builder.mint = my_nft
        # Set transaction metadata
        builder.auxiliary_data = auxiliary_data
        # Calculate the minimum amount of lovelace that need to hold the NFT we are going to mint
        min_val = min_lovelace(Value(0, my_nft), self.chain_context)
        # Send the NFT to our own address
        nft_output = TransactionOutput(address, Value(min_val, my_nft))
        builder.add_output(nft_output)
        # Create a collateral
        self.fund(address, self.payment_skey, address)
        non_nft_utxo = None
        for utxo in self.chain_context.utxos(str(address)):
            # multi_asset should be empty for collateral utxo
            if not utxo.output.amount.multi_asset:
                non_nft_utxo = utxo
                break
        # NOTE(review): non_nft_utxo may still be None here if every UTxO
        # carries assets; append would then add None — confirm intended.
        builder.collaterals.append(non_nft_utxo)
        # Build and sign transaction
        signed_tx = builder.build_and_sign([self.payment_skey], address)
        # signed_tx.transaction_witness_set.plutus_data
        print("############### Transaction created ###############")
        print(signed_tx)
        print(signed_tx.to_cbor())
        # Submit signed transaction to the network
        print("############### Submitting transaction ###############")
        self.chain_context.submit_tx(signed_tx.to_cbor())
        self.assert_output(address, nft_output)
| StarcoderdataPython |
184015 | import os
import json
import requests
from cloudshell.helpers.app_import.build_app_xml import app_template
from cloudshell.helpers.app_import.upload_app_xml import upload_app_to_cloudshell
from cloudshell.api.cloudshell_api import InputNameValue
from cloudshell.shell.core.session.logging_session import LoggingSessionContext
import cloudshell.helpers.scripts.cloudshell_scripts_helpers as script_help
from cloudshell.core.logger.qs_logger import get_qs_logger
class SaveAppUtility:
    """Saves a deployed CloudShell App instance back as an App template.

    Collects deployment info and a display image for a resource in the
    sandbox, asks the cloud provider to create an image, rebuilds the App
    XML and uploads it to CloudShell.
    """

    def __init__(self, sandbox, resource_name, server_address, admin_user, admin_password, display_image_url='', new_app_name='', save_as=False):
        self.sandbox = sandbox
        self.resource_name = resource_name
        self.app_name = ''
        self.AppTemplateName = ''
        self.new_app_name = ''
        self.api_missing = False
        self.logger = get_qs_logger(log_group=sandbox.id, log_category="QS", log_file_prefix='test')
        # Resolve the App / template names from the reservation details.
        for vm in self.sandbox.automation_api.GetReservationDetails(self.sandbox.id).ReservationDescription.Resources:
            if vm.Name == self.resource_name:
                self.app_name = vm.AppDetails.AppName
                try:
                    # AppTemplateName is not available on older API versions;
                    # fall back to the instance App name in that case.
                    self.AppTemplateName = vm.AppTemplateName
                    if new_app_name == '':
                        if save_as:
                            if self.app_name == self.AppTemplateName:
                                # "Save As" needs a distinct name chosen
                                # before deployment.
                                self.api_missing = True
                            else:
                                self.new_app_name = self.app_name
                        else:
                            self.new_app_name = self.AppTemplateName
                    else:
                        self.new_app_name = new_app_name
                except Exception:
                    self.new_app_name = self.app_name
        if self.api_missing:
            raise Exception("Stopping Save As because App's name was not changed prior to deployment.\n"
                            "Use the 'NewAppName' custom attribute on the command to override.")
        self.server_address = server_address
        self.admin_user = admin_user
        # BUG FIX: this assignment had been replaced by the bare token
        # <PASSWORD> (a redaction artifact), which is a SyntaxError.
        self.admin_password = admin_password
        self.display_image_name = 'vm.png'
        self.display_image_url = display_image_url
        self.display_image_result = None
        self.deploy_info = None
        self.saved_app_info = None
        self.app_xml = None

    def verify_deploy_info_and_display_image(self):
        """Ensure deployment info exists for the resource and fetch its image."""
        self.logger.info('verify_deploy_info_and_display_image')
        self.get_deployment_info()
        if self.deploy_info is None:
            raise Exception("Could not locate Sandbox information on {}, App must be deployed by Setup script to use this functionality.\n".format(self.resource_name))
        self.get_display_image()

    def get_deployment_info(self):
        """Load the deployment JSON stored in sandbox data under the App name."""
        self.logger.info('get_deployment_info')
        for keyValue in self.sandbox.automation_api.GetSandboxData(self.sandbox.id).SandboxDataKeyValues:
            if keyValue.Key == self.app_name:
                self.deploy_info = json.loads(keyValue.Value)
                self.logger.info('deployment_info: {}'.format(keyValue.Value))
                break

    def get_display_image(self):
        """Resolve the App display image: reservation image, URL, or default."""
        self.logger.info('get_display_image')
        self.display_image_result = self.sandbox.automation_api.GetReservationAppImage(self.sandbox.id,
                                                                                       self.resource_name).AppTemplateImage
        if self.display_image_result == '':
            self.logger.info('no display image from GetReservationAppImage')
            try:
                if self.display_image_url != '':
                    self.display_image_result = requests.get(self.display_image_url, allow_redirects=True).content
                    self.display_image_name = os.path.basename(self.display_image_url)
                    self.logger.info('got image from display_image_url')
                else:
                    self.display_image_result = None
                    self.display_image_name = 'vm.png'
                    self.logger.info('no display_image_url, using defaults')
            except Exception:
                self.display_image_result = None
                self.display_image_name = 'vm.png'
                self.logger.info('no image info, using defaults')

    def save_app_info(self, delete):
        """Run the provider's create_app_image command for this resource.

        delete: when True, asks the provider to delete the source after
        creating the image (passed through as a string input).
        """
        self.logger.info('save_app_info. delete: {}'.format(delete))
        command = [x.Name for x in self.sandbox.automation_api.GetResourceConnectedCommands(self.resource_name).Commands
                   if x.Name == 'create_app_image']
        if len(command) == 1:
            inputs = ['True'] if delete else ['False']
            self.logger.info('executing create_app_image on {} with inputs {}'.format(self.resource_name, inputs))
            self.saved_app_info = json.loads(self.sandbox.automation_api.ExecuteResourceConnectedCommand(self.sandbox.id,
                                                                                                         self.resource_name,
                                                                                                         'create_app_image',
                                                                                                         'connectivity',
                                                                                                         inputs).Output)
        else:
            raise Exception("Operation not supported by Cloud Provider\n")

    def create_app_xml(self):
        """Build the App template XML from resource attributes and deploy info."""
        self.logger.info('create_app_xml')
        resource = self.sandbox.automation_api.GetResourceDetails(self.resource_name)
        app_attributes = dict()
        for attr in resource.ResourceAttributes:
            app_attributes[attr.Name] = attr.Value
        app_categories = ['Applications']
        if self.saved_app_info is not None:
            for deploy_path in self.deploy_info['deploypaths']:
                if deploy_path['is_default']:
                    # BUG FIX: dict.iteritems() is Python-2-only; items()
                    # behaves the same on both Python 2 and 3.
                    for key, value in self.saved_app_info.items():
                        # patch for AWS
                        if 'AWS' in key:
                            deploy_path['attributes']['AWS AMI Id'] = value
                        else:
                            deploy_path['attributes'][key] = value
        self.app_xml = app_template(self.new_app_name, self.deploy_info['deploypaths'], app_categories, app_attributes,
                                    resource.ResourceModelName, resource.DriverName,
                                    resource.VmDetails.CloudProviderFullName, self.display_image_name)
        self.logger.info('app xml:')
        self.logger.info(self.app_xml)

    def upload_app(self):
        """Upload the generated App XML (and image) to CloudShell."""
        self.logger.info('upload_app template: ' + self.AppTemplateName)
        result = upload_app_to_cloudshell(self.sandbox.automation_api, self.sandbox.id, self.new_app_name, self.app_xml,
                                          self.server_address, self.admin_user, self.admin_password, self.display_image_result, self.display_image_name)
        if result is None:
            self.sandbox.automation_api.WriteMessageToReservationOutput(self.sandbox.id,
                                                                        "App '{}' has been updated from instance '{}'\n".format(self.new_app_name, self.resource_name))
        else:
            raise Exception("Error uploading App to CloudShell\n{}".format(result))

    def save_flow(self, delete=False):
        """Full save flow: image the instance, rebuild the XML, upload it."""
        self.logger.info('Save flow start. Delete: {}'.format(delete))
        if not self.api_missing:
            self.verify_deploy_info_and_display_image()
            self.save_app_info(delete)
            self.create_app_xml()
            self.upload_app()
            if delete:
                self.logger.info('refreshing app in blueprints')
                self.sandbox.automation_api.RefreshAppInBlueprints(self.AppTemplateName)

    def save_flow_just_app(self, update=False):
        """Save flow without re-imaging: rebuild and upload the XML only."""
        self.logger.info('Save flow "just app" start. Update: {}'.format(update))
        if not self.api_missing:
            self.verify_deploy_info_and_display_image()
            self.create_app_xml()
            self.upload_app()
            if update:
                self.logger.info('refreshing app in blueprints')
                self.sandbox.automation_api.RefreshAppInBlueprints(self.AppTemplateName)
from sklearn.decomposition import PCA
import pandas as pd
from sklearn.preprocessing import StandardScaler
import matplotlib.pyplot as plt

# Load the Iris data set and separate the features from the class labels.
df = pd.read_csv("Iris.csv")
labels = df['Species']
X = df.drop(['Id', 'Species'], axis=1)

# Standardise the features before PCA (PCA is scale sensitive).
X_std = StandardScaler().fit_transform(X)

pca = PCA(n_components=4)
X_transform = pca.fit_transform(X_std)
explained_var = pca.explained_variance_ratio_
for var in explained_var:
    # BUG FIX: "print var" is Python-2 statement syntax and a SyntaxError
    # on Python 3.
    print(var)

# Bar chart of the variance contributed by each principal component.
# NOTE(review): label=var only labels the bars with the LAST component's
# variance (the loop variable after the loop) — confirm that is intended.
plt.bar([1, 2, 3, 4], explained_var, label=var)
plt.xlabel("Component #")
plt.ylabel("% Variance Contribution")
plt.legend()
plt.show()

# pca1 = zip(*X_transform)[0]
# pca2 = zip(*X_transform)[1]
# color_dict = {}
# color_dict["Iris-setosa"] = "green"
# color_dict["Iris-versicolor"]='red'
# color_dict["Iris-virginica"] = 'blue'
# i=0
# for label in labels.values:
#     plt.scatter(pca1[i],pca2[i],color=color_dict[label])
#     i=i+1
# plt.show()
| StarcoderdataPython |
96012 | <reponame>NREL/engage
"""
Unit tests for Django app 'api' models - api/models/configuration.
"""
from mock import patch
from django.contrib.auth.models import User
from django.test import TestCase
from django.utils.html import mark_safe
from django.utils.safestring import SafeString
from api.models.engage import Help_Guide, User_Profile
class HelpGuideTestCase(TestCase):
    """Unit tests for the Help_Guide model (requires the Django test DB)."""

    @classmethod
    def setUpTestData(cls):
        # One shared fixture row for the whole test case.
        cls.help_guide = Help_Guide.objects.create(
            key="my-key", html="<p>This is Help Guide.</>"
        )

    def test_class_meta(self):
        self.assertEqual(Help_Guide._meta.db_table, "help_guide")
        self.assertEqual(Help_Guide._meta.verbose_name_plural, "[Admin] Help Guide")

    def test_string_representation(self):
        # __str__ is expected to return the key.
        self.assertEqual(str(self.help_guide), self.help_guide.key)

    def test_safe_html(self):
        # safe_html() must mark the stored HTML as safe for templates.
        self.assertIsInstance(self.help_guide.safe_html(), SafeString)

    def test_get_safe_html__available(self):
        result = Help_Guide.get_safe_html(key="my-key")
        self.assertEqual(result, mark_safe(self.help_guide.html))

    def test_get_safe_html__not_available(self):
        # Unknown keys fall back to a placeholder string.
        result = Help_Guide.get_safe_html(key="unknown-key")
        self.assertEqual(result, "Not Available")
class UserProfileTestCase(TestCase):
    """Unit tests for the User_Profile model and its register() helper.

    The outbound registration email is mocked so no mail is sent.
    """

    @classmethod
    def setUpTestData(cls):
        cls.user = User.objects.create_user(
            username="user1", password="<PASSWORD>", email="<EMAIL>"
        )
        cls.user_profile = User_Profile.objects.create(
            user=cls.user, organization="my-organizaiton1"
        )

    def test_class_meta(self):
        self.assertEqual(User_Profile._meta.db_table, "user_profile")
        self.assertEqual(User_Profile._meta.verbose_name_plural, "[0] User Profiles")

    def test_string_representation(self):
        # __str__ is expected to join the user's first and last names.
        self.assertEqual(
            str(self.user_profile), f"{self.user.first_name} {self.user.last_name}"
        )

    @patch("api.models.engage.send_mail")
    def test_register__user_not_existing(self, send_mail):
        # register() should create the user, send a mail, return the profile.
        result_profile = User_Profile.register(
            http_host="localhost",
            email="<EMAIL>",
            password="<PASSWORD>",
            first_name="my-firstname",
            last_name="my-lastname",
            organization="my-organization2",
        )
        self.assertTrue(send_mail.called)
        self.assertEqual(result_profile.user.email, "<EMAIL>")

    @patch("api.models.engage.send_mail")
    def test_register__user_existing(self, send_mail):
        # Registering with an email that already has a User should reuse it.
        user = User.objects.create_user(
            username="user2", password="<PASSWORD>", email="<EMAIL>"
        )
        result_profile = User_Profile.register(
            http_host="localhost",
            email="<EMAIL>",
            password="<PASSWORD>",
            first_name="my-firstname",
            last_name="my-lastname",
            organization="my-organization3",
        )
        self.assertTrue(send_mail.called)
        self.assertEqual(result_profile.user.email, "<EMAIL>")
| StarcoderdataPython |
3280387 | import pybullet
import math
def groundVertices(size, basez, world, base_position, view_range, chunk_view_adjustment):
    """Build the terrain triangle list for every chunk in view.

    Walks each chunk within view_range of base_position and collects
    (vertices, topsoil): the triangle vertex positions and the matching
    per-vertex surface colours. Interior squares come from render_chunk,
    seams between neighbouring chunks from render_x_border /
    render_y_border, and the single corner square shared by four chunks is
    emitted inline.
    """
    vertices = []
    topsoil = []
    for view_x in range(-view_range,view_range):
        for view_y in range(-view_range, view_range):
            # This chunk and its +x / +y / diagonal neighbours.
            chunk_loc = (base_position[0] + view_x, base_position[1] + view_y)
            chunk_loc_xplus = (base_position[0] + view_x + 1, base_position[1] + view_y)
            chunk_loc_yplus = (base_position[0] + view_x, base_position[1] + view_y + 1)
            chunk_loc_xyplus = (base_position[0] + view_x + 1, base_position[1] + view_y + 1)
            if chunk_loc in world:
                current_chunk = world[chunk_loc]
                # Interior squares of this chunk.
                v,t = render_chunk(size, chunk_loc, current_chunk, basez, chunk_view_adjustment)
                vertices = vertices + v
                topsoil = topsoil + t
                # Seam strip between this chunk and its +x neighbour.
                if chunk_loc_xplus in world:
                    current_chunk_xplus = world[chunk_loc_xplus]
                    v,t = render_x_border(size, chunk_loc, current_chunk, current_chunk_xplus, basez, chunk_view_adjustment)
                    vertices = vertices + v
                    topsoil = topsoil + t
                # Seam strip between this chunk and its +y neighbour.
                if chunk_loc_yplus in world:
                    current_chunk_yplus = world[chunk_loc_yplus]
                    v,t = render_y_border(size, chunk_loc, current_chunk, current_chunk_yplus, basez, chunk_view_adjustment)
                    vertices = vertices + v
                    topsoil = topsoil + t
                # Corner square shared by four chunks.
                # NOTE(review): this branch reads current_chunk_xplus and
                # current_chunk_yplus, so it assumes that whenever the
                # diagonal neighbour exists the +x and +y neighbours exist
                # too; otherwise a NameError is possible — confirm.
                if chunk_loc_xyplus in world:
                    current_chunk_xyplus = world[chunk_loc_xyplus]
                    corner_x = chunk_view_adjustment*(size-1) + (chunk_loc[0]*chunk_view_adjustment*size)
                    corner_y = chunk_view_adjustment*(size-1) + (chunk_loc[1]*chunk_view_adjustment*size)
                    vertices = vertices + render_square(chunk_view_adjustment, corner_x, corner_y, basez + len(current_chunk[size-1][size-1]), basez + len(current_chunk_xplus[0][size-1]), basez + len(current_chunk_yplus[size-1][0]), basez + len(current_chunk_xyplus[0][0]))
                    topsoil = topsoil + color_square(current_chunk[size-1][size-1][-1], current_chunk_xplus[0][size-1][-1], current_chunk_yplus[size-1][0][-1], current_chunk_xyplus[0][0][-1])
    return vertices, topsoil
def render_chunk(size, base_position, world, basez, chunk_adjust):
    """Triangulate the interior of one chunk.

    Returns (vertices, topsoil): triangle vertex positions and the matching
    per-vertex colours. Heights are basez plus the column height (length of
    the block stack) at each grid point.
    """
    origin_x = base_position[0] * chunk_adjust * size
    origin_y = base_position[1] * chunk_adjust * size
    verts = []
    colors = []
    # Stop one short of the edge in each direction; border squares are
    # produced by the render_*_border helpers.
    for col in range(size - 1):
        for row in range(size - 1):
            corner_x = chunk_adjust * col + origin_x
            corner_y = chunk_adjust * row + origin_y
            verts += render_square(
                chunk_adjust, corner_x, corner_y,
                basez + len(world[col][row]),
                basez + len(world[col + 1][row]),
                basez + len(world[col][row + 1]),
                basez + len(world[col + 1][row + 1]),
            )
            colors += color_square(
                world[col][row][-1],
                world[col + 1][row][-1],
                world[col][row + 1][-1],
                world[col + 1][row + 1][-1],
            )
    return verts, colors
def render_x_border(size, base_position, world1, world2, basez, chunk_adjust):
    """Stitch the seam between chunk *world1* and its +x neighbour *world2*.

    Renders the single strip of quads whose low-x edge lies on world1's last
    row (index size-1) and whose high-x edge lies on world2's first row.
    Returns (vertices, topsoil) like render_chunk.
    """
    vertices = []
    topsoil = []
    basex, basey = base_position
    basex *= chunk_adjust
    basey *= chunk_adjust
    for y in range(0, size-1):
        square_y = chunk_adjust*y + (basey*size)
        vertices = vertices + render_square(chunk_adjust, chunk_adjust*(size-1) + (basex*size), square_y, basez+len(world1[size-1][y]), basez+len(world2[0][y]), basez+len(world1[size-1][y+1]), basez+len(world2[0][y+1]))
        topsoil = topsoil + color_square(world1[size-1][y][-1], world2[0][y][-1], world1[size-1][y+1][-1], world2[0][y+1][-1])
    return vertices, topsoil
def render_y_border(size, base_position, world1, world2, basez, chunk_adjust):
    """Stitch the seam between chunk *world1* and its +y neighbour *world2*.

    Mirror of render_x_border: renders the strip of quads bridging world1's
    last column (index size-1) and world2's first column.
    Returns (vertices, topsoil) like render_chunk.
    """
    vertices = []
    topsoil = []
    basex, basey = base_position
    basex *= chunk_adjust
    basey *= chunk_adjust
    for x in range(0, size-1):
        square_x = chunk_adjust*x + (basex*size)
        vertices = vertices + render_square(chunk_adjust, square_x, chunk_adjust*(size-1) + (basey*size), basez+len(world1[x][size-1]), basez+len(world1[x+1][size-1]), basez+len(world2[x][0]), basez+len(world2[x+1][0]))
        topsoil = topsoil + color_square(world1[x][size-1][-1], world1[x+1][size-1][-1], world2[x][0][-1], world2[x+1][0][-1])
    return vertices, topsoil
def render_square(chunk_adjust, base_x, base_y, z1, z2, z3, z4):
    """Return the six vertices (two triangles) of one terrain quad.

    The four corner heights map to the quad as:
      z1 -> (0,0), z2 -> (1,0), z3 -> (0,1), z4 -> (1,1)
    and the triangles share the (1,0)/(0,1) diagonal.
    """
    x_lo, y_lo = base_x, base_y
    x_hi = base_x + chunk_adjust
    y_hi = base_y + chunk_adjust
    corner_00 = (x_lo, y_lo, z1)
    corner_10 = (x_hi, y_lo, z2)
    corner_01 = (x_lo, y_hi, z3)
    corner_11 = (x_hi, y_hi, z4)
    # Triangle 1: 00-10-01, triangle 2: 11-10-01 (shared diagonal edge).
    return [corner_00, corner_10, corner_01, corner_11, corner_10, corner_01]
def color_square(c1, c2, c3, c4):
    """Return per-vertex colours in the same order render_square emits its
    vertices: corners (0,0), (1,0), (0,1), (1,1), then the repeated (1,0)
    and (0,1) of the second triangle."""
    return [c1, c2, c3, c4, c2, c3]
| StarcoderdataPython |
3270506 | import copy
import torch.optim as optim
import torch.nn as nn
import torch.nn.functional as F
import math
from optimizers.buffer import DataLoaderBuffer
from optimizers.scheduler import ReduceLROnPlateau
class TorchSGD:
    r"""
    Implements a stochastic gradient descent optimizer, which serves as the default optimizer for models defined
    in /models (apart from <NAME> models, which use lmp.py).
    Arguments:
        net: the model to be trained. This must be a subclass of nn.Module
        train_loader: data loader for the training dataset
        val_loader: data loader for the validation dataset
        criterion: loss function (nll or mse or l1)
        val_criterion: For regression task, use this option to set different loss for training and validation.
            E.g. train loss is mse, but l1 could be used for validation and showing result.
        args: training parameters passed in from run_gce_nn.py
        name: name of this instance, which serves as prefix to the text printed out
        sum_net: for wgnet algorithm, this is the convex combination of modules that has been trained,
            the output of this net will be combined with the output of the module to be trained.
        transform_net: this can be either a features extractor or the trained modules in the case of dgn.
            The output of this net is used to train the module that is to be trained.
        alpha: alpha=0 indicates dgn training is required (no alpha involved. The input goes through
            'trained_nets', then goes to 'net' (we only optimize 'net'). alpha > 0 indicates wgn training,
            the output is convex combination of outputs of all nets: out = alpha * net(x) + (1-alpha) * trained_nets(x)
    """
    # Hard cap on epochs when n_epochs == 0 ("unlimited") is requested in train().
    MAX_N_EPOCHS = 10**4

    def __init__(self, net, train_loader, val_loader, criterion, val_criterion,
                 args, name='', sum_net=None, transform_net=None, alpha=0.):
        self.net = net
        self.net.to(args.device)
        self.args = args
        self.name = name
        self.alpha = alpha
        self.n_epochs = args.n_epochs
        self.epoch = 0
        # batch_size is specified for warm_start training
        self.train_buffer = DataLoaderBuffer(train_loader, args.caching, args.device, sum_net, transform_net)
        val_caching = 3 if args.caching else 0
        self.val_buffer = DataLoaderBuffer(val_loader, val_caching, args.device, sum_net, transform_net)
        # Best-so-far records; 'inf' sentinels so the first epoch always wins.
        self.best_train = {'train_loss': float('inf'), 'train_error': float('inf'),
                           'train_rmse': float('inf'), 'train_mae': float('inf'),
                           'val_loss': float('inf'), 'val_error': float('inf'),
                           'val_rmse': float('inf'), 'val_mae': float('inf'),
                           'epoch': 0}
        self.best_train_model = None
        self.best_validation = {'train_loss': float('inf'), 'train_error': float('inf'),
                                'val_loss': float('inf'), 'val_error': float('inf'), 'epoch': 0}
        self.best_validation_model = None
        self.criterion = criterion
        self.val_criterion = val_criterion
        # Two parameter groups: decayed weights vs. non-decayed bias/convex coeffs.
        params_groups = self.get_params_groups(net, args.no_bias_decay)
        if args.optimizer == 'sgd' or args.optimizer == 'nesterov':
            self.optimizer = optim.SGD(params_groups, lr=args.lr, momentum=args.momentum,
                                       weight_decay=args.weight_decay, nesterov=(args.optimizer == 'nesterov'))
        elif args.optimizer == 'adam':
            self.optimizer = optim.Adam(params_groups, lr=args.lr, weight_decay=args.weight_decay)
        else:
            raise Exception('Unknown optimizer: ' + args.optimizer)
        # Learning-rate schedule: explicit milestones, exponential, or plateau.
        if args.milestones:
            if args.milestones == [0]:
                milestones = self.get_auto_milestones(self.n_epochs, args.lr, args.min_lr, args.factor, )
            else:
                milestones = args.milestones
            lr_decay_scheme = 'multi-step decay, gamma={}, milestones={}, min_lr={}'.format(
                args.factor, milestones, args.min_lr)
            self.scheduler = optim.lr_scheduler.MultiStepLR(self.optimizer, milestones, args.factor)
        elif args.patience <= 0:
            gamma = args.factor if args.factor > 0 else (args.min_lr / args.lr)**(1./args.epochs)
            lr_decay_scheme = 'exponential decay each epoch, gamma={:.4f}, min_lr={}'.format(gamma, args.min_lr)
            self.scheduler = optim.lr_scheduler.ExponentialLR(self.optimizer, gamma)
        else:
            lr_decay_scheme = 'decay on plateau, gamma={}, patience={}, min_lr={}, threshold={}.{}'.format(
                args.factor, args.patience, args.min_lr, args.threshold_mode, args.threshold)
            self.scheduler = ReduceLROnPlateau(self.optimizer, args.factor, args.patience,
                                               args.threshold, mode=args.threshold_mode)
        if args.verbose >= 5:
            self.print_interval = 1
        elif args.verbose <= 3:
            self.print_interval = 10**10 # will print only final result
        else: # args.verbose == 4
            self.print_interval = max(10, self.n_epochs // 100) # print module result 10 times
        if self.n_epochs <= 0: # unlimited number of epochs
            self.print_interval = 100
        if args.verbose >= 3:
            print('SGD OPTIMIZER LOADED:\t{} ({}, bsize={}, lr={}, momentum={}, weight_decay={},\n\t\t\t{})\n'.format(
                args.optimizer, 'regression' if args.regression else 'classification',
                args.batch_size, args.lr, args.momentum, round(args.weight_decay, 9), lr_decay_scheme
            ))

    @staticmethod
    def get_auto_milestones(n_epochs, lr, min_lr, factor):
        """return milestones such that lr is equally reduce to min_lr in n_epochs"""
        # k = number of decay steps needed to go from lr down to min_lr by *factor*.
        k = int((math.log(min_lr) - math.log(lr) + 0.001) // math.log(factor) + 1)
        step_size = int((n_epochs - 1) // (k+1) + 1)
        milestones = [(i+1) * step_size for i in range(k)]
        return milestones

    @staticmethod
    def get_params_groups(net, no_bias_decay):
        """Separate parameters of the input nn.Module net into two groups, the bias group with no weight decay,
        and the group of remaining weights with weight decay specified by the weight-decay argument"""
        group_decay = []
        group_no_decay = []
        for name, param in net.named_parameters():
            if param.requires_grad:
                if '.cw' in name or '.alpha' in name: # parameterization of convex coeffs of wgn algorithm are not decayed
                    group_no_decay.append(param)
                elif no_bias_decay:
                    if '.bias' in name:
                        group_no_decay.append(param)
                    else:
                        group_decay.append(param)
                else:
                    group_decay.append(param)
        groups = [dict(params=group_decay), dict(params=group_no_decay, weight_decay=.0)]
        return groups

    @staticmethod
    def class2vec(y, k):
        """Convert from a vector of classes 'y' to the corresponding vector of vector encoding of classes.
        k is the number of classes. Returns yk with dimension len(y) x k, where yk[i] the vector encode of class y[i]
        """
        n = y.size(0)
        if k <= 1: # single output = regression
            yk = y.view(n, 1).float()
        else: # multi output -> classification
            yk = y.new(n, k).float().fill_(0.0)
            yk[range(n), y] = 1.0
        return yk

    def train_epoch(self):
        """train the next epoch"""
        self.epoch += 1
        train_loss = 0.
        mse = mae = 0.
        n_correct = 0
        n_trained = 0
        n_data = self.train_buffer.n_samples
        batch_log = []
        self.net.train()
        # get batch data from train_buffer. x,y is the data point/label, s is the output of trained_nets,
        # which is convex combination of trained modules for wgnet, or output of trained layers for dgnet,
        # or it can be a net for features extraction.
        for batch_idx, (x, y, s) in enumerate(self.train_buffer):
            self.optimizer.zero_grad()
            output = self.net(x)
            if s is not None: # s, if not None, is output of sum_net
                output = self.alpha * output + (1. - self.alpha) * s
            yt = y if self.args.loss == 'nll' else self.class2vec(y, output.size(1)) # form of y depends on loss type
            loss = self.criterion(output, yt)
            loss.backward()
            self.optimizer.step()
            m = n_trained
            n_trained += y.shape[0]
            # Incremental (running) mean of the per-batch losses.
            train_loss = (m / n_trained) * train_loss + (y.shape[0]/n_trained) * loss.item()
            if math.isnan(train_loss):
                # Abort the epoch early; caller asserts on NaN.
                return float('nan'), float('nan'), float('nan'), float('nan'), []
            if self.args.regression:
                mae += F.l1_loss(output, yt).item() * y.size(0)
                mse += F.mse_loss(output, yt).item() * y.size(0)
            n_ok = 0
            if not self.args.regression: # classification task
                _, pred = output.max(1)
                n_ok = pred.eq(y).sum().item()
            elif self.args.dataset.endswith("_r"): # classification posed as regression [-1, 1]
                pred = output.new_ones(output.shape[0], dtype=y.dtype)
                pred[output[:, 0] < 0] = -1
                n_ok = pred.eq(y).sum().item()
            n_correct += n_ok
            # batch_log: batchId, loss, error
            if self.args.verbose > 2: # saving batch_log is memory intensive, only do so if verbose > 2
                batch_log.append([batch_idx + 1, loss.item(),
                                  (self.train_buffer.batch_size - n_ok)/self.train_buffer.batch_size])
                if self.args.verbose >= 7:
                    if self.args.regression and not self.args.dataset.endswith("_r"):
                        print(' train (epoch {}) batch {:4} [{:7}/{} {:3}% ] loss: {:.5f}'
                              .format(self.epoch, batch_idx + 1, n_trained, n_data, int(100. * n_trained / n_data),
                                      batch_log[-1][1]))
                    else:
                        print(' train (epoch {}) batch {:4} [{:7}/{} {:3}% ] loss: {:.5f} accuracy: {:.2f}%'
                              .format(self.epoch, batch_idx + 1, n_trained, n_data, int(100. * n_trained / n_data),
                                      batch_log[-1][1], 100.*(1. - batch_log[-1][2])))
        train_error = 1. - n_correct / n_trained
        mse /= n_trained
        rmse = mse**0.5
        mae /= n_trained
        if self.epoch % self.print_interval == 0 or (self.args.verbose >= 4 and self.epoch == self.n_epochs):
            if self.args.regression and not self.args.dataset.endswith("_r"):
                print('TRAIN {} Epoch {}/{}: Loss: {:.5f}'.format(
                    self.name, self.epoch, self.n_epochs, train_loss
                ))
            else:
                print('TRAIN {} Epoch {}/{}: Loss: {:.5f} Accuracy: {:.2f}% ({}/{})'.format(
                    self.name, self.epoch, self.n_epochs, train_loss, 100*n_correct/n_trained, n_correct, n_trained
                ))
        return train_loss, train_error, rmse, mae, batch_log

    def eval(self, buffer):
        """Evaluate the current net on every batch of *buffer*.

        Returns (eval_loss, error, rmse, mae); error is meaningful for the
        classification paths, rmse/mae only when args.regression is set.
        NOTE(review): runs without torch.no_grad(); results are unaffected but
        activation memory could be saved -- confirm before changing.
        """
        self.net.eval()
        eval_loss = 0.
        correct = n_eval = 0
        mae = mse = 0
        for x, y, s in buffer:
            output = self.net(x)
            if s is not None: # s, if not None, is output of sum_net
                output = self.alpha * output + (1. - self.alpha) * s
            yt = y if type(self.val_criterion) is nn.CrossEntropyLoss else self.class2vec(y, output.size(1))
            loss = self.val_criterion(output, yt)
            n_eval += y.shape[0] # y.shape[0] = number of samples in this batch. Next, update loss incrementally
            eval_loss = ((n_eval - y.shape[0])/n_eval) * eval_loss + (y.shape[0]/n_eval) * loss.item()
            if self.args.regression:
                mae += F.l1_loss(output, yt).item() * y.size(0)
                mse += F.mse_loss(output, yt).item() * y.size(0)
            if not self.args.regression: # classification task
                _, pred = output.max(1)
                correct += pred.eq(y).sum().item()
            elif self.args.dataset.endswith("_r"): # classification posed as regression [-1, 1]
                pred = output.new_ones(output.shape[0], dtype=y.dtype)
                pred[output[:, 0] < 0] = -1
                correct += pred.eq(y).sum().item()
        mae /= n_eval
        mse /= n_eval
        rmse = mse ** 0.5
        error = 1.0 - correct / n_eval
        return eval_loss, error, rmse, mae

    def validate_epoch(self):
        """Run eval() on the validation buffer, optionally print a summary,
        and return (loss, error, rmse, mae, current_lr)."""
        loss, error, rmse, mae = self.eval(self.val_buffer)
        lr = round(self.optimizer.param_groups[0]['lr'], 7)
        if self.epoch % self.print_interval == 0:
            if self.args.regression and not self.args.dataset.endswith("_r"):
                print('VALDT {} Epoch {}/{}: Loss: {:.5f} LR: {}\n'.format(
                    self.name, self.epoch, self.n_epochs, loss, lr
                ))
            else:
                print('VALDT {} Epoch {}/{}: Loss: {:.5f} Accuracy: {:.2f}% ({:.0f}/{}) LR: {}\n'.format(
                    self.name, self.epoch, self.n_epochs, loss, 100. * (1.0 - error),
                    (1.0 - error) * self.val_buffer.n_samples, self.val_buffer.n_samples, lr
                ))
        return loss, error, rmse, mae, lr

    def train(self, n_epochs=0):
        """Run the full train/validate loop, tracking best-train and
        best-validation snapshots; returns the per-epoch log list."""
        if n_epochs > 0:
            self.n_epochs = n_epochs
        if self.n_epochs == 0: # automatically determine the number of epochs
            self.n_epochs = self.MAX_N_EPOCHS
        log = []
        while self.epoch < self.n_epochs:
            alpha = 0.
            if hasattr(self.net, 'cw'):
                cw, alpha = self.net.normalize_cw(self.net.cw, self.net.alpha)
                # print('convex coeffs: alpha={}, cw={}'.format(round(alpha.data.item(),4),
                #                                               [round(w.item(),4) for w in cw.data]))
            tr_loss, tr_error, tr_rmse, tr_mae, train_log = self.train_epoch()
            assert not math.isnan(tr_loss), 'train_loss is NaN (probably too high LR, reduce it!)'
            vl_loss, vl_error, vl_rmse, vl_mae, lr = self.validate_epoch()
            result = {'module': 1, 'epoch': self.epoch,
                      'train_loss': tr_loss, 'train_error': tr_error, 'train_rmse': tr_rmse, 'train_mae': tr_mae,
                      'val_loss': vl_loss, 'val_error': vl_error, 'val_rmse': vl_rmse, 'val_mae': vl_mae}
            if lr > self.args.min_lr:
                if type(self.scheduler) == ReduceLROnPlateau:
                    self.scheduler.step(tr_loss, epoch=self.epoch) # change learning rate according to scheduler
                else:
                    self.scheduler.step(epoch=self.epoch)
            elif self.n_epochs > self.epoch + self.args.patience: # when lr <= min_lr, run maximally 10 more epochs
                self.n_epochs = self.epoch + self.args.patience
            if tr_loss < self.best_train['train_loss']:
                self.best_train = result.copy()
                self.best_train_model = copy.deepcopy(self.net)
            # Regression selects on val loss; classification on val error.
            if (self.args.regression and vl_loss < self.best_validation['val_loss']) \
                    or (not self.args.regression and vl_error < self.best_validation['val_error']):
                self.best_validation = result.copy()
                self.best_validation_model = copy.deepcopy(self.net)
            log.append({'epoch': self.epoch, 'lr': lr, 'train_loss': tr_loss, 'train_error': tr_error,
                        'val_loss': vl_loss, 'val_error': vl_error, 'alpha': alpha, 'train_log': train_log})
            if self.epoch % self.print_interval == 0 or self.epoch >= self.n_epochs:
                if self.args.regression and not self.args.dataset.endswith("_r"):
                    print('BEST TRAIN RESULT: Loss: {:.5f} (Epoch {})'.format(
                        self.best_train['train_loss'], self.best_train['epoch']
                    ))
                    print('BEST VALDT RESULT: Loss: {:.5f} (Epoch {})\n'.format(
                        self.best_validation['val_loss'], self.best_validation['epoch']
                    ))
                else:
                    print('BEST TRAIN RESULT: Loss: {:.5f} Accuracy: {:.2f}% (Epoch {})'.format(
                        self.best_train['train_loss'], 100. * (1 - self.best_train['train_error']),
                        self.best_train['epoch']
                    ))
                    print('BEST VALDT RESULT: Loss: {:.5f} Accuracy: {:.2f}% (Epoch {})\n'.format(
                        self.best_validation['val_loss'], 100. * (1 - self.best_validation['val_error']),
                        self.best_validation['epoch']
                    ))
        return log
| StarcoderdataPython |
3308611 | <reponame>bela127/tf-custom-multi-gpu-training<gh_stars>1-10
import tensorflow as tf
from addict import Dict
def standart_callbacks():
    """Return the default callback configuration (an addict.Dict):
    no checkpoint loading, no warmup, identity input preprocessing.

    NOTE(review): the name keeps its original spelling ("standart") because
    external callers depend on it.
    """
    callbacks = Dict()
    callbacks.load_ckpt = None
    callbacks.warmup = None
    callbacks.input_pre = input_pre
    return callbacks
def input_pre(batch):
    """Default input-preprocessing hook: return the batch unchanged."""
    return batch
| StarcoderdataPython |
1682752 | <gh_stars>1000+
import spacy

nlp = spacy.load("es_core_news_sm")
doc = nlp("Por Berlín fluye el río Esprea.")

# Collect every token's text and part-of-speech tag up front.
token_texts = [token.text for token in doc]
pos_tags = [token.pos_ for token in doc]

for index, pos in enumerate(pos_tags):
    # Check whether the current token is a proper noun.
    if pos == "PROPN":
        # Check whether the next token is a verb. The bounds guard avoids an
        # IndexError when the proper noun is the last token of the sentence.
        if index + 1 < len(pos_tags) and pos_tags[index + 1] == "VERB":
            result = token_texts[index]
            print("Encontré un nombre propio antes de un verbo:", result)
| StarcoderdataPython |
109682 | <reponame>NSLS-II-OPLS/profile_collection
def ps(uid='-1',det='default',suffix='default',shift=.5,logplot='off',figure_number=999):
    '''
    function to determine statistic on line profile (assumes either peak or erf-profile)\n
    calling sequence: uid='-1',det='default',suffix='default',shift=.5)\n
    det='default' -> get detector from metadata, otherwise: specify, e.g. det='eiger4m_single'\n
    suffix='default' -> _stats1_total / _sum_all, otherwise: specify, e.g. suffix='_stats2_total'\n
    shift: scale for peak presence (0.5 -> peak has to be taller factor 2 above background)\n
    figure_number: default=999 -> specify figure number for plot

    Results are exposed as function attributes after the call:
    ps.peak, ps.com, ps.cen, ps.fwhm.
    '''
    #import datetime
    #import time
    #import numpy as np
    #from PIL import Image
    #from databroker import db, get_fields, get_images, get_table
    #from matplotlib import pyplot as pltfrom
    #from lmfit import Model
    #from lmfit import minimize, Parameters, Parameter, report_fit
    #from scipy.special import erf
    # get the scan information:
    if uid == '-1':
        uid=-1
    # Resolve the intensity field name from metadata (det='default') or from
    # the explicitly requested detector plus suffix.
    if det == 'default':
        if db[uid].start.detectors[0] == 'elm' and suffix=='default':
            intensity_field='elm_sum_all'
        elif db[uid].start.detectors[0] == 'elm':
            intensity_field='elm'+suffix
        elif suffix == 'default':
            intensity_field= db[uid].start.detectors[0]+'_stats1_total'
        else:
            intensity_field= db[uid].start.detectors[0]+suffix
    else:
        if det=='elm' and suffix == 'default':
            intensity_field='elm_sum_all'
        elif det=='elm':
            intensity_field = 'elm'+suffix
        elif suffix == 'default':
            intensity_field=det+'_stats1_total'
        else:
            intensity_field=det+suffix
    field = db[uid].start.motors[0]
    #field='dcm_b';intensity_field='elm_sum_all'
    [x,y,t]=get_data(uid,field=field, intensity_field=intensity_field, det=None, debug=False) #need to re-write way to get data
    x=np.array(x)
    y=np.array(y)
    x = np.nan_to_num(x)
    y = np.nan_to_num(y)
    # Simple statistics: peak position/height and center of mass.
    PEAK=x[np.argmax(y)]
    PEAK_y=np.max(y)
    COM=np.sum(x * y) / np.sum(y)
    ### from Maksim: assume this is a peak profile:
    def is_positive(num):
        return True if num > 0 else False
    # Normalize values first:
    ym = (y - np.min(y)) / (np.max(y) - np.min(y)) - shift # roots are at Y=0
    positive = is_positive(ym[0])
    list_of_roots = []
    for i in range(len(y)):
        current_positive = is_positive(ym[i])
        if current_positive != positive:
            # Linear interpolation of the zero-crossing position between samples.
            list_of_roots.append(x[i - 1] + (x[i] - x[i - 1]) / (abs(ym[i]) + abs(ym[i - 1])) * abs(ym[i - 1]))
            positive = not positive
    if len(list_of_roots) >= 2:
        # Two or more crossings: treat as a peak; FWHM from outermost roots.
        FWHM=abs(list_of_roots[-1] - list_of_roots[0])
        CEN=list_of_roots[0]+0.5*(list_of_roots[1]-list_of_roots[0])
        ps.fwhm=FWHM
        ps.cen=CEN
        #return {
        #    'fwhm': abs(list_of_roots[-1] - list_of_roots[0]),
        #    'x_range': list_of_roots,
        #}
    else: # ok, maybe it's a step function..
        print('no peak...trying step function...')
        ym = ym + shift
        def err_func(x, x0, k=2, A=1, base=0 ): #### erf fit from Yugang
            return base - A * erf(k*(x-x0))
        mod = Model( err_func )
        ### estimate starting values:
        x0=np.mean(x)
        #k=0.1*(np.max(x)-np.min(x))
        pars = mod.make_params( x0=x0, k=2, A = 1., base = 0. )
        result = mod.fit(ym, pars, x = x )
        # For the erf fit, report the inflection point as CEN and the slope
        # parameter k as "FWHM" (width proxy).
        CEN=result.best_values['x0']
        FWHM = result.best_values['k']
        ps.cen = CEN
        ps.fwhm = FWHM
    ### re-plot results:
    if logplot=='on':
        plt.close(figure_number)
        plt.figure(figure_number)
        plt.semilogy([PEAK,PEAK],[np.min(y),np.max(y)],'k--',label='PEAK')
        #plt.hold(True)
        plt.semilogy([CEN,CEN],[np.min(y),np.max(y)],'r-.',label='CEN')
        plt.semilogy([COM,COM],[np.min(y),np.max(y)],'g.-.',label='COM')
        plt.semilogy(x,y,'bo-')
        plt.xlabel(field);plt.ylabel(intensity_field)
        plt.legend()
        plt.title('uid: '+str(uid)+' @ '+str(t)+'\nPEAK: '+str(PEAK_y)[:8]+' @ '+str(PEAK)[:8]+' COM @ '+str(COM)[:8]+ '\n FWHM: '+str(FWHM)[:8]+' @ CEN: '+str(CEN)[:8],size=9)
        plt.show()
    else:
        plt.close(figure_number)
        plt.figure(figure_number)
        plt.plot([PEAK,PEAK],[np.min(y),np.max(y)],'k--',label='PEAK')
        #plt.hold(True)
        plt.plot([CEN,CEN],[np.min(y),np.max(y)],'r-.',label='CEN')
        plt.plot([COM,COM],[np.min(y),np.max(y)],'g.-.',label='COM')
        plt.plot(x,y,'bo-')
        plt.xlabel(field);plt.ylabel(intensity_field)
        plt.legend()
        plt.title('uid: '+str(uid)+' @ '+str(t)+'\nPEAK: '+str(PEAK_y)[:8]+' @ '+str(PEAK)[:8]+' COM @ '+str(COM)[:8]+ '\n FWHM: '+str(FWHM)[:8]+' @ CEN: '+str(CEN)[:8],size=9)
        plt.show()
    ### assign values of interest as function attributes:
    ps.peak=PEAK
    ps.com=COM
| StarcoderdataPython |
4825131 | <filename>payments/tests/test_expire_too_old_unpaid_orders.py
from datetime import timedelta
import pytest
from django.core import management
from django.utils.timezone import now
from resources.models import Reservation
from ..factories import OrderFactory
from ..models import Order, OrderLogEntry
PAYMENT_WAITING_MINUTES = 15
REQUESTED_PAYMENT_WAITING_HOURS = 24
COMMAND_NAME = 'expire_too_old_unpaid_orders'
def get_order_expired_time():
    """Creation timestamp just *past* the payment deadline (10 s over)."""
    return now() - (timedelta(minutes=PAYMENT_WAITING_MINUTES) + timedelta(seconds=10))
def get_order_not_expired_time():
    """Creation timestamp still *inside* the payment deadline (10 s to spare)."""
    return now() - (timedelta(minutes=PAYMENT_WAITING_MINUTES) - timedelta(seconds=10))
@pytest.fixture(autouse=True)
def init(db, settings):
    """Point the payment-expiry settings at the test constants for every test
    in this module (autouse); also pulls in the django db fixture."""
    settings.RESPA_PAYMENTS_PAYMENT_WAITING_TIME = PAYMENT_WAITING_MINUTES
    settings.RESPA_PAYMENTS_PAYMENT_REQUESTED_WAITING_TIME = REQUESTED_PAYMENT_WAITING_HOURS
def set_order_created_at(order, created_at):
    """Backdate the order's creation by rewriting its first log entry's
    timestamp (queryset update() writes straight to the DB, so any
    auto-set timestamp logic is skipped)."""
    OrderLogEntry.objects.filter(id=order.log_entries.first().id).update(timestamp=created_at)
def test_orders_wont_get_expired_too_soon(two_hour_reservation, order_with_products):
    """An order still inside the payment window keeps WAITING state."""
    set_order_created_at(order_with_products, get_order_not_expired_time())

    management.call_command(COMMAND_NAME)

    assert two_hour_reservation.state == Reservation.WAITING_FOR_PAYMENT
    assert order_with_products.state == Order.WAITING
def test_orders_get_expired(two_hour_reservation, order_with_products):
    """An order past the payment window is expired and its reservation cancelled."""
    set_order_created_at(order_with_products, get_order_expired_time())

    management.call_command(COMMAND_NAME)

    two_hour_reservation.refresh_from_db()
    order_with_products.refresh_from_db()
    assert two_hour_reservation.state == Reservation.CANCELLED
    assert order_with_products.state == Order.EXPIRED
def test_requested_reservation_orders_get_expired(order_with_products):
    """A staff-confirmed ("requested") order expires after the longer
    hours-based deadline instead of the minutes-based one."""
    order_with_products.is_requested_order = True
    order_with_products.confirmed_by_staff_at = now() - timedelta(hours=REQUESTED_PAYMENT_WAITING_HOURS + 1)
    order_with_products.save()

    management.call_command(COMMAND_NAME)

    order_with_products.refresh_from_db()
    assert order_with_products.state == Order.EXPIRED
    assert order_with_products.reservation.state == Reservation.CANCELLED
@pytest.mark.parametrize('order_state', (Order.CANCELLED, Order.REJECTED, Order.CONFIRMED))
def test_other_than_waiting_order_wont_get_expired(two_hour_reservation, order_state):
    """Orders not in WAITING state are left untouched by the expiry command."""
    order = OrderFactory(reservation=two_hour_reservation, state=order_state)
    reservation_state = order.reservation.state
    set_order_created_at(order, get_order_expired_time())

    management.call_command(COMMAND_NAME)

    two_hour_reservation.refresh_from_db()
    order.refresh_from_db()
    assert two_hour_reservation.state == reservation_state
    assert order.state == order_state
| StarcoderdataPython |
3300606 | """ Quiz: readable_timedelta
Write a function named readable_timedelta. The function should take one argument, an integer days, and return a string that says how many weeks and days that is. For example, calling the function and printing the result like this:
print(readable_timedelta(10))
should output the following:
1 week(s) and 3 day(s).
"""
# write your function here
def readable_timedelta(days):
    """Express *days* as full weeks plus the leftover days."""
    weeks, remainder = divmod(days, 7)
    return "{} week(s) and {} day(s).".format(weeks, remainder)
# test your function
print(readable_timedelta(10))
| StarcoderdataPython |
67025 | # generate_javascript_layers.py
# Script that generates a javascript file named layers.js
# This is done via a list of hardcoded layers to translate into the correct format
# The file thus generated is then served to clients to be used by the javascript interface,
# allowing easier maintenance of the software
# Built-in activation functions - see keras/activations.py
# some of them can take additional options
#class Options(object):
# type =
from keras_layers import *
def _get_js_varname(layer):
return 'layer'+layer
def _get_js_param_varname(layer, param):
    """JS variable name for parameter *param* of *layer*."""
    layer_var = _get_js_varname(layer)
    return '{}Param{}'.format(layer_var, param)
def generate_rawlayer(layer, group):
    """Emit the JS declaration of the KerasLayer object for *layer* in *group*."""
    varname = _get_js_varname(layer)
    # The searchTerm assignment was disabled in the original version and is
    # intentionally kept out of the generated output.
    declaration = "var {} = new KerasLayer('{}','{}')\n".format(varname, layer, group)
    return declaration
def generate_unimp(layer, paramName, paramData):
    """Fallback generator: declare a bare KerasParameter for parameter types
    that have no dedicated JS widget yet, and attach it to its layer."""
    layer_var = _get_js_varname(layer)
    param_var = _get_js_param_varname(layer, paramName)
    parts = [
        'var {} = new KerasParameter("{}"); // Unimplemented Type\n'.format(param_var, paramName),
        '{}.addParameter({});\n'.format(layer_var, param_var),
    ]
    return ''.join(parts)
def generate_int(layer, paramName, paramData):
    """Emit JS for an integer parameter widget.

    Declares a KerasParameterNumeric, registers it on its layer, sets the
    step (from paramData['step'], defaulting to 1), and translates any
    'conditions' entries ('<=N', '>=N', '<N', '>N') into min/max constraints.
    """
    layerVarname = _get_js_varname(layer)
    paramVarname = _get_js_param_varname(layer, paramName)
    # 'out' instead of the original 'str', which shadowed the builtin.
    out = 'var ' + paramVarname + ' = new KerasParameterNumeric("' + paramName + '");\n'
    out += layerVarname + '.addParameter(' + paramVarname + ');\n'
    if 'step' in paramData:
        out += paramVarname + '.setStep(' + paramData['step'] + ');\n'
    else:
        out += paramVarname + '.setStep(1);\n'
    if 'conditions' in paramData:
        for condition in paramData['conditions']:
            # Two-character operators must be tested before one-character
            # ones, otherwise '<=' would be misread as '<'.
            if condition[0:2] == '<=':
                out += paramVarname + '.setMaximum(' + condition[2:] + ',true);\n'
            elif condition[0:2] == '>=':
                out += paramVarname + '.setMinimum(' + condition[2:] + ',true);\n'
            elif condition[0] == '<':
                out += paramVarname + '.setMaximum(' + condition[1:] + ',false);\n'  # TODO: invalid if equal
            elif condition[0] == '>':
                out += paramVarname + '.setMinimum(' + condition[1:] + ',false);\n'  # TODO: invalid if equal
    return out
def generate_list(layer, paramName, paramData):
    """Emit JS for a list/choice parameter.

    Declares a KerasParameterList, registers it on its layer, then declares
    and attaches one KerasParameterListElement per entry of paramData['list'].
    """
    layerVarname = _get_js_varname(layer)
    listName = _get_js_param_varname(layer, paramName)
    listElements = paramData['list']
    # 'out' instead of the original 'str', which shadowed the builtin.
    out = 'var ' + listName + ' = new KerasParameterList("' + paramName + '");\n'
    out += layerVarname + '.addParameter(' + listName + ');\n'
    for listEl in listElements:
        listElVarname = listName + listEl
        out += 'var ' + listElVarname + ' = new KerasParameterListElement("' + listEl + '");\n'
        out += listName + '.addListElement(' + listElVarname + ');\n'
    return out
def generate_float(layer, paramName, paramData):
    """Emit JS for a float parameter: reuse the integer widget, mark it as
    float-typed, and default the step to 0.1 when none was given."""
    str = generate_int(layer, paramName, paramData)
    str += _get_js_param_varname(layer, paramName) + '.setTypeToFloat(true);\n'
    if 'step' not in paramData:
        # generate_int already emitted setStep(1); this later setStep(0.1)
        # presumably overrides it on the JS side -- TODO confirm.
        str += _get_js_param_varname(layer, paramName) + '.setStep(0.1);\n'
    return str
def generate_tuple_int(layer, paramName, paramData):
    """Emit JS for an integer-tuple parameter.

    Declares a KerasParameterTuple, applies the optional fixed/max/min element
    counts from paramData, then registers the parameter on its layer.
    (The stray debug print of the generated JS has been removed.)
    """
    layerVarname = _get_js_varname(layer)
    paramVarname = _get_js_param_varname(layer, paramName)
    s = 'var ' + paramVarname + ' = new KerasParameterTuple("'+paramName+'");\n'
    if 'elements_number' in paramData:
        elements_number = paramData['elements_number']
        s += paramVarname + '.setElementsNumber('+str(elements_number)+');\n'
    if 'elements_max_number' in paramData:
        elements_max_number = paramData['elements_max_number']
        s += paramVarname + '.setElementsMaxNumber('+str(elements_max_number)+');\n'
    if 'elements_min_number' in paramData:
        elements_min_number = paramData['elements_min_number']
        s += paramVarname + '.setElementsMinNumber('+str(elements_min_number)+');\n'
    s += layerVarname+'.addParameter('+paramVarname+');\n'
    return s
def generate_param_multiplexer(layer, paramName, paramData):
    """Dispatch to the generator registered for paramData['type'] in
    generate_types; warn and fall back to the 'unimplemented' stub otherwise."""
    if(paramData['type'] in generate_types):
        return generate_types[paramData['type']](layer, paramName, paramData)
    print("Param not implemented : " + paramData['type'])
    return generate_unimp(layer, paramName, paramData)
# Dispatch table: parameter type (as declared in keras_layers) -> generator
# that renders its JS widget. Types still mapped to generate_unimp fall back
# to a plain, non-editable KerasParameter.
generate_types = {'int': generate_int,
                  'list': generate_list,
                  'boolean': generate_unimp,
                  'float': generate_float,
                  'tuple_int': generate_tuple_int,
                  'string': generate_unimp,
                  'function': generate_unimp,
                  }
def generate_layer(layer, group, parameters = {}):
    """Emit the full JS block for one layer: the KerasLayer declaration plus
    one widget per entry of *parameters*.

    NOTE(review): the mutable default `parameters={}` is shared between
    calls; it is only read here, but a None-plus-fallback default would be
    safer if a generator ever mutates it.
    """
    rs = generate_rawlayer(layer, group)
    for parameter in parameters:
        rs += generate_param_multiplexer(layer, parameter, parameters[parameter])
    return rs
def generate_layers(layer_dict, category_name):
    """Concatenate the JS for every layer in *layer_dict*, all filed under
    *category_name*, with a blank line between layers."""
    rs = ''
    for layer, parameters in layer_dict.items():
        rs += generate_layer(layer, category_name, parameters) + '\n\n'
    return rs
def generate_js():
    """Build the complete layers.js content: the Input/Output pseudo-layers
    first, then the remaining core layers, then every categorised group."""
    rs = '' # return string
    rs += generate_layer('Input', 'Input / Output', keras_core_layers['Input']) + '\n\n'
    rs += generate_layer('Output', 'Input / Output') + '\n\n'
    for layer, parameters in keras_core_layers.items():
        if(layer == 'Input'):
            # Already emitted above with its special group.
            continue
        rs += generate_layer(layer, "Core Layers", parameters) + '\n\n'
    for category_name, layer_dict in keras_layers_categories.items():
        rs += generate_layers(layer_dict, category_name)
    #print(rs)
    return rs
| StarcoderdataPython |
1763620 | <filename>Latest/venv/Lib/site-packages/pyface/tests/test_clipboard.py
from __future__ import absolute_import
import unittest
from ..clipboard import clipboard
class TestObject(object):
    """Attribute bag used as an object payload in the clipboard tests.

    Two instances compare equal when every attribute set on *self* exists on
    *other* with an equal value (attributes only set on *other* are ignored,
    matching the original semantics).
    """

    _MISSING = object()  # sentinel distinguishing "absent" from any real value

    def __init__(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)

    def __eq__(self, other):
        if not isinstance(other, TestObject):
            # Let Python try the reflected comparison / fall back to identity
            # instead of implicitly returning None as the old code did.
            return NotImplemented
        # getattr with a default so a missing attribute means "not equal"
        # rather than raising AttributeError (a bug in the original).
        return all(getattr(other, key, self._MISSING) == value
                   for key, value in self.__dict__.items())
class TestClipboard(unittest.TestCase):
    """Round-trip tests for the pyface system clipboard with text, file and
    arbitrary-object payloads.

    These tests touch the real toolkit clipboard, so they mutate global
    desktop state while running.
    """

    def setUp(self):
        self.clipboard = clipboard

    def test_set_text_data(self):
        # A plain string payload is exposed through the text accessors only.
        # (assertEquals is a deprecated alias removed in Python 3.12; use
        # assertEqual throughout.)
        self.clipboard.data = 'test'
        self.assertTrue(self.clipboard.has_data)
        self.assertEqual(self.clipboard.data_type, 'str')
        self.assertEqual(self.clipboard.data, 'test')
        self.assertTrue(self.clipboard.has_text_data)
        self.assertEqual(self.clipboard.text_data, 'test')
        self.assertFalse(self.clipboard.has_file_data)
        self.assertFalse(self.clipboard.has_object_data)

    def test_set_text_data_unicode(self):
        # Unicode text payloads go through the same accessors as plain text.
        self.clipboard.data = u'test'
        self.assertTrue(self.clipboard.has_data)
        self.assertEqual(self.clipboard.data_type, 'str')
        self.assertEqual(self.clipboard.data, u'test')
        self.assertTrue(self.clipboard.has_text_data)
        self.assertEqual(self.clipboard.text_data, u'test')
        self.assertFalse(self.clipboard.has_file_data)
        self.assertFalse(self.clipboard.has_object_data)

    @unittest.skip('backends not consistent')
    def test_set_file_data(self):
        # File URLs are stored stripped of their scheme prefix.
        self.clipboard.data = ['file:///images']
        self.assertTrue(self.clipboard.has_data)
        self.assertEqual(self.clipboard.data_type, 'file')
        self.assertEqual(self.clipboard.data, ['/images'])
        self.assertTrue(self.clipboard.has_file_data)
        self.assertEqual(self.clipboard.file_data, ['/images'])
        self.assertFalse(self.clipboard.has_text_data)
        self.assertFalse(self.clipboard.has_object_data)

    def test_set_object_data(self):
        # Arbitrary objects round-trip via the object accessors, keyed by type.
        data = TestObject(foo='bar', baz=1)
        self.clipboard.data = data
        self.assertTrue(self.clipboard.has_data)
        self.assertEqual(self.clipboard.data_type, TestObject)
        self.assertEqual(self.clipboard.data, data)
        self.assertTrue(self.clipboard.has_object_data)
        self.assertEqual(self.clipboard.object_type, TestObject)
        self.assertEqual(self.clipboard.object_data, data)
        self.assertFalse(self.clipboard.has_text_data)
        self.assertFalse(self.clipboard.has_file_data)
| StarcoderdataPython |
1681310 | <gh_stars>1-10
from enum import Enum, auto
class RandomStrategy(Enum):
    """Identifies a strategy for generating random values.

    NOTE(review): member names suggest secrets.choice, the secrets RNG, and
    the random module respectively -- confirm against the consuming code.
    """
    SECRETS_CHOICE = auto()
    SECRETS_RANDOM = auto()
    RANDOM_LIB = auto()
| StarcoderdataPython |
1674290 | from hpp.corbaserver.manipulation import Robot, loadServerPlugin, createContext, newProblem, ProblemSolver, ConstraintGraph, Rule, Constraints, CorbaClient
from hpp.gepetto.manipulation import ViewerFactory
import sys, argparse
# parse arguments
defaultContext = "corbaserver"
p = argparse.ArgumentParser (description=
'Initialize demo of Pyrene manipulating a box')
p.add_argument ('--context', type=str, metavar='context',
default=defaultContext,
help="identifier of ProblemSolver instance")
p.add_argument ('--ros-param', type=str, metavar='ros_param',
help="The name of the ROS param containing the URDF.")
args = p.parse_args ()
if args.context != defaultContext:
createContext (args.context)
isSimulation = args.context == "simulation"
Robot.urdfFilename = "package://tiago_data/robots/tiago_pal_hey5.urdf"
Robot.srdfFilename = "package://tiago_data/srdf/pal_hey5_gripper.srdf"
class Driller:
    """Model description of the hand-held driller (free-flying object).

    Attribute-only holder consumed by ViewerFactory.loadRobotModel.
    """
    urdfFilename = "package://gerard_bauzil/urdf/driller_with_qr_drill.urdf"
    srdfFilename = "package://gerard_bauzil/srdf/driller.srdf"
    rootJointType = "freeflyer"
class AircraftSkin:
    """Model description of the aircraft-skin workpiece.

    Root joint is an anchor: the skin is fixed in the world.
    """
    urdfFilename = "package://agimus_demos/urdf/aircraft_skin_with_marker.urdf"
    srdfFilename = "package://agimus_demos/srdf/aircraft_skin_with_marker.srdf"
    rootJointType = "anchor"
## Reduce joint range for security
def shrinkJointRange(robot, ratio):
    """Shrink the position bounds of Tiago's torso/arm/head joints.

    Each affected joint's range is re-centred on its midpoint and its width
    scaled by ``ratio`` (0 < ratio <= 1), as a safety margin for planning.

    robot -- object exposing jointNames, getJointBounds and setJointBounds
    ratio -- fraction of the original range width to keep
    """
    prefix = "tiago/"
    for joint in robot.jointNames:
        # Only Tiago's own joints are affected; skip other robots/objects.
        if not joint.startswith(prefix):
            continue
        # str.startswith accepts a tuple: one test instead of three.
        if not joint[len(prefix):].startswith(("torso", "arm", "head")):
            continue
        bounds = robot.getJointBounds(joint)
        # Only bounded (2-element) ranges can be shrunk; skip unbounded joints.
        if len(bounds) == 2:
            width = bounds[1] - bounds[0]
            mean = .5 * (bounds[1] + bounds[0])
            robot.setJointBounds(joint, [mean - .5 * ratio * width,
                                         mean + .5 * ratio * width])
print("context=" + args.context)
loadServerPlugin (args.context, "manipulation-corba.so")
client = CorbaClient(context=args.context)
client.manipulation.problem.selectProblem (args.context)
robot = Robot("robot", "tiago", rootJointType="planar", client=client)
robot.setJointBounds('tiago/root_joint', [-2, 2, -2, 2])
#robot.insertRobotSRDFModel("tiago", "tiago_data", "schunk", "_gripper")
ps = ProblemSolver(robot)
vf = ViewerFactory(ps)
vf.loadRobotModel (Driller, "driller")
robot.insertRobotSRDFModel("driller", "gerard_bauzil", "qr_drill", "")
robot.setJointBounds('driller/root_joint', [-2, 2, -2, 2, 0, 2])
ps.selectPathValidation("Graph-Dichotomy", 0)
ps.selectPathProjector("Progressive", 0.2)
ps.addPathOptimizer("EnforceTransitionSemantic")
ps.addPathOptimizer("SimpleTimeParameterization")
if isSimulation:
ps.setMaxIterProjection (1)
ps.setParameter("SimpleTimeParameterization/safety", 0.25)
ps.setParameter("SimpleTimeParameterization/order", 2)
ps.setParameter("SimpleTimeParameterization/maxAcceleration", 1.0)
ps.setParameter("ManipulationPlanner/extendStep", 0.7)
#from hpp import Quaternion
#oMsk = (0.10576, -0.0168, 1.6835) + Quaternion().fromRPY(1.8, 0, 0).toTuple()
#oMsk = (0.30576, -0.0138, 1.5835) + Quaternion().fromRPY(1.8, 0, 0).toTuple()
#vf.loadObstacleModel(skinTagUrdf, "skin")
#vf.moveObstacle("skin", oMsk)
vf.loadObjectModel (AircraftSkin, "skin")
#vf.loadRobotModelFromString ("skin", AircraftSkin.rootJointType, AircraftSkin.urdfString, AircraftSkin.srdfString)
#robot.setRootJointPosition("skin", oMsk)
#robot.setJointPosition("skin/root_joint", oMsk)
shrinkJointRange(robot, 0.95)
q0 = robot.getCurrentConfig()
q0[:4] = [0, -0.9, 0, 1]
q0[robot.rankInConfiguration['tiago/torso_lift_joint']] = 0.15
q0[robot.rankInConfiguration['tiago/arm_1_joint']] = 0.10
q0[robot.rankInConfiguration['tiago/arm_2_joint']] = -1.47
q0[robot.rankInConfiguration['tiago/arm_3_joint']] = -0.16
q0[robot.rankInConfiguration['tiago/arm_4_joint']] = 1.87
q0[robot.rankInConfiguration['tiago/arm_5_joint']] = -1.57
q0[robot.rankInConfiguration['tiago/arm_6_joint']] = 0.01
q0[robot.rankInConfiguration['tiago/arm_7_joint']] = 0.00
q0[robot.rankInConfiguration['tiago/hand_thumb_abd_joint']] = 1.5707
q0[robot.rankInConfiguration['tiago/hand_index_abd_joint']] = 0.35
q0[robot.rankInConfiguration['tiago/hand_middle_abd_joint']] = -0.1
q0[robot.rankInConfiguration['tiago/hand_ring_abd_joint']] = -0.2
q0[robot.rankInConfiguration['tiago/hand_little_abd_joint']] = -0.35
def lockJoint(jname, q, cname=None):
    """Create a constant locked-joint constraint for *jname* at its value in *q*.

    jname -- joint name in the robot model
    q     -- full configuration vector to read the joint value from
    cname -- constraint name; defaults to the joint name

    Returns the constraint name.
    """
    cname = jname if cname is None else cname
    start = robot.rankInConfiguration[jname]
    end = start + robot.getJointConfigSize(jname)
    ps.createLockedJoint(cname, jname, q[start:end])
    ps.setConstantRightHandSide(cname, True)
    return cname
# Lock the planar base and all hand joints at their reference values.
ljs = list()
ljs.append(lockJoint("tiago/root_joint", q0))
for n in robot.jointNames:
    if n.startswith('tiago/hand_'):
        ljs.append(lockJoint(n, q0))
# "gaze": keep the driller tag on the camera's optical axis (x/y only).
ps.createPositionConstraint("gaze", "tiago/xtion_rgb_optical_frame", "driller/tag_joint",
                            (0, 0, 0), (0, 0, 0), (True, True, False))
from hpp.corbaserver.manipulation import ConstraintGraphFactory
graph = ConstraintGraph(robot, 'graph')
factory = ConstraintGraphFactory(graph)
factory.setGrippers(["tiago/gripper", "driller/drill_tip", ])
factory.setObjects(["driller", "skin", ],
                   [["driller/handle", ], ["skin/hole", ], ],
                   [[], [], ])
factory.setRules([
    # Tiago always hold the gripper.
    Rule(["tiago/gripper", ], ["driller/handle", ], True), Rule(["tiago/gripper", ], [".*", ], False),
    # Allow to associate drill_tip with skin/hole only.
    Rule(["driller/drill_tip", ], ["driller/handle", ], False), Rule(["driller/drill_tip", ], [".*", ], True), ])
factory.generate()
# Locked joints apply to the whole graph; the gaze constraint only to the
# states where the drill tip approaches or touches the hole.
graph.addConstraints(graph=True, constraints=Constraints(numConstraints=ljs))
for n in ['driller/drill_tip > skin/hole | 0-0_pregrasp', 'tiago/gripper grasps driller/handle : driller/drill_tip grasps skin/hole']:
    graph.addConstraints(node=n, constraints=Constraints(numConstraints=["gaze"]))
graph.initialize()
# Constraint in this state are explicit so ps.setMaxIterProjection(1) should not
# make it fail.
res, q1, err = graph.applyNodeConstraints('tiago/gripper grasps driller/handle', q0)
q1valid, msg = robot.isConfigValid(q1)
if not q1valid:
    print(msg)
assert res
ps.setInitialConfig(q1)
if not isSimulation:
    # Sample a collision-free goal configuration where the drill tip reaches
    # the hole, retrying with random seeds up to 100 times.
    qrand = q1
    for i in range(100):
        q2valid, q2, err = graph.generateTargetConfig('driller/drill_tip > skin/hole | 0-0', q1, qrand)
        if q2valid:
            q2valid, msg = robot.isConfigValid(q2)
        if q2valid:
            break
        qrand = robot.shootRandomConfig()
    assert q2valid
if not isSimulation:
    ps.addGoalConfig(q2)
    ps.solve()
# Display the initial configuration if a viewer is available.
# NOTE(review): bare except deliberately tolerates a missing viewer.
try:
    v = vf.createViewer()
    v(q1)
except:
    pass
| StarcoderdataPython |
3312999 | <reponame>aliyun/dingtalk-sdk<filename>dingtalk/python/alibabacloud_dingtalk/workflow_1_0/models.py
# -*- coding: utf-8 -*-
# This file is auto-generated, don't edit it. Thanks.
from Tea.model import TeaModel
from typing import List, Dict, Any
class SelectOption(TeaModel):
    """One option of a single/multi-select form control (auto-generated wire model)."""

    def __init__(
        self,
        key: str = None,
        value: str = None,
    ):
        # Unique key of this option
        self.key = key
        # Display value of this option
        self.value = value

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.key is not None:
            result['key'] = self.key
        if self.value is not None:
            result['value'] = self.value
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('key') is not None:
            self.key = m.get('key')
        if m.get('value') is not None:
            self.value = m.get('value')
        return self
class FormDataSourceTarget(TeaModel):
    """Target of a form data-source association (auto-generated wire model)."""

    def __init__(
        self,
        app_uuid: str = None,
        app_type: int = None,
        biz_type: str = None,
        form_code: str = None,
    ):
        # appUuid of the application
        self.app_uuid = app_uuid
        # Form type; 0 = workflow form
        self.app_type = app_type
        # Business identifier of the linked form
        self.biz_type = biz_type
        # formCode of the linked form
        self.form_code = form_code

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.app_uuid is not None:
            result['appUuid'] = self.app_uuid
        if self.app_type is not None:
            result['appType'] = self.app_type
        if self.biz_type is not None:
            result['bizType'] = self.biz_type
        if self.form_code is not None:
            result['formCode'] = self.form_code
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('appUuid') is not None:
            self.app_uuid = m.get('appUuid')
        if m.get('appType') is not None:
            self.app_type = m.get('appType')
        if m.get('bizType') is not None:
            self.biz_type = m.get('bizType')
        if m.get('formCode') is not None:
            self.form_code = m.get('formCode')
        return self
class FormDataSource(TeaModel):
    """Data-source association of a form control (auto-generated wire model)."""

    def __init__(
        self,
        type: str = None,
        target: FormDataSourceTarget = None,
    ):
        # Association type; 'form' links another form
        self.type = type
        # Information about the linked form
        self.target = target

    def validate(self):
        if self.target:
            self.target.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.type is not None:
            result['type'] = self.type
        if self.target is not None:
            result['target'] = self.target.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('type') is not None:
            self.type = m.get('type')
        if m.get('target') is not None:
            temp_model = FormDataSourceTarget()
            self.target = temp_model.from_map(m['target'])
        return self
class AvaliableTemplate(TeaModel):
    """A form template usable by a linked-approval control (auto-generated wire model).

    NOTE(review): class name is misspelled ("Avaliable") in the generated SDK;
    kept as-is for API compatibility.
    """

    def __init__(
        self,
        name: str = None,
        process_code: str = None,
    ):
        # Form name
        self.name = name
        # processCode of the form template
        self.process_code = process_code

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.name is not None:
            result['name'] = self.name
        if self.process_code is not None:
            result['processCode'] = self.process_code
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('name') is not None:
            self.name = m.get('name')
        if m.get('processCode') is not None:
            self.process_code = m.get('processCode')
        return self
class FormComponentPropsStatField(TeaModel):
    """Aggregation spec for a child field of a detail control (auto-generated wire model)."""

    def __init__(
        self,
        component_id: str = None,
        label: str = None,
        upper: str = None,
    ):
        # Id of the child component (inside the detail control) to aggregate
        self.component_id = component_id
        # Title of the child component
        self.label = label
        # Whether the money control needs upper-case digits; 1 = no, anything else = yes
        self.upper = upper

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.component_id is not None:
            result['componentId'] = self.component_id
        if self.label is not None:
            result['label'] = self.label
        if self.upper is not None:
            result['upper'] = self.upper
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('componentId') is not None:
            self.component_id = m.get('componentId')
        if m.get('label') is not None:
            self.label = m.get('label')
        if m.get('upper') is not None:
            self.upper = m.get('upper')
        return self
class FormComponentProps(TeaModel):
    """Properties of a single form component (auto-generated wire model).

    Field order in to_map/from_map mirrors the wire schema; do not reorder.
    List-valued keys ('options', 'statField', 'availableTemplates') are always
    emitted/reset, even when the attribute is None — generated behavior.
    """

    def __init__(
        self,
        component_id: str = None,
        label: str = None,
        async_condition: bool = None,
        required: bool = None,
        content: str = None,
        format: str = None,
        upper: str = None,
        unit: str = None,
        placeholder: str = None,
        biz_alias: str = None,
        biz_type: str = None,
        duration: bool = None,
        choice: str = None,
        disabled: bool = None,
        align: str = None,
        invisible: bool = None,
        link: str = None,
        vertical_print: bool = None,
        formula: str = None,
        common_biz_type: str = None,
        options: List[SelectOption] = None,
        print: str = None,
        stat_field: List[FormComponentPropsStatField] = None,
        data_source: FormDataSource = None,
        address_model: str = None,
        multiple: bool = None,
        limit: int = None,
        available_templates: List[AvaliableTemplate] = None,
        table_view_mode: str = None,
    ):
        # Unique id of the component within the form
        self.component_id = component_id
        # Component title
        self.label = label
        # Whether the suite component can be used as a branch-condition field
        self.async_condition = async_condition
        # Whether the field is required
        self.required = required
        # Content of the explanatory-text component
        self.content = content
        # Date/time format
        self.format = format
        # Whether the money control needs upper-case digits; 1 = no, anything else = yes
        self.upper = upper
        # Time unit (days, hours)
        self.unit = unit
        # Input placeholder text
        self.placeholder = placeholder
        # Business alias
        self.biz_alias = biz_alias
        # Business identifier of the suite
        self.biz_type = biz_type
        # Whether the duration is computed automatically
        self.duration = duration
        # Whether the contact control allows multi-select; 1 = multi, 0 = single
        self.choice = choice
        # Whether the field is read-only
        self.disabled = disabled
        # Display position of the text-hint component: top|middle|bottom
        self.align = align
        # Whether the field is hidden
        self.invisible = invisible
        # Link URL of the explanatory-text component
        self.link = link
        # Detail print orientation; False = horizontal, True = vertical
        self.vertical_print = vertical_print
        # Formula
        self.formula = formula
        # Rendering identifier for custom components
        self.common_biz_type = common_biz_type
        # Option list of single/multi-select controls
        self.options = options
        # Whether the field is printed; 1 = print, 0 = don't; printed by default
        self.print = print
        # Aggregated statistics of the detail control's data
        self.stat_field = stat_field
        # Linked data-source configuration
        self.data_source = data_source
        # Address control mode: city = province+city, district = +district, street = +street
        self.address_model = address_model
        # Whether the department control allows multi-select
        self.multiple = multiple
        # Upper limit of the rating control
        self.limit = limit
        # Template whitelist of the linked-approval control
        self.available_templates = available_templates
        # Detail entry mode: table or list
        self.table_view_mode = table_view_mode

    def validate(self):
        if self.options:
            for k in self.options:
                if k:
                    k.validate()
        if self.stat_field:
            for k in self.stat_field:
                if k:
                    k.validate()
        if self.data_source:
            self.data_source.validate()
        if self.available_templates:
            for k in self.available_templates:
                if k:
                    k.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.component_id is not None:
            result['componentId'] = self.component_id
        if self.label is not None:
            result['label'] = self.label
        if self.async_condition is not None:
            result['asyncCondition'] = self.async_condition
        if self.required is not None:
            result['required'] = self.required
        if self.content is not None:
            result['content'] = self.content
        if self.format is not None:
            result['format'] = self.format
        if self.upper is not None:
            result['upper'] = self.upper
        if self.unit is not None:
            result['unit'] = self.unit
        if self.placeholder is not None:
            result['placeholder'] = self.placeholder
        if self.biz_alias is not None:
            result['bizAlias'] = self.biz_alias
        if self.biz_type is not None:
            result['bizType'] = self.biz_type
        if self.duration is not None:
            result['duration'] = self.duration
        if self.choice is not None:
            result['choice'] = self.choice
        if self.disabled is not None:
            result['disabled'] = self.disabled
        if self.align is not None:
            result['align'] = self.align
        if self.invisible is not None:
            result['invisible'] = self.invisible
        if self.link is not None:
            result['link'] = self.link
        if self.vertical_print is not None:
            result['verticalPrint'] = self.vertical_print
        if self.formula is not None:
            result['formula'] = self.formula
        if self.common_biz_type is not None:
            result['commonBizType'] = self.common_biz_type
        result['options'] = []
        if self.options is not None:
            for k in self.options:
                result['options'].append(k.to_map() if k else None)
        if self.print is not None:
            result['print'] = self.print
        result['statField'] = []
        if self.stat_field is not None:
            for k in self.stat_field:
                result['statField'].append(k.to_map() if k else None)
        if self.data_source is not None:
            result['dataSource'] = self.data_source.to_map()
        if self.address_model is not None:
            result['addressModel'] = self.address_model
        if self.multiple is not None:
            result['multiple'] = self.multiple
        if self.limit is not None:
            result['limit'] = self.limit
        result['availableTemplates'] = []
        if self.available_templates is not None:
            for k in self.available_templates:
                result['availableTemplates'].append(k.to_map() if k else None)
        if self.table_view_mode is not None:
            result['tableViewMode'] = self.table_view_mode
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('componentId') is not None:
            self.component_id = m.get('componentId')
        if m.get('label') is not None:
            self.label = m.get('label')
        if m.get('asyncCondition') is not None:
            self.async_condition = m.get('asyncCondition')
        if m.get('required') is not None:
            self.required = m.get('required')
        if m.get('content') is not None:
            self.content = m.get('content')
        if m.get('format') is not None:
            self.format = m.get('format')
        if m.get('upper') is not None:
            self.upper = m.get('upper')
        if m.get('unit') is not None:
            self.unit = m.get('unit')
        if m.get('placeholder') is not None:
            self.placeholder = m.get('placeholder')
        if m.get('bizAlias') is not None:
            self.biz_alias = m.get('bizAlias')
        if m.get('bizType') is not None:
            self.biz_type = m.get('bizType')
        if m.get('duration') is not None:
            self.duration = m.get('duration')
        if m.get('choice') is not None:
            self.choice = m.get('choice')
        if m.get('disabled') is not None:
            self.disabled = m.get('disabled')
        if m.get('align') is not None:
            self.align = m.get('align')
        if m.get('invisible') is not None:
            self.invisible = m.get('invisible')
        if m.get('link') is not None:
            self.link = m.get('link')
        if m.get('verticalPrint') is not None:
            self.vertical_print = m.get('verticalPrint')
        if m.get('formula') is not None:
            self.formula = m.get('formula')
        if m.get('commonBizType') is not None:
            self.common_biz_type = m.get('commonBizType')
        self.options = []
        if m.get('options') is not None:
            for k in m.get('options'):
                temp_model = SelectOption()
                self.options.append(temp_model.from_map(k))
        if m.get('print') is not None:
            self.print = m.get('print')
        self.stat_field = []
        if m.get('statField') is not None:
            for k in m.get('statField'):
                temp_model = FormComponentPropsStatField()
                self.stat_field.append(temp_model.from_map(k))
        if m.get('dataSource') is not None:
            temp_model = FormDataSource()
            self.data_source = temp_model.from_map(m['dataSource'])
        if m.get('addressModel') is not None:
            self.address_model = m.get('addressModel')
        if m.get('multiple') is not None:
            self.multiple = m.get('multiple')
        if m.get('limit') is not None:
            self.limit = m.get('limit')
        self.available_templates = []
        if m.get('availableTemplates') is not None:
            for k in m.get('availableTemplates'):
                temp_model = AvaliableTemplate()
                self.available_templates.append(temp_model.from_map(k))
        if m.get('tableViewMode') is not None:
            self.table_view_mode = m.get('tableViewMode')
        return self
class FormComponent(TeaModel):
    """One component of a form layout; recursive via `children` (auto-generated wire model)."""

    def __init__(
        self,
        component_type: str = None,
        props: FormComponentProps = None,
        children: List['FormComponent'] = None,
    ):
        # Component type
        self.component_type = component_type
        # Component properties
        self.props = props
        # Child components (e.g. the fields inside a detail control)
        self.children = children

    def validate(self):
        if self.props:
            self.props.validate()
        if self.children:
            for k in self.children:
                if k:
                    k.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.component_type is not None:
            result['componentType'] = self.component_type
        if self.props is not None:
            result['props'] = self.props.to_map()
        result['children'] = []
        if self.children is not None:
            for k in self.children:
                result['children'].append(k.to_map() if k else None)
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('componentType') is not None:
            self.component_type = m.get('componentType')
        if m.get('props') is not None:
            temp_model = FormComponentProps()
            self.props = temp_model.from_map(m['props'])
        self.children = []
        if m.get('children') is not None:
            for k in m.get('children'):
                temp_model = FormComponent()
                self.children.append(temp_model.from_map(k))
        return self
class QueryFormInstanceHeaders(TeaModel):
    """HTTP request headers for the QueryFormInstance API (auto-generated wire model)."""

    def __init__(
        self,
        common_headers: Dict[str, str] = None,
        x_acs_dingtalk_access_token: str = None,
    ):
        # Additional common headers
        self.common_headers = common_headers
        # DingTalk access token
        self.x_acs_dingtalk_access_token = x_acs_dingtalk_access_token

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.common_headers is not None:
            result['commonHeaders'] = self.common_headers
        if self.x_acs_dingtalk_access_token is not None:
            result['x-acs-dingtalk-access-token'] = self.x_acs_dingtalk_access_token
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('commonHeaders') is not None:
            self.common_headers = m.get('commonHeaders')
        if m.get('x-acs-dingtalk-access-token') is not None:
            self.x_acs_dingtalk_access_token = m.get('x-acs-dingtalk-access-token')
        return self
class QueryFormInstanceRequest(TeaModel):
    """Request parameters for the QueryFormInstance API (auto-generated wire model)."""

    def __init__(
        self,
        form_instance_id: str = None,
        form_code: str = None,
        app_uuid: str = None,
    ):
        # Form instance id
        self.form_instance_id = form_instance_id
        # Form template code
        self.form_code = form_code
        # App-builder application id
        self.app_uuid = app_uuid

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.form_instance_id is not None:
            result['formInstanceId'] = self.form_instance_id
        if self.form_code is not None:
            result['formCode'] = self.form_code
        if self.app_uuid is not None:
            result['appUuid'] = self.app_uuid
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('formInstanceId') is not None:
            self.form_instance_id = m.get('formInstanceId')
        if m.get('formCode') is not None:
            self.form_code = m.get('formCode')
        if m.get('appUuid') is not None:
            self.app_uuid = m.get('appUuid')
        return self
class QueryFormInstanceResponseBodyFormInstDataList(TeaModel):
    """One component value inside a form instance (auto-generated wire model)."""

    def __init__(
        self,
        component_type: str = None,
        biz_alias: str = None,
        extend_value: str = None,
        label: str = None,
        value: str = None,
        key: str = None,
    ):
        # Component type
        self.component_type = component_type
        # Business alias of the component
        self.biz_alias = biz_alias
        # Extended value of the component
        self.extend_value = extend_value
        # Component label
        self.label = label
        # Component value
        self.value = value
        # Component key
        self.key = key

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.component_type is not None:
            result['componentType'] = self.component_type
        if self.biz_alias is not None:
            result['bizAlias'] = self.biz_alias
        if self.extend_value is not None:
            result['extendValue'] = self.extend_value
        if self.label is not None:
            result['label'] = self.label
        if self.value is not None:
            result['value'] = self.value
        if self.key is not None:
            result['key'] = self.key
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('componentType') is not None:
            self.component_type = m.get('componentType')
        if m.get('bizAlias') is not None:
            self.biz_alias = m.get('bizAlias')
        if m.get('extendValue') is not None:
            self.extend_value = m.get('extendValue')
        if m.get('label') is not None:
            self.label = m.get('label')
        if m.get('value') is not None:
            self.value = m.get('value')
        if m.get('key') is not None:
            self.key = m.get('key')
        return self
class QueryFormInstanceResponseBody(TeaModel):
    """Response body of the QueryFormInstance API (auto-generated wire model)."""

    def __init__(
        self,
        form_instance_id: str = None,
        form_inst_data_list: List[QueryFormInstanceResponseBodyFormInstDataList] = None,
        app_uuid: str = None,
        form_code: str = None,
        title: str = None,
        creator: str = None,
        modifier: str = None,
        create_timestamp: int = None,
        modify_timestamp: int = None,
        out_instance_id: str = None,
        out_biz_code: str = None,
        attributes: Dict[str, Any] = None,
    ):
        # Instance id
        self.form_instance_id = form_instance_id
        # Form data (component values)
        self.form_inst_data_list = form_inst_data_list
        # App-builder application id
        self.app_uuid = app_uuid
        # Form template id
        self.form_code = form_code
        # Form title
        self.title = title
        # Creator
        self.creator = creator
        # Last modifier
        self.modifier = modifier
        # Instance creation timestamp
        self.create_timestamp = create_timestamp
        # Instance last-modification timestamp
        self.modify_timestamp = modify_timestamp
        # External business instance id
        self.out_instance_id = out_instance_id
        # External business code
        self.out_biz_code = out_biz_code
        # Extended attributes
        self.attributes = attributes

    def validate(self):
        if self.form_inst_data_list:
            for k in self.form_inst_data_list:
                if k:
                    k.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.form_instance_id is not None:
            result['formInstanceId'] = self.form_instance_id
        result['formInstDataList'] = []
        if self.form_inst_data_list is not None:
            for k in self.form_inst_data_list:
                result['formInstDataList'].append(k.to_map() if k else None)
        if self.app_uuid is not None:
            result['appUuid'] = self.app_uuid
        if self.form_code is not None:
            result['formCode'] = self.form_code
        if self.title is not None:
            result['title'] = self.title
        if self.creator is not None:
            result['creator'] = self.creator
        if self.modifier is not None:
            result['modifier'] = self.modifier
        if self.create_timestamp is not None:
            result['createTimestamp'] = self.create_timestamp
        if self.modify_timestamp is not None:
            result['modifyTimestamp'] = self.modify_timestamp
        if self.out_instance_id is not None:
            result['outInstanceId'] = self.out_instance_id
        if self.out_biz_code is not None:
            result['outBizCode'] = self.out_biz_code
        if self.attributes is not None:
            result['attributes'] = self.attributes
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('formInstanceId') is not None:
            self.form_instance_id = m.get('formInstanceId')
        self.form_inst_data_list = []
        if m.get('formInstDataList') is not None:
            for k in m.get('formInstDataList'):
                temp_model = QueryFormInstanceResponseBodyFormInstDataList()
                self.form_inst_data_list.append(temp_model.from_map(k))
        if m.get('appUuid') is not None:
            self.app_uuid = m.get('appUuid')
        if m.get('formCode') is not None:
            self.form_code = m.get('formCode')
        if m.get('title') is not None:
            self.title = m.get('title')
        if m.get('creator') is not None:
            self.creator = m.get('creator')
        if m.get('modifier') is not None:
            self.modifier = m.get('modifier')
        if m.get('createTimestamp') is not None:
            self.create_timestamp = m.get('createTimestamp')
        if m.get('modifyTimestamp') is not None:
            self.modify_timestamp = m.get('modifyTimestamp')
        if m.get('outInstanceId') is not None:
            self.out_instance_id = m.get('outInstanceId')
        if m.get('outBizCode') is not None:
            self.out_biz_code = m.get('outBizCode')
        if m.get('attributes') is not None:
            self.attributes = m.get('attributes')
        return self
class QueryFormInstanceResponse(TeaModel):
    """Full HTTP response of the QueryFormInstance API (auto-generated wire model)."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: QueryFormInstanceResponseBody = None,
    ):
        # Response headers
        self.headers = headers
        # Response body
        self.body = body

    def validate(self):
        # Both headers and body are required on a response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.headers is not None:
            result['headers'] = self.headers
        if self.body is not None:
            result['body'] = self.body.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            temp_model = QueryFormInstanceResponseBody()
            self.body = temp_model.from_map(m['body'])
        return self
class ProcessForecastHeaders(TeaModel):
    """HTTP request headers for the ProcessForecast API (auto-generated wire model)."""

    def __init__(
        self,
        common_headers: Dict[str, str] = None,
        x_acs_dingtalk_access_token: str = None,
    ):
        # Additional common headers
        self.common_headers = common_headers
        # DingTalk access token
        self.x_acs_dingtalk_access_token = x_acs_dingtalk_access_token

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.common_headers is not None:
            result['commonHeaders'] = self.common_headers
        if self.x_acs_dingtalk_access_token is not None:
            result['x-acs-dingtalk-access-token'] = self.x_acs_dingtalk_access_token
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('commonHeaders') is not None:
            self.common_headers = m.get('commonHeaders')
        if m.get('x-acs-dingtalk-access-token') is not None:
            self.x_acs_dingtalk_access_token = m.get('x-acs-dingtalk-access-token')
        return self
class ProcessForecastRequestFormComponentValuesDetailsDetails(TeaModel):
    """Innermost (leaf) component value of a nested detail row (auto-generated wire model)."""

    def __init__(
        self,
        id: str = None,
        biz_alias: str = None,
        name: str = None,
        value: str = None,
        ext_value: str = None,
        component_type: str = None,
    ):
        # Component id
        self.id = id
        # Component business alias
        self.biz_alias = biz_alias
        # Component name
        self.name = name
        # Component value
        self.value = value
        # Component extended value
        self.ext_value = ext_value
        # Component type
        self.component_type = component_type

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.id is not None:
            result['id'] = self.id
        if self.biz_alias is not None:
            result['bizAlias'] = self.biz_alias
        if self.name is not None:
            result['name'] = self.name
        if self.value is not None:
            result['value'] = self.value
        if self.ext_value is not None:
            result['extValue'] = self.ext_value
        if self.component_type is not None:
            result['componentType'] = self.component_type
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('id') is not None:
            self.id = m.get('id')
        if m.get('bizAlias') is not None:
            self.biz_alias = m.get('bizAlias')
        if m.get('name') is not None:
            self.name = m.get('name')
        if m.get('value') is not None:
            self.value = m.get('value')
        if m.get('extValue') is not None:
            self.ext_value = m.get('extValue')
        if m.get('componentType') is not None:
            self.component_type = m.get('componentType')
        return self
class ProcessForecastRequestFormComponentValuesDetails(TeaModel):
    """One detail-row component value, possibly with nested leaf values (auto-generated wire model)."""

    def __init__(
        self,
        id: str = None,
        biz_alias: str = None,
        name: str = None,
        value: str = None,
        ext_value: str = None,
        details: List[ProcessForecastRequestFormComponentValuesDetailsDetails] = None,
    ):
        # Component id
        self.id = id
        # Component business alias
        self.biz_alias = biz_alias
        # Component name
        self.name = name
        # Component value
        self.value = value
        # Component extended value
        self.ext_value = ext_value
        # Nested leaf component values
        self.details = details

    def validate(self):
        if self.details:
            for k in self.details:
                if k:
                    k.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.id is not None:
            result['id'] = self.id
        if self.biz_alias is not None:
            result['bizAlias'] = self.biz_alias
        if self.name is not None:
            result['name'] = self.name
        if self.value is not None:
            result['value'] = self.value
        if self.ext_value is not None:
            result['extValue'] = self.ext_value
        result['details'] = []
        if self.details is not None:
            for k in self.details:
                result['details'].append(k.to_map() if k else None)
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('id') is not None:
            self.id = m.get('id')
        if m.get('bizAlias') is not None:
            self.biz_alias = m.get('bizAlias')
        if m.get('name') is not None:
            self.name = m.get('name')
        if m.get('value') is not None:
            self.value = m.get('value')
        if m.get('extValue') is not None:
            self.ext_value = m.get('extValue')
        self.details = []
        if m.get('details') is not None:
            for k in m.get('details'):
                temp_model = ProcessForecastRequestFormComponentValuesDetailsDetails()
                self.details.append(temp_model.from_map(k))
        return self
class ProcessForecastRequestFormComponentValues(TeaModel):
    """Top-level component value of the forecast form data (auto-generated wire model)."""

    def __init__(
        self,
        id: str = None,
        biz_alias: str = None,
        name: str = None,
        value: str = None,
        ext_value: str = None,
        component_type: str = None,
        details: List[ProcessForecastRequestFormComponentValuesDetails] = None,
    ):
        # Component id
        self.id = id
        # Component business alias
        self.biz_alias = biz_alias
        # Component name
        self.name = name
        # Component value
        self.value = value
        # Component extended value
        self.ext_value = ext_value
        # Component type
        self.component_type = component_type
        # Detail-row values (for detail components)
        self.details = details

    def validate(self):
        if self.details:
            for k in self.details:
                if k:
                    k.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.id is not None:
            result['id'] = self.id
        if self.biz_alias is not None:
            result['bizAlias'] = self.biz_alias
        if self.name is not None:
            result['name'] = self.name
        if self.value is not None:
            result['value'] = self.value
        if self.ext_value is not None:
            result['extValue'] = self.ext_value
        if self.component_type is not None:
            result['componentType'] = self.component_type
        result['details'] = []
        if self.details is not None:
            for k in self.details:
                result['details'].append(k.to_map() if k else None)
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('id') is not None:
            self.id = m.get('id')
        if m.get('bizAlias') is not None:
            self.biz_alias = m.get('bizAlias')
        if m.get('name') is not None:
            self.name = m.get('name')
        if m.get('value') is not None:
            self.value = m.get('value')
        if m.get('extValue') is not None:
            self.ext_value = m.get('extValue')
        if m.get('componentType') is not None:
            self.component_type = m.get('componentType')
        self.details = []
        if m.get('details') is not None:
            for k in m.get('details'):
                temp_model = ProcessForecastRequestFormComponentValuesDetails()
                self.details.append(temp_model.from_map(k))
        return self
class ProcessForecastRequest(TeaModel):
    """Request parameters for the ProcessForecast API (auto-generated wire model)."""

    def __init__(
        self,
        ding_corp_id: str = None,
        ding_org_id: int = None,
        ding_isv_org_id: int = None,
        ding_suite_key: str = None,
        ding_token_grant_type: int = None,
        request_id: str = None,
        process_code: str = None,
        dept_id: int = None,
        user_id: str = None,
        form_component_values: List[ProcessForecastRequestFormComponentValues] = None,
    ):
        # Internal DingTalk routing fields
        self.ding_corp_id = ding_corp_id
        self.ding_org_id = ding_org_id
        self.ding_isv_org_id = ding_isv_org_id
        self.ding_suite_key = ding_suite_key
        self.ding_token_grant_type = ding_token_grant_type
        self.request_id = request_id
        # Unique code of the approval workflow
        self.process_code = process_code
        # Department id
        self.dept_id = dept_id
        # userId of the approval initiator
        self.user_id = user_id
        # Form data content: list of component values
        self.form_component_values = form_component_values

    def validate(self):
        if self.form_component_values:
            for k in self.form_component_values:
                if k:
                    k.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        if self.ding_corp_id is not None:
            result['dingCorpId'] = self.ding_corp_id
        if self.ding_org_id is not None:
            result['dingOrgId'] = self.ding_org_id
        if self.ding_isv_org_id is not None:
            result['dingIsvOrgId'] = self.ding_isv_org_id
        if self.ding_suite_key is not None:
            result['dingSuiteKey'] = self.ding_suite_key
        if self.ding_token_grant_type is not None:
            result['dingTokenGrantType'] = self.ding_token_grant_type
        if self.request_id is not None:
            result['RequestId'] = self.request_id
        if self.process_code is not None:
            result['processCode'] = self.process_code
        if self.dept_id is not None:
            result['deptId'] = self.dept_id
        if self.user_id is not None:
            result['userId'] = self.user_id
        result['formComponentValues'] = []
        if self.form_component_values is not None:
            for k in self.form_component_values:
                result['formComponentValues'].append(k.to_map() if k else None)
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('dingCorpId') is not None:
            self.ding_corp_id = m.get('dingCorpId')
        if m.get('dingOrgId') is not None:
            self.ding_org_id = m.get('dingOrgId')
        if m.get('dingIsvOrgId') is not None:
            self.ding_isv_org_id = m.get('dingIsvOrgId')
        if m.get('dingSuiteKey') is not None:
            self.ding_suite_key = m.get('dingSuiteKey')
        if m.get('dingTokenGrantType') is not None:
            self.ding_token_grant_type = m.get('dingTokenGrantType')
        if m.get('RequestId') is not None:
            self.request_id = m.get('RequestId')
        if m.get('processCode') is not None:
            self.process_code = m.get('processCode')
        if m.get('deptId') is not None:
            self.dept_id = m.get('deptId')
        if m.get('userId') is not None:
            self.user_id = m.get('userId')
        self.form_component_values = []
        if m.get('formComponentValues') is not None:
            for k in m.get('formComponentValues'):
                temp_model = ProcessForecastRequestFormComponentValues()
                self.form_component_values.append(temp_model.from_map(k))
        return self
class ProcessForecastResponseBodyResultWorkflowActivityRulesWorkflowActorActorSelectionRangeApprovals(TeaModel):
    """A designated approver inside an actor selection range."""

    def __init__(
        self,
        work_no: str = None,
        user_name: str = None,
    ):
        # Employee userId.
        self.work_no = work_no
        # Employee display name.
        self.user_name = user_name

    def validate(self):
        pass

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        pairs = (('workNo', self.work_no), ('userName', self.user_name))
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (('work_no', 'workNo'), ('user_name', 'userName')):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class ProcessForecastResponseBodyResultWorkflowActivityRulesWorkflowActorActorSelectionRangeLabels(TeaModel):
    """A designated approver role inside an actor selection range."""

    def __init__(
        self,
        labels: str = None,
        label_names: str = None,
    ):
        # Role id(s).
        self.labels = labels
        # Role name(s).
        self.label_names = label_names

    def validate(self):
        pass

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        pairs = (('labels', self.labels), ('labelNames', self.label_names))
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (('labels', 'labels'), ('label_names', 'labelNames')):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class ProcessForecastResponseBodyResultWorkflowActivityRulesWorkflowActorActorSelectionRange(TeaModel):
    """The pool of members and roles an approval node may be assigned from."""

    def __init__(
        self,
        approvals: List[ProcessForecastResponseBodyResultWorkflowActivityRulesWorkflowActorActorSelectionRangeApprovals] = None,
        labels: List[ProcessForecastResponseBodyResultWorkflowActivityRulesWorkflowActorActorSelectionRangeLabels] = None,
    ):
        # Designated approver members.
        self.approvals = approvals
        # Designated approver roles.
        self.labels = labels

    def validate(self):
        for item in self.approvals or []:
            if item:
                item.validate()
        for item in self.labels or []:
            if item:
                item.validate()

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        result = dict()
        # Both list keys are always emitted, even when empty.
        result['approvals'] = []
        if self.approvals is not None:
            result['approvals'] = [item.to_map() if item else None
                                   for item in self.approvals]
        result['labels'] = []
        if self.labels is not None:
            result['labels'] = [item.to_map() if item else None
                                for item in self.labels]
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        self.approvals = []
        raw_approvals = m.get('approvals')
        if raw_approvals is not None:
            for item in raw_approvals:
                self.approvals.append(
                    ProcessForecastResponseBodyResultWorkflowActivityRulesWorkflowActorActorSelectionRangeApprovals().from_map(item))
        self.labels = []
        raw_labels = m.get('labels')
        if raw_labels is not None:
            for item in raw_labels:
                self.labels.append(
                    ProcessForecastResponseBodyResultWorkflowActivityRulesWorkflowActorActorSelectionRangeLabels().from_map(item))
        return self
class ProcessForecastResponseBodyResultWorkflowActivityRulesWorkflowActor(TeaModel):
    """The operator configuration attached to an approval-flow node."""

    def __init__(
        self,
        actor_key: str = None,
        actor_type: str = None,
        actor_selection_type: str = None,
        actor_selection_range: ProcessForecastResponseBodyResultWorkflowActivityRulesWorkflowActorActorSelectionRange = None,
        allowed_multi: bool = None,
        approval_type: str = None,
        approval_method: str = None,
        actor_activate_type: str = None,
        required: bool = None,
    ):
        # Key of the node operator.
        self.actor_key = actor_key
        # Type of the node operator.
        self.actor_type = actor_type
        # Type of the operator selection range.
        self.actor_selection_type = actor_selection_type
        # The operator selection range itself (nested model).
        self.actor_selection_range = actor_selection_range
        # Whether multiple operators may be selected, or only a single one.
        self.allowed_multi = allowed_multi
        # Approval type of the node.
        self.approval_type = approval_type
        # Approval method of the node.
        self.approval_method = approval_method
        # Activation type of the node.
        self.actor_activate_type = actor_activate_type
        # Whether this approver node must be filled in when submitting.
        self.required = required

    def validate(self):
        if self.actor_selection_range:
            self.actor_selection_range.validate()

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        # actor_selection_range is the one nested model; serialize it inline.
        selection_range = (self.actor_selection_range.to_map()
                           if self.actor_selection_range is not None else None)
        pairs = (
            ('actorKey', self.actor_key),
            ('actorType', self.actor_type),
            ('actorSelectionType', self.actor_selection_type),
            ('actorSelectionRange', selection_range),
            ('allowedMulti', self.allowed_multi),
            ('approvalType', self.approval_type),
            ('approvalMethod', self.approval_method),
            ('actorActivateType', self.actor_activate_type),
            ('required', self.required),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('actor_key', 'actorKey'),
            ('actor_type', 'actorType'),
            ('actor_selection_type', 'actorSelectionType'),
            ('allowed_multi', 'allowedMulti'),
            ('approval_type', 'approvalType'),
            ('approval_method', 'approvalMethod'),
            ('actor_activate_type', 'actorActivateType'),
            ('required', 'required'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        if m.get('actorSelectionRange') is not None:
            self.actor_selection_range = (
                ProcessForecastResponseBodyResultWorkflowActivityRulesWorkflowActorActorSelectionRange().from_map(
                    m['actorSelectionRange']))
        return self
class ProcessForecastResponseBodyResultWorkflowActivityRules(TeaModel):
    """A single node rule in the forecasted approval workflow."""

    def __init__(
        self,
        activity_id: str = None,
        prev_activity_id: str = None,
        activity_name: str = None,
        activity_type: str = None,
        is_target_select: bool = None,
        workflow_actor: ProcessForecastResponseBodyResultWorkflowActivityRulesWorkflowActor = None,
    ):
        # Node id.
        self.activity_id = activity_id
        # Id of the preceding node in the flow.
        self.prev_activity_id = prev_activity_id
        # Node name.
        self.activity_name = activity_name
        # Rule type.
        self.activity_type = activity_type
        # Whether this is a self-selected approval node.
        self.is_target_select = is_target_select
        # Operator information of the node (nested model).
        self.workflow_actor = workflow_actor

    def validate(self):
        if self.workflow_actor:
            self.workflow_actor.validate()

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        actor = self.workflow_actor.to_map() if self.workflow_actor is not None else None
        pairs = (
            ('activityId', self.activity_id),
            ('prevActivityId', self.prev_activity_id),
            ('activityName', self.activity_name),
            ('activityType', self.activity_type),
            ('isTargetSelect', self.is_target_select),
            ('workflowActor', actor),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('activity_id', 'activityId'),
            ('prev_activity_id', 'prevActivityId'),
            ('activity_name', 'activityName'),
            ('activity_type', 'activityType'),
            ('is_target_select', 'isTargetSelect'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        if m.get('workflowActor') is not None:
            self.workflow_actor = (
                ProcessForecastResponseBodyResultWorkflowActivityRulesWorkflowActor().from_map(
                    m['workflowActor']))
        return self
class ProcessForecastResponseBodyResultWorkflowForecastNodes(TeaModel):
    """A node hit by the forecast, with the edge it leaves through."""

    def __init__(
        self,
        activity_id: str = None,
        out_id: str = None,
    ):
        # Node id.
        self.activity_id = activity_id
        # Id of the node's outgoing edge.
        self.out_id = out_id

    def validate(self):
        pass

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        pairs = (('activityId', self.activity_id), ('outId', self.out_id))
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (('activity_id', 'activityId'), ('out_id', 'outId')):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class ProcessForecastResponseBodyResult(TeaModel):
    """Outcome of the workflow forecast: node rules plus the predicted path."""

    def __init__(
        self,
        is_forecast_success: bool = None,
        process_code: str = None,
        user_id: str = None,
        process_id: int = None,
        is_static_workflow: bool = None,
        workflow_activity_rules: List[ProcessForecastResponseBodyResultWorkflowActivityRules] = None,
        workflow_forecast_nodes: List[ProcessForecastResponseBodyResultWorkflowForecastNodes] = None,
    ):
        # Whether the forecast succeeded.
        self.is_forecast_success = is_forecast_success
        # Process code.
        self.process_code = process_code
        # User id.
        self.user_id = user_id
        # Process id.
        self.process_id = process_id
        # Whether the workflow is static.
        self.is_static_workflow = is_static_workflow
        self.workflow_activity_rules = workflow_activity_rules
        self.workflow_forecast_nodes = workflow_forecast_nodes

    def validate(self):
        for item in self.workflow_activity_rules or []:
            if item:
                item.validate()
        for item in self.workflow_forecast_nodes or []:
            if item:
                item.validate()

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        result = dict()
        for key, value in (
            ('isForecastSuccess', self.is_forecast_success),
            ('processCode', self.process_code),
            ('userId', self.user_id),
            ('processId', self.process_id),
            ('isStaticWorkflow', self.is_static_workflow),
        ):
            if value is not None:
                result[key] = value
        # List keys are always emitted, even when empty.
        result['workflowActivityRules'] = []
        if self.workflow_activity_rules is not None:
            result['workflowActivityRules'] = [
                item.to_map() if item else None
                for item in self.workflow_activity_rules
            ]
        result['workflowForecastNodes'] = []
        if self.workflow_forecast_nodes is not None:
            result['workflowForecastNodes'] = [
                item.to_map() if item else None
                for item in self.workflow_forecast_nodes
            ]
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('is_forecast_success', 'isForecastSuccess'),
            ('process_code', 'processCode'),
            ('user_id', 'userId'),
            ('process_id', 'processId'),
            ('is_static_workflow', 'isStaticWorkflow'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        self.workflow_activity_rules = []
        raw_rules = m.get('workflowActivityRules')
        if raw_rules is not None:
            for item in raw_rules:
                self.workflow_activity_rules.append(
                    ProcessForecastResponseBodyResultWorkflowActivityRules().from_map(item))
        self.workflow_forecast_nodes = []
        raw_nodes = m.get('workflowForecastNodes')
        if raw_nodes is not None:
            for item in raw_nodes:
                self.workflow_forecast_nodes.append(
                    ProcessForecastResponseBodyResultWorkflowForecastNodes().from_map(item))
        return self
class ProcessForecastResponseBody(TeaModel):
    """Response body wrapping the forecast result."""

    def __init__(
        self,
        result: ProcessForecastResponseBodyResult = None,
    ):
        # The forecast result (nested model).
        self.result = result

    def validate(self):
        if self.result:
            self.result.validate()

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        body = dict()
        if self.result is not None:
            body['result'] = self.result.to_map()
        return body

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('result') is not None:
            self.result = ProcessForecastResponseBodyResult().from_map(m['result'])
        return self
class ProcessForecastResponse(TeaModel):
    """Full forecast response: transport headers plus the parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: ProcessForecastResponseBody = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both parts are mandatory on a complete response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = ProcessForecastResponseBody().from_map(m['body'])
        return self
class GrantCspaceAuthorizationHeaders(TeaModel):
    """Request headers for the GrantCspaceAuthorization call."""

    def __init__(
        self,
        common_headers: Dict[str, str] = None,
        x_acs_dingtalk_access_token: str = None,
    ):
        self.common_headers = common_headers
        self.x_acs_dingtalk_access_token = x_acs_dingtalk_access_token

    def validate(self):
        pass

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        pairs = (
            ('commonHeaders', self.common_headers),
            ('x-acs-dingtalk-access-token', self.x_acs_dingtalk_access_token),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('common_headers', 'commonHeaders'),
            ('x_acs_dingtalk_access_token', 'x-acs-dingtalk-access-token'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class GrantCspaceAuthorizationRequest(TeaModel):
    """Request model for granting a user temporary cspace permission."""

    def __init__(
        self,
        space_id: str = None,
        type: str = None,
        user_id: str = None,
        duration_seconds: int = None,
        ding_corp_id: str = None,
        ding_org_id: int = None,
        ding_isv_org_id: int = None,
        ding_suite_key: str = None,
        ding_token_grant_type: int = None,
    ):
        # Approval space id.
        self.space_id = space_id
        # Permission type.  NOTE: parameter name shadows the builtin `type`,
        # kept for wire/interface compatibility with the generated API.
        self.type = type
        # User id.
        self.user_id = user_id
        # Permission validity period, in seconds.
        self.duration_seconds = duration_seconds
        self.ding_corp_id = ding_corp_id
        self.ding_org_id = ding_org_id
        self.ding_isv_org_id = ding_isv_org_id
        self.ding_suite_key = ding_suite_key
        self.ding_token_grant_type = ding_token_grant_type

    def validate(self):
        pass

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        pairs = (
            ('spaceId', self.space_id),
            ('type', self.type),
            ('userId', self.user_id),
            ('durationSeconds', self.duration_seconds),
            ('dingCorpId', self.ding_corp_id),
            ('dingOrgId', self.ding_org_id),
            ('dingIsvOrgId', self.ding_isv_org_id),
            ('dingSuiteKey', self.ding_suite_key),
            ('dingTokenGrantType', self.ding_token_grant_type),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('space_id', 'spaceId'),
            ('type', 'type'),
            ('user_id', 'userId'),
            ('duration_seconds', 'durationSeconds'),
            ('ding_corp_id', 'dingCorpId'),
            ('ding_org_id', 'dingOrgId'),
            ('ding_isv_org_id', 'dingIsvOrgId'),
            ('ding_suite_key', 'dingSuiteKey'),
            ('ding_token_grant_type', 'dingTokenGrantType'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class GrantCspaceAuthorizationResponse(TeaModel):
    """Response for GrantCspaceAuthorization; carries only transport headers."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
    ):
        self.headers = headers

    def validate(self):
        # Headers are mandatory on a complete response.
        self.validate_required(self.headers, 'headers')

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        return self
class QueryAllProcessInstancesHeaders(TeaModel):
    """Request headers for the QueryAllProcessInstances call."""

    def __init__(
        self,
        common_headers: Dict[str, str] = None,
        x_acs_dingtalk_access_token: str = None,
    ):
        self.common_headers = common_headers
        self.x_acs_dingtalk_access_token = x_acs_dingtalk_access_token

    def validate(self):
        pass

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        pairs = (
            ('commonHeaders', self.common_headers),
            ('x-acs-dingtalk-access-token', self.x_acs_dingtalk_access_token),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('common_headers', 'commonHeaders'),
            ('x_acs_dingtalk_access_token', 'x-acs-dingtalk-access-token'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class QueryAllProcessInstancesRequest(TeaModel):
    """Request model for paging through all approval process instances."""

    def __init__(
        self,
        next_token: str = None,
        max_results: int = None,
        start_time_in_mills: int = None,
        end_time_in_mills: int = None,
        process_code: str = None,
        app_uuid: str = None,
    ):
        # Paging cursor.
        self.next_token = next_token
        # Page size.
        self.max_results = max_results
        # Start time (epoch millis).
        self.start_time_in_mills = start_time_in_mills
        # End time (epoch millis).
        self.end_time_in_mills = end_time_in_mills
        # Template code.
        self.process_code = process_code
        # Application code.
        self.app_uuid = app_uuid

    def validate(self):
        pass

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        pairs = (
            ('nextToken', self.next_token),
            ('maxResults', self.max_results),
            ('startTimeInMills', self.start_time_in_mills),
            ('endTimeInMills', self.end_time_in_mills),
            ('processCode', self.process_code),
            ('appUuid', self.app_uuid),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('next_token', 'nextToken'),
            ('max_results', 'maxResults'),
            ('start_time_in_mills', 'startTimeInMills'),
            ('end_time_in_mills', 'endTimeInMills'),
            ('process_code', 'processCode'),
            ('app_uuid', 'appUuid'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class QueryAllProcessInstancesResponseBodyResultListFormComponentValues(TeaModel):
    """A single form-component value attached to a process instance."""

    def __init__(
        self,
        name: str = None,
        id: str = None,
        value: str = None,
        ext_value: str = None,
    ):
        # Component name.
        self.name = name
        # Component id.  NOTE: parameter name shadows the builtin `id`,
        # kept for wire/interface compatibility with the generated API.
        self.id = id
        # Component value.
        self.value = value
        # Extended component value.
        self.ext_value = ext_value

    def validate(self):
        pass

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        pairs = (
            ('name', self.name),
            ('id', self.id),
            ('value', self.value),
            ('extValue', self.ext_value),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('name', 'name'),
            ('id', 'id'),
            ('value', 'value'),
            ('ext_value', 'extValue'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class QueryAllProcessInstancesResponseBodyResultList(TeaModel):
    """One approval process instance in the paged listing."""

    def __init__(
        self,
        process_instance_id: str = None,
        main_process_instance_id: str = None,
        finish_time: int = None,
        attached_process_instance_ids: str = None,
        business_id: str = None,
        title: str = None,
        originator_dept_id: str = None,
        result: str = None,
        create_time: int = None,
        originator_userid: str = None,
        status: str = None,
        form_component_values: List[QueryAllProcessInstancesResponseBodyResultListFormComponentValues] = None,
    ):
        # Process instance id.
        self.process_instance_id = process_instance_id
        # Main-form instance id.
        self.main_process_instance_id = main_process_instance_id
        # Time the approval finished.
        self.finish_time = finish_time
        # Attached-form information.
        self.attached_process_instance_ids = attached_process_instance_ids
        # Approval form number.
        self.business_id = business_id
        # Approval form title.
        self.title = title
        # Department id of the originator.
        self.originator_dept_id = originator_dept_id
        # Approval result.
        self.result = result
        # Time the approval form was created.
        self.create_time = create_time
        # userId of the originator.
        self.originator_userid = originator_userid
        # Approval form status.
        self.status = status
        self.form_component_values = form_component_values

    def validate(self):
        for item in self.form_component_values or []:
            if item:
                item.validate()

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        out = dict()
        for key, value in (
            ('processInstanceId', self.process_instance_id),
            ('mainProcessInstanceId', self.main_process_instance_id),
            ('finishTime', self.finish_time),
            ('attachedProcessInstanceIds', self.attached_process_instance_ids),
            ('businessId', self.business_id),
            ('title', self.title),
            ('originatorDeptId', self.originator_dept_id),
            ('result', self.result),
            ('createTime', self.create_time),
            ('originatorUserid', self.originator_userid),
            ('status', self.status),
        ):
            if value is not None:
                out[key] = value
        # The component list key is always emitted, even when empty.
        out['formComponentValues'] = []
        if self.form_component_values is not None:
            out['formComponentValues'] = [
                item.to_map() if item else None
                for item in self.form_component_values
            ]
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('process_instance_id', 'processInstanceId'),
            ('main_process_instance_id', 'mainProcessInstanceId'),
            ('finish_time', 'finishTime'),
            ('attached_process_instance_ids', 'attachedProcessInstanceIds'),
            ('business_id', 'businessId'),
            ('title', 'title'),
            ('originator_dept_id', 'originatorDeptId'),
            ('result', 'result'),
            ('create_time', 'createTime'),
            ('originator_userid', 'originatorUserid'),
            ('status', 'status'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        self.form_component_values = []
        raw = m.get('formComponentValues')
        if raw is not None:
            for item in raw:
                self.form_component_values.append(
                    QueryAllProcessInstancesResponseBodyResultListFormComponentValues().from_map(item))
        return self
class QueryAllProcessInstancesResponseBodyResult(TeaModel):
    """Paged result container for the process-instance listing."""

    def __init__(
        self,
        next_token: str = None,
        has_more: bool = None,
        max_results: int = None,
        list: List[QueryAllProcessInstancesResponseBodyResultList] = None,
    ):
        # Cursor for fetching the next page.
        self.next_token = next_token
        # Whether more data is available.
        self.has_more = has_more
        # Page size / total count.
        self.max_results = max_results
        # NOTE: parameter name shadows the builtin `list`; kept for
        # wire/interface compatibility with the generated API.
        self.list = list

    def validate(self):
        for item in self.list or []:
            if item:
                item.validate()

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        out = dict()
        for key, value in (
            ('nextToken', self.next_token),
            ('hasMore', self.has_more),
            ('maxResults', self.max_results),
        ):
            if value is not None:
                out[key] = value
        # The list key is always emitted, even when empty.
        out['list'] = []
        if self.list is not None:
            out['list'] = [item.to_map() if item else None for item in self.list]
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('next_token', 'nextToken'),
            ('has_more', 'hasMore'),
            ('max_results', 'maxResults'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        self.list = []
        raw = m.get('list')
        if raw is not None:
            for item in raw:
                self.list.append(
                    QueryAllProcessInstancesResponseBodyResultList().from_map(item))
        return self
class QueryAllProcessInstancesResponseBody(TeaModel):
    """Response body wrapping the paged process-instance result."""

    def __init__(
        self,
        result: QueryAllProcessInstancesResponseBodyResult = None,
    ):
        # The paged result (nested model).
        self.result = result

    def validate(self):
        if self.result:
            self.result.validate()

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        body = dict()
        if self.result is not None:
            body['result'] = self.result.to_map()
        return body

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('result') is not None:
            self.result = QueryAllProcessInstancesResponseBodyResult().from_map(m['result'])
        return self
class QueryAllProcessInstancesResponse(TeaModel):
    """Full listing response: transport headers plus the parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: QueryAllProcessInstancesResponseBody = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both parts are mandatory on a complete response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = QueryAllProcessInstancesResponseBody().from_map(m['body'])
        return self
class QueryAllFormInstancesHeaders(TeaModel):
    """Request headers for the QueryAllFormInstances call."""

    def __init__(
        self,
        common_headers: Dict[str, str] = None,
        x_acs_dingtalk_access_token: str = None,
    ):
        self.common_headers = common_headers
        self.x_acs_dingtalk_access_token = x_acs_dingtalk_access_token

    def validate(self):
        pass

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        pairs = (
            ('commonHeaders', self.common_headers),
            ('x-acs-dingtalk-access-token', self.x_acs_dingtalk_access_token),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('common_headers', 'commonHeaders'),
            ('x_acs_dingtalk_access_token', 'x-acs-dingtalk-access-token'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class QueryAllFormInstancesRequest(TeaModel):
    """Request model for paging through all form instances."""

    def __init__(
        self,
        next_token: str = None,
        max_results: int = None,
        app_uuid: str = None,
        form_code: str = None,
    ):
        # Paging cursor; pass empty or None on the first call.
        self.next_token = next_token
        # Page size.
        self.max_results = max_results
        # App build id.
        self.app_uuid = app_uuid
        # Form template id.
        self.form_code = form_code

    def validate(self):
        pass

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        pairs = (
            ('nextToken', self.next_token),
            ('maxResults', self.max_results),
            ('appUuid', self.app_uuid),
            ('formCode', self.form_code),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('next_token', 'nextToken'),
            ('max_results', 'maxResults'),
            ('app_uuid', 'appUuid'),
            ('form_code', 'formCode'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class QueryAllFormInstancesResponseBodyResultValuesFormInstDataList(TeaModel):
    """A single component entry inside a form instance."""

    def __init__(
        self,
        component_type: str = None,
        biz_alias: str = None,
        extend_value: str = None,
        label: str = None,
        value: str = None,
        key: str = None,
    ):
        # Component type.
        self.component_type = component_type
        # Component business alias.
        self.biz_alias = biz_alias
        # Extended component data.
        self.extend_value = extend_value
        # Component label (display name).
        self.label = label
        # Data entered into the component.
        self.value = value
        # Unique component id.
        self.key = key

    def validate(self):
        pass

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        pairs = (
            ('componentType', self.component_type),
            ('bizAlias', self.biz_alias),
            ('extendValue', self.extend_value),
            ('label', self.label),
            ('value', self.value),
            ('key', self.key),
        )
        return {key: value for key, value in pairs if value is not None}

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('component_type', 'componentType'),
            ('biz_alias', 'bizAlias'),
            ('extend_value', 'extendValue'),
            ('label', 'label'),
            ('value', 'value'),
            ('key', 'key'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        return self
class QueryAllFormInstancesResponseBodyResultValues(TeaModel):
    """One form instance in the paged listing, with its component data."""

    def __init__(
        self,
        form_instance_id: str = None,
        app_uuid: str = None,
        form_code: str = None,
        title: str = None,
        creator: str = None,
        modifier: str = None,
        create_timestamp: int = None,
        modify_timestamp: int = None,
        out_instance_id: str = None,
        out_biz_code: str = None,
        attributes: Dict[str, Any] = None,
        form_inst_data_list: List[QueryAllFormInstancesResponseBodyResultValuesFormInstDataList] = None,
    ):
        # Form instance id.
        self.form_instance_id = form_instance_id
        # App build id.
        self.app_uuid = app_uuid
        # Form template code.
        self.form_code = form_code
        # Title.
        self.title = title
        # Creator.
        self.creator = creator
        # Last modifier.
        self.modifier = modifier
        # Creation timestamp.
        self.create_timestamp = create_timestamp
        # Modification timestamp.
        self.modify_timestamp = modify_timestamp
        # External instance code.
        self.out_instance_id = out_instance_id
        # External business code.
        self.out_biz_code = out_biz_code
        # Extension attributes.
        self.attributes = attributes
        # Form instance data entries.
        self.form_inst_data_list = form_inst_data_list

    def validate(self):
        for item in self.form_inst_data_list or []:
            if item:
                item.validate()

    def to_map(self):
        cached = super().to_map()
        if cached is not None:
            return cached
        out = dict()
        for key, value in (
            ('formInstanceId', self.form_instance_id),
            ('appUuid', self.app_uuid),
            ('formCode', self.form_code),
            ('title', self.title),
            ('creator', self.creator),
            ('modifier', self.modifier),
            ('createTimestamp', self.create_timestamp),
            ('modifyTimestamp', self.modify_timestamp),
            ('outInstanceId', self.out_instance_id),
            ('outBizCode', self.out_biz_code),
            ('attributes', self.attributes),
        ):
            if value is not None:
                out[key] = value
        # The data-list key is always emitted, even when empty.
        out['formInstDataList'] = []
        if self.form_inst_data_list is not None:
            out['formInstDataList'] = [
                item.to_map() if item else None
                for item in self.form_inst_data_list
            ]
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('form_instance_id', 'formInstanceId'),
            ('app_uuid', 'appUuid'),
            ('form_code', 'formCode'),
            ('title', 'title'),
            ('creator', 'creator'),
            ('modifier', 'modifier'),
            ('create_timestamp', 'createTimestamp'),
            ('modify_timestamp', 'modifyTimestamp'),
            ('out_instance_id', 'outInstanceId'),
            ('out_biz_code', 'outBizCode'),
            ('attributes', 'attributes'),
        ):
            value = m.get(key)
            if value is not None:
                setattr(self, attr, value)
        self.form_inst_data_list = []
        raw = m.get('formInstDataList')
        if raw is not None:
            for item in raw:
                self.form_inst_data_list.append(
                    QueryAllFormInstancesResponseBodyResultValuesFormInstDataList().from_map(item))
        return self
class QueryAllFormInstancesResponseBodyResult(TeaModel):
    """Paged result container for QueryAllFormInstances."""

    def __init__(
        self,
        next_token: str = None,
        has_more: bool = None,
        max_results: int = None,
        values: List[QueryAllFormInstancesResponseBodyResultValues] = None,
    ):
        # Cursor for fetching the next page.
        self.next_token = next_token
        # Whether more data is available.
        self.has_more = has_more
        # Page size.
        self.max_results = max_results
        # Form instances on this page.
        self.values = values

    def validate(self):
        for value in (self.values or []):
            if value:
                value.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in (
            ('next_token', 'nextToken'),
            ('has_more', 'hasMore'),
            ('max_results', 'maxResults'),
        ):
            field = getattr(self, attr)
            if field is not None:
                result[key] = field
        result['values'] = []
        if self.values is not None:
            for value in self.values:
                result['values'].append(value.to_map() if value else None)
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('next_token', 'nextToken'),
            ('has_more', 'hasMore'),
            ('max_results', 'maxResults'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        self.values = []
        if m.get('values') is not None:
            for value in m.get('values'):
                self.values.append(
                    QueryAllFormInstancesResponseBodyResultValues().from_map(value))
        return self
class QueryAllFormInstancesResponseBody(TeaModel):
    """Response body wrapping the paged QueryAllFormInstances result."""

    def __init__(
        self,
        result: QueryAllFormInstancesResponseBodyResult = None,
    ):
        # Paged result payload.
        self.result = result

    def validate(self):
        if self.result:
            self.result.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = dict()
        if self.result is not None:
            out['result'] = self.result.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        payload = m.get('result')
        if payload is not None:
            self.result = QueryAllFormInstancesResponseBodyResult().from_map(payload)
        return self
class QueryAllFormInstancesResponse(TeaModel):
    """Full API response: transport headers plus the parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: QueryAllFormInstancesResponseBody = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both members are mandatory on a decoded response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        body_map = m.get('body')
        if body_map is not None:
            self.body = QueryAllFormInstancesResponseBody().from_map(body_map)
        return self
class QueryFormByBizTypeHeaders(TeaModel):
    """Request headers for the QueryFormByBizType API."""

    def __init__(
        self,
        common_headers: Dict[str, str] = None,
        x_acs_dingtalk_access_token: str = None,
    ):
        # Common header key/value pairs forwarded verbatim.
        self.common_headers = common_headers
        # Access token for the DingTalk OpenAPI gateway.
        self.x_acs_dingtalk_access_token = x_acs_dingtalk_access_token

    def validate(self):
        # Plain header fields carry no constraints.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = dict()
        for attr, key in (
            ('common_headers', 'commonHeaders'),
            ('x_acs_dingtalk_access_token', 'x-acs-dingtalk-access-token'),
        ):
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('common_headers', 'commonHeaders'),
            ('x_acs_dingtalk_access_token', 'x-acs-dingtalk-access-token'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class QueryFormByBizTypeRequest(TeaModel):
    """Request payload for QueryFormByBizType."""

    def __init__(
        self,
        app_uuid: str = None,
        biz_types: List[str] = None,
    ):
        # Builder app uuid.
        self.app_uuid = app_uuid
        # Business identifiers of the form templates to query.
        self.biz_types = biz_types

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = dict()
        for attr, key in (('app_uuid', 'appUuid'), ('biz_types', 'bizTypes')):
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (('app_uuid', 'appUuid'), ('biz_types', 'bizTypes')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class QueryFormByBizTypeResponseBodyResult(TeaModel):
    """A single form-template record returned by QueryFormByBizType."""

    def __init__(
        self,
        creator: str = None,
        app_uuid: str = None,
        form_code: str = None,
        form_uuid: str = None,
        name: str = None,
        memo: str = None,
        owner_id: str = None,
        app_type: int = None,
        biz_type: str = None,
        status: str = None,
        create_time: int = None,
        modifed_time: int = None,
        content: str = None,
    ):
        # Creator.
        self.creator = creator
        # Builder app uuid.
        self.app_uuid = app_uuid
        # Template code.
        self.form_code = form_code
        # Form uuid.
        self.form_uuid = form_uuid
        # Template name.
        self.name = name
        # Template description.
        self.memo = memo
        # Data owner id.
        self.owner_id = owner_id
        # Form type: 0 = workflow form, 1 = data form.
        self.app_type = app_type
        # Business identifier.
        self.biz_type = biz_type
        # Template status.
        self.status = status
        # Creation time.
        self.create_time = create_time
        # Modification time.
        self.modifed_time = modifed_time
        # Form component description.
        self.content = content

    # (python attribute, wire key) pairs, in wire order.
    def _field_pairs(self):
        return (
            ('creator', 'creator'),
            ('app_uuid', 'appUuid'),
            ('form_code', 'formCode'),
            ('form_uuid', 'formUuid'),
            ('name', 'name'),
            ('memo', 'memo'),
            ('owner_id', 'ownerId'),
            ('app_type', 'appType'),
            ('biz_type', 'bizType'),
            ('status', 'status'),
            ('create_time', 'createTime'),
            ('modifed_time', 'modifedTime'),
            ('content', 'content'),
        )

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = dict()
        for attr, key in self._field_pairs():
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._field_pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class QueryFormByBizTypeResponseBody(TeaModel):
    """Response body holding the list of matched form templates."""

    def __init__(
        self,
        result: List[QueryFormByBizTypeResponseBodyResult] = None,
    ):
        # Template list.
        self.result = result

    def validate(self):
        for item in (self.result or []):
            if item:
                item.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = dict()
        out['result'] = []
        if self.result is not None:
            for item in self.result:
                out['result'].append(item.to_map() if item else None)
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        self.result = []
        if m.get('result') is not None:
            for item in m.get('result'):
                self.result.append(QueryFormByBizTypeResponseBodyResult().from_map(item))
        return self
class QueryFormByBizTypeResponse(TeaModel):
    """Full API response: transport headers plus the parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: QueryFormByBizTypeResponseBody = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both members are mandatory on a decoded response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        body_map = m.get('body')
        if body_map is not None:
            self.body = QueryFormByBizTypeResponseBody().from_map(body_map)
        return self
class FormCreateHeaders(TeaModel):
    """Request headers for the FormCreate API."""

    def __init__(
        self,
        common_headers: Dict[str, str] = None,
        x_acs_dingtalk_access_token: str = None,
    ):
        # Common header key/value pairs forwarded verbatim.
        self.common_headers = common_headers
        # Access token for the DingTalk OpenAPI gateway.
        self.x_acs_dingtalk_access_token = x_acs_dingtalk_access_token

    def validate(self):
        # Plain header fields carry no constraints.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = dict()
        for attr, key in (
            ('common_headers', 'commonHeaders'),
            ('x_acs_dingtalk_access_token', 'x-acs-dingtalk-access-token'),
        ):
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('common_headers', 'commonHeaders'),
            ('x_acs_dingtalk_access_token', 'x-acs-dingtalk-access-token'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class FormCreateRequestTemplateConfig(TeaModel):
    """Template configuration flags attached to a FormCreate request."""

    def __init__(
        self,
        disable_stop_process_button: bool = None,
        hidden: bool = None,
        disable_delete_process: bool = None,
        disable_form_edit: bool = None,
        disable_resubmit: bool = None,
        disable_homepage: bool = None,
        dir_id: str = None,
        origin_dir_id: str = None,
    ):
        # Disable the stop-process button.
        self.disable_stop_process_button = disable_stop_process_button
        # Hide the template inside the approval scene.
        self.hidden = hidden
        # Disable the template-delete button.
        self.disable_delete_process = disable_delete_process
        # Disable form editing.
        self.disable_form_edit = disable_form_edit
        # Disable resubmission.
        self.disable_resubmit = disable_resubmit
        # Whether the template is visible on the homepage workbench.
        self.disable_homepage = disable_homepage
        # Template directory id after the update.
        self.dir_id = dir_id
        # Source template directory id.
        self.origin_dir_id = origin_dir_id

    # (python attribute, wire key) pairs, in wire order.
    def _field_pairs(self):
        return (
            ('disable_stop_process_button', 'disableStopProcessButton'),
            ('hidden', 'hidden'),
            ('disable_delete_process', 'disableDeleteProcess'),
            ('disable_form_edit', 'disableFormEdit'),
            ('disable_resubmit', 'disableResubmit'),
            ('disable_homepage', 'disableHomepage'),
            ('dir_id', 'dirId'),
            ('origin_dir_id', 'originDirId'),
        )

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = dict()
        for attr, key in self._field_pairs():
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._field_pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class FormCreateRequest(TeaModel):
    """Request payload for creating or updating a form template."""

    def __init__(
        self,
        ding_corp_id: str = None,
        ding_org_id: int = None,
        ding_isv_org_id: int = None,
        ding_suite_key: str = None,
        ding_token_grant_type: int = None,
        request_id: str = None,
        process_code: str = None,
        name: str = None,
        description: str = None,
        form_components: List[FormComponent] = None,
        template_config: FormCreateRequestTemplateConfig = None,
    ):
        self.ding_corp_id = ding_corp_id
        self.ding_org_id = ding_org_id
        self.ding_isv_org_id = ding_isv_org_id
        self.ding_suite_key = ding_suite_key
        self.ding_token_grant_type = ding_token_grant_type
        self.request_id = request_id
        self.process_code = process_code
        # Form template name.
        self.name = name
        # Form template description.
        self.description = description
        # Form component list.
        self.form_components = form_components
        # Template configuration.
        self.template_config = template_config

    # Scalar (python attribute, wire key) pairs, in wire order.
    # Note the legacy capitalised 'RequestId' key.
    def _scalar_pairs(self):
        return (
            ('ding_corp_id', 'dingCorpId'),
            ('ding_org_id', 'dingOrgId'),
            ('ding_isv_org_id', 'dingIsvOrgId'),
            ('ding_suite_key', 'dingSuiteKey'),
            ('ding_token_grant_type', 'dingTokenGrantType'),
            ('request_id', 'RequestId'),
            ('process_code', 'processCode'),
            ('name', 'name'),
            ('description', 'description'),
        )

    def validate(self):
        for component in (self.form_components or []):
            if component:
                component.validate()
        if self.template_config:
            self.template_config.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        result = dict()
        for attr, key in self._scalar_pairs():
            value = getattr(self, attr)
            if value is not None:
                result[key] = value
        result['formComponents'] = []
        if self.form_components is not None:
            for component in self.form_components:
                result['formComponents'].append(component.to_map() if component else None)
        if self.template_config is not None:
            result['templateConfig'] = self.template_config.to_map()
        return result

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._scalar_pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        self.form_components = []
        if m.get('formComponents') is not None:
            for component in m.get('formComponents'):
                self.form_components.append(FormComponent().from_map(component))
        if m.get('templateConfig') is not None:
            self.template_config = FormCreateRequestTemplateConfig().from_map(m['templateConfig'])
        return self
class FormCreateResponseBodyResult(TeaModel):
    """Result payload of FormCreate: the saved template's process code."""

    def __init__(
        self,
        process_code: str = None,
    ):
        # Process code of the saved or updated form template.
        self.process_code = process_code

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = dict()
        if self.process_code is not None:
            out['processCode'] = self.process_code
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        code = m.get('processCode')
        if code is not None:
            self.process_code = code
        return self
class FormCreateResponseBody(TeaModel):
    """Response body wrapping the FormCreate result."""

    def __init__(
        self,
        result: FormCreateResponseBodyResult = None,
    ):
        # Form template information.
        self.result = result

    def validate(self):
        if self.result:
            self.result.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = dict()
        if self.result is not None:
            out['result'] = self.result.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        payload = m.get('result')
        if payload is not None:
            self.result = FormCreateResponseBodyResult().from_map(payload)
        return self
class FormCreateResponse(TeaModel):
    """Full API response: transport headers plus the parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: FormCreateResponseBody = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both members are mandatory on a decoded response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = dict()
        if self.headers is not None:
            out['headers'] = self.headers
        if self.body is not None:
            out['body'] = self.body.to_map()
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        body_map = m.get('body')
        if body_map is not None:
            self.body = FormCreateResponseBody().from_map(body_map)
        return self
class QuerySchemaByProcessCodeHeaders(TeaModel):
    """Request headers for the QuerySchemaByProcessCode API."""

    def __init__(
        self,
        common_headers: Dict[str, str] = None,
        x_acs_dingtalk_access_token: str = None,
    ):
        # Common header key/value pairs forwarded verbatim.
        self.common_headers = common_headers
        # Access token for the DingTalk OpenAPI gateway.
        self.x_acs_dingtalk_access_token = x_acs_dingtalk_access_token

    def validate(self):
        # Plain header fields carry no constraints.
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = dict()
        for attr, key in (
            ('common_headers', 'commonHeaders'),
            ('x_acs_dingtalk_access_token', 'x-acs-dingtalk-access-token'),
        ):
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (
            ('common_headers', 'commonHeaders'),
            ('x_acs_dingtalk_access_token', 'x-acs-dingtalk-access-token'),
        ):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class QuerySchemaByProcessCodeRequest(TeaModel):
    """Request payload for QuerySchemaByProcessCode."""

    def __init__(
        self,
        process_code: str = None,
    ):
        # Unique code of the form.
        self.process_code = process_code

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = dict()
        if self.process_code is not None:
            out['processCode'] = self.process_code
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        code = m.get('processCode')
        if code is not None:
            self.process_code = code
        return self
class QuerySchemaByProcessCodeResponseBodyResultSchemaContentItemsPropsStatField(TeaModel):
    """A detail component whose values are summed in the form schema."""

    def __init__(
        self,
        id: str = None,
        label: str = None,
        upper: bool = None,
        unit: str = None,
    ):
        # Id value.
        self.id = id
        # Name.
        self.label = label
        # Uppercase.
        self.upper = upper
        # Unit.
        self.unit = unit

    # (python attribute, wire key) pairs, in wire order.
    def _field_pairs(self):
        return (
            ('id', 'id'),
            ('label', 'label'),
            ('upper', 'upper'),
            ('unit', 'unit'),
        )

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = dict()
        for attr, key in self._field_pairs():
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._field_pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class QuerySchemaByProcessCodeResponseBodyResultSchemaContentItemsPropsObjOptions(TeaModel):
    """A single selector option object in the form schema."""

    def __init__(
        self,
        value: str = None,
    ):
        self.value = value

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = dict()
        if self.value is not None:
            out['value'] = self.value
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        option = m.get('value')
        if option is not None:
            self.value = option
        return self
class QuerySchemaByProcessCodeResponseBodyResultSchemaContentItemsPropsPush(TeaModel):
    """Sync-to-attendance settings for a form component."""

    def __init__(
        self,
        push_switch: int = None,
        push_tag: str = None,
        attendance_rule: int = None,
    ):
        # Switch state (1 = on, 0 = off).
        self.push_switch = push_switch
        # Display name of the state.
        self.push_tag = push_tag
        # Attendance type (1 = leave, 2 = business trip, 3 = overtime, 4 = outing).
        self.attendance_rule = attendance_rule

    # (python attribute, wire key) pairs, in wire order.
    def _field_pairs(self):
        return (
            ('push_switch', 'pushSwitch'),
            ('push_tag', 'pushTag'),
            ('attendance_rule', 'attendanceRule'),
        )

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = dict()
        for attr, key in self._field_pairs():
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._field_pairs():
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class QuerySchemaByProcessCodeResponseBodyResultSchemaContentItemsPropsBehaviorLinkageTargets(TeaModel):
    """A linked target component inside a behavior-linkage rule."""

    def __init__(
        self,
        field_id: str = None,
        behavior: str = None,
    ):
        # Field id.
        self.field_id = field_id
        # Behavior.
        self.behavior = behavior

    def validate(self):
        pass

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = dict()
        for attr, key in (('field_id', 'fieldId'), ('behavior', 'behavior')):
            value = getattr(self, attr)
            if value is not None:
                out[key] = value
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (('field_id', 'fieldId'), ('behavior', 'behavior')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class QuerySchemaByProcessCodeResponseBodyResultSchemaContentItemsPropsBehaviorLinkage(TeaModel):
    """A behavior-linkage rule: a trigger value and its target components."""

    def __init__(
        self,
        value: str = None,
        targets: List[QuerySchemaByProcessCodeResponseBodyResultSchemaContentItemsPropsBehaviorLinkageTargets] = None,
    ):
        # Component value that triggers the rule.
        self.value = value
        # Linked component list.
        self.targets = targets

    def validate(self):
        for target in (self.targets or []):
            if target:
                target.validate()

    def to_map(self):
        _map = super().to_map()
        if _map is not None:
            return _map
        out = dict()
        if self.value is not None:
            out['value'] = self.value
        out['targets'] = []
        if self.targets is not None:
            for target in self.targets:
                out['targets'].append(target.to_map() if target else None)
        return out

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('value') is not None:
            self.value = m.get('value')
        self.targets = []
        if m.get('targets') is not None:
            for target in m.get('targets'):
                self.targets.append(
                    QuerySchemaByProcessCodeResponseBodyResultSchemaContentItemsPropsBehaviorLinkageTargets().from_map(target))
        return self
class QuerySchemaByProcessCodeResponseBodyResultSchemaContentItemsProps(TeaModel):
    def __init__(
        self,
        id: str = None,
        label: str = None,
        biz_alias: str = None,
        required: bool = None,
        placeholder: str = None,
        options: List[str] = None,
        app_id: int = None,
        duration_label: str = None,
        push_to_calendar: int = None,
        align: str = None,
        stat_field: List[QuerySchemaByProcessCodeResponseBodyResultSchemaContentItemsPropsStatField] = None,
        hide_label: bool = None,
        obj_options: List[QuerySchemaByProcessCodeResponseBodyResultSchemaContentItemsPropsObjOptions] = None,
        format: str = None,
        push_to_attendance: bool = None,
        label_editable_freeze: bool = None,
        push: QuerySchemaByProcessCodeResponseBodyResultSchemaContentItemsPropsPush = None,
        common_biz_type: str = None,
        required_editable_freeze: bool = None,
        unit: str = None,
        extract: bool = None,
        link: str = None,
        pay_enable: bool = None,
        hidden: bool = None,
        biz_type: str = None,
        staff_status_enabled: bool = None,
        action_name: str = None,
        attend_type_label: str = None,
        child_field_visible: bool = None,
        not_print: str = None,
        vertical_print: bool = None,
        duration: bool = None,
        holiday_options: str = None,
        use_calendar: bool = None,
        hidden_in_approval_detail: bool = None,
        disabled: bool = None,
        async_condition: bool = None,
        behavior_linkage: List[QuerySchemaByProcessCodeResponseBodyResultSchemaContentItemsPropsBehaviorLinkage] = None,
        show_attend_options: bool = None,
        not_upper: str = None,
        fields_info: str = None,
        e_sign: bool = None,
        main_title: str = None,
        formula: str = None,
        choice: int = None,
    ):
        """Initialize the schema properties of a single form component.

        All parameters default to None; only fields present in the parsed
        schema are expected to be populated by ``from_map``.
        """
        # Component id.
        self.id = id
        # Component name.
        self.label = label
        # Business-defined alias for the component.
        self.biz_alias = biz_alias
        # Whether the field is required.
        self.required = required
        # Placeholder text.
        self.placeholder = placeholder
        # Option list for single-choice selectors.
        self.options = options
        # ISV micro-app appId, used to identify ISV permissions so the ISV
        # can access the matching data.
        self.app_id = app_id
        # Compatibility field.
        self.duration_label = duration_label
        # Whether to push to the admin calendar (DDDateRangeField; 1 = push,
        # 0 = do not push; kept for compatibility).
        self.push_to_calendar = push_to_calendar
        # Textnote alignment: top|middle|bottom.
        self.align = align
        # Detail components whose values should be summed.
        self.stat_field = stat_field
        # Added in overtime suite 4.0: whether the overtime-detail label is hidden.
        self.hide_label = hide_label
        # Option object list, giving callers richer selector operations.
        self.obj_options = obj_options
        # Time format (DDDateField and DDDateRangeField).
        self.format = format
        # Push to attendance, sub type (DDSelectField).
        self.push_to_attendance = push_to_attendance
        # Whether the label is frozen; true means it cannot be modified.
        self.label_editable_freeze = label_editable_freeze
        # Sync-to-attendance settings; indicates whether this is set as
        # employee status.
        self.push = push
        # commonBizType of the common field.
        self.common_biz_type = common_biz_type
        # Whether the required flag is frozen; true means it cannot be modified.
        self.required_editable_freeze = required_editable_freeze
        # Unit attribute of number / date-range components.
        self.unit = unit
        # Whether suite values are flattened.
        self.extract = extract
        # Link URL of the explanatory text.
        self.link = link
        # Whether the field has a payment attribute.
        self.pay_enable = pay_enable
        # Added in overtime suite 4.0: whether the overtime detail is hidden.
        self.hidden = hidden
        # Business suite type.
        self.biz_type = biz_type
        # Whether employee status is enabled.
        self.staff_status_enabled = staff_status_enabled
        # Added in overtime suite 4.0: overtime-detail name.
        self.action_name = action_name
        # Type label for leave / business trip / outing / overtime.
        self.attend_type_label = attend_type_label
        # Visibility of child components inside the suite.
        self.child_field_visible = child_field_visible
        # Whether excluded from printing (1 = not printed, 0 = printed).
        self.not_print = not_print
        # Detail print layout: false = horizontal, true = vertical.
        self.vertical_print = vertical_print
        # Whether duration is computed automatically.
        self.duration = duration
        # Compatibility field for the attendance suite type.
        self.holiday_options = holiday_options
        # Whether the attendance calendar is used.
        self.use_calendar = use_calendar
        # Whether the textnote is hidden on the detail page (true = hidden).
        self.hidden_in_approval_detail = hidden_in_approval_detail
        # Whether the component is editable.
        self.disabled = disabled
        # Whether the suite fetches conditional rules asynchronously
        # (true = enabled, false = disabled).
        self.async_condition = async_condition
        # List of form behavior-linkage rules.
        self.behavior_linkage = behavior_linkage
        # Compatibility field for the attendance suite type.
        self.show_attend_options = show_attend_options
        # Whether uppercase is needed (default yes); 1 = no uppercase,
        # empty or 0 = uppercase.
        self.not_upper = not_upper
        # Stored fields of the associated form.
        self.fields_info = fields_info
        # Flag reserved for eSign (e签宝).
        self.e_sign = e_sign
        # Added in overtime suite 4.0: overtime-detail description.
        self.main_title = main_title
        # Formula.
        self.formula = formula
        # Choice mode for internal contacts: 1 = multi-select, 0 = single-select.
        self.choice = choice
def validate(self):
if self.stat_field:
for k in self.stat_field:
if k:
k.validate()
if self.obj_options:
for k in self.obj_options:
if k:
k.validate()
if self.push:
self.push.validate()
if self.behavior_linkage:
for k in self.behavior_linkage:
if k:
k.validate()
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.id is not None:
result['id'] = self.id
if self.label is not None:
result['label'] = self.label
if self.biz_alias is not None:
result['bizAlias'] = self.biz_alias
if self.required is not None:
result['required'] = self.required
if self.placeholder is not None:
result['placeholder'] = self.placeholder
if self.options is not None:
result['options'] = self.options
if self.app_id is not None:
result['appId'] = self.app_id
if self.duration_label is not None:
result['durationLabel'] = self.duration_label
if self.push_to_calendar is not None:
result['pushToCalendar'] = self.push_to_calendar
if self.align is not None:
result['align'] = self.align
result['statField'] = []
if self.stat_field is not None:
for k in self.stat_field:
result['statField'].append(k.to_map() if k else None)
if self.hide_label is not None:
result['hideLabel'] = self.hide_label
result['objOptions'] = []
if self.obj_options is not None:
for k in self.obj_options:
result['objOptions'].append(k.to_map() if k else None)
if self.format is not None:
result['format'] = self.format
if self.push_to_attendance is not None:
result['pushToAttendance'] = self.push_to_attendance
if self.label_editable_freeze is not None:
result['labelEditableFreeze'] = self.label_editable_freeze
if self.push is not None:
result['push'] = self.push.to_map()
if self.common_biz_type is not None:
result['commonBizType'] = self.common_biz_type
if self.required_editable_freeze is not None:
result['requiredEditableFreeze'] = self.required_editable_freeze
if self.unit is not None:
result['unit'] = self.unit
if self.extract is not None:
result['extract'] = self.extract
if self.link is not None:
result['link'] = self.link
if self.pay_enable is not None:
result['payEnable'] = self.pay_enable
if self.hidden is not None:
result['hidden'] = self.hidden
if self.biz_type is not None:
result['bizType'] = self.biz_type
if self.staff_status_enabled is not None:
result['staffStatusEnabled'] = self.staff_status_enabled
if self.action_name is not None:
result['actionName'] = self.action_name
if self.attend_type_label is not None:
result['attendTypeLabel'] = self.attend_type_label
if self.child_field_visible is not None:
result['childFieldVisible'] = self.child_field_visible
if self.not_print is not None:
result['notPrint'] = self.not_print
if self.vertical_print is not None:
result['verticalPrint'] = self.vertical_print
if self.duration is not None:
result['duration'] = self.duration
if self.holiday_options is not None:
result['holidayOptions'] = self.holiday_options
if self.use_calendar is not None:
result['useCalendar'] = self.use_calendar
if self.hidden_in_approval_detail is not None:
result['hiddenInApprovalDetail'] = self.hidden_in_approval_detail
if self.disabled is not None:
result['disabled'] = self.disabled
if self.async_condition is not None:
result['asyncCondition'] = self.async_condition
result['behaviorLinkage'] = []
if self.behavior_linkage is not None:
for k in self.behavior_linkage:
result['behaviorLinkage'].append(k.to_map() if k else None)
if self.show_attend_options is not None:
result['showAttendOptions'] = self.show_attend_options
if self.not_upper is not None:
result['notUpper'] = self.not_upper
if self.fields_info is not None:
result['fieldsInfo'] = self.fields_info
if self.e_sign is not None:
result['eSign'] = self.e_sign
if self.main_title is not None:
result['mainTitle'] = self.main_title
if self.formula is not None:
result['formula'] = self.formula
if self.choice is not None:
result['choice'] = self.choice
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('id') is not None:
self.id = m.get('id')
if m.get('label') is not None:
self.label = m.get('label')
if m.get('bizAlias') is not None:
self.biz_alias = m.get('bizAlias')
if m.get('required') is not None:
self.required = m.get('required')
if m.get('placeholder') is not None:
self.placeholder = m.get('placeholder')
if m.get('options') is not None:
self.options = m.get('options')
if m.get('appId') is not None:
self.app_id = m.get('appId')
if m.get('durationLabel') is not None:
self.duration_label = m.get('durationLabel')
if m.get('pushToCalendar') is not None:
self.push_to_calendar = m.get('pushToCalendar')
if m.get('align') is not None:
self.align = m.get('align')
self.stat_field = []
if m.get('statField') is not None:
for k in m.get('statField'):
temp_model = QuerySchemaByProcessCodeResponseBodyResultSchemaContentItemsPropsStatField()
self.stat_field.append(temp_model.from_map(k))
if m.get('hideLabel') is not None:
self.hide_label = m.get('hideLabel')
self.obj_options = []
if m.get('objOptions') is not None:
for k in m.get('objOptions'):
temp_model = QuerySchemaByProcessCodeResponseBodyResultSchemaContentItemsPropsObjOptions()
self.obj_options.append(temp_model.from_map(k))
if m.get('format') is not None:
self.format = m.get('format')
if m.get('pushToAttendance') is not None:
self.push_to_attendance = m.get('pushToAttendance')
if m.get('labelEditableFreeze') is not None:
self.label_editable_freeze = m.get('labelEditableFreeze')
if m.get('push') is not None:
temp_model = QuerySchemaByProcessCodeResponseBodyResultSchemaContentItemsPropsPush()
self.push = temp_model.from_map(m['push'])
if m.get('commonBizType') is not None:
self.common_biz_type = m.get('commonBizType')
if m.get('requiredEditableFreeze') is not None:
self.required_editable_freeze = m.get('requiredEditableFreeze')
if m.get('unit') is not None:
self.unit = m.get('unit')
if m.get('extract') is not None:
self.extract = m.get('extract')
if m.get('link') is not None:
self.link = m.get('link')
if m.get('payEnable') is not None:
self.pay_enable = m.get('payEnable')
if m.get('hidden') is not None:
self.hidden = m.get('hidden')
if m.get('bizType') is not None:
self.biz_type = m.get('bizType')
if m.get('staffStatusEnabled') is not None:
self.staff_status_enabled = m.get('staffStatusEnabled')
if m.get('actionName') is not None:
self.action_name = m.get('actionName')
if m.get('attendTypeLabel') is not None:
self.attend_type_label = m.get('attendTypeLabel')
if m.get('childFieldVisible') is not None:
self.child_field_visible = m.get('childFieldVisible')
if m.get('notPrint') is not None:
self.not_print = m.get('notPrint')
if m.get('verticalPrint') is not None:
self.vertical_print = m.get('verticalPrint')
if m.get('duration') is not None:
self.duration = m.get('duration')
if m.get('holidayOptions') is not None:
self.holiday_options = m.get('holidayOptions')
if m.get('useCalendar') is not None:
self.use_calendar = m.get('useCalendar')
if m.get('hiddenInApprovalDetail') is not None:
self.hidden_in_approval_detail = m.get('hiddenInApprovalDetail')
if m.get('disabled') is not None:
self.disabled = m.get('disabled')
if m.get('asyncCondition') is not None:
self.async_condition = m.get('asyncCondition')
self.behavior_linkage = []
if m.get('behaviorLinkage') is not None:
for k in m.get('behaviorLinkage'):
temp_model = QuerySchemaByProcessCodeResponseBodyResultSchemaContentItemsPropsBehaviorLinkage()
self.behavior_linkage.append(temp_model.from_map(k))
if m.get('showAttendOptions') is not None:
self.show_attend_options = m.get('showAttendOptions')
if m.get('notUpper') is not None:
self.not_upper = m.get('notUpper')
if m.get('fieldsInfo') is not None:
self.fields_info = m.get('fieldsInfo')
if m.get('eSign') is not None:
self.e_sign = m.get('eSign')
if m.get('mainTitle') is not None:
self.main_title = m.get('mainTitle')
if m.get('formula') is not None:
self.formula = m.get('formula')
if m.get('choice') is not None:
self.choice = m.get('choice')
return self
class QuerySchemaByProcessCodeResponseBodyResultSchemaContentItems(TeaModel):
    """A single form component: its type plus its property bag."""

    def __init__(
        self,
        component_name: str = None,
        props: QuerySchemaByProcessCodeResponseBodyResultSchemaContentItemsProps = None,
    ):
        # Component (widget) type.
        self.component_name = component_name
        # Component properties.
        self.props = props

    def validate(self):
        if self.props:
            self.props.validate()

    def to_map(self):
        premapped = super().to_map()
        if premapped is not None:
            return premapped
        serialized = dict()
        if self.component_name is not None:
            serialized['componentName'] = self.component_name
        if self.props is not None:
            serialized['props'] = self.props.to_map()
        return serialized

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('componentName') is not None:
            self.component_name = m.get('componentName')
        if m.get('props') is not None:
            self.props = QuerySchemaByProcessCodeResponseBodyResultSchemaContentItemsProps().from_map(m['props'])
        return self
class QuerySchemaByProcessCodeResponseBodyResultSchemaContent(TeaModel):
    """Form schema: title, icon, and the list of form components."""

    def __init__(
        self,
        title: str = None,
        icon: str = None,
        items: List[QuerySchemaByProcessCodeResponseBodyResultSchemaContentItems] = None,
    ):
        # Form name.
        self.title = title
        # Icon.
        self.icon = icon
        # Component list.
        self.items = items

    def validate(self):
        for entry in (self.items or []):
            if entry:
                entry.validate()

    def to_map(self):
        premapped = super().to_map()
        if premapped is not None:
            return premapped
        serialized = dict()
        if self.title is not None:
            serialized['title'] = self.title
        if self.icon is not None:
            serialized['icon'] = self.icon
        # 'items' is always present in the map, even when empty.
        serialized['items'] = [entry.to_map() if entry else None
                               for entry in (self.items or [])]
        return serialized

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('title') is not None:
            self.title = m.get('title')
        if m.get('icon') is not None:
            self.icon = m.get('icon')
        self.items = [
            QuerySchemaByProcessCodeResponseBodyResultSchemaContentItems().from_map(entry)
            for entry in (m.get('items') or [])
        ]
        return self
class QuerySchemaByProcessCodeResponseBodyResult(TeaModel):
    """Form-template details returned for a process code."""

    # (attribute, wire key) pairs serialized before ``schemaContent``.
    _LEADING_FIELDS = (
        ('creator_user_id', 'creatorUserId'),
        ('creator_uid', 'creatorUid'),
        ('app_uuid', 'appUuid'),
        ('form_code', 'formCode'),
        ('form_uuid', 'formUuid'),
        ('name', 'name'),
        ('memo', 'memo'),
        ('owner_id', 'ownerId'),
        ('owner_id_type', 'ownerIdType'),
    )
    # (attribute, wire key) pairs serialized after ``schemaContent``.
    _TRAILING_FIELDS = (
        ('icon', 'icon'),
        ('app_type', 'appType'),
        ('biz_type', 'bizType'),
        ('engine_type', 'engineType'),
        ('status', 'status'),
        ('list_order', 'listOrder'),
        ('custom_setting', 'customSetting'),
        ('proc_type', 'procType'),
        ('visible_range', 'visibleRange'),
        ('gmt_create', 'gmtCreate'),
        ('gmt_modified', 'gmtModified'),
    )

    def __init__(
        self,
        creator_user_id: str = None,
        creator_uid: int = None,
        app_uuid: str = None,
        form_code: str = None,
        form_uuid: str = None,
        name: str = None,
        memo: str = None,
        owner_id: str = None,
        owner_id_type: str = None,
        schema_content: QuerySchemaByProcessCodeResponseBodyResultSchemaContent = None,
        icon: str = None,
        app_type: int = None,
        biz_type: str = None,
        engine_type: int = None,
        status: str = None,
        list_order: int = None,
        custom_setting: str = None,
        proc_type: str = None,
        visible_range: str = None,
        gmt_create: int = None,
        gmt_modified: int = None,
    ):
        # Creator's userId.
        self.creator_user_id = creator_user_id
        # Creator's uid.
        self.creator_uid = creator_uid
        # Form app uuid, or corpId.
        self.app_uuid = app_uuid
        # Unique code of the form.
        self.form_code = form_code
        # Form uuid.
        self.form_uuid = form_uuid
        # Form name.
        self.name = name
        # Description text.
        self.memo = memo
        # Id of the data owner.
        self.owner_id = owner_id
        # Type of the owner id: org (orgId), group (cid), user (uid).
        self.owner_id_type = owner_id_type
        # Form schema details.
        self.schema_content = schema_content
        # Icon.
        self.icon = icon
        # Form type.
        self.app_type = app_type
        # Business meaning of the form.
        self.biz_type = biz_type
        # Engine type: form = 0, page = 1.
        self.engine_type = engine_type
        # Status: PUBLISHED (enabled), INVALID (disabled), SAVED (draft).
        self.status = status
        # Sort order id.
        self.list_order = list_order
        # Custom business settings.
        self.custom_setting = custom_setting
        # Target type: inner, outer, customer.
        self.proc_type = proc_type
        # Visibility range type.
        self.visible_range = visible_range
        # Creation timestamp.
        self.gmt_create = gmt_create
        # Modification timestamp.
        self.gmt_modified = gmt_modified

    def validate(self):
        if self.schema_content:
            self.schema_content.validate()

    def to_map(self):
        premapped = super().to_map()
        if premapped is not None:
            return premapped
        serialized = dict()
        for attr, key in self._LEADING_FIELDS:
            value = getattr(self, attr)
            if value is not None:
                serialized[key] = value
        if self.schema_content is not None:
            serialized['schemaContent'] = self.schema_content.to_map()
        for attr, key in self._TRAILING_FIELDS:
            value = getattr(self, attr)
            if value is not None:
                serialized[key] = value
        return serialized

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._LEADING_FIELDS + self._TRAILING_FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        if m.get('schemaContent') is not None:
            self.schema_content = QuerySchemaByProcessCodeResponseBodyResultSchemaContent().from_map(m['schemaContent'])
        return self
class QuerySchemaByProcessCodeResponseBody(TeaModel):
    """Response body: wraps the result details."""

    def __init__(
        self,
        result: QuerySchemaByProcessCodeResponseBodyResult = None,
    ):
        # Result details of the call.
        self.result = result

    def validate(self):
        if self.result:
            self.result.validate()

    def to_map(self):
        premapped = super().to_map()
        if premapped is not None:
            return premapped
        serialized = dict()
        if self.result is not None:
            serialized['result'] = self.result.to_map()
        return serialized

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('result') is not None:
            self.result = QuerySchemaByProcessCodeResponseBodyResult().from_map(m['result'])
        return self
class QuerySchemaByProcessCodeResponse(TeaModel):
    """Full API response: HTTP headers plus the parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: QuerySchemaByProcessCodeResponseBody = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both headers and body are mandatory on a response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        premapped = super().to_map()
        if premapped is not None:
            return premapped
        serialized = dict()
        if self.headers is not None:
            serialized['headers'] = self.headers
        if self.body is not None:
            serialized['body'] = self.body.to_map()
        return serialized

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = QuerySchemaByProcessCodeResponseBody().from_map(m['body'])
        return self
class StartProcessInstanceHeaders(TeaModel):
    """Request headers for StartProcessInstance, including the access token."""

    def __init__(
        self,
        common_headers: Dict[str, str] = None,
        x_acs_dingtalk_access_token: str = None,
    ):
        self.common_headers = common_headers
        self.x_acs_dingtalk_access_token = x_acs_dingtalk_access_token

    def validate(self):
        pass

    def to_map(self):
        premapped = super().to_map()
        if premapped is not None:
            return premapped
        serialized = dict()
        if self.common_headers is not None:
            serialized['commonHeaders'] = self.common_headers
        if self.x_acs_dingtalk_access_token is not None:
            serialized['x-acs-dingtalk-access-token'] = self.x_acs_dingtalk_access_token
        return serialized

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('commonHeaders') is not None:
            self.common_headers = m.get('commonHeaders')
        if m.get('x-acs-dingtalk-access-token') is not None:
            self.x_acs_dingtalk_access_token = m.get('x-acs-dingtalk-access-token')
        return self
class StartProcessInstanceRequestApprovers(TeaModel):
    """One approval node: the action type and its approver userIds."""

    def __init__(
        self,
        action_type: str = None,
        user_ids: List[str] = None,
    ):
        # Approval type.
        self.action_type = action_type
        # List of approver userIds.
        self.user_ids = user_ids

    def validate(self):
        pass

    def to_map(self):
        premapped = super().to_map()
        if premapped is not None:
            return premapped
        serialized = dict()
        for key, value in (('actionType', self.action_type),
                           ('userIds', self.user_ids)):
            if value is not None:
                serialized[key] = value
        return serialized

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (('actionType', 'action_type'),
                          ('userIds', 'user_ids')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class StartProcessInstanceRequestTargetSelectActioners(TeaModel):
    """Self-chosen actioners for one rule key of a template process."""

    def __init__(
        self,
        actioner_key: str = None,
        actioner_user_ids: List[str] = None,
    ):
        # Rule key of the self-select node.
        self.actioner_key = actioner_key
        # userIds of the chosen actioners.
        self.actioner_user_ids = actioner_user_ids

    def validate(self):
        pass

    def to_map(self):
        premapped = super().to_map()
        if premapped is not None:
            return premapped
        serialized = dict()
        for key, value in (('actionerKey', self.actioner_key),
                           ('actionerUserIds', self.actioner_user_ids)):
            if value is not None:
                serialized[key] = value
        return serialized

    def from_map(self, m: dict = None):
        m = m or dict()
        for key, attr in (('actionerKey', 'actioner_key'),
                          ('actionerUserIds', 'actioner_user_ids')):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class StartProcessInstanceRequestFormComponentValuesDetailsDetails(TeaModel):
    """Innermost (leaf) form-component value entry."""

    # (attribute, wire key) pairs, in serialization order.
    _FIELDS = (
        ('id', 'id'),
        ('biz_alias', 'bizAlias'),
        ('name', 'name'),
        ('value', 'value'),
        ('ext_value', 'extValue'),
        ('component_type', 'componentType'),
    )

    def __init__(
        self,
        id: str = None,
        biz_alias: str = None,
        name: str = None,
        value: str = None,
        ext_value: str = None,
        component_type: str = None,
    ):
        # Component id.
        self.id = id
        # Component alias.
        self.biz_alias = biz_alias
        # Component name.
        self.name = name
        # Component value.
        self.value = value
        # Extended component value.
        self.ext_value = ext_value
        # Component type.
        self.component_type = component_type

    def validate(self):
        pass

    def to_map(self):
        premapped = super().to_map()
        if premapped is not None:
            return premapped
        serialized = dict()
        for attr, key in self._FIELDS:
            value = getattr(self, attr)
            if value is not None:
                serialized[key] = value
        return serialized

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._FIELDS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        return self
class StartProcessInstanceRequestFormComponentValuesDetails(TeaModel):
    """A nested component value, possibly carrying its own sub-details."""

    # (attribute, wire key) pairs, in serialization order.
    _SCALARS = (
        ('id', 'id'),
        ('biz_alias', 'bizAlias'),
        ('name', 'name'),
        ('value', 'value'),
        ('ext_value', 'extValue'),
    )

    def __init__(
        self,
        id: str = None,
        biz_alias: str = None,
        name: str = None,
        value: str = None,
        ext_value: str = None,
        details: List[StartProcessInstanceRequestFormComponentValuesDetailsDetails] = None,
    ):
        # Component id.
        self.id = id
        # Component alias.
        self.biz_alias = biz_alias
        # Component name.
        self.name = name
        # Component value.
        self.value = value
        # Extended component value.
        self.ext_value = ext_value
        self.details = details

    def validate(self):
        for entry in (self.details or []):
            if entry:
                entry.validate()

    def to_map(self):
        premapped = super().to_map()
        if premapped is not None:
            return premapped
        serialized = dict()
        for attr, key in self._SCALARS:
            value = getattr(self, attr)
            if value is not None:
                serialized[key] = value
        # 'details' is always present, even when empty.
        serialized['details'] = [entry.to_map() if entry else None
                                 for entry in (self.details or [])]
        return serialized

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._SCALARS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        self.details = [
            StartProcessInstanceRequestFormComponentValuesDetailsDetails().from_map(entry)
            for entry in (m.get('details') or [])
        ]
        return self
class StartProcessInstanceRequestFormComponentValues(TeaModel):
    """A top-level form-component value, possibly with nested details."""

    # (attribute, wire key) pairs, in serialization order.
    _SCALARS = (
        ('id', 'id'),
        ('biz_alias', 'bizAlias'),
        ('name', 'name'),
        ('value', 'value'),
        ('ext_value', 'extValue'),
        ('component_type', 'componentType'),
    )

    def __init__(
        self,
        id: str = None,
        biz_alias: str = None,
        name: str = None,
        value: str = None,
        ext_value: str = None,
        component_type: str = None,
        details: List[StartProcessInstanceRequestFormComponentValuesDetails] = None,
    ):
        # Component id.
        self.id = id
        # Component alias.
        self.biz_alias = biz_alias
        # Component name.
        self.name = name
        # Component value.
        self.value = value
        # Extended component value.
        self.ext_value = ext_value
        # Component type.
        self.component_type = component_type
        self.details = details

    def validate(self):
        for entry in (self.details or []):
            if entry:
                entry.validate()

    def to_map(self):
        premapped = super().to_map()
        if premapped is not None:
            return premapped
        serialized = dict()
        for attr, key in self._SCALARS:
            value = getattr(self, attr)
            if value is not None:
                serialized[key] = value
        # 'details' is always present, even when empty.
        serialized['details'] = [entry.to_map() if entry else None
                                 for entry in (self.details or [])]
        return serialized

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in self._SCALARS:
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        self.details = [
            StartProcessInstanceRequestFormComponentValuesDetails().from_map(entry)
            for entry in (m.get('details') or [])
        ]
        return self
class StartProcessInstanceRequest(TeaModel):
    """Request payload for starting an approval process instance."""

    # Scalar (attribute, wire key) pairs, grouped by serialization position.
    _SCALARS_HEAD = (
        ('originator_user_id', 'originatorUserId'),
        ('process_code', 'processCode'),
        ('dept_id', 'deptId'),
        ('microapp_agent_id', 'microappAgentId'),
    )
    _SCALARS_CC = (
        ('cc_list', 'ccList'),
        ('cc_position', 'ccPosition'),
    )
    _SCALARS_TAIL = (
        ('request_id', 'RequestId'),
        ('ding_corp_id', 'dingCorpId'),
        ('ding_org_id', 'dingOrgId'),
        ('ding_isv_org_id', 'dingIsvOrgId'),
        ('ding_suite_key', 'dingSuiteKey'),
        ('ding_token_grant_type', 'dingTokenGrantType'),
    )

    def __init__(
        self,
        originator_user_id: str = None,
        process_code: str = None,
        dept_id: int = None,
        microapp_agent_id: int = None,
        approvers: List[StartProcessInstanceRequestApprovers] = None,
        cc_list: List[str] = None,
        cc_position: str = None,
        target_select_actioners: List[StartProcessInstanceRequestTargetSelectActioners] = None,
        form_component_values: List[StartProcessInstanceRequestFormComponentValues] = None,
        request_id: str = None,
        ding_corp_id: str = None,
        ding_org_id: int = None,
        ding_isv_org_id: int = None,
        ding_suite_key: str = None,
        ding_token_grant_type: int = None,
    ):
        # userId of the user who starts the approval.
        self.originator_user_id = originator_user_id
        # Unique code of the approval process.
        self.process_code = process_code
        # Department id.
        self.dept_id = dept_id
        # Micro-app agent id of the organization.
        self.microapp_agent_id = microapp_agent_id
        # Approvers given directly, when no process template is used.
        self.approvers = approvers
        # userIds to CC.
        self.cc_list = cc_list
        # When the CC is sent.
        self.cc_position = cc_position
        # Self-chosen actioners, when a process template is used.
        self.target_select_actioners = target_select_actioners
        # Form data: list of component values.
        self.form_component_values = form_component_values
        self.request_id = request_id
        self.ding_corp_id = ding_corp_id
        self.ding_org_id = ding_org_id
        self.ding_isv_org_id = ding_isv_org_id
        self.ding_suite_key = ding_suite_key
        self.ding_token_grant_type = ding_token_grant_type

    def validate(self):
        for group in (self.approvers, self.target_select_actioners,
                      self.form_component_values):
            for entry in (group or []):
                if entry:
                    entry.validate()

    def _copy_scalars(self, target, pairs):
        # Copy each non-None attribute into *target* under its wire key.
        for attr, key in pairs:
            value = getattr(self, attr)
            if value is not None:
                target[key] = value

    def to_map(self):
        premapped = super().to_map()
        if premapped is not None:
            return premapped
        serialized = dict()
        self._copy_scalars(serialized, self._SCALARS_HEAD)
        # Model lists are always present, even when empty.
        serialized['approvers'] = [entry.to_map() if entry else None
                                   for entry in (self.approvers or [])]
        self._copy_scalars(serialized, self._SCALARS_CC)
        serialized['targetSelectActioners'] = [
            entry.to_map() if entry else None
            for entry in (self.target_select_actioners or [])
        ]
        serialized['formComponentValues'] = [
            entry.to_map() if entry else None
            for entry in (self.form_component_values or [])
        ]
        self._copy_scalars(serialized, self._SCALARS_TAIL)
        return serialized

    def from_map(self, m: dict = None):
        m = m or dict()
        for attr, key in (self._SCALARS_HEAD + self._SCALARS_CC
                          + self._SCALARS_TAIL):
            if m.get(key) is not None:
                setattr(self, attr, m.get(key))
        self.approvers = [
            StartProcessInstanceRequestApprovers().from_map(entry)
            for entry in (m.get('approvers') or [])
        ]
        self.target_select_actioners = [
            StartProcessInstanceRequestTargetSelectActioners().from_map(entry)
            for entry in (m.get('targetSelectActioners') or [])
        ]
        self.form_component_values = [
            StartProcessInstanceRequestFormComponentValues().from_map(entry)
            for entry in (m.get('formComponentValues') or [])
        ]
        return self
class StartProcessInstanceResponseBody(TeaModel):
    """Response body: carries the id of the created approval instance."""

    def __init__(
        self,
        instance_id: str = None,
    ):
        # Approval instance id.
        self.instance_id = instance_id

    def validate(self):
        pass

    def to_map(self):
        premapped = super().to_map()
        if premapped is not None:
            return premapped
        serialized = dict()
        if self.instance_id is not None:
            serialized['instanceId'] = self.instance_id
        return serialized

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('instanceId') is not None:
            self.instance_id = m.get('instanceId')
        return self
class StartProcessInstanceResponse(TeaModel):
    """Full API response: HTTP headers plus the parsed body."""

    def __init__(
        self,
        headers: Dict[str, str] = None,
        body: StartProcessInstanceResponseBody = None,
    ):
        self.headers = headers
        self.body = body

    def validate(self):
        # Both headers and body are mandatory on a response.
        self.validate_required(self.headers, 'headers')
        self.validate_required(self.body, 'body')
        if self.body:
            self.body.validate()

    def to_map(self):
        premapped = super().to_map()
        if premapped is not None:
            return premapped
        serialized = dict()
        if self.headers is not None:
            serialized['headers'] = self.headers
        if self.body is not None:
            serialized['body'] = self.body.to_map()
        return serialized

    def from_map(self, m: dict = None):
        m = m or dict()
        if m.get('headers') is not None:
            self.headers = m.get('headers')
        if m.get('body') is not None:
            self.body = StartProcessInstanceResponseBody().from_map(m['body'])
        return self
import os
import re
import uuid
import json
import logging
import operator
import tornado.web
import tornado.ioloop
import tempfile
from tornado.concurrent import Future
from tornado import gen
import sandstone.lib.decorators
from sandstone import settings
from sandstone.lib.handlers.base import BaseHandler
from sandstone.lib.filesystem.mixins import FSMixin
from sandstone.lib.filesystem.filewatcher import Filewatcher
@tornado.web.stream_request_body
class SimpleUploadHandler(BaseHandler, FSMixin):
    """Streaming file-upload handler.

    Receives a multipart/form-data body in chunks, strips the multipart
    framing by hand, spools the payload to a temporary file, and on request
    completion moves the file into the directory named by the ``Uploaddir``
    request header.
    """

    @tornado.web.authenticated
    def prepare(self):
        # True once the opening multipart boundary has been recorded.
        self.stream_started = False
        # Allow uploads of up to 2 GiB.
        self.request.connection.set_max_body_size(2 * 1024 ** 3)
        # Spool the decoded payload to a named temporary file; the name is
        # needed later, so keep the handle open on the mkstemp path.
        fd_info = tempfile.mkstemp()
        self.fd = open(fd_info[1], 'w')

    def data_received(self, data):
        # Called by tornado for each body chunk: de-frame and spool it.
        # (The former ``tmp_cache`` accumulator was removed — it buffered
        # the entire upload in memory and was never read.)
        self.fd.write(self._process(data))

    @tornado.web.authenticated
    def post(self):
        """Finalize the upload: move the spooled file into place."""
        upload_dir = self.request.headers['Uploaddir']
        dest_path = os.path.join(upload_dir, self.filename)
        self.fd.close()
        self.fs.move(self.fd.name, dest_path)
        # rw-r--r--. The previous literal ``0644`` is Python-2-only octal
        # syntax and a SyntaxError on Python 3; ``0o644`` works on both.
        os.chmod(dest_path, 0o644)

    def _process(self, data):
        """Strip multipart framing from one chunk and return the payload.

        NOTE(review): assumes the boundary line, the part headers, and the
        closing boundary each arrive whole within a single chunk — confirm
        against tornado's chunking behavior.
        """
        trimmed = data.splitlines()
        tmp = data.splitlines(True)
        if not self.stream_started:
            # First chunk: remember the boundary token and drop its line.
            self.boundary = trimmed[0].strip()
            tmp = tmp[1:]
            trimmed = trimmed[1:]
            self.stream_started = True
            try:
                # Part headers end at the first blank line; the filename is
                # the quoted value in the Content-Disposition header.
                first_elem = trimmed[:5].index("")
                metadata = trimmed[:first_elem]
                self.filename = metadata[0].split(';')[-1].split('=')[-1][1:-1]
                tmp = tmp[first_elem + 1:]
                trimmed = trimmed[first_elem + 1:]
            except ValueError:
                # No blank line in the first few lines: treat all as payload.
                pass
        try:
            # Closing boundary found: return everything before it (minus the
            # trailing CRLF line) and reset for a possible next part.
            last_elem = trimmed.index(self.boundary + "--")
            self.stream_started = False
            return "".join(tmp[:last_elem - 1])
        except ValueError:
            # No terminator in this chunk: the whole chunk is payload.
            return "".join(tmp)
# Making database migrations.
import psycopg2
import psycopg2.extras
import os
from .initial1 import migrations
from config import BaseConfig
from ..utils import db_config
class Database:
    """Applies the SQL migrations to the configured PostgreSQL database."""

    def __init__(self):
        # Connection parameters (host, user, password, database, ...).
        self.config = db_config()
        self.database = self.config.get('database')

    def migrate(self):
        """Apply every migration statement to the configured database.

        Each statement is run independently so an already-applied migration
        does not abort the rest of the run.
        """
        con = psycopg2.connect(**self.config)
        con.autocommit = True
        cur = con.cursor(cursor_factory=psycopg2.extras.DictCursor)
        try:
            cur.execute(
                "select * from pg_database where datname = %(database_name)s",
                {'database_name': self.database})
            if cur.fetchall():
                print(" * Database {} exists".format(self.database))
                for command in migrations:
                    try:
                        cur.execute(command)
                        con.commit()
                    # Narrowed from a broad Exception catch: only DB errors
                    # (e.g. "already exists") are expected and tolerable here.
                    except psycopg2.Error as e:
                        print(e)
            else:
                print(" * Database {} does not exists".format(self.database))
        finally:
            # Always release the connection, even if the lookup fails.
            con.close()

    def migrate_test_db(self):
        """Create the test database (if missing) and apply the schema."""
        os.environ['APP_SETTINGS'] = 'TESTING'
        con = psycopg2.connect(**self.config)
        con.autocommit = True
        cur = con.cursor(cursor_factory=psycopg2.extras.DictCursor)
        print('\n * Creating test db\n')
        try:
            cur.execute('CREATE DATABASE {} OWNER {};'.format(
                BaseConfig.TEST_DB, self.config.get('user')))
        # Was a bare ``except:`` (also traps SystemExit/KeyboardInterrupt);
        # only a DB error — typically "database already exists" — is expected.
        except psycopg2.Error:
            pass
        con.close()
        self.config['database'] = BaseConfig.TEST_DB
        con = psycopg2.connect(**self.config)
        cur = con.cursor(cursor_factory=psycopg2.extras.DictCursor)
        try:
            for command in migrations:
                try:
                    cur.execute(command)
                    con.commit()
                except psycopg2.Error as e:
                    print(e)
        finally:
            con.close()

    def drop_test_database(self):
        """Drop the test database and restore the production settings."""
        print('\n * Dropping test database \n')
        os.environ['APP_SETTINGS'] = 'PRODUCTION'
        self.config = db_config()
        con = psycopg2.connect(**self.config)
        con.autocommit = True
        cur = con.cursor(cursor_factory=psycopg2.extras.DictCursor)
        try:
            cur.execute('DROP DATABASE IF EXISTS {};'.format(BaseConfig.TEST_DB))
        finally:
            con.close()


# Shared module-level instance used by the application.
db = Database()
# alarms/src/twiml_messages.py
# Copyright 2010-2019 <NAME>, <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TwiML for the legacy (v1) inbound-call menu: offers a three-way employee dial.
v1_incoming_voice = """<?xml version="1.0" encoding="UTF-8"?>
<Response>
<Gather timeout="120" numDigits="1" action="v1_incoming_voice_response1" method="POST">
<Say voice="woman">
Hello, this is the Isadore Alarm Service. Please select an option. Press 1 to call Employee One. Press
2 to call Employee Two. Press 3 to call Employee three.
</Say>
</Gather>
</Response>"""
# TwiML SMS reply listing the legacy (v1) text-message commands.
v1_sms_help = """<?xml version="1.0" encoding="UTF-8"?>
<Response>
<Sms>Isadore Alarm Commands:
STOP - to unsubscribe.
</Sms>
</Response>"""
# Version 2
# Inbound-call main menu: unsubscribe, dial an employee, pause, or resume.
incoming_voice = """<?xml version="1.0" encoding="UTF-8"?>
<Response>
<Gather timeout="120" numDigits="1" action="incoming_voice_response1" method="POST">
<Say voice="woman">
Hello, this is the Isadore Alarm Service. Please select an option. Press 8 to unsubscribe from any
future alarms. Press 1 to call Employee One. Press 2 to call Employee two. Press 7 to pause
alarms for a number of hours. Press 9 to resume alarms.
</Say>
</Gather>
</Response>"""
# Connects the caller; %s slots: spoken name, then the number to dial.
dial_xml = """<?xml version="1.0" encoding="UTF-8"?>
<Response>
<Say voice="woman">
Connecting you to %s, please wait...
</Say>
<Dial>%s</Dial>
</Response>"""
# Confirmation played after the caller unsubscribes.
unsubscribed = """<?xml version="1.0" encoding="UTF-8"?>
<Response>
<Say voice="woman">
You have been unsubscribed from future alarms. Thank you.
</Say>
<Hangup/>
</Response>"""
# Confirmation played after alarms are resumed.
resume = """<?xml version="1.0" encoding="UTF-8"?>
<Response>
<Say voice="woman">
Alarms have been resumed. Thank you.
</Say>
<Hangup/>
</Response>"""
# Played when a blacklisted caller tries to resume; they must text START.
resume_but_blacklisted = """<?xml version="1.0" encoding="UTF-8"?>
<Response>
<Say voice="woman">
Our records indicate you are on a no contact list. In order to resume fully you must text the word, "START", to
this number. Thank you and Good-bye.
</Say>
<Hangup/>
</Response>"""
# Prompts for the number of hours to pause (up to 3 digits, ended with #).
pause_prompt = """<?xml version="1.0" encoding="UTF-8"?>
<Response>
<Gather timeout="30" numDigits="3" action="incoming_voice_response_pause" method="POST">
<Say voice="woman">
Please enter the number of hours you would like to pause alarms for followed by the pound key.
</Say>
</Gather>
</Response>"""
# Invalid menu choice; %s is the URL to redirect back to.
invalid_choice = """<?xml version="1.0" encoding="UTF-8"?>
<Response>
<Say voice="woman">
You selected an invalid option.
</Say>
<Redirect method="POST">%s</Redirect>
</Response>"""
# Confirmation of a pause; %s is the number of hours.
paused = """<?xml version="1.0" encoding="UTF-8"?>
<Response>
<Say voice="woman">
All alarms have been paused for %s hours. Thank you.
</Say>
</Response>
"""
# Outbound alarm call; %s is the alarm description read to the callee.
outgoing = """<?xml version="1.0" encoding="UTF-8"?>
<Response>
<Gather timeout="45" numDigits="1" action="../outgoing_response" method="POST">
<Say voice="woman">
Hello, this is the Isadore Alarm Service. The following alarm has been triggered: %s Press 8 to
unsubscribe from any future alarms. Press 7 to pause alarms for a number of hours.
</Say>
</Gather>
</Response>"""
# SMS reply listing the supported text-message commands.
sms_help = """<?xml version="1.0" encoding="UTF-8"?>
<Response>
<Sms>Isadore Alarm Commands:
STOP - to unsubscribe.
PAUSE XXX - pause alarms for XXX hours.
START - resume all alarms.
For more better control log in to Isadore.</Sms>
</Response>"""
# SMS confirmation of a pause; %d is the number of hours.
sms_paused = """<?xml version="1.0" encoding="UTF-8"?>
<Response>
<Sms>You will receive no further alarms for %d hours. Reply START to resume all alarms before then.</Sms>
</Response>"""
# SMS reply for a malformed PAUSE command.
sms_paused_invalid = """<?xml version="1.0" encoding="UTF-8"?>
<Response>
<Sms>Invalid pause command.
PAUSE XXX - pause alarms for XXX hours.
Example:
PAUSE 12</Sms>
</Response>"""
# SMS confirmation that alarms were resumed.
sms_resumed = """<?xml version="1.0" encoding="UTF-8"?>
<Response>
<Sms>Alarms have been resumed.</Sms>
</Response>"""
# SMS confirmation of unsubscription.
sms_unsubscribe = """<?xml version="1.0" encoding="UTF-8"?>
<Response>
<Sms>You have been unsubscribed from all alarms.</Sms>
</Response>"""
# Plain-text verification SMS; %d is the numeric code.
sms_verify = """Isadore Alarm verification code: %d"""
# Outbound phone-number verification call: 1 enables alarms, 9 opts out.
voice_verify = """<?xml version="1.0" encoding="UTF-8"?>
<Response>
<Gather timeout="45" numDigits="1" action="./voice3" method="POST">
<Say voice="woman">
This is the Isadore Alarm system calling to verify your phone number. Please press 1 to enable getting
alarms at this number. Press 9 to never receive these calls again. Otherwise hang up.
</Say>
</Gather>
</Response>"""
# Played after the callee confirms the number.
voice_verify_verified = """<?xml version="1.0" encoding="UTF-8"?>
<Response>
<Say voice="woman">
Your number has been verified and can receive Isadore Alarms. Thank you, goodbye.
</Say>
</Response>"""
# Played after the callee opts out of verification calls.
voice_verify_stop = """<?xml version="1.0" encoding="UTF-8"?>
<Response>
<Say voice="woman">
We will not contact you again. Thank you, goodbye.
</Say>
</Response>"""
# Plain-text email-verification body; both %s slots take the confirmation code.
email_verify_txt = """Hello, please confirm your email going to the link below:
https://alarms.example.com/static/ev.html?%s
Or go to:
https://alarms.example.com/verify_email
Enter your email address and copy and paste the confirmation code:
%s
"""
# HTML email-verification body; all three %s slots take the confirmation code.
email_verify_html = """<html><body>
Hello, please confirm your email going to the link below:<br/>
<br/>
<a href="https://alarms.example.com/static/ev.html?%s">https://alarms.example.com/static/ev.html?%s</a><br/>
<br/>
Or go to:<br/>
<br/>
<a href="https://alarms.example.com/static/ev.html">https://alarms.example.com/static/ev.html</a><br/>
Enter your email address and copy and paste the confirmation code:<br/>
<br/>
%s<br/>
<br/>
</body></html>"""
import math

# Advent of Code 2020, day 3: count trees ('#') hit while moving down the
# grid at several slopes, then multiply the per-slope counts together.
grid = []
with open("Day3\Aoc3.txt", "r") as data:
    # One row of terrain per line; the pattern repeats horizontally.
    grid = [line.strip() for line in data.readlines()]

slopes = [(1, 1), (3, 1), (5, 1), (7, 1), (1, 2)]
tree_counts = []
# NOTE: the original ``for slopes in slopes`` shadowed the list it iterated;
# the loop variable is now unpacked into (right, down). ``map`` also
# shadowed the builtin and was renamed to ``grid``.
for right, down in slopes:
    trees = 0
    x = 0
    y = 0
    while y + 1 < len(grid):
        x += right
        y += down
        # Wrap x around the row width because the terrain repeats.
        if grid[y][x % len(grid[y])] == '#':
            trees += 1
    tree_counts.append(trees)

# Product of the per-slope tree counts (replaces the manual multiply loop).
print(math.prod(tree_counts))
197287 | <filename>app/main/forms.py
from flask_wtf import FlaskForm
from wtforms import StringField,TextAreaField,SubmitField,SelectField
from wtforms.validators import Required
class PitchForm(FlaskForm):
    """Form for submitting a new pitch: a title, the pitch text and a category."""
    title = StringField('Pitch title', validators=[Required()])
    text = TextAreaField('Text', validators=[Required()])
    # Category value is stored as the first tuple element; the second is the label.
    category = SelectField('Type', choices=[('interview', 'Interview pitch'),('product','Product pitch'),('promotion','Promotion pitch')],validators=[Required()])
    submit = SubmitField('Submit')
class UpdateProfile(FlaskForm):
    """Form for updating a user's profile biography."""
    bio = TextAreaField('Bio.', validators=[Required()])
    submit = SubmitField('Submit')
class CommentForm(FlaskForm):
    """Form for leaving a comment on a pitch."""
    text = TextAreaField('Leave a comment:', validators=[Required()])
    submit = SubmitField('Submit')
| StarcoderdataPython |
3213262 | import numpy as np
import base64
def MeshViewer(R, L, F, U, f1, f2):
    """Render an interactive three.js mesh/force/displacement viewer in Jupyter.

    Parameters
    ----------
    R : numpy array of node positions (centered by subtracting the mean below)
    F : numpy array of per-node force vectors
    U : numpy array of per-node displacement vectors
    L : numpy integer array of line (edge) endpoint index pairs into R
    f1 : scale factor substituted as "factor_mesh" in the JS template
    f2 : scale factor substituted as "factor_force" in the JS template

    The arrays are base64-encoded into JS typed-array constructors, spliced
    into the embedded HTML/JS template, and shown via an IPython iframe.
    NOTE(review): assumes an IPython/Jupyter environment (IPython import below).
    """
    source = """
<!--<div id="info"><a href="http://threejs.org" target="_blank" rel="noopener">three.js</a> - dashed lines example</div>-->
<div id="container"></div>
<script src="https://threejs.org/build/three.js"></script>
<script src="https://threejs.org/examples/js/WebGL.js"></script>
<script src="https://threejs.org/examples/js/libs/stats.min.js"></script>
<script src="https://threejs.org/examples/js/libs/dat.gui.min.js"></script>
<script src="https://threejs.org/examples/js/controls/OrbitControls.js"></script>
<style>
.dg li {
background: #f7f7f7 !important;
}
.dg {
color: #111;
text-shadow: none;
}
.dg.main .close-button {
background: none;
}
.dg.main .close-button:hover {
background: none;
}
.dg .cr.boolean {
border-left: 1px solid #cfcfcf;
}
.dg .cr.number {
border-left: 1px solid #cfcfcf;
}
.dg .c input[type=text] {
background: #fffefe00;
outline: none;
color: #111 !important;
}
.dg .c input[type=text]:hover {
background: #fffefe00;
outline: none;
color: #111 !important;
}
.dg .c .slider {
background: #d6d6d6;
cursor: ew-resize;
border-radius: 5px;
}
.dg .c .slider:hover {
background: #d6d6d6;
}
.dg .c .slider-fg {
background: #747575;
border-radius: 5px;
}
.dg .c .slider:hover .slider-fg {
background: #42a5f5;
}
.dg li:not(.folder) {
border: 1px solid #cfcfcf;
border-radius: 2px;
}
</style>
<script>
function NewArray(type, base64) {
var binary_string = window.atob(base64);
var len = binary_string.length;
var bytes = new Uint8Array( len );
for (var i = 0; i < len; i++) {
bytes[i] = binary_string.charCodeAt(i);
}
return new type(bytes.buffer);
}
//if ( WEBGL.isWebGLAvailable() === false ) {
// document.body.appendChild( WEBGL.getWebGLErrorMessage() );
//}
var renderer, scene, camera, stats, controls;
var objects = [];
var gui;
factor_mesh = %f;
factor_force = %f;
var WIDTH = window.innerWidth, HEIGHT = window.innerHeight;
init();
animate();
function init() {
camera = new THREE.PerspectiveCamera( 60, WIDTH / HEIGHT, 1, 200 );
camera.position.z = 150;
scene = new THREE.Scene();
scene.background = new THREE.Color( 0xFFFFFF);//0x111111 );
scene.fog = new THREE.Fog( 0xFFFFFF, 50, 200);
renderer = new THREE.WebGLRenderer( { antialias: true } );
renderer.setPixelRatio( window.devicePixelRatio );
renderer.setSize( WIDTH, HEIGHT );
var container = document.getElementById( 'container' );
container.appendChild( renderer.domElement );
//stats = new Stats();
//container.appendChild( stats.dom );
//
addMesh(%s, %s, %s, %s)
window.addEventListener( 'resize', onWindowResize, false );
controls = new THREE.OrbitControls( camera, renderer.domElement );
//controls.minDistance = 10;
//controls.maxDistance = 500;
initGui();
}
function addMesh(points1, lines1, F1, U1) {
points = points1;
lines = lines1;
F = F1;
U = U1;
for(var i=0; i < points.length; i++) {
points[i] *= factor_mesh;
U[i] *= factor_mesh;
}
//var h = size * 0.5;
var geometry = new THREE.BufferGeometry();
var position = [];
//console.log(points.length, tets.length);
for(var t=0; t < lines1.length/2; t++) {
var t1 = lines1[t*2+0];
var t2 = lines1[t*2+1];
for(var x=0; x < 3; x++)
position.push(points[t1*3+x]);
for(var x=0; x < 3; x++)
position.push(points[t2*3+x]);
//console.log(t);
}
console.log("ready");
geometry.addAttribute( 'position', new THREE.Float32BufferAttribute( position, 3 ) );
//var geometryCube = cube( 50 );
//var lineSegments = new THREE.LineSegments( geometry, new THREE.LineDashedMaterial( { color: 0xffaa00, dashSize: 3, gapSize: 1 } ) );
mesh_lines = new THREE.LineSegments( geometry, new THREE.LineBasicMaterial( { color: 0xffaa00, linewidth: 0.5, transparent: true, opacity: 0.5} ) );
mesh_lines.computeLineDistances();
objects.push( mesh_lines );
scene.add( mesh_lines );
var geometry = new THREE.BufferGeometry();
var position = [];
var force_tips = [];
for(var i=0; i < U.length/3; i++) {
f_abs = Math.sqrt(F[i*3+0]**2 + F[i*3+1]**2 + F[i*3+2]**2);
factor = factor_force*factor_mesh;//1/f_abs/3000 * f_abs * 100000;
for(var x=0; x < 3; x++)
position.push((points[i*3+x]));
for(var x=0; x < 3; x++) {
position.push(points[i * 3 + x] + F[i * 3 + x] * factor);
force_tips.push(points[i * 3 + x] + F[i * 3 + x] * factor);
}
}
geometry.addAttribute( 'position', new THREE.Float32BufferAttribute( position, 3 ) );
force_mat = new THREE.LineBasicMaterial( { color: 0xaa0000, linewidth: 3,} );
force_lines = new THREE.LineSegments( geometry, force_mat );
force_lines.computeLineDistances();
objects.push( force_lines );
scene.add( force_lines );
var sprite = new THREE.TextureLoader().load( 'https://threejs.org/examples/textures/sprites/disc.png' );
var geometry = new THREE.BufferGeometry();
geometry.addAttribute( 'position', new THREE.Float32BufferAttribute( points, 3 ) );
mesh_points = new THREE.Points( geometry, new THREE.PointsMaterial( { size: 8, sizeAttenuation: false, color: 0xffaa00, map: sprite, alphaTest: 0.5, transparent: true } ) );
scene.add( mesh_points );
var geometry = new THREE.BufferGeometry();
geometry.addAttribute( 'position', new THREE.Float32BufferAttribute( force_tips, 3 ) );
force_points = new THREE.Points( geometry, new THREE.PointsMaterial( { size: 10, sizeAttenuation: false, color: 0xaa0000, map: sprite, alphaTest: 0.5, transparent: true } ) );
scene.add( force_points );
// Displacements
var geometry = new THREE.BufferGeometry();
var position = [];
var displacement_tips = [];
for(var i=0; i < U.length/3; i++) {
for(var x=0; x < 3; x++)
position.push((points[i*3+x]));
for(var x=0; x < 3; x++) {
position.push(points[i * 3 + x] + U[i * 3 + x]);
displacement_tips.push(points[i * 3 + x] + U[i * 3 + x]);
}
}
geometry.addAttribute( 'position', new THREE.Float32BufferAttribute( position, 3 ) );
displacement_mat = new THREE.LineBasicMaterial( { color: 0x00aa00, linewidth: 2,} );
displacement_lines = new THREE.LineSegments( geometry, displacement_mat );
displacement_lines.computeLineDistances();
objects.push( displacement_lines );
scene.add( displacement_lines );
var geometry = new THREE.BufferGeometry();
geometry.addAttribute( 'position', new THREE.Float32BufferAttribute( displacement_tips, 3 ) );
displacement_points = new THREE.Points( geometry, new THREE.PointsMaterial( { size: 10, sizeAttenuation: false, color: 0x00aa00, map: sprite, alphaTest: 0.5, transparent: true } ) );
scene.add( displacement_points );
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
}
function animate() {
requestAnimationFrame( animate );
render();
renderer.render( scene, camera );
//stats.update();
}
function render() {
var time = Date.now() * 0.001;
scene.traverse( function ( object ) {
//if ( object.isLine ) {
//object.rotation.y = 0.25 * time;
//object.rotation.y = 0.25 * time;
//}
} );
renderer.render( scene, camera );
}
function initGui() {
gui = new dat.GUI();
var param = {
'mesh': true,
'forces': true,
'force scale': 1,
'displacements': true,
'view_range' : 200,
};
gui.add( param, 'mesh' ).onChange( function ( val ) {
mesh_lines.visible = val;
mesh_points.visible = val;
} );
gui.add( param, 'forces' ).onChange( function ( val ) {
force_lines.visible = val;
force_points.visible = val;
} );
gui.add( param, 'force scale', 1, 8, 0.1 ).onChange( function ( val ) {
var position = [];
var force_tips = [];
for(var i=0; i < U.length/3; i++) {
f_abs = Math.sqrt(F[i * 3 + 0] ** 2 + F[i * 3 + 1] ** 2 + F[i * 3 + 2] ** 2);
factor = factor_force * factor_mesh * val;//1/f_abs/3000 * f_abs * 100000;
for (var x = 0; x < 3; x++)
position.push((points[i * 3 + x]));
for (var x = 0; x < 3; x++) {
position.push(points[i * 3 + x] + F[i * 3 + x] * factor);
force_tips.push(points[i * 3 + x] + F[i * 3 + x] * factor);
}
}
force_lines.geometry.addAttribute( 'position', new THREE.Float32BufferAttribute( position, 3 ) );
force_points.geometry.addAttribute( 'position', new THREE.Float32BufferAttribute( force_tips, 3 ) );
} );
gui.add( param, 'displacements' ).onChange( function ( val ) {
displacement_lines.visible = val;
displacement_points.visible = val;
} );
gui.add( param, 'view_range', 10, 300, 1 ).onChange( function ( val ) {
scene.fog.far = val;
} );
}
</script>
"""
    # Single quotes would terminate the srcdoc='...' attribute below, so swap
    # them all to double quotes before embedding the document.
    source = source.replace("'", "\"")

    def wrap(array):
        # Map a numpy dtype to the matching JavaScript TypedArray constructor
        # and emit a NewArray(...) call around the base64-encoded raw bytes.
        if array.dtype == "float32":
            data_type = "Float32Array"
        elif array.dtype == "float64":
            data_type = "Float64Array"
        elif array.dtype == "int8":
            data_type = "Int8Array"
        elif array.dtype == "uint8":
            data_type = "Uint8Array"
        elif array.dtype == "int16":
            data_type = "Int16Array"
        elif array.dtype == "uint16":
            data_type = "Uint16Array"
        elif array.dtype == "int32":
            data_type = "Int32Array"
        elif array.dtype == "uint32":
            data_type = "Uint32Array"
        elif array.dtype == "int64":
            data_type = "BigInt64Array"
        elif array.dtype == "uint64":
            data_type = "BigUint64Array"
        else:
            # Unsupported dtype: fail loudly rather than emit broken JS.
            raise TypeError(array.dtype)
        return "NewArray("+data_type+", \""+repr(base64.b64encode(array))[2:-1]+"\")"

    # Fill the template placeholders: the two %f scale factors, then the four
    # encoded arrays (positions centered on their mean, edges, forces,
    # displacements) for the JS addMesh(...) call.
    here = source % (f1, f2, wrap(R-np.mean(R, axis=0)), wrap(L), wrap(F), wrap(U))
    from IPython.core.display import HTML, display
    # Embed the generated document through the iframe's srcdoc attribute
    # (hence the quote swap above) and render it in the notebook output.
    code = "<h1></h1><iframe srcdoc='{0}' scrolling=no style='border:none; width: 100%; height: 600px'></iframe>".format(here)
    display(HTML(code))
| StarcoderdataPython |
168296 | <gh_stars>0
#!/usr/bin/python
"""Standalone utility functions for Mininet tests."""
import os
import socket
FAUCET_DIR = os.getenv('FAUCET_DIR', '../src/ryu_faucet/org/onfsdn/faucet')
RESERVED_FOR_TESTS_PORTS = (179, 5001, 5002, 6633, 6653, 9179)
def mininet_dpid(int_dpid):
    """Return stringified hex version, of int DPID for mininet."""
    return format(int(int_dpid), 'x')
def str_int_dpid(str_dpid):
    """Return stringified int version, of int or hex DPID from YAML."""
    text = str(str_dpid)
    # A "0x" prefix marks a hexadecimal DPID; everything else is decimal.
    base = 16 if text.startswith('0x') else 10
    return str(int(text, base))
def find_free_port(ports_socket):
    """Retrieve a free TCP port from test server.

    Connects to the UNIX-domain socket served by serve_ports() and reads one
    newline-terminated "<port> <count>" response, returned as [port, count].
    """
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    sock.connect(ports_socket)
    buf = ''
    # NOTE(review): str + recv() only works on Python 2 (recv returns bytes
    # on Python 3) -- confirm the target interpreter before porting.
    while not buf.find('\n') > -1:
        buf = buf + sock.recv(1024)
    return [int(x) for x in buf.strip().split()]
def serve_ports(ports_socket):
    """Implement a TCP server to dispense free TCP ports.

    Listens forever on a UNIX-domain socket; each accepted connection gets one
    "<port> <count>\\n" line. Runs until the process is killed.
    """
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    sock.bind(ports_socket)
    sock.listen(1)
    # Ports already handed out; never reuse one within this server's lifetime.
    ports_served = set()
    while True:
        connection, _ = sock.accept()
        while True:
            # Ask the OS for an ephemeral port by binding port 0, then release
            # it immediately -- the number stays free for the caller to use.
            free_socket = socket.socket()
            free_socket.bind(('', 0))
            free_port = free_socket.getsockname()[1]
            free_socket.close()
            if free_port < 1024:
                # Skip privileged ports.
                continue
            if free_port in RESERVED_FOR_TESTS_PORTS:
                # Skip ports reserved for test infrastructure (BGP, OF, etc.).
                continue
            if free_port in ports_served:
                continue
            break
        ports_served.add(free_port)
        connection.sendall('%u %u\n' % (free_port, len(ports_served)))
        connection.close()
| StarcoderdataPython |
3236074 | '''
Utilities for loading config files, etc.
'''
import os
import json
from copy import deepcopy
def config_files():
    '''
    Get list of currently used config files.

    Precedence: the tempfile listing written by Sensu (SENSU_LOADED_TEMPFILE),
    then the colon-separated SENSU_CONFIG_FILES env var, then the conventional
    default locations under /etc/sensu.
    '''
    sensu_loaded_tempfile = os.environ.get('SENSU_LOADED_TEMPFILE')
    sensu_config_files = os.environ.get('SENSU_CONFIG_FILES')
    if sensu_loaded_tempfile and os.path.isfile(sensu_loaded_tempfile):
        with open(sensu_loaded_tempfile, 'r') as tempfile:
            contents = tempfile.read()
            return contents.split(':')
    elif sensu_config_files:
        return sensu_config_files.split(':')
    else:
        files = ['/etc/sensu/config.json']
        # Add every *.json dropped into conf.d.
        filenames = [f for f in os.listdir('/etc/sensu/conf.d')
                     if os.path.splitext(f)[1] == '.json']
        for filename in filenames:
            files.append('/etc/sensu/conf.d/{}'.format(filename))
        return files
def get_settings():
    '''
    Get all currently loaded settings.

    Merges every readable config file in order; later files win on conflicts
    (see deep_merge). Unreadable files are skipped -- load_config returns
    None for those.
    '''
    settings = {}
    for config_file in config_files():
        config_contents = load_config(config_file)
        if config_contents is not None:
            settings = deep_merge(settings, config_contents)
    return settings
def load_config(filename):
    '''
    Read contents of config file.

    Returns the parsed JSON, or None when the file is missing/unreadable.
    '''
    try:
        with open(filename, 'r') as config_file:
            return json.load(config_file)
    except IOError:
        # Best-effort: a missing file simply yields no settings.
        return None
def deep_merge(dict_one, dict_two):
    '''
    Deep merge two dicts.

    Nested dicts are merged recursively, lists are unioned (duplicates
    removed, order unspecified), and any other conflict is won by dict_two.
    '''
    merged = dict_one.copy()
    for key, value in dict_two.items():
        if key not in merged:
            merged[key] = value
            continue
        existing = merged[key]
        if isinstance(existing, dict) and isinstance(value, dict):
            merged[key] = deep_merge(existing, value)
        elif isinstance(existing, list) and isinstance(value, list):
            merged[key] = list(set(existing + value))
        else:
            merged[key] = value
    return merged
def map_v2_event_into_v1(event):
    '''
    Helper method to convert Sensu 2.x event into Sensu 1.x event.

    Mutates *event* in place and returns it; a marker key makes repeated
    calls no-ops.
    '''
    # return the event if it has already been mapped
    if "v2_event_mapped_into_v1" in event:
        return event
    # Trigger mapping code if entity exists and client does not
    if not bool(event.get('client')) and "entity" in event:
        event['client'] = event['entity']
    # Fill in missing client attributes
    if "name" not in event['client']:
        event['client']['name'] = event['entity']['id']
    if "subscribers" not in event['client']:
        event['client']['subscribers'] = event['entity']['subscriptions']
    # Fill in renamed check attributes expected in 1.4 event
    if "subscribers" not in event['check']:
        event['check']['subscribers'] = event['check']['subscriptions']
    if "source" not in event['check']:
        event['check']['source'] = event['check']['proxy_entity_id']
    # Mimic 1.4 event action based on 2.0 event state
    # action used in logs and fluentd plugins handlers
    action_state_mapping = {'flapping': 'flapping', 'passing': 'resolve',
                            'failing': 'create'}
    if "state" in event['check']:
        state = event['check']['state']
    else:
        state = "unknown::2.0_event"
    if "action" not in event and state.lower() in action_state_mapping:
        event['action'] = action_state_mapping[state.lower()]
    else:
        event['action'] = state
    # Mimic 1.4 event history based on 2.0 event history
    if "history" in event['check']:
        # save the original history under history_v2 before rewriting
        event['check']['history_v2'] = deepcopy(event['check']['history'])
        legacy_history = []
        for history in event['check']['history']:
            if isinstance(history['status'], int):
                legacy_history.append(str(history['status']))
            else:
                # non-integer status maps to "3" (unknown)
                legacy_history.append("3")
        event['check']['history'] = legacy_history
    # Setting flag indicating this function has already been called
    event['v2_event_mapped_into_v1'] = True
    # return the updated event
    return event
| StarcoderdataPython |
1782737 | <gh_stars>1-10
from django.core.urlresolvers import reverse
from admin_tools.dashboard import modules, Dashboard
from admin_tools.utils import get_admin_site_name
from crate.web.dashboard.modules import StatusModule, RedisStatusModule
class CrateIndexDashboard(Dashboard):
    """Custom django-admin-tools dashboard shown on the Crate admin index."""

    def init_with_context(self, context):
        # Assemble the dashboard modules for this request.
        site_name = get_admin_site_name(context)
        # append a link list module for "quick links"
        self.children.append(modules.LinkList(
            "Quick links",
            layout="inline",
            draggable=False,
            deletable=False,
            collapsible=False,
            children=[
                ["Return to site", "/"],
                ["Change password",
                    reverse("%s:password_change" % site_name)],
                ["Log out", reverse("%s:logout" % site_name)],
            ]
        ))
        # append an app list module for "Administration" (django contrib apps)
        self.children.append(modules.AppList(
            "Administration",
            models=('django.contrib.*',),
        ))
        # append an app list module for "Applications" (everything else)
        self.children.append(modules.AppList(
            "Applications",
            exclude=[
                "django.contrib.*",
                "pinax.apps.*",
                "djcelery.*",
                "emailconfirmation.*",
                "profiles.*",
            ],
        ))
        # Service status panels.
        self.children.append(StatusModule("Status"))
        self.children.append(RedisStatusModule(
            "Redis Status",
        ))
        # append a recent actions module (last 5 admin actions)
        self.children.append(modules.RecentActions("Recent Actions", 5))
| StarcoderdataPython |
3366665 | <filename>c1_2_processes/s13_ex1_19.py
from util import logl
from util import square
from util import even
def fib(n):
    """Return Fib(n) using the logarithmic iterative algorithm (SICP ex. 1.19)."""
    return fib_iter(1, 0, 0, 1, n)
def fib_iter(a, b, p, q, count):
    """One step of the O(log n) Fibonacci iteration (SICP exercise 1.19).

    Invariant: applying the transform T(p, q) to (a, b) another `count`
    times yields (Fib(n+1), Fib(n)). When count is even the transform is
    squared, halving count.
    """
    logl("(" + str(a) + ", " + str(b) + ", " + str(p) +
         ", " + str(q) + ", " + str(count) + ")")
    if count == 0:
        return b
    elif even(count):
        # Square the transform: p' = p^2 + q^2, q' = 2pq + q^2.
        # Floor division keeps count an int on Python 3; the original
        # `count / 2` silently turned it into a float.
        return fib_iter(a,
                        b,
                        square(p) + square(q),
                        2 * p * q + square(q),
                        count // 2)
    else:
        # Apply the transform once.
        return fib_iter((b * q) + (a * q) + (a * p),
                        (b * p) + (a * q),
                        p,
                        q,
                        (count - 1))
print(fib(10)) | StarcoderdataPython |
136705 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-01-31 18:21
from __future__ import unicode_literals
# Import Django lazily: when Django is unavailable or unconfigured the error
# is printed instead of raised, so the module can still be imported.
try:
    from django.conf import settings
    from django.core.exceptions import ImproperlyConfigured
    from django.contrib.contenttypes.models import ContentType
    from django.db import DEFAULT_DB_ALIAS, connections, migrations, models
    from django.db.migrations.recorder import MigrationRecorder
    import django.db.models.deletion
except ImportError as error:
    print(error)
# Fix a bad `social_django` migration.
# Record the known-bad migration as already applied so the migration graph
# will not try to run it again; ignore environments without a database.
try:
    BAD_MIGRATION = ('default', '0004_auto_20160423_0400')
    recorder = MigrationRecorder(connections[DEFAULT_DB_ALIAS])
    applied = recorder.applied_migrations()
    if applied and (BAD_MIGRATION not in applied):
        recorder.record_applied(*BAD_MIGRATION)
except (NameError, ImproperlyConfigured) as error:
    print(error)
class Migration(migrations.Migration):
    """Move the Profile content type into api_v3 and add the Subscriber model."""

    dependencies = [
        ('api_v3', '0008_v1_to_v2_attachments'),
    ]
    operations = [
        # Re-home the existing 'profile' content type from the old 'accounts'
        # app label to 'api_v3'.
        migrations.RunPython(
            lambda _a, _s: ContentType.objects.filter(
                app_label='accounts', model='profile'
            ).update(app_label='api_v3')
        ),
        migrations.CreateModel(
            name='Subscriber',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
            ],
        ),
        migrations.AddField(
            model_name='subscriber',
            name='ticket',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='subscribers', to='api_v3.Ticket'),
        ),
        migrations.AddField(
            model_name='subscriber',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        # A user may subscribe to a given ticket at most once.
        migrations.AlterUniqueTogether(
            name='subscriber',
            unique_together=set([('user', 'ticket')]),
        ),
    ]
| StarcoderdataPython |
""" Services

This module is responsible for handling all interactions with the database
and the business rules.
"""
import typing
import environs
import dotenv
import requests
import sqlalchemy.orm
from . import models, schemas, cache
env = environs.Env()
dotenv.load_dotenv()
SECRET_KEY_RECAPTCHA = env("RECAPTCHA_SECRET_KEY", "")
VALIDATE_RECAPTCHA = env.bool("VALIDATE_RECAPTCHA", True)
class ServiceException(Exception):
    """ Service Exception

    Base error raised when data passed to a service function is not valid.
    """
class ValidationError(ServiceException):
    # Raised for invalid input (e.g. missing fields or duplicate city).
    pass
_cache = cache.get_cache()
class CityService:
    """ City Service

    Service class designed to provide reusable functionality related to cities.
    """

    # SQLAlchemy session used by all queries; deliberately excluded from
    # pickling (see __getstate__) so cached method results never capture it.
    _db: sqlalchemy.orm.Session

    def __init__(self, db: sqlalchemy.orm.Session):
        """
        Store the database session used by the other methods.
        """
        self._db = db

    def __getstate__(self):
        """
        Prevent the database connection from being pickled/cached.
        """
        state = self.__dict__.copy()
        state.pop("_db")  # do not pickle _db session
        return state

    def get_city_by_id(self, name: str, state: str) -> models.City:
        """ Get City By ID

        Look up a City by its deterministic id derived from name and state.

        Args:
            name (str): City's name
            state (str): City's state

        Returns:
            Instance of models.City, or None when not found
        """
        db_city = (
            self._db.query(models.City)
            .filter_by(id=models.City.generate_id(name=name, state=state))
            .first()
        )
        return db_city

    def create_city(self, city: schemas.CityBase) -> models.City:
        """ Create City

        This method is used to create a City.

        Args:
            city (schemas.CityBase): City's fields

        Raises:
            ValidationError: on missing fields or an already-existing city

        Returns:
            models.City
        """
        # Reject payloads with any missing (None) field.
        if None in city.dict().values():
            raise ValidationError("Invalid Post")
        db_city = self.get_city_by_id(name=city.name, state=city.state)
        if db_city:
            raise ValidationError("City already exist")
        city = models.City(**city.dict())
        city.id = models.City.generate_id(name=city.name, state=city.state)
        self._db.add(city)
        self._db.commit()
        self._db.flush()
        # New data invalidates every memoized filter_city result.
        self.cached_filter_city.invalidate_all()
        return city

    def filter_city(self, name: str) -> typing.List[models.City]:
        """ Filter City

        Return all cities whose name contains the given substring.

        Args:
            name (str): City's name (substring match)

        Returns:
            list of cities
        """
        query = self._db.query(models.City).filter(
            models.City.name.contains(name)
        )
        return query.all()

    @_cache.cache(ttl=60)
    def cached_filter_city(self, name: str):
        """ Cached Filter City

        Cached version of filter_city (60s TTL); prevents hitting the
        database for already-cached queries.

        Args:
            name (str): City's name

        Returns:
            list of cities
        """
        return self.filter_city(name)
class GoogleService:
    """Wrapper around Google's reCAPTCHA server-side verification."""

    # Endpoint used to verify reCAPTCHA response tokens.
    _RECAPTCHA_SITEVERIFY_URL = (
        "https://www.google.com/recaptcha/api/siteverify"
    )

    def validate_recaptcha(self, response_token: str) -> bool:
        """Verify a client reCAPTCHA token; returns True on success.

        When VALIDATE_RECAPTCHA is disabled (e.g. in tests), validation is
        skipped and True is returned unconditionally.
        """
        if not VALIDATE_RECAPTCHA:
            return True
        data = {
            "response": response_token,
            "secret": SECRET_KEY_RECAPTCHA,
        }
        response = requests.post(self._RECAPTCHA_SITEVERIFY_URL, data=data)
        if response.json().get("success") is not True:
            return False
        return True
| StarcoderdataPython |
128763 | <gh_stars>1-10
import tweepy
import csv
import numpy as np
from textblob import TextBlob
from keras.models import Sequential
from keras.layers import Dense
# SECURITY: Twitter API credentials are hard-coded in source. Move them to
# environment variables or a config file, and rotate any exposed keys.
consumer_key = '0FAwDEdtG0DlUCHdKgICtLmHf'
consumer_secret = '<KEY>'
access_token = '<KEY>'
access_token_secret = '<KEY>'
# Authenticate once at import time; `api` is the shared Twitter client.
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)
def stockSentiment(stockName, numTweets=100):
    """Return True when recent tweets about *stockName* are net positive.

    Fetches up to *numTweets* tweets via the Twitter search API and classifies
    each by TextBlob polarity (>= 0 counts as positive).
    """
    public_tweets = api.search(stockName, count=numTweets)
    # NOTE(review): debug print; raises IndexError if fewer than 3 results.
    print(public_tweets[2].text)
    threshold = posSentTweet = negSentTweet = 0
    for tweet in public_tweets:
        analysis = TextBlob(tweet.text)
        print(analysis.sentiment)
        if analysis.sentiment.polarity >= threshold:
            posSentTweet = posSentTweet + 1
        else:
            negSentTweet = negSentTweet + 1
    if posSentTweet > negSentTweet:
        print("Overall Positive")
        return True
    else:
        print("Overall Negative")
        return False
stockSentiment('Nabil Bank')
# data collection from csv files
dates = []
prices = []
def stock_prediction(filename):
    """Load the CSV into the module-level `dates` and `prices` lists.

    NOTE(review): despite the name this only reads data. It appends the first
    '-'-separated component of column 0 (presumably the year) to `dates`, and
    column 1 as float to `prices`, skipping the header row.
    """
    with open(filename, 'r') as csvfile:
        csvFileReader = csv.reader(csvfile)
        next(csvFileReader)  # skip header
        for row in csvFileReader:
            dates.append(int(row[0].split('-')[0]))
            prices.append(float(row[1]))
    return
stock_prediction('NBL.csv')
# creating the dataset matrix
def create_datasets(dates, prices):
    """Split parallel date/price lists into training and test portions.

    The first 80% of the samples (by position, based on len(dates)) form the
    training set; the remainder is the test set.

    Args:
        dates: sequence of x values
        prices: sequence of y values (parallel to dates)

    Returns:
        (TrainX, TrainY, TestX, TestY) lists.
    """
    # BUG FIX: the original never incremented its `cntr` counter, so every
    # sample landed in the training lists and the test lists stayed empty.
    train_size = int(0.80 * len(dates))
    TrainX = list(dates[:train_size])
    TestX = list(dates[train_size:])
    TrainY = list(prices[:train_size])
    TestY = list(prices[train_size:])
    return TrainX, TrainY, TestX, TestY
def predict_prices(dates, prices, x):
    """Train a small Keras MLP mapping dates to prices.

    NOTE(review): parameter *x* is currently unused and nothing is returned;
    the prediction step below remains commented out.
    """
    TrainX, TrainY, TestX, TestY = create_datasets(dates, prices)
    # Reshape flat lists into (n, 1) column vectors as Keras expects.
    TrainX = np.reshape(TrainX, (len(TrainX), 1))
    TrainY = np.reshape(TrainY, (len(TrainY), 1))
    TestX = np.reshape(TestX, (len(TestX), 1))
    TestY = np.reshape(TestY, (len(TestY), 1))
    # create multilayer perceptron model: 32-32-16-1 dense stack.
    model = Sequential()
    model.add(Dense(32, input_dim=1, init='uniform', activation='relu'))
    model.add(Dense(32, input_dim=1, init='uniform', activation='relu'))
    model.add(Dense(16, init='uniform', activation='relu'))
    model.add(Dense(1, init='uniform', activation='relu'))
    model.compile(loss='mean_squared_error', optimizer='adam', metrics=['accuracy'])
    model.fit(TrainX, TrainY, nb_epoch=100, batch_size=3, verbose=1)
# # # Our prediction for tomorrow
# # prediction = model.predict(np.array([dataset[0]]))
# # result = 'The price will move from %.2f to %.2f' % (
# # dataset[0], prediction[0][0])
# # return result
# predicted_price = predict_price(dates, prices, 29)
# print(predicted_price)
predict_prices(dates, prices, 2)
# if __name__ == "__main__":
# Ask user for a stock name
# stockName = input('Enter a stock quote: ').upper()
| StarcoderdataPython |
115041 | <reponame>mahmoudmohsen213/tsp-aco
# Input file structure:
# The first line contains exactly two integers n the number of nodes and m the
# number of edges.
# Follow, m lines, each line represent and edge and contains exactly three
# integers u the source node, v the destination node, and c the edge weight.
# The file is processed as an undirected graph, that is, a line with the contents
# "u v c" represents an edge from u to v with weight c, and also an edge from v
# to u with weight c.
# ==============================================================================
from model import Graph
# load a txt file into an adjacency list graph
def load(fileName: str) -> Graph:
    """Load a whitespace-separated edge-list file into an adjacency-list Graph.

    File format: the first two numbers are n (node count) and m (edge count),
    followed by m triples "u v c". Every edge is inserted in both directions,
    so the result is treated as an undirected graph.

    Raises:
        Exception: on invalid node count, edge count, or node ids.
    """
    print('loader: load: loading graph ' + fileName + '...')
    with open(fileName, 'r') as inputFile:
        inputData = list(map(float, inputFile.read().split()))
    n, m = int(inputData[0]), int(inputData[1])
    if(n < 1):
        raise Exception('loader: load: invalid number of nodes', n)
    # presumably m >= n-1 is required so the graph can be connected -- confirm
    if(m < (n - 1)):
        raise Exception('loader: load: invalid number of edges', m)
    print(len(inputData))  # NOTE(review): debug leftover -- prints token count
    g = Graph(n, m)
    inputIndex, edgeIndex = 2, 0
    while(edgeIndex < m):
        # Consume one "u v c" triple per iteration.
        u, inputIndex = int(inputData[inputIndex]), inputIndex + 1
        v, inputIndex = int(inputData[inputIndex]), inputIndex + 1
        c, inputIndex = inputData[inputIndex], inputIndex + 1
        if(u >= n):
            raise Exception('loader: load: invalid node id', u)
        if(v >= n):
            raise Exception('loader: load: invalid node id', v)
        # Undirected: add the edge in both directions with the same weight.
        g.addEdge(u, v, c)
        g.addEdge(v, u, c)
        edgeIndex += 1
    print('loader: load: loading graph done', g)
    return g
| StarcoderdataPython |
147410 | ##########################################################################
# Autor: WizIO 2021 <NAME>
# http://www.wizio.eu/
# https://github.com/Wiz-IO/wizio-gsm
#
# Support: Comet Electronics
# https://www.comet.bg/en
##########################################################################
from os.path import join
from SCons.Script import (AlwaysBuild, Builder, COMMAND_LINE_TARGETS, Default, DefaultEnvironment)
from colorama import Fore
import click
# SCons construction environment provided by PlatformIO.
env = DefaultEnvironment()
click.echo(
    "%s<<<<<<<<<<<< %s 2021 <NAME> >>>>>>>>>>>>"
    % ( click.style("", fg="green", bold=True), env.BoardConfig().get("name").upper() )
)
# Build the firmware ELF, convert it to a raw binary, then run MakeHeader on
# it (presumably wraps the image with a device header -- confirm in platform).
elf = env.BuildProgram()
src = env.MakeHeader( join("$BUILD_DIR", "${PROGNAME}"), env.ElfToBin(join("$BUILD_DIR", "${PROGNAME}"), elf) )
AlwaysBuild( src )
# "upload" target: autodetect the serial port, then run the upload command.
upload = env.Alias("upload", src, [
    env.VerboseAction(env.AutodetectUploadPort, "Looking for upload port..."),
    env.VerboseAction("$UPLOADCMD", "Uploading: $PROGNAME"),
    env.VerboseAction("", "Ready"),
])
AlwaysBuild( upload )
# Default build target is the headered image.
Default( src )
| StarcoderdataPython |
3286309 | <reponame>MingxuZhang/python-polar-coding
import numpy as np
from python_polar_coding.polar_codes.base import BaseDecoder
from .decoding_path import SCPath
class SCListDecoder(BaseDecoder):
    """SC List decoding.

    Maintains up to L candidate decoding paths, splitting on each
    information bit and keeping the best paths by path metric.
    """

    path_class = SCPath

    def __init__(self, n: int,
                 mask: np.array,
                 is_systematic: bool = True,
                 L: int = 1):
        super().__init__(n=n, mask=mask, is_systematic=is_systematic)
        # L is the list size: the maximum number of simultaneous paths.
        self.L = L
        self.paths = [
            self.path_class(n=n, mask=mask, is_systematic=is_systematic),
        ]

    @property
    def result(self):
        """Decoding result of every path, best path first."""
        return [path.result for path in self.paths]

    @property
    def best_result(self):
        """Result from the best path."""
        return self.result[0]

    def decode_internal(self, received_llr: np.array) -> np.array:
        """Implementation of SC List decoding: decode all N positions."""
        self._set_initial_state(received_llr)
        for pos in range(self.N):
            self._decode_position(pos)
        return self.best_result

    def _set_initial_state(self, received_llr):
        """Initialize paths with received message."""
        for path in self.paths:
            path._set_initial_state(received_llr)

    def _decode_position(self, position):
        """Single step of SC-decoding algorithm to decode one bit."""
        self.set_decoder_state(position)
        self._compute_intermediate_alpha(position)
        # Information bit: fork every path on the 0/1 decision.
        if self.mask[position] == 1:
            self._populate_paths()
        # Frozen bit: the decision is fixed to zero.
        if self.mask[position] == 0:
            self.set_frozen_value()
        self._update_paths_metrics()
        self._select_best_paths()
        self._compute_bits(position)

    def set_decoder_state(self, position):
        """Set current state of each path."""
        for path in self.paths:
            path._set_decoder_state(position)

    def _compute_intermediate_alpha(self, position):
        """Compute intermediate LLR values of each path."""
        for path in self.paths:
            path._compute_intermediate_alpha(position)

    def set_frozen_value(self):
        """Set current position to frozen values of each path."""
        for path in self.paths:
            path._current_decision = 0

    def _populate_paths(self):
        """Populate SC paths with alternative decisions."""
        new_paths = list()
        for path in self.paths:
            split_result = path.split_path()
            new_paths += split_result
        self.paths = new_paths

    def _update_paths_metrics(self):
        """Update path metric of each path."""
        for path in self.paths:
            path.update_path_metric()

    def _select_best_paths(self):
        """Select best of populated paths.

        If the number of paths is less then L, all populated paths returned.
        """
        if len(self.paths) <= self.L:
            self.paths = sorted(self.paths, reverse=True)
        else:
            self.paths = sorted(self.paths, reverse=True)[:self.L]

    def _compute_bits(self, position):
        """Compute bits of each path."""
        for path in self.paths:
            path._compute_intermediate_beta(position)
            path._update_decoder_state()
| StarcoderdataPython |
74747 | <reponame>livefire2015/DataEngineeringProject
class Config:
    """Static configuration for the news/rates scraping pipeline."""

    # Page scraped for free HTTP proxies, and a URL used to test each proxy.
    PROXY_WEBPAGE = "https://free-proxy-list.net/"
    TESTING_URL = "https://google.com"
    # Redis connection used to store working proxies under REDIS_KEY.
    REDIS_CONFIG = {
        "host": "redis",
        "port": "6379",
        "db": 0
    }
    REDIS_KEY = "proxies"
    # Thread-pool size for proxy checks and target number of proxies to keep.
    MAX_WORKERS = 50
    NUMBER_OF_PROXIES = 50
    # Football news RSS feeds, grouped by language code.
    RSS_FEEDS = {
        "en": [
            "https://www.goal.com/feeds/en/news",
            "https://www.eyefootball.com/football_news.xml",
            "https://www.101greatgoals.com/feed/",
            "https://sportslens.com/feed/",
            "https://deadspin.com/rss"
        ],
        "pl": [
            "https://weszlo.com/feed/",
            "https://sportowefakty.wp.pl/rss.xml",
            "https://futbolnews.pl/feed",
            "https://igol.pl/feed/"
        ],
        "es": [
            "https://as.com/rss/tags/ultimas_noticias.xml",
            "https://e00-marca.uecdn.es/rss/futbol/mas-futbol.xml",
            "https://www.futbolred.com/rss-news/liga-de-espana.xml",
            "https://www.futbolya.com/rss/noticias.xml"
        ],
        "de": [
            "https://www.spox.com/pub/rss/sport-media.xml",
            "https://www.dfb.de/news/rss/feed/"
        ]
    }
    # Kafka producer settings for publishing parsed news items.
    BOOTSTRAP_SERVERS = ["kafka:9092"]
    TOPIC = "rss_news"
    # Validation thresholds: minimum description length and accepted languages.
    VALIDATOR_CONFIG = {
        "description_length": 10,
        "languages": [
            "en", "pl", "es", "de"
        ]
    }
    # NY Fed reference-rate endpoints (secured vs unsecured rates).
    REFERENCE_RATES = {
        "secured": ["https://markets.newyorkfed.org/api/rates/secured/all/latest.xml"],
        "unsecured": ["https://markets.newyorkfed.org/api/rates/unsecured/all/latest.xml"]
    }
| StarcoderdataPython |
1628728 | <filename>safe_relay_service/relay/migrations/0026_auto_20200626_1531.py
# Generated by Django 3.0.7 on 2020-06-26 15:31
from django.db import migrations
class Migration(migrations.Migration):
    """Drop the unique_together constraint from SafeMultisigTx."""

    dependencies = [
        ('relay', '0025_auto_20200429_1101'),
    ]
    operations = [
        migrations.AlterUniqueTogether(
            name='safemultisigtx',
            unique_together=set(),
        ),
    ]
| StarcoderdataPython |
1799648 | # -*- coding: utf-8 -*-
from abc import abstractmethod
from typing import Optional
from PySDDP.dessem.script.templates.arquivo_entrada import ArquivoEntrada
class InfofcfTemplate(ArquivoEntrada):
    """
    Base class holding everything common to any version of the Dessem "Infofcf" file.

    Its purpose is to provide duck typing for the Dessem class and to add a level of
    specialisation inside the factory. It also passes on the responsibility of
    implementing the read/write methods to its subclasses.
    """

    def __init__(self):
        super().__init__()
        # Raw record sections of the file; the matching *_df attributes hold
        # DataFrame views of the same data once the file has been read.
        self.dir_base = None
        self.const = None
        self.mapfcf_sisgnl = None
        self.mapfcf_durpat = None
        self.fcffix_usit = None
        self.mapfcf_tviag = None
        self.mapfcf_cgtmin = None
        self.mapfcf_sisgnl_df = None
        self.mapfcf_durpat_df = None
        self.fcffix_usit_df = None
        self.mapfcf_tviag_df = None
        self.mapfcf_cgtmin_df = None
        self.infofcf = None
        # The script.arq file contains both data and file names, so two separate
        # structures are kept for this information to make writing easier.
        # {key: value}
        # key -> mnemonic or record name
        # value -> dictionaries containing:
        #   tipo: 0 for data or 1 for file names
        #   descricao: description of each mnemonic, used when printing the file
        self.dados = {
            "infofcf": {
                'descricao': 'Armazena todos os comentarios e os mneumos',
                'valor': None
            },
            "mapfcf_sisgnl": {
                'descricao': 'Identificacao dos subsistemas onde ha usinas com despacho antecipado',
                'cabecalho': "& Mnem Ind Num Nlag Npat\n"
                             "&XXXXX XXXXXX XXX XXX XXX XXX",
                'formato': "{mneumo:>14} {ind:>3} {num:>3} {lag:>3} {patamares:>3}\n",
                'valor': None
            },
            "mapfcf_durpat": {
                'descricao': 'Duracao dos patamares de carga para os periodos futuros',
                'cabecalho': "& Mnem Lag Pat Dur (h)\n"
                             "&XXXXX XXXXXX XXX XXX XXXXXXXXXX",
                'formato': "{mneumo:>14} {lag:>3} {patamar:>3} {duracao:>9}\n",
                'valor': None
            },
            "fcffix_usit": {
                'descricao': 'Valores de geracao termica sinalizada e/ou comandada para as semanas / meses alem do '
                             'horizonte de estudo do modelo dessem',
                'cabecalho': "& TpEnt IdEnt IdVar lag Pat Valor Justificativa"
                             "XXXXXX XXXXXX XXX XXXXXX XXX XXX XXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
                'formato': "{mneumo:>13} {num_ent:>3} {variavel:>6} {lag:>3} {patamar:>3} {valor:>10} {comentario:>20}\n",
                'valor': None
            },
            "mapfcf_tviag": {
                'descricao': 'Informacoes para tempos de viagem considerados pelo modelo DECOMP',
                'cabecalho': "& Mnem Ind Num\n"
                             "&XXXXX XXXXXXX XXX XXX",
                'formato': "{mneumo:>14} {ind:>3} {num:>3}\n",
                'valor': None
            },
            "mapfcf_cgtmin": {
                'descricao': 'Custo de geracao termica minima alem do horizonte de estudo',
                'cabecalho': "& Mnem Custo\n"
                             "&XXXXX XXXXXXX XXXXXXXXXXXXXXX",
                'formato': "{mneumo:>14} {custo:>15}\n",
                'valor': None
            },
        }
        # Filled in after the read method has been called; holds the file
        # names read from script.arq.
        self.nome_arquivos = list()

    @abstractmethod
    def ler(self, *args, **kwargs) -> None:
        """
        Abstract method from ArquivoEntrada, delegated to the subclasses.

        :param args: set of required parameters
        :param kwargs: set of optional parameters
        :return:
        """

    @abstractmethod
    def escrever(self, *args, **kwargs) -> None:
        """
        Abstract method from ArquivoEntrada, delegated to the subclasses.

        :param args: set of required parameters
        :param kwargs: set of optional parameters
        :return:
        """
| StarcoderdataPython |
4816011 | <filename>combine_gtfs_feeds/cli/log_controller.py
import logging
from functools import wraps
from time import time
import datetime
import os, sys, errno
import yaml
import shutil
from shutil import copy2 as shcopy
def setup_custom_logger(name, output_dir):
    """Create a logger writing to <output_dir>/run_log.txt and to the console.

    Any existing run_log.txt in output_dir is removed first so each run starts
    with a fresh log file.

    Args:
        name: logger name passed to logging.getLogger.
        output_dir: directory in which run_log.txt is (re)created.

    Returns:
        A logging.Logger configured at INFO level.
    """
    log_path = os.path.join(output_dir, "run_log.txt")
    # EAFP: attempt the removal directly instead of the racy exists()/remove() pair.
    try:
        os.remove(log_path)
    except FileNotFoundError:
        pass
    logging.basicConfig(
        filename=log_path,
        format="%(asctime)s %(message)s",
        datefmt="%m/%d/%Y %I:%M:%S %p",
    )
    logger = logging.getLogger(name)
    logger.setLevel(logging.INFO)
    # Mirror records to the console, but attach the stream handler only once so
    # repeated calls do not produce duplicate console output.
    if not any(isinstance(h, logging.StreamHandler) for h in logger.handlers):
        logger.addHandler(logging.StreamHandler())
    return logger
def timed(f):
    """Decorator logging the start and elapsed wall-clock time of *f*."""
    @wraps(f)
    def wrapper(*args, **kwds):
        log = logging.getLogger("main_logger")
        begun = datetime.datetime.now()
        log.info(" %s starting" % f.__name__)
        outcome = f(*args, **kwds)
        log.info("%s took %s" % (f.__name__, str(datetime.datetime.now() - begun)))
        return outcome
    return wrapper
| StarcoderdataPython |
47184 | <gh_stars>1-10
import torch
import numpy as np
from PIL import Image
import random
import math
import seaborn as sns
from sklearn import metrics
import matplotlib.pyplot as plt
def adjusted_classes(y_scores, threshold):
    """
    Adjust class predictions based on the prediction threshold.

    Only meaningful for binary classification problems.

    Inputs:
        y_scores (1D array-like): Scores/probabilities between 0 and 1.
        threshold (float): scores >= threshold are classified as 1.

    Returns:
        numpy array of 0s and 1s
    """
    # np.asarray generalizes the original (ndarray-only) implementation so
    # plain Python lists/tuples are accepted too; ndarray inputs pass through
    # without a copy.
    return (np.asarray(y_scores) >= threshold).astype(int)
def confusion_matrix(Ytrue, Ypred):
    """
    Display a color weighted confusion matrix for binary classification.

    Inputs:
        Ytrue: ground-truth labels.
        Ypred: predicted labels.

    Side effect: renders a seaborn heatmap and blocks on plt.show().
    """
    sns.heatmap(metrics.confusion_matrix(Ytrue, Ypred), annot=True)
    plt.ylabel("True Label")
    plt.xlabel("Predicted Label")
    plt.show()
def roc_auc(model, Xtest, Ytest):
    """
    Display the ROC curve.

    Inputs:
        model: fitted classifier accepted by sklearn.metrics.plot_roc_curve.
        Xtest, Ytest: held-out features and labels.

    Side effect: blocks on plt.show().
    """
    metrics.plot_roc_curve(model, Xtest, Ytest)
    plt.show()
def precision_recall(model, Xtest, Ytest):
    """
    Display the Precision-Recall curve.

    Inputs:
        model: fitted classifier accepted by sklearn.metrics.plot_precision_recall_curve.
        Xtest, Ytest: held-out features and labels.

    Side effect: blocks on plt.show().
    """
    metrics.plot_precision_recall_curve(model, Xtest, Ytest)
    plt.show()
class LatLonBBDraw():
    '''
    Callable that yields random fixed-size bounding boxes sampled from
    within a larger (super) bounding box.
    '''

    def __init__(self, lat_range, lon_range, dimension):
        """
        Store the sampling parameters.

        Inputs:
            lat_range (tuple of two floats): north and south boundaries
                of super bounding box.
            lon_range (tuple of two floats): west and east boundaries.
            dimension (float): width (and height) in degrees of desired
                bounding box sub samples.
        """
        self.lat_range = lat_range
        self.lon_range = lon_range
        self.dimension = dimension

    def __call__(self):
        """
        Returns:
            A length 4 tuple of floats: (west, south, east, north)
        """
        lat_lo, lat_hi = self.lat_range
        lon_lo, lon_hi = self.lon_range
        # Sample latitude first, then longitude (order matters for the RNG stream).
        south = random.uniform(lat_lo, lat_hi)
        west = random.uniform(lon_lo, lon_hi)
        return (west, south, west + self.dimension, south + self.dimension)
class LatLonDraw():
    '''
    A callable class to provide random samples of lon/lat pairs
    from within a bounding box.
    '''

    def __init__(self, lat_range, lon_range):
        """
        Create parameters for sampling function.

        Inputs:
            lat_range (tuple of two floats): north and south boundaries
                of super bounding box.
            lon_range (tuple of two floats): west and east boundaries.
        """
        self.lat_range = lat_range
        self.lon_range = lon_range

    def __call__(self):
        """
        Returns:
            A length 2 tuple of floats: (lon, lat)

        Note: the docstring previously claimed (lat, lon), but the returned
        order has always been (lon, lat), matching LatLonBBDraw's x-first
        ordering; the docstring is corrected here rather than the behavior.
        """
        lat = random.uniform(self.lat_range[0], self.lat_range[1])
        lon = random.uniform(self.lon_range[0], self.lon_range[1])
        return (lon, lat)
def resize(digits, row_size, column_size):
    """
    Resize images from input scale to row_size x column_size.

    Inputs:
        digits: iterable of 2D image arrays (convertible via PIL.Image.fromarray).
        row_size, column_size: target scale passed to PIL's resize.

    Returns:
        numpy array of the resized images.
    """
    # NOTE(review): PIL's Image.resize expects (width, height); confirm that
    # passing (row_size, column_size) in this order is intended.
    return np.array(
        [
            np.array(Image.fromarray(_).resize((row_size, column_size)))
            for _ in digits
        ]
    )
def gen_solution(test_lst, fname):
    """
    Generate csv file for Kaggle submission
    ------
    :in:
    test_lst: 1d array of (n_data), predicted test labels
    fname: string, name of output file
    """
    # Build all rows up front: a header line followed by one "Id N,label"
    # row per prediction (ids are 1-based), then write them in one call.
    rows = ['Id,Category']
    rows.extend('Id %d,%s' % (pos, label) for pos, label in enumerate(test_lst, start=1))
    with open(fname, 'w') as out:
        out.write('\n'.join(rows) + '\n')
def create_space(lat, lon, s=10):
    """Creates a s km x s km square centered on (lat, lon)"""
    # Half the side length in degrees: 500*s metres on a sphere of radius
    # 6378137 m (WGS84 equatorial radius), converted from radians to degrees.
    half_side_deg = (180 / math.pi) * (500 / 6378137) * s
    return (lat - half_side_deg, lon - half_side_deg,
            lat + half_side_deg, lon + half_side_deg)
| StarcoderdataPython |
3297969 | #Adapted from: http://stackoverflow.com/questions/32671306/how-can-i-read-keyboard-input-in-python
#usr/bin/env python
import sys
# Tries a couple of potentially built-in packages to enable retrieval of single
# characters from the keyboard: termios/tty on POSIX, msvcrt on Windows.
try:
    import tty, termios
except ImportError:
    try:
        import msvcrt
    except ImportError:
        # Neither backend exists on this platform.
        raise ImportError('getch not available')
    else:
        # Windows: msvcrt already ships a getch implementation.
        getch = msvcrt.getch
else:
    # POSIX: assuming termios/tty are available, define getch to return the
    # first keyboard key pressed.
    def getch():
        """Read a single character from stdin without waiting for Enter."""
        fd = sys.stdin.fileno()
        old_settings = termios.tcgetattr(fd)
        try:
            # Switch the terminal to raw mode so a single keypress is
            # delivered immediately, then always restore the old settings.
            tty.setraw(fd)
            ch = sys.stdin.read(1)
        finally:
            termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
        return ch
| StarcoderdataPython |
1684882 | from __future__ import absolute_import, unicode_literals
import datetime
import phonenumbers
import pytz
import regex
from abc import ABCMeta, abstractmethod
from datetime import timedelta
from enum import Enum
from ordered_set import OrderedSet
from temba_expressions import conversions
from temba_expressions.dates import DateStyle
from temba_expressions.evaluator import Evaluator, EvaluationContext, EvaluationStrategy
from temba_expressions.utils import format_json_date, parse_json_date
from .definition.flow import Action, Flow, RuleSet
from .exceptions import FlowRunException, FlowLoopException
from .utils import normalize_number
# Default template evaluator: expressions are prefixed with '@' and may only
# reference these top-level context variables.
DEFAULT_EVALUATOR = Evaluator(expression_prefix='@',
                              allowed_top_levels=('channel', 'contact', 'date', 'extra', 'flow', 'step'))
class Org(object):
    """
    An organization - carries the org-level settings (country, language,
    timezone, date style, anonymity) that influence how a flow is run.
    """

    def __init__(self, country, primary_language, timezone, date_style, is_anon):
        self.country = country
        self.primary_language = primary_language
        self.timezone = timezone
        self.date_style = date_style
        self.is_anon = is_anon

    @classmethod
    def from_json(cls, json_obj):
        """Restores an org from its JSON representation"""
        tz = pytz.timezone(json_obj['timezone'])
        style = DateStyle[json_obj['date_style'].upper()]
        return cls(json_obj['country'], json_obj['primary_language'], tz, style, json_obj['anon'])

    def to_json(self):
        """Serializes this org to JSON"""
        serialized = {'country': self.country}
        serialized['primary_language'] = self.primary_language
        serialized['timezone'] = unicode(self.timezone)
        serialized['date_style'] = self.date_style.name.lower()
        serialized['anon'] = self.is_anon
        return serialized
class Field(object):
    """
    A contact field
    """
    # can't create contact fields with these keys
    RESERVED_KEYS = ('name', 'first_name', 'phone', 'language', 'created_by', 'modified_by', 'org', 'uuid', 'groups')

    class ValueType(Enum):
        """Single-character codes used for a field's type in its JSON form"""
        TEXT = 'T'
        DECIMAL = 'N'
        DATETIME = 'D'
        STATE = 'S'
        DISTRICT = 'I'
        WARD = 'W'

        def __init__(self, code):
            self.code = code

        @classmethod
        def from_code(cls, code):
            # NOTE: iteritems() is Python 2 only; this module targets Python 2.
            for name, val in cls.__members__.iteritems():
                if code == val.code:
                    return val
            return None

    def __init__(self, key, label, value_type, is_new=False):
        # reject invalid or reserved keys, and invalid labels, up front
        if not self.is_valid_key(key):
            raise ValueError("Field key '%s' is invalid or reserved" % key)
        if not self.is_valid_label(label):
            raise ValueError("Field label '%s' is invalid" % label)
        self.key = key
        self.label = label
        self.value_type = value_type
        self.is_new = is_new  # True if the field was created during the current run

    @classmethod
    def from_json(cls, json_obj):
        """Restores a field from its JSON representation"""
        return cls(json_obj['key'], json_obj['label'], Field.ValueType.from_code(json_obj['value_type']))

    def to_json(self):
        """Serializes this field to JSON"""
        return {'key': self.key, 'label': self.label, 'value_type': self.value_type.code}

    @classmethod
    def make_key(cls, label):
        """Derives a field key from a label, e.g. "First Name" -> "first_name" """
        # NOTE(review): regex.V0 is passed positionally here, which lands in the
        # count argument of regex.sub rather than flags -- confirm this is intended.
        key = regex.sub(r'([^a-z0-9]+)', ' ', label.lower(), regex.V0).strip()
        return regex.sub(r'([^a-z0-9]+)', '_', key, regex.V0)

    @classmethod
    def is_valid_key(cls, key):
        """Whether key is lowercase alphanumeric/underscore and not reserved"""
        return regex.match(r'^[a-z][a-z0-9_]*$', key, regex.V0) and key not in cls.RESERVED_KEYS

    @classmethod
    def is_valid_label(cls, label):
        """Whether label contains only letters, digits, hyphens and spaces"""
        return regex.match(r'^[A-Za-z0-9\- ]+$', label, regex.V0)

    def __eq__(self, other):
        return self.key == other.key and self.label == other.label and self.value_type == other.value_type

    def __ne__(self, other):
        # Python 2 requires __ne__ to be defined alongside __eq__
        return not (self == other)

    def __hash__(self):
        # hash on key only, consistent with fields being keyed by their key
        return hash(self.key)
class Contact(object):
    """
    A contact that can participate in a flow
    """

    def __init__(self, uuid, name, urns, groups, fields, language):
        self.uuid = uuid
        self.name = name
        self.urns = urns  # list of ContactUrn; urns[0] is treated as highest priority
        self.groups = groups
        self.fields = fields  # dict of field key -> raw value
        self.language = language

    @classmethod
    def from_json(cls, json_obj):
        """Restores a contact from its JSON representation"""
        return cls(json_obj.get('uuid', None),
                   json_obj['name'],
                   [ContactUrn.from_string(u) for u in json_obj['urns']],
                   OrderedSet(json_obj['groups']),
                   json_obj['fields'],
                   json_obj.get('language', None))

    def to_json(self):
        """Serializes this contact to JSON"""
        return {'uuid': self.uuid,
                'name': self.name,
                'urns': [unicode(u) for u in self.urns],
                'groups': list(self.groups),
                'fields': self.fields,
                'language': self.language}

    def get_first_name(self, org):
        """Gets the first word of the contact's name, falling back to their URN display"""
        if not self.name:
            return self.get_urn_display(org)
        else:
            names = self.name.split()
            if len(names) > 1:
                return names[0]
            else:
                return self.name

    def set_first_name(self, first_name):
        """Replaces the first word of the contact's name, keeping remaining words"""
        if not self.name:
            self.name = first_name
        else:
            names = self.name.split()
            names = [first_name] + names[1:]
            self.name = " ".join(names)

    def get_display(self, org, full=False):
        """
        Gets a displayable name or URN for the contact. If available, org can be provided to avoid having to fetch it
        again based on the contact.
        """
        if self.name:
            return self.name
        elif org.is_anon:
            return self.get_anon_identifier()
        else:
            return self.get_urn_display(org=org, full=full)

    def get_urn(self, schemes=None):
        """
        Gets the highest priority matching URN for this contact
        """
        if schemes is not None:
            for urn in self.urns:
                if urn.scheme in schemes:
                    return urn
            return None
        else:
            # otherwise return highest priority of any scheme
            return self.urns[0] if self.urns else None

    def get_urn_display(self, org, scheme=None, full=False):
        """
        Gets a displayable URN for the contact. If available, org can be provided to avoid having to fetch it again
        based on the contact.
        """
        if org.is_anon:
            return self.get_anon_identifier()
        schemes = [scheme] if scheme else None
        urn = self.get_urn(schemes)
        return urn.get_display(org=org, full=full) if urn else ''

    def get_anon_identifier(self):
        # TODO where can we get the usual anon identifier from? Is UUID an ok substitute?
        return self.uuid

    def build_context(self, run, container):
        """
        Builds the evaluation context for this contact
        :param run: the current run state
        :param container: the containing evaluation context
        :return: the context
        """
        context = {
            '*': self.get_display(run.org, False),
            'name': self.name,
            'first_name': self.get_first_name(run.org),
            'tel_e164': self.get_urn_display(run.org, ContactUrn.Scheme.TEL, True),
            'groups': ",".join(self.groups),
            'uuid': self.uuid,
            'language': self.language
        }

        # add all URNs
        for scheme in ContactUrn.Scheme.__members__.values():
            context[unicode(scheme.name).lower()] = self.get_urn_display(run.org, scheme, False)

        # add all fields (datetime fields are re-rendered via the evaluation context)
        for key, raw_value in self.fields.iteritems():
            field = run.get_or_create_field(key)
            if field and field.value_type == Field.ValueType.DATETIME:
                as_datetime = conversions.to_datetime(raw_value, container)
                value = conversions.to_string(as_datetime, container)
            else:
                value = raw_value
            context[key] = value

        return context
class ContactUrn(object):
    """
    A URN for a contact (e.g. a telephone number or twitter handle)
    """

    class Scheme(Enum):
        """Supported URN schemes"""
        TEL = 1
        TWITTER = 2
        TELEGRAM = 3
        MAILTO = 4
        EXT = 5

    # shown instead of the real path for anonymous orgs
    ANON_MASK = '********'

    def __init__(self, scheme, path):
        self.scheme = scheme
        self.path = path

    @classmethod
    def from_string(cls, urn):
        """
        Parses a URN from a string
        :param urn: the string, e.g. tel:+260964153686, twitter:joe
        :return: the parsed URN
        """
        parts = urn.split(':', 2)
        scheme = ContactUrn.Scheme[parts[0].upper()]
        return ContactUrn(scheme, parts[1])

    def normalized(self, org):
        """
        Returns a normalized version of this URN
        :param org: the org
        :return: the normalized URN
        """
        norm_path = self.path.strip()
        if self.scheme == ContactUrn.Scheme.TWITTER:
            # twitter handles are lowercased and stripped of any leading @
            norm_path = norm_path.lower()
            if norm_path[0] == '@':
                norm_path = norm_path[1:]
        elif self.scheme == ContactUrn.Scheme.MAILTO:
            norm_path = norm_path.lower()
        elif self.scheme == ContactUrn.Scheme.TEL:
            # delegate number normalization (validity flag is intentionally ignored)
            norm_path, is_valid = normalize_number(norm_path, org.country)
        return ContactUrn(self.scheme, norm_path)

    def get_display(self, org, full=False):
        """
        Gets a representation of the URN for display
        """
        if org.is_anon:
            return self.ANON_MASK
        if self.scheme == ContactUrn.Scheme.TEL and not full:
            # if we don't want a full tel, see if we can show the national format instead
            try:
                if self.path and self.path[0] == '+':
                    return phonenumbers.format_number(phonenumbers.parse(self.path, None),
                                                      phonenumbers.PhoneNumberFormat.NATIONAL)
            except Exception:
                # fall through to the raw path on any parsing/formatting error
                pass
        return self.path

    def __eq__(self, other):
        return self.scheme == other.scheme and self.path == other.path

    def __unicode__(self):
        # e.g. "tel:+260964153686"
        return '%s:%s' % (unicode(self.scheme.name).lower(), self.path)
class Input(object):
    """
    A value (with a timestamp) supplied to a flow run, which rulesets may consume
    """

    def __init__(self, value, time=None):
        self.value = value
        self.time = time if time else datetime.datetime.now(tz=pytz.UTC)
        self.consumed = False

    def build_context(self, container, contact_context):
        """
        Builds the evaluation context for this input
        :param container: the evaluation context
        :param contact_context: the contact's context
        :return: the context dictionary
        """
        text = self.get_value_as_text(container)
        context = {'*': text, 'value': text}
        context['time'] = conversions.to_string(self.time, container)
        context['contact'] = contact_context
        return context

    def get_value_as_text(self, context):
        """
        Gets the input value as text which can be matched by rules
        :param context: the evaluation context
        :return: the text value
        """
        return conversions.to_string(self.value, context)

    def consume(self):
        """Marks this input as having been consumed"""
        self.consumed = True
class Location(object):
    """
    Simple location model
    """

    class Level(Enum):
        """Administrative levels, from largest to smallest"""
        STATE = 1
        DISTRICT = 2
        WARD = 3

    def __init__(self, osm_id, name, level):
        self.osm_id = osm_id  # OpenStreetMap identifier for this location
        self.name = name
        self.level = level  # a Location.Level value

    class Resolver(object):
        """Interface for resolving a location name to a Location"""
        # NOTE: Python 2 style metaclass declaration; has no effect on Python 3
        __metaclass__ = ABCMeta

        @abstractmethod
        def resolve(self, text, country, level, parent):
            """
            Resolves a location name from the given input
            :param text: the text to parse
            :param country: the 2-digit country code
            :param level: the level
            :param parent: the parent location (may be null)
            :return: the location or null if no such location exists
            """
            pass
class Step(object):
    """
    A step taken by a contact or surveyor in a flow run
    """

    def __init__(self, node, arrived_on, left_on=None, rule_result=None, actions=None, errors=None):
        self.node = node
        self.arrived_on = arrived_on
        self.left_on = left_on  # None until the contact moves on from this node
        self.rule_result = rule_result
        self.actions = actions if actions else []
        self.errors = errors if errors else []

    @classmethod
    def from_json(cls, json_obj, context):
        """Restores a step from its JSON representation"""
        return cls(context.flow.get_element_by_uuid(json_obj['node']),
                   parse_json_date(json_obj['arrived_on']),
                   parse_json_date(json_obj['left_on']),
                   RuleSet.Result.from_json(json_obj['rule'], context) if json_obj.get('rule') else None,
                   [Action.from_json(a, context) for a in json_obj['actions']],
                   json_obj['errors'])

    def to_json(self):
        """Serializes this step to JSON"""
        return {
            'node': self.node.uuid,
            'arrived_on': format_json_date(self.arrived_on),
            'left_on': format_json_date(self.left_on),
            'rule': self.rule_result.to_json() if self.rule_result else None,
            'actions': [a.to_json() for a in self.actions],
            'errors': self.errors
        }

    def add_action_result(self, action_result):
        """Records the outcome of performing an action, collecting any errors"""
        if action_result.performed:
            self.actions.append(action_result.performed)
        if action_result.errors:
            self.errors += action_result.errors

    def is_completed(self):
        """Whether the contact has left this step"""
        return self.left_on is not None
class Value(object):
    """
    Holds the result of a contact's last visit to a ruleset
    """

    def __init__(self, value, category, text, time):
        self.value = value
        self.category = category
        self.text = text
        self.time = time

    @classmethod
    def from_json(cls, json_object):
        """Restores a value from its JSON representation"""
        return cls(json_object['value'],
                   json_object['category'],
                   json_object['text'],
                   parse_json_date(json_object['time']))

    def to_json(self):
        """Serializes this value to JSON"""
        serialized = {'value': self.value, 'category': self.category, 'text': self.text}
        serialized['time'] = format_json_date(self.time)
        return serialized

    def build_context(self, container):
        """Builds the evaluation context for this value"""
        ctx = {'*': self.value}
        ctx['value'] = self.value
        ctx['category'] = self.category
        ctx['text'] = self.text
        ctx['time'] = conversions.to_string(self.time, container)
        return ctx
class RunState(object):
    """
    Represents state of a flow run after visiting one or more nodes in the flow
    """

    class State(Enum):
        """Overall status of a run"""
        IN_PROGRESS = 1
        COMPLETED = 2
        WAIT_MESSAGE = 3

    def __init__(self, org, fields, contact, flow):
        self.org = org
        self.fields = {f.key: f for f in fields}  # field key -> Field
        self.contact = contact
        self.started = datetime.datetime.now(tz=pytz.UTC)
        self.steps = []
        self.values = {}  # ruleset result key -> Value
        self.extra = {}
        self.state = RunState.State.IN_PROGRESS
        self.flow = flow

    @classmethod
    def from_json(cls, json_obj, flow):
        """
        Restores a run state from JSON
        :param json_obj: the JSON containing a serialized run state
        :param flow: the flow the run state is for
        :return: the run state
        """
        deserialization_context = Flow.DeserializationContext(flow)
        run = cls(Org.from_json(json_obj['org']),
                  [Field.from_json(f) for f in json_obj['fields']],
                  Contact.from_json(json_obj['contact']),
                  flow)
        run.started = parse_json_date(json_obj['started'])
        run.steps = [Step.from_json(s, deserialization_context) for s in json_obj['steps']]
        run.values = {k: Value.from_json(v) for k, v in json_obj['values'].iteritems()}
        run.extra = json_obj['extra']
        run.state = RunState.State[json_obj['state'].upper()]
        return run

    def to_json(self):
        """
        Serializes this run state to JSON
        """
        return {
            'org': self.org.to_json(),
            'fields': [f.to_json() for f in self.fields.values()],
            'contact': self.contact.to_json(),
            'started': format_json_date(self.started),
            'steps': [s.to_json() for s in self.steps],
            'values': {k: v.to_json() for k, v in self.values.iteritems()},
            'extra': self.extra,
            'state': self.state.name.lower()
        }

    def build_context(self, runner, input):
        """Builds the expression evaluation context for this run"""
        # our concept of now may be overridden by the runner
        now = runner.now if runner.now else datetime.datetime.now(tz=self.org.timezone)
        context = EvaluationContext({}, self.org.timezone, self.org.date_style, now)
        contact_context = self.contact.build_context(self, context)
        if input is not None:
            context.put_variable("step", input.build_context(context, contact_context))
        context.put_variable("date", self.build_date_context(context))
        context.put_variable("contact", contact_context)
        context.put_variable("extra", self.extra)
        # @flow context: one entry per ruleset value, plus a '*' summary of all of them
        flow_context = {}
        values = []
        for key, value in self.values.iteritems():
            flow_context[key] = value.build_context(context)
            values.append("%s: %s" % (key, value))
        flow_context['*'] = "\n".join(values)
        context.put_variable("flow", flow_context)
        return context

    def update_value(self, rule_set, result, time):
        """
        Updates a value in response to a rule match
        :param rule_set: the rule set
        :param result: the rule match result
        :param time: the time from the input
        :return:
        """
        # NOTE(review): this regex pattern ('[^<KEY>') looks corrupted -- likely
        # mangled during export; verify against the upstream source before relying on it.
        key = regex.sub(r'[^<KEY>', '_', rule_set.label.lower())
        self.values[key] = Value(result.value, result.category, result.text, time)

    @staticmethod
    def build_date_context(container):
        """
        Builds the date context (i.e. @date.now, @date.today, ...)
        """
        as_date = container.now.date()
        as_datetime_str = conversions.to_string(container.now, container)
        as_date_str = conversions.to_string(as_date, container)
        return {
            '*': as_datetime_str,
            'now': as_datetime_str,
            'today': as_date_str,
            'tomorrow': conversions.to_string(as_date + timedelta(days=1), container),
            'yesterday': conversions.to_string(as_date - timedelta(days=1), container)
        }

    def get_completed_steps(self):
        """
        Gets the completed steps, i.e. those where the contact left the node or a terminal node
        """
        completed = []
        for step in self.steps:
            if step.is_completed() or self.state == RunState.State.COMPLETED:
                completed.append(step)
        return completed

    def get_or_create_field(self, key, label=None, value_type=Field.ValueType.TEXT):
        """
        Gets or creates a contact field
        """
        if not key and not label:
            raise ValueError("Must provide either key or label")
        if key:
            field = self.fields.get(key)
            if field:
                return field
        else:
            # derive the key from the label
            key = Field.make_key(label)
        if not label:
            # derive a title-cased label from the key
            label = regex.sub(r'([^A-Za-z0-9\- ]+)', ' ', key, regex.V0).title()
        field = Field(key, label, value_type, is_new=True)
        self.fields[key] = field
        return field

    def get_created_fields(self):
        """Gets the fields that were created during this run"""
        return [f for f in self.fields.values() if f.is_new]
class Runner(object):
    """
    The flow runner
    """

    def __init__(self, template_evaluator=DEFAULT_EVALUATOR, location_resolver=None, now=None):
        """
        :param template_evaluator: evaluator used for @-expression templates
        :param location_resolver: optional resolver used to parse location names
        :param now: optional fixed "now" overriding the real clock (useful for tests)
        """
        self.template_evaluator = template_evaluator
        self.location_resolver = location_resolver
        self.now = now

    def start(self, org, fields, contact, flow):
        """
        Starts a new run
        :param org: the org
        :param fields: the contact fields
        :param contact: the contact
        :param flow: the flow
        :return: the run state
        """
        run = RunState(org, fields, contact, flow)
        return self.resume(run, None)

    def resume(self, run, input):
        """
        Resumes an existing run with new input
        :param run: the previous run state
        :param input: the new input
        :return: the updated run state
        """
        if run.state == RunState.State.COMPLETED:
            raise FlowRunException("Cannot resume a completed run state")
        last_step = run.steps[-1] if len(run.steps) > 0 else None
        # reset steps list so that it doesn't grow forever in a never-ending flow
        run.steps = []
        if last_step:
            current_node = last_step.node  # we're resuming an existing run
        else:
            current_node = run.flow.entry  # we're starting a new run
        if not current_node:
            raise FlowRunException("Flow has no entry point")
        # tracks nodes visited so we can detect loops
        nodes_visited = OrderedSet()
        while current_node:
            # if we're resuming a previously paused step, then use its arrived on value
            if last_step and len(nodes_visited) == 0:
                arrived_on = last_step.arrived_on
            else:
                arrived_on = datetime.datetime.now(tz=pytz.UTC)
            # create new step for this node
            step = Step(current_node, arrived_on)
            run.steps.append(step)
            # should we pause at this node?
            if isinstance(current_node, RuleSet):
                if current_node.is_pause() and (not input or input.consumed):
                    run.state = RunState.State.WAIT_MESSAGE
                    return run
            # check for an non-pausing loop
            if current_node in nodes_visited:
                raise FlowLoopException(nodes_visited)
            else:
                nodes_visited.add(current_node)
            next_node = current_node.visit(self, run, step, input)
            if next_node:
                # if we have a next node, then record leaving this one
                step.left_on = datetime.datetime.now(tz=pytz.UTC)
            else:
                # if not then we've completed this flow
                run.state = RunState.State.COMPLETED
            current_node = next_node
        return run

    def substitute_variables(self, text, context):
        """
        Performs variable substitution on the the given text
        :param text: the text, e.g. "Hi @contact.name"
        :param context: the evaluation context
        :return: the evaluated template, e.g. "Hi Joe"
        """
        return self.template_evaluator.evaluate_template(text, context)

    def substitute_variables_if_available(self, text, context):
        """
        Performs partial variable substitution on the the given text
        :param text: the text, e.g. "Hi @contact.name"
        :param context: the evaluation context
        :return: the evaluated template, e.g. "Hi Joe"
        """
        return self.template_evaluator.evaluate_template(text, context, False, EvaluationStrategy.RESOLVE_AVAILABLE)

    def parse_location(self, text, country, level, parent=None):
        """
        Parses a location from the given text
        :param text: the text containing a location name
        :param country: the 2-digit country code
        :param level: the level
        :param parent: the parent location (may be null)
        :return: the location or null if no such location exists
        """
        if self.location_resolver:
            return self.location_resolver.resolve(text, country, level, parent)
        return None

    def update_contact_field(self, run, key, value, label=None):
        """
        Updates a field on the contact for the given run
        :param run: the current run state
        :param key: the field key
        :param value: the field value
        :return the field which may have been created
        """
        field = run.get_or_create_field(key, label)
        actual_value = None
        if field.value_type in (Field.ValueType.TEXT, Field.ValueType.DECIMAL, Field.ValueType.DATETIME):
            # simple types are stored as-is
            actual_value = value
        elif field.value_type == Field.ValueType.STATE:
            state = self.location_resolver.resolve(value, run.org.country, Location.Level.STATE, None)
            if state:
                actual_value = state.name
        elif field.value_type == Field.ValueType.DISTRICT:
            # a district can only be resolved in the context of the contact's current state
            state_field = self.get_location_field(run, Field.ValueType.STATE)
            if state_field:
                state_name = run.contact.fields.get(state_field.key, None)
                if state_name:
                    state = self.location_resolver.resolve(state_name, run.org.country, Location.Level.STATE, None)
                    if state:
                        district = self.location_resolver.resolve(value, run.org.country, Location.Level.DISTRICT, state)
                        if district:
                            actual_value = district.name
        elif field.value_type == Field.ValueType.WARD:
            # a ward needs both the contact's state and district to be resolved first
            state_field = self.get_location_field(run, Field.ValueType.STATE)
            if state_field:
                state_name = run.contact.fields.get(state_field.key, None)
                if state_name:
                    state = self.location_resolver.resolve(state_name, run.org.country, Location.Level.STATE, None)
                    if state:
                        district_field = self.get_location_field(run, Field.ValueType.DISTRICT)
                        if district_field:
                            district_name = run.contact.fields.get(district_field.key, None)
                            if district_name:
                                district = self.location_resolver.resolve(district_name, run.org.country, Location.Level.DISTRICT, state)
                                if district:
                                    ward = self.location_resolver.resolve(value, run.org.country, Location.Level.WARD, district)
                                    if ward:
                                        actual_value = ward.name
        run.contact.fields[field.key] = actual_value
        return field

    def update_extra(self, run, values):
        """
        Updates the extra key values for the given run state
        :param run: the run state
        :param values: the key values
        """
        run.extra.update(values)

    def get_location_field(self, run, type):
        # TODO this mimics what we currently do in RapidPro but needs changed
        for field in run.fields.values():
            if field.value_type == type:
                return field
        return None
| StarcoderdataPython |
127722 | <filename>utility/genkey.py
"""
Generates an Admin-key and DB-key for you
Call using
`python genkey.py your-password`
"""
import sys
import re
import bcrypt
def bad_password(password):
    """
    Return True when *password* is unacceptable: shorter than 8 characters
    or containing characters outside A-Za-z0-9@#$%^&*.+=

    (The previous docstring said "checks for valid password", which read as
    the inverse of what the function returns.)
    """
    # Raw string fixes the invalid '\*' and '\.' escape sequences the original
    # non-raw literal produced (a SyntaxWarning on modern Python); inside a
    # character class '*' and '.' are literal, so the matched set is unchanged.
    return len(password) < 8 or re.match(r'^[A-Za-z0-9@#$%^&*.+=]+$', password) is None
if __name__ == '__main__':
    # Expect exactly two CLI arguments: the admin password and the DB password.
    if len(sys.argv) < 3:
        print ('Please provide 2 passwords for admin and DB key')
        exit(1)
    admin_password = (sys.argv[1]).encode()
    if bad_password(admin_password.decode()):
        print ('Password should be of minimum length 8 and should contain only A-Za-z0-9@#$%^&*.+=')
        exit(1)
    # bcrypt salts and hashes the password; the result is the admin key.
    admin_key = bcrypt.hashpw(admin_password, bcrypt.gensalt())
    db_password = (sys.argv[2]).encode()
    if bad_password(db_password.decode()):
        print ('Password should be of minimum length 8 and should contain only A-Za-z0-9@#$%^&*.+=')
        exit(1)
    db_key = bcrypt.hashpw(db_password, bcrypt.gensalt())
    print ("ADMIN_SECRET:", admin_key.decode())
    print ("DB_SECRET:", db_key.decode())
| StarcoderdataPython |
1648183 | <filename>rlpy/Domains/PacmanPackage/__init__.py<gh_stars>100-1000
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
# When reinforcement module is imported, the following submodules will be
# imported.
from future import standard_library
standard_library.install_aliases()
# Public API of the package: the Pacman game modules re-exported by
# "from ... import *".
__all__ = ["game",
           "util",
           "layout",
           "pacman",
           "graphicsDisplay",
           "ghostAgents",
           "keyboardAgents"]
3384391 | <gh_stars>1-10
import argparse
import os
import pandas as pd
from datetime import timedelta
'''
Get formatted PJM hourly metered loads.
Input: Hourly metered load data, downloaded from PJM DataMiner2 (for RTO region).
Link: https://dataminer2.pjm.com/feed/hrl_load_metered/definition
(Note: Old data is from
https://pjm.com/markets-and-operations/ops-analysis/historical-load-data.aspx)
Output: Clean hourly metered load data.
'''
def main():
    """Read PJM hourly metered load data, reformat it, and write it back out."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--save', default='formatted_data',
                            help='save folder path')
    args = arg_parser.parse_args()

    # Load the raw PJM csv, indexed on the UTC begin timestamp.
    print('getting load data')
    source_path = os.path.join('raw_data', 'hrl_load_metered.csv')
    df = pd.read_csv(source_path,
                     parse_dates=['datetime_beginning_utc'],
                     index_col='datetime_beginning_utc',
                     usecols=['datetime_beginning_utc', 'mw'])
    df.index = df.index + timedelta(hours=-5)  # TODO: currently dates are actually UTC-5
    df.index.names = ['DATE_UTC']
    df.columns = ['RTO-HrMeteredLoad']  # TODO: add units (MW)

    # Write the cleaned data out, creating the save folder if needed.
    print('saving data')
    save = args.save
    if not os.path.exists(save):
        os.makedirs(save)
    df.to_csv(os.path.join(save, 'hourly_loads.csv'))


if __name__ == '__main__':
    main()
1608552 | import unittest
from pyning.tail_recursion.exponent import loop_exp
from pyning.utils.testutils import BaseTest
class ExponentLoopTest(BaseTest):
    """Cases for the iterative exponentiation helper ``loop_exp``.

    Each method delegates to ``BaseTest.check`` with an expected result
    ``xr`` for base ``b`` and power ``p`` (presumably ``check`` asserts
    ``f(b, p) == xr`` -- confirm in pyning.utils.testutils).
    """
    def test_0_exp_3(self):
        self.check(f=loop_exp, xr=0, b=0, p=3)
    def test_1_exp_3(self):
        self.check(f=loop_exp, xr=1, b=1, p=3)
    def test_2_exp_0(self):
        self.check(f=loop_exp, xr=1, b=2, p=0)
    def test_2_exp_1(self):
        self.check(f=loop_exp, xr=2, b=2, p=1)
    def test_3_exp_3(self):
        self.check(f=loop_exp, xr=27, b=3, p=3)
    def test_3_exp_4(self):
        self.check(f=loop_exp, xr=81, b=3, p=4)
    def test_3_exp_6(self):
        self.check(f=loop_exp, xr=729, b=3, p=6)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| StarcoderdataPython |
144649 | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
def validate_failover_policies(ns):
    """ Extracts multiple space-separated failoverPolicies in regionName=failoverPriority format """
    from azure.mgmt.cosmosdb.models.failover_policy import FailoverPolicy
    parsed = []
    for entry in ns.failover_policies:
        parts = entry.split('=', 1)
        parsed.append(FailoverPolicy(parts[0], int(parts[1])))
    ns.failover_policies = parsed
def validate_locations(ns):
    """ Extracts multiple space-separated locations in regionName=failoverPriority format """
    from azure.mgmt.cosmosdb.models.location import Location
    if ns.locations is None:
        ns.locations = []
        return
    ns.locations = [
        Location(location_name=parts[0], failover_priority=int(parts[1]))
        for parts in (entry.split('=', 1) for entry in ns.locations)
    ]
def validate_ip_range_filter(ns):
    """Collapse the list of ip range filter entries into one comma-separated string."""
    if not ns.ip_range_filter:
        return
    ns.ip_range_filter = ",".join(ns.ip_range_filter)
| StarcoderdataPython |
123039 | #!/usr/bin/env python
# File name: chop_chain_joiner.py
# Author: <NAME>
# Date created: 5/24/2017
# Date last modified: 5/24/2017
# Python Version: 3.6
"""
Description:
This script 'fills in' missing residues in PDB files and creates a fixed PDB file.
In order to fill in the gaps, Modeller is used to create a homology model with the
original PDB file serving as a template and the full sequence serving as the target
sequence to model.
The process of doing this is as follows:
1. Call make_seq.py to extract the sequence from the original PDB file with missing residues.
2. Call make_alignment.py to create an alignment file, alignment.ali, between the original PDB structure
with missing residues and the full fasta sequence (usually available on PDB website).
3. Call make_model.py to create the actual homology model.
Please see the headers of each of these scripts for more specific information.
Usage: python chain_joiner.py -p pdbfile.pdb -f fastafile.fasta [options]
For example, to make a loop model of PDB code 1qg8, I would call it as:
'python chain_joiner.py -p 1qg8.pdb -f 1qg8_full_seq.fasta -a'
Input Arguments:
[optional]
-a, --automodel
The simplest method for simple comparitive modeling. Will not give
great results but suggested when many chain breaks are present. [default: True]
-f, --fixed_automodel
Builds an automodel and keeps the non-missing residues fixed,
whereas they can move in the other methods. [default: False]
-l, --loopmodel
Builds a model by refining the loop with the missing residues.
Suggested when have one small chain break in the PDB. [default: False]
Output: A set of PDB files (number depends on the chosen method)
"""
import argparse
import sys
import os
# import modules
# from chain_joiner import make_seq
# from chain_joiner import make_alignment
# from chain_joiner import make_model
from chain_joiner import make_seq
from chain_joiner import make_alignment
from chain_joiner import make_model
def main():
    """Parse command-line arguments and run the chain-joining pipeline."""
    # RawDescriptionHelpFormatter preserves the hand-formatted description below.
    parser = argparse.ArgumentParser(
        prog='make_model.py',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=
        """
    This script 'fills in' missing residues in PDB files and creates a fixed PDB file.
    In order to fill in the gaps, Modeller is used to create a homology model with the
    original PDB file serving as a template and the full sequence serving as the target
    sequence to model.
    The process of doing this is as follows:
    1. Call make_seq.py to extract the sequence from
    the original PDB file with missing residues.
    2. Call make_alignment.py to create an alignment file, alignment.ali,
    between the original PDB structure with missing residues and the full
    fasta sequence (usually available on PDB website).
    3. Call make_model.py to create the actual homology model.
    Please see the headers of each of these scripts for more specific information.
    Usage: python chain_joiner.py -p pdbfile.pdb -f fastafile.fasta [options]
    For example, to make a loop model of PDB code 1qg8, I would call it as:
    'python chain_joiner.py -p 1qg8.pdb -f 1qg8_full_seq.fasta -a'
    @author: <NAME>, <EMAIL>
    <NAME>en lab, Yale University
    REQUIREMENTS:
    Preferably Anaconda python 3 with following modules:
    argparse
    modeller
    """
    )
    parser.add_argument(
        "-p", "--pdb", help="path of the pdb file with .pdb file descriptor")
    parser.add_argument(
        "-f", "--fasta", help="path of the fasta file with .fasta file descriptor")
    parser.add_argument(
        "-a", "--automodel", help="the simplest method for simple comparitive modeling", action="store_true")
    parser.add_argument(
        "-fm", "--fixed_automodel", help="builds an automodel and keeps the non-missing residues fixed", action="store_true")
    parser.add_argument(
        "-l", "--loopmodel", help="builds a model by refining the loop with the missing residues", action="store_true")
    args = parser.parse_args()
    # join the chains
    join_chains(args.pdb, args.fasta, args.automodel, args.fixed_automodel, args.loopmodel)
def join_chains(pdb_file, fasta_file, a, fm, l):
    """Fill in missing residues of a PDB file via Modeller and collect outputs.

    Runs the three pipeline stages (sequence extraction, alignment, model
    building), then moves every generated file whose name starts with the PDB
    id into ./<pdb_id>_output/.

    @param pdb_file: path to the gapped input PDB file.
    @param fasta_file: path to the full-sequence FASTA file.
    @param a: build a simple automodel.
    @param fm: build an automodel with non-missing residues fixed.
    @param l: build a loop-refined model.
    """
    import shutil

    # Get the PDB id from the file
    pdb_id = os.path.splitext(os.path.basename(pdb_file))[0]
    # 1. Extract the sequence present in the (gapped) PDB file.
    make_seq.get_sequence(pdb_file)
    # 2. Align the gapped structure sequence against the full target sequence.
    make_alignment.align(pdb_file, pdb_id + ".seq", fasta_file)
    # 3. Build the homology model according to the chosen method flags.
    make_model.model(pdb_file, a, fm, l)

    # Make a folder for the output; exist_ok tolerates re-runs (the original
    # bare os.makedirs raised if the folder already existed).
    dir_name = './' + pdb_id + '_output/'
    os.makedirs(dir_name, exist_ok=True)

    # Get a list of all output files in the working directory.
    output_files = [filename for filename in os.listdir('.') if filename.startswith(pdb_id)]
    # Remove the output folder name itself from the list.
    if (pdb_id + '_output') in output_files:
        output_files.remove(pdb_id + '_output')

    # Move the files into the output folder.  shutil.move is a portable,
    # injection-safe replacement for the original os.system('mv ...') call.
    for file in output_files:
        try:
            shutil.move(file, dir_name)
        except (OSError, shutil.Error):
            # Best-effort, as in the original: skip files that cannot be moved.
            pass
# Script entry point.
if __name__ == "__main__":
    main()
1724145 | <filename>tailcoat/plugins/maya/publish/validate_normals_unlocked.py
import pyblish.api
from maya import cmds
class SelectNormalsLocked(pyblish.api.Action):
    """Pyblish action: select the meshes that failed the locked-normals check."""
    label = "Normals Locked"
    on = "failed"  # only offered after the validator fails
    icon = "hand-o-up"

    def process(self, context, plugin):
        # `plugin.locked` is the class-level list populated by
        # ValidateNormalsUnlocked.process in this module.
        cmds.select(plugin.locked)
class UnlockNormals(pyblish.api.Action):
    """Pyblish action: unfreeze (unlock) vertex normals on the failing meshes."""
    label = "Unlock Normals"
    on = "failed"  # only offered after the validator fails

    def process(self, context, plugin):
        # `plugin.locked` is the class-level list populated by
        # ValidateNormalsUnlocked.process in this module.
        cmds.polyNormalPerVertex(plugin.locked, unFreezeNormal=True)
class ValidateNormalsUnlocked(pyblish.api.InstancePlugin):
    """Normals of a model may not be locked

    Locked normals cause shading during interactive use to behave
    unexpectedly. No part of the pipeline takes advantage of
    the ability to lock normals.

    """

    label = "Normals Unlocked"
    order = pyblish.api.ValidatorOrder
    hosts = ["maya"]
    families = ["tailcoat.model"]
    actions = [
        pyblish.api.Category("Select"),
        SelectNormalsLocked,
        pyblish.api.Category("Fix It"),
        UnlockNormals,
    ]

    # Meshes that failed validation; class-level so the Select / Fix It
    # actions above can read it via `plugin.locked`.
    # NOTE(review): the list is shared across instances and publish runs and
    # is never cleared, so entries accumulate -- confirm this is intended.
    locked = []

    def process(self, instance):
        """Fail when any mesh in the instance has locked (frozen) vertex normals."""
        mesh_list = cmds.ls(instance,
                            type="mesh",
                            long=True,
                            noIntermediate=True)

        for mesh in mesh_list:
            faces = cmds.polyListComponentConversion(mesh, toVertexFace=True)
            locked = cmds.polyNormalPerVertex(faces,
                                              query=True,
                                              freezeNormal=True)
            # Plain `if` replaces the original expression-statement idiom
            # (`self.locked.append(mesh) if any(locked) else None`).
            if any(locked):
                self.locked.append(mesh)

        # On locked normals, indicate that validation has failed
        # with a friendly message for the user.
        assert not self.locked, (
            "Meshes found with locked normals: %s" % self.locked)

        self.log.info("The normals of \"%s\" are correct." % instance)
| StarcoderdataPython |
68430 |
import time
import random
import os
import os.path as osp
from mmseg.datasets.builder import DATASETS
from mmseg.datasets.ainno import AinnoDataset
# Semantic segmentation classes: background plus three defect categories
# (pinyin names; presumably huahen = scratch, zangwu = stain, laji = debris
# -- confirm with the dataset owner).
CLASSES = ['background', 'huahen', 'zangwu', 'laji']
# Integer label value per class, aligned with CLASSES.
LABELS = [0, 1, 2, 3]
# Display color per class, aligned with CLASSES.
PALETTE = [[0, 0, 0], [0, 0, 255], [255, 0, 0], [0, 255, 0]]
@DATASETS.register_module()
class AnjieDataset(AinnoDataset):
    """mmseg dataset for the Anjie defect data.

    Reads a `<dataset>/<split>.csv` index (image path, mask path, label) and,
    for the training split, oversamples rare defect classes and resamples OK
    images each epoch via `epoch_ops`.
    """
    def __init__(self,
                 classes=CLASSES,
                 palette=PALETTE,
                 labels=LABELS,
                 **kwargs):
        super(AnjieDataset, self).__init__(
            classes=classes,
            palette=palette,
            labels=labels,
            **kwargs)
    def _get_split_file(self):
        # CSV index listing the samples of this split.
        self._split_file = osp.join(self.data_root, '{}/{}.csv'.format(self.dataset, self.split))
        if not osp.isfile(self._split_file):
            raise ValueError('Unexist dataset _split_file: {}'.format(self._split_file))
    def _pre_class_balance(self, max_times=10):
        # Compute per-class repeat factors (capped at max_times) so that each
        # defect class contributes roughly class_avg_num samples per epoch.
        class_nums_str = ''
        class_times_str = ''
        sort_samples = []
        ng_num = 0
        for k, v in self.class_samples.items():
            class_nums_str += '{}: {}, '.format(k, len(v))
            sort_samples.append(v)
            ng_num += len(v)
        self.class_avg_num = int(ng_num / len(self.class_samples.keys()))
        self.dataset_infos['class_nums'] = class_nums_str
        # sort class sample lists ascending by size
        sort_samples.sort(key=lambda x: len(x))
        class_times = [self.class_avg_num / len(sort_samples[i]) for i in range(len(sort_samples))]
        for t in class_times:
            class_times_str += '{:.2f}, '.format(t)
        self.dataset_infos['class_times'] = class_times_str
        class_times = [min(round(t), max_times) for t in class_times] # round() to nearest integer
        # Largest class is kept as-is; smaller classes are repeated.
        self.major_smaples = sort_samples[-1]
        self.few_samples = []
        for i, t in enumerate(class_times[:-1]):
            t = max(t+1, 1)
            smaples = sort_samples[i] * t
            self.few_samples += smaples[:]
    def _epoch_balance(self, ):
        # Rebuild img_infos for the epoch: a fresh random OK subset plus the
        # oversampled few-shot classes and a slice of the majority class.
        self.ok_samples = random.sample(self.ok_ori_samples, self.ok_len)
        random.shuffle(self.major_smaples)
        self.ng_samples = self.few_samples + self.major_smaples[:self.class_avg_num]
        self.img_infos = self.ok_samples + self.ng_samples
        random.shuffle(self.img_infos)
    def info2sample(self, line):
        # Parse one CSV line "image,mask,label" into a sample dict, or None if
        # either file is missing on disk.
        _image, _mask, label = line.rstrip('\n').split(',')[:3]
        image_path = osp.join(self.data_root, _image)
        mask_path = osp.join(self.data_root, _mask)
        image_exist = self.check_image_exist(image_path)
        mask_exist = self.check_image_exist(mask_path)
        sample = None
        if image_exist and mask_exist:
            sample = dict(filename=image_path,
                          label=label,
                          ann=dict(seg_map=mask_path))
        return sample
    def update_samples(self, sample):
        # Route a parsed sample into the OK pool or its defect-class bucket.
        if not sample:
            return
        label = sample['label']
        if self.split == 'train':
            if label == 'ok' or label == 'OK':
                self.ok_ori_samples.append(sample)
            elif label not in self.class_samples.keys():
                label_init = {label: [sample]}
                self.class_samples.update(label_init)
            elif label in self.class_samples.keys():
                self.class_samples[label].append(sample)
            else:
                raise ValueError('Unkonwn label type !!!')
        elif self.split == 'val':
            if label == 'ok' or label == 'OK':
                self.ok_ori_samples.append(sample)
            else:
                self.ng_ori_samples.append(sample)
        else:
            raise ValueError('Unkonwn dataset split : {}'.format(self.split))
    def load_annotations(self, ):
        """Load the split index (or scan a test folder) and build img_infos."""
        self.ok_ori_samples = []
        self.ng_ori_samples = []
        self.class_samples = {}
        self.total_index = 0
        if not self.test_mode:
            self._get_split_file()
            if not osp.isfile(self._split_file):
                raise ValueError('Unexist dataset _split_file: {}'.format(self._split_file))
            with open(self._split_file, "r") as lines:
                lines = list(lines)
                for line in lines:
                    sample = self.info2sample(line)
                    self.update_samples(sample)
        else:
            # test_dir = osp.join(self.data_root, self.split)
            # NOTE(review): hard-coded absolute test path overrides the
            # commented-out data_root-relative one -- confirm before deploying.
            test_dir = '/root/public02/manuag/zhangshuai/data/anjie/real_data/train_split/image'
            assert osp.isdir(test_dir), test_dir
            for img_name in os.listdir(test_dir):
                sample = dict(filename=osp.join(test_dir, img_name),
                              ann=dict(seg_map=None))
                self.img_infos.append(sample)
                # self.update_samples(sample)
        ## ---------------- class balancing for the training split ---------------- ##
        self.ok_ori_len = len(self.ok_ori_samples)
        if self.split == 'train':
            self.ok_len = int(self.ok_ori_len * 0.1)
            self._pre_class_balance(max_times=10)
            self._epoch_balance()
            self.ng_len = len(self.ng_samples)
        elif self.split == 'val':
            self.ok_len = self.ok_ori_len
            self.img_infos = self.ok_ori_samples + self.ng_ori_samples
            self.ng_len = len(self.ng_ori_samples)
        elif self.split == 'test':
            self.ok_len = len(self.img_infos)
            self.ng_len = 0
        self.set_len = len(self.img_infos)
        assert self.set_len == self.ok_len + self.ng_len
        self.dataset_infos['sample_nums'] = '(OK: {}, NG: {} ,Total: {})'.format(
            self.ok_len, self.ng_len, self.set_len)
        time.sleep(1)
    def epoch_ops(self):
        """Some operations that need to be performed every n epochs. """
        self._epoch_balance()
    def __getitem__(self, idx):
        if self.test_mode:
            return self.prepare_test_img(idx)
        else:
            return self.prepare_train_img(idx)
    def __len__(self):
        return self.set_len
153344 | """
Artificial Intelligence for Humans
Volume 1: Fundamental Algorithms
Python Version
http://www.aifh.org
http://www.jeffheaton.com
Code repository:
https://github.com/jeffheaton/aifh
Copyright 2013 by <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
For more information on Heaton Research copyrights, licenses
and trademarks visit:
http://www.heatonresearch.com/copyright
"""
__author__ = 'jheaton'
import numpy as np
from rbf import RbfGaussian
class RbfNetwork(object):
    """ A RBF network is an advanced machine learning algorithm that uses a series of RBF functions to perform
    regression. It can also perform classification by means of one-of-n encoding.

    The long term memory of a RBF network is made up of the widths and centers of the RBF functions, as well as
    input and output weighting.

    http://en.wikipedia.org/wiki/RBF_network
    """

    def __init__(self, input_count, rbf_count, output_count):
        """ Create an RBF network with the specified shape.
        @param input_count: The input count.
        @param rbf_count: The RBF function count.
        @param output_count: The output count.
        """
        self.input_count = input_count
        self.output_count = output_count

        # calculate input and output weight counts
        # add 1 to output to account for an extra bias node
        input_weight_count = input_count * rbf_count
        output_weight_count = (rbf_count + 1) * output_count
        rbf_params = (input_count + 1) * rbf_count
        # Flat parameter vector: [input weights | rbf params | output weights].
        self.long_term_memory = np.zeros((input_weight_count + output_weight_count + rbf_params), dtype=float)

        self.index_input_weights = 0
        self.index_output_weights = input_weight_count + rbf_params

        self.rbf = {}

        # default the Rbf's to gaussian
        # COMPAT FIX: `range` replaces Python-2-only `xrange` (identical
        # iteration behavior on both interpreters).
        for i in range(0, rbf_count):
            rbf_index = input_weight_count + ((input_count + 1) * i)
            self.rbf[i] = RbfGaussian(input_count, self.long_term_memory, rbf_index)

    def compute_regression(self, input):
        """ Compute the output for the network.
        @param input: The input pattern.
        @return: The output pattern.
        """
        # first, compute the output values of each of the RBFs
        # Add in one additional RBF output for bias (always set to one).
        rbf_output = [0] * (len(self.rbf) + 1)
        # bias
        rbf_output[len(rbf_output) - 1] = 1.0

        for rbfIndex in range(0, len(self.rbf)):
            # weight the input
            weighted_input = [0] * len(input)
            for inputIndex in range(0, len(input)):
                memory_index = self.index_input_weights + (rbfIndex * self.input_count) + inputIndex
                weighted_input[inputIndex] = input[inputIndex] * self.long_term_memory[memory_index]

            # calculate the rbf
            rbf_output[rbfIndex] = self.rbf[rbfIndex].evaluate(weighted_input)

        # Second, calculate the output, which is the result of the weighted result of the RBF's.
        result = [0] * self.output_count

        for outputIndex in range(0, len(result)):
            sum_value = 0
            for rbfIndex in range(0, len(rbf_output)):
                # add 1 to rbf length for bias
                memory_index = self.index_output_weights + (outputIndex * (len(self.rbf) + 1)) + rbfIndex
                sum_value += rbf_output[rbfIndex] * self.long_term_memory[memory_index]
            result[outputIndex] = sum_value

        # finally, return the result.
        return result

    def reset(self):
        """
        Reset the network to a random state.
        """
        for i in range(0, len(self.long_term_memory)):
            self.long_term_memory[i] = np.random.uniform(0, 1)

    def compute_classification(self, input):
        """ Compute the output and return the index of the output with the largest value. This is the class that
        the network recognized.
        @param input: The input pattern.
        @return: The index of the recognized class.
        """
        output = self.compute_regression(input)
        return output.index(max(output))

    # Backward-compatible alias preserving the historical typo in the API.
    compure_classification = compute_classification

    def copy_memory(self, source):
        """ Copy the specified vector into the long term memory of the network.
        @param source: The source vector.
        """
        for i in range(0, len(source)):
            self.long_term_memory[i] = source[i]
1637378 | # coding: utf-8
from __future__ import unicode_literals
from itertools import chain
from .private import format_arg, format_kwarg
class GentyArgs(object):
    """
    Store args and kwargs for use in a genty-generated test.
    """

    def __init__(self, *args, **kwargs):
        super(GentyArgs, self).__init__()
        self._args = args
        self._kwargs = kwargs

    @property
    def args(self):
        """Return tuple of positional arguments to be passed to the test."""
        return self._args

    @property
    def kwargs(self):
        """Return dictionary of keyword arguments to be passed to the test."""
        return self._kwargs

    def __iter__(self):
        """Allow iterating over the argument list.

        First, yield value of args in given order.

        Then yield kwargs in sorted order, formatted as key_equals_value.
        """
        # COMPAT FIX: dict.iteritems() exists only on Python 2;
        # sorted(d.items()) is equivalent and works on Python 2 and 3.
        sorted_kwargs = sorted(self._kwargs.items())
        return chain(
            (format_arg(arg) for arg in self._args),
            (format_kwarg(k, v) for k, v in sorted_kwargs),
        )
def genty_args(*args, **kwargs):
    """Bundle positional and keyword arguments for a @genty_dataset entry.

    The wrapped test is invoked with exactly the arguments given here, which
    is convenient for tests with many or optional parameters::

        @genty_dataset(
            genty_args('a1', 'b1', 1, 'd1'),
            genty_args('a2', 'b2', d='d2'),
        )
        def test_function(a, b, c=0, d=None):
            ...

    A name suffix is derived for each call by concatenating the positional
    values followed by the keyword pairs (as ``name_equals_value``), e.g.
    ``test_function('a1', 'b1', 1, 'd1')`` and
    ``test_function('a2', 'b2', d='d2')``.

    :param args:
        Ordered arguments that should be sent to the test.
    :type args:
        `tuple` of varies
    :param kwargs:
        Keyword arguments that should be sent to the test.
    :type kwargs:
        `dict` of `unicode` to varies
    """
    return GentyArgs(*args, **kwargs)
| StarcoderdataPython |
172679 | from django.apps import AppConfig
class Games(AppConfig):
    """Django application configuration for the games app."""
    name = 'games'
    # Human-readable name shown in the Django admin.
    verbose_name = '24h du Jeu'
1770151 | import collections
import numpy as np
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
import sarnet_td3.common.ops as ops
# Recurrent state carried between steps: a single "memory" tensor.
# (The field spec is the plain string "memory", which namedtuple accepts.)
RRLCellTuple = collections.namedtuple("RRLCellTuple", ("memory"))
class RRLCell(tf.compat.v1.nn.rnn_cell.RNNCell):
    """Recurrent relational/attention cell (TF1 graph mode).

    Each step attends over per-agent keys with the calling agent's query,
    reads from the per-agent values against the carried memory, and returns
    (attention weights, new memory).

    Input:
    Query: Query from the agent "i" recurrent cell
    [batchSize, query_units]
    Keys: Current representation of the observation from each agent
    [batchSize, n, query_units]
    numAgents: number of agents
    [batchSize]
    batchSize: Tensor Scalar
    [batchSize]
    Values: Representation of observation of all agents
    [batchSize, n, mem_units]
    """
    def __init__(self, train, args, reuse=None):
        # `args` is the experiment config namespace; `train` toggles
        # batch-norm training mode in read().
        self.args = args
        self.num_agents = self.args.num_adversaries
        # Keep-probabilities for the dropout applied in read().
        self.dropouts = {}
        self.dropouts["memory"] = self.args.memory_dropout
        self.dropouts["read"] = self.args.read_dropout
        self.dropouts["write"] = self.args.write_dropout
        self.train = train
        self.reuse = reuse
        self.none = tf.zeros((1, 1), dtype=tf.float32)
        # Scaled dot-product attention denominator, sqrt(d_k).
        self.attn_scale = np.sqrt(self.args.query_units)

    """Cell State Size"""
    @property
    def state_size(self):
        return RRLCellTuple(self.args.value_units)

    """Cell output size. No outputs used for now"""
    @property
    def output_size(self):
        return self.args.value_units

    """
    The Control Unit
    Input:
    queryInput: external input to the control unit (RNN output of specific agent)
    [batchSize, query_units]
    Keys: Observation embeddings from all agents
    [batchSize, n, query_units]
    num_agents: Total number of agents in the reasoning operation
    query: previous query control hidden state value
    [batchSize, query_units]
    Returns:
    New control state
    [batchSize, #agents]
    """
    def control(self, query, keys, reuse=None):
        """Scaled attention of `query` over per-agent `keys` -> [batch, #agents]."""
        with tf.compat.v1.variable_scope("control", reuse=reuse):
            dim = self.args.query_units
            # Broadcast query against every agent's key: [batch, #agents, dim].
            interactions = tf.expand_dims(query, axis=-2) * keys
            # Multiplies Q * K and reduces across the agent dimension
            # Input: [batch, #agents, dim] -> [batch, #agents] through a linear transformation
            if not self.args.tar_attn:
                logits = ops.inter2logits(interactions, dim)
            else:
                logits = ops.inter2logits(interactions, dim, sumMod="SUM")
            attention = tf.nn.softmax(logits / self.attn_scale)
            return attention

    """
    The Read Unit
    Input:
    valueBase: [?, n, mem_size]
    memory: [?, mem_size]
    query: [?, query_units]
    Returns:
    Information: [?, mem_size]
    """
    def read(self, valueBase, memory, query, reuse=None):
        """Attention-weighted read of `valueBase` conditioned on `memory`."""
        with tf.compat.v1.variable_scope("read", reuse=reuse):
            # memory dropout
            newMemory = memory
            if self.args.memory_dropout:
                newMemory = tf.nn.dropout(memory, self.dropouts["memory"])
            # Convert memory dim from [batch, dim] to [batch, #agents, dim]
            newMemory = tf.expand_dims(newMemory, axis=-2)
            newMemory = tf.zeros_like(valueBase) + newMemory
            interactions = newMemory * valueBase
            # Perform Linear{(Memory * Value) + Value}
            if self.args.FeedInteractions:
                interactions = tf.add(interactions, valueBase)
                interactions = ops.linear(interactions, self.args.value_units, self.args.value_units, name="interactAfterAdd", reuse=reuse)
            else:
                # Perform Linear(Memory * Value) + Value
                interactions = tf.add(ops.linear(interactions, self.args.value_units, self.args.value_units, name="interactBeforeAdd", reuse=reuse), valueBase, name="ValueMemSUM")
            # Query: [batch, #agents], Inter: [batch, #agents, dim]
            # Output: [batch, dim]
            readInformation = ops.att2Smry(query, interactions)
            dim = self.args.value_units
            # Optionally concatenate the previous memory before the final
            # projection (doubles the projection's input width).
            if self.args.FeedOldMemory:
                dim += dim
                readInformation = tf.concat([readInformation, memory], axis=-1)
            # read dropout
            if self.args.read_dropout:
                readInformation = tf.nn.dropout(readInformation, self.dropouts["read"])
            readInformation = ops.linear(readInformation, dim, self.args.value_units, name="finalMemory", reuse=reuse)
            if self.args.memoryBN:
                newMemory = tf.contrib.layers.batch_norm(readInformation, decay=self.args.bnDecay, center=self.args.bnCenter,
                                                         scale=self.args.bnScale, is_training=self.train,
                                                         updates_collections=None, reuse=reuse)
            else:
                newMemory = readInformation
            return newMemory

    def __call__(self, inputs, state, scope=None):
        """One cell step: returns (attention over agents, new memory state)."""
        scope = scope or type(self).__name__
        with tf.compat.v1.variable_scope(scope, reuse=self.reuse):
            memory = state
            query, keys, values = inputs
            # Reshape keys/values to [agent, batch, dim] to [batch, agent, dim]
            keys = tf.stack(keys, axis=-2)
            values = tf.stack(values, axis=-2)
            ## Control unit Output: [batch, #agents]
            newAttn = self.control(query, keys)
            ## Read Unit [batch, dim]
            info = self.read(values, memory, newAttn)
            newState = info
            return newAttn, newState
| StarcoderdataPython |
19814 | from __future__ import print_function
import warnings
import numpy as np
C4 = 261.6 # Hz (middle C)
piano_max = 4186.01 # Hz (highest piano key, C8)
piano_min = 27.5000 # Hz (lowest piano key, A0)
# Public API of this module.
__all__ = ['cent_per_value','get_f_min','get_f_max','FrequencyScale']
def cent_per_value(f_min, f_max, v_min, v_max):
    """Return the scale resolution in cents per y-unit.

    Cents are a logarithmic unit of tone intervals
    (https://en.wikipedia.org/wiki/Cent_(music)); one octave is 1200 cents.

    Parameters
    ----------
    f_min, f_max : float
        Frequency range of the scale.
    v_min, v_max : float
        Data-value range mapped onto that frequency range.

    Returns
    -------
    float
        Cents per y-unit.
    """
    octaves = np.log2(f_max / f_min)
    return 1200 * octaves / (v_max - v_min)
def get_f_min(f_max, cents_per_value, v_min, v_max):
    """Return the minimum frequency consistent with a given scale.

    Given the top frequency ``f_max``, the resolution ``cents_per_value``
    (cents per y-unit; see https://en.wikipedia.org/wiki/Cent_(music)) and
    the data range [``v_min``, ``v_max``], compute the frequency mapped to
    ``v_min``.

    Returns
    -------
    float
        Minimum frequency.
    """
    total_cents = (v_max - v_min) * cents_per_value
    return f_max / 2 ** (total_cents / 1200)
def get_f_max(f_min, cents_per_value, v_min, v_max):
    """Return the maximum frequency consistent with a given scale.

    Given the bottom frequency ``f_min``, the resolution ``cents_per_value``
    (cents per y-unit; see https://en.wikipedia.org/wiki/Cent_(music)) and
    the data range [``v_min``, ``v_max``], compute the frequency mapped to
    ``v_max``.

    Returns
    -------
    float
        Maximum frequency.
    """
    total_cents = (v_max - v_min) * cents_per_value
    return f_min * 2 ** (total_cents / 1200)
class FrequencyScale(object):
    """Map a range of data values onto a logarithmic frequency scale.

    Exactly two of ``frequency_min``, ``frequency_max`` and
    ``cents_per_value`` must be supplied; the third is derived.  Cents are a
    logarithmic unit of tone intervals
    (https://en.wikipedia.org/wiki/Cent_(music)).

    Parameters
    ----------
    value_min, value_max : float
        Data-value range to map.
    frequency_min, frequency_max : float, optional
        Frequency range of the scale.
    cents_per_value : float, optional
        Scale resolution in cents per y-unit.
    verbose : bool
        Toggle diagnostic printing.
    """

    def __init__(self, value_min, value_max,
                 frequency_min=None, frequency_max=None, cents_per_value=None,
                 verbose=False):

        if verbose:
            print('initial vals (fmin, fmax, vmin, vmax):',
                  frequency_min, frequency_max, value_min, value_max)

        # Record which of the three interdependent inputs were given.
        # IDIOM FIX: compare against None with `is (not) None` rather than
        # the original `!= None` / `== None` (PEP 8; `!=` invokes __ne__ and
        # can misbehave for array-like arguments).
        self.y_inputs = []
        if frequency_min is not None:
            self.y_inputs.append('frequency_min')
        if frequency_max is not None:
            self.y_inputs.append('frequency_max')
        if cents_per_value is not None:
            self.y_inputs.append('cents_per_value')
        self.y_n_inputs = len(self.y_inputs)

        # raising exception if anything other than two inputs were given
        if self.y_n_inputs != 2:
            raise Exception('Frequency takes 2 of the frequency_min, frequency_max, and cents_per_value inputs. You inputted {} inputs, which were {}.'.format(
                self.y_n_inputs, self.y_inputs))

        # frequency_min and frequency_max input case
        if cents_per_value is None:
            cents_per_value = cent_per_value(frequency_min, frequency_max,
                                             value_min, value_max)
        # cents_per_value and frequency_max input case
        if frequency_min is None:
            frequency_min = get_f_min(frequency_max, cents_per_value,
                                      value_min, value_max)
        # cents_per_value and frequency_min input case
        if frequency_max is None:
            frequency_max = get_f_max(frequency_min, cents_per_value,
                                      value_min, value_max)

        self.y_value_min = value_min
        self.y_value_max = value_max
        self.y_frequency_max = frequency_max
        self.y_frequency_min = frequency_min
        self.y_cents_per_value = cents_per_value

        # Warn (don't fail) when the resulting range leaves the piano keyboard
        # or the value range is inverted.
        if self.y_frequency_max > piano_max:
            warnings.warn('Your maximum frequency of {} Hz is above a pianos maximum of {} Hz.'.format(
                np.round(self.y_frequency_max, 2), piano_max))
        if self.y_frequency_min < piano_min:
            warnings.warn('Your minimum frequency of {} Hz is below a pianos minimum of {} Hz.'.format(
                np.round(self.y_frequency_min, 2), piano_min))
        if self.y_value_min > self.y_value_max:
            warnings.warn('Min y value is greater than max y value.')

        if verbose:
            print('initial vals (f_min, f_max, y_min, y_max):', self.y_frequency_min,
                  self.y_frequency_max, self.y_value_min, self.y_value_max)

        def freq(v):
            # Frequency corresponding to a single data value on this scale.
            return self.y_frequency_min * \
                2 ** ((v - self.y_value_min) * self.y_cents_per_value / 1200)
        # Vectorized translation helper: list of values -> list of frequencies.
        self.y_freq_translate_to_range = lambda array: list(map(freq, array))

        if verbose:
            print('Frequency Scale Built')
| StarcoderdataPython |
1640775 | from .base import Field
class ReferenceField(Field):
    """Base class for fields whose values reference other field types."""

    @classmethod
    def make_property(cls, name):
        """Build a property that stores validated values in ``__values__``."""
        def _get(self):
            return self.__values__[name]

        def _set(self, value):
            field = self.__fields__.get(name)
            self.__values__[name] = field.create(value)

        return property(fget=_get, fset=_set)

    @classmethod
    def is_serializable(cls):
        return True

    def create(self, value=None):
        # Subclasses must provide their own value construction.
        raise NotImplementedError
class ListValue(list):
    """A list that validates each appended item against a reference type."""

    def __init__(self, ref_type):
        super(ListValue, self).__init__()
        self.ref_type = ref_type

    def append(self, item):
        # Validate before delegating to the built-in list append.
        self.ref_type.validate(item)
        list.append(self, item)
class ListField(ReferenceField):
    """Field holding a homogeneous list of ``ref_type`` values."""

    # NOTE(review): the shared mutable default `default=[]` is preserved for
    # interface compatibility; it is only iterated, never mutated, here.
    def __init__(self, ref_type, default=[]):
        super(ListField, self).__init__()
        self.ref_type = ref_type
        self.default = default

    def get_initial(self):
        return self.create()

    def create(self, value=None):
        validated = ListValue(ref_type=self.ref_type)
        for entry in (value or self.default):
            validated.append(entry)
        return validated

    def serialize(self, value):
        return [self.ref_type.serialize(entry) for entry in value]

    def deserialize(self, value):
        return [self.ref_type.deserialize(entry) for entry in value]
class StructValue(object):
    """Instance of a struct field; holds one value per sub-field."""

    def __init__(self):
        # Initialize every declared sub-field with its initial value.
        self.__values__ = {
            name: field.get_initial()
            for name, field in self.__fields__.items()
        }

    def serialize(self):
        return self.struct_field.serialize(self)

    @classmethod
    def deserialize(cls, value):
        return cls.struct_field.deserialize(value)
class StructField(ReferenceField):
    """Field whose value is a struct (object with one named sub-field each).

    A dedicated StructValue subclass is generated per StructField instance by
    ``_make_class``; each sub-field becomes a validating property on it.
    """
    def __init__(self, **fields):
        # Keep only the keyword arguments that are actual Field instances.
        self.__fields__ = {}
        for name, field in fields.items():
            if isinstance(field, Field):
                self.__fields__[name] = field
        self._make_class()

    def _make_class(self):
        # Build the concrete value class: one property per sub-field plus a
        # copy of the field mapping.
        new_dct = {}
        new_dct['__fields__'] = {}
        for attr in self.__fields__:
            field = self.__fields__.get(attr)
            new_dct['__fields__'][attr] = field
            new_dct[attr] = field.make_property(attr)

        self.klass = type('StructValueInst', (StructValue,), new_dct)
        # Back-reference so generated instances can serialize themselves.
        self.klass.struct_field = self

    def create(self, value=None):
        # NOTE(review): `value` is ignored; a fresh empty struct is always
        # returned -- confirm whether populating from `value` was intended.
        return self.klass()

    def get_initial(self):
        return self.create()

    def serialize(self, value):
        # Struct -> plain dict of serialized sub-field values.
        result = {}
        for name, field in self.__fields__.items():
            if field.is_serializable():
                result[name] = field.serialize(getattr(value, name))
        return result

    def deserialize(self, value):
        # Plain dict -> struct; keys absent from `value` keep their initials.
        result = self.create()
        for name, field in self.__fields__.items():
            if name in value and field.is_serializable():
                setattr(result, name, field.deserialize(value.get(name)))
        return result
| StarcoderdataPython |
3220807 | import torch
import torch.nn as nn
class Discriminator2(nn.Module):
    """Bilinear discriminator scoring positive/negative pairs against a context.

    Each embedding is scored against a context tensor via a learned bilinear
    form (one logit per pair), as used in DGI-style contrastive objectives.
    """

    def __init__(self, n_h):
        super(Discriminator2, self).__init__()
        # Bilinear form producing a single logit per (embedding, context) pair.
        self.f_k = nn.Bilinear(n_h, n_h, 1)

        for m in self.modules():
            self.weights_init(m)

    def weights_init(self, m):
        """Xavier-initialize bilinear weights and zero the bias."""
        if isinstance(m, nn.Bilinear):
            torch.nn.init.xavier_uniform_(m.weight.data)
            if m.bias is not None:
                m.bias.data.fill_(0.0)

    def forward(self, c, h_pl, h_mi, s_bias1=None, s_bias2=None):
        """Score positive (h_pl) and negative (h_mi) samples against context c.

        All three tensors share the same shape -- presumably
        (batch, n, n_h); TODO confirm against the caller.  Optional s_bias1 /
        s_bias2 are added to the positive / negative scores respectively.
        Returns logits of shape (batch, 2 * n): positives then negatives.
        """
        # (Removed dead commented-out unsqueeze/expand code from the original.)
        c_x = c

        # Squeeze the trailing singleton logit dim: (batch, n, 1) -> (batch, n).
        sc_1 = torch.squeeze(self.f_k(h_pl, c_x), 2)
        sc_2 = torch.squeeze(self.f_k(h_mi, c_x), 2)

        if s_bias1 is not None:
            sc_1 += s_bias1
        if s_bias2 is not None:
            sc_2 += s_bias2

        logits = torch.cat((sc_1, sc_2), 1)

        return logits
| StarcoderdataPython |
3207856 | <gh_stars>1-10
# This file was *autogenerated* from the file matrices.sage
from sage.all_cmdline import * # import sage library
_sage_const_3 = Integer(3); _sage_const_32 = Integer(32); _sage_const_16 = Integer(16); _sage_const_5 = Integer(5)#!/usr/bin/env sage
from generatormatrix import *
import os
import errno
def print_to_file(instance, data):
    """Compute one exported quantity for *instance* and write it to a file.

    The output goes to ``output_data/RMFE_<k>_<e>/<data>_<k>_<e>.txt``.

    Parameters
    ----------
    instance : twostepinstance
        RMFE instance whose matrices/polynomials are exported.
    data : str
        Name of the quantity to export; must be a key of the dispatch
        table below.

    Raises
    ------
    ValueError
        If ``data`` is not a recognised quantity name.
    """
    # Map each quantity to a zero-argument thunk so only the requested
    # matrix is computed.  The original built a dict of *results*, which
    # recomputed every matrix on every call, and on an unknown key wrote
    # the repr of its fallback lambda to the file instead of an error.
    # (The original dict also listed 'phi_matrix_bits' twice; the later
    # generatormatrixphi_bits entry won, which is what is kept here.)
    dispatch = {
        'phi_matrix_elements_H': lambda: generatormatrixphi(instance),
        'phi_matrix_bits': lambda: generatormatrixphi_bits(instance),
        'psi_matrix': lambda: generatormatrixpsi(instance),
        'S_o_psi_matrix': lambda: generatormatrixS_o_psi(instance),
        'h_poly': lambda: instance.h,
        'Ker_S_o_psi_basis': lambda: KerS_o_psi(instance),
        'Ker_psi_basis': lambda: Kerpsi(instance),
        'phi_of_one': lambda: phi_of_one(instance),
    }
    if data not in dispatch:
        raise ValueError(
            'Second argument should be one of the following' + str(list(dispatch)))
    option = dispatch[data]()
    filename = ('output_data/' + 'RMFE' + '_' + str(instance.k) + '_' + str(instance.e)
                + '/' + data + '_' + str(instance.k) + '_' + str(instance.e) + '.txt')
    if not os.path.exists(os.path.dirname(filename)):
        try:
            os.makedirs(os.path.dirname(filename))
        except OSError as exc:
            # Another process may have created the directory meanwhile.
            if exc.errno != errno.EEXIST:
                raise
    # 'with' guarantees the handle is closed even if the write raises.
    with open(filename, "w") as filet:
        filet.write(str(option))
#BUG TO BE CORRECTED! The following does not seem to work well when List_instances contains more than one instance, in some cases where k1=3. When List_instances only contains one instance, or several instances with k1=2, it seems to work well.
#List_instances=[twostepinstance(2,4,4,8),twostepinstance(2,3,8,16),twostepinstance(2,3,9,17),twostepinstance(2,4,8,16),twostepinstance(2,4,16,32),twostepinstance(3,5,16,32),twostepinstance(3,6,16,32),
#twostepinstance(3,8,16,32),twostepinstance(3,5,33,65)]
# Export every quantity in List_data for every RMFE instance.
# NOTE(review): List_data contains 'phi_matrix_bits' twice, so that file is
# generated twice per instance — confirm whether one entry was meant to be a
# different quantity.
List_instances=[twostepinstance(_sage_const_3 ,_sage_const_5 ,_sage_const_16 ,_sage_const_32 )]
List_data=['phi_matrix_bits', 'psi_matrix', 'S_o_psi_matrix', 'h_poly', 'phi_matrix_bits', 'Ker_S_o_psi_basis', 'Ker_psi_basis', 'phi_of_one', 'phi_matrix_elements_H']
for instance in List_instances:
    for data in List_data:
        print_to_file(instance,data)
| StarcoderdataPython |
3262957 | <reponame>geohazards-tep/dcs-sar-flood-tool<gh_stars>1-10
# vim: ts=8:expandtab:cpo+=I:fo+=r
"""
cache
=====
Library for caching functions and method outputs.
Two caching methods are provided:
LRU store the last N results of the function/method
Persistent store results on a file
This two methods can be used separetaly or chained (first level LRU, on miss try persistent).
For caching instance methods based on instance state (e.g. attributes value) define its __hash__
method using hashParameters.
Provides
cache decorator for caching using a two-level (in memory and persistent) cache
quantizeParameters function for easily quantizing parameters (allows to use results from similar but different parameters)
hashParameters function for creating hash of a set of quantized parameter
registerCache register a symbolic name for a persistent cache
"""
try:
from thread import allocate_lock as Lock
except:
try:
from _thread import allocate_lock as Lock
except:
from _dummy_thread import allocate_lock as Lock
from functools import update_wrapper
from collections import namedtuple
import atexit
################################################################################
### Parameters quantization and hashing for persistent method caching
################################################################################
def quantizeParameters(*args):
    """Quantize each parameter according to a per-parameter rule.

    Every argument is a 3-tuple ``(value, bins, option)``:

    * ``bins is None`` -- keep ``value`` unchanged (``option`` ignored).
    * ``bins`` is a ``slice`` -- quantize onto the grid ``start + k*step``;
      ``option`` is ``'floor'``, ``'ceil'``, ``'round'`` or ``'index'``,
      optionally prefixed with ``'w'`` to wrap the grid position modulo
      ``stop - start`` first.
    * ``bins`` is a sequence -- with ``option='exact'`` require membership;
      otherwise snap to a bin edge via ``numpy.digitize`` using
      ``'floor'``/``'ceil'``/``'index'`` (again with an optional ``'w'``
      prefix, which wraps ``value`` modulo ``bins[1] - bins[0]``).

    Out-of-range values under sequence ``'floor'``/``'ceil'`` yield ``None``.

    Returns
    -------
    list
        The quantized parameters, in argument order.

    Raises
    ------
    ValueError
        For an unknown option, or a failed ``'exact'`` membership check.
    """
    from sys import maxsize
    from math import floor, ceil

    quantized = []
    for value, bins, option in args:
        if bins is None:
            quantized.append(value)
            continue
        if isinstance(bins, slice):
            start = float(0 if bins.start is None else bins.start)
            stop = float(maxsize if bins.stop is None else bins.stop)
            step = float(1 if bins.step is None else bins.step)
            pos = (value - start) / step
            if option.startswith('w'):
                pos %= (stop - start)  # wrap onto [0, stop - start)
                option = option[1:]
            if option == 'floor':
                quantized.append(floor(pos) * step + start)
            elif option == 'ceil':
                quantized.append(ceil(pos) * step + start)
            elif option == 'round':
                quantized.append(floor(pos + 0.5) * step + start)
            elif option == 'index':
                quantized.append(floor(pos))
            else:
                raise ValueError("Unknown option: %s" % option)
        elif option == 'exact':
            if value not in bins:
                raise ValueError("Unadmittable value: %s not in %s" % (value, bins))
            quantized.append(value)
        else:
            from numpy import digitize
            if option.startswith('w'):
                # Wrap modulo the first bin width, re-anchored at bins[0].
                value = ((value - bins[0]) % (bins[1] - bins[0])) + bins[0]
                option = option[1:]
            idx = digitize([value], bins)[0]
            if option == 'floor':
                quantized.append(bins[idx - 1] if idx > 0 else None)
            elif option == 'ceil':
                quantized.append(bins[idx] if idx < len(bins) else None)
            elif option == 'index':
                quantized.append(idx - 1)
            else:
                raise ValueError("Unknown option: %s" % option)
    return quantized
def hashParameters(*args):
    """Hash a list of parameters after quantization.

    Arguments take the same ``(value, bins, option)`` form accepted by
    ``quantizeParameters``; the quantized values are stringified, joined
    with NUL separators, and the hash of that string is returned.

    See Also
    --------
    quantizeParameters
    """
    quantized = quantizeParameters(*args)
    return hash('\x00'.join(str(item) for item in quantized))
def quantized(*args):
    """Generate a decorator for quantizing arguments
    Parameters
    ----------
    args: 2-sequence like (None|sequence|slice, None|string)
        The first element indicates the way to quantize the parameter.
        The second one specifies some options.
        One pair is given per positional parameter of the wrapped function.
        See quantizeParameters for more details.
    Returns
    -------
    A function decorator
    See Also
    --------
    quantizeParameters
    Examples
    --------
    >>> @quantized(([1,3,4], 'floor'))
    ... def f(a): return a
    >>> x = list(range(-1,6)) + [100]
    >>> list(zip(x, map(f, x)))
    [(-1, None), (0, None), (1, 1), (2, 1), (3, 3), (4, 4), (5, 4), (100, 4)]
    """
    bins, option = zip(*args)
    def decorating_function(user_function):
        from inspect import getargspec
        # NOTE(review): inspect.getargspec was removed in Python 3.11;
        # getfullargspec is the forward-compatible replacement.
        try:
            # Look through previous decorators to count real parameters.
            f = user_function.__wrapped__
        except:
            f = user_function
        nArgs = len(getargspec(f).args)
        defaults = f.__defaults__
        def wrapper(*wargs):
            # Pad the call with trailing default values so every declared
            # parameter has a value to quantize.  This can over-append when
            # all arguments were supplied, but zip() below truncates the
            # pairing to the number of quantization specs.
            if len(wargs) - nArgs < len(defaults or ()):
                wargs = wargs + defaults[len(wargs) - nArgs:]
            values = quantizeParameters(*zip(wargs, bins, option))
            return user_function(*values)
        return update_wrapper(wrapper, user_function)
    return decorating_function
################################################################################
### Two-level persistent caching
################################################################################
_cache = {}
def registerCache(ID, filename=None, livesync=False):
    """Associate a symbolic name to a persistent (on-file) cache.
    Parameters
    ----------
    ID: string
        Symbolic name to use for the newly created persistent cache.
    filename: string, optional
        Path of the file onto save cached results (defaults to argv[0]).
    livesync: bool, optional
        Whether to update the file each time a new result is generated
        (if True can slow down execution).
    Results
    -------
    cache: percache.Cache
        The newly created persistent cache. It is also added to the list of known caches.
    See Also
    --------
    cache: decorator for caching function outputs using a LRU cache + a persistent cache
    """
    from sys import argv
    from percache import Cache
    #assert ID not in _cache
    # Re-registering an existing ID silently returns the existing cache;
    # the filename/livesync arguments are then ignored.
    if not ID in _cache:
        cacheExtension = '.cache'
        if filename is None:
            filename = argv[0]
        if not filename.endswith(cacheExtension):
            filename += cacheExtension
        # Entries are keyed by repr(hash(key)): distinct arguments whose
        # hashes collide would share a cache slot.
        # NOTE(review): confirm this collision risk is acceptable.
        _cache[ID] = Cache(filename, livesync=livesync, repr=lambda x:repr(hash(x)))
    return _cache[ID]
@atexit.register
def unregisterCache(*args):
    """Close registered persistent caches and forget them.

    Parameters
    ----------
    *args: strings, optional
        IDs of the caches to close.  With no arguments every registered
        cache is closed.  Registered with atexit so all caches are
        flushed at interpreter shutdown.
    """
    # .items() (not the Python-2-only .iteritems()) keeps this importable
    # on Python 3, which the import shims at the top of the module target;
    # list(...) lets us delete from _cache while iterating.
    for ID, c in list(_cache.items()):
        if len(args) == 0 or ID in args:
            c.close()
            # Only forget the caches that were actually closed.  The
            # original cleared the whole registry unconditionally, leaking
            # still-open handles when a subset of IDs was given.
            del _cache[ID]
def cache(lrusize=None, persistent=False, ID='__default__'):
    """Generate a decorator for caching function outputs using a LRU cache + a persistent cache
    Parameters
    ----------
    lrusize: int, optional
        size (in number of elements) of in-memory cached results.
        If 0, no in-memory caching is performed.
        If None, cache in memory forever (no LRU behaviour)
    persistent: bool, optional
        If True, create a second level cache on disk which stores results between sessions.
        If False, cache only in memory (results are not stored between sessions).
        It is advisable to set a finite lrusize if persistent is True.
    ID: string, optional
        Symbolic name of the persistent cache to be used. Unused if persistent is False.
    Returns
    -------
    decorator: callable
        A decorator for caching.
    See Also
    --------
    registerCache: function for associating a symbolic name to a persistent cache
    """
    if lrusize is not None:
        try:
            lrusize = int(lrusize)
        except TypeError:
            # Fixed: the original used `% (__name__,)*2`, which (because %
            # binds before *) formatted two %s placeholders with a single
            # argument, so this error path itself raised the wrong TypeError.
            raise TypeError("Wrong lrusize argument, maybe you used @%s instead of @%s()?"
                            % ((__name__,) * 2))
    lru = lru_cache(maxsize=lrusize, typed=False)
    if persistent:
        if ID not in _cache:
            if ID == '__default__':
                # Lazily create the default persistent cache on first use.
                registerCache('__default__')
            else:
                raise ValueError("Unknown ID: %s" % ID)
        def wrapper(f):
            # Two levels: LRU in memory in front of the percache layer.
            rit = lru(_cache[ID](f))
            rit.percache_clear = lambda: _cache[ID].clear() or rit.cache_clear()
            rit.percache_info = _cache[ID].stats
            return rit
    else:
        def wrapper(f):
            # Memory-only: percache_* aliases map onto the LRU equivalents.
            rit = lru(f)
            rit.percache_clear = rit.cache_clear
            rit.percache_info = rit.cache_info
            return rit
    return wrapper
################################################################################
### LRU Cache function decorator
################################################################################
# The following functions are extracted from functool for Python3.4.0a0 and adapted for Python2.7
# See http://docs.python.org/dev/library/functools.html
_CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"])
class _HashedSeq(list):
    """A list that caches the hash of the tuple it was built from, so the
    (possibly expensive) hash is computed only once per cache key."""

    __slots__ = 'hashvalue'

    def __init__(self, tup, hash=hash):
        self[:] = tup
        self.hashvalue = hash(tup)

    def __hash__(self):
        return self.hashvalue
def _make_key(args, kwds, typed,
              kwd_mark = (object(),),
              fasttypes = {int, str, frozenset, type(None)},
              sorted=sorted, tuple=tuple, type=type, len=len):
    'Make a cache key from optionally typed positional and keyword arguments'
    # kwd_mark is a unique sentinel separating positional from keyword
    # parts; the extra default arguments bind globals as locals for speed
    # (standard CPython idiom, mirrored from functools).
    key = args
    if kwds:
        sorted_items = sorted(kwds.items())
        key += kwd_mark
        for item in sorted_items:
            key += item
    if typed:
        key += tuple(type(v) for v in args)
        if kwds:
            key += tuple(type(v) for k, v in sorted_items)
    elif len(key) == 1 and type(key[0]) in fasttypes:
        # A single fast-type argument is its own key (skips _HashedSeq).
        return key[0]
    return _HashedSeq(key)
def lru_cache(maxsize=128, typed=False):
    """Least-recently-used cache decorator.
    If *maxsize* is set to None, the LRU features are disabled and the cache
    can grow without bound.
    If *typed* is True, arguments of different types will be cached separately.
    For example, f(3.0) and f(3) will be treated as distinct calls with
    distinct results.
    Arguments to the cached function must be hashable.
    View the cache statistics named tuple (hits, misses, maxsize, currsize)
    with f.cache_info(). Clear the cache and statistics with f.cache_clear().
    Access the underlying function with f.__wrapped__.
    See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
    """
    # Users should only access the lru_cache through its public API:
    # cache_info, cache_clear, and f.__wrapped__
    # The internals of the lru_cache are encapsulated for thread safety and
    # to allow the implementation to change (including a possible C version).
    # Constants shared by all lru cache instances:
    sentinel = object() # unique object used to signal cache misses
    make_key = _make_key # build a key from the function arguments
    PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields
    def decorating_function(user_function):
        cache = {}
        hits = misses = currsize = 0
        full = False
        cache_get = cache.get # bound method to lookup a key or return None
        lock = Lock() # because linkedlist updates aren't threadsafe
        root = [] # root of the circular doubly linked list
        root[:] = [root, root, None, None] # initialize by pointing to self
        # Python 2 has no `nonlocal`; this mutable dict emulates rebindable
        # closure state shared by the wrappers below (Python-2 backport of
        # the functools original, which uses `nonlocal`).
        nonlocals = dict(
            hits = hits,
            misses = misses,
            currsize = currsize,
            full = full,
            root = root,
        )
        if maxsize == 0:
            def wrapper(*args, **kwds):
                # no caching, just a statistics update after a successful call
                result = user_function(*args, **kwds)
                nonlocals['misses'] += 1
                return result
        elif maxsize is None:
            def wrapper(*args, **kwds):
                # simple caching without ordering or size limit
                key = make_key(args, kwds, typed)
                result = cache_get(key, sentinel)
                if result is not sentinel:
                    nonlocals['hits'] += 1
                    return result
                result = user_function(*args, **kwds)
                cache[key] = result
                nonlocals['misses'] += 1
                nonlocals['currsize'] += 1
                return result
        else:
            def wrapper(*args, **kwds):
                # size limited caching that tracks accesses by recency
                key = make_key(args, kwds, typed)
                with lock:
                    link = cache_get(key)
                    if link is not None:
                        # move the link to the front of the circular queue
                        link_prev, link_next, key, result = link
                        link_prev[NEXT] = link_next
                        link_next[PREV] = link_prev
                        last = nonlocals['root'][PREV]
                        last[NEXT] = nonlocals['root'][PREV] = link
                        link[PREV] = last
                        link[NEXT] = nonlocals['root']
                        nonlocals['hits'] += 1
                        return result
                # The user function runs outside the lock so concurrent
                # callers are not serialised on a miss.
                result = user_function(*args, **kwds)
                with lock:
                    if key in cache:
                        # getting here means that this same key was added to the
                        # cache while the lock was released. since the link
                        # update is already done, we need only return the
                        # computed result and update the count of misses.
                        pass
                    elif nonlocals['full']:
                        # use root to store the new key and result
                        nonlocals['root'][KEY] = key
                        nonlocals['root'][RESULT] = result
                        cache[key] = nonlocals['root']
                        # empty the oldest link and make it the new root
                        nonlocals['root'] = nonlocals['root'][NEXT]
                        del cache[nonlocals['root'][KEY]]
                        nonlocals['root'][KEY] = nonlocals['root'][RESULT] = None
                    else:
                        # put result in a new link at the front of the queue
                        last = nonlocals['root'][PREV]
                        link = [last, nonlocals['root'], key, result]
                        cache[key] = last[NEXT] = nonlocals['root'][PREV] = link
                        nonlocals['currsize'] += 1
                        nonlocals['full'] = (nonlocals['currsize'] == maxsize)
                    nonlocals['misses'] += 1
                return result
        def cache_info():
            """Report cache statistics"""
            with lock:
                return _CacheInfo(nonlocals['hits'], nonlocals['misses'], maxsize, nonlocals['currsize'])
        def cache_clear():
            """Clear the cache and cache statistics"""
            with lock:
                cache.clear()
                nonlocals['root'][:] = [nonlocals['root'], nonlocals['root'], None, None]
                nonlocals['hits'] = nonlocals['misses'] = nonlocals['currsize'] = 0
                nonlocals['full'] = False
        wrapper.cache_info = cache_info
        wrapper.cache_clear = cache_clear
        wrapper.__wrapped__ = user_function
        return update_wrapper(wrapper, user_function)
    return decorating_function
| StarcoderdataPython |
1604082 | import LPRLite as pr
import cv2
import os
import numpy as np
from PIL import ImageFont
from PIL import Image
from PIL import ImageDraw
def compute_iou(rec1, rec2):
    """Intersection-over-union of two axis-aligned boxes.

    Boxes are (x, y, width, height) 4-sequences: the last two entries are
    *sizes*, not corner coordinates, despite the original comment (areas
    are ``rec[2] * rec[3]`` and edges are ``rec[1] + rec[3]``).  The math
    is symmetric under swapping the two axes, so (y, x, h, w) works too.

    :return: IoU in [0, 1]; 0 when the boxes do not overlap.
    """
    area_a = rec1[2] * rec1[3]
    area_b = rec2[2] * rec2[3]

    # Intersection rectangle edges along each axis.
    left = max(rec1[1], rec2[1])
    right = min(rec1[1] + rec1[3], rec2[1] + rec2[3])
    top = max(rec1[0], rec2[0])
    bottom = min(rec1[0] + rec1[2], rec2[0] + rec2[2])

    if left >= right or top >= bottom:
        return 0
    inter = (right - left) * (bottom - top)
    return inter / (area_a + area_b - inter)
fontC = ImageFont.truetype("Font/platech.ttf", 30, 0)
def drawRectBox(image):
    """Draw a green box plus the recognised plate text for every detection.

    NOTE(review): this reads the module-level globals ``rec_plate`` /
    ``box_rect`` (populated by the loop below) and ``fontC``; it is not
    reusable outside this script.  Rects are (x, y, w, h).
    """
    img = Image.fromarray(image)
    draw = ImageDraw.Draw(img)
    for i in range(len(rec_plate)):
        x = box_rect[i][0]
        y = box_rect[i][1]
        w = box_rect[i][0] + box_rect[i][2]
        h = box_rect[i][1] + box_rect[i][3]
        # Four segments tracing the rectangle (x, y)-(w, h), 5 px wide.
        draw.line([x, y, w, y], (0, 255, 0), width=5)
        draw.line([x, y, x, h], (0, 255, 0), width=5)
        draw.line([x, h, w, h], (0, 255, 0), width=5)
        draw.line([w, y, w, h], (0, 255, 0), width=5)
        # cv2.rectangle(image, (int(box_rect[i][0]), int(box_rect[i][1])), (int(box_rect[i][0] + box_rect[i][2]), int(box_rect[i][1] + box_rect[i][3])), (0, 255, 0), 2,
        # cv2.LINE_AA)
        # Plate text rendered just above the box.
        draw.text((int(box_rect[i][0] + 1), int(box_rect[i][1] - 30)), rec_plate[i].encode("utf-8").decode('utf-8'),
                  (0, 0, 255),
                  font=fontC)
    imagex = np.array(img)
    return imagex
# Batch-recognise every image under testdata_path and write annotated
# copies to images_rec/.
testdata_path = 'image'
model = pr.LPR("model/cascade.xml", "model/model12.h5", "model/ocr_plate_all_gru.h5")
for images in os.listdir(testdata_path):
    print("图片名称:", images)
    filename = os.path.splitext(os.path.split(images)[1])[0]
    file_path = testdata_path + "/" + images
    grr = cv2.imread(file_path)
    box_rect = []
    rec_plate = []
    confidencelist = []
    remove = []
    # Keep only detections above the confidence threshold.
    for pstr, confidence, rect in model.SimpleRecognizePlateByE2E(grr):
        if confidence > 0.7:
            box_rect.append(rect)
            # rec_plate.append(pstr + " " + str(round(confidence, 2)))
            rec_plate.append(pstr)
            confidencelist.append(confidence)
            print("plate_str:", pstr)
            print("plate_confidence", str(round(confidence, 2)))
    # IoU de-duplication: for overlapping boxes, mark the lower-confidence
    # detection for removal (0.98 upper bound skips a box vs itself).
    for i in range(len(rec_plate)):
        for j in range(len(rec_plate)):
            iou = compute_iou(box_rect[i], box_rect[j])
            # print(iou)
            if iou > 0.5 and iou < 0.98:
                if confidencelist[i] < confidencelist[j]:
                    remove.append(i)
                else:
                    remove.append(j)
    print(box_rect)
    remove = list(set(remove))  # de-duplicate indices
    print(remove)
    flag = False
    # NOTE(review): the indices in `remove` refer to the pre-removal lists;
    # after the first deletion the remaining indices are stale.  The flag /
    # remove[i-1] hack below only compensates for a single shift, so three
    # or more removals will likely delete the wrong entries.
    if len(remove) < 2:
        for i in range(len(remove)):
            box_rect.remove(box_rect[remove[i]])
            rec_plate.remove(rec_plate[remove[i]])
    else:
        for i in range(len(remove)):
            if flag == False:
                box_rect.remove(box_rect[remove[i]])
                rec_plate.remove(rec_plate[remove[i]])
                flag = True
            else:
                box_rect.remove(box_rect[remove[i-1]])
                rec_plate.remove(rec_plate[remove[i-1]])
    image = drawRectBox(grr)
    cv2.imwrite('images_rec/' + filename + '.jpg', image)
| StarcoderdataPython |
1676300 | <gh_stars>0
import websocket, json, pprint, talib, numpy
import config
from binance.client import Client
from binance.enums import *
# 1-minute ETH/USDT candlestick (kline) stream.
SOCKET = "wss://stream.binance.com:9443/ws/ethusdt@kline_1m"
# Classic 14-period RSI with 70/30 overbought/oversold thresholds.
RSI_PERIOD = 14
RSI_OVERBOUGHT = 70
RSI_OVERSOLD = 30
TRADE_SYMBOL = 'ETHUSD'
TRADE_QUANTITY = 0.05
# Close-price history and position flag, mutated by on_message().
closes = []
in_position = False
client = Client(config.API_KEY, config.API_SECRET)
def order(side, quantity, symbol, order_type=ORDER_TYPE_MARKET):
    """Submit an order through the module-level Binance client.

    Prints the API response on success.  Returns True when the order was
    accepted and False when the API call raised (the exception is
    printed, not re-raised).
    """
    print("sending order")
    try:
        response = client.create_order(
            symbol=symbol, side=side, type=order_type, quantity=quantity)
    except Exception as e:
        print("an exception occured - {}".format(e))
        return False
    print(response)
    return True
def on_open(ws):
    """Websocket lifecycle callback: connection established."""
    print('opened connection')
def on_close(ws):
    """Websocket lifecycle callback: connection closed."""
    print('closed connection')
def on_message(ws, message):
    """Handle one kline event: on candle close, recompute RSI and trade."""
    global closes, in_position
    print('received message')
    json_message = json.loads(message)
    pprint.pprint(json_message)
    candle = json_message['k']
    is_candle_closed = candle['x']
    close = candle['c']
    if is_candle_closed:
        print("candle closed at {}".format(close))
        closes.append(float(close))
        # NOTE(review): `closes` grows without bound over a long session,
        # and the RSI below is recomputed over the whole history each time.
        print("closes")
        print(closes)
        if len(closes) > RSI_PERIOD:
            np_closes = numpy.array(closes)
            rsi = talib.RSI(np_closes, RSI_PERIOD)
            print("all rsis calculated so far")
            print(rsi)
            last_rsi = rsi[-1]
            print("the current rsi is {}".format(last_rsi))
            if last_rsi > RSI_OVERBOUGHT:
                if in_position:
                    print("Overbought! Sell! Sell! Sell!")
                    # put binance sell logic here
                    order_succeeded = order(SIDE_SELL, TRADE_QUANTITY, TRADE_SYMBOL)
                    if order_succeeded:
                        in_position = False
                else:
                    print("It is overbought, but we don't own any. Nothing to do.")
            if last_rsi < RSI_OVERSOLD:
                if in_position:
                    print("It is oversold, but you already own it, nothing to do.")
                else:
                    print("Oversold! Buy! Buy! Buy!")
                    # put binance buy order logic here
                    order_succeeded = order(SIDE_BUY, TRADE_QUANTITY, TRADE_SYMBOL)
                    if order_succeeded:
                        in_position = True
# Connect to the kline stream and dispatch events until interrupted.
ws = websocket.WebSocketApp(SOCKET, on_open=on_open, on_close=on_close, on_message=on_message)
ws.run_forever()
4813538 | <reponame>pioneers/PieCentral2
"""To Install: Run `pip install --upgrade google-api-python-client`"""
from __future__ import print_function
import os
import csv
import httplib2 # pylint: disable=import-error
from googleapiclient import discovery # pylint: disable=import-error,no-name-in-module
from oauth2client import client # pylint: disable=import-error
from oauth2client import tools # pylint: disable=import-error
from oauth2client.file import Storage # pylint: disable=import-error
from Utils import *
# If modifying these scopes, delete your previously saved credentials
# at ~/.credentials/sheets.googleapis.com-python-quickstart.json
SCOPES = 'https://www.googleapis.com/auth/spreadsheets'
CLIENT_SECRET_FILE = 'Sheets/client_secret.json'
APPLICATION_NAME = 'Google Sheets API Python Quickstart'
def get_credentials():
    """Gets valid user credentials from storage.
    If nothing has been stored, or if the stored credentials are invalid,
    the OAuth2 flow is completed to obtain the new credentials.

    NOTE(review): oauth2client is deprecated (google-auth is its
    replacement) — consider migrating before this stops working.

    Returns:
        Credentials, the obtained credential.
    """
    home_dir = os.path.expanduser('~')
    credential_dir = os.path.join(home_dir, '.credentials')
    if not os.path.exists(credential_dir):
        os.makedirs(credential_dir)
    credential_path = os.path.join(credential_dir,
                                   'sheets.googleapis.com-python-quickstart.json')
    store = Storage(credential_path)
    credentials = store.get()
    if not credentials or credentials.invalid:
        # No (valid) cached token: run the interactive OAuth2 flow.
        flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
        flow.user_agent = APPLICATION_NAME
        # Needed only for compatibility with Python 2.6
        credentials = tools.run_flow(flow, store)
        print('Storing credentials to ' + credential_path)
    return credentials
def get_match(match_number):
    """Fetch one match's team names/numbers, preferring the online sheet.

    Falls back to the downloaded CSV when the Sheets host is unreachable.
    """
    try:
        match = get_online_match(match_number)
    except httplib2.ServerNotFoundError:
        # Offline fallback: read the local CSV copy instead.
        match = get_offline_match(match_number)
    return match
def write_scores(match_number, blue_score, gold_score):
    """Push final scores to the online sheet; warn (don't raise) when
    the Sheets host is unreachable."""
    try:
        write_online_scores(match_number, blue_score, gold_score)
    except httplib2.ServerNotFoundError:
        # Offline: scores are simply not persisted.
        print("Unable to write to spreadsheet")
def get_online_match(match_number):
    """
    A lot of this is adapted from google quickstart.
    Takes the match number and returns a dictionary with the teams names
    and team numbers for that match.
    """
    credentials = get_credentials()
    http = credentials.authorize(httplib2.Http())
    discoveryUrl = ('https://sheets.googleapis.com/$discovery/rest?'
                    'version=v4')
    service = discovery.build('sheets', 'v4', http=http,
                              discoveryServiceUrl=discoveryUrl)
    spreadsheetId = CONSTANTS.SPREADSHEET_ID
    range_name = "Match Database!A2:J"
    spreadsheet = service.spreadsheets()  # pylint: disable=no-member
    game_data = spreadsheet.values().get(
        spreadsheetId=spreadsheetId, range=range_name).execute()
    # NOTE(review): if no row matches, row 48 is silently used; the loop
    # also keeps scanning after a hit (last match wins).  Confirm both are
    # intended — write_online_scores uses 47 as its fallback.
    row = 48
    for i, j in enumerate(game_data['values']):
        if int(j[0]) == match_number:
            row = i
    match = game_data['values'][row]
    # Columns: 2/3 blue1, 4/5 blue2, 6/7 gold1, 8/9 gold2 (num, name).
    return {"b1name" : match[3], "b1num" : match[2],
            "b2name" : match[5], "b2num" : match[4],
            "g1name" : match[7], "g1num" : match[6],
            "g2name" : match[9], "g2num" : match[8]}
def get_offline_match(match_number):
    """
    reads from the downloaded csv file in the event that the online file cannot
    be read from.

    Returns the same team-name/number dict as get_online_match.
    """
    # 'with' guarantees the file handle is closed (the original leaked it).
    with open(CONSTANTS.CSV_FILE_NAME, newline='') as csv_file:
        match_reader = csv.reader(csv_file, delimiter=' ', quotechar='|')
        matches = list(match_reader)
    # Rows were read space-delimited; rejoin and split on commas to recover
    # the comma-separated fields.
    # NOTE(review): this round-trip is lossy for fields that themselves
    # contain commas or quoted spaces — confirm the export format.
    match = matches[match_number]
    match = " ".join(match)
    match = match.split(',')
    return {"b1name" : match[3], "b1num" : match[2],
            "b2name" : match[5], "b2num" : match[4],
            "g1name" : match[7], "g1num" : match[6],
            "g2name" : match[9], "g2num" : match[8]}
# pylint: disable=too-many-locals
def write_online_scores(match_number, blue_score, gold_score):
    """
    A method that writes the scores to the sheet

    Looks up the row for match_number and writes [blue_score, gold_score]
    into columns K:L of that row.
    """
    credentials = get_credentials()
    http = credentials.authorize(httplib2.Http())
    discoveryUrl = ('https://sheets.googleapis.com/$discovery/rest?'
                    'version=v4')
    service = discovery.build('sheets', 'v4', http=http,
                              discoveryServiceUrl=discoveryUrl)
    spreadsheetId = CONSTANTS.SPREADSHEET_ID
    range_name = "Match Database!A2:J"
    spreadsheet = service.spreadsheets()  # pylint: disable=no-member
    game_data = spreadsheet.values().get(
        spreadsheetId=spreadsheetId, range=range_name).execute()
    # NOTE(review): fallback row is 47 here but 48 in get_online_match —
    # confirm which default row is intended.
    row = 47
    for i, j in enumerate(game_data['values']):
        if int(j[0]) == match_number:
            row = i
    # +2 converts the 0-based data index back to the sheet row (data
    # starts at sheet row 2).
    range_name = "'Match Database'!K" + str(row + 2) + ":L" + str(row + 2)
    score_sheets = service.spreadsheets()  # pylint: disable=no-member
    game_scores = score_sheets.values().get(
        spreadsheetId=spreadsheetId, range=range_name).execute()
    game_scores['values'] = [[blue_score, gold_score]]
    sheets = service.spreadsheets()  # pylint: disable=no-member
    sheets.values().update(spreadsheetId=spreadsheetId,
                           range=range_name, body=game_scores,
                           valueInputOption="RAW").execute()
130231 | <filename>src/applications/player/admin.py
from django.contrib import admin
# Locals Models
from .models import Player, Guild
class PlayerAdmin(admin.ModelAdmin):
    """Django admin for Player: key columns in the list, search by name."""
    list_display = ("id", "account_id", "name", "level", "exp", "last_play", "ip")
    search_fields = ["name"]
class GuildAdmin(admin.ModelAdmin):
    """Django admin for Guild: key columns in the list, search by name."""
    list_display = ("id", "name", "master", "level", "exp")
    search_fields = ["name"]
# Expose both models in the Django admin with the configurations above.
admin.site.register(Player, PlayerAdmin)
admin.site.register(Guild, GuildAdmin)
| StarcoderdataPython |
1718472 | #!/usr/local/bin/python3
# Generate and print a random password of a specified length
# using a secure RNG.
import string
import sys
import secrets
# Alphanumeric alphabet: 62 symbols, ~5.95 bits of entropy per character.
characters = string.ascii_letters + string.digits

def _requested_length(default=43):
    """Length from argv[1]; the 43-char default gives >= 256 bits of entropy."""
    try:
        return int(sys.argv[1])
    except (ValueError, IndexError):
        return default

length = _requested_length()
# Label on stderr so the password alone can be piped/captured from stdout.
print("Generated password: ", end="", file=sys.stderr, flush=True)
print("".join(secrets.choice(characters) for _ in range(length)))
| StarcoderdataPython |
3277176 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for utils."""
# This file conforms to the external style guide.
# pylint: disable=bad-indentation, g-bad-import-order
import io
import logging
import os
import shutil
import tarfile
import tempfile
import textwrap
import unittest
import docker
from fakes import fake_docker
from appstart import utils
CERT_PATH = os.path.join(os.path.dirname(os.path.dirname(__file__)),
'test_data/certs')
APP_DIR = os.path.join(os.path.dirname(__file__), 'system_tests')
class DockerTest(fake_docker.FakeDockerTestBase):
    """Test error detection in Docker build results."""

    def test_get_docker_client(self):
        # Point the client at a TLS daemon via the standard DOCKER_*
        # environment variables and check both options are wired through.
        os.environ['DOCKER_HOST'] = 'tcp://192.168.59.103:2376'
        os.environ['DOCKER_TLS_VERIFY'] = '1'
        os.environ['DOCKER_CERT_PATH'] = CERT_PATH
        dclient = utils.get_docker_client()
        self.assertIn('tls', dclient.kwargs)
        self.assertIn('base_url', dclient.kwargs)

    def test_build_from_directory(self):
        # A build should add exactly one image beyond the fakes' defaults.
        utils.build_from_directory(APP_DIR, 'test')
        self.assertEqual(len(fake_docker.images),
                         1 + len(fake_docker.DEFAULT_IMAGES))
        self.assertIn('test', fake_docker.images)

    def test_failed_build(self):
        # A failed build stream must be surfaced as AppstartAbort.
        bad_build_res = fake_docker.FAILED_BUILD_RES
        with self.assertRaises(utils.AppstartAbort):
            utils.log_and_check_build_results(bad_build_res, 'temp')

    def test_successful_build(self):
        # A clean build stream must not raise.
        good_build_res = fake_docker.BUILD_RES
        utils.log_and_check_build_results(good_build_res, 'temp')

    def test_good_version(self):
        dclient = fake_docker.FakeDockerClient()
        utils.check_docker_version(dclient)

    def test_bad_version(self):
        # Versions below the supported minimum must abort.
        dclient = fake_docker.FakeDockerClient()
        dclient.version = lambda: {'Version': '1.4.0'}
        with self.assertRaises(utils.AppstartAbort):
            utils.check_docker_version(dclient)

    def test_find_image(self):
        dclient = fake_docker.FakeDockerClient()
        fake_docker.images.append('test')
        self.assertTrue(utils.find_image('test'))
class TarTest(unittest.TestCase):
    """Test the feature in utils that deal with tarfiles."""

    def setUp(self):
        # Two small payload files: 'foo' and 'bar'.
        self.tempfile1 = tempfile.NamedTemporaryFile()
        self.tempfile1.write('foo')
        self.tempfile1.seek(0)
        self.tempfile2 = tempfile.NamedTemporaryFile()
        self.tempfile2.write('bar')
        self.tempfile2.seek(0)

    def test_make_build_context(self):
        # The build context must contain the Dockerfile's companions at
        # their requested archive paths.
        dockerfile = io.BytesIO('FROM debian'.encode('utf-8'))
        context_files = {self.tempfile1.name: 'foo.txt',
                         self.tempfile2.name: '/baz/bar.txt'}
        context = utils.make_tar_build_context(dockerfile, context_files)
        tar = tarfile.TarFile(fileobj=context)
        self.assertEqual(tar.extractfile('foo.txt').read(), 'foo')
        self.assertEqual(tar.extractfile('baz/bar.txt').read(), 'bar')

    def test_tar_wrapper(self):
        # Build an archive with two files under root/ plus explicit
        # directory entries, then exercise TarWrapper's file/dir API.
        temp = tempfile.NamedTemporaryFile()
        tar = tarfile.open(mode='w', fileobj=temp)
        tinfo1 = tar.gettarinfo(fileobj=self.tempfile1,
                                arcname='/root/baz/foo.txt')
        tar.addfile(tinfo1, self.tempfile1)
        tinfo2 = tar.gettarinfo(fileobj=self.tempfile2,
                                arcname='/root/bar.txt')
        tar.addfile(tinfo2, self.tempfile2)
        fake_root = tarfile.TarInfo('root')
        fake_root.type = tarfile.DIRTYPE
        tar.addfile(fake_root)
        fake_baz = tarfile.TarInfo('root/baz')
        fake_baz.type = tarfile.DIRTYPE
        tar.addfile(fake_baz)
        tar.close()
        temp.seek(0)
        wrapped_tar = utils.TarWrapper(tarfile.open(mode='r', fileobj=temp))
        self.assertEqual(wrapped_tar.get_file('root/bar.txt').read(), 'bar')
        self.assertEqual(wrapped_tar.get_file('root/baz/foo.txt').read(), 'foo')
        # Asking for a directory as a file must fail...
        with self.assertRaises(ValueError):
            wrapped_tar.get_file('root')
        files, dirs = wrapped_tar.list('root')
        self.assertEqual(files, ['bar.txt'])
        self.assertEqual(dirs, ['baz'])
        # ...and listing a file as a directory must fail too.
        with self.assertRaises(ValueError):
            wrapped_tar.list('root/bar.txt')
class FileCollectionTest(unittest.TestCase):
    """Tests for collecting files referenced by app.yaml static_dir handlers."""

    def setUp(self):
        # Temp tree with foo/, bar/, baz/ each holding one file; the
        # app.yaml below lists only foo and bar as static dirs, so baz's
        # file must not be collected.
        self.temp_dir = tempfile.mkdtemp()
        self.files = []
        for name in ('foo', 'bar', 'baz'):
            name_dir = os.path.join(self.temp_dir, name)
            os.mkdir(name_dir)
            file_name = os.path.join(name_dir, name + '.txt')
            with open(file_name, 'w') as f:
                f.write('example file')
            # 'baz' is excluded from the static dirs.
            if name != 'baz':
                self.files.append(file_name)
        self.config_file = os.path.join(self.temp_dir, 'app.yaml')
        with open(self.config_file, 'w') as f:
            f.write(textwrap.dedent("""\
                handlers:
                - url: foo
                  static_dir: foo
                - url: bar
                  static_dir: bar
                """))

    def tearDown(self):
        shutil.rmtree(self.temp_dir)

    def test_add_files(self):
        data = {}
        utils.add_files_from_static_dirs(data, self.config_file)
        self.assertEqual(
            data,
            dict((name, None) for name in self.files))
class LoggerTest(unittest.TestCase):
    def test_get_logger(self):
        # Smoke test: the helper must return a standard logging.Logger.
        logger = utils.get_logger()
        self.assertIsInstance(logger, logging.Logger)
if __name__ == '__main__':
unittest.main()
| StarcoderdataPython |
1624556 | from sequences.majority import *
class TestMajority:
    """Tests for majority_element: the item occupying MORE than half the
    positions, or None when no such item exists."""

    def test_given_empty_array_then_return_none(self):
        assert majority_element([]) is None

    def test_given_array_of_length_one(self):
        assert majority_element([1]) == 1

    def test_given_array_of_length_two(self):
        assert majority_element([1, 1]) == 1
        assert majority_element([1, -1]) is None

    def test_given_example_1(self):
        assert majority_element([1, 2, 5, 9, 5, 9, 5, 5, 5]) == 5

    def test_given_most_frequent_item_is_not_majority_then_return_none(self):
        # Plurality alone is not enough: 7 is most frequent but not > n/2.
        assert majority_element([3, 1, 7, 1, 3, 7, 3, 7, 1, 7, 7]) is None

    def test_given_most_frequent_item_is_majority_then_return_item(self):
        assert majority_element([3, 1, 7, 1, 3, 7, 3, 7, 1, 7, 7, 7, 7]) == 7
1777134 | """
Use Blender to render a scene to one or more image files.
"""
# Copyright (c) 2021 <NAME>. All rights reserved.
import math
import pickle
import os
import json
import sys
from typing import Optional
import bpy
import bpy.types as btypes
# TODO: there is probably a better way to find the script directory at runtime
CODE_DIRNAME = '/home/ben/code/modeling'
if CODE_DIRNAME not in sys.path:
sys.path.append(CODE_DIRNAME)
from modeling.blender import util as butil, scene as msc, materials as ms
from modeling.blender.scene import MaterialKeys as Mk
DO_RENDER = True  # NOTE(review): appears unused; main() reads DO_RENDER_KEY from config instead

# TODO: make these options
CYCLES_RENDER_SAMPLES = 128
# CYCLES_PREVIEW_SAMPLES = 32
CYCLES_PREVIEW_SAMPLES = 4

# The only light type currently supported by main().
LIGHT_SUN = 'sun'

# keys, mainly used for the config render portion of the input config
SIZE_KEY = 'size'
CENTER_KEY = 'center'
POS_SCALE_KEY = 'pos_scale'
FILM_TRANSPARENT_KEY = 'film_transparent'
LINE_THICKNESS_KEY = 'line_thickness'
ORTHO_SCALE_KEY = 'ortho_scale'
RENDER_EEVEE_USE_BLOOM_KEY = 'render_eevee_use_bloom'
RENDER_USE_EEVEE_KEY = 'render_use_eevee'
CYCLES_GPU_KEY = 'cycles_gpu'
ANIMATION_USE_EEVEE_KEY = 'animation_use_eevee'
RENDER_RESOLUTION_KEY = 'render_resolution'
ROOT_OFFSET_KEY = 'root_offset'
DO_QUIT_KEY = 'do_quit'
DO_OUTLINE_KEY = 'do_outline'
DO_RENDER_ANIMATION_KEY = 'do_render_animation'
DO_RENDER_KEY = 'do_render'
RENDER_BLENDER_KEY = 'render_blender'
WORLD_KEY = 'world'
def main(args):
    """Main program.

    args: [config_json_path, output_image_path, optional_animation_pickle].
    Builds the Blender scene described by the config (models, materials,
    camera, lights, world), optionally applies per-frame animation
    keyframes, then renders stills and/or freestyle outline views.
    """
    butil.disable_splash()
    input_filename = args[0]
    output_filename = args[1]
    if len(args) > 2:
        animation_filename = args[2]
    else:
        animation_filename = None
    output_filename_prefix = os.path.splitext(output_filename)[0]
    print('input filename: ', input_filename)
    print('output filename:', output_filename)
    print('animation input filename:', animation_filename)
    # ~~~~ load input json
    with open(input_filename, 'r') as json_file:
        config = json.load(json_file)
    # a couple of hard-coded things for now
    clip_scale = 4.0
    # some render_blender specific settings
    config_render = config[RENDER_BLENDER_KEY]
    do_render = config_render.get(DO_RENDER_KEY, True)
    do_render_animation = config_render.get(DO_RENDER_ANIMATION_KEY, False)
    do_outline = config_render.get(DO_OUTLINE_KEY, False)
    do_quit = config_render.get(DO_QUIT_KEY, True)
    root_offset = config_render.get(ROOT_OFFSET_KEY, True)
    render_resolution = config_render.get(RENDER_RESOLUTION_KEY, [1920, 1080])
    animation_use_eevee = config_render.get(ANIMATION_USE_EEVEE_KEY, False)
    render_use_eevee = config_render.get(RENDER_USE_EEVEE_KEY, False)
    render_eevee_use_bloom = config_render.get(RENDER_EEVEE_USE_BLOOM_KEY, False)
    cycles_gpu = config_render.get(CYCLES_GPU_KEY, False)
    ortho_scale = config_render.get(ORTHO_SCALE_KEY, 1.1)
    line_thickness = config_render.get(LINE_THICKNESS_KEY, 1.0)
    film_transparent = config_render.get(FILM_TRANSPARENT_KEY, True)
    scale = config_render.get(POS_SCALE_KEY, 1.5)
    center = config_render[CENTER_KEY]
    # camera distance and far clip plane are derived from the scene size
    pos = config_render[SIZE_KEY] * scale
    clip_end = config_render[SIZE_KEY] * clip_scale
    world_config = config_render.get(WORLD_KEY, {})
    root_obj_loc = (-center[0], -center[1], -center[2])
    # ~~~~ set some blender defaults
    view_space: btypes.SpaceView3D = find_space('VIEW_3D')
    view_space.shading.show_cavity = True
    bpy.context.scene.transform_orientation_slots[1].type = 'LOCAL'
    view_space.show_gizmo_object_translate = True
    # ~~~~ clear scene
    butil.delete_all_objects()
    butil.reset_scene()
    # ~~~~ create materials
    materials = config.get('materials')
    if materials is not None:
        if materials:
            parent = msc.add_model({'name': 'MATERIAL PREVIEWS', 'hide': True}, None)
        for material in materials:
            # if there is no name field, it's expected that we
            # are loading the material by name from the materials library
            if Mk.NAME in material:
                material_name = material[Mk.NAME]
            else:
                material_name = (
                    material[Mk.MATLIB][Mk.MATLIB_LIB_NAME] + ' / ' +
                    material[Mk.MATLIB][Mk.MATLIB_MAT_NAME]
                )
            # hidden preview sphere per material
            msc.add_model({
                'name': 'MATERIAL PREVIEW - ' + material_name,
                'filename': 'models/sphere.obj',
                'auto_smooth_angle': 30.0,
                'material': material,
                'props': [
                    {
                        'type': 'blender:subsurface',
                        'levels': 2,
                        'render_levels': 4,
                        'use_adaptive_subdivision': False
                    }
                ],
                'hide': True
            }, parent)
    # ~~~~ load OBJ files
    msc.PROFILER.tick('all loading')
    # for now, just load starting from the first model
    root_obj = msc.add_model(config['models'][0], None)
    if len(config['models']) > 1:
        for model in config['models'][1:]:
            msc.add_model(model, None)
    msc.PROFILER.tock('all loading')
    # blender.purge_orphans()
    # apply offset from center in configuration
    if root_offset:
        root_obj.location = root_obj_loc
    # ~~~~ special origin object
    origin_obj = bpy.data.objects.new('origin', None)
    bpy.context.scene.collection.objects.link(origin_obj)
    origin_obj.location = (0, 0, 0)
    # ~~~~ camera
    # for now assume one camera
    bpy.ops.object.camera_add()
    cam = bpy.context.object
    cam.name = 'Camera'
    cam.data.clip_end = clip_end
    msc.set_transformation(cam, config['camera']['transformation'])
    fov = config['camera'].get('fov')
    if fov is not None:
        cam.data.lens_unit = 'FOV'
        cam.data.angle = math.radians(fov)
    bpy.context.scene.camera = cam
    # ~~~~ lights
    for light_name, light in config['lights'].items():
        if light['type'] == LIGHT_SUN:
            light_obj = butil.sun(
                name=light_name,
                loc=(0.0, 0.0, 0.0),
                rot_euler=(0.0, 0.0, 0.0),
                energy=light['energy'],
                angle=light['angle']
            )
            shadow_cascade_max_distance = light.get('shadow_cascade_max_distance')
            if shadow_cascade_max_distance is not None:
                light_obj.data.shadow_cascade_max_distance = shadow_cascade_max_distance
        else:
            # NOTE(review): 'sun' is the only supported type; for any other
            # type light_obj is left unbound and set_transformation below
            # raises NameError (also, this f-string has no placeholder).
            print(f'Invalid light type')
        msc.set_transformation(light_obj, light['transformation'])
    # ~~~~ render settings
    scene = bpy.context.scene
    # set background color
    background = scene.world.node_tree.nodes['Background']
    background.inputs[0].default_value = (0.0, 0.0, 0.0, 1.0)
    scene.render.film_transparent = film_transparent
    scene.render.engine = 'CYCLES'
    butil.configure_cycles(
        scene,
        samples=CYCLES_RENDER_SAMPLES,
        preview_samples=CYCLES_PREVIEW_SAMPLES,
        gpu=cycles_gpu)
    scene.eevee.use_bloom = render_eevee_use_bloom
    scene.render.resolution_x = render_resolution[0]
    scene.render.resolution_y = render_resolution[1]
    # bpy.context.scene.render.filepath = working_dirname + '/'
    # bpy.context.scene.cycles.use_denoising = True
    scene.view_settings.look = 'Medium High Contrast'
    if world_config:
        # build the world shader graph:
        # env texture -> hue/sat -> bright/contrast -> background
        # bpy.context.space_data.shader_type = 'WORLD'
        add_node = ms.build_add_node(scene.world)
        tex_env = add_node(btypes.ShaderNodeTexEnvironment)
        tex_env.image = bpy.data.images.load(world_config['tex_environment_filepath'])
        tex_env.interpolation = 'Cubic'
        hsv = add_node(btypes.ShaderNodeHueSaturation)
        hsv.inputs['Hue'].default_value = world_config.get('hue', 0.5)
        hsv.inputs['Saturation'].default_value = world_config.get('saturation', 1.0)
        hsv.inputs['Value'].default_value = world_config.get('value', 1.0)
        bright_contrast = add_node(btypes.ShaderNodeBrightContrast)
        bright_contrast.inputs['Bright'].default_value = world_config.get('brightness', 0.0)
        bright_contrast.inputs['Contrast'].default_value = world_config.get('contrast', 0.0)
        world_output = scene.world.node_tree.nodes['World Output']
        links = [
            ((tex_env, 'Color'), (hsv, 'Color')),
            ((hsv, 'Color'), (bright_contrast, 'Color')),
            ((bright_contrast, 'Color'), (background, 'Color'))
        ]
        for link in links:
            butil.add_link(scene.world, *link)
        try:
            butil.arrange_nodes([
                add_node,
                tex_env,
                bright_contrast,
                background,
                world_output
            ])
        except:
            # node arrangement is cosmetic only; never abort the render.
            # NOTE(review): bare except also swallows KeyboardInterrupt.
            print('problem while arranging world nodes')
        # bpy.context.space_data.shader_type = 'OBJECT'
    # ~~~~ animations
    if animation_filename is not None:
        # the animation file is a stream of pickled per-frame state dicts
        states = []
        with open(animation_filename, 'rb') as animation_file:
            while True:
                try:
                    states.append(pickle.load(animation_file))
                except EOFError:
                    break
        scene = bpy.context.scene
        scene.render.fps = 60
        scene.frame_start = 0
        scene.frame_end = len(states) - 1
        for config_model in config['models']:
            obj = butil.get_obj_by_name(config_model['name'])
            obj.rotation_mode = 'QUATERNION'
        for frame, state in enumerate(states):
            # print(frame, state)
            print(frame, '/', len(states), flush=True)
            scene.frame_set(frame)
            for name, entity in state['objects'].items():
                # print('\t' + name)
                obj = butil.get_obj_by_name(name)
                if obj is not None:
                    # print('\t\t', entity)
                    msc.add_keyframes(
                        obj,
                        entity.get('transformation'),
                        entity.get('nodes'),
                        entity.get('hide'))
                    # TODO: additional beam specific stuff would be handled here
                else:
                    print('\t\tobject not found')
        scene.frame_set(0)
        # force stepwise (constant) interpolation between keyframes
        for config_model in config['models']:
            name = config_model['name']
            obj = butil.get_obj_by_name(name)
            anim_data = obj.animation_data
            if anim_data is not None:
                action = anim_data.action
                if action is not None:
                    for fcurve in action.fcurves:
                        for keyframe in fcurve.keyframe_points:
                            keyframe.interpolation = 'CONSTANT'
                else:
                    print(f'object {name} has no action')
            else:
                print(f'object {name} has no animation data')
        if do_render_animation:
            if animation_use_eevee:
                scene.render.engine = 'BLENDER_EEVEE'
            scene.render.filepath = os.path.join(output_filename_prefix, 'frames') + '/'
            bpy.ops.render.render(animation=True)
            # reset to cycles
            scene.render.engine = 'CYCLES'
    if do_render:
        # standard render
        if render_use_eevee:
            scene.render.engine = 'BLENDER_EEVEE'
        render(output_filename)
    if do_outline:
        # outline mode for schematics
        # TODO: is there a way disable rendering everything except freestyle?
        scene.render.engine = 'BLENDER_EEVEE'
        scene.render.resolution_x = 1080
        scene.render.resolution_y = 1080
        set_render_outlines(scene, line_thickness=line_thickness)
        cam.data.type = 'ORTHO'
        cam.data.clip_start = 0
        cam.data.clip_end = pos * 2.0
        cam.data.ortho_scale = config_render['size'] * ortho_scale
        # root_obj.location = (0, 0, 0)
        # one orthographic view per axis direction
        for name, cam_pos, up_dir in [
                ('pos_x', (1, 0, 0), butil.UP_Y),
                ('pos_y', (0, 1, 0), butil.UP_Y),
                ('neg_y', (0, -1, 0), butil.UP_Y),
                ('pos_z', (0, 0, 1), butil.UP_X),
                ('neg_z', (0, 0, -1), butil.UP_X)]:
            print(name)
            cam_pos = (
                cam_pos[0] * pos,
                cam_pos[1] * pos,
                cam_pos[2] * pos)
            cam.location = cam_pos
            butil.point_at(
                cam, origin_obj, butil.TRACK_NEGATIVE_Z, up_dir)
            render(output_filename_prefix + '_outline_' + name + '.png')
    if do_quit:
        butil.quit()
    msc.PROFILER.summary()
    print('', flush=True)
def set_render_outlines(scene: bpy.types.Scene, line_thickness: float) -> None:
    """set up a scene for rendering outlines using freestyle"""
    scene.use_nodes = True
    scene.render.use_freestyle = True
    scene.render.line_thickness = line_thickness
    scene.view_layers['View Layer'].freestyle_settings.as_render_pass = True
    # route the Freestyle pass straight to the composite output
    butil.add_link(
        scene,
        (scene.node_tree.nodes['Render Layers'], 'Freestyle'),
        (scene.node_tree.nodes['Composite'], 'Image'))
def render(output_filename: str) -> None:
    """render the current scene and save the result to an output file"""
    print('rendering...', end='', flush=True)
    bpy.ops.render.render()
    bpy.data.images["Render Result"].save_render(filepath=output_filename)
    print('done')
def find_space(space_type: str) -> Optional[btypes.Space]:
    """Find the first space of the given type, or None if absent.

    See https://docs.blender.org/api/current/bpy.types.Space.html for the
    list of valid space type identifiers.
    """
    matches = (
        space
        for window in bpy.context.window_manager.windows
        for area in window.screen.areas
        if area.type == space_type
        for space in area.spaces
        if space.type == space_type
    )
    return next(matches, None)
main(butil.find_args(sys.argv))
| StarcoderdataPython |
1711455 | <gh_stars>1-10
import pytest
from setuptools.config import read_configuration
import zoloto
def test_exposes_version() -> None:
    # The package must publish a __version__ attribute.
    assert hasattr(zoloto, "__version__")
def test_exposes_marker() -> None:
    # Marker must be re-exported at the package root.
    assert zoloto.Marker == zoloto.marker.Marker
def test_exposes_marker_type() -> None:
    # MarkerType must be re-exported at the package root.
    assert zoloto.MarkerType == zoloto.marker_type.MarkerType
@pytest.mark.parametrize(
    "coordinate_struct",
    ["Coordinates", "Orientation", "ThreeDCoordinates", "Spherical"],
)
def test_exposes_coordinates(coordinate_struct: str) -> None:
    # Each coordinate struct must be re-exported at the package root.
    assert getattr(zoloto, coordinate_struct) == getattr(
        zoloto.coords, coordinate_struct
    )
def test_matching_version() -> None:
    # setup.cfg metadata must agree with the package's __version__.
    config = read_configuration("setup.cfg")
    assert config["metadata"]["version"] == zoloto.__version__
| StarcoderdataPython |
161049 |
from .forest import RandomForestClassifier, RandomForestRegressor
from .boosting import GradientBoostingRegressor, GradientBoostingClassifier
from .voting import VotingClassifier
# Public names re-exported by the ensemble package.
__all__ = ['RandomForestClassifier','RandomForestRegressor','VotingClassifier',
           'GradientBoostingRegressor','GradientBoostingClassifier']
3291052 | <gh_stars>0
import os, shutil, glob, random
from wpkit.fsutil import copy_files_to
def newdir(out_dir):
    """Recreate ``out_dir`` as a fresh, empty directory.

    Any existing directory tree at that path is removed first.
    """
    if os.path.exists(out_dir):
        shutil.rmtree(out_dir)
    os.makedirs(out_dir)
def split_train_val(data_dir, train_dir, val_dir, val_split=0.1, num_val=None, ext='.jpg', shuffle=True, sort=False):
    """Copy files from ``data_dir`` into freshly created train/val dirs.

    ``num_val`` (when truthy) overrides ``val_split``; ``sort`` takes
    precedence over ``shuffle``. Both output directories are recreated
    from scratch by newdir().
    """
    newdir(train_dir)
    newdir(val_dir)
    fs = glob.glob(data_dir + '/*' + ext)
    if sort:
        fs.sort()
    elif shuffle:
        random.shuffle(fs)
    if not num_val:
        # derive the validation count from the split ratio
        num_val = int(len(fs) * val_split)
    val_files = fs[:num_val]
    train_files = fs[num_val:]
    copy_files_to(train_files, train_dir)
    copy_files_to(val_files, val_dir)
def split_train_val_imagefolder(data_dir, train_dir, val_dir, val_split=0.1, num_val=None, ext='.jpg', shuffle=True,
                                sort=False):
    """Apply split_train_val to each class subdirectory (ImageFolder layout)."""
    newdir(train_dir)
    newdir(val_dir)
    for cls in os.listdir(data_dir):
        cls_dir = data_dir + '/' + cls
        train_cls_dir = train_dir + '/' + cls
        val_cls_dir = val_dir + '/' + cls
        split_train_val(cls_dir, train_dir=train_cls_dir, val_dir=val_cls_dir, val_split=val_split, num_val=num_val,
                        ext=ext, shuffle=shuffle, sort=sort)
if __name__ == '__main__':
    # Example: 90/10 train/val split of the Stanford cars image folder.
    split_train_val_imagefolder(
        data_dir='/home/ars/disk/datasets/car_datasets/stanford/images/train',
        train_dir='/home/ars/disk/datasets/car_datasets/stanford/classify_dataset/train',
        val_dir='/home/ars/disk/datasets/car_datasets/stanford/classify_dataset/val',
        val_split=0.1
    )
33779 | <gh_stars>0
from nexpose_rest.nexpose import _GET
def getPolicies(config, filter=None, scannedOnly=None):
    """Return all policies; optional ``filter``/``scannedOnly`` query params.

    NOTE: ``filter`` shadows the builtin but cannot be renamed without
    breaking keyword callers.
    """
    getParameters=[]
    if filter is not None:
        getParameters.append('filter=' + filter)
    if scannedOnly is not None:
        getParameters.append('scannedOnly=' + scannedOnly)
    code, data = _GET('/api/3/policies', config, getParameters=getParameters)
    return data
def getPolicyRuleControls(config, policyId, ruleId):
    """Return the controls for rule ``ruleId`` of policy ``policyId``."""
    getParameters=[]
    code, data = _GET('/api/3/policies/' + str(policyId) + '/rules/' + str(ruleId) + '/controls', config, getParameters=getParameters)
    return data
def getAssetPolicyRulesSummary(config, assetId, policyId):
    """Return the rule summary of policy ``policyId`` for asset ``assetId``."""
    getParameters=[]
    code, data = _GET('/api/3/assets/' + str(assetId) + '/policies/' + str(policyId) + '/rules', config, getParameters=getParameters)
    return data
def getPolicyGroup(config, policyId, groupId):
    """Return group ``groupId`` of policy ``policyId``."""
    getParameters=[]
    code, data = _GET('/api/3/policies/' + str(policyId) + '/groups/' + str(groupId) + '', config, getParameters=getParameters)
    return data
def getPolicyRule(config, policyId, ruleId):
    """Return rule ``ruleId`` of policy ``policyId``."""
    getParameters=[]
    code, data = _GET('/api/3/policies/' + str(policyId) + '/rules/' + str(ruleId) + '', config, getParameters=getParameters)
    return data
def getPolicyRuleAssetResultProof(config, policyId, ruleId, assetId):
    """Return the proof for an asset's result against a policy rule."""
    getParameters=[]
    code, data = _GET('/api/3/policies/' + str(policyId) + '/rules/' + str(ruleId) + '/assets/' + str(assetId) + '/proof', config, getParameters=getParameters)
    return data
def getDisabledPolicyRules(config, policyId):
    """Return the disabled rules of policy ``policyId``."""
    getParameters=[]
    code, data = _GET('/api/3/policies/' + str(policyId) + '/rules/disabled', config, getParameters=getParameters)
    return data
def getPolicyChildren(config, id):
    """Return the children of policy ``id``.

    NOTE: ``id`` shadows the builtin but cannot be renamed without
    breaking keyword callers.
    """
    getParameters=[]
    code, data = _GET('/api/3/policies/' + str(id) + '/children', config, getParameters=getParameters)
    return data
def getPolicyGroups(config, policyId):
    """Return the groups of policy ``policyId``."""
    getParameters=[]
    code, data = _GET('/api/3/policies/' + str(policyId) + '/groups', config, getParameters=getParameters)
    return data
def getPolicyAssetResults(config, policyId, applicableOnly=None):
    """Return per-asset results for policy ``policyId``."""
    getParameters=[]
    if applicableOnly is not None:
        getParameters.append('applicableOnly=' + applicableOnly)
    code, data = _GET('/api/3/policies/' + str(policyId) + '/assets', config, getParameters=getParameters)
    return data
def getAssetPolicyChildren(config, assetId, policyId):
    """Return children of policy ``policyId`` in the context of an asset."""
    getParameters=[]
    code, data = _GET('/api/3/assets/' + str(assetId) + '/policies/' + str(policyId) + '/children', config, getParameters=getParameters)
    return data
def getPolicyRuleRationale(config, policyId, ruleId):
    """Return the rationale for a policy rule."""
    getParameters=[]
    code, data = _GET('/api/3/policies/' + str(policyId) + '/rules/' + str(ruleId) + '/rationale', config, getParameters=getParameters)
    return data
def getPolicyGroupRulesWithAssetAssessment(config, assetId, policyId, groupId):
    """Return a policy group's rules with the given asset's assessment."""
    getParameters=[]
    code, data = _GET('/api/3/assets/' + str(assetId) + '/policies/' + str(policyId) + '/groups/' + str(groupId) + '/rules', config, getParameters=getParameters)
    return data
def getPolicyRuleAssetResults(config, policyId, ruleId, applicableOnly=None):
    """Return per-asset results for a policy rule."""
    getParameters=[]
    if applicableOnly is not None:
        getParameters.append('applicableOnly=' + applicableOnly)
    code, data = _GET('/api/3/policies/' + str(policyId) + '/rules/' + str(ruleId) + '/assets', config, getParameters=getParameters)
    return data
def getAssetPolicyGroupChildren(config, assetId, policyId, groupId):
    """Return a policy group's children in the context of an asset."""
    getParameters=[]
    code, data = _GET('/api/3/assets/' + str(assetId) + '/policies/' + str(policyId) + '/groups/' + str(groupId) + '/children', config, getParameters=getParameters)
    return data
def getPoliciesForAsset(config, assetId, applicableOnly=None):
    """Return the policies evaluated against asset ``assetId``."""
    getParameters=[]
    if applicableOnly is not None:
        getParameters.append('applicableOnly=' + applicableOnly)
    code, data = _GET('/api/3/assets/' + str(assetId) + '/policies', config, getParameters=getParameters)
    return data
def getPolicyRuleRemediation(config, policyId, ruleId):
    """Return the remediation guidance for a policy rule."""
    getParameters=[]
    code, data = _GET('/api/3/policies/' + str(policyId) + '/rules/' + str(ruleId) + '/remediation', config, getParameters=getParameters)
    return data
def getPolicyRules(config, policyId):
    """Return all rules of policy ``policyId``."""
    getParameters=[]
    code, data = _GET('/api/3/policies/' + str(policyId) + '/rules', config, getParameters=getParameters)
    return data
def getPolicySummary(config):
    """Return the overall policy compliance summary."""
    getParameters=[]
    code, data = _GET('/api/3/policy/summary', config, getParameters=getParameters)
    return data
def getPolicyGroupAssetResult(config, policyId, groupId, assetId):
    """Return one asset's result for a policy group."""
    getParameters=[]
    code, data = _GET('/api/3/policies/' + str(policyId) + '/groups/' + str(groupId) + '/assets/' + str(assetId) + '', config, getParameters=getParameters)
    return data
def getPolicyAssetResult(config, policyId, assetId):
    """Return one asset's result for a policy."""
    getParameters=[]
    code, data = _GET('/api/3/policies/' + str(policyId) + '/assets/' + str(assetId) + '', config, getParameters=getParameters)
    return data
def getPolicyGroupAssetResults(config, policyId, groupId, applicableOnly=None):
    """Return per-asset results for a policy group."""
    getParameters=[]
    if applicableOnly is not None:
        getParameters.append('applicableOnly=' + applicableOnly)
    code, data = _GET('/api/3/policies/' + str(policyId) + '/groups/' + str(groupId) + '/assets', config, getParameters=getParameters)
    return data
def getDescendantPolicyRules(config, policyId, groupId):
    """Return all rules descending from a policy group."""
    getParameters=[]
    code, data = _GET('/api/3/policies/' + str(policyId) + '/groups/' + str(groupId) + '/rules', config, getParameters=getParameters)
    return data
def getPolicyRuleAssetResult(config, policyId, ruleId, assetId):
    """Return one asset's result for a policy rule."""
    getParameters=[]
    code, data = _GET('/api/3/policies/' + str(policyId) + '/rules/' + str(ruleId) + '/assets/' + str(assetId) + '', config, getParameters=getParameters)
    return data
def getPolicyGroupChildren(config, policyId, groupId):
    """Return the children of a policy group."""
    getParameters=[]
    code, data = _GET('/api/3/policies/' + str(policyId) + '/groups/' + str(groupId) + '/children', config, getParameters=getParameters)
    return data
def getPolicy(config, policyId):
    """Return policy ``policyId``."""
    getParameters=[]
    code, data = _GET('/api/3/policies/' + str(policyId) + '', config, getParameters=getParameters)
    return data
| StarcoderdataPython |
1614286 | <filename>geo/o2ld.geo.py
#!/usr/bin/env python
from ipdata import ipdata
from pprint import pprint
f = open('/home/cam/projects/project_secrets/geo.key', 'r')
#print(f) # debugging#don't print API keys
apiKey = f.readline().strip() # strips away \n at EOL
ipdata = ipdata.IPData(apiKey) # api key goes here
# gonna do this manually to get everything online
ipAsk = input("Enter IP Address: ")
#ipdata = ipdata.IPData('') #api key
response = ipdata.lookup(ipAsk) #ip address - this will be !manual later
#pprint(response)
with open(
| StarcoderdataPython |
89484 | <filename>tests/kyu_7_tests/test_digitize.py<gh_stars>10-100
import unittest
from katas.kyu_7.digitize import digitize
class DigitizeTestCase(unittest.TestCase):
    """Tests for katas.kyu_7.digitize: split an integer into its digits."""

    def test_equals(self):
        self.assertEqual(digitize(123), [1, 2, 3])

    def test_equals_2(self):
        self.assertEqual(digitize(1), [1])

    def test_equals_3(self):
        self.assertEqual(digitize(0), [0])

    def test_equals_4(self):
        self.assertEqual(digitize(1230), [1, 2, 3, 0])

    def test_equals_5(self):
        self.assertEqual(digitize(8675309), [8, 6, 7, 5, 3, 0, 9])
| StarcoderdataPython |
62354 | import subprocess
import tempfile
import unittest
# Shared kwargs for every CLI subprocess: 5 s timeout, captured stdout,
# text (str) mode.
COMMON_SUBPROCESS_ARGS = {
    'timeout': 5,
    'stdout': subprocess.PIPE,
    'universal_newlines': True
}
class TestCommandLineInterface(unittest.TestCase):
    """End-to-end tests invoking the hasami CLI as a subprocess."""

    def test_simple_invocation(self):
        """Test simple execution: read from stdin, write to stdout"""
        process = subprocess.run(
            ['python3', '-m', 'hasami.cli'],
            **COMMON_SUBPROCESS_ARGS,
            input='これは単純な文です。\nこれが最初の文です。これは二番目の文です。これが最後の文です。',
        )
        self.assertEqual(0, process.returncode)
        self.assertEqual('これは単純な文です。\nこれが最初の文です。\nこれは二番目の文です。\nこれが最後の文です。\n', process.stdout)

    def test_reading_from_file(self):
        """Test reading input from file"""
        with tempfile.NamedTemporaryFile() as file:
            file.write('これは単純な文です。\nこれが最初の文です。これは二番目の文です。これが最後の文です。'.encode('utf-8'))
            file.flush()
            process = subprocess.run(
                ['python3', '-m', 'hasami.cli', file.name],
                **COMMON_SUBPROCESS_ARGS,
            )
            self.assertEqual(0, process.returncode)
            self.assertEqual('これは単純な文です。\nこれが最初の文です。\nこれは二番目の文です。\nこれが最後の文です。\n', process.stdout)
| StarcoderdataPython |
3319911 | #!/usr/bin/env python3
from aws_cdk import core
from image_recognition_processing.image_recognition_processing import ImageRecognitionProcessingStack
# Instantiate the CDK app with its single stack, then synthesize the
# CloudFormation template.
app = core.App()
ImageRecognitionProcessingStack(app, "reinvent-dop336-2019")
app.synth()
| StarcoderdataPython |
3378375 | <reponame>iamovrhere/lpthw
# NOTE(review): Python 2 source (print statements); will not run on Python 3.
import time

# Demonstrations of escape sequences: tab, newline, backslash.
tabby_cat = "\tGet it? Because tabs!"
persian_cat = "Split \non a line;\n was this meant to be a pun?"
backslash_cat = "I'm \\ a \\ cat?"
fat_cat = """
I'' do a list:
\t* Cat food
\t* Fishes
\t* Catnip\n\t* Grass?
"""
print tabby_cat
print persian_cat
print backslash_cat
print fat_cat

# Endless terminal spinner; runs forever (Ctrl-C to stop). The "\b\b"
# backspaces over the previous character before printing the next frame.
while True:
    for i in ["/","-","|","\\","|"]:
        time.sleep(1.0 / 1000.0)
        print("\b\b%s") % i,
| StarcoderdataPython |
3397540 | <gh_stars>0
class Solution:
def countStudents(self, students: List[int], sandwiches: List[int]) -> int:
flag = 0
n_stu = len(students)
queue = deque(students)
san_index = 0
while flag < n_stu:
s = queue.popleft()
if s== sandwiches[san_index]:
san_index += 1
n_stu = n_stu-1
flag = 0
else:
queue.append(s)
flag += 1
return len(queue)
| StarcoderdataPython |
1631664 | <filename>python/primary/func_return.py<gh_stars>1-10
#!/usr/bin/python
# Filename: func_key.py
def maximun(a, b):
    """Return the larger of ``a`` and ``b`` (``b`` when they are equal)."""
    return a if a > b else b
| StarcoderdataPython |
3368914 | <gh_stars>1-10
from survae.data.datasets.image.supervised_wrappers.mnist import SupervisedMNISTDataset
from survae.data.datasets.image.supervised_wrappers.cifar10 import SupervisedCIFAR10Dataset
from survae.data.datasets.image.supervised_wrappers.celeba import SupervisedCelebADataset
from survae.data.datasets.image.supervised_wrappers.svnh import SupervisedSVHNDataset
from survae.data.datasets.image.supervised_wrappers.fashion_mnist import SupervisedFashionMNISTDataset
| StarcoderdataPython |
# ucpaas (UCloud) SMS send endpoint.
SMS_SENDSMS = "https://open.ucpaas.com/ol/sms/sendsms"

# Request template for sending an SMS; `param` and `mobile` are filled in
# per message. NOTE(review): account sid/appid (and a redacted token) are
# hard-coded here — credentials should be moved out of source control.
MOBILE_NONE_SMS = {
    "sid": '9af4399595c7658687fe927341449d07',
    "token": 'dc43<PASSWORD>535<PASSWORD>',
    "appid": '8b1338016ca24ed5918bdd2ad3ff5b6a',
    "templateid": "526540",
    "param": None,
    "mobile": None,
}
129943 | <reponame>andrii-grytsenko/NBU_CurrencyExchange_Rates_Bot
import keyboard as kbd
STATES = {
"START": {
"description": "Initial state",
"keyboard": kbd.kbd_main_screen,
"message": """
Welcome!
I am ready to inform You about actual currency exchange rates provided by National bank of Ukraine.
Let's start. Choose currency.\n
"""
},
"STATE01": {
"description": "Main menu",
"keyboard": kbd.kbd_main_screen,
"message": "Choose currency:"
},
"STATE02": {
"description": "All currencies",
"keyboard": kbd.kbd_all_currencies,
"message": "Choose currency:"
}
}
| StarcoderdataPython |
4410 | <gh_stars>1-10
import io
import time
import datetime
from readme_metrics.Metrics import Metrics
from readme_metrics.MetricsApiConfig import MetricsApiConfig
from readme_metrics.ResponseInfoWrapper import ResponseInfoWrapper
from werkzeug import Request
class MetricsMiddleware:
    """Core middleware class for ReadMe Metrics

    Wraps a WSGI application, captures each request/response pair, and
    forwards them to the ReadMe Metrics backend.

    Attributes:
        config (MetricsApiConfig): Contains the configuration settings for the
            running middleware instance
    """

    def __init__(self, wsgi_app_reference, config: MetricsApiConfig):
        """
        Constructs and initializes MetricsMiddleware WSGI middleware to be
        passed into the currently running WSGI web server.

        Args:
            wsgi_app_reference: Reference to the current WSGI application,
                which will be wrapped
            config (MetricsApiConfig): Instance of MetricsApiConfig object
        """
        self.config = config
        self.app = wsgi_app_reference
        self.metrics_core = Metrics(config)

    def __call__(self, environ, start_response):
        """Method that is called by the running WSGI server.

        You should NOT be calling this method yourself under normal
        circumstances.
        """
        response_headers = {}
        response_status = 0
        iterable = None
        req = Request(environ)

        def _start_response(_status, _response_headers, *args):
            write = start_response(_status, _response_headers, *args)
            # Capture response info (headers & status) so the loop below can
            # report them alongside each body chunk.
            nonlocal response_headers, response_status
            response_headers = _response_headers
            response_status = _status
            return write

        try:
            req.rm_start_dt = str(datetime.datetime.utcnow())
            req.rm_start_ts = int(time.time() * 1000)
            if req.method == "POST":
                # WSGI workaround: the request body can only be read once,
                # after which the socket is exhausted. Read it fully, then
                # replace wsgi.input with an in-memory copy so that code
                # further down the pipeline can still read the body.
                #
                # For more info: https://stackoverflow.com/a/13106009/643951

                # CONTENT_LENGTH may be empty, missing, or malformed.
                try:
                    content_length = int(environ.get("CONTENT_LENGTH", 0))
                except (ValueError, TypeError):
                    content_length = 0

                content_body = environ["wsgi.input"].read(content_length)

                # Close the original, now-exhausted stream before replacing
                # it. (Bug fix: this previously probed
                # environ["CONTENT_LENGTH"] — a string — so the stream was
                # never closed, and a missing header raised KeyError.)
                if hasattr(environ["wsgi.input"], "close"):
                    environ["wsgi.input"].close()

                environ["wsgi.input"] = io.BytesIO(content_body)

                req.rm_content_length = content_length
                req.rm_body = content_body

            iterable = self.app(environ, _start_response)
            for data in iterable:
                res_ctype = ""
                res_clength = 0
                # First matching header wins; both must be present to be used.
                htype = next(
                    (h for h in response_headers if h[0] == "Content-Type"), None
                )
                hlength = next(
                    (h for h in response_headers if h[0] == "Content-Length"), None
                )
                if htype and hlength:
                    res_ctype = htype[1]
                    res_clength = int(hlength[1])

                # Populate response body
                res = ResponseInfoWrapper(
                    response_headers,
                    response_status,
                    res_ctype,
                    res_clength,
                    data.decode("utf-8"),
                )

                # Send off data to be queued (and processed) by ReadMe if allowed
                self.metrics_core.process(req, res)
                yield data
        finally:
            # Undocumented in WSGI spec but the iterable has to be closed
            if hasattr(iterable, "close"):
                iterable.close()
| StarcoderdataPython |
3308571 | <reponame>amirhRahimi1993/info<filename>Super_users/migrations/0010_auto_20180524_1233.py
# Generated by Django 2.0.2 on 2018-05-24 12:33
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django 2.0.2 (2018-05-24); avoid hand-editing a
    # migration that may already be applied. Replaces Survey.image with a
    # SurveyImage join model referencing RelatedImage.

    dependencies = [
        ('Super_users', '0009_auto_20180524_1230'),
    ]

    operations = [
        migrations.CreateModel(
            name='RelatedImage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('image', models.ImageField(upload_to='survey_images/')),
            ],
        ),
        migrations.CreateModel(
            name='SurveyImage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('image', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Super_users.RelatedImage')),
            ],
        ),
        migrations.RemoveField(
            model_name='survey',
            name='image',
        ),
        migrations.AddField(
            model_name='surveyimage',
            name='survey',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Super_users.Survey'),
        ),
    ]
| StarcoderdataPython |
1689855 | <filename>solidfire/common/__init__.py<gh_stars>10-100
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright © 2014-2016 NetApp, Inc. All Rights Reserved.
#
# CONFIDENTIALITY NOTICE: THIS SOFTWARE CONTAINS CONFIDENTIAL INFORMATION OF
# NETAPP, INC. USE, DISCLOSURE OR REPRODUCTION IS PROHIBITED WITHOUT THE PRIOR
# EXPRESS WRITTEN PERMISSION OF NETAPP, INC.
"""API Common Library"""
import itertools
import json
import logging
import requests
from requests.auth import HTTPBasicAuth
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
from solidfire.common import model
# Module-level logger shared by the SDK; INFO by default (see setLogLevel).
LOG = logging.getLogger('solidfire.Element')
LOG.setLevel(logging.INFO)
CH = logging.StreamHandler()
CH.setLevel(logging.INFO)
FORMATTER = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
CH.setFormatter(FORMATTER)
LOG.addHandler(CH)
# Monotonic counter — presumably used to number outgoing API requests;
# TODO(review): confirm at call sites.
ATOMIC_COUNTER = itertools.count()
def setLogLevel(level):
    """
    Set the logging level of the Element logger and all of its handlers.

    >>> import logging
    >>> from solidfire import common
    >>> common.setLogLevel(logging.DEBUG)

    :param level: level must be an int or a str.
    """
    for target in [LOG] + LOG.handlers:
        target.setLevel(level)
class SdkOperationError(Exception):
    """Error raised for failures that originate inside the SDK itself."""

    def __init__(self, *args, **kwargs):
        super(SdkOperationError, self).__init__(*args, **kwargs)
class ApiServerError(Exception):
    """
    ApiServerError is an exception that occurs on the server and is passed
    as a response back to the sdk.
    """

    def __init__(self, method_name, err_json):
        """
        ApiServerError constructor.

        :param method_name: name of the service method where the error
            occurred.
        :type method_name: str

        :param err_json: the json formatted error received from the service.
        :type err_json: str
        """
        try:
            json.loads(err_json)
        except (ValueError, TypeError):
            # Payload is not valid JSON (or not a string at all); fall back
            # to an empty error document so the accessor properties still
            # behave sanely. (Previously a bare except, which also hid
            # KeyboardInterrupt/SystemExit.)
            err_json = '{}'
        self._method_name = method_name
        self._err_json = err_json
        Exception.__init__(self)

    def __repr__(self):
        return '%s(method_name="%s", err_json=%s)' % (
            self.__class__.__name__,
            self._method_name,
            self._err_json
        )

    def __str__(self):
        return repr(self)

    @property
    def method_name(self):
        """The name of the service method causing the error."""
        return self._method_name

    @property
    def error_name(self):
        """The name of the error."""
        maybeDict = json.loads(self._err_json)
        # The server occasionally double-encodes the error document.
        if isinstance(maybeDict, str):
            maybeDict = json.loads(maybeDict)
        return maybeDict.get('error', {}).get('name', 'Unknown')

    @property
    def error_code(self):
        """The numeric code for this error."""
        maybeDict = json.loads(self._err_json)
        if isinstance(maybeDict, str):
            maybeDict = json.loads(maybeDict)
        return int(maybeDict.get('error', {}).get('code', 500))

    @property
    def message(self):
        """A user-friendly message returned from the server."""
        try:
            json_err = json.loads(self._err_json)
            return json_err.get('error', {}).get('message', None)
        except (ValueError, TypeError, AttributeError):
            # _err_json is parseable by construction, but a non-dict payload
            # (e.g. a JSON list) has no .get(); fall back to the raw text.
            return self._err_json
class ApiMethodVersionError(Exception):
    """
    An ApiMethodVersionError occurs when a service method is not compatible
    with the version of the connected server.
    """

    def __init__(self,
                 method_name,
                 api_version,
                 since,
                 deprecated=None):
        """
        ApiMethodVersionError constructor.

        :param method_name: name of the service method where the error
            occurred.
        :type method_name: str

        :param api_version: the version of API used to instantiate the
            connection to the server.
        :type api_version: str or float

        :param since: the earliest version of the API a service method is
            compatible.
        :type since: str or float

        :param deprecated: the latest version of the API that a method is
            compatible.
        :type deprecated: str or float
        """
        self._method_name = method_name
        self._api_version = float(api_version)
        # Normalize optional version fields to float, preserving None.
        self._since = float(since) if since is not None else since
        self._deprecated = float(deprecated) \
            if deprecated is not None else deprecated
        Exception.__init__(self)

    def __repr__(self):
        return '%s(method_name="%s", ' \
               'api_version=%s, ' \
               'since=%s, ' \
               'deprecated=%s) ' % (
                   self.__class__.__name__,
                   self._method_name,
                   self._api_version,
                   self._since,
                   self._deprecated
               )

    def __str__(self):
        # Formats using the private attribute names via self.__dict__.
        return str.format(
            '\n Invalid Method:\n'
            '  Method Name: {_method_name}\n'
            '  Service Api Version: {_api_version}\n'
            '  Method Exists Since: {_since}\n'
            '  Method Deprecated: {_deprecated}\n',
            **self.__dict__
        )

    @property
    def method_name(self):
        """The name of the service method causing the error."""
        return self._method_name

    @property
    def api_version(self):
        """The version of the Element API Service"""
        return self._api_version

    @property
    def since(self):
        """The version a service was introduced"""
        return self._since

    @property
    def deprecated(self):
        """The version a service was deprecated"""
        return self._deprecated
class ApiParameterVersionError(Exception):
    """
    An ApiParameterVersionError occurs when a parameter supplied to a service
    method is not compatible with the version of the connected server.
    """
    def __init__(self,
                 method_name,
                 api_version,
                 params):
        """
        ApiParameterVersionError constructor.

        :param method_name: name of the service method where the error
            occurred.
        :type method_name: str

        :param api_version: the version of API used to instantiate the
            connection to the server.
        :type api_version: str or float

        :param params: the list of incompatible parameters provided to a
            service method call. This tuple should include name, value, since,
            and deprecated values for each offending parameter.
        :type params: list of tuple
        """
        self._method_name = method_name
        self._api_version = float(api_version)
        self._params = params
        # Human-readable "name (version: X)" strings, one per offending
        # parameter, used by __str__.
        self._violations = []
        if params is not None:
            for (name, value, since, deprecated) in params:
                self._violations.append(
                    name + ' (version: ' + str(since) + ')'
                )
        Exception.__init__(self)
    def __repr__(self):
        return '%s(method_name="%s", ' \
               'api_version=%s, ' \
               'params=%s) ' % (
                   self.__class__.__name__,
                   self._method_name,
                   self._api_version,
                   self._params,
               )
    def __str__(self):
        return str.format(
            '\n Invalid Parameter:\n'
            ' Method: {_method_name}\n'
            ' Api Version: {_api_version}\n'
            ' Invalid Parameters: {_violations}\n',
            **self.__dict__
        )
    @property
    def method_name(self):
        """The name of the service method causing the error."""
        return self._method_name
    @property
    def api_version(self):
        """The version of the Element API Service"""
        return self._api_version
    @property
    def params(self):
        """The parameters checked with a service call"""
        return self._params
    @property
    def violations(self):
        """The parameters violated with the service call"""
        return self._violations
class ApiVersionExceededError(Exception):
    """
    Raised when the connected server's version is lower than the
    api_version requested for the connection.
    """

    def __init__(self, api_version, current_version):
        """
        ApiVersionExceededError constructor.

        :param api_version: the version of API used to instantiate the
            connection to the server.
        :type api_version: str or float

        :param current_version: the current version of the server.
        :type current_version: float
        """
        Exception.__init__(self)
        self._api_version = float(api_version)
        self._current_version = float(current_version)

    def __repr__(self):
        return '{0}(api_version={1}, current_version={2}) '.format(
            self.__class__.__name__,
            self._api_version,
            self._current_version)

    def __str__(self):
        return ('\n Version Exceeded:\n'
                ' Provided Api Version: {0}\n'
                ' Max Version: {1}\n').format(self._api_version,
                                              self._current_version)

    @property
    def api_version(self):
        """The version of the Element API Service"""
        return self._api_version

    @property
    def current_version(self):
        """The current version of the connected Element OS"""
        return self._current_version
class ApiVersionUnsupportedError(Exception):
    """
    Raised when the connected server cannot support the requested
    api_version.
    """

    def __init__(self, api_version, supported_versions):
        """
        ApiVersionUnsupportedError constructor.

        :param api_version: the version of API used to instantiate the
            connection to the server.
        :type api_version: str or float

        :param supported_versions: the list of supported versions provided by
            a server.
        :type supported_versions: float[]
        """
        Exception.__init__(self)
        self._api_version = float(api_version)
        self._supported_versions = [float(version)
                                    for version in supported_versions]

    def __repr__(self):
        return '{0}(api_version={1}, supported_versions={2})'.format(
            self.__class__.__name__,
            self._api_version,
            self._supported_versions)

    def __str__(self):
        return ('\n Version Unsupported:\n'
                ' Provided Api Version: {0}\n'
                ' Supported Version: {1}\n').format(self._api_version,
                                                    self._supported_versions)

    @property
    def api_version(self):
        """The version of the Element API Service"""
        return self._api_version

    @property
    def supported_versions(self):
        """The versions supported by the connected Element OS"""
        return self._supported_versions
class ApiConnectionError(Exception):
    """Raised when the HTTP connection to the cluster or node fails."""

    def __init__(self, message):
        Exception.__init__(self, message)
class CurlDispatcher(object):
    """
    The CurlDispatcher is responsible for connecting, sending, and receiving
    data to a server.
    """

    def __init__(self, endpoint, username, password, verify_ssl):
        """
        The CurlDispatcher constructor.

        :param endpoint: the server URL
        :type endpoint: str

        :param username: the username for authentication
        :type username: str

        :param password: the password for authentication
        :type password: str

        :param verify_ssl: If True, ssl errors will cause an exception to be
            raised, otherwise, if False, they are ignored.
        :type verify_ssl: bool
        """
        self._endpoint = endpoint
        self._username = username
        self._password = password
        self._verify_ssl = verify_ssl
        # Defaults: 300 s read timeout, 30 s connect timeout.
        self._timeout = 300
        self._connect_timeout = 30

    def timeout(self, timeout_in_sec):
        """
        Set the time to wait for a response before timeout.

        :param timeout_in_sec: the read timeout in seconds.
        :type timeout_in_sec: int

        :raise ValueError: if timeout_in_sec is less than 0
        """
        temp_timeout = int(timeout_in_sec)
        if temp_timeout < 0:
            raise ValueError("Read Timeout less than 0")
        self._timeout = temp_timeout

    def connect_timeout(self, timeout_in_sec):
        """
        Set the time to wait for a connection to be established before timeout.

        :param timeout_in_sec: the connection timeout in seconds.
        :type timeout_in_sec: int

        :raise ValueError: if timeout_in_sec is less than 0
        """
        temp_timeout = int(timeout_in_sec)
        if temp_timeout < 0:
            raise ValueError("Connection Timeout less than 0")
        self._connect_timeout = temp_timeout

    def restore_timeout_defaults(self):
        """
        Restores the Connection and Read Timeout to their original durations of
        30 seconds for connection timeout and 300 seconds (5 minutes) for read
        timeout.
        """
        self._timeout = 300
        self._connect_timeout = 30

    def post(self, data):
        """
        Post data to the associated endpoint and await the server's response.

        :param data: the data to be posted.
        :type data: str or json

        :raise ValueError: if username or password has not been set
        """
        if self._username is None or self._password is None:
            raise ValueError("Username or Password is not set")
        auth = HTTPBasicAuth(self._username, self._password)
        # Pass a (connect, read) tuple so the configured connect timeout is
        # actually honored; previously only the read timeout was applied and
        # connect_timeout() had no effect on requests.
        resp = requests.post(self._endpoint, data=data, json=None,
                             verify=self._verify_ssl,
                             timeout=(self._connect_timeout, self._timeout),
                             auth=auth)
        if resp.text == '':
            # Empty body (e.g. HTTP-level failure): synthesize an error dict
            # from the status line so callers can report something useful.
            return {"code": resp.status_code, "name": resp.reason, "message": ""}
        return resp.text
class ServiceBase(object):
    """
    The base type for API services.
    This performs the sending, encoding and decoding of requests.
    """

    def __init__(self, mvip=None, username=None, password=None,
                 api_version=8.0, verify_ssl=True, dispatcher=None):
        """
        Constructor for initializing a connection to an instance of Element OS

        :param mvip: the management IP (IP or hostname)
        :type mvip: str
        :param username: username use to connect to the Element OS instance.
        :type username: str
        :param password: authentication for username (default: None)
        :type password: str
        :param api_version: specific version of Element OS to connect.
        :type api_version: float
        :param verify_ssl: disable to avoid ssl connection errors especially
            when using an IP instead of a hostname
        :type verify_ssl: bool
        :param dispatcher: a prebuilt or custom http dispatcher
        :return: a configured connection to an Element OS instance
        """
        self._api_version = float(api_version)
        # Keys whose values must never appear in plain text in the log
        # output; see _obfuscate_keys below.
        self._private_keys = ["clusterPairingKey", "volumePairingKey", "password", "initiatorSecret", "scriptParameters", "targetSecret", "searchBindPassword"]
        endpoint = str.format('https://{mvip}/json-rpc/{api_version}',
                              mvip=mvip, api_version=self._api_version)
        # Cluster connections default to port 443; an explicit "host:port"
        # in mvip (handled below) overrides this.
        if 'https' in endpoint:
            self._port = 443
        else:
            self._port = ''
        if not dispatcher:
            dispatcher = CurlDispatcher(endpoint, username, password,
                                        verify_ssl)
        self._dispatcher = dispatcher
        if mvip is not None:
            mvipArr = mvip.split(':')
            if len(mvipArr) == 2:
                self._port = mvipArr[1]

    def timeout(self, timeout_in_sec):
        """
        Set the time to wait for a response before timeout.

        :param timeout_in_sec: the read timeout in seconds.
        :type timeout_in_sec: int

        :raise ValueError: if timeout_in_sec is less than 0
        """
        self._dispatcher.timeout(timeout_in_sec)

    def connect_timeout(self, timeout_in_sec):
        """
        Set the time to wait for a connection to be established before timeout.

        :param timeout_in_sec: the connection timeout in seconds.
        :type timeout_in_sec: int

        :raise ValueError: if timeout_in_sec is less than 0
        """
        self._dispatcher.connect_timeout(timeout_in_sec)

    def restore_timeout_defaults(self):
        """
        Restores the Connection and Read Timeout to their original durations of
        300 seconds (5 minutes) each.
        """
        self._dispatcher.restore_timeout_defaults()

    @property
    def api_version(self):
        """
        Returns the version of the Element API

        :return: the version of the Element API
        :rtype: float
        """
        return self._api_version

    def send_request(self, method_name,
                     result_type,
                     params=None,
                     since=None,
                     deprecated=None,
                     return_response_raw=False):
        """
        Encode and dispatch a JSON-RPC request, then decode the response.

        :param method_name: the name of the API method to call
        :type method_name: str

        :param result_type: the type of the result object returned from the API
            method called.
        :type result_type: DataObject

        :param params: the parameters supplied to the API call.
        :type params: dict

        :param since: the first version this service was available
        :type since: str or float

        :param deprecated: the final version this service was available
        :type deprecated: str or float

        :param return_response_raw: when True, return the undecoded response
            body instead of an extracted result object.

        :return: the result of the API service call
        :rtype: DataObject
        """
        self._check_method_version(method_name, since, deprecated)
        if params is None:
            params = {}
        # ATOMIC_COUNTER supplies unique JSON-RPC request ids; it exposes
        # .next() on Python 2 and .__next__() on Python 3.
        global ATOMIC_COUNTER
        if hasattr(ATOMIC_COUNTER, 'next'):
            atomic_id = ATOMIC_COUNTER.next()
        else:
            atomic_id = ATOMIC_COUNTER.__next__()
        request_dict = {
            'method': method_name,
            'id': atomic_id if atomic_id > 0 else 0,
            'params': dict(
                (name, model.serialize(val))
                for name, val in params.items()
            ),
        }
        # Log an obfuscated copy so secrets never reach the log file.
        obfuscated_request_raw = json.dumps(self._obfuscate_keys(request_dict))
        encoded = json.dumps(request_dict)
        try:
            LOG.info(msg=obfuscated_request_raw)
            response_raw = self._dispatcher.post(encoded)
        except requests.ConnectionError as e:
            # Translate common OS errno strings into friendlier messages.
            if ("Errno 8" in str(e)):
                raise ApiConnectionError("Unknown host based on target.")
            elif ("Errno 60" in str(e)):
                raise ApiConnectionError("Connection timed out.")
            elif ("Errno 61" in str(e)):
                raise ApiConnectionError("Connection Refused. Confirm your target is a SolidFire cluster or node.")
            elif ("Errno 51" in str(e)):
                raise ApiConnectionError("Network is unreachable")
            raise ApiConnectionError(e)
        except requests.exceptions.ChunkedEncodingError as error:
            raise ApiConnectionError(error.args)
        except Exception as error:
            raise ApiServerError(method_name, error)
        LOG.debug(msg=response_raw)
        # The dispatcher returns a dict only for empty HTTP bodies, which it
        # synthesizes from the status line; treat that as an error payload.
        if isinstance(response_raw, dict):
            response = {
                'error': response_raw
            }
        else:
            try:
                response = json.loads(response_raw)
                LOG.debug(msg=response_raw)
            except Exception as error:
                LOG.error(msg=response_raw)
                if "401 Unauthorized." in response_raw:
                    raise ApiConnectionError("Bad Credentials")
                if "404 Not Found" in response_raw:
                    raise ApiConnectionError("404 Not Found")
                response = {
                    'error':
                        {
                            'name': 'JSONDecodeError',
                            'code': 400,
                            'message': str(error)
                        }
                }
        if return_response_raw:
            return response_raw
        if 'error' in response:
            if response["error"]["code"] == 400:
                raise requests.HTTPError(str(response["error"]["code"]) + " " + response["error"]["name"] + " " +
                                         response["error"]["message"])
            else:
                raise ApiServerError(method_name,
                                     json.dumps(response))
        else:
            return model.extract(result_type, response['result'])

    # For logging purposes, there are a set of keys we don't want to be in
    # plain text. This goes through the response and obfuscates the secret
    # keys, recursing into nested dicts and lists.
    def _obfuscate_keys(self, response, obfuscate=False):
        if type(response) == dict:
            private_dict = dict()
            for key in response:
                if key in self._private_keys:
                    private_dict[key] = self._obfuscate_keys(response[key], True)
                else:
                    private_dict[key] = self._obfuscate_keys(response[key])
            return private_dict
        if type(response) == list:
            return [self._obfuscate_keys(item) for item in response]
        if obfuscate:
            return "*****"
        else:
            return response

    def _check_connection_type(self, method_name,
                               connection_type):
        """
        Check the connection type to verify that it is right.

        Node connections use port 442, cluster connections port 443.

        :param connection_type: connection type the method expects.
        :type connection_type: str

        :raise ApiConnectionError: when the method is called on the wrong
            kind of connection.
        """
        if(connection_type == "Cluster" and int(self._port) == 442):
            error = method_name+" cannot be called on a node connection. It is a cluster-only method."
            raise ApiConnectionError(error)
        elif(connection_type == "Node" and int(self._port) == 443):
            error = method_name+" cannot be called on a cluster connection. It is a node-only method."
            raise ApiConnectionError(error)

    def _check_method_version(self,
                              method_name,
                              since,
                              deprecated=None):
        """
        Check method version against the initialized api_version of the
        service.

        :param method_name: service method name performing the check.
        :type method_name: str

        :param since: service method inception version
        :type since: float or str

        :param deprecated: service method deprecation version
        :type deprecated: float or str

        :raise ApiMethodVersionError: if the configured version of the
            ServiceBase is less then the inception version. Deprecation is
            not currently checked.
        """
        if since is not None and float(since) > self._api_version:
            raise ApiMethodVersionError(method_name,
                                        self._api_version,
                                        since=float(since),
                                        deprecated=deprecated)

    def _check_param_versions(self,
                              method_name,
                              params):
        """
        Checks parameters against the initialized api_version of the service.

        :param method_name: service method name performing the check.
        :type method_name: str

        :param params: the list of versioned parameters, their value, inception
            version, and optionally, their deprecation version as a tuple
        :type params: list of tuple

        :raise ApiParameterVersionError: if the configured version of the
            ServiceBase is less then the inception version of the parameter.
            Deprecation is not currently checked.
        """
        invalid = []
        if params is None:
            params = []
        for (name, value, since, deprecated) in params:
            # Only parameters that were actually supplied (value is not
            # None) can violate the version constraint.
            if value is not None and float(since) > self._api_version:
                invalid.append((name, value, float(since), deprecated))
        if len(invalid) > 0:
            raise ApiParameterVersionError(method_name,
                                           self._api_version,
                                           invalid)
| StarcoderdataPython |
1620376 | <filename>admirarchy/tests/testapp/models.py
from django.db import models
class AdjacencyListModel(models.Model):
    """Test model storing a hierarchy as an adjacency list.

    Each row points to its parent via a self-referencing foreign key;
    a NULL parent marks a root node.
    """
    title = models.CharField(max_length=100)
    parent = models.ForeignKey(
        'self', related_name='%(class)s_parent', on_delete=models.CASCADE, db_index=True, null=True, blank=True)

    def __str__(self):
        return 'adjacencylistmodel_%s' % self.title
class NestedSetModel(models.Model):
    """Test model storing a hierarchy as a nested set.

    lft/rgt are the left/right boundary indexes of the nested-set
    encoding and level is the node depth; all three are indexed for
    range queries.
    """
    title = models.CharField(max_length=100)
    lft = models.IntegerField(db_index=True)
    rgt = models.IntegerField(db_index=True)
    level = models.IntegerField(db_index=True)

    def __str__(self):
        return 'nestedsetmodel_%s' % self.title
| StarcoderdataPython |
3229835 | import numpy as np
import logging
from scipy.sparse import csr_matrix
from .segmentanalyzer import SegmentSplitter
from ..peakcollection import Peak
from .graphs import PosDividedLineGraph, SubGraph
from .reference_based_max_path import max_path_func
class SparseMaxPaths:
    """Find maximum-scoring paths (peaks) on a graph from sparse pileup values.

    NOTE(review): assumes ``sparse_values`` and ``score_pileup`` are
    run-length style structures exposing ``indices``/``values`` arrays --
    confirm against the producing pileup code.
    """
    def __init__(self, sparse_values, graph, score_pileup, variant_maps=None):
        self._node_indexes = graph.node_indexes
        self._sparse_values = sparse_values
        self._graph = graph
        self._score_pileup = score_pileup
        self._segments = self.get_segments()
        self._node_ids = self.get_node_ids()
        self._analyzer = SegmentSplitter(self._segments, self._node_ids,
                                         graph.node_indexes)
        if variant_maps is not None:
            logging.info("Will use variant maps when finding max paths")
            self._variant_maps = variant_maps
        else:
            self._variant_maps = None
            logging.info("Not using variant maps when finding max path")
    def _handle_internal(self, mask):
        # Build Peak objects for segments fully contained inside one node.
        ids = self._analyzer._internal_ids[:, 0][mask]
        positions = self._analyzer._internal_segments[mask]
        # Convert absolute positions to offsets within the owning node.
        offsets = positions-self._node_indexes[ids-1, None]
        ids -= self._graph.min_node-1
        logging.info("Found %s internal peaks", len(ids))
        self.internal_paths = [Peak(offset[0], offset[1], [_id],
                                    graph=self._graph)
                               for offset, _id in zip(offsets, ids)]
        for path in self.internal_paths:
            path.info = (False, False)
    def get_segments(self):
        # Pair up the run boundaries of the sparse track into
        # (start, end) rows; pad at the ends when the track begins or
        # ends inside a covered run.
        start_idx = 0
        if self._sparse_values.values[0] != 1:
            start_idx += 1
        indices = self._sparse_values.indices[start_idx:]
        if self._sparse_values.values[-1] == 1:
            indices = np.r_[indices, self._node_indexes[-1]]
        return indices.reshape(indices.size//2, 2)
    def get_node_ids(self):
        # Map each segment's start/end coordinate to its containing node id.
        node_ids = np.empty_like(self._segments)
        node_ids[:, 0] = np.digitize(self._segments[:, 0], self._node_indexes)
        node_ids[:, 1] = np.digitize(self._segments[:, 1],
                                     self._node_indexes, True)
        return node_ids
    def run(self):
        """Split segments, score them and return (peaks, subgraphs)."""
        self._analyzer.run()
        self._segments = self._analyzer.splitted_segments
        self.get_segment_scores()
        if self._variant_maps is not None:
            return self._run_refmaxpath()
        return self._run_maxpath()
    def _run_refmaxpath(self):
        # Reference/variant-aware max path search.
        scored_segments = [np.vstack((self._analyzer._internal_ids[:, 0][mask],
                                      self.scores[mask]))
                           for mask in [self._analyzer.internal_mask,
                                        self._analyzer.start_mask,
                                        self._analyzer.end_mask,
                                        self._analyzer.full_mask]]
        self._handle_internal(self._analyzer.internal_mask)
        linegraph = PosDividedLineGraph(scored_segments[2],
                                        scored_segments[3],
                                        scored_segments[1],
                                        self._graph)
        components, subgraphs = linegraph.get_connected_components()
        components = self._convert_connected_components(components)
        subgraphs = [SubGraph(*pair) for pair in zip(components, subgraphs)]
        get_max_path = max_path_func(self._score_pileup, self._graph, self._variant_maps)
        max_paths = []
        for i, component in enumerate(components):
            if i % 100 == 0:
                print("path: ", i)
            max_paths.append(get_max_path(component))
        start_offset = self.get_start_offsets([max_path[0] for max_path in max_paths])
        end_offset = self.get_end_offsets([max_path[-1] for max_path in max_paths])
        peaks = [Peak(start, end, path, graph=self._graph) for path, start, end in
                 zip(max_paths, start_offset, end_offset)]
        return peaks, subgraphs
    def _run_maxpath(self):
        # Pure score-based max path search (no variant maps).
        scored_segments = [np.vstack((self._analyzer._internal_ids[:, 0][mask],
                                      self.scores[mask]))
                           for mask in [self._analyzer.internal_mask,
                                        self._analyzer.start_mask,
                                        self._analyzer.end_mask,
                                        self._analyzer.full_mask]]
        self._handle_internal(self._analyzer.internal_mask)
        linegraph = PosDividedLineGraph(scored_segments[2],
                                        scored_segments[3],
                                        scored_segments[1],
                                        self._graph)
        paths, infos, subgraphs = linegraph.max_paths()
        converted = self._convert_paths(paths, infos)
        # Single-node internal peaks get trivial one-node subgraphs.
        small_subgraphs = [
            SubGraph(path.region_paths,
                     csr_matrix(([], ([], [])), shape=(1, 1)))
            for path in self.internal_paths]
        return converted+self.internal_paths, subgraphs+small_subgraphs
    # NOTE(review): this definition of _convert_paths is shadowed by the
    # identical-behavior definition further down and is therefore dead code.
    def _convert_paths(self, paths, infos):
        reverse_map = np.concatenate(
            [np.flatnonzero(mask) for mask in
             [self._analyzer.end_mask,
              self._analyzer.full_mask,
              self._analyzer.start_mask,
              ]])
        peaks = [self._convert_path(path, reverse_map)
                 for path in paths]
        for peak, info in zip(peaks, infos):
            peak.info = info
        return peaks
    def get_start_offsets(self, start_nodes):
        # Offset of the covered region inside each path's first node.
        start_nodes = np.asanyarray(start_nodes)-self._graph.min_node
        start_args = np.digitize(self._graph.node_indexes[start_nodes+1],
                                 self._sparse_values.indices, right=True)-1
        return np.maximum(0, self._sparse_values.indices[start_args]-self._graph.node_indexes[start_nodes])
    def get_end_offsets(self, end_nodes):
        # Offset of the covered region inside each path's last node.
        end_nodes = np.asanyarray(end_nodes)-self._graph.min_node
        end_args = np.digitize(self._graph.node_indexes[end_nodes], self._sparse_values.indices)-1
        next_indexes = self._sparse_values.indices[end_args+1]
        return np.minimum(next_indexes, self._graph.node_indexes[end_nodes+1])-self._graph.node_indexes[end_nodes]
    def _get_reverse_map(self):
        # Maps linegraph node order (end, full, start) back to analyzer rows.
        return np.concatenate(
            [np.flatnonzero(mask) for mask in
             [self._analyzer.end_mask,
              self._analyzer.full_mask,
              self._analyzer.start_mask,
              ]])
    # This second definition overrides the earlier _convert_paths above.
    def _convert_paths(self, paths, infos):
        reverse_map = self._get_reverse_map()
        peaks = [self._convert_path(path, reverse_map)
                 for path in paths]
        for peak, info in zip(peaks, infos):
            peak.info = info
        return peaks
    def _convert_path(self, path, reverse_map):
        # Translate a linegraph path into a Peak with node ids and offsets.
        idxs = reverse_map[path]
        node_ids = self._analyzer._internal_ids[:, 0]
        node_ids = node_ids[idxs]
        start_offset = self._segments[idxs[0], 0 ] - self._node_indexes[node_ids[0]-1]
        end_offset = self._segments[idxs[-1], 1] - self._node_indexes[node_ids[-1]-1]
        return Peak(start_offset, end_offset, list(node_ids + self._graph.min_node-1), self._graph)
    def _convert_connected_components(self, nodes_list):
        reverse_map = self._get_reverse_map()
        return [self._convert_node_ids(node_ids, reverse_map) for node_ids in nodes_list]
    def _convert_node_ids(self, raw_node_ids, reverse_map):
        idxs = reverse_map[raw_node_ids]
        node_ids = self._analyzer._internal_ids[:, 0]
        node_ids = node_ids[idxs]+self._graph.min_node-1
        return node_ids
    def get_segment_scores(self):
        # Integrate the piecewise-constant score pileup over each segment:
        # cumulative sum of (run length * run value) gives whole-run totals,
        # then partial runs at the segment edges are corrected for.
        pileup_idxs = np.empty_like(self._segments)
        pileup_idxs[:, 0] = np.digitize(self._segments[:, 0],
                                        self._score_pileup.indices)
        pileup_idxs[:, 1] = np.digitize(self._segments[:, 1],
                                        self._score_pileup.indices, True)
        weighted_values = np.ediff1d(
            self._score_pileup.indices,
            self._score_pileup.track_size-self._score_pileup.indices[-1])*self._score_pileup.values
        pileup_cumsum = np.r_[0, np.cumsum(weighted_values)]
        base_scores = pileup_cumsum[pileup_idxs[:, 1]-1]-pileup_cumsum[
            pileup_idxs[:, 0]-1]
        diffs = self._segments-self._score_pileup.indices[pileup_idxs-1]
        values = self._score_pileup.values[pileup_idxs-1]
        val_diffs = diffs*values
        offsets = val_diffs[:, 1] - val_diffs[:, 0]
        self.scores = base_scores + offsets
| StarcoderdataPython |
8581 | <gh_stars>1-10
#!/usr/bin/env python
# encoding: utf-8
import sys
import getopt
import re
import os
import pylab as plt
import numpy as np
# Variables for which the residuals will be plotted
variables = ["Ux", "Uy", "T", "p_rgh", "k", "epsilon"]


def usage():
    """Print the command-line usage message."""
    print("Usage: residuals.py -l logfile\nPlot the residuals versus Time/Iteration")


def parse_args(argv):
    """Return the log-file path from *argv*; print usage and exit on error.

    :param argv: argument list (without the program name).
    """
    try:
        options, args = getopt.getopt(argv, 'l:h', ['help', 'logfile='])
    except getopt.GetoptError:
        usage()
        sys.exit(2)
    log_file = None
    for opt, arg in options:
        if opt in ("-l", "--logfile"):
            log_file = arg
        elif opt in ("-h", "--help"):
            usage()
            sys.exit(1)
    return log_file


def extract_time_and_continuity(lines):
    """Parse solver log lines and return (time, continuity) float lists.

    Time values come from lines starting with "Time = "; continuity values
    are the "sum local" figures from the continuity-error lines.
    """
    time = []
    continuity = []
    for line in lines:
        if re.search(r"^Time = ", line):
            # Value is the text just after "Time = ".
            value = line.split('Time = ')[1]
            # np.float was removed in NumPy 1.24; it was an alias of the
            # builtin float, so use float directly.
            time.append(float(value))
        elif re.search(r"continuity errors :", line):
            # Value lies between "sum local = " and ", global".
            value = line.split('sum local = ')[1].split(', global')[0]
            continuity.append(float(value))
    return time, continuity


def extract_residuals(lines, variable):
    """Return the list of final residuals reported for *variable*."""
    data = []
    for line in lines:
        if re.search(r"Solving for " + variable, line):
            value = line.split('Final residual = ')[1].split(', No Iterations')[0]
            data.append(float(value))
    return data


def main():
    """Parse the log file given on the command line and plot the residuals."""
    log_file = parse_args(sys.argv[1:])
    lines = open(log_file, "r").readlines()
    time, continuity = extract_time_and_continuity(lines)
    for variable in variables:
        data = extract_residuals(lines, variable)
        plt.plot(np.array(time), np.array(data), label=variable)
    plt.plot(np.array(time), np.array(continuity), label="Continuity")
    plt.title("Residuals plot:\n * logfile: " + log_file + "\n * case dir: " + os.getcwd().split('/')[-1], loc='left')
    plt.xlabel("Time(s)/Iterations")
    plt.ylabel("Residuals (Log Scale)")
    plt.yscale('log')
    plt.legend()
    plt.grid()
    plt.show()


if __name__ == "__main__":
    main()
| StarcoderdataPython |
1623433 | <reponame>tfxsoftware/Nutricionista-TKINTER
from tkinter import *
from tkinter.ttk import Treeview
from tkinter import ttk
from tkinter import messagebox
import sqlite3
## BACKEND ##
class funcoes():
def calcular_imc(self):
if (self.cadastro_entry_altura.get() == "") or (self.cadastro_entry_peso.get() == ""):
self.msg = "Informe os valores"
messagebox.showerror("Erro!", self.msg)
else:
self.peso = self.cadastro_entry_peso.get()
self.altura = self.cadastro_entry_altura.get()
self.peso = self.peso.replace(",", ".")
self.altura = self.altura.replace(",", ".")
self.peso = float(self.peso)
self.altura = float(self.altura)
self.imc = self.peso / (self.altura * self.altura)
if (self.imc < 17):
self.cadastro_label_imcpaciente["text"] = "%.2f" % self.imc + " (muito abaixo do peso)"
self.cadastro_label_imcpaciente["foreground"] = "red"
if (self.imc >= 17) and (self.imc < 18.5):
self.cadastro_label_imcpaciente["text"] = "%.2f" % self.imc + " (abaixo do peso)"
self.cadastro_label_imcpaciente["foreground"] = "#DECD00"
if (self.imc >= 18.5) and (self.imc < 25):
self.cadastro_label_imcpaciente["text"] = "%.2f" % self.imc + " (peso normal)"
self.cadastro_label_imcpaciente["foreground"] = "green"
if (self.imc >= 25) and (self.imc < 30):
self.cadastro_label_imcpaciente["text"] = "%.2f" % self.imc + " (acima do peso)"
self.cadastro_label_imcpaciente["foreground"] = "#DECD00"
if (self.imc >= 30):
self.cadastro_label_imcpaciente["text"] = "%.2f" % self.imc + "(obesidade)"
self.cadastro_label_imcpaciente["foreground"] = "red"
    def limpar_cadastro(self):
        """Clear every registration-form field, including the IMC label."""
        self.cadastro_entry_nome.delete(0, END)
        self.cadastro_entry_email.delete(0, END)
        self.cadastro_entry_telefone.delete(0, END)
        self.cadastro_entry_altura.delete(0, END)
        self.cadastro_entry_peso.delete(0, END)
        self.cadastro_label_imcpaciente["text"] = ""
def add_paciente(self):
if (self.cadastro_entry_nome.get() == "") or (self.cadastro_entry_email.get() == "") or (
self.cadastro_entry_telefone.get() == "") or (self.cadastro_entry_telefone.get() == "") or (
self.cadastro_entry_altura.get() == "") or (self.cadastro_entry_peso.get() == "") or (
self.cadastro_label_imcpaciente["text"] == ""):
self.msg = "Preencha todos os campos e calcule o IMC!"
messagebox.showerror("Erro!", self.msg)
else:
try:
self.nome_bd = self.cadastro_entry_nome.get()
self.email_bd = self.cadastro_entry_email.get()
self.telefone_bd = self.cadastro_entry_telefone.get()
self.peso_bd = self.cadastro_entry_peso.get()
self.altura_bd = self.cadastro_entry_altura.get()
self.imc_bd = self.imc
self.sexo_bd = self.cadastro_drop_sexo['text']
self.conecta_bd()
self.cursor.execute(""" INSERT INTO pacientes (nome, email, telefone, peso, altura, imc, sexo)
VALUES(?,?,?,?,?,?,?)""", (
self.nome_bd, self.email_bd, self.telefone_bd, self.peso_bd, self.altura_bd, self.imc_bd, self.sexo_bd))
self.conn.commit()
self.desconecta_bd()
self.limpar_cadastro()
self.msg = "Paciente cadastrado com sucesso!"
messagebox.showinfo("Sucesso!", self.msg)
self.select_lista()
except:
messagebox.showinfo("Erro!", "Email já registrado!")
    def conecta_bd(self):
        """Open the SQLite database file and keep connection/cursor on self."""
        self.conn = sqlite3.connect("pacientes.db")
        self.cursor = self.conn.cursor()
    def desconecta_bd(self):
        """Close the cursor.

        NOTE(review): the connection itself (self.conn) is never closed --
        confirm whether that is intentional.
        """
        self.cursor.close()
    def monta_tabela(self):
        """Create the 'pacientes' table if missing (email is the primary key)."""
        self.conecta_bd()
        self.cursor.execute("""CREATE TABLE IF NOT EXISTS pacientes(
                nome CHAR(40) NOT NULL,
                email CHAR(40) PRIMARY KEY,
                telefone CHAR(40) NOT NULL,
                peso FLOAT NOT NULL,
                altura FLOAT NOT NULL,
                imc FLOAT NOT NULL,
                sexo CHAR(40) NOT NULL)
            """)
        self.conn.commit()
        self.desconecta_bd()
        print('DATABASE CONECTADA')
    def select_lista(self):
        """Reload the patient Treeview with (nome, email) rows sorted by name."""
        self.lista_pacientes.delete(*self.lista_pacientes.get_children())
        self.conecta_bd()
        lista = self.cursor.execute(""" SELECT nome, email FROM pacientes
            ORDER BY nome ASC; """)
        for i in lista:
            self.lista_pacientes.insert("", END, values=i)
        self.desconecta_bd()
    def lista_click(self, event):
        """Remember the clicked row's name and email (the table's primary key)."""
        selecionado = self.lista_pacientes.item(self.lista_pacientes.selection())
        self.paciente_selecionado = selecionado['values'][0]
        self.paciente_primarykey = selecionado['values'][1]
    def inserir_vizualiza(self):
        """Fetch the selected patient's row and fill the detail-view labels."""
        self.conecta_bd()
        self.pacientes_selecionado = self.cursor.execute("""SELECT * FROM pacientes WHERE email = ?""",
                                                         (self.paciente_primarykey,))
        # Column order matches the CREATE TABLE in monta_tabela.
        for row in self.pacientes_selecionado:
            self.nome_paciente = row[0]
            self.email_paciente = row[1]
            self.telefone_paciente = row[2]
            self.peso_paciente = row[3]
            self.altura_paciente = row[4]
            self.imc_paciente = row[5]
            self.sexo_paciente = row[6]
        self.visualizar_db_nome['text'] = self.nome_paciente
        self.visualizar_db_email['text'] = self.email_paciente
        self.visualizar_db_telefone['text'] = self.telefone_paciente
        self.visualizar_db_peso['text'] = self.peso_paciente
        self.visualizar_db_altura['text'] = self.altura_paciente
        self.visualizar_db_imcpaciente['text'] = "%.2f" % self.imc_paciente
        self.visualizar_db_sexo['text'] = self.sexo_paciente
    def deletar_paciente(self):
        """Delete the selected patient after a yes/no confirmation dialog."""
        reply = messagebox.askyesno("Atenção!", "Você tem certeza que quer remover paciente?")
        if reply == True:
            self.conecta_bd()
            self.cursor.execute("""DELETE FROM pacientes WHERE email = ? """, (self.paciente_primarykey,))
            self.conn.commit()
            self.select_lista()
            self.desconecta_bd()
            # Close the patient detail window after removal.
            self.janela_paciente.destroy()
            self.msg_remover = "Paciente removido com sucesso!"
            messagebox.showinfo("sucesso", self.msg_remover)
        else:
            pass
    def lista_calorias_click(self, event):
        """Store the calorie count of the food row clicked in the food list."""
        selecionado = self.lista_alimentos.item(self.lista_alimentos.selection())
        self.calorias_alimento_selecionado = int(selecionado['values'][1])
    def calcula_calorias(self):
        """Add the selected food's calories to the running diet total."""
        self.caloria_dieta = self.calorias_alimento_selecionado + self.caloria_dieta
        self.visualizar_label_kcal['text'] = self.caloria_dieta
        # 2500 kcal is the daily reference limit used throughout this class;
        # highlight the label once it is reached.
        if self.caloria_dieta >= 2500:
            self.visualizar_label_kcal['foreground'] = "red"
    def reseta_calorias(self):
        """Reset the diet calorie counter and its display labels to zero."""
        self.caloria_dieta = 0
        self.visualizar_label_kcal['text'] = self.caloria_dieta
        self.visualizar_label_calculo['text'] = "0"
    def editar_paciente(self):
        """Edit the selected patient.

        TODO: not implemented yet.
        """
        pass
def calcular_calorias_restantes(self):
self.nova_dieta = abs(self.caloria_dieta - 2500)
if self.caloria_dieta >= 2500:
self.text = "Remover"
else:
self.text = "Adicionar"
self.visualizar_label_calculo['text'] = self.text, str(self.nova_dieta)
def inserir_alimentos(self):
    """Populate the foods Treeview with (name, kcal) rows.

    The (position, name, kcal) triples are inserted in exactly the same
    order and at exactly the same Treeview indices as before, so the
    rendered list is unchanged; the data is just kept in one table
    instead of fifty individual insert calls.
    """
    alimentos = (
        (1, 'Água de coco(200ml)', '41'),
        (2, 'Café com açúcar(50ml)', '33'),
        (3, 'Café sem açúcar(50ml)', '20'),
        (4, 'Suco de abacaxi(240ml)', '100'),
        (5, 'Suco de acerola(240ml)', '36'),
        (6, 'Suco de maçã(240ml)', '154'),
        (7, 'Suco de manga(240ml)', '109'),
        (8, 'Suco de melão(240ml)', '60'),
        (9, 'Champanhe(125ml)', '85'),
        (10, 'Uísque(100ml)', '240'),
        (11, 'Vinho branco doce(125ml)', '178'),
        (12, 'Vinho branco seco (125ml)', '107'),
        (13, 'Vinho tinto seco (125ml)', '107'),
        (14, 'Vodka (20ml)', '48'),
        (15, 'Coca cola(350ml)', '137'),
        (16, 'Coca cola zero(350ml)', '1,5'),
        (17, 'Fanta laranja(350ml)', '189'),
        (18, 'Guaraná antártica(240ml)', '75'),
        (19, 'Kuat light(350ml)', '4'),
        (20, 'Almôndega de carne(30g)', '61'),
        (21, 'Almôndega de frango(25g)', '54'),
        (22, 'Bacon frito(30g)', '198'),
        (23, 'Bisteca de porco(100g)', '337'),
        (24, 'Costeleta de porco(100g)', '483'),
        (25, 'Coxa de frango(100g)', '144'),
        (26, 'Coxa de frango c/pele(100g)', '110'),
        (27, 'Coxa de frango s/pele(100g)', '98'),
        (28, 'Coxa de frango cozida(100g)', '120'),
        (29, 'Cupim(150g)', '375'),
        (30, 'Filé de frango(100g)', '101'),
        (31, 'Filé-mignon(100g)', '140'),
        (32, 'Hambúrguer bovino(56g)', '116'),
        (33, 'Hambúrguer calabresa(56g)', '149'),
        # NOTE(review): index 34 was skipped and indices 1-7 are reused
        # below in the original; both quirks are preserved on purpose.
        (35, 'Hambúrguer de chester(56g)', '105'),
        (36, 'Hambúrguer de frango(96g)', '179'),
        (37, 'Peito de frango s/pele(100g)', '100'),
        (38, 'Pernil de porco assado(100g)', '196'),
        (39, 'Peru(100g)', '155'),
        (40, 'Picanha(100g)', '287'),
        (41, 'Rosbife(50g)', '83'),
        (42, 'Tender(100g)', '210'),
        (43, 'Linguiça de frango(100g)', '166'),
        (44, 'Linguiça tradicional(60g)', '190'),
        (45, 'Mortadela (15g)', '41'),
        (46, 'Salsicha (50g)', '115'),
        (7, 'Ovo frito(1 un)', '108'),
        (1, 'Arroz branco(25g)', '41'),
        (2, 'Feijão(20g)', '78'),
        (3, 'Tomate(100g)', '20'),
        (4, 'Pão frances(50g)', '135'),
        (5, 'Pizza(140g)', '400'),
        (6, 'Macarrão(150g)', '400'),
    )
    for posicao, nome, kcal in alimentos:
        self.lista_alimentos.insert(parent='', index=posicao, values=(nome, kcal))
## FRONTEND ##
class App(funcoes):
    """Tkinter front end of the nutritionist app.

    Builds the main patient-list window plus the registration and
    patient-detail/diet windows. All business logic (database access,
    IMC and calorie calculations) is inherited from ``funcoes``.
    """
    def __init__(self):
        # Build the root window and the patient table, load stored rows,
        # then hand control to the Tk event loop (blocks until exit).
        self.root = Tk()
        self.principal_config()
        self.monta_tabela()
        self.select_lista()
        # Primary key (email) of the row last clicked in the list;
        # empty string means "nothing selected yet".
        self.paciente_selecionado = ""
        self.root.mainloop()
    def principal_config(self):
        """Configure the main window: patient list, action buttons, scrollbar."""
        self.root.title('Nutricionista app')
        self.root.geometry("625x350")
        self.root.resizable(False, False)
        self.root.configure()
        self.root_janela_menu = PhotoImage(file="nutricionista_app/imagem menu.png")
        self.root.iconbitmap('nutricionista_app\icon_nutri.ico')
        # WIDGETS
        self.imagem_janela_menu = Label(image=self.root_janela_menu)
        self.label_pacientes = Label(self.root, text="PACIENTES", bg="#58af9c")
        self.botao_adicionar = Button(self.root, text="Adicionar paciente", command=self.cadastro_config)
        self.botao_visualizar = Button(self.root, text="Vizualizar paciente", command=self.visualiza_config)
        self.botao_sair = Button(self.root, text=" Sair ", command=self.root.destroy)
        # Two-column table: patient name and email (email doubles as PK).
        self.lista_pacientes = ttk.Treeview(self.root, height=10, column=("col1", "col2"))
        self.lista_pacientes.heading("#0", text="")
        self.lista_pacientes.heading("#1", text="nome")
        self.lista_pacientes.heading("#2", text="email")
        self.lista_pacientes.column("#0", width=0, stretch=NO)
        self.lista_pacientes.column("#1", width=300, anchor=W)
        self.lista_pacientes.column("#2", width=300)
        self.scroll_lista = Scrollbar(self.root, orient="vertical")
        self.lista_pacientes.configure(yscrollcommand=self.scroll_lista.set)
        self.lista_pacientes.bind("<ButtonRelease-1>", self.lista_click)
        # GRID
        self.imagem_janela_menu.place(x=0, y=0)
        self.label_pacientes.place(x=10, y=10)
        self.botao_adicionar.place(x=10, y=310)
        self.botao_visualizar.place(x=125, y=310)
        self.botao_sair.place(x=530, y=310)
        self.lista_pacientes.place(x=10, y=50)
        self.scroll_lista.place(x=596, y=50, height=225.5)
    def cadastro_config(self):
        """Open the modal "new patient" window with the registration form."""
        self.janela_cadastro = Toplevel()
        self.janela_cadastro.title('Cadastro de paciente')
        self.janela_cadastro.geometry("450x300")
        self.janela_cadastro.resizable(False, False)
        # Keep the dialog on top of the root window and grab all input
        # so it behaves as a modal form.
        self.janela_cadastro.transient(self.root)
        self.janela_cadastro.focus_force()
        self.janela_cadastro.grab_set()
        self.janela_cadastro.config(background="#58af9c")
        self.bg_janela_cadastro = PhotoImage(file="nutricionista_app/imagem cadastro.png")
        self.janela_cadastro.iconbitmap('nutricionista_app\icon_nutri.ico')
        # WIDGETS
        self.imagem_janela_cadastro = Label(self.janela_cadastro, image=self.bg_janela_cadastro)
        self.cadastro_label_novo = Label(self.janela_cadastro, text="NOVO PACIENTE", bg="#58af9c")
        self.cadastro_label_nome = Label(self.janela_cadastro, text="Nome: ", bg="#ffffff")
        self.cadastro_label_email = Label(self.janela_cadastro, text="Email: ", bg="#ffffff")
        self.cadastro_label_telefone = Label(self.janela_cadastro, text="Telefone: ", bg="#ffffff")
        self.cadastro_label_sexo = Label(self.janela_cadastro, text="Sexo: ", bg="#ffffff")
        self.cadastro_label_altura = Label(self.janela_cadastro, text="Altura: ", bg="#ffffff")
        self.cadastro_label_peso = Label(self.janela_cadastro, text="Peso: ", bg="#ffffff")
        self.cadastro_label_imc = Label(self.janela_cadastro, text="IMC: ")
        self.cadastro_label_imcpaciente = Label(self.janela_cadastro, text="", foreground="black")
        self.cadastro_entry_nome = Entry(self.janela_cadastro)
        self.cadastro_entry_email = Entry(self.janela_cadastro)
        self.cadastro_entry_telefone = Entry(self.janela_cadastro)
        self.cadastro_entry_altura = Entry(self.janela_cadastro)
        self.cadastro_entry_peso = Entry(self.janela_cadastro)
        self.cadastro_botao_calculaimc = Button(self.janela_cadastro, text="Calcular IMC", command=self.calcular_imc)
        self.cadastro_botao_cadastrar = Button(self.janela_cadastro, text=" Cadastrar ", command=self.add_paciente)
        self.cadastro_botao_limpar = Button(self.janela_cadastro, text=" Limpar ", command=self.limpar_cadastro)
        self.cadastro_botao_sair = Button(self.janela_cadastro, text=" Voltar ",
                                          command=self.janela_cadastro.destroy)
        # Dropdown with the two supported values for the "sexo" field.
        self.SexoTipvar = StringVar(self.janela_cadastro)
        self.SexoTipv = ("Masculino", "Feminino ")
        self.SexoTipvar.set("Masculino")
        self.cadastro_drop_sexo = OptionMenu(self.janela_cadastro, self.SexoTipvar, *self.SexoTipv)
        # CADASTRO GRID
        self.imagem_janela_cadastro.place(x=0, y=0)
        self.cadastro_label_novo.place(x=10, y=10)
        self.cadastro_label_nome.place(x=10, y=50)
        self.cadastro_label_email.place(x=10, y=100)
        self.cadastro_label_telefone.place(x=10, y=150)
        self.cadastro_label_sexo.place(x=10, y=200)
        self.cadastro_label_altura.place(x=250, y=50)
        self.cadastro_label_peso.place(x=250, y=100)
        self.cadastro_label_imc.place(x=250, y=200)
        self.cadastro_label_imcpaciente.place(x=275, y=200)
        self.cadastro_entry_nome.place(x=75, y=50)
        self.cadastro_entry_email.place(x=75, y=100)
        self.cadastro_entry_telefone.place(x=75, y=150)
        self.cadastro_entry_altura.place(x=290, y=50)
        self.cadastro_entry_peso.place(x=290, y=100)
        self.cadastro_botao_calculaimc.place(x=250, y=150)
        self.cadastro_botao_cadastrar.place(x=10, y=260)
        self.cadastro_botao_limpar.place(x=100, y=260)
        self.cadastro_botao_sair.place(x=375, y=260)
        self.cadastro_drop_sexo.place(x=75, y=200)
    def visualiza_config(self):
        """Open the patient-detail window (data, diet counter, foods list).

        Requires a patient to be selected in the main list; otherwise an
        error dialog is shown. Resets the running calorie counter.
        """
        self.caloria_dieta = 0
        if self.paciente_selecionado != "":
            self.janela_paciente = Toplevel()
            self.janela_paciente.title('Visualizar paciente')
            self.janela_paciente.geometry("900x300")
            self.janela_paciente.resizable(False, False)
            # Modal behaviour, same as the registration window.
            self.janela_paciente.transient(self.root)
            self.janela_paciente.focus_force()
            self.janela_paciente.grab_set()
            self.bg_janela_paciente = PhotoImage(file="nutricionista_app/imagem paciente.png")
            self.janela_paciente.iconbitmap('nutricionista_app\icon_nutri.ico')
            # WIDGETS
            self.imagem_janela_paciente = Label(self.janela_paciente, image=self.bg_janela_paciente)
            self.visualizar_label_novo = Label(self.janela_paciente, text="PACIENTE", bg="#58af9c")
            self.visualizar_label_nome = Label(self.janela_paciente, text="Nome: ", bg="#ffffff")
            self.visualizar_label_email = Label(self.janela_paciente, text="Email: ", bg="#ffffff")
            self.visualizar_label_telefone = Label(self.janela_paciente, text="Telefone: ", bg="#ffffff")
            self.visualizar_label_sexo = Label(self.janela_paciente, text="Sexo: ", bg="#ffffff")
            self.visualizar_label_altura = Label(self.janela_paciente, text="Altura: ", bg="#ffffff")
            self.visualizar_label_peso = Label(self.janela_paciente, text="Peso: ", bg="#ffffff")
            self.visualizar_label_imc = Label(self.janela_paciente, text="IMC: ", bg="#ffffff")
            self.visualizar_label_kcal_text = Label(self.janela_paciente, text="Calorias(Kcal):", bg="#ffffff")
            self.visualizar_label_kcal = Label(self.janela_paciente, text="0", foreground="green", bg="#ffffff")
            self.visualizar_label_calculo = Label(self.janela_paciente, text="-----", bg="#ffffff")
            self.visualizar_db_imcpaciente = Label(self.janela_paciente, text="", foreground="black", bg="#ffffff")
            self.visualizar_db_nome = Label(self.janela_paciente, text="", bg="#ffffff")
            self.visualizar_db_email = Label(self.janela_paciente, text="", bg="#ffffff")
            self.visualizar_db_telefone = Label(self.janela_paciente, text="", bg="#ffffff")
            self.visualizar_db_altura = Label(self.janela_paciente, text="", bg="#ffffff")
            self.visualizar_db_peso = Label(self.janela_paciente, text="", bg="#ffffff")
            self.visualizar_db_sexo = Label(self.janela_paciente, text="", bg="#ffffff")
            self.visualizar_button_reset = Button(self.janela_paciente, text="Limpar selecionado", width=20,
                                                  command=self.reseta_calorias)
            self.visualizar_button_remover = Button(self.janela_paciente, text="Remover paciente",
                                                    command=self.deletar_paciente)
            self.visualizar_button_calcula = Button(self.janela_paciente, text="Calcular dieta", width=20,
                                                    command=self.calcular_calorias_restantes)
            self.lista_button_add = Button(self.janela_paciente, text='Adicionar selecionado', width=20,
                                           command=self.calcula_calorias)
            # Foods table: name and calorie count per portion.
            self.lista_alimentos = Treeview(self.janela_paciente, height=10, column=("col1", "col2"))
            self.lista_alimentos.heading("#0", text="")
            self.lista_alimentos.heading("#1", text="Alimento")
            self.lista_alimentos.heading("#2", text="Kcal")
            self.lista_alimentos.column("#0", width=0, stretch=NO)
            self.lista_alimentos.column("#1", width=150, anchor=W)
            self.lista_alimentos.column("#2", width=50)
            self.alimentos_scroll = Scrollbar(self.janela_paciente, orient="vertical")
            self.lista_alimentos.configure(yscrollcommand=self.alimentos_scroll.set)
            self.lista_alimentos.bind("<ButtonRelease-1>", self.lista_calorias_click)
            # Visualizar GRID
            self.imagem_janela_paciente.place(x=0, y=0)
            self.visualizar_label_novo.place(x=10, y=10)
            self.visualizar_label_nome.place(x=10, y=50)
            self.visualizar_label_email.place(x=10, y=100)
            self.visualizar_label_telefone.place(x=10, y=150)
            self.visualizar_label_sexo.place(x=10, y=200)
            self.visualizar_label_altura.place(x=250, y=50)
            self.visualizar_label_peso.place(x=250, y=100)
            self.visualizar_label_imc.place(x=250, y=200)
            self.visualizar_label_kcal_text.place(x=500, y=50)
            self.visualizar_label_kcal.place(x=500, y=80)
            self.visualizar_label_calculo.place(x=500, y=250)
            self.visualizar_db_imcpaciente.place(x=275, y=200)
            self.visualizar_db_nome.place(x=75, y=50)
            self.visualizar_db_email.place(x=75, y=100)
            self.visualizar_db_telefone.place(x=75, y=150)
            self.visualizar_db_altura.place(x=290, y=50)
            self.visualizar_db_peso.place(x=290, y=100)
            self.visualizar_db_sexo.place(x=75, y=200)
            self.visualizar_button_remover.place(x=250, y=250)
            self.lista_button_add.place(x=500, y=150)
            self.visualizar_button_reset.place(x=500, y=180)
            self.visualizar_button_calcula.place(x=500, y=210)
            self.lista_alimentos.place(x=660, y=50)
            self.alimentos_scroll.place(x=850, y=50, height=225.5)
            # Fill the detail labels from the DB and populate the foods list.
            self.inserir_vizualiza()
            self.inserir_alimentos()
        else:
            self.msg_remover = "Nem um paciente selecionado!"
            messagebox.showerror("Erro!", self.msg_remover)
App() | StarcoderdataPython |
80487 | <gh_stars>0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 15 10:00:19 2020
Generate measurements.
@author: <NAME>
"""
import os
import time as tm
import numpy as np
from joblib import Parallel, delayed
import multiprocessing
from VoterModel import markov_jump_process
from auxiliary.auxiliary import random_init
def __jumpprocess(x_init, num_samples, gamma, gamma_prime, t_step, T_max):
    """Simulate ``num_samples`` independent Markov jump trajectories.

    Helper used to parallelize trajectory generation: every sample is an
    independent realization started from the same initial population.

    Parameters
    ----------
    x_init : ndarray
        Initial population (one entry per type).
    num_samples : int
        Number of samples (repetitions) generated from the initial value.
    gamma : ndarray
        Square transition-rate array of size ``len(x_init)`` (adaptive).
    gamma_prime : ndarray
        Square transition-rate array of size ``len(x_init)`` (spontaneous).
    t_step : float
        Time step.
    T_max : int or float
        Maximum time horizon (also known as lag time).

    Returns
    -------
    ndarray
        Trajectories with shape ``[num_types, num_timesteps, num_samples]``.
    """
    num_timesteps = int(np.round(T_max / t_step)) + 1
    num_types = x_init.shape[0]
    trajectories = np.empty([num_types, num_timesteps, num_samples])
    for sample in range(num_samples):
        trajectories[:, :, sample] = markov_jump_process(
            x_init, gamma, gamma_prime, t_step, T_max, seed=None)
    return trajectories
def generate_data(num_types, num_agents, num_testpoints, num_samples, T_max,
                  t_step, gamma, gamma_prime):
    """Generate jump-process trajectory data for random initial states.

    Parameters
    ----------
    num_types : int
        Number of different types.
    num_agents : int
        Number of agents.
    num_testpoints : int
        Number of training points (random initial states).
    num_samples : int
        Number of repetitions per training point.
    T_max : float
        Maximum simulation time (also known as lag time).
    t_step : float
        Time step.
    gamma : ndarray
        Transition rate constants (adaptive).
    gamma_prime : ndarray
        Transition rate constants (spontaneous).

    Returns
    -------
    x_trajectory : ndarray
        Trajectories ordered as
        ``[num_types, num_timesteps, num_samples, num_testpoints]``.
    x_init : ndarray
        Initial populations, one column per training point.
    """
    num_cores = multiprocessing.cpu_count()
    # Draw one random initial population per training point.
    x_init = np.empty([num_types, num_testpoints])
    for point in range(num_testpoints):
        x_init[:, point] = random_init(num_agents, num_types)
    # Simulate every training point on its own worker process.
    results = Parallel(n_jobs=num_cores, verbose=11)(
        delayed(__jumpprocess)(x_init[:, point], num_samples, gamma,
                               gamma_prime, t_step, T_max)
        for point in range(num_testpoints))
    # Move the testpoint axis to the back:
    # [num_types, num_timesteps, num_samples, num_testpoints].
    x_trajectory = np.transpose(results, (1, 2, 3, 0))
    return x_trajectory, x_init
# %% Settings
# Workspace directory
dir_path = 'data/'
dir_name = dir_path + 'raw'
# Lag time and time step
T_max = 0.01
t_step = 0.01
# Rate constants
# gamma holds the adaptive pairwise rates; gamma_prime adds a small
# uniform spontaneous rate on every off-diagonal entry.
gamma = np.array([[0, 2, 1],
                  [1, 0, 2],
                  [2, 1, 0]], dtype=float)
gamma_prime = 0.01 * (np.ones_like(gamma) - np.eye(len(gamma)))
# Parameter sweep: one run per (num_agents, num_samples) pair; the number
# of training points is tied to the agent count via the shared list index.
num_agent_list = [10, 100, 1000]
num_samples_list = [10, 100, 1000]
num_trainingpoints_list = [7, 515, 10000]
# %% Create target directory
num_types = len(gamma)
overall_time = tm.time()
# Create directory
try:
    # Create target Directory
    os.mkdir(dir_name)
    print("Directory ", dir_name, "Created")
except FileExistsError:
    print("Directory ", dir_name, "already exists")
# %% Measurements and preparations for point-wise estimates
for i, num_agents in enumerate(num_agent_list):
    for j, num_samples in enumerate(num_samples_list):
        num_trainingpoints = num_trainingpoints_list[i]
        # Check if file already exists (allows the sweep to be resumed).
        if os.path.isfile(dir_name + '/out_' + str(i) + '_' + str(j) + '.npz'):
            print('\nSimulation with %d agents, %d samples, %d trainingpoints already exists. Continue with next.\n' % (num_agents, num_samples, num_trainingpoints))
            continue
        else:
            print('\nSimulating %d agents, %d samples, %d trainingpoints\n' % (num_agents, num_samples, num_trainingpoints))
        # Print setting to file
        with open(dir_name + '/parameter_settings_' + str(i) + '_' + str(j) + '.txt', 'w') as file:
            file.write('- - - - Parameter settings - - - -\n\n')
            file.write('\nNumber of types: ' + str(num_types))
            file.write('\nGamma:\n' + str(gamma))
            file.write('\nGamma_prime:\n' + str(gamma_prime))
            file.write('\n\nTime step: ' + str(t_step))
            file.write('\nMaximum simulation time: ' + str(T_max))
            file.write('\nList of all agent numbers:\n' + str(num_agent_list))
            file.write('\nList of all sample numbers:\n' + str(num_samples_list))
            file.write('\n\n\n- - - - Current settings - - - -\n\n')
            file.write('\nNumber of agents: ' + str(num_agents))
            file.write('\nNumber of samples: ' + str(num_samples))
            file.write('\nNumber of trainingpoints: ' + str(num_trainingpoints))
        # Start clock
        start_time = tm.time()
        # Run simulation
        trajectory, x_init = generate_data(num_types,
                                           num_agents,
                                           num_trainingpoints,
                                           num_samples,
                                           T_max,
                                           t_step,
                                           gamma,
                                           gamma_prime)
        # Save result and parameters
        np.savez_compressed(dir_name + '/out_' + str(i) + '_' + str(j),
                            trajectory=trajectory,
                            x_init=x_init,
                            gamma=gamma,
                            gamma_prime=gamma_prime,
                            num_agents=num_agents,
                            num_samples=num_samples,
                            num_trainingpoints=num_trainingpoints,
                            t_step=t_step,
                            T_max=T_max)
        # End clock
        string = '{:.2f} seconds'.format(tm.time() - start_time)
        with open(dir_name + '/parameter_settings_' + str(i) + '_' + str(j) + '.txt', 'a') as file:
            file.write('\n\nElapsed time: ' + string)
# Total end clock in last file
# NOTE(review): this relies on i and j keeping their values from the last
# loop iteration, so the total time lands in the last settings file.
string = '{:.2f} seconds'.format(tm.time() - overall_time)
with open(dir_name + '/parameter_settings_' + str(i) + '_' + str(j) + '.txt', 'a') as file:
    file.write('\n\nTotal elapsed time: ' + string)
| StarcoderdataPython |
168198 | from django.urls import include, path
from rest_framework import routers
from . import views
# DRF router exposing the Analysis viewset; registered at the router root
# so the viewset answers directly under the /v1/analysis/ prefix below.
router = routers.DefaultRouter()
router.register(r"", views.AnalysisViewSet, basename="Analysis")
urlpatterns = [path("v1/analysis/", include(router.urls))]
| StarcoderdataPython |
76246 | from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class catalogModel(models.Model):
    """Unmanaged placeholder model used only to register custom permissions.

    No table is created for it (``managed = False``); it exists so the
    catalog permissions below are picked up by Django's auth framework.
    """
    class Meta:
        managed=False
        permissions = (
            ('catalog_admin', 'Catalog Admin'),
            ('catalog_create','Create Catalog Collections'),
        )
| StarcoderdataPython |
1622232 | from twisted.internet import reactor
from scrapy.crawler import CrawlerRunner
from .modules import cnn_spider
from scrapy.utils.project import get_project_settings
from scrapy.settings import Settings
from . import settings as import_settings
import os
import sys
from sys import path
def run_spider():
    """Run the CNN spider once, blocking until crawling finishes.

    Makes this package's directory importable, loads the local Scrapy
    settings module and drives a ``CrawlerRunner`` through Twisted's
    reactor.

    Returns
    -------
    bool
        True when the crawl completed, False when any exception occurred
        (the exception is printed).
    """
    try:
        # Make the package directory importable so Scrapy can resolve the
        # settings module by name.
        dir_path = os.path.dirname(os.path.realpath(__file__))
        sys.path.insert(0, dir_path)
        settings = Settings()
        os.environ['SCRAPY_SETTINGS_MODULE'] = 'settings'
        settings.setmodule(import_settings, priority='project')
        runner = CrawlerRunner(settings)
        d = runner.crawl(cnn_spider.CNNSpider)
        # Stop the reactor when the crawl deferred fires, success or failure.
        d.addBoth(lambda _: reactor.stop())
        # The script will block here until all crawling jobs are finished.
        reactor.run(installSignalHandlers=0)
        return True
    except Exception as e:
        print(e)
        return False
| StarcoderdataPython |
3257515 | #024 - Verificando as primeiras letras de um texto
# Check whether the first five letters of the city name spell "SANTO".
# BUG FIX: the original used .split(), turning `cid` into a list of words,
# so cid[:5] was a list slice and .upper() raised AttributeError. Strip
# surrounding whitespace and slice the string's characters instead.
cid = str(input('em que cidade voce nasceu? ')).strip()
print(cid[:5].upper() == 'SANTO')
| StarcoderdataPython |
4807095 | <filename>src/bootstrapData.py
"""Bootstrap error estimation for Monte Carlo observables.

Reads whitespace-separated columns from the file given as argv[1] (lines
containing '#' are skipped as comments): column 3 is the magnetization m,
column 4 the energy E and columns 5..11 seven structure factors S_1..S_7.
Prints each observable's mean together with its bootstrap error.
"""
import sys
import numpy as np

# BUG FIX: the file handle was opened without ever being closed; use a
# context manager so it is released deterministically.
with open(sys.argv[1], "r") as infile:
    data = infile.readlines()

m = []
e = []
si = [[] for _ in range(7)]
for dat in data:
    if "#" not in dat:
        observables = dat.split()
        m.append(float(observables[3]))
        e.append(float(observables[4]))
        for j in range(7):
            si[j].append(float(observables[j + 5]))

# Plain sample means of every observable.
eMean = np.mean(np.array(e))
mMean = np.mean(np.array(m))
sMean = [np.mean(np.array(si[i])) for i in range(7)]

NBS = 100            # number of bootstrap resamples
BSLENGTH = len(m)    # each resample draws as many configurations as measured
# One row of random configuration indices per bootstrap resample; the error
# is the standard deviation of the resampled means.
bootstrapIndices = np.random.randint(0, BSLENGTH, [NBS, BSLENGTH])
mError = np.std(np.array([np.mean(np.array([m[cfg] for cfg in bootstrapIndices[sample]]), axis=0) for sample in range(NBS)]), axis=0)
eError = np.std(np.array([np.mean(np.array([e[cfg] for cfg in bootstrapIndices[sample]]), axis=0) for sample in range(NBS)]), axis=0)
sError = [np.std(np.array([np.mean(np.array([si[i][cfg] for cfg in bootstrapIndices[sample]]), axis=0) for sample in range(NBS)]), axis=0)
          for i in range(7)]

print("#   <E>   <dE>   <m>   <dm>")
print(eMean," ",eError," ",mMean," ",mError)
print("")
print("#  i   <S_i>  <dS_i>")
for i in range(7):
    print(i+1," ",sMean[i]," ",sError[i])
| StarcoderdataPython |
1616696 | #!/usr/bin/env python3
class Solution:
    def maxDistance(self, colors: list) -> int:
        """Return the maximum |i - j| such that colors[i] != colors[j].

        Replaces the original O(n^2) double loop with an O(n) scan: the
        optimal pair always involves either the first or the last house,
        so it suffices to find the farthest house (from the right) that
        differs from colors[0] and the earliest house that differs from
        colors[-1]. Returns 0 when every house has the same color.
        """
        n = len(colors)
        best = 0
        # Farthest index differing from the first house's color.
        for j in range(n - 1, 0, -1):
            if colors[j] != colors[0]:
                best = j
                break
        # Earliest index differing from the last house's color.
        for i in range(n - 1):
            if colors[i] != colors[n - 1]:
                best = max(best, n - 1 - i)
                break
        return best
# Ad-hoc manual check fixtures for Solution.maxDistance.
a = Solution()
b = [1, 1, 1, 6, 1, 1, 1]
b2 = [0, 1]
b3 = [1, 8, 3, 8, 3]
print(Solution.maxDistance(a, b3)) | StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.