hexsha stringlengths 40 40 | size int64 1 1.03M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 239 | max_stars_repo_name stringlengths 5 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 239 | max_issues_repo_name stringlengths 5 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 239 | max_forks_repo_name stringlengths 5 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.03M | avg_line_length float64 1 958k | max_line_length int64 1 1.03M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ace1ce8a61ec8f90e69d5c5ae89e69c64720b4b9 | 2,753 | py | Python | packs/orion/tests/test_action_list_sdk_verbs.py | prajwal222/prajwal | ce1431858a9b54ae2a9546e9afab9f4b722bd210 | [
"Apache-2.0"
] | null | null | null | packs/orion/tests/test_action_list_sdk_verbs.py | prajwal222/prajwal | ce1431858a9b54ae2a9546e9afab9f4b722bd210 | [
"Apache-2.0"
] | 1 | 2022-03-08T17:03:46.000Z | 2022-03-08T17:03:46.000Z | packs/orion/tests/test_action_list_sdk_verbs.py | isabella232/st2contrib | 182af2fb6e26a1d002954b19a5cc7afc73307872 | [
"Apache-2.0"
] | 1 | 2019-07-10T21:23:49.000Z | 2019-07-10T21:23:49.000Z | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mock import MagicMock
from orion_base_action_test_case import OrionBaseActionTestCase
from list_sdk_verbs import ListSdkVerbs
__all__ = [
'ListSdkVerbsTestCase'
]
class ListSdkVerbsTestCase(OrionBaseActionTestCase):
    """Unit tests for the ListSdkVerbs Orion action."""
    __test__ = True
    action_cls = ListSdkVerbs

    @staticmethod
    def _node_verbs():
        # The three Orion.Nodes verbs contained in results_sdk_verbs.yaml,
        # in the order the action is expected to emit them.
        return {'Entities': [{'Entity': "Orion.Nodes", 'Method': method}
                             for method in ("Unmanage",
                                            "Remanage",
                                            "PollNow")]}

    def _mocked_action(self):
        # Build an action wired to canned query results so no live Orion
        # server is contacted.
        action = self.get_action_instance(self.full_config)
        action.connect = MagicMock(return_value=True)
        action.query = MagicMock(
            return_value=self.load_yaml("results_sdk_verbs.yaml"))
        return action

    def test_run_connect_fail(self):
        """run() raises ValueError when the Orion connection fails."""
        action = self.setup_connect_fail()
        self.assertRaises(ValueError, action.run, "orion")

    def test_run_listsdk_verbs(self):
        """run() returns every verb present in the query results."""
        action = self._mocked_action()
        self.assertEqual(action.run("orion"), self._node_verbs())

    def test_run_listsdk_verbs_filtered(self):
        """run() with a verb filter argument.

        NOTE(review): the expected payload here is identical to the
        unfiltered case even though "PollNow" is passed as a filter.
        Presumably filtering happens inside the (mocked) SWQL query;
        confirm, otherwise this expectation should contain only the
        PollNow entry.
        """
        action = self._mocked_action()
        self.assertEqual(action.run("orion", "PollNow"), self._node_verbs())
| 38.774648 | 74 | 0.636033 |
ace1ceee576c5415d261a5ea248207fcb854c02a | 4,147 | py | Python | inspect_camera.py | kotaroito/chainer_imagenet_tools | f9d3572821c13a9e675da0d5fc45eb53b122541a | [
"MIT"
] | 22 | 2015-09-08T22:17:45.000Z | 2018-02-25T14:00:22.000Z | inspect_camera.py | kotaroito/chainer_imagenet_tools | f9d3572821c13a9e675da0d5fc45eb53b122541a | [
"MIT"
] | null | null | null | inspect_camera.py | kotaroito/chainer_imagenet_tools | f9d3572821c13a9e675da0d5fc45eb53b122541a | [
"MIT"
] | 9 | 2016-04-05T14:53:23.000Z | 2018-11-10T06:02:35.000Z | #!/usr/bin/env python
"""
Realtime image inspection
"""
from __future__ import print_function
import argparse
import os
import sys
import random
import cv2
import numpy as np
from PIL import Image
import chainer
from chainer import cuda
import chainer.functions as F
from chainer.functions import caffe
# Command-line interface: model type/path, mean file and GPU selection.
parser = argparse.ArgumentParser(
    description='Evaluate a Caffe reference model on ILSVRC2012 dataset')
parser.add_argument('model_type', choices=('alexnet', 'caffenet', 'googlenet'),
                    help='Model type (alexnet, caffenet, googlenet)')
parser.add_argument('model', help='Path to the pretrained Caffe model')
parser.add_argument('--mean', '-m', default='ilsvrc_2012_mean.npy',
                    help='Path to the mean file')
# BUG FIX: "nevative" -> "negative" in the user-visible help text.
parser.add_argument('--gpu', '-g', type=int, default=-1,
                    help='Zero-origin GPU ID (negative value indicates CPU)')
args = parser.parse_args()

# Loading a Caffe model can take a while, so report progress on stderr.
print('Loading Caffe model file %s...' % args.model, file=sys.stderr)
func = caffe.CaffeFunction(args.model)
print('Loaded', file=sys.stderr)
if args.gpu >= 0:
    # Initialise the selected device and move the model weights onto it.
    cuda.init(args.gpu)
    func.to_gpu()
# Per-architecture configuration: input crop size, mean image and the
# forward()/predict() closures bound to the loaded Caffe function.
if args.model_type == 'alexnet' or args.model_type == 'caffenet':
    in_size = 227
    # AlexNet/CaffeNet use a full per-pixel mean image loaded from disk.
    mean_image = np.load(args.mean)
    def forward(x, t):
        # Loss/accuracy from the final fully-connected layer 'fc8'.
        y, = func(inputs={'data': x}, outputs=['fc8'], train=False)
        return F.softmax_cross_entropy(y, t), F.accuracy(y, t)
    def predict(x):
        # Class probabilities from 'fc8'.
        y, = func(inputs={'data': x}, outputs=['fc8'], train=False)
        return F.softmax(y)
elif args.model_type == 'googlenet':
    in_size = 224
    # Constant mean over spatial pixels
    mean_image = np.ndarray((3, 256, 256), dtype=np.float32)
    mean_image[0] = 104
    mean_image[1] = 117
    mean_image[2] = 123
    def forward(x, t):
        # GoogLeNet: use the main classifier; the two auxiliary heads'
        # pooling layers are disabled since their outputs are unused.
        y, = func(inputs={'data': x}, outputs=['loss3/classifier'],
                  disable=['loss1/ave_pool', 'loss2/ave_pool'],
                  train=False)
        return F.softmax_cross_entropy(y, t), F.accuracy(y, t)
    def predict(x):
        y, = func(inputs={'data': x}, outputs=['loss3/classifier'],
                  disable=['loss1/ave_pool', 'loss2/ave_pool'],
                  train=False)
        return F.softmax(y)
# Centre-crop coordinates: take an in_size x in_size window out of the
# 256 x 256 image, and crop the mean image to match.
cropwidth = 256 - in_size
start = cropwidth // 2
stop = start + in_size
mean_image = mean_image[:, start:stop, start:stop].copy()
target_shape = (256, 256)   # NOTE(review): unused; kept for compatibility
output_side_length = 256    # frames are scaled so the short side is 256 px

# One human-readable label per output class, tab separated.
categories = np.loadtxt("labels.txt", str, delimiter="\t")

cam = cv2.VideoCapture(0)
count = 0
while True:
    ret, capture = cam.read()
    if not ret:
        print('error')
        break
    cv2.imshow('chainer inspector', capture)

    # Run inference only once every 30 frames so the preview stays live.
    count += 1
    if count == 30:
        image = capture.copy()
        height, width, depth = image.shape
        # Scale so the SHORTER side becomes output_side_length.  Floor
        # division ('//') matches the original Python 2 integer '/' and
        # keeps cv2.resize happy under Python 3 as well.
        new_height = output_side_length
        new_width = output_side_length
        if height > width:
            new_height = output_side_length * height // width
        else:
            new_width = output_side_length * width // height
        resized_img = cv2.resize(image, (new_width, new_height))
        # Centre-crop to output_side_length x output_side_length.
        height_offset = (new_height - output_side_length) // 2
        width_offset = (new_width - output_side_length) // 2
        image = resized_img[height_offset:height_offset + output_side_length,
                            width_offset:width_offset + output_side_length]
        # HWC -> CHW, crop to the network input window, subtract the mean.
        image = image.transpose(2, 0, 1)
        image = image[:, start:stop, start:stop].astype(np.float32)
        image -= mean_image
        x_batch = np.ndarray((1, 3, in_size, in_size), dtype=np.float32)
        x_batch[0] = image
        if args.gpu >= 0:
            x_batch = cuda.to_gpu(x_batch)
        x = chainer.Variable(x_batch, volatile=True)
        score = predict(x)
        # BUG FIX: the original did `score = cuda.to_cpu(score.data)` on the
        # GPU path and then still read `score.data[0]`, which fails once
        # `score` is a plain ndarray.  Extract the raw probability array
        # exactly once for both paths instead.
        if args.gpu >= 0:
            probs = cuda.to_cpu(score.data)
        else:
            probs = score.data
        top_k = 5
        # Sort (probability, label) pairs by probability, descending.
        # sorted(key=...) replaces the Python-2-only cmp= form with
        # identical ordering (stable sort, same comparison key).
        prediction = sorted(zip(probs[0].tolist(), categories),
                            key=lambda pair: pair[0], reverse=True)
        for rank, (prob, label) in enumerate(prediction[:top_k], start=1):
            print('#%d | %s | %4.1f%%' % (rank, label, prob * 100))
        count = 0
cam.release()
cv2.destroyAllWindows()
ace1cf646b9ae8c231b1bb645e6db25ffcd638ea | 13,833 | py | Python | yocto/poky/bitbake/lib/bb/server/xmlrpc.py | libreswitch/libreswitch | 1bb99e4bbc55aff46048453e28a1466b08d338aa | [
"Apache-2.0"
] | 16 | 2017-01-17T15:20:43.000Z | 2021-03-19T05:45:14.000Z | yocto/poky/bitbake/lib/bb/server/xmlrpc.py | libreswitch/libreswitch | 1bb99e4bbc55aff46048453e28a1466b08d338aa | [
"Apache-2.0"
] | 415 | 2016-12-20T17:20:45.000Z | 2018-09-23T07:59:23.000Z | yocto/poky/bitbake/lib/bb/server/xmlrpc.py | libreswitch/libreswitch | 1bb99e4bbc55aff46048453e28a1466b08d338aa | [
"Apache-2.0"
] | 10 | 2016-12-20T13:24:50.000Z | 2021-03-19T05:46:43.000Z | #
# BitBake XMLRPC Server
#
# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
# Copyright (C) 2006 - 2008 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
This module implements an xmlrpc server for BitBake.
Use this by deriving a class from BitBakeXMLRPCServer and then adding
methods which you want to "export" via XMLRPC. If the methods have the
prefix xmlrpc_, then registering those function will happen automatically,
if not, you need to call register_function.
Use register_idle_function() to add a function which the xmlrpc server
calls from within server_forever when no requests are pending. Make sure
that those functions are non-blocking or else you will introduce latency
in the server's main loop.
"""
import bb
import xmlrpclib, sys
from bb import daemonize
from bb.ui import uievent
import hashlib, time
import socket
import os, signal
import threading
try:
import cPickle as pickle
except ImportError:
import pickle
DEBUG = False
from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
import inspect, select, httplib
from . import BitBakeBaseServer, BitBakeBaseServerConnection, BaseImplServer
class BBTransport(xmlrpclib.Transport):
    """xmlrpclib Transport that adds a connection timeout and sends an
    optional "Bitbake-token" header identifying the controlling client."""
    def __init__(self, timeout):
        # Timeout (seconds) applied to the underlying HTTPConnection.
        self.timeout = timeout
        # Token set via set_connection_token(); None means "no token yet".
        self.connection_token = None
        xmlrpclib.Transport.__init__(self)
    # Modified from default to pass timeout to HTTPConnection
    def make_connection(self, host):
        #return an existing connection if possible. This allows
        #HTTP/1.1 keep-alive.
        if self._connection and host == self._connection[0]:
            return self._connection[1]
        # create a HTTP connection object from a host descriptor
        chost, self._extra_headers, x509 = self.get_host_info(host)
        #store the host argument along with the connection object
        self._connection = host, httplib.HTTPConnection(chost, timeout=self.timeout)
        return self._connection[1]
    def set_connection_token(self, token):
        # Remember the token; it is added to every subsequent request so the
        # server can distinguish the controlling client from intruders.
        self.connection_token = token
    def send_content(self, h, body):
        # Inject the token header (if any) before the base class writes the
        # XML-RPC payload.
        if self.connection_token:
            h.putheader("Bitbake-token", self.connection_token)
        xmlrpclib.Transport.send_content(self, h, body)
def _create_server(host, port, timeout=60):
    """Return an XML-RPC proxy for host:port together with its transport.

    The transport is returned separately so callers can later attach a
    connection token to it.
    """
    transport = BBTransport(timeout)
    uri = "http://%s:%d/" % (host, port)
    proxy = xmlrpclib.ServerProxy(uri, transport=transport, allow_none=True)
    return proxy, transport
class BitBakeServerCommands():
    """Methods exposed over XML-RPC by the BitBake server.

    Every public method here is auto-registered by
    XMLRPCServer.autoregister_all_functions().  `self.cooker` is assigned
    externally via XMLRPCServer.addcooker().
    """
    def __init__(self, server):
        self.server = server
        # True while a (non-observer) client owns the server.
        self.has_client = False
    def registerEventHandler(self, host, port):
        """
        Register a remote UI Event Handler
        """
        s, t = _create_server(host, port)
        # we don't allow connections if the cooker is running
        if (self.cooker.state in [bb.cooker.state.parsing, bb.cooker.state.running]):
            return None, "Cooker is busy: %s" % bb.cooker.state.get_name(self.cooker.state)
        self.event_handle = bb.event.register_UIHhandler(s, True)
        return self.event_handle, 'OK'
    def unregisterEventHandler(self, handlerNum):
        """
        Unregister a remote UI Event Handler
        """
        return bb.event.unregister_UIHhandler(handlerNum)
    def runCommand(self, command):
        """
        Run a cooker command on the server
        """
        return self.cooker.command.runCommand(command, self.server.readonly)
    def getEventHandle(self):
        # Handle previously returned by registerEventHandler().
        return self.event_handle
    def terminateServer(self):
        """
        Trigger the server to quit
        """
        self.server.quit = True
        print("Server (cooker) exiting")
        return
    def addClient(self):
        """Claim the server for a client; returns a token or None if taken.

        NOTE(review): the token is md5(time()) - predictable, so this is
        an ownership marker, not a security boundary.
        """
        if self.has_client:
            return None
        token = hashlib.md5(str(time.time())).hexdigest()
        self.server.set_connection_token(token)
        self.has_client = True
        return token
    def removeClient(self):
        # Release ownership; a single-use server shuts down afterwards.
        if self.has_client:
            self.server.set_connection_token(None)
            self.has_client = False
            if self.server.single_use:
                self.server.quit = True
# This request handler checks if the request has a "Bitbake-token" header
# field (this comes from the client side) and compares it with its internal
# "Bitbake-token" field (this comes from the server). If the two are not
# equal, it is assumed that a client is trying to connect to the server
# while another client is connected to the server. In this case, a 503 error
# ("service unavailable") is returned to the client.
class BitBakeXMLRPCRequestHandler(SimpleXMLRPCRequestHandler):
    """Request handler enforcing the single-client token scheme.

    Rejects POSTs whose "Bitbake-token" header matches neither the
    server's current token nor the literal "observer" (read-only access).
    """
    def __init__(self, request, client_address, server):
        self.server = server
        SimpleXMLRPCRequestHandler.__init__(self, request, client_address, server)
    def do_POST(self):
        # Missing header -> treated as no token.
        try:
            remote_token = self.headers["Bitbake-token"]
        except:
            remote_token = None
        if remote_token != self.server.connection_token and remote_token != "observer":
            self.report_503()
        else:
            # Observers get read-only command dispatch (see runCommand).
            if remote_token == "observer":
                self.server.readonly = True
            else:
                self.server.readonly = False
            SimpleXMLRPCRequestHandler.do_POST(self)
    def report_503(self):
        # 503 "service unavailable": another client already owns the server.
        self.send_response(503)
        response = 'No more client allowed'
        self.send_header("Content-type", "text/plain")
        self.send_header("Content-length", str(len(response)))
        self.end_headers()
        self.wfile.write(response)
class XMLRPCProxyServer(BaseImplServer):
    """Placeholder server implementation used on the client side.

    Not a real working server - it only records where the remote BitBake
    server lives so connection objects can reach it.
    """
    def __init__(self, host, port):
        # Store the remote coordinates; nothing is started locally.
        self.host, self.port = host, port
class XMLRPCServer(SimpleXMLRPCServer, BaseImplServer):
    """The in-process BitBake XML-RPC server.

    Serves registered commands and interleaves idle-function callbacks
    (registered on BaseImplServer) with request handling.
    """
    # remove this when you're done with debugging
    # allow_reuse_address = True
    def __init__(self, interface, single_use=False):
        """
        Constructor
        """
        BaseImplServer.__init__(self)
        self.single_use = single_use
        # Use auto port configuration
        if (interface[1] == -1):
            interface = (interface[0], 0)
        SimpleXMLRPCServer.__init__(self, interface,
                                    requestHandler=BitBakeXMLRPCRequestHandler,
                                    logRequests=False, allow_none=True)
        # Record the actual bound address (port may have been auto-chosen).
        self.host, self.port = self.socket.getsockname()
        self.connection_token = None
        #self.register_introspection_functions()
        self.commands = BitBakeServerCommands(self)
        self.autoregister_all_functions(self.commands, "")
        self.interface = interface
    def addcooker(self, cooker):
        # Also hand the cooker to the command dispatcher.
        BaseImplServer.addcooker(self, cooker)
        self.commands.cooker = cooker
    def autoregister_all_functions(self, context, prefix):
        """
        Convenience method for registering all functions in the scope
        of this class that start with a common prefix
        """
        methodlist = inspect.getmembers(context, inspect.ismethod)
        for name, method in methodlist:
            if name.startswith(prefix):
                self.register_function(method, name[len(prefix):])
    def serve_forever(self):
        # Start the actual XMLRPC server
        bb.cooker.server_main(self.cooker, self._serve_forever)
    def _serve_forever(self):
        """
        Serve Requests. Overloaded to honor a quit command
        """
        self.quit = False
        while not self.quit:
            fds = [self]
            nextsleep = 0.1
            # Idle-function protocol: return False to deregister, True to
            # poll again immediately, a float for a shorter sleep, or a
            # list of extra fds to add to the select() set.
            # NOTE: deleting from self._idlefuns while iterating .items()
            # is safe only on Python 2, where items() returns a list.
            for function, data in self._idlefuns.items():
                retval = None
                try:
                    retval = function(self, data, False)
                    if retval is False:
                        del self._idlefuns[function]
                    elif retval is True:
                        nextsleep = 0
                    elif isinstance(retval, float):
                        if (retval < nextsleep):
                            nextsleep = retval
                    else:
                        fds = fds + retval
                except SystemExit:
                    raise
                except:
                    import traceback
                    traceback.print_exc()
                    if retval == None:
                        # the function execute failed; delete it
                        del self._idlefuns[function]
                    pass
            # Never wait longer than the shortest sleep any idle function
            # asked for, nor longer than the socket's own timeout.
            socktimeout = self.socket.gettimeout() or nextsleep
            socktimeout = min(socktimeout, nextsleep)
            # Mirror what BaseServer handle_request would do
            try:
                fd_sets = select.select(fds, [], [], socktimeout)
                if fd_sets[0] and self in fd_sets[0]:
                    self._handle_request_noblock()
            except IOError:
                # we ignore interrupted calls
                pass
        # Tell idle functions we're exiting
        for function, data in self._idlefuns.items():
            try:
                retval = function(self, data, True)
            except:
                pass
        self.server_close()
        return
    def set_connection_token(self, token):
        # Token the request handler compares "Bitbake-token" headers against.
        self.connection_token = token
class BitBakeXMLRPCServerConnection(BitBakeBaseServerConnection):
    """Client-side handle on a running BitBake XML-RPC server."""
    def __init__(self, serverImpl, clientinfo=("localhost", 0), observer_only = False, featureset = None):
        # Proxy + transport pair; the transport carries the auth token.
        self.connection, self.transport = _create_server(serverImpl.host, serverImpl.port)
        self.clientinfo = clientinfo
        self.serverImpl = serverImpl
        self.observer_only = observer_only
        if featureset:
            self.featureset = featureset
        else:
            self.featureset = []
    def connect(self, token = None):
        # Acquire a token: observers use the fixed "observer" token,
        # controlling clients ask the server for one (None if it is taken).
        if token is None:
            if self.observer_only:
                token = "observer"
            else:
                token = self.connection.addClient()
        if token is None:
            return None
        self.transport.set_connection_token(token)
        return self
    def setupEventQueue(self):
        # Attach a UI event queue and replay any events buffered so far.
        self.events = uievent.BBUIEventQueue(self.connection, self.clientinfo)
        for event in bb.event.ui_queue:
            self.events.queue_event(event)
        _, error = self.connection.runCommand(["setFeatures", self.featureset])
        if error:
            # disconnect the client, we can't make the setFeature work
            self.connection.removeClient()
            # no need to log it here, the error shall be sent to the client
            raise BaseException(error)
    def removeClient(self):
        # Observers never claimed the server, so there is nothing to release.
        if not self.observer_only:
            self.connection.removeClient()
    def terminate(self):
        # Don't wait for server indefinitely
        import socket
        socket.setdefaulttimeout(2)
        # Best-effort shutdown: the server may already be gone.
        try:
            self.events.system_quit()
        except:
            pass
        try:
            self.connection.removeClient()
        except:
            pass
class BitBakeServer(BitBakeBaseServer):
    """Server-side wiring: owns the concrete XMLRPCServer implementation."""
    def initServer(self, interface=("localhost", 0), single_use=False):
        # Remember the requested interface and build the real server.
        self.interface = interface
        self.serverImpl = XMLRPCServer(interface, single_use)
    def detach(self):
        # Daemonize; the background process runs serve_forever().
        daemonize.createDaemon(self.serverImpl.serve_forever,
                               "bitbake-cookerdaemon.log")
        del self.cooker
    def establishConnection(self, featureset):
        # Create a (non-observer) connection to our own server instance.
        conn = BitBakeXMLRPCServerConnection(self.serverImpl, self.interface,
                                             False, featureset)
        self.connection = conn
        return conn.connect()
    def set_connection_token(self, token):
        self.connection.transport.set_connection_token(token)
class BitBakeXMLRPCClient(BitBakeBaseServer):
    """Client for connecting to a remote BitBake server ("host:port")."""
    def __init__(self, observer_only = False, token = None):
        # A pre-existing token lets us resume control of a server.
        self.token = token
        self.observer_only = observer_only
        # if we need extra caches, just tell the server to load them all
        pass
    def saveConnectionDetails(self, remote):
        # Remote address string, "server:port"; parsed in establishConnection.
        self.remote = remote
    def establishConnection(self, featureset):
        # The format of "remote" must be "server:port"
        try:
            [host, port] = self.remote.split(":")
            port = int(port)
        except Exception as e:
            bb.warn("Failed to read remote definition (%s)" % str(e))
            raise e
        # We need our IP for the server connection. We get the IP
        # by trying to connect with the server
        # (UDP connect() sends no packets; it only selects the local
        # interface that would route to the server.)
        try:
            s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            s.connect((host, port))
            ip = s.getsockname()[0]
            s.close()
        except Exception as e:
            bb.warn("Could not create socket for %s:%s (%s)" % (host, port, str(e)))
            raise e
        try:
            # Stub serverImpl just records the remote coordinates.
            self.serverImpl = XMLRPCProxyServer(host, port)
            self.connection = BitBakeXMLRPCServerConnection(self.serverImpl, (ip, 0), self.observer_only, featureset)
            return self.connection.connect(self.token)
        except Exception as e:
            bb.warn("Could not connect to server at %s:%s (%s)" % (host, port, str(e)))
            raise e
    def endSession(self):
        # Release our claim on the remote server.
        self.connection.removeClient()
| 35.378517 | 117 | 0.629437 |
ace1d03fdcfbeed01e2d506a54fc2d0351ecaab3 | 370 | py | Python | rest_api/urls.py | pmdarrow/rest-api | 83d9d1bfb06e3e7731a7ebe1a6f2aea8975a874f | [
"MIT"
] | null | null | null | rest_api/urls.py | pmdarrow/rest-api | 83d9d1bfb06e3e7731a7ebe1a6f2aea8975a874f | [
"MIT"
] | null | null | null | rest_api/urls.py | pmdarrow/rest-api | 83d9d1bfb06e3e7731a7ebe1a6f2aea8975a874f | [
"MIT"
] | null | null | null | """
rest_api URL Configuration
"""
from django.conf.urls import url
from django.views.generic import RedirectView
from users.views import Users, UserDetail
# URL routing for the users API.
urlpatterns = [
    # Root redirects to the user list view.
    url(r'^$', RedirectView.as_view(pattern_name='users')),
    url(r'^users/$', Users.as_view(), name='users'),
    # Detail view keyed by username (word characters only).
    url(r'^users/(?P<username>\w+)/$', UserDetail.as_view(), name='user-detail')
]
| 26.428571 | 80 | 0.694595 |
ace1d0808da54be32089b17b4052b53fb8f1572f | 32,295 | py | Python | plugins/modules/oci_mysql_db_system_facts.py | hanielburton/oci-ansible-collection | dfdffde637f746d346ba35569be8c3a3407022f2 | [
"Apache-2.0"
] | null | null | null | plugins/modules/oci_mysql_db_system_facts.py | hanielburton/oci-ansible-collection | dfdffde637f746d346ba35569be8c3a3407022f2 | [
"Apache-2.0"
] | null | null | null | plugins/modules/oci_mysql_db_system_facts.py | hanielburton/oci-ansible-collection | dfdffde637f746d346ba35569be8c3a3407022f2 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# Copyright (c) 2017, 2021 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Ansible module metadata: this module is community-supported, in preview.
ANSIBLE_METADATA = {
    "metadata_version": "1.1",
    "status": ["preview"],
    "supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_mysql_db_system_facts
short_description: Fetches details about one or multiple DbSystem resources in Oracle Cloud Infrastructure
description:
- Fetches details about one or multiple DbSystem resources in Oracle Cloud Infrastructure
- Get a list of DB Systems in the specified compartment.
The default sort order is by timeUpdated, descending.
- If I(db_system_id) is specified, the details of a single DbSystem will be returned.
version_added: "2.9"
author: Oracle (@oracle)
options:
db_system_id:
description:
- The DB System L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm).
- Required to get a specific db_system.
type: str
aliases: ["id"]
compartment_id:
description:
- The compartment L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm).
- Required to list multiple db_systems.
type: str
is_analytics_cluster_attached:
description:
- If true, return only DB Systems with an Analytics Cluster attached, if false
return only DB Systems with no Analytics Cluster attached. If not
present, return all DB Systems.
type: bool
display_name:
description:
- A filter to return only the resource matching the given display name exactly.
type: str
aliases: ["name"]
lifecycle_state:
description:
- DbSystem Lifecycle State
type: str
choices:
- "CREATING"
- "ACTIVE"
- "INACTIVE"
- "UPDATING"
- "DELETING"
- "DELETED"
- "FAILED"
configuration_id:
description:
- The requested Configuration instance.
type: str
is_up_to_date:
description:
- Filter instances if they are using the latest revision of the
Configuration they are associated with.
type: bool
sort_by:
description:
- The field to sort by. Only one sort order may be provided. Time fields are default ordered as descending. Display name is default ordered as
ascending.
type: str
choices:
- "displayName"
- "timeCreated"
sort_order:
description:
- The sort order to use (ASC or DESC).
type: str
choices:
- "ASC"
- "DESC"
extends_documentation_fragment: [ oracle.oci.oracle ]
"""
EXAMPLES = """
- name: List db_systems
oci_mysql_db_system_facts:
compartment_id: ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx
- name: Get a specific db_system
oci_mysql_db_system_facts:
db_system_id: ocid1.dbsystem.oc1..xxxxxxEXAMPLExxxxxx
"""
RETURN = """
db_systems:
description:
- List of DbSystem resources
returned: on success
type: complex
contains:
id:
description:
- The OCID of the DB System.
returned: on success
type: string
sample: ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx
display_name:
description:
- The user-friendly name for the DB System. It does not have to be unique.
returned: on success
type: string
sample: display_name_example
description:
description:
- User-provided data about the DB System.
returned: on success
type: string
sample: description_example
compartment_id:
description:
- The OCID of the compartment the DB System belongs in.
returned: on success
type: string
sample: ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx
subnet_id:
description:
- The OCID of the subnet the DB System is associated with.
returned: on success
type: string
sample: ocid1.subnet.oc1..xxxxxxEXAMPLExxxxxx
is_analytics_cluster_attached:
description:
- If the DB System has an Analytics Cluster attached.
returned: on success
type: bool
sample: true
analytics_cluster:
description:
- ""
returned: on success
type: complex
contains:
shape_name:
description:
- "The shape determines resources to allocate to the Analytics
Cluster nodes - CPU cores, memory."
returned: on success
type: string
sample: shape_name_example
cluster_size:
description:
- The number of analytics-processing compute instances, of the
specified shape, in the Analytics Cluster.
returned: on success
type: int
sample: 56
lifecycle_state:
description:
- The current state of the MySQL Analytics Cluster.
returned: on success
type: string
sample: lifecycle_state_example
time_created:
description:
- The date and time the Analytics Cluster was created, as described by L(RFC 3339,https://tools.ietf.org/rfc/rfc3339).
returned: on success
type: string
sample: 2013-10-20T19:20:30+01:00
time_updated:
description:
- The time the Analytics Cluster was last updated, as described by L(RFC 3339,https://tools.ietf.org/rfc/rfc3339).
returned: on success
type: string
sample: 2013-10-20T19:20:30+01:00
availability_domain:
description:
- The Availability Domain where the primary DB System should be located.
returned: on success
type: string
sample: Uocm:PHX-AD-1
fault_domain:
description:
- The name of the Fault Domain the DB System is located in.
returned: on success
type: string
sample: fault_domain_example
shape_name:
description:
- "The shape of the primary instances of the DB System. The shape
determines resources allocated to a DB System - CPU cores
and memory for VM shapes; CPU cores, memory and storage for non-VM
(or bare metal) shapes. To get a list of shapes, use (the
L(ListShapes,https://docs.cloud.oracle.com/en-us/iaas/api/#/en/mysql/20181021/ShapeSummary/ListShapes) operation."
returned: on success
type: string
sample: shape_name_example
mysql_version:
description:
- Name of the MySQL Version in use for the DB System.
returned: on success
type: string
sample: mysql_version_example
backup_policy:
description:
- ""
returned: on success
type: complex
contains:
is_enabled:
description:
- If automated backups are enabled or disabled.
returned: on success
type: bool
sample: true
window_start_time:
description:
- The start of a 30-minute window of time in which daily, automated backups occur.
- "This should be in the format of the \\"Time\\" portion of an RFC3339-formatted timestamp. Any second or sub-second time data will be
truncated to zero."
- At some point in the window, the system may incur a brief service disruption as the backup is performed.
- "If not defined, a window is selected from the following Region-based time-spans:
- eu-frankfurt-1: 20:00 - 04:00 UTC
- us-ashburn-1: 03:00 - 11:00 UTC
- uk-london-1: 06:00 - 14:00 UTC
- ap-tokyo-1: 13:00 - 21:00
- us-phoenix-1: 06:00 - 14:00"
returned: on success
type: string
sample: window_start_time_example
retention_in_days:
description:
- The number of days automated backups are retained.
returned: on success
type: int
sample: 56
freeform_tags:
description:
- Simple key-value pair applied without any predefined name, type or scope. Exists for cross-compatibility only.
- Tags defined here will be copied verbatim as tags on the Backup resource created by this BackupPolicy.
- "Example: `{\\"bar-key\\": \\"value\\"}`"
returned: on success
type: dict
sample: {'Department': 'Finance'}
defined_tags:
description:
- Usage of predefined tag keys. These predefined keys are scoped to namespaces.
- Tags defined here will be copied verbatim as tags on the Backup resource created by this BackupPolicy.
- "Example: `{\\"foo-namespace\\": {\\"bar-key\\": \\"value\\"}}`"
returned: on success
type: dict
sample: {'Operations': {'CostCenter': 'US'}}
source:
description:
- ""
returned: on success
type: complex
contains:
source_type:
description:
- The specific source identifier.
returned: on success
type: string
sample: NONE
backup_id:
description:
- The OCID of the backup to be used as the source for the new DB System.
returned: on success
type: string
sample: ocid1.backup.oc1..xxxxxxEXAMPLExxxxxx
configuration_id:
description:
- The OCID of the Configuration to be used for Instances in this DB System.
returned: on success
type: string
sample: ocid1.configuration.oc1..xxxxxxEXAMPLExxxxxx
data_storage_size_in_gbs:
description:
- Initial size of the data volume in GiBs that will be created and attached.
returned: on success
type: int
sample: 56
hostname_label:
description:
- "The hostname for the primary endpoint of the DB System. Used for DNS.
The value is the hostname portion of the primary private IP's fully qualified domain name (FQDN)
(for example, \\"dbsystem-1\\" in FQDN \\"dbsystem-1.subnet123.vcn1.oraclevcn.com\\").
Must be unique across all VNICs in the subnet and comply with RFC 952 and RFC 1123."
returned: on success
type: string
sample: hostname_label_example
ip_address:
description:
- "The IP address the DB System is configured to listen on. A private
IP address of the primary endpoint of the DB System. Must be an
available IP address within the subnet's CIDR. This will be a
\\"dotted-quad\\" style IPv4 address."
returned: on success
type: string
sample: ip_address_example
port:
description:
- The port for primary endpoint of the DB System to listen on.
returned: on success
type: int
sample: 56
port_x:
description:
- The network port on which X Plugin listens for TCP/IP connections. This is the X Plugin equivalent of port.
returned: on success
type: int
sample: 56
endpoints:
description:
- The network endpoints available for this DB System.
returned: on success
type: complex
contains:
hostname:
description:
- The network address of the DB System.
returned: on success
type: string
sample: hostname_example
ip_address:
description:
- The IP address the DB System is configured to listen on.
returned: on success
type: string
sample: ip_address_example
port:
description:
- The port the MySQL instance listens on.
returned: on success
type: int
sample: 56
port_x:
description:
- The network port where to connect to use this endpoint using the X protocol.
returned: on success
type: int
sample: 56
modes:
description:
- The access modes from the client that this endpoint supports.
returned: on success
type: list
sample: []
status:
description:
- The state of the endpoints, as far as it can seen from the DB System.
There may be some inconsistency with the actual state of the MySQL service.
returned: on success
type: string
sample: ACTIVE
status_details:
description:
- Additional information about the current endpoint status.
returned: on success
type: string
sample: status_details_example
channels:
description:
- A list with a summary of all the Channels attached to the DB System.
returned: on success
type: complex
contains:
id:
description:
- The OCID of the Channel.
returned: on success
type: string
sample: ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx
compartment_id:
description:
- The OCID of the compartment.
returned: on success
type: string
sample: ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx
is_enabled:
description:
- Whether the Channel has been enabled by the user.
returned: on success
type: bool
sample: true
source:
description:
- ""
returned: on success
type: complex
contains:
source_type:
description:
- The specific source identifier.
returned: on success
type: string
sample: MYSQL
hostname:
description:
- The network address of the MySQL instance.
returned: on success
type: string
sample: hostname_example
port:
description:
- The port the source MySQL instance listens on.
returned: on success
type: int
sample: 56
username:
description:
- The name of the replication user on the source MySQL instance.
The username has a maximum length of 96 characters. For more information,
please see the L(MySQL documentation,https://dev.mysql.com/doc/refman/8.0/en/change-master-to.html)
returned: on success
type: string
sample: username_example
ssl_mode:
description:
- The SSL mode of the Channel.
returned: on success
type: string
sample: VERIFY_IDENTITY
ssl_ca_certificate:
description:
- ""
returned: on success
type: complex
contains:
certificate_type:
description:
- The type of CA certificate.
returned: on success
type: string
sample: PEM
contents:
description:
- The string containing the CA certificate in PEM format.
returned: on success
type: string
sample: contents_example
target:
description:
- ""
returned: on success
type: complex
contains:
target_type:
description:
- The specific target identifier.
returned: on success
type: string
sample: DBSYSTEM
db_system_id:
description:
- The OCID of the source DB System.
returned: on success
type: string
sample: ocid1.dbsystem.oc1..xxxxxxEXAMPLExxxxxx
channel_name:
description:
- The case-insensitive name that identifies the replication channel. Channel names
must follow the rules defined for L(MySQL identifiers,https://dev.mysql.com/doc/refman/8.0/en/identifiers.html).
The names of non-Deleted Channels must be unique for each DB System.
returned: on success
type: string
sample: channel_name_example
applier_username:
description:
- The username for the replication applier of the target MySQL DB System.
returned: on success
type: string
sample: applier_username_example
lifecycle_state:
description:
- The state of the Channel.
returned: on success
type: string
sample: lifecycle_state_example
lifecycle_details:
description:
- A message describing the state of the Channel.
returned: on success
type: string
sample: lifecycle_details_example
display_name:
description:
- The user-friendly name for the Channel. It does not have to be unique.
returned: on success
type: string
sample: display_name_example
time_created:
description:
- The date and time the Channel was created, as described by L(RFC 3339,https://tools.ietf.org/rfc/rfc3339).
returned: on success
type: string
sample: 2013-10-20T19:20:30+01:00
time_updated:
description:
- The time the Channel was last updated, as described by L(RFC 3339,https://tools.ietf.org/rfc/rfc3339).
returned: on success
type: string
sample: 2013-10-20T19:20:30+01:00
freeform_tags:
description:
- "Simple key-value pair applied without any predefined name, type or scope. Exists for cross-compatibility only.
Example: `{\\"bar-key\\": \\"value\\"}`"
returned: on success
type: dict
sample: {'Department': 'Finance'}
defined_tags:
description:
- "Usage of predefined tag keys. These predefined keys are scoped to namespaces.
Example: `{\\"foo-namespace\\": {\\"bar-key\\": \\"value\\"}}`"
returned: on success
type: dict
sample: {'Operations': {'CostCenter': 'US'}}
lifecycle_state:
description:
- The current state of the DB System.
returned: on success
type: string
sample: CREATING
lifecycle_details:
description:
- Additional information about the current lifecycleState.
returned: on success
type: string
sample: lifecycle_details_example
maintenance:
description:
- ""
returned: on success
type: complex
contains:
window_start_time:
description:
- The start time of the maintenance window.
- "This string is of the format: \\"{day-of-week} {time-of-day}\\"."
- "\\"{day-of-week}\\" is a case-insensitive string like \\"mon\\", \\"tue\\", &c."
- "\\"{time-of-day}\\" is the \\"Time\\" portion of an RFC3339-formatted timestamp. Any second or sub-second time data will be truncated
to zero."
returned: on success
type: string
sample: window_start_time_example
time_created:
description:
- The date and time the DB System was created.
returned: on success
type: string
sample: 2013-10-20T19:20:30+01:00
time_updated:
description:
- The time the DB System was last updated.
returned: on success
type: string
sample: 2013-10-20T19:20:30+01:00
freeform_tags:
description:
- "Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only.
Example: `{\\"bar-key\\": \\"value\\"}`"
returned: on success
type: dict
sample: {'Department': 'Finance'}
defined_tags:
description:
- "Defined tags for this resource. Each key is predefined and scoped to a namespace.
Example: `{\\"foo-namespace\\": {\\"bar-key\\": \\"value\\"}}`"
returned: on success
type: dict
sample: {'Operations': {'CostCenter': 'US'}}
sample: [{
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"display_name": "display_name_example",
"description": "description_example",
"compartment_id": "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx",
"subnet_id": "ocid1.subnet.oc1..xxxxxxEXAMPLExxxxxx",
"is_analytics_cluster_attached": true,
"analytics_cluster": {
"shape_name": "shape_name_example",
"cluster_size": 56,
"lifecycle_state": "lifecycle_state_example",
"time_created": "2013-10-20T19:20:30+01:00",
"time_updated": "2013-10-20T19:20:30+01:00"
},
"availability_domain": "Uocm:PHX-AD-1",
"fault_domain": "fault_domain_example",
"shape_name": "shape_name_example",
"mysql_version": "mysql_version_example",
"backup_policy": {
"is_enabled": true,
"window_start_time": "window_start_time_example",
"retention_in_days": 56,
"freeform_tags": {'Department': 'Finance'},
"defined_tags": {'Operations': {'CostCenter': 'US'}}
},
"source": {
"source_type": "NONE",
"backup_id": "ocid1.backup.oc1..xxxxxxEXAMPLExxxxxx"
},
"configuration_id": "ocid1.configuration.oc1..xxxxxxEXAMPLExxxxxx",
"data_storage_size_in_gbs": 56,
"hostname_label": "hostname_label_example",
"ip_address": "ip_address_example",
"port": 56,
"port_x": 56,
"endpoints": [{
"hostname": "hostname_example",
"ip_address": "ip_address_example",
"port": 56,
"port_x": 56,
"modes": [],
"status": "ACTIVE",
"status_details": "status_details_example"
}],
"channels": [{
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"compartment_id": "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx",
"is_enabled": true,
"source": {
"source_type": "MYSQL",
"hostname": "hostname_example",
"port": 56,
"username": "username_example",
"ssl_mode": "VERIFY_IDENTITY",
"ssl_ca_certificate": {
"certificate_type": "PEM",
"contents": "contents_example"
}
},
"target": {
"target_type": "DBSYSTEM",
"db_system_id": "ocid1.dbsystem.oc1..xxxxxxEXAMPLExxxxxx",
"channel_name": "channel_name_example",
"applier_username": "applier_username_example"
},
"lifecycle_state": "lifecycle_state_example",
"lifecycle_details": "lifecycle_details_example",
"display_name": "display_name_example",
"time_created": "2013-10-20T19:20:30+01:00",
"time_updated": "2013-10-20T19:20:30+01:00",
"freeform_tags": {'Department': 'Finance'},
"defined_tags": {'Operations': {'CostCenter': 'US'}}
}],
"lifecycle_state": "CREATING",
"lifecycle_details": "lifecycle_details_example",
"maintenance": {
"window_start_time": "window_start_time_example"
},
"time_created": "2013-10-20T19:20:30+01:00",
"time_updated": "2013-10-20T19:20:30+01:00",
"freeform_tags": {'Department': 'Finance'},
"defined_tags": {'Operations': {'CostCenter': 'US'}}
}]
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceFactsHelperBase,
get_custom_class,
)
try:
from oci.mysql import DbSystemClient
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
class MysqlDbSystemFactsHelperGen(OCIResourceFactsHelperBase):
    """Facts helper for MySQL DB Systems; supported operations: get, list."""

    def get_required_params_for_get(self):
        """Module parameters that must be present for a single-resource get."""
        return ["db_system_id"]

    def get_required_params_for_list(self):
        """Module parameters that must be present for a list operation."""
        return ["compartment_id"]

    def get_resource(self):
        """Fetch one DB System by OCID, retrying with backoff on transient errors."""
        db_system_id = self.module.params.get("db_system_id")
        return oci_common_utils.call_with_backoff(
            self.client.get_db_system, db_system_id=db_system_id
        )

    def list_resources(self):
        """List DB Systems in a compartment, forwarding any set optional filters."""
        optional_params = (
            "is_analytics_cluster_attached",
            "db_system_id",
            "display_name",
            "lifecycle_state",
            "configuration_id",
            "is_up_to_date",
            "sort_by",
            "sort_order",
        )
        # Only parameters the user actually set are passed through to the SDK.
        filters = {
            name: self.module.params[name]
            for name in optional_params
            if self.module.params.get(name) is not None
        }
        return oci_common_utils.list_all_resources(
            self.client.list_db_systems,
            compartment_id=self.module.params.get("compartment_id"),
            **filters
        )
MysqlDbSystemFactsHelperCustom = get_custom_class("MysqlDbSystemFactsHelperCustom")
class ResourceFactsHelper(MysqlDbSystemFactsHelperCustom, MysqlDbSystemFactsHelperGen):
    """Facts helper used by ``main``; custom overrides precede the generated base in the MRO."""
    pass
def main():
    """Ansible module entry point: build the arg spec, dispatch get/list, exit with facts."""
    module_args = oci_common_utils.get_common_arg_spec()
    module_args.update(
        dict(
            db_system_id=dict(aliases=["id"], type="str"),
            compartment_id=dict(type="str"),
            is_analytics_cluster_attached=dict(type="bool"),
            display_name=dict(aliases=["name"], type="str"),
            lifecycle_state=dict(
                type="str",
                choices=[
                    "CREATING",
                    "ACTIVE",
                    "INACTIVE",
                    "UPDATING",
                    "DELETING",
                    "DELETED",
                    "FAILED",
                ],
            ),
            configuration_id=dict(type="str"),
            is_up_to_date=dict(type="bool"),
            sort_by=dict(type="str", choices=["displayName", "timeCreated"]),
            sort_order=dict(type="str", choices=["ASC", "DESC"]),
        )
    )
    module = AnsibleModule(argument_spec=module_args)
    # The OCI Python SDK is an optional import guarded at module load time.
    if not HAS_OCI_PY_SDK:
        module.fail_json(msg="oci python sdk required for this module.")
    resource_facts_helper = ResourceFactsHelper(
        module=module,
        resource_type="db_system",
        service_client_class=DbSystemClient,
        namespace="mysql",
    )
    result = []
    # Dispatch on which required parameters were supplied: a single get when
    # db_system_id is present, otherwise a compartment-wide list.
    if resource_facts_helper.is_get():
        result = [resource_facts_helper.get()]
    elif resource_facts_helper.is_list():
        result = resource_facts_helper.list()
    else:
        resource_facts_helper.fail()
    module.exit_json(db_systems=result)
if __name__ == "__main__":
main()
| 41.245211 | 160 | 0.507168 |
ace1d1d4d6ad73047f0167ea4edc0f1cc93dc651 | 6,997 | py | Python | jupyter_client/threaded.py | EnjoyLifeFund/py36pkgs | 0ac677fbbfa7b6d8c527fe2c759ba05117b07fd2 | [
"MIT",
"BSD-2-Clause",
"BSD-3-Clause"
] | 4 | 2018-01-19T17:15:06.000Z | 2018-01-24T00:06:42.000Z | Python/PythonProgrammingLanguage/Encapsulation/encap_env/lib/python3.5/site-packages/jupyter_client/threaded.py | nitin-cherian/LifeLongLearning | 84084792058358365162c645742c70064a2d5fd6 | [
"MIT"
] | 10 | 2017-07-13T00:24:03.000Z | 2017-07-17T07:39:03.000Z | Python/PythonProgrammingLanguage/Encapsulation/encap_env/lib/python3.5/site-packages/jupyter_client/threaded.py | nitin-cherian/LifeLongLearning | 84084792058358365162c645742c70064a2d5fd6 | [
"MIT"
] | 7 | 2017-08-01T04:02:07.000Z | 2018-10-06T21:07:20.000Z | """ Defines a KernelClient that provides thread-safe sockets with async callbacks on message replies.
"""
from __future__ import absolute_import
import atexit
import errno
from threading import Thread
import time
# import ZMQError in top-level namespace, to avoid ugly attribute-error messages
# during garbage collection of threads at exit:
from zmq import ZMQError
from zmq.eventloop import ioloop, zmqstream
# Local imports
from traitlets import Type, Instance
from jupyter_client.channels import HBChannel
from jupyter_client import KernelClient
class ThreadedZMQSocketChannel(object):
    """A ZMQ socket invoking a callback in the ioloop.

    The socket is wrapped in a :class:`zmqstream.ZMQStream` attached to the
    shared ioloop, so sends and receives happen on the ioloop's thread;
    :meth:`call_handlers` is invoked on that thread for every message.
    """
    # Instance attributes, declared at class level as defaults.
    session = None
    socket = None
    ioloop = None
    stream = None
    # Optional hook: a callable run on every incoming message (on the ioloop
    # thread) before the handlers, used by the client to inspect replies.
    _inspect = None
    def __init__(self, socket, session, loop):
        """Create a channel.
        Parameters
        ----------
        socket : :class:`zmq.Socket`
            The ZMQ socket to use.
        session : :class:`session.Session`
            The session to use.
        loop
            A pyzmq ioloop to connect the socket to using a ZMQStream
        """
        super(ThreadedZMQSocketChannel, self).__init__()
        self.socket = socket
        self.session = session
        self.ioloop = loop
        self.stream = zmqstream.ZMQStream(self.socket, self.ioloop)
        self.stream.on_recv(self._handle_recv)
    # Simple started/stopped flag; start/stop do not touch the socket itself.
    _is_alive = False
    def is_alive(self):
        return self._is_alive
    def start(self):
        self._is_alive = True
    def stop(self):
        self._is_alive = False
    def close(self):
        if self.socket is not None:
            try:
                # linger=0: drop unsent messages instead of blocking on close.
                self.socket.close(linger=0)
            except Exception:
                pass
            self.socket = None
    def send(self, msg):
        """Queue a message to be sent from the IOLoop's thread.
        Parameters
        ----------
        msg : message to send
        This is threadsafe, as it uses IOLoop.add_callback to give the loop's
        thread control of the action.
        """
        def thread_send():
            self.session.send(self.stream, msg)
        self.ioloop.add_callback(thread_send)
    def _handle_recv(self, msg):
        """Callback for stream.on_recv.
        Unpacks message, and calls handlers with it.
        """
        ident,smsg = self.session.feed_identities(msg)
        msg = self.session.deserialize(smsg)
        # let client inspect messages
        if self._inspect:
            self._inspect(msg)
        self.call_handlers(msg)
    def call_handlers(self, msg):
        """This method is called in the ioloop thread when a message arrives.
        Subclasses should override this method to handle incoming messages.
        It is important to remember that this method is called in the thread
        so that some logic must be done to ensure that the application level
        handlers are called in the application thread.
        """
        pass
    def process_events(self):
        """Subclasses should override this with a method
        processing any pending GUI events.
        """
        pass
    def flush(self, timeout=1.0):
        """Immediately processes all pending messages on this channel.
        This is only used for the IOPub channel.
        Callers should use this method to ensure that :meth:`call_handlers`
        has been called for all messages that have been received on the
        0MQ SUB socket of this channel.
        This method is thread safe.
        Parameters
        ----------
        timeout : float, optional
            The maximum amount of time to spend flushing, in seconds. The
            default is one second.
        """
        # We do the IOLoop callback process twice to ensure that the IOLoop
        # gets to perform at least one full poll.
        stop_time = time.time() + timeout
        for i in range(2):
            self._flushed = False
            self.ioloop.add_callback(self._flush)
            # Poll (with a short sleep) until _flush runs on the ioloop
            # thread or the timeout budget is exhausted.
            while not self._flushed and time.time() < stop_time:
                time.sleep(0.01)
    def _flush(self):
        """Callback for :method:`self.flush`, run on the ioloop thread."""
        self.stream.flush()
        self._flushed = True
class IOLoopThread(Thread):
    """Run a pyzmq ioloop in a daemon thread to send and receive messages.
    """
    # Set at interpreter shutdown so run() can distinguish exit-time errors
    # from genuine failures.
    _exiting = False
    def __init__(self, loop):
        super(IOLoopThread, self).__init__()
        # Daemon thread: do not block interpreter exit.
        self.daemon = True
        self.ioloop = loop or ioloop.IOLoop()
    @staticmethod
    @atexit.register
    def _notice_exit():
        # Registered with atexit at class-creation time; flags shutdown.
        IOLoopThread._exiting = True
    def run(self):
        """Run my loop, ignoring EINTR events in the poller"""
        while True:
            try:
                self.ioloop.start()
            except ZMQError as e:
                if e.errno == errno.EINTR:
                    # Interrupted system call: restart the loop.
                    continue
                else:
                    raise
            except Exception:
                if self._exiting:
                    # Suppress errors raised during interpreter shutdown.
                    break
                else:
                    raise
            else:
                break
    def stop(self):
        """Stop the channel's event loop and join its thread.
        This calls :meth:`~threading.Thread.join` and returns when the thread
        terminates. :class:`RuntimeError` will be raised if
        :meth:`~threading.Thread.start` is called again.
        """
        if self.ioloop is not None:
            self.ioloop.stop()
        self.join()
        self.close()
    def close(self):
        if self.ioloop is not None:
            try:
                # all_fds=True also closes the sockets registered on the loop.
                self.ioloop.close(all_fds=True)
            except Exception:
                pass
class ThreadedKernelClient(KernelClient):
    """ A KernelClient that provides thread-safe sockets with async callbacks on message replies.
    """
    _ioloop = None
    @property
    def ioloop(self):
        # Lazily create a private IOLoop shared by all channels.
        if self._ioloop is None:
            self._ioloop = ioloop.IOLoop()
        return self._ioloop
    ioloop_thread = Instance(IOLoopThread, allow_none=True)
    def start_channels(self, shell=True, iopub=True, stdin=True, hb=True):
        if shell:
            # Watch shell traffic for the kernel_info_reply (see below).
            self.shell_channel._inspect = self._check_kernel_info_reply
        self.ioloop_thread = IOLoopThread(self.ioloop)
        self.ioloop_thread.start()
        super(ThreadedKernelClient, self).start_channels(shell, iopub, stdin, hb)
    def _check_kernel_info_reply(self, msg):
        """This is run in the ioloop thread when the kernel info reply is received
        """
        if msg['msg_type'] == 'kernel_info_reply':
            self._handle_kernel_info_reply(msg)
            # One-shot hook: detach once the reply has been seen.
            self.shell_channel._inspect = None
    def stop_channels(self):
        super(ThreadedKernelClient, self).stop_channels()
        if self.ioloop_thread.is_alive():
            self.ioloop_thread.stop()
    # All channels use the threaded ZMQ implementation except the heartbeat,
    # which keeps its dedicated HBChannel class.
    iopub_channel_class = Type(ThreadedZMQSocketChannel)
    shell_channel_class = Type(ThreadedZMQSocketChannel)
    stdin_channel_class = Type(ThreadedZMQSocketChannel)
    hb_channel_class = Type(HBChannel)
| 30.159483 | 101 | 0.615835 |
ace1d1e9a040852bdb5ab1625488e161978829a8 | 1,388 | py | Python | test.py | aishenghuomeidaoli/hash_vs_array | c6efe68ef491fccea475fe37fa4102f4db7c6f81 | [
"MIT"
] | null | null | null | test.py | aishenghuomeidaoli/hash_vs_array | c6efe68ef491fccea475fe37fa4102f4db7c6f81 | [
"MIT"
] | null | null | null | test.py | aishenghuomeidaoli/hash_vs_array | c6efe68ef491fccea475fe37fa4102f4db7c6f81 | [
"MIT"
] | null | null | null | import os
import random
import time
def hash_vs_arrary(total_nums, target_nums, test_times):
    """Compare membership-test speed of a list versus a set.

    Reads up to ``total_nums`` lines from ``gameofthrones.txt`` (located next
    to this script), picks ``target_nums`` of them at random as lookup
    targets, then times ``test_times`` rounds of ``in`` tests against both a
    list and a set of the same data, printing the elapsed time for each.

    :param total_nums: number of sample lines to load from the file
    :param target_nums: number of target lines to look up
    :param test_times: number of timing rounds
    :return: None (results are printed)
    """
    file_name = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             'gameofthrones.txt')
    all_data_list = []
    all_data_set = set()
    target_data = []
    # Randomly chosen indices (within the sample range) of the target lines.
    # NOTE(review): assumes the file has at least total_nums lines; fewer
    # lines silently yield a smaller sample — confirm against the data file.
    target_data_index = set(random.sample(range(total_nums), target_nums))
    with open(file_name, encoding="utf8", mode="r") as f_open:
        for count, line in enumerate(f_open):
            if count >= total_nums:
                break
            if count in target_data_index:
                target_data.append(line)
            all_data_list.append(line)
            all_data_set.add(line)
    print('generating data set finished')
    # perf_counter() is monotonic and high-resolution, unlike time.time(),
    # which makes it the correct clock for benchmarking.
    start = time.perf_counter()
    for _ in range(test_times):
        for line in target_data:
            if line in all_data_list:  # O(n) linear scan per lookup
                pass
    break_point = time.perf_counter()
    for _ in range(test_times):
        for line in target_data:
            if line in all_data_set:  # O(1) hash lookup
                pass
    end = time.perf_counter()
    print('list time is ', break_point - start)
    print('set time is ', end - break_point)
if __name__ == "__main__":
hash_vs_arrary(10000, 1000, 100)
| 25.703704 | 74 | 0.59438 |
ace1d3301df9b7ed5e07137928a096ac0fb9b5ec | 27,149 | py | Python | colour/appearance/nayatani95.py | MaxSchambach/colour | 3f3685d616fda4be58cec20bc1e16194805d7e2d | [
"BSD-3-Clause"
] | null | null | null | colour/appearance/nayatani95.py | MaxSchambach/colour | 3f3685d616fda4be58cec20bc1e16194805d7e2d | [
"BSD-3-Clause"
] | null | null | null | colour/appearance/nayatani95.py | MaxSchambach/colour | 3f3685d616fda4be58cec20bc1e16194805d7e2d | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Nayatani (1995) Colour Appearance Model
=======================================
Defines *Nayatani (1995)* colour appearance model objects:
- :class:`colour.Nayatani95_Specification`
- :func:`colour.XYZ_to_Nayatani95`
See Also
--------
`Nayatani (1995) Colour Appearance Model Jupyter Notebook
<http://nbviewer.jupyter.org/github/colour-science/colour-notebooks/\
blob/master/notebooks/appearance/nayatani95.ipynb>`_
References
----------
- :cite:`Fairchild2013ba` : Fairchild, M. D. (2013). The Nayatani et al.
Model. In Color Appearance Models (3rd ed., pp. 4810-5085). Wiley.
ISBN:B00DAYO8E2
- :cite:`Nayatani1995a` : Nayatani, Y., Sobagaki, H., & Yano, K. H. T.
(1995). Lightness dependency of chroma scales of a nonlinear
color-appearance model and its latest formulation. Color Research &
Application, 20(3), 156-167. doi:10.1002/col.5080200305
"""
from __future__ import division, unicode_literals
import numpy as np
from collections import namedtuple
from colour.algebra import spow
from colour.adaptation.cie1994 import (CIE1994_XYZ_TO_RGB_MATRIX, beta_1,
exponential_factors,
intermediate_values)
from colour.models import XYZ_to_xy
from colour.utilities import (as_float_array, dot_vector, from_range_degrees,
to_domain_100, tsplit, tstack)
__author__ = 'Colour Developers'
__copyright__ = 'Copyright (C) 2013-2019 - Colour Developers'
__license__ = 'New BSD License - https://opensource.org/licenses/BSD-3-Clause'
__maintainer__ = 'Colour Developers'
__email__ = 'colour-developers@colour-science.org'
__status__ = 'Production'
__all__ = [
'NAYATANI95_XYZ_TO_RGB_MATRIX', 'Nayatani95_ReferenceSpecification',
'Nayatani95_Specification', 'XYZ_to_Nayatani95',
'illuminance_to_luminance', 'XYZ_to_RGB_Nayatani95', 'scaling_coefficient',
'achromatic_response', 'tritanopic_response', 'protanopic_response',
'brightness_correlate', 'ideal_white_brightness_correlate',
'achromatic_lightness_correlate',
'normalised_achromatic_lightness_correlate', 'hue_angle',
'saturation_components', 'saturation_correlate', 'chroma_components',
'chroma_correlate', 'colourfulness_components', 'colourfulness_correlate',
'chromatic_strength_function'
]
NAYATANI95_XYZ_TO_RGB_MATRIX = CIE1994_XYZ_TO_RGB_MATRIX
"""
*Nayatani (1995)* colour appearance model *CIE XYZ* tristimulus values to cone
responses matrix.
NAYATANI95_XYZ_TO_RGB_MATRIX : array_like, (3, 3)
"""
class Nayatani95_ReferenceSpecification(
        namedtuple('Nayatani95_ReferenceSpecification',
                   ('L_star_P', 'C', 'theta', 'S', 'B_r', 'M', 'H', 'H_C',
                    'L_star_N'))):
    """
    Defines the *Nayatani (1995)* colour appearance model reference
    specification.
    This specification has field names consistent with *Fairchild (2013)*
    reference.
    Parameters
    ----------
    L_star_P : numeric or array_like
        Correlate of *achromatic Lightness* :math:`L_p^\\star`.
    C : numeric or array_like
        Correlate of *chroma* :math:`C`.
    theta : numeric or array_like
        *Hue* angle :math:`\\theta` in degrees.
    S : numeric or array_like
        Correlate of *saturation* :math:`S`.
    B_r : numeric or array_like
        Correlate of *brightness* :math:`B_r`.
    M : numeric or array_like
        Correlate of *colourfulness* :math:`M`.
    H : numeric or array_like
        *Hue* :math:`h` quadrature :math:`H`.
    H_C : numeric or array_like
        *Hue* :math:`h` composition :math:`H_C`.
    L_star_N : numeric or array_like
        Correlate of *normalised achromatic Lightness* :math:`L_n^\\star`.
    Notes
    -----
    -   The model implementation returns :class:`Nayatani95_Specification`;
        this class only mirrors the reference field naming.
    References
    ----------
    :cite:`Fairchild2013ba`, :cite:`Nayatani1995a`
    """
class Nayatani95_Specification(
        namedtuple(
            'Nayatani95_Specification',
            ('L_star_P', 'C', 'h', 's', 'Q', 'M', 'H', 'HC', 'L_star_N'))):
    """
    Defines the *Nayatani (1995)* colour appearance model specification.
    This specification has field names consistent with the remaining colour
    appearance models in :mod:`colour.appearance` but diverge from
    *Fairchild (2013)* reference.
    Parameters
    ----------
    L_star_P : numeric or array_like
        Correlate of *achromatic Lightness* :math:`L_p^\\star`.
    C : numeric or array_like
        Correlate of *chroma* :math:`C`.
    h : numeric or array_like
        *Hue* angle :math:`\\theta` in degrees.
    s : numeric or array_like
        Correlate of *saturation* :math:`S`.
    Q : numeric or array_like
        Correlate of *brightness* :math:`B_r`.
    M : numeric or array_like
        Correlate of *colourfulness* :math:`M`.
    H : numeric or array_like
        *Hue* :math:`h` quadrature :math:`H`.
    HC : numeric or array_like
        *Hue* :math:`h` composition :math:`H_C`.
    L_star_N : numeric or array_like
        Correlate of *normalised achromatic Lightness* :math:`L_n^\\star`.
    Notes
    -----
    -   This specification is the one used in the current model
        implementation.
    -   ``H`` and ``HC`` are currently set to ``None`` by
        :func:`XYZ_to_Nayatani95` as hue quadrature and composition are not
        yet implemented.
    References
    ----------
    :cite:`Fairchild2013ba`, :cite:`Nayatani1995a`
    """
def XYZ_to_Nayatani95(XYZ, XYZ_n, Y_o, E_o, E_or, n=1):
    """
    Computes the *Nayatani (1995)* colour appearance model correlates.
    Parameters
    ----------
    XYZ : array_like
        *CIE XYZ* tristimulus values of test sample / stimulus.
    XYZ_n : array_like
        *CIE XYZ* tristimulus values of reference white.
    Y_o : numeric or array_like
        Luminance factor :math:`Y_o` of achromatic background as percentage
        normalised to domain [0.18, 1.0] in **'Reference'** domain-range scale.
    E_o : numeric or array_like
        Illuminance :math:`E_o` of the viewing field in lux.
    E_or : numeric or array_like
        Normalising illuminance :math:`E_{or}` in lux usually normalised to
        domain [1000, 3000].
    n : numeric or array_like, optional
        Noise term used in the non linear chromatic adaptation model.
    Returns
    -------
    Nayatani95_Specification
        *Nayatani (1995)* colour appearance model specification.
    Notes
    -----
    +--------------------------------+-----------------------+---------------+
    | **Domain**                     | **Scale - Reference** | **Scale - 1** |
    +================================+=======================+===============+
    | ``XYZ``                        | [0, 100]              | [0, 1]        |
    +--------------------------------+-----------------------+---------------+
    | ``XYZ_n``                      | [0, 100]              | [0, 1]        |
    +--------------------------------+-----------------------+---------------+
    +--------------------------------+-----------------------+---------------+
    | **Range**                      | **Scale - Reference** | **Scale - 1** |
    +================================+=======================+===============+
    | ``Nayatani95_Specification.h`` | [0, 360]              | [0, 1]        |
    +--------------------------------+-----------------------+---------------+
    References
    ----------
    :cite:`Fairchild2013ba`, :cite:`Nayatani1995a`
    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_n = np.array([95.05, 100.00, 108.88])
    >>> Y_o = 20.0
    >>> E_o = 5000.0
    >>> E_or = 1000.0
    >>> XYZ_to_Nayatani95(XYZ, XYZ_n, Y_o, E_o, E_or)  # doctest: +ELLIPSIS
    Nayatani95_Specification(L_star_P=49.9998829..., C=0.0133550..., \
h=257.5232268..., s=0.0133550..., Q=62.6266734..., M=0.0167262..., H=None, \
HC=None, L_star_N=50.0039154...)
    """
    XYZ = to_domain_100(XYZ)
    XYZ_n = to_domain_100(XYZ_n)
    Y_o = as_float_array(Y_o)
    E_o = as_float_array(E_o)
    E_or = as_float_array(E_or)
    # Computing adapting luminance :math:`L_o` and normalising luminance
    # :math:`L_{or}` in :math:`cd/m^2`.
    # NOTE: :math:`L_o` is currently unused by the implementation, hence the
    # commented-out line kept from the reference formulation.
    # L_o = illuminance_to_luminance(E_o, Y_o)
    L_or = illuminance_to_luminance(E_or, Y_o)
    # Computing :math:`\\xi` :math:`\\eta`, :math:`\\zeta` values.
    xez = intermediate_values(XYZ_to_xy(XYZ_n / 100))
    xi, eta, _zeta = tsplit(xez)
    # Computing adapting field cone responses.
    RGB_o = ((
        (Y_o[..., np.newaxis] * E_o[..., np.newaxis]) / (100 * np.pi)) * xez)
    # Computing stimulus cone responses.
    RGB = XYZ_to_RGB_Nayatani95(XYZ)
    R, G, _B = tsplit(RGB)
    # Computing exponential factors of the chromatic adaptation.
    bRGB_o = exponential_factors(RGB_o)
    bL_or = beta_1(L_or)
    # Computing scaling coefficients :math:`e(R)` and :math:`e(G)`
    eR = scaling_coefficient(R, xi)
    eG = scaling_coefficient(G, eta)
    # Computing opponent colour dimensions.
    # Computing achromatic response :math:`Q`:
    Q_response = achromatic_response(RGB, bRGB_o, xez, bL_or, eR, eG, n)
    # Computing tritanopic response :math:`t`:
    t_response = tritanopic_response(RGB, bRGB_o, xez, n)
    # Computing protanopic response :math:`p`:
    p_response = protanopic_response(RGB, bRGB_o, xez, n)
    # Computing the correlate of *brightness* :math:`B_r`.
    B_r = brightness_correlate(bRGB_o, bL_or, Q_response)
    # Computing *brightness* :math:`B_{rw}` of ideal white.
    brightness_ideal_white = ideal_white_brightness_correlate(
        bRGB_o, xez, bL_or, n)
    # Computing the correlate of achromatic *Lightness* :math:`L_p^\\star`.
    L_star_P = (achromatic_lightness_correlate(Q_response))
    # Computing the correlate of normalised achromatic *Lightness*
    # :math:`L_n^\\star`.
    L_star_N = (normalised_achromatic_lightness_correlate(
        B_r, brightness_ideal_white))
    # Computing the *hue* angle :math:`\\theta`.
    theta = hue_angle(p_response, t_response)
    # TODO: Implement hue quadrature & composition computation.
    # Computing the correlate of *saturation* :math:`S`.
    S_RG, S_YB = tsplit(
        saturation_components(theta, bL_or, t_response, p_response))
    S = saturation_correlate(S_RG, S_YB)
    # Computing the correlate of *chroma* :math:`C`.
    # C_RG, C_YB = tsplit(chroma_components(L_star_P, S_RG, S_YB))
    C = chroma_correlate(L_star_P, S)
    # Computing the correlate of *colourfulness* :math:`M`.
    # TODO: Investigate components usage.
    # M_RG, M_YB = tsplit(colourfulness_components(C_RG, C_YB,
    # brightness_ideal_white))
    M = colourfulness_correlate(C, brightness_ideal_white)
    # H and HC are None until hue quadrature / composition are implemented.
    return Nayatani95_Specification(L_star_P, C, from_range_degrees(theta), S,
                                    B_r, M, None, None, L_star_N)
def illuminance_to_luminance(E, Y_f):
    """
    Convert an *illuminance* :math:`E` value in lux into a *luminance* value
    in :math:`cd/m^2`.

    Parameters
    ----------
    E : numeric or array_like
        *Illuminance* :math:`E` in lux.
    Y_f : numeric or array_like
        *Luminance* factor :math:`Y_f` in :math:`cd/m^2`.

    Returns
    -------
    numeric or ndarray
        *Luminance* :math:`Y` in :math:`cd/m^2`.

    Examples
    --------
    >>> illuminance_to_luminance(5000.0, 20.0)  # doctest: +ELLIPSIS
    318.3098861...
    """

    illuminance = as_float_array(E)
    luminance_factor = as_float_array(Y_f)

    # Perfect diffuser relation scaled by the percentage luminance factor.
    return luminance_factor * illuminance / (100 * np.pi)
def XYZ_to_RGB_Nayatani95(XYZ):
    """
    Convert from *CIE XYZ* tristimulus values to cone responses using the
    *Nayatani (1995)* transformation matrix.

    Parameters
    ----------
    XYZ : array_like
        *CIE XYZ* tristimulus values.

    Returns
    -------
    ndarray
        Cone responses.

    Examples
    --------
    >>> XYZ = np.array([19.01, 20.00, 21.78])
    >>> XYZ_to_RGB_Nayatani95(XYZ)  # doctest: +ELLIPSIS
    array([ 20.0005206...,  19.999783 ...,  19.9988316...])
    """

    # Single matrix-vector product against the model's XYZ -> RGB matrix.
    cone_responses = dot_vector(NAYATANI95_XYZ_TO_RGB_MATRIX, XYZ)

    return cone_responses
def scaling_coefficient(x, y):
    """
    Return the scaling coefficient :math:`e(R)` or :math:`e(G)`.

    The coefficient is 1.758 wherever the cone response is at least twenty
    times the matching intermediate value, and 1 elsewhere.

    Parameters
    ----------
    x: numeric or array_like
        Cone response.
    y: numeric or array_like
        Intermediate value.

    Returns
    -------
    numeric or ndarray
        Scaling coefficient :math:`e(R)` or :math:`e(G)`.

    Examples
    --------
    >>> x = 20.000520600000002
    >>> y = 1.000042192
    >>> scaling_coefficient(x, y)
    array(1.0)
    """

    response = as_float_array(x)
    intermediate = as_float_array(y)

    threshold = 20 * intermediate

    return np.where(response >= threshold, 1.758, 1)
def achromatic_response(RGB, bRGB_o, xez, bL_or, eR, eG, n=1):
    """
    Return the achromatic response :math:`Q` from given stimulus cone
    responses.

    Parameters
    ----------
    RGB: ndarray
        Stimulus cone responses.
    bRGB_o: ndarray
        Chromatic adaptation exponential factors :math:`\\beta_1(R_o)`,
        :math:`\\beta_1(G_o)` and :math:`\\beta_2(B_o)`.
    xez: ndarray
        Intermediate values :math:`\\xi`, :math:`\\eta`, :math:`\\zeta`.
    bL_or: numeric or array_like
        Normalising chromatic adaptation exponential factor
        :math:`\\beta_1(B_or)`.
    eR: numeric or array_like
        Scaling coefficient :math:`e(R)`.
    eG: numeric or array_like
        Scaling coefficient :math:`e(G)`.
    n : numeric or array_like, optional
        Noise term used in the non linear chromatic adaptation model.

    Returns
    -------
    numeric or ndarray
        Achromatic response :math:`Q`.

    Examples
    --------
    >>> RGB = np.array([20.00052060, 19.99978300, 19.99883160])
    >>> bRGB_o = np.array([4.61062223, 4.61058926, 4.65206986])
    >>> xez = np.array([1.00004219, 0.99998001, 0.99975794])
    >>> bL_or = 3.681021495604089
    >>> eR = 1.0
    >>> eG = 1.758
    >>> n = 1.0
    >>> achromatic_response(RGB, bRGB_o, xez, bL_or, eR, eG, n)
    ... # doctest: +ELLIPSIS
    -0.0001169...
    """

    R, G, _B = tsplit(RGB)
    bR_o, bG_o, _bB_o = tsplit(bRGB_o)
    xi, eta, _zeta = tsplit(xez)
    bL_or = as_float_array(bL_or)
    eR = as_float_array(eR)
    eG = as_float_array(eG)

    # Weighted log compressions of the red and green channels relative to
    # their adapted backgrounds.
    term_R = (2 / 3) * bR_o * eR * np.log10((R + n) / (20 * xi + n))
    term_G = (1 / 3) * bG_o * eG * np.log10((G + n) / (20 * eta + n))

    # Normalisation by the adapting exponential factor.
    return (41.69 / bL_or) * (term_R + term_G)
def tritanopic_response(RGB, bRGB_o, xez, n):
    """
    Return the tritanopic response :math:`t` from given stimulus cone
    responses.

    Parameters
    ----------
    RGB: ndarray
        Stimulus cone responses.
    bRGB_o: ndarray
        Chromatic adaptation exponential factors :math:`\\beta_1(R_o)`,
        :math:`\\beta_1(G_o)` and :math:`\\beta_2(B_o)`.
    xez: ndarray
        Intermediate values :math:`\\xi`, :math:`\\eta`, :math:`\\zeta`.
    n : numeric or array_like, optional
        Noise term used in the non linear chromatic adaptation model.

    Returns
    -------
    numeric or ndarray
        Tritanopic response :math:`t`.

    Examples
    --------
    >>> RGB = np.array([20.00052060, 19.99978300, 19.99883160])
    >>> bRGB_o = np.array([4.61062223, 4.61058926, 4.65206986])
    >>> xez = np.array([1.00004219, 0.99998001, 0.99975794])
    >>> n = 1.0
    >>> tritanopic_response(RGB, bRGB_o, xez, n)  # doctest: +ELLIPSIS
    -1.7703650...e-05
    """

    R, G, B = tsplit(RGB)
    bR_o, bG_o, bB_o = tsplit(bRGB_o)
    xi, eta, zeta = tsplit(xez)

    # Red-green opponency with a small blue contribution.
    term_R = bR_o * np.log10((R + n) / (20 * xi + n))
    term_G = (12 / 11) * bG_o * np.log10((G + n) / (20 * eta + n))
    term_B = (1 / 11) * bB_o * np.log10((B + n) / (20 * zeta + n))

    return term_R - term_G + term_B
def protanopic_response(RGB, bRGB_o, xez, n):
    """
    Computes the protanopic response :math:`p` for the given stimulus cone
    responses.

    Parameters
    ----------
    RGB : ndarray
        Stimulus cone responses.
    bRGB_o : ndarray
        Chromatic adaptation exponential factors :math:`\\beta_1(R_o)`,
        :math:`\\beta_1(G_o)` and :math:`\\beta_2(B_o)`.
    xez : ndarray
        Intermediate values :math:`\\xi`, :math:`\\eta`, :math:`\\zeta`.
    n : numeric or array_like, optional
        Noise term used in the non linear chromatic adaptation model.

    Returns
    -------
    numeric or ndarray
        Protanopic response :math:`p`.

    Examples
    --------
    >>> RGB = np.array([20.00052060, 19.99978300, 19.99883160])
    >>> bRGB_o = np.array([4.61062223, 4.61058926, 4.65206986])
    >>> xez = np.array([1.00004219, 0.99998001, 0.99975794])
    >>> n = 1.0
    >>> protanopic_response(RGB, bRGB_o, xez, n)  # doctest: +ELLIPSIS
    -8.0021426...e-05
    """

    R, G, B = tsplit(RGB)
    beta_R, beta_G, beta_B = tsplit(bRGB_o)
    xi, eta, zeta = tsplit(xez)

    # Adapted log ratio of each cone response against its background term.
    log_R = np.log10((R + n) / (20 * xi + n))
    log_G = np.log10((G + n) / (20 * eta + n))
    log_B = np.log10((B + n) / (20 * zeta + n))

    return (1 / 9) * beta_R * log_R + (1 / 9) * beta_G * log_G - (2 / 9) * beta_B * log_B
def brightness_correlate(bRGB_o, bL_or, Q):
    """
    Computes the *brightness* correlate :math:`B_r`.

    Parameters
    ----------
    bRGB_o : ndarray
        Chromatic adaptation exponential factors :math:`\\beta_1(R_o)`,
        :math:`\\beta_1(G_o)` and :math:`\\beta_2(B_o)`.
    bL_or : numeric or array_like
        Normalising chromatic adaptation exponential factor
        :math:`\\beta_1(B_or)`.
    Q : numeric or array_like
        Achromatic response :math:`Q`.

    Returns
    -------
    numeric or ndarray
        *Brightness* correlate :math:`B_r`.

    Examples
    --------
    >>> bRGB_o = np.array([4.61062223, 4.61058926, 4.65206986])
    >>> bL_or = 3.681021495604089
    >>> Q = -0.000117024294955
    >>> brightness_correlate(bRGB_o, bL_or, Q)  # doctest: +ELLIPSIS
    62.6266734...
    """

    beta_R, beta_G, _beta_B = tsplit(bRGB_o)
    bL_or = as_float_array(bL_or)
    Q = as_float_array(Q)

    # Weighted red / green adaptation factors scaled by the normalising
    # factor, then shifted by the achromatic response.
    weighted = (2 / 3) * beta_R + (1 / 3) * beta_G

    return (50 / bL_or) * weighted + Q
def ideal_white_brightness_correlate(bRGB_o, xez, bL_or, n):
    """
    Computes the ideal white *brightness* correlate :math:`B_{rw}`.

    Parameters
    ----------
    bRGB_o : ndarray
        Chromatic adaptation exponential factors :math:`\\beta_1(R_o)`,
        :math:`\\beta_1(G_o)` and :math:`\\beta_2(B_o)`.
    xez : ndarray
        Intermediate values :math:`\\xi`, :math:`\\eta`, :math:`\\zeta`.
    bL_or : numeric or array_like
        Normalising chromatic adaptation exponential factor
        :math:`\\beta_1(B_or)`.
    n : numeric or array_like, optional
        Noise term used in the non linear chromatic adaptation model.

    Returns
    -------
    numeric or ndarray
        Ideal white *brightness* correlate :math:`B_{rw}`.

    Examples
    --------
    >>> bRGB_o = np.array([4.61062223, 4.61058926, 4.65206986])
    >>> xez = np.array([1.00004219, 0.99998001, 0.99975794])
    >>> bL_or = 3.681021495604089
    >>> n = 1.0
    >>> ideal_white_brightness_correlate(bRGB_o, xez, bL_or, n)
    ... # doctest: +ELLIPSIS
    125.2435392...
    """

    beta_R, beta_G, _beta_B = tsplit(bRGB_o)
    xi, eta, _zeta = tsplit(xez)
    bL_or = as_float_array(bL_or)

    # Achromatic response of the ideal white (100 units of each cone signal);
    # 1.758 is presumably the fixed e(G) scaling coefficient -- cf. the
    # achromatic_response() example values.
    B_rw = (2 / 3) * beta_R * 1.758 * np.log10((100 * xi + n) / (20 * xi + n))
    B_rw = B_rw + (1 / 3) * beta_G * 1.758 * np.log10((100 * eta + n) / (20 * eta + n))
    B_rw = B_rw * (41.69 / bL_or)

    # Brightness offset terms, as in brightness_correlate().
    B_rw = B_rw + (50 / bL_or) * (2 / 3) * beta_R
    B_rw = B_rw + (50 / bL_or) * (1 / 3) * beta_G

    return B_rw
def achromatic_lightness_correlate(Q):
    """
    Computes the *achromatic Lightness* correlate :math:`L_p^\\star`.

    Parameters
    ----------
    Q : numeric or array_like
        Achromatic response :math:`Q`.

    Returns
    -------
    numeric or ndarray
        *Achromatic Lightness* correlate :math:`L_p^\\star`.

    Examples
    --------
    >>> Q = -0.000117024294955
    >>> achromatic_lightness_correlate(Q)  # doctest: +ELLIPSIS
    49.9998829...
    """

    # The achromatic lightness is simply the achromatic response shifted
    # onto a mid-grey-at-50 scale.
    return as_float_array(Q) + 50
def normalised_achromatic_lightness_correlate(B_r, B_rw):
    """
    Computes the *normalised achromatic Lightness* correlate
    :math:`L_n^\\star`.

    Parameters
    ----------
    B_r : numeric or array_like
        *Brightness* correlate :math:`B_r`.
    B_rw : numeric or array_like
        Ideal white *brightness* correlate :math:`B_{rw}`.

    Returns
    -------
    numeric or ndarray
        *Normalised achromatic Lightness* correlate :math:`L_n^\\star`.

    Examples
    --------
    >>> B_r = 62.626673467230766
    >>> B_rw = 125.24353925846037
    >>> normalised_achromatic_lightness_correlate(B_r, B_rw)
    ... # doctest: +ELLIPSIS
    50.0039154...
    """

    B_r = as_float_array(B_r)
    B_rw = as_float_array(B_rw)

    # Brightness relative to the ideal white, expressed as a percentage.
    return 100 * (B_r / B_rw)
def hue_angle(p, t):
    """
    Computes the *hue* angle :math:`h` in degrees.

    Parameters
    ----------
    p : numeric or array_like
        Protanopic response :math:`p`.
    t : numeric or array_like
        Tritanopic response :math:`t`.

    Returns
    -------
    numeric or ndarray
        *Hue* angle :math:`h` in degrees.

    Examples
    --------
    >>> p = -8.002142682085493e-05
    >>> t = -0.000017703650669
    >>> hue_angle(p, t)  # doctest: +ELLIPSIS
    257.5250300...
    """

    p = as_float_array(p)
    t = as_float_array(t)

    # Angle of the (t, p) opponent vector, wrapped into [0, 360) degrees.
    return np.degrees(np.arctan2(p, t)) % 360
def chromatic_strength_function(theta):
    """
    Chromatic strength function :math:`E_s(\\theta)` used to correct the
    saturation scale as a function of the hue angle :math:`\\theta` in
    degrees.

    Parameters
    ----------
    theta : numeric or array_like
        Hue angle :math:`\\theta` in degrees.

    Returns
    -------
    numeric or ndarray
        Corrected saturation scale.

    Examples
    --------
    >>> h = 257.52322689806243
    >>> chromatic_strength_function(h)  # doctest: +ELLIPSIS
    1.2267869...
    """

    theta = np.radians(theta)

    # Fourth-order Fourier series fit: constant plus four sine and four
    # cosine harmonics (summed in that order).
    harmonics = [k * theta for k in (1, 2, 3, 4)]
    E_s = 0.9394
    for coefficient, harmonic in zip((-0.2478, -0.0743, 0.0666, -0.0186), harmonics):
        E_s = E_s + coefficient * np.sin(harmonic)
    for coefficient, harmonic in zip((-0.0055, -0.0521, -0.0573, -0.0061), harmonics):
        E_s = E_s + coefficient * np.cos(harmonic)

    return E_s
def saturation_components(h, bL_or, t, p):
    """
    Computes the *saturation* components :math:`S_{RG}` and :math:`S_{YB}`.

    Parameters
    ----------
    h : numeric or array_like
        Correlate of *hue* :math:`h` in degrees.
    bL_or : numeric or array_like
        Normalising chromatic adaptation exponential factor
        :math:`\\beta_1(B_or)`.
    t : numeric or array_like
        Tritanopic response :math:`t`.
    p : numeric or array_like
        Protanopic response :math:`p`.

    Returns
    -------
    numeric or ndarray
        *Saturation* components :math:`S_{RG}` and :math:`S_{YB}`.

    Examples
    --------
    >>> h = 257.52322689806243
    >>> bL_or = 3.681021495604089
    >>> t = -0.000017706764677
    >>> p = -0.000080023561356
    >>> saturation_components(h, bL_or, t, p)  # doctest: +ELLIPSIS
    array([-0.0028852..., -0.0130396...])
    """

    h = as_float_array(h)
    bL_or = as_float_array(bL_or)
    t = as_float_array(t)
    p = as_float_array(p)

    # Hue-dependent correction factor shared by both opponent components.
    scale = (488.93 / bL_or) * chromatic_strength_function(h)

    return tstack([scale * t, scale * p])
def saturation_correlate(S_RG, S_YB):
    """
    Computes the correlate of *saturation* :math:`S`.

    Parameters
    ----------
    S_RG : numeric or array_like
        *Saturation* component :math:`S_{RG}`.
    S_YB : numeric or array_like
        *Saturation* component :math:`S_{YB}`.

    Returns
    -------
    numeric or ndarray
        Correlate of *saturation* :math:`S`.

    Examples
    --------
    >>> S_RG = -0.002885271638197
    >>> S_YB = -0.013039632941332
    >>> saturation_correlate(S_RG, S_YB)  # doctest: +ELLIPSIS
    0.0133550...
    """

    # Euclidean magnitude of the two opponent saturation components.
    return np.hypot(as_float_array(S_RG), as_float_array(S_YB))
def chroma_components(L_star_P, S_RG, S_YB):
    """
    Computes the *chroma* components :math:`C_{RG}` and :math:`C_{YB}`.

    Parameters
    ----------
    L_star_P : numeric or array_like
        *Achromatic Lightness* correlate :math:`L_p^\\star`.
    S_RG : numeric or array_like
        *Saturation* component :math:`S_{RG}`.
    S_YB : numeric or array_like
        *Saturation* component :math:`S_{YB}`.

    Returns
    -------
    ndarray
        *Chroma* components :math:`C_{RG}` and :math:`C_{YB}`.

    Examples
    --------
    >>> L_star_P = 49.99988297570504
    >>> S_RG = -0.002885271638197
    >>> S_YB = -0.013039632941332
    >>> chroma_components(L_star_P, S_RG, S_YB)  # doctest: +ELLIPSIS
    array([-0.00288527, -0.01303961])
    """

    L_star_P = as_float_array(L_star_P)
    S_RG = as_float_array(S_RG)
    S_YB = as_float_array(S_YB)

    # Common lightness-dependent factor applied to both saturation
    # components (signed power keeps negative lightness ratios well-defined).
    factor = spow(L_star_P / 50, 0.7)

    return tstack([factor * S_RG, factor * S_YB])
def chroma_correlate(L_star_P, S):
    """
    Computes the correlate of *chroma* :math:`C`.

    Parameters
    ----------
    L_star_P : numeric or array_like
        *Achromatic Lightness* correlate :math:`L_p^\\star`.
    S : numeric or array_like
        Correlate of *saturation* :math:`S`.

    Returns
    -------
    numeric or ndarray
        Correlate of *chroma* :math:`C`.

    Examples
    --------
    >>> L_star_P = 49.99988297570504
    >>> S = 0.013355029751778
    >>> chroma_correlate(L_star_P, S)  # doctest: +ELLIPSIS
    0.0133550...
    """

    L_star_P = as_float_array(L_star_P)
    S = as_float_array(S)

    # Saturation scaled by the same lightness-dependent factor used in
    # chroma_components().
    return spow(L_star_P / 50, 0.7) * S
def colourfulness_components(C_RG, C_YB, B_rw):
    """
    Computes the *colourfulness* components :math:`M_{RG}` and
    :math:`M_{YB}`.

    Parameters
    ----------
    C_RG : numeric or array_like
        *Chroma* component :math:`C_{RG}`.
    C_YB : numeric or array_like
        *Chroma* component :math:`C_{YB}`.
    B_rw : numeric or array_like
        Ideal white *brightness* correlate :math:`B_{rw}`.

    Returns
    -------
    numeric or ndarray
        *Colourfulness* components :math:`M_{RG}` and :math:`M_{YB}`.

    Examples
    --------
    >>> C_RG = -0.002885271638197
    >>> C_YB = -0.013039632941332
    >>> B_rw = 125.24353925846037
    >>> colourfulness_components(C_RG, C_YB, B_rw)  # doctest: +ELLIPSIS
    (-0.0036136..., -0.0163312...)
    """

    C_RG = as_float_array(C_RG)
    C_YB = as_float_array(C_YB)
    B_rw = as_float_array(B_rw)

    # Each chroma component scaled by the ideal white brightness expressed
    # as a percentage; returned as a (M_RG, M_YB) tuple.
    return tuple(component * B_rw / 100 for component in (C_RG, C_YB))
def colourfulness_correlate(C, B_rw):
    """
    Computes the correlate of *colourfulness* :math:`M`.

    Parameters
    ----------
    C : numeric or array_like
        Correlate of *chroma* :math:`C`.
    B_rw : numeric or array_like
        Ideal white *brightness* correlate :math:`B_{rw}`.

    Returns
    -------
    numeric or ndarray
        Correlate of *colourfulness* :math:`M`.

    Examples
    --------
    >>> C = 0.013355007871689
    >>> B_rw = 125.24353925846037
    >>> colourfulness_correlate(C, B_rw)  # doctest: +ELLIPSIS
    0.0167262...
    """

    C = as_float_array(C)
    B_rw = as_float_array(B_rw)

    # Chroma scaled by the ideal white brightness expressed as a percentage.
    return C * B_rw / 100
| 29.005342 | 79 | 0.590151 |
ace1d407b3316b10a85f07cf4268b04bcdd87f27 | 561 | py | Python | tkinter/tkinter-pindex.py | demonMOE-s/Toys | 09ddd7e3f1d3956427c2f3d9e99ca05fc63c177e | [
"BSD-2-Clause"
] | 2 | 2019-01-06T05:32:18.000Z | 2019-12-12T04:54:56.000Z | tkinter/tkinter-pindex.py | demonMOE-s/Toys | 09ddd7e3f1d3956427c2f3d9e99ca05fc63c177e | [
"BSD-2-Clause"
] | null | null | null | tkinter/tkinter-pindex.py | demonMOE-s/Toys | 09ddd7e3f1d3956427c2f3d9e99ca05fc63c177e | [
"BSD-2-Clause"
] | null | null | null | # wm, 12.11.2006
from Tkinter import *
from random import randint
def motion(event):
    """Reveal the marker oval and centre it on the polyline coordinate
    closest to the current pointer position (bound to <Motion>)."""
    nearest = canv.index(poly, "@%f,%f" % (event.x, event.y))
    vx, vy = points[nearest], points[nearest + 1]
    canv.itemconfig(point, state='normal')
    radius = 4
    canv.coords(point, vx - radius, vy - radius, vx + radius, vy + radius)
if __name__ == '__main__':
    # Build a window with a canvas showing a random polyline; moving the
    # mouse highlights the vertex nearest the pointer (see motion() above).
    root = Tk()
    canv = Canvas(root)
    canv.pack()
    n = 10  # number of polyline vertices
    # 2 * n random coordinates in [10, 190]: x0, y0, x1, y1, ...
    # (xrange: this script targets Python 2.)
    points = [randint(10, 190) for i in xrange(2*n)]
    poly = canv.create_line(*points)
    # Marker oval, kept hidden until the first mouse motion.
    point = canv.create_oval(0,0,0,0, state='hidden', fill='white')
    canv.bind('<Motion>', motion)
    root.mainloop()
| 21.576923 | 65 | 0.627451 |
ace1d4f82ecc7d63186987b6dea35944d838f6e6 | 2,986 | py | Python | FinHelper/gen/ui files/newacc.py | felipepo/FinancialHelper---PyQT | f959f968ffe6b598035edadaddbe57766dab778d | [
"MIT"
] | null | null | null | FinHelper/gen/ui files/newacc.py | felipepo/FinancialHelper---PyQT | f959f968ffe6b598035edadaddbe57766dab778d | [
"MIT"
] | 1 | 2019-07-01T12:01:02.000Z | 2019-07-01T12:01:02.000Z | FinHelper/gen/ui files/newacc.py | felipepo/FinancialHelper---PyQT | f959f968ffe6b598035edadaddbe57766dab778d | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'newacc.ui',
# licensing of 'newacc.ui' applies.
#
# Created: Sun Jul 7 17:11:16 2019
# by: pyside2-uic running on PySide2 5.12.3
#
# WARNING! All changes made in this file will be lost!
from PySide2 import QtCore, QtGui, QtWidgets
class Ui_Dialog(object):
    """Auto-generated (pyside2-uic) UI definition for the account dialog.

    Per the header warning, manual edits to this class are lost on the next
    regeneration -- change 'newacc.ui' instead and re-run pyside2-uic.
    """
    def setupUi(self, Dialog):
        """Create all widgets and lay them out on *Dialog* in a QGridLayout."""
        Dialog.setObjectName("Dialog")
        Dialog.resize(306, 207)
        self.gridLayout = QtWidgets.QGridLayout(Dialog)
        self.gridLayout.setObjectName("gridLayout")
        # Row 0: the two mutually-exclusive account-type radio buttons.
        self.AccRadio = QtWidgets.QRadioButton(Dialog)
        self.AccRadio.setObjectName("AccRadio")
        self.gridLayout.addWidget(self.AccRadio, 0, 0, 1, 1)
        self.OK = QtWidgets.QPushButton(Dialog)
        self.OK.setObjectName("OK")
        self.gridLayout.addWidget(self.OK, 4, 1, 1, 1)
        # Rows 1-2: labels and line edits for the name / initial value fields.
        self.label = QtWidgets.QLabel(Dialog)
        self.label.setStyleSheet("font: 63 11pt \"Segoe UI Semibold\";")
        self.label.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.label.setObjectName("label")
        self.gridLayout.addWidget(self.label, 1, 0, 1, 1)
        self.label_3 = QtWidgets.QLabel(Dialog)
        self.label_3.setStyleSheet("font: 63 11pt \"Segoe UI Semibold\";")
        self.label_3.setObjectName("label_3")
        self.gridLayout.addWidget(self.label_3, 2, 0, 1, 1)
        self.CCRadio = QtWidgets.QRadioButton(Dialog)
        self.CCRadio.setObjectName("CCRadio")
        self.gridLayout.addWidget(self.CCRadio, 0, 1, 1, 1)
        self.lineEdit_3 = QtWidgets.QLineEdit(Dialog)
        self.lineEdit_3.setAlignment(QtCore.Qt.AlignCenter)
        self.lineEdit_3.setObjectName("lineEdit_3")
        self.gridLayout.addWidget(self.lineEdit_3, 2, 1, 1, 1)
        self.lineEdit = QtWidgets.QLineEdit(Dialog)
        self.lineEdit.setAlignment(QtCore.Qt.AlignCenter)
        self.lineEdit.setObjectName("lineEdit")
        self.gridLayout.addWidget(self.lineEdit, 1, 1, 1, 1)
        # Row 3: vertical spacer pushing the OK button to the bottom.
        spacerItem = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
        self.gridLayout.addItem(spacerItem, 3, 1, 1, 1)
        self.retranslateUi(Dialog)
        QtCore.QMetaObject.connectSlotsByName(Dialog)
    def retranslateUi(self, Dialog):
        """Assign all user-visible (translatable) strings to the widgets."""
        Dialog.setWindowTitle(QtWidgets.QApplication.translate("Dialog", "Dialog", None, -1))
        self.AccRadio.setText(QtWidgets.QApplication.translate("Dialog", "Conta", None, -1))
        self.OK.setText(QtWidgets.QApplication.translate("Dialog", "Ok", None, -1))
        self.label.setText(QtWidgets.QApplication.translate("Dialog", "Nome", None, -1))
        self.label_3.setText(QtWidgets.QApplication.translate("Dialog", "Valor Inicial (R$)", None, -1))
        self.CCRadio.setText(QtWidgets.QApplication.translate("Dialog", "Cartão de Crédito", None, -1))
        self.lineEdit_3.setPlaceholderText(QtWidgets.QApplication.translate("Dialog", "00", None, -1))
| 49.766667 | 114 | 0.687542 |
ace1d5c6453b033106c752c564deb934d2a5857c | 2,797 | py | Python | recipes/aws-c-sdkutils/all/conanfile.py | rockandsalt/conan-center-index | d739adcec3e4dd4c250eff559ceb738e420673dd | [
"MIT"
] | null | null | null | recipes/aws-c-sdkutils/all/conanfile.py | rockandsalt/conan-center-index | d739adcec3e4dd4c250eff559ceb738e420673dd | [
"MIT"
] | 3 | 2020-05-05T11:27:44.000Z | 2022-02-28T20:19:50.000Z | recipes/aws-c-sdkutils/all/conanfile.py | rockandsalt/conan-center-index | d739adcec3e4dd4c250eff559ceb738e420673dd | [
"MIT"
] | 1 | 2020-10-12T10:45:13.000Z | 2020-10-12T10:45:13.000Z | from conans import ConanFile, CMake, tools
import os
required_conan_version = ">=1.43.0"
class AwsCSDKUtils(ConanFile):
    """Conan (1.x API) recipe building the aws-c-sdkutils C library."""
    name = "aws-c-sdkutils"
    description = "aws c language sdk utility library."
    topics = ("aws", "amazon", "cloud", "utility")
    url = "https://github.com/conan-io/conan-center-index"
    homepage = "https://github.com/awslabs/aws-c-sdkutils"
    license = "Apache-2.0",
    exports_sources = "CMakeLists.txt",
    generators = "cmake", "cmake_find_package_multi"
    settings = "os", "arch", "compiler", "build_type"
    options = {
        "shared": [True, False],
        "fPIC": [True, False],
    }
    default_options = {
        "shared": False,
        "fPIC": True,
    }

    # Lazily built CMake helper, cached by _configure_cmake().
    _cmake = None

    @property
    def _source_subfolder(self):
        # Where source() unpacks the upstream tarball.
        return "source_subfolder"

    def config_options(self):
        """Drop the fPIC option on Windows, where it has no meaning."""
        if self.settings.os == "Windows":
            del self.options.fPIC

    def configure(self):
        """Shared builds imply PIC; a pure C library ignores C++ settings."""
        if self.options.shared:
            del self.options.fPIC
        del self.settings.compiler.cppstd
        del self.settings.compiler.libcxx

    def requirements(self):
        self.requires("aws-c-common/0.6.15")

    def source(self):
        tools.get(**self.conan_data["sources"][self.version],
                  destination=self._source_subfolder, strip_root=True)

    def _configure_cmake(self):
        # Configure CMake once and reuse it for build() and package().
        if self._cmake:
            return self._cmake
        self._cmake = CMake(self)
        self._cmake.configure()
        return self._cmake

    def build(self):
        cmake = self._configure_cmake()
        cmake.build()

    def package(self):
        """Install the built artifacts and the upstream LICENSE file."""
        self.copy(pattern="LICENSE", dst="licenses", src=self._source_subfolder)
        cmake = self._configure_cmake()
        cmake.install()
        # Drop upstream's CMake config files; Conan generates its own.
        tools.rmdir(os.path.join(self.package_folder, "lib", "aws-c-sdkutils"))

    def package_info(self):
        """Expose the library as the AWS::aws-c-sdkutils CMake target."""
        self.cpp_info.set_property("cmake_file_name", "aws-c-sdkutils")
        self.cpp_info.set_property("cmake_target_name", "AWS::aws-c-sdkutils")
        self.cpp_info.filenames["cmake_find_package"] = "aws-c-sdkutils"
        self.cpp_info.filenames["cmake_find_package_multi"] = "aws-c-sdkutils"
        self.cpp_info.names["cmake_find_package"] = "AWS"
        self.cpp_info.names["cmake_find_package_multi"] = "AWS"
        self.cpp_info.components["aws-c-sdkutils-lib"].names["cmake_find_package"] = "aws-c-sdkutils"
        self.cpp_info.components["aws-c-sdkutils-lib"].names["cmake_find_package_multi"] = "aws-c-sdkutils"
        self.cpp_info.components["aws-c-sdkutils-lib"].set_property("cmake_target_name", "AWS::aws-c-sdkutils")
        self.cpp_info.components["aws-c-sdkutils-lib"].libs = ["aws-c-sdkutils"]
        self.cpp_info.components["aws-c-sdkutils-lib"].requires = ["aws-c-common::aws-c-common-lib"]
| 34.530864 | 111 | 0.644977 |
ace1d68725d53ef9bca3622864abb11e29faba5c | 349 | py | Python | tests/internal/instance_type/test_instance_type_dl_auto.py | frolovv/aws.ec2.compare | 582805823492f833d65c0441c4a14dce697c12aa | [
"Apache-2.0"
] | null | null | null | tests/internal/instance_type/test_instance_type_dl_auto.py | frolovv/aws.ec2.compare | 582805823492f833d65c0441c4a14dce697c12aa | [
"Apache-2.0"
] | null | null | null | tests/internal/instance_type/test_instance_type_dl_auto.py | frolovv/aws.ec2.compare | 582805823492f833d65c0441c4a14dce697c12aa | [
"Apache-2.0"
] | null | null | null |
# Testing module instance_type.dl
import pytest
import ec2_compare.internal.instance_type.dl
def test_get_internal_data_instance_type_dl_get_instances_list():
    """The dl instance-type module must expose a non-empty instance list."""
    instances = ec2_compare.internal.instance_type.dl.get_instances_list()
    assert len(instances) > 0
def test_get_internal_data_instance_type_dl_get():
    """The dl instance-type module must expose a non-empty `get` container."""
    data = ec2_compare.internal.instance_type.dl.get
    assert len(data) > 0
| 34.9 | 76 | 0.848138 |
ace1d6eb7ba72da60a15c4b9d2342b09798fdc92 | 925 | py | Python | jaxlib/version.py | PKUFlyingPig/jax-alpa | de6d763590336d5be89fe2f11f2165c3d28056ca | [
"Apache-2.0"
] | 1 | 2022-01-13T00:22:47.000Z | 2022-01-13T00:22:47.000Z | jaxlib/version.py | Little-Pea-2022/jax | a254bc88b84a02e988b6a752ce3d14c7dbe94cbb | [
"Apache-2.0"
] | 8 | 2022-01-03T16:09:12.000Z | 2022-03-24T14:18:05.000Z | jaxlib/version.py | jschiavon/jax | 6411f8a03388ce63eb365188f2e2880815745125 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# After a new jaxlib release, please remember to update the values of
# `_current_jaxlib_version` and `_available_cuda_versions` in setup.py to
# reflect the most recent available binaries.
# __version__ should be increased after releasing the current version
# (i.e. on main, this is always the next version to be released).
__version__ = "0.1.76"
| 44.047619 | 74 | 0.76973 |
ace1d72ccea34e49898dbd3c59d70012e2e12323 | 1,109 | py | Python | phpsyntax.py | fujimakishouten/svn-tools | 69b9b2206a89595721cc4b85488deee61925e97c | [
"MIT"
] | null | null | null | phpsyntax.py | fujimakishouten/svn-tools | 69b9b2206a89595721cc4b85488deee61925e97c | [
"MIT"
] | null | null | null | phpsyntax.py | fujimakishouten/svn-tools | 69b9b2206a89595721cc4b85488deee61925e97c | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: set expandtab tabstop=4 shiftwidth=4 softtabstop=4 fileencoding=utf-8:
import argparse
import sys
import subprocess
from xml.etree import ElementTree
# Parse command line options (the description doubles as --help text).
parser = argparse.ArgumentParser(description="「svn status --xml」で出力したファイルのうち、拡張子が「.php」のもののシンタックスをチェックします。")
args = parser.parse_args()

# Read "svn status --xml" output from stdin; no input means nothing to do.
data = sys.stdin.read()
if not data:
    sys.exit(0)

# Parse the XML status report.  Only malformed XML is treated as "nothing to
# check"; the previous bare `except:` also swallowed KeyboardInterrupt etc.
try:
    xml = ElementTree.fromstring(data)
except ElementTree.ParseError:
    sys.exit(0)

# Run `php -l` on every listed .php file and report lint failures in colour.
for entry in xml.iter("entry"):
    path = entry.get("path")
    if path is not None and path.endswith('.php'):
        try:
            subprocess.check_output(["/usr/bin/env", "php", "-l", path])
        except subprocess.CalledProcessError as e:
            # check_output() returns bytes on Python 3 (str on Python 2);
            # normalise so the str split below works on both.
            output = e.output.decode("utf-8", "replace") if isinstance(e.output, bytes) else e.output
            print("\033[31mError : {0}\033[0m".format(path))
            print("\033[34mOutput: \033[0m")
            for line in output.split("\n"):
                if line:
                    print(" " + line.strip())
            print("")
# Local variables:
# tab-width: 4
# c-basic-offset: 4
# c-hanging-comment-ender-p: nil
# End:
| 23.104167 | 108 | 0.629396 |
ace1d92cc194d1b6d78c53cf0e0f6781ac0cc761 | 1,787 | py | Python | DIGDriver/data_tools/mappability_tools.py | maxwellsh/DIGDriver | 1f8503c8c22861d6f9b601fd8c5a131e3dc31fc1 | [
"BSD-3-Clause"
] | 1 | 2021-08-07T00:05:48.000Z | 2021-08-07T00:05:48.000Z | DIGDriver/data_tools/mappability_tools.py | maxwellsh/DIGDriver | 1f8503c8c22861d6f9b601fd8c5a131e3dc31fc1 | [
"BSD-3-Clause"
] | null | null | null | DIGDriver/data_tools/mappability_tools.py | maxwellsh/DIGDriver | 1f8503c8c22861d6f9b601fd8c5a131e3dc31fc1 | [
"BSD-3-Clause"
] | null | null | null | import numpy as np
import pandas as pd
import bbi
import pysam
##only mappabilty_by_idx called from top level
def load_chromsizes(f_bw):
    """Return chromosome sizes from a bigWig file, dropping the
    mitochondrial (chrM) and sex (chrX, chrY) chromosomes."""
    chroms = bbi.chromsizes(f_bw)
    for excluded in ('chrM', 'chrX', 'chrY'):
        chroms.pop(excluded)
    return chroms
def mappability_by_window(f_mapp, window, overlap=0):
    """Tile each chromosome into (possibly overlapping) windows and fetch
    the aggregated mappability of every window from a bigWig file.

    Parameters
    ----------
    f_mapp : str
        Path to a mappability bigWig file.
    window : int
        Window size in bases.
    overlap : int, optional
        Number of bases consecutive windows share; must be < window.

    Returns
    -------
    pandas.DataFrame
        One row per window with columns CHROM, START, END, MAPP.
    """
    if overlap >= window:
        # Guard against a non-advancing (infinite) loop below.
        raise ValueError("overlap must be smaller than window")
    chroms = load_chromsizes(f_mapp)
    mapp_lst = []
    for chr_id, chr_size in chroms.items():
        print(chr_id, end=' ')
        i = 0
        while i + window < chr_size:
            # Single-bin fetch aggregates the signal over the window.
            mapp = bbi.fetch(f_mapp, chr_id, i, i + window, bins=1)[0]
            mapp_lst.append([chr_id, i, i + window, mapp])
            i += window - overlap
    # Build the frame from the raw rows: the previous np.array(...) call
    # coerced the mixed str/int/float rows to a single string dtype,
    # silently turning START/END/MAPP into strings.
    return pd.DataFrame(mapp_lst, columns=['CHROM', 'START', 'END', 'MAPP'])
def mappability_by_idx(f_mapp, idx):
    """Fetch the aggregated mappability for every (chrom, start, end) row.

    ``idx`` rows carry an un-prefixed chromosome id in position 0 and the
    start/end coordinates in positions 1 and 2; progress is printed once per
    new chromosome.  Returns a list of [chrom, start, end, mapp] rows.
    """
    results = []
    previous_chrom = ''
    for row in idx:
        chrom = 'chr{}'.format(row[0])
        start, end = row[1], row[2]
        if chrom != previous_chrom:
            print(chrom)
            previous_chrom = chrom
        mapp = bbi.fetch(f_mapp, chrom, start, end, bins=1)[0]
        results.append([row[0], start, end, mapp])
    return results
def P_bases_by_window(f_fasta, window, overlap=0):
    """Tile every sequence of a FASTA file into windows and compute, per
    window, the fraction of bases equal to 'P'.

    Parameters
    ----------
    f_fasta : str
        Path to an indexed FASTA file (opened with pysam.FastaFile).
    window : int
        Window size in bases.
    overlap : int, optional
        Number of bases consecutive windows share; must be < window.

    Returns
    -------
    pandas.DataFrame
        One row per window with columns CHROM, START, END, MAPP, where MAPP
        is the fraction of 'P' bases in the window.
    """
    if overlap >= window:
        # Guard against a non-advancing (infinite) loop below.
        raise ValueError("overlap must be smaller than window")
    fasta = pysam.FastaFile(f_fasta)
    sizes = fasta.lengths
    chroms = fasta.references
    mapp_lst = []
    for chr_id, chr_size in zip(chroms, sizes):
        print(chr_id, end=' ')
        i = 0
        while i + window < chr_size:
            seq = fasta.fetch(chr_id, i, i + window)
            mapp = seq.count('P') / window
            mapp_lst.append([chr_id, i, i + window, mapp])
            i += window - overlap
    # Build the frame from the raw rows: the previous np.array(...) call
    # coerced the mixed str/int/float rows to a single string dtype,
    # silently turning START/END/MAPP into strings.
    return pd.DataFrame(mapp_lst, columns=['CHROM', 'START', 'END', 'MAPP'])
| 27.492308 | 70 | 0.571908 |
ace1da116f61f0bd20c9e29b262ff5495b5d5b17 | 5,918 | py | Python | app.py | TR0UBLESH00TER/Bio-Link-Tool | 95636645ac440c1d42a67ebb5ef92eb47ff91149 | [
"MIT"
] | 1 | 2021-02-10T13:57:27.000Z | 2021-02-10T13:57:27.000Z | app.py | TR0UBLESH00TER/Bio-Link-Tool | 95636645ac440c1d42a67ebb5ef92eb47ff91149 | [
"MIT"
] | null | null | null | app.py | TR0UBLESH00TER/Bio-Link-Tool | 95636645ac440c1d42a67ebb5ef92eb47ff91149 | [
"MIT"
] | null | null | null | # Importing required modules and functions.
from flask import Flask, render_template, request, redirect
import datetime
import sqlite3
import os
# Created App
Tree = Flask(__name__)
# Main page where all links are displayed.
@Tree.route('/')
def tree():
    """Render the public landing page listing every stored link."""
    # Pull every stored (DisplayName, Link) row and reverse the result,
    # mirroring the original behaviour (presumably latest-added first).
    connection = sqlite3.connect('links.db')
    rows = connection.cursor().execute("SELECT * FROM links;").fetchall()
    connection.close()
    link_list = rows[::-1]
    # Values consumed by the tree.html template.
    context = {
        'CurrentYear': datetime.datetime.now().year,
        'tab_title': os.getenv("tab_title"),
        'organisation_name': os.getenv("organisation_name"),
        'title': os.getenv("title"),
        'avatar': os.getenv("avatar_link"),
        'page_icon': os.getenv("page_icon"),
        'linkList': link_list,
    }
    return render_template('tree.html', **context)
# Login page for admin login.
@Tree.route('/login')
def login():
    """Render the admin login form, creating the links table on first run."""
    CurrentYear = datetime.datetime.now().year
    tab_title = "Login"
    organisation_name = os.getenv("organisation_name")
    page_icon = os.getenv("page_icon")
    # Create the backing table on first use.  "IF NOT EXISTS" replaces the
    # old try/except-pass, which silently swallowed *every* sqlite error,
    # not just the expected "table already exists" one.
    # NOTE(review): VARCHAR(265) looks like a typo for 256, but the schema
    # is kept as-is for compatibility with databases created earlier.
    db = sqlite3.connect('links.db')
    db.execute(
        "CREATE TABLE IF NOT EXISTS links "
        "(DisplayName VARCHAR(265) PRIMARY KEY, Link VARCHAR(256))"
    )
    db.commit()
    db.close()
    return render_template('login.html', CurrentYear=CurrentYear,
                           organisation_name=organisation_name,
                           tab_title=tab_title, page_icon=page_icon)
# To verify fetch data entered in login page and redirect to /admin where checking/ verifying of
# username and passwrd is done and a page is displayed accordingly.
@Tree.route('/verify',methods=["POST"])
def verify():
    """Stash the submitted credentials for /admin to check."""
    # NOTE(review): module-level globals are shared by every request, so
    # concurrent logins can clobber each other; consider flask.session.
    global user_name, _password
    user_name = request.form.get("user_name")
    _password = request.form.get("_password")
    return redirect('/admin')
# Admin page where adding and removing of links from the page can be done.
@Tree.route('/admin')
def admin():
    """Admin dashboard; only rendered right after a successful /verify.

    The credential globals are consumed (blanked) on success, so a page
    refresh falls through to the invalid page until /verify runs again.
    """
    global user_name, _password
    # All required variables used in the webpage.
    CurrentYear = datetime.datetime.now().year
    tab_title = "Admin"
    organisation_name = os.getenv("organisation_name")
    title = os.getenv("title")
    avatar = os.getenv("avatar_link")
    page_icon = os.getenv("page_icon")
    # Fetching all data for buttons.
    db = sqlite3.connect('links.db')
    cursor = db.cursor()
    cursor.execute(f"SELECT * FROM links;")
    linkList = cursor.fetchall()[::-1]
    db.close()
    # The broad except below also covers the NameError raised when /admin is
    # visited directly, i.e. before /verify ever defined the globals.
    try:
        # Checking password and username entered in login page.
        if user_name == os.getenv("user_name") and _password == os.getenv("_password"):
            # NOTE(review): printing credentials to the server log is a
            # security liability; consider removing this.
            print(user_name,_password)
            user_name = _password = ""
            return render_template('admin.html', CurrentYear=CurrentYear, title=title, avatar=avatar, \
                linkList=linkList, organisation_name=organisation_name, tab_title=tab_title, page_icon=page_icon)
        else:
            return render_template('invalid.html', CurrentYear=CurrentYear, tab_title=tab_title, \
                organisation_name=organisation_name, page_icon=page_icon)
    except Exception:
        return render_template('invalid.html', CurrentYear=CurrentYear, tab_title=tab_title, \
            organisation_name=organisation_name, page_icon=page_icon)
# To add the display name and link passed in admin and redirect back to admin (in a way refreshes the page).
@Tree.route('/add',methods=["POST"])
def add():
    """Insert a new (DisplayName, Link) row, then reload the admin page."""
    global user_name, _password
    display_name = request.form.get("display_name")
    link = request.form.get("link")
    if display_name is not None and link is not None:
        db = sqlite3.connect('links.db')
        try:
            # Placeholders let sqlite3 escape the values, closing the SQL
            # injection hole left by the old f-string-built statement.
            db.execute("INSERT INTO links VALUES(?, ?);", (display_name, link))
            db.commit()
        except sqlite3.Error:
            # Best effort: e.g. a duplicate DisplayName (primary key) is
            # ignored instead of surfacing a 500 to the admin.
            pass
        finally:
            db.close()
    # Re-arm the credentials that /admin consumes on each render.
    user_name = os.getenv("user_name")
    _password = os.getenv("_password")
    return redirect('/admin')
# To delete the display name passed in admin and redirect back to admin (in a way refreshes the page).
@Tree.route('/delete',methods=["POST"])
def delete():
    """Remove the row with the posted display name, then reload /admin."""
    global user_name, _password
    del_display_name = request.form.get("del_display_name")
    if del_display_name is not None:
        db = sqlite3.connect('links.db')
        try:
            # Placeholders let sqlite3 escape the value, closing the SQL
            # injection hole left by the old f-string-built statement.
            db.execute("DELETE FROM links WHERE DisplayName=?;", (del_display_name,))
            db.commit()
        except sqlite3.Error:
            # Best effort: a failed delete still sends the user back.
            pass
        finally:
            db.close()
    # Re-arm the credentials that /admin consumes on each render.
    user_name = os.getenv("user_name")
    _password = os.getenv("_password")
    return redirect('/admin')
# Page for Error 404
@Tree.errorhandler(404)
def error404(e):
    """Render the custom page for HTTP 404 (not found) responses."""
    context = {
        'CurrentYear': datetime.datetime.now().year,
        'tab_title': "ERROR 404",
        'page_icon': os.getenv("page_icon"),
        'organisation_name': os.getenv("organisation_name"),
    }
    return render_template("error404.html", **context), 404
# Page for Error 405
@Tree.errorhandler(405)
def error405(e):
    """Render the custom page for HTTP 405 (method not allowed) responses."""
    # All required variables used in the webpage.
    CurrentYear = datetime.datetime.now().year
    tab_title = "ERROR 405"
    page_icon = os.getenv("page_icon")
    organisation_name = os.getenv("organisation_name")
    return render_template('error405.html', CurrentYear=CurrentYear, tab_title=tab_title, \
        organisation_name=organisation_name, page_icon=page_icon ), 405 | 31.989189 | 113 | 0.672525 |
ace1da5bd1c977cfe2e63a80e98f03dfb9034c7a | 3,815 | py | Python | src/backend/web/handlers/suggestions/suggest_event_media_review_controller.py | bovlb/the-blue-alliance | 29389649d96fe060688f218d463e642dcebfd6cc | [
"MIT"
] | 266 | 2015-01-04T00:10:48.000Z | 2022-03-28T18:42:05.000Z | src/backend/web/handlers/suggestions/suggest_event_media_review_controller.py | bovlb/the-blue-alliance | 29389649d96fe060688f218d463e642dcebfd6cc | [
"MIT"
] | 2,673 | 2015-01-01T20:14:33.000Z | 2022-03-31T18:17:16.000Z | src/backend/web/handlers/suggestions/suggest_event_media_review_controller.py | ZachOrr/the-blue-alliance | b9a2e6e07374fb12c70f8fae1948bfe90e34adfe | [
"MIT"
] | 230 | 2015-01-04T00:10:48.000Z | 2022-03-26T18:12:04.000Z | import json
from typing import Optional
from flask import redirect, request
from google.appengine.ext import ndb
from werkzeug.wrappers import Response
from backend.common.consts.account_permission import AccountPermission
from backend.common.consts.media_type import IMAGE_TYPES
from backend.common.consts.suggestion_state import SuggestionState
from backend.common.manipulators.media_manipulator import MediaManipulator
from backend.common.models.media import Media
from backend.common.models.suggestion import Suggestion
from backend.common.suggestions.media_creator import MediaCreator
from backend.web.handlers.suggestions.suggestion_review_base import (
SuggestionsReviewBase,
)
from backend.web.profiled_render import render_template
class SuggestEventMediaReviewController(SuggestionsReviewBase[Media]):
    """Review flow for user-suggested event media.

    Lets a reviewer holding the REVIEW_EVENT_MEDIA permission list pending
    "event_media" suggestions and accept or reject them.
    """

    REQUIRED_PERMISSIONS = [AccountPermission.REVIEW_EVENT_MEDIA]

    def __init__(self, *args, **kw) -> None:
        super().__init__(*args, **kw)

    def get(self) -> Response:
        """View the list of suggestions."""
        # (This docstring used to be a stray string expression in the class
        # body, attached to nothing; it belongs on this method.)
        super().get()
        suggestions = (
            Suggestion.query()
            .filter(Suggestion.review_state == SuggestionState.REVIEW_PENDING)
            .filter(Suggestion.target_model == "event_media")
            .fetch(limit=50)
        )

        # Quick and dirty way to group images together
        suggestions = sorted(
            suggestions,
            key=lambda x: 0 if x.contents["media_type_enum"] in IMAGE_TYPES else 1,
        )

        reference_keys = []
        for suggestion in suggestions:
            reference_key = suggestion.contents["reference_key"]
            reference = Media.create_reference(
                suggestion.contents["reference_type"], reference_key
            )
            reference_keys.append(reference)

            if "details_json" in suggestion.contents:
                suggestion.details = json.loads(suggestion.contents["details_json"])
                if "image_partial" in suggestion.details:
                    # Presumably swaps the large ("_l") image variant for the
                    # medium ("_m") one to use as a thumbnail.
                    suggestion.details["thumbnail"] = suggestion.details[
                        "image_partial"
                    ].replace("_l", "_m")

        # Resolve all referenced Media entities concurrently.
        reference_futures = ndb.get_multi_async(reference_keys)
        references = map(lambda r: r.get_result(), reference_futures)
        suggestions_and_references = list(zip(suggestions, references))

        template_values = {
            "suggestions_and_references": suggestions_and_references,
        }

        return render_template(
            "suggestions/suggest_event_media_review_list.html", template_values
        )

    def create_target_model(self, suggestion: Suggestion) -> Optional[Media]:
        """Create or update the Media model for an accepted suggestion."""
        event_reference = Media.create_reference(
            suggestion.contents["reference_type"], suggestion.contents["reference_key"]
        )
        media = MediaCreator.create_media_model(suggestion, event_reference, [])

        # Do all DB writes
        return MediaManipulator.createOrUpdate(media)

    def post(self) -> Response:
        """Apply the reviewer's accept/reject form submission."""
        super().post()
        accept_keys = []
        reject_keys = []
        # Form values look like "accept::<key>" or "reject::<key>".
        for value in request.form.values():
            split_value = value.split("::")
            if len(split_value) == 2:
                key = split_value[1]
            else:
                continue
            if value.startswith("accept"):
                accept_keys.append(key)
            elif value.startswith("reject"):
                reject_keys.append(key)

        # Process accepts
        for accept_key in accept_keys:
            self._process_accepted(accept_key)

        # Process rejects
        self._process_rejected(reject_keys)

        return redirect("/suggest/event/media/review")
| 34.681818 | 87 | 0.657405 |
ace1da68a2e8cab4b38b5716e3cb069976dbf45e | 928 | py | Python | src/syft/lib/pandas/frame.py | dnabanita7/PySyft | ce2510e65f5bad382e88806bcde30fa38c3c76c4 | [
"Apache-2.0"
] | 1 | 2021-06-06T06:57:36.000Z | 2021-06-06T06:57:36.000Z | src/syft/lib/pandas/frame.py | dnabanita7/PySyft | ce2510e65f5bad382e88806bcde30fa38c3c76c4 | [
"Apache-2.0"
] | null | null | null | src/syft/lib/pandas/frame.py | dnabanita7/PySyft | ce2510e65f5bad382e88806bcde30fa38c3c76c4 | [
"Apache-2.0"
] | null | null | null | # third party
import pandas as pd
# syft relative
from ...generate_wrapper import GenerateWrapper
from ...lib.python.dict import Dict
from ...lib.python.primitive_factory import PrimitiveFactory
from ...proto.lib.pandas.frame_pb2 import PandasDataFrame as PandasDataFrame_PB
def object2proto(obj: pd.DataFrame) -> PandasDataFrame_PB:
    """Serialize a pandas DataFrame into its protobuf wire format."""
    # Route through the syft Dict primitive so the nested values reuse the
    # existing Dict serialization machinery.
    serialized_dict = PrimitiveFactory.generate_primitive(
        value=obj.to_dict()
    )._object2proto()
    return PandasDataFrame_PB(id=serialized_dict.id, dataframe=serialized_dict)
def proto2object(proto: PandasDataFrame_PB) -> pd.DataFrame:
    """Deserialize a PandasDataFrame protobuf back into a pandas DataFrame."""
    # Recover the syft Dict first, then upcast it to a plain python dict
    # that pandas can consume directly.
    wrapped_dict = Dict._proto2object(proto=proto.dataframe)
    plain_dict = wrapped_dict.upcast()
    return pd.DataFrame.from_dict(plain_dict)
# Register pd.DataFrame with syft's serde layer so DataFrames can be
# transparently (de)serialized using the two converters defined above.
GenerateWrapper(
    wrapped_type=pd.DataFrame,
    import_path="pandas.DataFrame",
    protobuf_scheme=PandasDataFrame_PB,
    type_object2proto=object2proto,
    type_proto2object=proto2object,
)
| 28.121212 | 79 | 0.771552 |
ace1dabf8b8ce9abd670aec5abbddd8b0214a176 | 12,738 | py | Python | python/labours/readers.py | liserjrqlxue/hercules | 5263b0a6fa8bd2a0af2246106d7b1b076aac75aa | [
"Apache-2.0"
] | null | null | null | python/labours/readers.py | liserjrqlxue/hercules | 5263b0a6fa8bd2a0af2246106d7b1b076aac75aa | [
"Apache-2.0"
] | null | null | null | python/labours/readers.py | liserjrqlxue/hercules | 5263b0a6fa8bd2a0af2246106d7b1b076aac75aa | [
"Apache-2.0"
] | null | null | null | from argparse import Namespace
from importlib import import_module
import re
import sys
from typing import Any, Dict, List, Tuple, TYPE_CHECKING
import numpy
import yaml
from labours.objects import DevDay
if TYPE_CHECKING:
from scipy.sparse.csr import csr_matrix
class Reader(object):
    """Abstract interface for loading a Hercules analysis result and
    exposing its sections (burndown, couples, shotness, sentiment, devs).

    Concrete implementations: YamlReader and ProtobufReader (see READERS).
    """

    def read(self, file):
        # Parse `file` and stash the result on self ("-" means stdin).
        raise NotImplementedError

    def get_name(self):
        # Repository name/URL of the analyzed project.
        raise NotImplementedError

    def get_header(self):
        # (begin_unix_time, end_unix_time) of the analysis.
        raise NotImplementedError

    def get_burndown_parameters(self):
        # (sampling, granularity, tick size) of the burndown analysis.
        raise NotImplementedError

    def get_project_burndown(self):
        raise NotImplementedError

    def get_files_burndown(self):
        raise NotImplementedError

    def get_people_burndown(self):
        raise NotImplementedError

    def get_ownership_burndown(self):
        raise NotImplementedError

    def get_people_interaction(self):
        raise NotImplementedError

    def get_files_coocc(self):
        # (index, sparse co-occurrence matrix) for files.
        raise NotImplementedError

    def get_people_coocc(self):
        # (index, sparse co-occurrence matrix) for people.
        raise NotImplementedError

    def get_shotness_coocc(self):
        raise NotImplementedError

    def get_shotness(self):
        raise NotImplementedError

    def get_sentiment(self):
        raise NotImplementedError

    def get_devs(self):
        raise NotImplementedError
class YamlReader(Reader):
    """Reads Hercules analysis results from the YAML output format."""

    def read(self, file: str):
        """Parse the YAML document from `file` ("-" reads stdin)."""
        # Replace PyYAML's non-printable-character check with a regex that
        # can never match, i.e. disable the check entirely.
        yaml.reader.Reader.NON_PRINTABLE = re.compile(r"(?!x)x")
        try:
            loader = yaml.CLoader
        except AttributeError:
            print(
                "Warning: failed to import yaml.CLoader, falling back to slow yaml.Loader"
            )
            loader = yaml.Loader
        try:
            if file != "-":
                with open(file) as fin:
                    data = yaml.load(fin, Loader=loader)
            else:
                data = yaml.load(sys.stdin, Loader=loader)
        except (UnicodeEncodeError, yaml.reader.ReaderError) as e:
            print(
                "\nInvalid unicode in the input: %s\nPlease filter it through "
                "fix_yaml_unicode.py" % e
            )
            sys.exit(1)
        if data is None:
            print("\nNo data has been read - has Hercules crashed?")
            sys.exit(1)
        self.data = data

    def get_run_times(self):
        # The YAML output carries no per-item timing information.
        return {}

    def get_name(self):
        return self.data["hercules"]["repository"]

    def get_header(self):
        header = self.data["hercules"]
        return header["begin_unix_time"], header["end_unix_time"]

    def get_burndown_parameters(self):
        header = self.data["Burndown"]
        return header["sampling"], header["granularity"], header["tick_size"]

    def get_project_burndown(self):
        return (
            self.data["hercules"]["repository"],
            self._parse_burndown_matrix(self.data["Burndown"]["project"]).T,
        )

    def get_files_burndown(self):
        return [
            (p[0], self._parse_burndown_matrix(p[1]).T)
            for p in self.data["Burndown"]["files"].items()
        ]

    def get_people_burndown(self):
        return [
            (p[0], self._parse_burndown_matrix(p[1]).T)
            for p in self.data["Burndown"]["people"].items()
        ]

    def get_ownership_burndown(self):
        # (ordered people names, {name: per-person burndown matrix}).
        return (
            self.data["Burndown"]["people_sequence"].copy(),
            {
                p[0]: self._parse_burndown_matrix(p[1])
                for p in self.data["Burndown"]["people"].items()
            },
        )

    def get_people_interaction(self):
        return (
            self.data["Burndown"]["people_sequence"].copy(),
            self._parse_burndown_matrix(self.data["Burndown"]["people_interaction"]),
        )

    def get_files_coocc(self):
        coocc = self.data["Couples"]["files_coocc"]
        return coocc["index"], self._parse_coocc_matrix(coocc["matrix"])

    def get_people_coocc(self):
        coocc = self.data["Couples"]["people_coocc"]
        return coocc["index"], self._parse_coocc_matrix(coocc["matrix"])

    def get_shotness_coocc(self):
        """Build a CSR co-occurrence matrix from the shotness counters."""
        shotness = self.data["Shotness"]
        index = ["%s:%s" % (i["file"], i["name"]) for i in shotness]
        indptr = numpy.zeros(len(shotness) + 1, dtype=numpy.int64)
        indices = []
        data = []
        for i, record in enumerate(shotness):
            # CSR requires sorted column indices within each row.
            pairs = [(int(k), v) for k, v in record["counters"].items()]
            pairs.sort()
            indptr[i + 1] = indptr[i] + len(pairs)
            for k, v in pairs:
                indices.append(k)
                data.append(v)
        indices = numpy.array(indices, dtype=numpy.int32)
        data = numpy.array(data, dtype=numpy.int32)
        from scipy.sparse import csr_matrix

        return index, csr_matrix((data, indices, indptr), shape=(len(shotness),) * 2)

    def get_shotness(self):
        from munch import munchify

        obj = munchify(self.data["Shotness"])
        # turn strings into ints
        for item in obj:
            item.counters = {int(k): v for k, v in item.counters.items()}
        if len(obj) == 0:
            # Raised so callers can treat "no shotness data" like a missing key.
            raise KeyError
        return obj

    def get_sentiment(self):
        from munch import munchify

        # Each YAML value is (value, commits, "|"-joined comments).
        return munchify(
            {
                int(key): {
                    "Comments": vals[2].split("|"),
                    "Commits": vals[1],
                    "Value": float(vals[0]),
                }
                for key, vals in self.data["Sentiment"].items()
            }
        )

    def get_devs(self):
        people = self.data["Devs"]["people"]
        # Each per-dev day record is int counters followed by a trailing
        # per-language mapping (presumably -- matches DevDay's fields).
        days = {
            int(d): {
                int(dev): DevDay(*(int(x) for x in day[:-1]), day[-1])
                for dev, day in devs.items()
            }
            for d, devs in self.data["Devs"]["ticks"].items()
        }
        return people, days

    def _parse_burndown_matrix(self, matrix):
        """Parse a newline/space separated matrix string into an int ndarray."""
        # NOTE(review): numpy.fromstring is deprecated in favor of
        # numpy.fromiter / numpy.array(line.split()) -- consider migrating.
        return numpy.array(
            [numpy.fromstring(line, dtype=int, sep=" ") for line in matrix.split("\n")]
        )

    def _parse_coocc_matrix(self, matrix):
        """Convert a list of {col: value} row dicts into a CSR matrix."""
        from scipy.sparse import csr_matrix

        data = []
        indices = []
        indptr = [0]
        for row in matrix:
            for k, v in sorted(row.items()):
                data.append(v)
                indices.append(k)
            indptr.append(indptr[-1] + len(row))
        return csr_matrix((data, indices, indptr), shape=(len(matrix),) * 2)
class ProtobufReader(Reader):
    """Reads Hercules analysis results from the protobuf output format."""

    def read(self, file: str) -> None:
        """Parse the serialized AnalysisResults from `file` ("-" = stdin)."""
        try:
            from labours.pb_pb2 import AnalysisResults
        except ImportError as e:
            print(
                "\n\n>>> You need to generate python/hercules/pb/pb_pb2.py - run \"make\"\n",
                file=sys.stderr,
            )
            raise e from None
        self.data = AnalysisResults()
        # NOTE(review): `bytes` shadows the builtin of the same name.
        if file != "-":
            with open(file, "rb") as fin:
                bytes = fin.read()
        else:
            bytes = sys.stdin.buffer.read()
        if not bytes:
            raise ValueError("empty input")
        self.data.ParseFromString(bytes)
        # Lazily decode each section payload with the message class named
        # in PB_MESSAGES; unknown sections are warned about and skipped.
        self.contents = {}
        for key, val in self.data.contents.items():
            try:
                mod, name = PB_MESSAGES[key].rsplit(".", 1)
            except KeyError:
                sys.stderr.write(
                    "Warning: there is no registered PB decoder for %s\n" % key
                )
                continue
            cls = getattr(import_module(mod), name)
            self.contents[key] = msg = cls()
            msg.ParseFromString(val)

    def get_run_times(self):
        return {key: val for key, val in self.data.header.run_time_per_item.items()}

    def get_name(self) -> str:
        return self.data.header.repository

    def get_header(self) -> Tuple[int, int]:
        header = self.data.header
        return header.begin_unix_time, header.end_unix_time

    def get_burndown_parameters(self) -> Tuple[int, int, float]:
        burndown = self.contents["Burndown"]
        # tick_size is divided by 1e9 -- presumably nanoseconds to seconds.
        return burndown.sampling, burndown.granularity, burndown.tick_size / 1000000000

    def get_project_burndown(self) -> Tuple[str, numpy.ndarray]:
        return self._parse_burndown_matrix(self.contents["Burndown"].project)

    def get_files_burndown(self):
        return [self._parse_burndown_matrix(i) for i in self.contents["Burndown"].files]

    def get_people_burndown(self) -> List[Any]:
        return [
            self._parse_burndown_matrix(i) for i in self.contents["Burndown"].people
        ]

    def get_ownership_burndown(self) -> Tuple[List[Any], Dict[Any, Any]]:
        people = self.get_people_burndown()
        return [p[0] for p in people], {p[0]: p[1].T for p in people}

    def get_people_interaction(self):
        burndown = self.contents["Burndown"]
        return (
            [i.name for i in burndown.people],
            self._parse_sparse_matrix(burndown.people_interaction).toarray(),
        )

    def get_files_coocc(self) -> Tuple[List[str], 'csr_matrix']:
        node = self.contents["Couples"].file_couples
        return list(node.index), self._parse_sparse_matrix(node.matrix)

    def get_people_coocc(self) -> Tuple[List[str], 'csr_matrix']:
        node = self.contents["Couples"].people_couples
        return list(node.index), self._parse_sparse_matrix(node.matrix)

    def get_shotness_coocc(self):
        """Build a CSR co-occurrence matrix from the shotness records."""
        shotness = self.get_shotness()
        index = ["%s:%s" % (i.file, i.name) for i in shotness]
        indptr = numpy.zeros(len(shotness) + 1, dtype=numpy.int32)
        indices = []
        data = []
        for i, record in enumerate(shotness):
            # CSR requires sorted column indices within each row.
            pairs = list(record.counters.items())
            pairs.sort()
            indptr[i + 1] = indptr[i] + len(pairs)
            for k, v in pairs:
                indices.append(k)
                data.append(v)
        indices = numpy.array(indices, dtype=numpy.int32)
        data = numpy.array(data, dtype=numpy.int32)
        from scipy.sparse import csr_matrix

        return index, csr_matrix((data, indices, indptr), shape=(len(shotness),) * 2)

    def get_shotness(self):
        records = self.contents["Shotness"].records
        if len(records) == 0:
            # Raised so callers can treat "no shotness data" like a missing key.
            raise KeyError
        return records

    def get_sentiment(self):
        byday = self.contents["Sentiment"].SentimentByDay
        if len(byday) == 0:
            raise KeyError
        return byday

    def get_devs(self) -> Tuple[List[str], Dict[int, Dict[int, DevDay]]]:
        people = list(self.contents["Devs"].dev_index)
        days = {
            d: {
                dev: DevDay(
                    stats.commits,
                    stats.stats.added,
                    stats.stats.removed,
                    stats.stats.changed,
                    {
                        k: [v.added, v.removed, v.changed]
                        for k, v in stats.languages.items()
                    },
                )
                for dev, stats in day.devs.items()
            }
            for d, day in self.contents["Devs"].ticks.items()
        }
        return people, days

    def _parse_burndown_matrix(self, matrix):
        """Densify a protobuf burndown matrix; returns (name, transposed array)."""
        dense = numpy.zeros(
            (matrix.number_of_rows, matrix.number_of_columns), dtype=int
        )
        for y, row in enumerate(matrix.rows):
            for x, col in enumerate(row.columns):
                dense[y, x] = col
        return matrix.name, dense.T

    def _parse_sparse_matrix(self, matrix):
        """Wrap a protobuf CSR triple (data, indices, indptr) in a csr_matrix."""
        from scipy.sparse import csr_matrix

        return csr_matrix(
            (list(matrix.data), list(matrix.indices), list(matrix.indptr)),
            shape=(matrix.number_of_rows, matrix.number_of_columns),
        )
# Maps the input-format name (file extension / CLI value) to the Reader class.
READERS = {"yaml": YamlReader, "yml": YamlReader, "pb": ProtobufReader}

# Maps a protobuf `contents` section key to the dotted path of the message
# class that decodes its payload (imported lazily in ProtobufReader.read).
PB_MESSAGES = {
    "Burndown": "labours.pb_pb2.BurndownAnalysisResults",
    "Couples": "labours.pb_pb2.CouplesAnalysisResults",
    "Shotness": "labours.pb_pb2.ShotnessAnalysisResults",
    "Devs": "labours.pb_pb2.DevsAnalysisResults",
}
def read_input(args: Namespace) -> ProtobufReader:
    """Instantiate the right Reader for ``args.input`` and load it.

    When ``args.input_format`` is "auto" it is resolved in place: from the
    file extension when there is one, otherwise by probing whether the file
    decodes as text (yaml) or not (pb); stdin ("-") defaults to yaml.
    """
    sys.stdout.write("Reading the input... ")
    sys.stdout.flush()
    if args.input_format == "auto":
        if args.input == "-":
            args.input_format = "yaml"
        else:
            name_parts = args.input.rsplit(".", 1)
            if len(name_parts) == 2:
                args.input_format = name_parts[1]
            else:
                # No extension: sniff the content by trying to read it as text.
                try:
                    with open(args.input) as probe:
                        probe.read(1 << 16)
                    args.input_format = "yaml"
                except UnicodeDecodeError:
                    args.input_format = "pb"
    reader = READERS[args.input_format]()
    reader.read(args.input)
    print("done")
    return reader
| 32.412214 | 93 | 0.570105 |
ace1db2b8a4a12aed6f386b8476eccff64b247e8 | 5,314 | py | Python | src/m9_using_objects.py | wenningr/03-AccumulatorsAndFunctionsWithParameters | f55307822d4ba9b3f87e1b2a32e512955e38d1d0 | [
"MIT"
] | null | null | null | src/m9_using_objects.py | wenningr/03-AccumulatorsAndFunctionsWithParameters | f55307822d4ba9b3f87e1b2a32e512955e38d1d0 | [
"MIT"
] | null | null | null | src/m9_using_objects.py | wenningr/03-AccumulatorsAndFunctionsWithParameters | f55307822d4ba9b3f87e1b2a32e512955e38d1d0 | [
"MIT"
] | null | null | null | """
This module lets you practice ** using objects **, including:
-- CONSTRUCTING objects,
-- applying METHODS to them, and
-- accessing their DATA via INSTANCE VARIABLES
Authors: David Mutchler, Vibha Alangar, Matt Boutell, Dave Fisher, Mark Hays,
Aaron Wilkin, their colleagues, and Greg Wenning.
""" # DONE: 1. PUT YOUR NAME IN THE ABOVE LINE.
import rosegraphics as rg
def main():
    """ Calls the other functions to demonstrate and/or test them. """
    # The docstring above used to sit AFTER the calls, where it was a dead
    # string expression instead of a real docstring; it belongs first.
    # Test your functions by putting calls to them here:
    two_circles()
    circle_and_rectangle()
    lines()
def two_circles():
    """
    Opens a 400x400 rg.RoseWindow and draws two circles on it:
    a small circle (radius 10) with a green outline but no fill,
    and a larger circle (radius 20) filled with green.
    Waits for a mouse click, then closes the window.
    """
    window = rg.RoseWindow(400, 400)

    outlined_circle = rg.Circle(rg.Point(150, 150), 10)
    outlined_circle.outline_color = 'green'
    outlined_circle.attach_to(window)

    filled_circle = rg.Circle(rg.Point(250, 250), 20)
    filled_circle.fill_color = 'green'
    filled_circle.attach_to(window)

    window.render()
    window.close_on_mouse_click()
def circle_and_rectangle():
    """
    -- Constructs an rg.RoseWindow.
    -- Constructs and draws a rg.Circle and rg.Rectangle
         on the window such that:
          -- They fit in the window and are easily visible.
          -- The rg.Circle is filled with 'blue'
    -- Prints (on the console, on SEPARATE lines) the following data
         associated with your rg.Circle:
          -- Its outline thickness.
          -- Its fill color.
          -- Its center.
          -- Its center's x coordinate.
          -- Its center's y coordinate.
    -- Prints (on the console, on SEPARATE lines) the same data
         but for your rg.Rectangle.
    -- Waits for the user to press the mouse, then closes the window.
    """
    # Fix: the previous version printed hard-coded literal strings
    # (e.g. print('Point(150,150)')) instead of reading the objects'
    # actual instance variables, which is what the spec above requires.
    window = rg.RoseWindow(400, 400)

    circle = rg.Circle(rg.Point(150, 150), 10)
    circle.fill_color = 'blue'
    circle.attach_to(window)

    rectangle = rg.Rectangle(rg.Point(250, 250), rg.Point(200, 200))
    rectangle.fill_color = 'blue'
    rectangle.attach_to(window)
    window.render()

    # The circle's data, each item on its own line.
    print(circle.outline_thickness)
    print(circle.fill_color)
    print(circle.center)
    print(circle.center.x)
    print(circle.center.y)

    # The same data for the rectangle.  Rectangles store two corners, so
    # the center comes from get_center() -- per the rosegraphics API.
    print(rectangle.outline_thickness)
    print(rectangle.fill_color)
    print(rectangle.get_center())
    print(rectangle.get_center().x)
    print(rectangle.get_center().y)

    window.close_on_mouse_click()
def lines():
    """
    -- Constructs a rg.RoseWindow.
    -- Constructs and draws on the window two rg.Lines such that:
       -- They both fit in the window and are easily visible.
       -- One rg.Line has the default thickness.
       -- The other rg.Line is thicker (i.e., has a bigger width).
    -- Uses a rg.Line method to get the midpoint (center) of the
         thicker rg.Line.
    -- Then prints (on the console, on SEPARATE lines):
         -- the midpoint itself
         -- the x-coordinate of the midpoint
         -- the y-coordinate of the midpoint
    -- Waits for the user to press the mouse, then closes the window.
    """
    # Fix: the previous version printed hard-coded strings (including the
    # typo'd 'Point(225,225' with an unbalanced paren) instead of calling
    # the rg.Line midpoint method required by the spec above.
    window = rg.RoseWindow(400, 400)

    thin_line = rg.Line(rg.Point(150, 150), rg.Point(100, 100))
    thin_line.color = 'blue'
    thin_line.attach_to(window)

    thick_line = rg.Line(rg.Point(250, 250), rg.Point(200, 200))
    thick_line.thickness = 8
    thick_line.color = 'blue'
    thick_line.attach_to(window)
    window.render()

    # get_midpoint() per the rosegraphics Line API -- TODO confirm name.
    midpoint = thick_line.get_midpoint()
    print(midpoint)
    print(midpoint.x)
    print(midpoint.y)

    window.close_on_mouse_click()
# -----------------------------------------------------------------------------
# Calls main to start the ball rolling.
# (This module runs its demo immediately when executed, per the convention
#  used by these course exercise files.)
# -----------------------------------------------------------------------------
main()
| 34.960526 | 79 | 0.566052 |
ace1dc01d5bd2fbb7c4c401122df665c9ad8628d | 1,914 | py | Python | tests/fpu/test_raw_fpu_floor_nan_single.py | capt-hb/cheritest | d3b3637a81a0005ee7272eca0f33a9f9911fdb32 | [
"Apache-2.0"
] | null | null | null | tests/fpu/test_raw_fpu_floor_nan_single.py | capt-hb/cheritest | d3b3637a81a0005ee7272eca0f33a9f9911fdb32 | [
"Apache-2.0"
] | 2 | 2020-06-02T13:44:55.000Z | 2020-06-02T14:06:29.000Z | tests/fpu/test_raw_fpu_floor_nan_single.py | capt-hb/cheritest | d3b3637a81a0005ee7272eca0f33a9f9911fdb32 | [
"Apache-2.0"
] | null | null | null | #-
# Copyright (c) 2015 Michael Roe
# All rights reserved.
#
# This software was developed by the University of Cambridge Computer
# Laboratory as part of the Rigorous Engineering of Mainstream Systems (REMS)
# project, funded by EPSRC grant EP/K008528/1.
#
# @BERI_LICENSE_HEADER_START@
#
# Licensed to BERI Open Systems C.I.C. (BERI) under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. BERI licenses this
# file to you under the BERI Hardware-Software License, Version 1.0 (the
# "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at:
#
# http://www.beri-open-systems.org/legal/license-1-0.txt
#
# Unless required by applicable law or agreed to in writing, Work distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# @BERI_LICENSE_HEADER_END@
#
from beritest_tools import BaseBERITestCase
from beritest_tools import attr
class test_raw_fpu_floor_nan_single(BaseBERITestCase):
    """Checks FLOOR.W.S on single-precision inputs that have no
    representable integer result (QNaN, +Infinity, 2^32): the FPU is
    expected to return MAXINT (0x7fffffff) in every case.  The register
    values are presumably produced by the matching assembly test of the
    same name -- confirm against the test harness conventions."""

    @attr('float_round_maxint')
    def test_raw_fpu_floor_nan_single_1(self):
        '''Test FLOOR.W.S of QNan'''
        self.assertRegisterEqual(self.MIPS.a0, 0x7fffffff, "FLOOR.W.S of QNaN did not return MAXINT")

    @attr('float_round_maxint')
    def test_raw_fpu_floor_nan_single_2(self):
        '''Test FLOOR.W.S of +Inf'''
        self.assertRegisterEqual(self.MIPS.a1, 0x7fffffff, "FLOOR.W.S of +Infinity did not return MAXINT")

    @attr('float_round_maxint')
    def test_raw_fpu_floor_nan_single_3(self):
        '''Test FLOOR.W.S of 2^32'''
        self.assertRegisterEqual(self.MIPS.a2, 0x7fffffff, "FLOOR.W.S of 2^32 did not return MAXINT")
ace1dcf2d23cec3b2ebf6c8a8cbb8860504078b4 | 23,656 | py | Python | zipline/data/bundles/core.py | lookis/zipline-trader | 97bfe2f5b052e2bdc9daf08f12413066daa71c12 | [
"Apache-2.0"
] | null | null | null | zipline/data/bundles/core.py | lookis/zipline-trader | 97bfe2f5b052e2bdc9daf08f12413066daa71c12 | [
"Apache-2.0"
] | null | null | null | zipline/data/bundles/core.py | lookis/zipline-trader | 97bfe2f5b052e2bdc9daf08f12413066daa71c12 | [
"Apache-2.0"
] | null | null | null | from collections import namedtuple
import errno
import os
import shutil
import warnings
import click
from logbook import Logger
import pandas as pd
from trading_calendars import get_calendar
from toolz import curry, complement, take
from ..adjustments import SQLiteAdjustmentReader, SQLiteAdjustmentWriter
from ..bcolz_daily_bars import BcolzDailyBarReader, BcolzDailyBarWriter
from ..minute_bars import (
BcolzMinuteBarReader,
BcolzMinuteBarWriter,
)
from ..psql_daily_bars import PSQLDailyBarReader, PSQLDailyBarWriter
from zipline.assets import (
AssetDBWriter,
AssetFinder,
ASSET_DB_VERSION,
)
from zipline.assets.asset_db_migrations import downgrade
from zipline.utils.cache import (
dataframe_cache,
working_dir,
working_file,
)
from zipline.utils.compat import ExitStack, mappingproxy
from zipline.utils.input_validation import ensure_timestamp, optionally
import zipline.utils.paths as pth
from zipline.utils.preprocess import preprocess
from sqlalchemy.exc import InvalidRequestError
log = Logger(__name__)
def asset_db_path(bundle_name, timestr, environ=None, db_version=None):
    """Absolute path of the assets database for one ingestion of a bundle."""
    relative_parts = asset_db_relative(bundle_name, timestr, db_version)
    return pth.data_path(relative_parts, environ=environ)
def minute_equity_path(bundle_name, timestr, environ=None):
    """Absolute path of the minute-bar store for one ingestion of a bundle."""
    relative_parts = minute_equity_relative(bundle_name, timestr)
    return pth.data_path(relative_parts, environ=environ)
def daily_equity_path(bundle_name, timestr, environ=None):
    """Absolute path of the daily-bar store for one ingestion of a bundle."""
    relative_parts = daily_equity_relative(bundle_name, timestr)
    return pth.data_path(relative_parts, environ=environ)
def adjustment_db_path(bundle_name, timestr, environ=None):
    """Absolute path of the adjustments db for one ingestion of a bundle."""
    relative_parts = adjustment_db_relative(bundle_name, timestr)
    return pth.data_path(relative_parts, environ=environ)
def cache_path(bundle_name, environ=None):
    """Absolute path of the ingestion cache directory for a bundle."""
    relative_parts = cache_relative(bundle_name)
    return pth.data_path(relative_parts, environ=environ)
def adjustment_db_relative(bundle_name, timestr):
    """Path components, relative to the zipline data root, of the
    adjustments db for one ingestion of ``bundle_name``."""
    return (bundle_name, timestr, 'adjustments.sqlite')
def cache_relative(bundle_name):
    """Path components, relative to the zipline data root, of the
    ingestion cache directory for ``bundle_name``."""
    return (bundle_name, '.cache')
def daily_equity_relative(bundle_name, timestr):
    """Path components, relative to the zipline data root, of the
    daily-bar bcolz store for one ingestion of ``bundle_name``."""
    return (bundle_name, timestr, 'daily_equities.bcolz')
def minute_equity_relative(bundle_name, timestr):
    """Path components, relative to the zipline data root, of the
    minute-bar bcolz store for one ingestion of ``bundle_name``."""
    return (bundle_name, timestr, 'minute_equities.bcolz')
def asset_db_relative(bundle_name, timestr, db_version=None):
    """Path components, relative to the zipline data root, of the assets
    db for one ingestion; ``db_version`` defaults to ASSET_DB_VERSION."""
    if db_version is None:
        db_version = ASSET_DB_VERSION
    return bundle_name, timestr, 'assets-%d.sqlite' % db_version
def external_db_path(bundle_name, environ):
    """Return the SQLAlchemy URL of an externally-configured db backend.

    Returns None when no external backend is configured.  For a configured
    'postgres' backend, builds a postgresql:// URL whose database name is
    the bundle name.  Raises Exception for any other configured backend.
    """
    import zipline.config.data_backend

    # Call db_backend_configured() once and reuse the result; the previous
    # version called it twice and, on the unsupported-backend path, re-read
    # environ['ZIPLINE_DATA_BACKEND'] -- which raises KeyError (masking the
    # intended message) when the backend was configured outside the env.
    backend = zipline.config.data_backend.db_backend_configured()
    if not backend:
        return None
    if backend != 'postgres':
        raise Exception(f'Backend {backend} currently not supported')

    db = zipline.config.data_backend.PostgresDB()
    user_pwd_str = f'{db.user}:{db.password}@' if db.user != '' else ''
    host_port_str = f'{db.host}:{db.port}' if db.port != '' else f'{db.host}'
    # we assume bundle-name as database-name
    return f'postgresql://{user_pwd_str}{host_port_str}/{bundle_name}'
def to_bundle_ingest_dirname(ts):
    """Convert a pandas Timestamp into the name of the directory for the
    ingestion.

    Colons are not universally valid in directory names, so the ':'
    characters of the ISO-8601 form are swapped for ';'.

    Parameters
    ----------
    ts : pandas.Timestamp
        The time of the ingestions

    Returns
    -------
    name : str
        The name of the directory for this ingestion.
    """
    iso_form = ts.isoformat()
    return iso_form.replace(':', ';')
def from_bundle_ingest_dirname(cs):
    """Read a bundle ingestion directory name into a pandas Timestamp.

    Inverse of :func:`to_bundle_ingest_dirname`: the ';' characters are
    restored to ':' before parsing.

    Parameters
    ----------
    cs : str
        The name of the directory.

    Returns
    -------
    ts : pandas.Timestamp
        The time when this ingestion happened.
    """
    restored = cs.replace(';', ':')
    return pd.Timestamp(restored)
def ingestions_for_bundle(bundle, environ=None):
    """Return the ingestion timestamps for ``bundle``, newest first.

    Hidden entries in the bundle directory (per ``pth.hidden``) are ignored.

    Parameters
    ----------
    bundle : str
        The name of the bundle.
    environ : dict, optional
        An environment dict forwarded to the path helpers.

    Returns
    -------
    list[pandas.Timestamp]
        Ingestion times, sorted descending.
    """
    # Fix: removed a leftover debug `print(os.listdir(...))` that spammed
    # stdout and listed the directory a second time.
    ingestion_dir = pth.data_path([bundle], environ)
    return sorted(
        (from_bundle_ingest_dirname(ing)
         for ing in os.listdir(ingestion_dir)
         if not pth.hidden(ing)),
        reverse=True,
    )
# Registration record for a bundle: the calendar and session bounds to
# ingest for, the ingest callable itself, and whether the machinery should
# construct the writers on the ingest function's behalf.
RegisteredBundle = namedtuple(
    'RegisteredBundle',
    ['calendar_name',
     'start_session',
     'end_session',
     'minutes_per_day',
     'ingest',
     'create_writers']
)

# The readers handed back to consumers of a previously-ingested bundle.
BundleData = namedtuple(
    'BundleData',
    'asset_finder equity_minute_bar_reader equity_daily_bar_reader '
    'adjustment_reader',
)

# The public bundle API surface produced by _make_bundle_core().
BundleCore = namedtuple(
    'BundleCore',
    'bundles register unregister ingest load clean',
)
class UnknownBundle(click.ClickException, LookupError):
    """Raised if no bundle with the given name was registered.
    """
    # Non-zero exit status reported when this bubbles up through the CLI.
    exit_code = 1

    def __init__(self, name):
        message = f'No bundle registered with the name {name!r}'
        super(UnknownBundle, self).__init__(message)
        self.name = name

    def __str__(self):
        return self.message
class BadClean(click.ClickException, ValueError):
    """Exception indicating that an invalid argument set was passed to
    ``clean``.

    Parameters
    ----------
    before, after, keep_last : any
        The bad arguments to ``clean``.

    See Also
    --------
    clean
    """
    def __init__(self, before, after, keep_last):
        message = (
            'Cannot pass a combination of `before` and `after` with '
            '`keep_last`. Must pass one. '
            f'Got: before={before!r}, after={after!r}, keep_last={keep_last!r}\n'
        )
        super(BadClean, self).__init__(message)

    def __str__(self):
        return self.message
def _make_bundle_core():
"""Create a family of data bundle functions that read from the same
bundle mapping.
Returns
-------
bundles : mappingproxy
The mapping of bundles to bundle payloads.
register : callable
The function which registers new bundles in the ``bundles`` mapping.
unregister : callable
The function which deregisters bundles from the ``bundles`` mapping.
ingest : callable
The function which downloads and write data for a given data bundle.
load : callable
The function which loads the ingested bundles back into memory.
clean : callable
The function which cleans up data written with ``ingest``.
"""
_bundles = {} # the registered bundles
# Expose _bundles through a proxy so that users cannot mutate this
# accidentally. Users may go through `register` to update this which will
# warn when trampling another bundle.
bundles = mappingproxy(_bundles)
@curry
def register(name,
f,
calendar_name='NYSE',
start_session=None,
end_session=None,
minutes_per_day=390,
create_writers=True):
"""Register a data bundle ingest function.
Parameters
----------
name : str
The name of the bundle.
f : callable
The ingest function. This function will be passed:
environ : mapping
The environment this is being run with.
asset_db_writer : AssetDBWriter
The asset db writer to write into.
minute_bar_writer : BcolzMinuteBarWriter
The minute bar writer to write into.
daily_bar_writer : BcolzDailyBarWriter
The daily bar writer to write into.
adjustment_writer : SQLiteAdjustmentWriter
The adjustment db writer to write into.
calendar : trading_calendars.TradingCalendar
The trading calendar to ingest for.
start_session : pd.Timestamp
The first session of data to ingest.
end_session : pd.Timestamp
The last session of data to ingest.
cache : DataFrameCache
A mapping object to temporarily store dataframes.
This should be used to cache intermediates in case the load
fails. This will be automatically cleaned up after a
successful load.
show_progress : bool
Show the progress for the current load where possible.
calendar_name : str, optional
The name of a calendar used to align bundle data.
Default is 'NYSE'.
start_session : pd.Timestamp, optional
The first session for which we want data. If not provided,
or if the date lies outside the range supported by the
calendar, the first_session of the calendar is used.
end_session : pd.Timestamp, optional
The last session for which we want data. If not provided,
or if the date lies outside the range supported by the
calendar, the last_session of the calendar is used.
minutes_per_day : int, optional
The number of minutes in each normal trading day.
create_writers : bool, optional
Should the ingest machinery create the writers for the ingest
function. This can be disabled as an optimization for cases where
they are not needed, like the ``quantopian-quandl`` bundle.
Notes
-----
This function my be used as a decorator, for example:
.. code-block:: python
@register('quandl')
def quandl_ingest_function(...):
...
See Also
--------
zipline.data.bundles.bundles
"""
if name in bundles:
warnings.warn(
'Overwriting bundle with name %r' % name,
stacklevel=3,
)
# NOTE: We don't eagerly compute calendar values here because
# `register` is called at module scope in zipline, and creating a
# calendar currently takes between 0.5 and 1 seconds, which causes a
# noticeable delay on the zipline CLI.
_bundles[name] = RegisteredBundle(
calendar_name=calendar_name,
start_session=start_session,
end_session=end_session,
minutes_per_day=minutes_per_day,
ingest=f,
create_writers=create_writers,
)
return f
def unregister(name):
"""Unregister a bundle.
Parameters
----------
name : str
The name of the bundle to unregister.
Raises
------
UnknownBundle
Raised when no bundle has been registered with the given name.
See Also
--------
zipline.data.bundles.bundles
"""
try:
del _bundles[name]
except KeyError:
raise UnknownBundle(name)
def ingest(name,
environ=os.environ,
timestamp=None,
assets_versions=(),
show_progress=False):
"""Ingest data for a given bundle.
Parameters
----------
name : str
The name of the bundle.
environ : mapping, optional
The environment variables. By default this is os.environ.
timestamp : datetime, optional
The timestamp to use for the load.
By default this is the current time.
assets_versions : Iterable[int], optional
Versions of the assets db to which to downgrade.
show_progress : bool, optional
Tell the ingest function to display the progress where possible.
"""
try:
bundle = bundles[name]
except KeyError:
raise UnknownBundle(name)
calendar = get_calendar(bundle.calendar_name)
start_session = bundle.start_session
end_session = bundle.end_session
if start_session is None or start_session < calendar.first_session:
start_session = calendar.first_session
if end_session is None or end_session > calendar.last_session:
end_session = calendar.last_session
if timestamp is None:
timestamp = pd.Timestamp.utcnow()
timestamp = timestamp.tz_convert('utc').tz_localize(None)
timestr = to_bundle_ingest_dirname(timestamp)
cachepath = cache_path(name, environ=environ)
pth.ensure_directory(cachepath)
# depending on the environment we might want to get a path to
# an external postgres-db instead of one to a local sqlite-db
# also, we need an asset-finder in case we have an external db
# to make it possible to get ids for asset-symbols
db_path_external = external_db_path(name, environ)
# needs to be checkout outside of 'with' in case create_writers is false
# only 'sqlite-bcolz'-backend needs to ensure local folders
if not db_path_external:
pth.ensure_directory(pth.data_path([name, timestr], environ=environ))
with dataframe_cache(cachepath, clean_on_failure=False) as cache, \
ExitStack() as stack:
# we use `cleanup_on_failure=False` so that we don't purge the
# cache directory if the load fails in the middle
if bundle.create_writers:
wd = stack.enter_context(working_dir(
pth.data_path([], environ=environ))
)
asset_finder = None
if db_path_external:
assets_db_path = adjustments_db_path = daily_bar_writer = db_path_external
daily_bar_writer = PSQLDailyBarWriter(
db_path_external,
calendar,
start_session,
end_session,
)
daily_bar_reader = PSQLDailyBarReader(name, db_path_external)
minute_bar_writer = None
try:
asset_finder = AssetFinder(db_path_external)
except InvalidRequestError:
asset_finder = None
else:
pth.ensure_directory(pth.data_path([name, timestr], environ=environ))
assets_db_path = wd.getpath(*asset_db_relative(name, timestr))
adjustments_db_path = adjustment_db_path(name, timestr)
adjustments_db_path = wd.getpath(*adjustment_db_relative(name, timestr))
daily_bars_path = wd.ensure_dir(
*daily_equity_relative(name, timestr)
)
daily_bar_writer = BcolzDailyBarWriter(
daily_bars_path,
calendar,
start_session,
end_session,
)
daily_bar_reader = BcolzDailyBarReader(daily_bars_path)
minute_bar_writer = BcolzMinuteBarWriter(
wd.ensure_dir(*minute_equity_relative(name, timestr)),
calendar,
start_session,
end_session,
minutes_per_day=bundle.minutes_per_day,
)
# Do an empty write to ensure that the daily ctables exist
# when we create the SQLiteAdjustmentWriter below. The
# SQLiteAdjustmentWriter needs to open the daily ctables so
# that it can compute the adjustment ratios for the dividends.
daily_bar_writer.write(())
asset_db_writer = AssetDBWriter(assets_db_path, asset_finder)
adjustment_db_writer = stack.enter_context(
SQLiteAdjustmentWriter(
adjustments_db_path,
daily_bar_reader,
overwrite=True,
)
)
else:
daily_bar_writer = None
minute_bar_writer = None
asset_db_writer = None
adjustment_db_writer = None
if assets_versions:
raise ValueError('Need to ingest a bundle that creates '
'writers in order to downgrade the assets'
' db.')
log.info("Ingesting {}.", name)
bundle.ingest(
environ,
asset_db_writer,
minute_bar_writer,
daily_bar_writer,
adjustment_db_writer,
calendar,
start_session,
end_session,
cache,
show_progress,
pth.data_path([name, timestr], environ=environ),
)
for version in sorted(set(assets_versions), reverse=True):
version_path = wd.getpath(*asset_db_relative(
name, timestr, db_version=version,
))
with working_file(version_path) as wf:
shutil.copy2(assets_db_path, wf.path)
downgrade(wf.path, version)
def most_recent_data(bundle_name, timestamp, environ=None):
    """Return the path of the most recent ingestion for the given bundle.

    Parameters
    ----------
    bundle_name : str
        The name of the bundle to lookup.
    timestamp : datetime
        The timestamp to begin searching on or before. Note: in the
        visible implementation this is only interpolated into the error
        message; the lookup itself returns the newest ingestion.
    environ : dict, optional
        An environment dict to forward to zipline_root.
    """
    if bundle_name not in bundles:
        raise UnknownBundle(bundle_name)
    try:
        bundle_root = pth.data_path([bundle_name], environ=environ)
        visible_runs = filter(complement(pth.hidden), os.listdir(bundle_root))
        # ``max`` raises ValueError when no visible ingestion exists; that
        # is translated into the friendlier error below.
        newest = max(visible_runs, key=from_bundle_ingest_dirname)
        return pth.data_path([bundle_name, newest], environ=environ)
    except (ValueError, OSError) as err:
        if getattr(err, 'errno', errno.ENOENT) != errno.ENOENT:
            raise
        raise ValueError(
            'no data for bundle {bundle!r} on or before {timestamp}\n'
            'maybe you need to run: $ zipline ingest -b {bundle}'.format(
                bundle=bundle_name,
                timestamp=timestamp,
            ),
        )
def load(name, environ=os.environ, timestamp=None):
    """Load a previously ingested bundle.

    Parameters
    ----------
    name : str
        The name of the bundle.
    environ : mapping, optional
        The environment variables. Defaults of os.environ.
    timestamp : datetime, optional
        The timestamp of the data to lookup. Defaults to the current time.

    Returns
    -------
    bundle_data : BundleData
        The raw data readers for this bundle.
    """
    if timestamp is None:
        timestamp = pd.Timestamp.utcnow()

    db_path_external = external_db_path(name, environ)
    if db_path_external:
        # One external database path serves both the assets and the
        # adjustments readers; there is no minute-bar reader in this mode.
        assets_db_path = adjustments_db_path = db_path_external
        daily_bar_reader = PSQLDailyBarReader(name, db_path_external)
        minute_bar_reader = None
    else:
        # Read from the newest on-disk ingestion for this bundle.
        timestr = most_recent_data(name, timestamp, environ=environ)
        assets_db_path = asset_db_path(name, timestr, environ=environ)
        adjustments_db_path = adjustment_db_path(name, timestr, environ=environ)
        daily_bar_reader = BcolzDailyBarReader(
            daily_equity_path(name, timestr, environ=environ),
        )
        minute_bar_reader = BcolzMinuteBarReader(
            minute_equity_path(name, timestr, environ=environ),
        )

    return BundleData(
        asset_finder=AssetFinder(assets_db_path),
        equity_minute_bar_reader=minute_bar_reader,
        equity_daily_bar_reader=daily_bar_reader,
        adjustment_reader=SQLiteAdjustmentReader(adjustments_db_path),
    )
@preprocess(
    before=optionally(ensure_timestamp),
    after=optionally(ensure_timestamp),
)
def clean(name,
          before=None,
          after=None,
          keep_last=None,
          environ=os.environ):
    """Clean up data that was created with ``ingest`` or
    ``$ python -m zipline ingest``

    Parameters
    ----------
    name : str
        The name of the bundle to remove data for.
    before : datetime, optional
        Remove data ingested before this date.
        This argument is mutually exclusive with: keep_last
    after : datetime, optional
        Remove data ingested after this date.
        This argument is mutually exclusive with: keep_last
    keep_last : int, optional
        Remove all but the last ``keep_last`` ingestions.
        This argument is mutually exclusive with:
          before
          after
    environ : mapping, optional
        The environment variables. Defaults of os.environ.

    Returns
    -------
    cleaned : set[str]
        The names of the runs that were removed.

    Raises
    ------
    BadClean
        Raised when ``before`` and or ``after`` are passed with
        ``keep_last``. This is a subclass of ``ValueError``.
    """
    # Collect every visible (non-hidden) ingestion directory for this
    # bundle, ordered oldest-to-newest by its ingestion timestamp.
    try:
        all_runs = sorted(
            filter(
                complement(pth.hidden),
                os.listdir(pth.data_path([name], environ=environ)),
            ),
            key=from_bundle_ingest_dirname,
        )
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise
        # A missing data directory means this bundle was never ingested.
        raise UnknownBundle(name)
    # Exactly one selection mode must be supplied: either a before/after
    # window, or keep_last -- never both, never neither.
    if before is after is keep_last is None:
        raise BadClean(before, after, keep_last)
    if ((before is not None or after is not None) and
            keep_last is not None):
        raise BadClean(before, after, keep_last)

    if keep_last is None:
        # Window mode: remove runs ingested before ``before`` or after
        # ``after``.
        def should_clean(name):
            dt = from_bundle_ingest_dirname(name)
            return (
                (before is not None and dt < before) or
                (after is not None and dt > after)
            )
    elif keep_last >= 0:
        # keep_last mode: keep only the newest ``keep_last`` runs.
        last_n_dts = set(take(keep_last, reversed(all_runs)))

        def should_clean(name):
            return name not in last_n_dts
    else:
        # Negative keep_last is meaningless.
        raise BadClean(before, after, keep_last)

    cleaned = set()
    for run in all_runs:
        if should_clean(run):
            log.info("Cleaning {}.", run)
            path = pth.data_path([name, run], environ=environ)
            shutil.rmtree(path)
            cleaned.add(path)
    return cleaned
return BundleCore(bundles, register, unregister, ingest, load, clean)
# Build the module-level bundle registry and export its public operations.
bundles, register, unregister, ingest, load, clean = _make_bundle_core()
| 33.794286 | 104 | 0.587589 |
ace1dd724e046cfbc9d6462120101568a07ff8cd | 18 | py | Python | datumaro/version.py | jenhaoyang/datumaro | add81ddb59502362fa65fa07e5bc4d8c9f61afde | [
"MIT"
] | 1 | 2015-03-08T23:21:58.000Z | 2015-03-08T23:21:58.000Z | web_interface/suit/__init__.py | luxnovalabs/enjigo_door | fd29fd9bd28c1d7915e4f9ad5e29635f5cbc4601 | [
"Unlicense"
] | 7 | 2017-01-10T15:16:05.000Z | 2019-05-30T22:40:16.000Z | web_interface/suit/__init__.py | luxnovalabs/enjigo_door | fd29fd9bd28c1d7915e4f9ad5e29635f5cbc4601 | [
"Unlicense"
] | 1 | 2017-04-19T18:40:56.000Z | 2017-04-19T18:40:56.000Z | VERSION = '0.2.2'
| 9 | 17 | 0.555556 |
ace1dd8143dc5edf1e70334926c5e51e0bf48891 | 13,221 | py | Python | Scripts/FNN/train_test_model.py | mike72353/FragFeatureNet | ef61ae52e3d6dcc6d2d56df2a6bd5fe1a298c930 | [
"BSD-3-Clause"
] | 1 | 2021-10-13T11:49:37.000Z | 2021-10-13T11:49:37.000Z | Scripts/FNN/train_test_model.py | mike72353/FragFeatureNet | ef61ae52e3d6dcc6d2d56df2a6bd5fe1a298c930 | [
"BSD-3-Clause"
] | null | null | null | Scripts/FNN/train_test_model.py | mike72353/FragFeatureNet | ef61ae52e3d6dcc6d2d56df2a6bd5fe1a298c930 | [
"BSD-3-Clause"
] | 1 | 2021-09-09T04:42:20.000Z | 2021-09-09T04:42:20.000Z | # -*- coding: utf-8 -*-
from __future__ import division
"""
Trains a ResNeXt Model on FEATURE Vectors
"""
__author__ = "Michael Suarez"
__email__ = "masv@connect.ust.hk"
__copyright__ = "Copyright 2019, Hong Kong University of Science and Technology"
__license__ = "3-clause BSD"
import argparse
import os
import json
import pickle
import torch
import torch.nn.functional as F
import torchvision.transforms as transforms
from models.model011 import FeatureResNeXt
import torch.utils.data as utils
import numpy as np
import time
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Trains ResNeXt on FEATURE Vectors', formatter_class= argparse.ArgumentDefaultsHelpFormatter)
# Positional arguments
parser.add_argument('features_data_path', type=str, help='Root for Features Dict.')
parser.add_argument('data_folder', type=str, help='Root for Data.')
# Optimization options
parser.add_argument('--epochs', '-e', type=int, default=20, help='Number of epochs to train.')
parser.add_argument('--batch_size', '-b', type=int, default=256, help='Batch size.')
parser.add_argument('--learning_rate', '-lr', type=float, default=0.01, help='The Learning Rate.') #def 0.1
parser.add_argument('--momentum', '-m', type=float, default=0.3, help='Momentum.')
parser.add_argument('--decay', '-d', type=float, default=0.0005, help='Weight decay (L2 penalty).')
parser.add_argument('--schedule', type=int, nargs='+', default=[10, 15], help='Decrease learning rate at these epochs.')
parser.add_argument('--gamma', type=float, default=0.1, help='LR is multiplied by gamma on schedule.')
# Checkpoints
parser.add_argument('--save', '-s', type=str, default='./outputmodels', help='Folder to save checkpoints.')
# Architecture
parser.add_argument('--depth', type=int, default=65, help='Model depth - Multiple of 3*no_stages (5, 10)')
parser.add_argument('--cardinality', type=int, default=8, help='Model cardinality (group) in the DxD convolutionary layer.')
parser.add_argument('--base_width', type=int, default=8, help='Number of channels in each group. Output of the first convolution layer. Modify stages in model.py')
parser.add_argument('--widen_factor', type=int, default=4, help='Widen factor between every block')
# Acceleration
parser.add_argument('--ngpu', type=int, default=1, help='0 = CPU.')
parser.add_argument('--prefetch', type=int, default=8, help='Pre-fetching threads.')
# i/o
parser.add_argument('--name', type=str, default='model', help='Name your model')
parser.add_argument('--checkpoint', type=bool, default=False, help='Load from checkpoint?')
args = parser.parse_args()
# Init logger
if not os.path.isdir(args.save):
os.makedirs(args.save)
log = open(os.path.join(args.save, args.name + '_log.txt'), 'w')
state = {k: v for k, v in args._get_kwargs()}
log.write(json.dumps(state) + '\n')
# Calculate number of epochs wrt batch size
args.epochs = args.epochs * args.batch_size // args.batch_size
args.schedule = [x * args.batch_size // args.batch_size for x in args.schedule]
#initialise Dataset
# Binary molecules
test_b = pickle.load(open("%s/test_b_Vec.mtr" %args.data_folder, "rb"))
train_b = pickle.load(open("%s/train_b_Vec.mtr" %args.data_folder, "rb"))
test_nb = pickle.load(open("%s/test_nb_Vec.mtr" %args.data_folder, "rb"))
train_nb = pickle.load(open("%s/train_nb_Vec.mtr" %args.data_folder, "rb"))
# Indices of training and testing FF
test_FF = pickle.load(open("%s/test_FF.mtr" %args.data_folder, "rb"))
train_FF = pickle.load(open("%s/train_FF.mtr" %args.data_folder, "rb"))
# Fragfeaturevectors
Features_all = pickle.load(open(args.features_data_path, "rb"))
# multiplicate Featurevectors
train_FFAll = Features_all[train_FF]
test_FFAll = Features_all[test_FF]
print("loaded data")
#reduce size of dataset for initial testing
# cutTrain = 2**17
# np.random.seed(0)
# selTrain = np.random.choice(train_FF.shape[0], cutTrain, replace=False)
# cutTest = 2**13
# np.random.seed(0)
# selTest = np.random.choice(test_FF.shape[0], cutTest, replace=False)
# train_FFAll = train_FFAll[selTrain]
# test_FFAll = test_FFAll[selTest]
# train_b = train_b[selTrain]
# train_nb = train_nb[selTrain]
# test_b = test_b[selTest]
# test_nb = test_nb[selTest]
#validation split 1%
np.random.seed(0)
ss = np.random.choice(range(train_FFAll.shape[0]), int(0.01*train_FFAll.shape[0]), replace=False)
val_FFAll = train_FFAll[ss]
val_b = train_b[ss]
val_nb = train_nb[ss]
train_FFAll = np.delete(train_FFAll, ss, 0)
train_b = np.delete(train_b, ss, 0)
train_nb = np.delete(train_nb, ss, 0)
#normalise Featurevectors
start = time.time()
mean = [train_FFAll[:,i].mean() for i in range(480)]
std = [train_FFAll[:,i].std() for i in range(480)]
for i in range (480):
if std[i] != 0:
train_FFAll[:,i] = (train_FFAll[:,i] - mean[i])/std[i]
test_FFAll[:,i] = (test_FFAll[:,i] - mean[i])/std[i]
val_FFAll[:,i] = (val_FFAll[:,i] - mean[i])/std[i]
else:
train_FFAll[:,i] = train_FFAll[:,i]
test_FFAll[:,i] = test_FFAll[:,i]
val_FFAll[:,i] = val_FFAll[:,i]
end = time.time()
print("FF normalising time: ", end-start)
train_FFAll = np.resize(train_FFAll, (train_FFAll.shape[0], 1, 6, 80))
test_FFAll = np.resize(test_FFAll, (test_FFAll.shape[0], 1, 6, 80))
val_FFAll = np.resize(val_FFAll, (val_FFAll.shape[0], 1, 6, 80))
#construct binary class system
train_y_values1 = np.full((train_FFAll.shape[0],1),1)
train_y_values0 = np.full((train_FFAll.shape[0],1),0)
test_y_values1 = np.full((test_FFAll.shape[0],1),1)
test_y_values0 = np.full((test_FFAll.shape[0],1),0)
val_y_values1 = np.full((val_FFAll.shape[0],1),1)
val_y_values0 = np.full((val_FFAll.shape[0],1),0)
#convert numpy arrays into tensors
start1 = time.time()
train_tensor_x = torch.from_numpy(train_FFAll)
train_tensor_x2_b = torch.stack([torch.Tensor(i) for i in train_b])
train_tensor_y_b = torch.from_numpy(train_y_values1)
train_tensor_x2_nb = torch.stack([torch.Tensor(i) for i in train_nb])
train_tensor_y_nb = torch.from_numpy(train_y_values0)
end1 = time.time()
print("train data arrays2tensor: ", end1-start1)
start2 = time.time()
test_tensor_x = torch.from_numpy(test_FFAll)
test_tensor_x2_b = torch.stack([torch.Tensor(i) for i in test_b])
test_tensor_y_b = torch.from_numpy(test_y_values1)
test_tensor_x2_nb = torch.stack([torch.Tensor(i) for i in test_nb])
test_tensor_y_nb = torch.from_numpy(test_y_values0)
end2 = time.time()
print("test data arrays2tensor: ", end2-start2)
start3 = time.time()
val_tensor_x = torch.from_numpy(val_FFAll)
val_tensor_x2_b = torch.stack([torch.Tensor(i) for i in val_b])
val_tensor_y_b = torch.from_numpy(val_y_values1)
val_tensor_x2_nb = torch.stack([torch.Tensor(i) for i in val_nb])
val_tensor_y_nb = torch.from_numpy(val_y_values0)
end3 = time.time()
print("test data arrays2tensor: ", end3-start3)
train_tensor_x = torch.cat([train_tensor_x,train_tensor_x])
train_tensor_x2 = torch.cat([train_tensor_x2_b,train_tensor_x2_nb])
train_tensor_y = torch.cat([train_tensor_y_b,train_tensor_y_nb])
test_tensor_x = torch.cat([test_tensor_x,test_tensor_x])
test_tensor_x2 = torch.cat([test_tensor_x2_b,test_tensor_x2_nb])
test_tensor_y = torch.cat([test_tensor_y_b,test_tensor_y_nb])
val_tensor_x = torch.cat([val_tensor_x,val_tensor_x])
val_tensor_x2 = torch.cat([val_tensor_x2_b,val_tensor_x2_nb])
val_tensor_y = torch.cat([val_tensor_y_b,val_tensor_y_nb])
train_data = utils.TensorDataset(train_tensor_x, train_tensor_x2, train_tensor_y)
test_data = utils.TensorDataset(test_tensor_x, test_tensor_x2, test_tensor_y)
val_data = utils.TensorDataset(val_tensor_x, val_tensor_x2, val_tensor_y)
train_loader = utils.DataLoader(train_data,batch_size=args.batch_size, shuffle=True, num_workers=args.prefetch, pin_memory=True)
test_loader = utils.DataLoader(test_data,batch_size=args.batch_size, shuffle=False, num_workers=args.prefetch, pin_memory=True)
val_loader = utils.DataLoader(val_data,batch_size=args.batch_size, shuffle=False, num_workers=args.prefetch, pin_memory=True)
# Init model, criterion, and optimizer
net = FeatureResNeXt(args.cardinality, args.depth, args.base_width, args.widen_factor)
net = net.float()
print(net)
if args.ngpu > 1:
net = torch.nn.DataParallel(net, device_ids=list(range(args.ngpu)))
if args.ngpu > 0:
net.cuda()
optimizer = torch.optim.SGD(net.parameters(), state['learning_rate'], momentum=state['momentum'], weight_decay=state['decay'], nesterov=True)
# train function (forward, backward, update)
def train():
    """Run one training epoch over ``train_loader``.

    Performs forward + backward + optimizer step per batch and stores an
    exponential moving average of the batch loss in ``state['train_loss']``.
    """
    net.train()
    loss_avg = 0.0
    for batch_idx, (X, X2, Y) in enumerate(train_loader):
        X, X2, Y = torch.autograd.Variable(X.cuda()), torch.autograd.Variable(X2.cuda()), torch.autograd.Variable(Y.cuda())
        # forward -- a single pass. (The original called ``net`` twice and
        # discarded the first result, doubling the cost of every batch.)
        output = net(X.float(), X2.float())
        # backward
        optimizer.zero_grad()
        # LOSS FUNCTION
        loss = F.binary_cross_entropy(output, Y.float())
        print('train - loss', loss)
        loss.backward()
        optimizer.step()
        # exponential moving average
        loss_avg = loss_avg * 0.2 + float(loss) * 0.8
        print('train - loss avg', loss_avg)
    state['train_loss'] = loss_avg
# val function (forward only)
def val():
    """Evaluate on the validation set without updating parameters.

    Stores the averaged loss in ``state['val_loss']`` and the hit rate
    (percentage of thresholded predictions matching the labels) in
    ``state['val_percent_hit']``.
    """
    net.eval()
    loss_avg = 0.0
    correct = 0
    total = 0
    # Gradients are never used here; disabling autograd saves memory and
    # compute without changing any reported number.
    with torch.no_grad():
        for batch_idx, (X, X2, Y) in enumerate(val_loader):
            X, X2, Y = torch.autograd.Variable(X.cuda()), torch.autograd.Variable(X2.cuda()), torch.autograd.Variable(Y.cuda())
            output = net(X.float(), X2.float())
            groundT = Y
            # Probabilities above 0.5 count as positive predictions.
            predictT = torch.gt(output, 0.5)
            total += groundT.size(0)
            correct += (predictT.float() == groundT.float()).sum().item()
            loss = F.binary_cross_entropy(output, Y.float())
            print('val - loss', loss)
            # running sum of batch losses; averaged below
            loss_avg += float(loss)
    print('val_loss_avg', loss_avg/(total/args.batch_size), correct, total, 'Val. Percent Hits', correct/total*100)
    state['val_loss'] = loss_avg/(total/args.batch_size)
    state['val_percent_hit'] = correct/total*100
# test function (forward only)
def test():
    """Evaluate on the test set without updating parameters.

    Stores the averaged loss in ``state['test_loss']`` and the hit rate in
    ``state['percent_hit']``.
    """
    net.eval()
    loss_avg = 0.0
    correct = 0
    total = 0
    # Gradients are never used here; disabling autograd saves memory and
    # compute without changing any reported number.
    with torch.no_grad():
        for batch_idx, (X, X2, Y) in enumerate(test_loader):
            X, X2, Y = torch.autograd.Variable(X.cuda()), torch.autograd.Variable(X2.cuda()), torch.autograd.Variable(Y.cuda())
            output = net(X.float(), X2.float())
            groundT = Y
            # Probabilities above 0.5 count as positive predictions.
            predictT = torch.gt(output, 0.5)
            total += groundT.size(0)
            correct += (predictT.float() == groundT.float()).sum().item()
            loss = F.binary_cross_entropy(output, Y.float())
            print('test - loss', loss)
            # running sum of batch losses; averaged below
            loss_avg += float(loss)
    print('test_loss_avg', loss_avg/(total/args.batch_size), correct, total, 'Percent Hits', correct/total*100)
    state['test_loss'] = loss_avg/(total/args.batch_size)
    state['percent_hit'] = correct/total*100
# Main loop
# NOTE: despite its name, ``best_accuracy`` tracks the *lowest test loss*
# seen so far (initialised high and updated whenever the loss drops).
best_accuracy = 100.0
if args.checkpoint == True:
    # Resume training from a previously saved checkpoint.
    loaded_state_dict = torch.load('Data/KIN.ALL/ModelOutput/model00_cp.pytorch')
    # temp = {}
    # for key, val in list(loaded_state_dict.items()):
    #     temp[key[7:]] = val
    # loaded_state_dict = temp
    net.load_state_dict(loaded_state_dict)
for epoch in range(args.epochs):
    #updates learning rate
    if epoch in args.schedule:
        state['learning_rate'] *= args.gamma
        for param_group in optimizer.param_groups:
            param_group['lr'] = state['learning_rate']
    state['epoch'] = epoch
    train()
    val()
    test()
    #decide whether to save the model
    # NOTE(review): checkpoint selection is driven by ``state['test_loss']``,
    # i.e. the held-out test set is used for model selection rather than the
    # validation set -- confirm this is intended.
    if state['test_loss'] < best_accuracy:
        best_accuracy = state['test_loss']
        torch.save(net.state_dict(), os.path.join(args.save, args.name + '.pytorch'))
    # Periodic checkpoint every 20 epochs (skipping epoch 0).
    if epoch%20 == 0 and epoch > 0:
        torch.save(net.state_dict(), os.path.join(args.save, args.name + '_cp.pytorch'))
    #write in log file
    log.write('%s\n' % json.dumps(state))
    log.flush()
    print(state)
print("Best accuracy: %f" % best_accuracy)
torch.save(net.state_dict(), os.path.join(args.save, args.name + '_final.pytorch'))
log.close()
| 43.205882 | 167 | 0.647682 |
ace1dd851f45be076d8fd7b79f1110a75f03ccb0 | 5,298 | py | Python | dmfrbloom/bloomfilter.py | droberson/dmfrbloom | 9f431424fbbe6b3400cc6c6d9af6794f656661f9 | [
"MIT"
] | null | null | null | dmfrbloom/bloomfilter.py | droberson/dmfrbloom | 9f431424fbbe6b3400cc6c6d9af6794f656661f9 | [
"MIT"
] | 1 | 2019-12-22T00:55:57.000Z | 2019-12-22T00:55:57.000Z | dmfrbloom/bloomfilter.py | droberson/dmfrbloom | 9f431424fbbe6b3400cc6c6d9af6794f656661f9 | [
"MIT"
] | null | null | null | """ bloomfilter.py - Bloom filters using Python standard library. """
from math import ceil, log
try:
import mmh3
except ImportError:
import dmfrbloom.pymmh3 as mmh3
from .bitfield import BitField
class BloomFilter():
    """BloomFilter class - Implements bloom filters using the standard library.

    Attributes:
        size (int) - size of the filter in bits.
        hashcount (int) - number of hashes per element.
        filter (BitField object) - bitfield containing the filter.
    """
    def __init__(self, expected_items, fp_rate):
        """Size the filter for ``expected_items`` at the requested false
        positive rate and allocate the backing bitfield.

        Args:
            expected_items (int) - Expected number of elements.
            fp_rate (float) - Acceptable false positive rate, e.g. 0.01.
        """
        self.size = self.ideal_size(expected_items, fp_rate)
        self.hashcount = self.ideal_hashcount(expected_items)
        self.filter = BitField(self.size)

    def add(self, element):
        """BloomFilter.add() - Add an element to the filter.

        Args:
            element (str) - Element to add to the filter.

        Returns:
            Nothing.
        """
        # Set one bit per hash seed; mmh3 with different seeds acts as a
        # family of independent hash functions.
        for seed in range(self.hashcount):
            result = mmh3.hash(str(element), seed) % self.size
            self.filter.setbit(result)

    def lookup(self, element):
        """BloomFilter.lookup() - Check if element exists in the filter.

        Args:
            element (str) - Element to look up.

        Returns:
            True if the element is probably present, False if it is
            definitely not present.
        """
        for seed in range(self.hashcount):
            result = mmh3.hash(str(element), seed) % self.size
            if self.filter.getbit(result) is False:
                return False
        return True

    def save(self, path):
        """BloomFilter.save() - Save the filter's current state to a file.

        On-disk layout: 16 little-endian bytes for ``size``, 16 for
        ``hashcount``, followed by the raw bitfield bytes.

        Args:
            path (str) - Location to save the file.

        Returns:
            Nothing.

        TODO: error checking if file cant be written.
        """
        with open(path, "wb") as filterfile:
            filterfile.write(self.size.to_bytes(16, byteorder="little"))
            filterfile.write(self.hashcount.to_bytes(16, byteorder="little"))
            filterfile.write(self.filter.bitfield)

    def load(self, path):
        """BloomFilter.load() - Load a saved filter (see ``save`` for the
        on-disk layout).

        Args:
            path (str) - Location of filter to load.

        TODO: error check if this exists + is readable!
        """
        with open(path, "rb") as filterfile:
            self.size = int.from_bytes(filterfile.read(16), byteorder="little")
            self.hashcount = \
                int.from_bytes(filterfile.read(16), byteorder="little")
            self.filter.bitfield = filterfile.read()

    @staticmethod
    def accuracy(size, hashcount, elements):
        """BloomFilter.accuracy() - Calculate a filter's accuracy given
                                    size, hash count, and expected number
                                    of elements.

        Args:
            size (int) - Size of filter.
            hashcount (int) - Number of hashes per element.
            elements (int) - Number of expected elements.

        Returns:
            float containing a filter's percentage of accuracy.
        """
        # fp = (1 - [1 - 1/size] ^ (hashcount * expected_items)) ^ hashcount
        # (A leftover debug print of the intermediate value was removed.)
        false_positive = \
            (1 - (1 - 1 / size) ** (hashcount * int(elements))) \
            ** hashcount
        return round(100 - false_positive * 100, 4)

    @staticmethod
    def ideal_size(expected, fp_rate):
        """BloomFilter.ideal_size() - Calculate ideal filter size given an
                                      expected number of elements and desired
                                      rate of false positives.

        Args:
            expected (int) - Expected number of elements in the filter.
            fp_rate (float) - Acceptable rate of false positives. Ex: 0.01
                              tolerates a 1% chance of false positives.

        Returns:
            Ideal size (int).
        """
        # size = -(n * ln(p)) / ln(2)^2
        return int(-(expected * log(fp_rate)) / (log(2) ** 2))

    def ideal_hashcount(self, expected):
        """BloomFilter.ideal_hashcount() - Calculate ideal number of hashes to
                                           perform given the expected number of
                                           elements to be stored in the filter.

        Uses ``self.size``, so the filter size must be set first.

        Args:
            expected (int) - Expected number of elements.

        Returns:
            Ideal number of hashes (int).
        """
        # ideal = (size / expected items) * ln(2)
        return int((self.size / int(expected)) * log(2))

    @property
    def bytesize(self):
        """BloomFilter.bytesize() - Size of the filter in bytes, rounded up
        to whole bytes.

        Returns:
            Size of filter (int)
        """
        return ceil(self.size / 8)

    @property
    def bytesize_human(self):
        """BloomFilter.bytesize_human() - Human-readable size of the filter,
        e.g. ``'13.0bytes'``.

        Returns:
            Filter size (str).
        """
        suffix = ['bytes', 'Kb', 'Mb', 'Gb', 'Tb', 'Pb', 'Eb', 'Zb', 'Yb']
        order = int(log(ceil(self.size) / 8, 2) / 10) if self.size else 0
        rounded = round(ceil(self.size / 8) / (1 << (order * 10)), 4)
        return str(rounded) + suffix[order]
ace1ddbb78b0514af1984108b646bfdd35c33380 | 1,960 | py | Python | app.py | 491-password-security/storage | e529ebe9b5804b7ab9eb72c6f0d6d9d6cf841915 | [
"MIT"
] | null | null | null | app.py | 491-password-security/storage | e529ebe9b5804b7ab9eb72c6f0d6d9d6cf841915 | [
"MIT"
] | null | null | null | app.py | 491-password-security/storage | e529ebe9b5804b7ab9eb72c6f0d6d9d6cf841915 | [
"MIT"
] | null | null | null | from flask import Flask
from flask_restful import Api
from flask_sqlalchemy import SQLAlchemy
from flask_jwt_extended import JWTManager
from pymongo import MongoClient
# Making Flask Application
app = Flask(__name__)
# Object of Api class
api = Api(app)
# Application Configuration
# NOTE(review): the database credentials and both secret keys below are
# hard-coded; they should come from environment variables or a config file
# before deploying anywhere non-local.
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgres://postgres:postgres@postgres:5432/jwt_auth'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['SECRET_KEY'] = 'binbuda'
app.config['JWT_SECRET_KEY'] = 'binbesyuzbuda'
# Blacklist checking is enabled for both access and refresh tokens.
app.config['JWT_BLACKLIST_ENABLED'] = True
app.config['JWT_BLACKLIST_TOKEN_CHECKS'] = ['access', 'refresh']
# SqlAlchemy object
sql = SQLAlchemy(app)
# Pymongo cli
#client = MongoClient('mongodb://mongo:mongo@mongo:27017/?authSource=admin')
#db = client.zipzip
#mongo_events = db.events
#mongo_event_themes = db.event_themes
# JwtManager object
jwt = JWTManager(app)
# Generating sql tables before first request is fetched
@app.before_first_request
def create_tables():
    """Create all SQLAlchemy tables before the first request is served.

    Best-effort: a failure is reported rather than crashing the app
    (matching the original intent), but the exception is no longer
    silently swallowed -- the original bare ``except:`` hid the cause and
    would even trap SystemExit/KeyboardInterrupt.
    """
    try:
        sql.create_all()
    except Exception as exc:
        print("fatal error creating db: %s" % exc)
# Checking that token is in blacklist or not
@jwt.token_in_blacklist_loader
def check_if_token_in_blacklist(decrypted_token):
    """Return True when the token's JTI has been revoked."""
    return user_models.RevokedTokenModel.is_jti_blacklisted(decrypted_token['jti'])
# Importing models and resources
# NOTE(review): imported after app/sql/jwt are created above -- presumably
# to avoid circular imports; verify before reordering.
from service import auth_service
#from service import event_service
from models import user_models
# Api Endpoints
api.add_resource(auth_service.SimpleEndpointTest, '/test')
api.add_resource(auth_service.UserRegistration, '/registration')
api.add_resource(auth_service.UserLogoutAccess, '/logout/access')
api.add_resource(auth_service.UserLogoutRefresh, '/logout/refresh')
api.add_resource(auth_service.TokenRefresh, '/token/refresh')
api.add_resource(auth_service.PersonalInfo, '/user-info')
api.add_resource(auth_service.SendOtp, '/send-otp')
api.add_resource(auth_service.VerifyOtp, '/verify-otp')
ace1defb5450343014fc9dd76040eaef3f1693ac | 5,100 | py | Python | connectomics/data/augmentation/__init__.py | matinraayai/pytorch_connectomics | b11a2f7e71a8d1442fb05f7a6edfaaaa7b0d9205 | [
"MIT"
] | null | null | null | connectomics/data/augmentation/__init__.py | matinraayai/pytorch_connectomics | b11a2f7e71a8d1442fb05f7a6edfaaaa7b0d9205 | [
"MIT"
] | null | null | null | connectomics/data/augmentation/__init__.py | matinraayai/pytorch_connectomics | b11a2f7e71a8d1442fb05f7a6edfaaaa7b0d9205 | [
"MIT"
] | null | null | null | from .composition import Compose
from .augmentor import DataAugment
from .test_augmentor import TestAugmentor
# augmentation methods
from .warp import Elastic
from .grayscale import Grayscale
from .flip import Flip
from .rotation import Rotate
from .rescale import Rescale
from .misalign import MisAlignment
from .missing_section import MissingSection
from .missing_parts import MissingParts
from .motion_blur import MotionBlur
from .cutblur import CutBlur
from .cutnoise import CutNoise
from .mixup import MixupAugmentor
__all__ = ['Compose',
'DataAugment',
'Elastic',
'Grayscale',
'Rotate',
'Rescale',
'MisAlignment',
'MissingSection',
'MissingParts',
'Flip',
'MotionBlur',
'CutBlur',
'CutNoise',
'MixupAugmentor',
'TestAugmentor']
def build_train_augmentor(model_input_size, do_2d, keep_uncropped=False, keep_non_smoothed=False, **kwargs):
    """Build the composed training-time data augmentor from config values.

    Args:
        model_input_size: expected model input size, forwarded to Compose.
        do_2d (bool): if True, skip augmentations that require a 3-D volume
            (missing-section, misalignment).
        keep_uncropped (bool): debugging only; defaults to False and cannot
            be adjusted in the config files.
        keep_non_smoothed (bool): debugging only; same caveat as above.
        **kwargs: per-augmentation config entries (ENABLED, P, ...).

    Returns:
        Compose: the composed augmentor over all enabled augmentations.
    """
    aug_list = []

    names = kwargs['ADDITIONAL_TARGETS_NAME']
    types = kwargs['ADDITIONAL_TARGETS_TYPE']
    if names is None:
        additional_targets = None
    else:
        assert len(names) == len(types)
        additional_targets = dict(zip(names, types))  # target name -> type

    # 1. rotate
    if kwargs['ROTATE']['ENABLED']:
        aug_list.append(Rotate(rot90=kwargs['ROTATE']['ROT90'],
                               p=kwargs['ROTATE']['P'],
                               additional_targets=additional_targets))
    # 2. rescale
    if kwargs['RESCALE']['ENABLED']:
        aug_list.append(Rescale(p=kwargs['RESCALE']['P'],
                                additional_targets=additional_targets))
    # 3. flip
    if kwargs['FLIP']['ENABLED']:
        aug_list.append(Flip(do_ztrans=kwargs['FLIP']['DO_ZTRANS'],
                             p=kwargs['FLIP']['P'],
                             additional_targets=additional_targets))
    # 4. elastic
    if kwargs['ELASTIC']['ENABLED']:
        aug_list.append(Elastic(alpha=kwargs['ELASTIC']['ALPHA'],
                                sigma=kwargs['ELASTIC']['SIGMA'],
                                p=kwargs['ELASTIC']['P'],
                                additional_targets=additional_targets))
    # 5. grayscale
    if kwargs['GRAYSCALE']['ENABLED']:
        aug_list.append(Grayscale(p=kwargs['GRAYSCALE']['P'],
                                  additional_targets=additional_targets))
    # 6. missing parts
    if kwargs['MISSINGPARTS']['ENABLED']:
        aug_list.append(MissingParts(p=kwargs['MISSINGPARTS']['P'],
                                     additional_targets=additional_targets))
    # 7. missing section (3-D volumes only)
    if kwargs['MISSINGSECTION']['ENABLED'] and not do_2d:
        aug_list.append(MissingSection(num_sections=kwargs['MISSINGSECTION']['NUM_SECTION'],
                                       p=kwargs['MISSINGSECTION']['P'],
                                       additional_targets=additional_targets))
    # 8. misalignment (3-D volumes only)
    if kwargs['MISALIGNMENT']['ENABLED'] and not do_2d:
        aug_list.append(MisAlignment(displacement=kwargs['MISALIGNMENT']['DISPLACEMENT'],
                                     rotate_ratio=kwargs['MISALIGNMENT']['ROTATE_RATIO'],
                                     p=kwargs['MISALIGNMENT']['P'],
                                     additional_targets=additional_targets))
    # 9. motion-blur
    if kwargs['MOTIONBLUR']['ENABLED']:
        aug_list.append(MotionBlur(sections=kwargs['MOTIONBLUR']['SECTIONS'],
                                   kernel_size=kwargs['MOTIONBLUR']['KERNEL_SIZE'],
                                   p=kwargs['MOTIONBLUR']['P'],
                                   additional_targets=additional_targets))
    # 10. cut-blur
    if kwargs['CUTBLUR']['ENABLED']:
        aug_list.append(CutBlur(length_ratio=kwargs['CUTBLUR']['LENGTH_RATIO'],
                                down_ratio_min=kwargs['CUTBLUR']['DOWN_RATIO_MIN'],
                                down_ratio_max=kwargs['CUTBLUR']['DOWN_RATIO_MAX'],
                                downsample_z=kwargs['CUTBLUR']['DOWNSAMPLE_Z'],
                                p=kwargs['CUTBLUR']['P'],
                                additional_targets=additional_targets))
    # 11. cut-noise
    if kwargs['CUTNOISE']['ENABLED']:
        aug_list.append(CutNoise(length_ratio=kwargs['CUTNOISE']['LENGTH_RATIO'],
                                 scale=kwargs['CUTNOISE']['SCALE'],
                                 p=kwargs['CUTNOISE']['P'],
                                 additional_targets=additional_targets))

    return Compose(transforms=aug_list,
                   input_size=model_input_size,
                   smooth=kwargs['SMOOTH'],
                   keep_uncropped=keep_uncropped,
                   keep_non_smoothed=keep_non_smoothed,
                   additional_targets=additional_targets)
| 39.84375 | 108 | 0.576667 |
ace1e2802db2d417df8de8610e69a4d63b475e98 | 3,966 | py | Python | run_experiment.py | vribeiro1/covid19 | 2528ec2e67bee5ff864a513940fb0525f98740b0 | [
"MIT"
] | null | null | null | run_experiment.py | vribeiro1/covid19 | 2528ec2e67bee5ff864a513940fb0525f98740b0 | [
"MIT"
] | null | null | null | run_experiment.py | vribeiro1/covid19 | 2528ec2e67bee5ff864a513940fb0525f98740b0 | [
"MIT"
] | null | null | null | import numpy as np
from catboost import CatBoostClassifier, Pool
from sklearn.calibration import calibration_curve
from sklearn.model_selection import StratifiedKFold
from sklearn.metrics import (roc_auc_score,
roc_curve,
precision_recall_curve,
average_precision_score,
f1_score,
accuracy_score)
def run_experiment(df_model, train_valid_records, test_records, features, target):
    """Train CatBoost models with 5-fold stratified CV; evaluate each fold
    on the held-out test set.

    Args:
        df_model: full modeling DataFrame with a ``record_id`` column.
        train_valid_records: DataFrame of train/validation record ids plus
            the target column (used for stratification).
        test_records: DataFrame of held-out test record ids.
        features: list of feature column names.
        target: name of the boolean target column.

    Returns:
        (summaries, df_test): a list of per-fold dicts of models/metrics and
        the test DataFrame used for evaluation.
    """
    df_test = df_model[df_model.record_id.isin(test_records.record_id)]
    X_test = df_test[features]
    y_test = df_test[target].map({False: 0, True: 1})

    summaries = []
    skf = StratifiedKFold(n_splits=5)
    for i, (train, valid) in enumerate(skf.split(train_valid_records.record_id, train_valid_records[target])):
        train_records = train_valid_records.iloc[train].record_id
        df_train = df_model[df_model.record_id.isin(train_records)]
        valid_records = train_valid_records.iloc[valid].record_id
        df_valid = df_model[df_model.record_id.isin(valid_records)]

        # The three splits must be disjoint by record id.
        assert len(set(df_train.record_id.unique()) & set(df_valid.record_id.unique())) == 0
        assert len(set(df_train.record_id.unique()) & set(df_test.record_id.unique())) == 0
        assert len(set(df_test.record_id.unique()) & set(df_valid.record_id.unique())) == 0

        model = CatBoostClassifier(max_depth=3, learning_rate=0.01, early_stopping_rounds=100)

        X_train = df_train[features]
        y_train = df_train[target].map({False: 0, True: 1})
        X_valid = df_valid[features]
        y_valid = df_valid[target].map({False: 0, True: 1})
        valid_counts = y_valid.value_counts()

        model.fit(X_train, y_train, eval_set=(X_valid, y_valid))

        pred = model.predict_proba(X_test)
        y_score = pred[:, 1]

        # ROC / PR metrics on the held-out test set. (The original computed
        # roc_auc_score and roc_curve twice each; compute them once.)
        auc = roc_auc_score(y_test.values, y_score)
        fpr, tpr, thresholds = roc_curve(y_test.values, y_score)
        ap = average_precision_score(y_test.values, y_score)
        precision, recall, _ = precision_recall_curve(y_test.values, y_score)

        # Sweep decision thresholds for the best F1 and accuracy.
        f1s = [(thr, f1_score(y_test.values, y_score > thr)) for thr in np.arange(0, 1, 0.01)]
        accs = [(thr, accuracy_score(y_test.values, y_score > thr)) for thr in np.arange(0, 1, 0.01)]
        best_thr_f1, best_f1 = max(f1s, key=lambda t: t[1])
        best_thr_acc, best_acc = max(accs, key=lambda t: t[1])

        fraction_of_positives, mean_predicted_value = calibration_curve(y_test.values, y_score, n_bins=7)

        # SHAP values: the last column of CatBoost's output holds the
        # expected value; strip it from the per-feature matrix.
        shap_values = model.get_feature_importance(Pool(X_test, y_test), type="ShapValues")
        expected_value = shap_values[0, -1]
        shap_values = shap_values[:, :-1]

        summary = {
            "model": model,
            "roc_auc": auc,
            "avg_precision": ap,
            "fpr": fpr,
            "tpr": tpr,
            "thresholds": thresholds,
            "precision": precision,
            "recall": recall,
            "best_thr_f1": best_thr_f1,
            "best_f1": best_f1,
            "best_thr_acc": best_thr_acc,
            "best_acc": best_acc,
            "fraction_of_positives": fraction_of_positives,
            "mean_predicted_value": mean_predicted_value,
            "expected_value": expected_value,
            "shap_values": shap_values
        }
        summaries.append(summary)

        print(f"""
        --------------------------------------------------------------------
        Finished pipeline for fold #{i + 1}

        Summary:
        Positive cases : {valid_counts[1]}
        Negative cases : {valid_counts[0]}

        ROC AUC = {auc}
        Average Precision = {ap}
        Best F1 = {best_f1} (Threshold = {best_thr_f1})
        Best Accuracy = {best_acc} (Threshold = {best_thr_acc})
        --------------------------------------------------------------------
        """)

    return summaries, df_test
| 38.504854 | 110 | 0.616742 |
ace1e4ce0a8f1bfe60f277e19bd4cd3c80a94157 | 1,201 | py | Python | tests/dags_corrupted/test_nonstring_owner.py | holly-evans/airflow | 865406cbab4defd35c95afbf0a8d5987ff7788b1 | [
"Apache-2.0"
] | 8,092 | 2016-04-27T20:32:29.000Z | 2019-01-05T07:39:33.000Z | tests/dags_corrupted/test_nonstring_owner.py | holly-evans/airflow | 865406cbab4defd35c95afbf0a8d5987ff7788b1 | [
"Apache-2.0"
] | 2,961 | 2016-05-05T07:16:16.000Z | 2019-01-05T08:47:59.000Z | tests/dags_corrupted/test_nonstring_owner.py | holly-evans/airflow | 865406cbab4defd35c95afbf0a8d5987ff7788b1 | [
"Apache-2.0"
] | 3,546 | 2016-05-04T20:33:16.000Z | 2019-01-05T05:14:26.000Z | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from datetime import datetime, timedelta
from airflow import DAG
from airflow.operators.empty import EmptyOperator
# Presumably a deliberately "corrupted" DAG fixture (the file lives under
# tests/dags_corrupted/): ``default_args['owner']`` is a list instead of a
# string, which is the invalid condition this DAG exists to exercise.
with DAG(
    dag_id="test_nonstring_owner",
    schedule_interval="0 0 * * *",
    start_date=datetime(2022, 1, 1),
    dagrun_timeout=timedelta(minutes=60),
    tags=["example"],
    default_args={'owner': ['a']},  # non-string owner: intentionally invalid; do not "fix"
) as dag:
    # Single no-op task so the DAG is non-empty and can be parsed/scheduled.
    run_this_last = EmptyOperator(
        task_id="test_task",
    )
| 34.314286 | 62 | 0.741049 |
ace1e6fb6435d6a09f8b259669fb00c28ee51b21 | 3,217 | py | Python | lookml/lkml/tokens.py | Honcharov12/pylookml | d4ebb58e7177c5d0e40509ad8ebc0b4fb77483d9 | [
"MIT"
] | 17 | 2020-10-27T14:48:04.000Z | 2021-10-11T14:00:33.000Z | lookml/lkml/tokens.py | Honcharov12/pylookml | d4ebb58e7177c5d0e40509ad8ebc0b4fb77483d9 | [
"MIT"
] | 39 | 2020-09-08T07:17:40.000Z | 2021-09-06T20:04:38.000Z | lookml/lkml/tokens.py | Honcharov12/pylookml | d4ebb58e7177c5d0e40509ad8ebc0b4fb77483d9 | [
"MIT"
] | 7 | 2019-06-27T15:51:58.000Z | 2020-08-26T14:28:33.000Z | """Tokens used by the lexer to tokenize LookML."""
class Token:
    """Base class for LookML tokens, lexed from LookML strings."""
    id: str = "<base token>"
    value: str
    def __init__(self, line_number: int):
        """Record the line in the source text where this token begins.
        Args:
            line_number: line in the text where this token begins
        """
        self.line_number: int = line_number
    def __eq__(self, other):
        """Two plain tokens are equal when they are instances of the same class."""
        return type(self) == type(other)
    def __repr__(self):
        """Return the class name plus the token's value, truncated to 25 chars.
        Tokens without a ``value`` attribute render with empty parentheses.
        Examples:
            >>> repr(Token(1))
            'Token()'
            >>> token = Token(1)
            >>> token.value = 'A very, very, very long string value'
            >>> repr(token)
            'Token(A very, very, very long s ... )'
        """
        text = getattr(self, "value", "").strip()
        if len(text) > 25:
            text = text[:25].rstrip() + " ... "
        return f"{type(self).__name__}({text})"
class ContentToken(Token):
    """Base class for LookML tokens that contain a string of content."""
    def __init__(self, value: str, line_number: int):
        """Store the token's content together with its starting line.
        Args:
            value: a string value for the token's content
            line_number: line in the text where this token begins
        """
        self.value: str = value
        self.line_number: int = line_number
    def __eq__(self, other):
        """Content tokens are equal when both their ids and values match."""
        return (self.id, self.value) == (other.id, other.value)
class StreamStartToken(Token):
    """Represents the start of a stream of characters."""
    id: str = "<stream start>"
class StreamEndToken(Token):
    """Represents the end of a stream of characters."""
    id: str = "<stream end>"
class BlockStartToken(Token):
    """Represents the start of a block ("{")."""
    id: str = "{"
class BlockEndToken(Token):
    """Represents the end of a block ("}")."""
    id: str = "}"
class ValueToken(Token):
    """Separates a key from a value (":")."""
    id: str = ":"
class ExpressionBlockEndToken(Token):
    """Represents the end of an expression block (";;")."""
    id: str = ";;"
class CommaToken(Token):
    """Separates elements in a list (",")."""
    id: str = ","
class ListStartToken(Token):
    """Represents the start of a list ("[")."""
    id: str = "["
class ListEndToken(Token):
    """Represents the end of a list ("]")."""
    id: str = "]"
class ExpressionBlockToken(ContentToken):
    """Contains the value of an expression block."""
    id: str = "<expression block>"
class LiteralToken(ContentToken):
    """Contains the value of an unquoted literal."""
    id: str = "<literal>"
class QuotedLiteralToken(ContentToken):
    """Contains the value of a quoted literal."""
    id: str = "<quoted literal>"
class RefinementToken(Token):
    """Indicates a LookML refinement ("+")."""
    id: str = "+"
| 22.65493 | 83 | 0.60056 |
ace1e71f8ee08a9387f594dbbf07f1aa21905184 | 1,082 | py | Python | mytube/posts/migrations/0002_comment.py | ashowlsky/mytube_c | 122d75d7dcd23ed0240448e5db5ca130266d26a2 | [
"MIT"
] | null | null | null | mytube/posts/migrations/0002_comment.py | ashowlsky/mytube_c | 122d75d7dcd23ed0240448e5db5ca130266d26a2 | [
"MIT"
] | null | null | null | mytube/posts/migrations/0002_comment.py | ashowlsky/mytube_c | 122d75d7dcd23ed0240448e5db5ca130266d26a2 | [
"MIT"
] | null | null | null | # Generated by Django 2.2 on 2020-02-19 20:10
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration: creates the ``Comment`` model for the
    # ``posts`` app, with CASCADE foreign keys to the project user model
    # (``author``) and to ``posts.Post`` (``post``), both reverse-accessible
    # as ``.comments``. Do not hand-edit applied migrations.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('posts', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('text', models.TextField()),
                ('pub_date', models.DateTimeField(auto_now_add=True, verbose_name='date published')),
                # NOTE(review): ``last_edited`` has neither ``auto_now`` nor a
                # default, so it must be assigned explicitly on every save —
                # confirm that is intended.
                ('last_edited', models.DateTimeField(verbose_name='date edited')),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to=settings.AUTH_USER_MODEL)),
                ('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='posts.Post')),
            ],
        ),
    ]
| 38.642857 | 145 | 0.642329 |
ace1e995e68da7ee25121cc78cb2649af8ffb120 | 4,754 | py | Python | climlab/radiation/rrtm/utils.py | nfeldl/climlab | 2cabb49e2c3f54c1795f24338ef5ee44e49fc7e7 | [
"BSD-3-Clause",
"MIT"
] | 160 | 2015-02-25T15:56:37.000Z | 2022-03-14T23:51:23.000Z | climlab/radiation/rrtm/utils.py | nfeldl/climlab | 2cabb49e2c3f54c1795f24338ef5ee44e49fc7e7 | [
"BSD-3-Clause",
"MIT"
] | 137 | 2015-12-18T17:39:31.000Z | 2022-02-04T20:50:53.000Z | climlab/radiation/rrtm/utils.py | nfeldl/climlab | 2cabb49e2c3f54c1795f24338ef5ee44e49fc7e7 | [
"BSD-3-Clause",
"MIT"
] | 54 | 2015-04-28T05:57:39.000Z | 2022-02-17T08:15:11.000Z | from __future__ import division
import numpy as np
from scipy.interpolate import interp1d
from climlab.utils.thermo import mmr_to_vmr
def _prepare_general_arguments(RRTMGobject):
    '''Prepare arguments needed for both RRTMG_SW and RRTMG_LW with correct dimensions.

    Each profile is broadcast against the temperature field and converted to
    the (ncol, nlay) layout (surface at element 0) via ``_climlab_to_rrtm``;
    gas amounts are converted to volume mixing ratios. The result is a flat
    tuple that callers unpack positionally.
    '''
    tlay = _climlab_to_rrtm(RRTMGobject.Tatm)
    tlev = _climlab_to_rrtm(interface_temperature(**RRTMGobject.state))
    play = _climlab_to_rrtm(RRTMGobject.lev * np.ones_like(tlay))
    plev = _climlab_to_rrtm(RRTMGobject.lev_bounds * np.ones_like(tlev))
    ncol, nlay = tlay.shape
    # Surface temperature broadcast to the squeezed surface shape
    # (Ts is passed as both the field and the shape template).
    tsfc = _climlab_to_rrtm_sfc(RRTMGobject.Ts, RRTMGobject.Ts)
    # GASES -- put them in proper dimensions and units
    vapor_mixing_ratio = mmr_to_vmr(RRTMGobject.specific_humidity, gas='H2O')
    h2ovmr = _climlab_to_rrtm(vapor_mixing_ratio * np.ones_like(RRTMGobject.Tatm))
    o3vmr = _climlab_to_rrtm(RRTMGobject.absorber_vmr['O3'] * np.ones_like(RRTMGobject.Tatm))
    co2vmr = _climlab_to_rrtm(RRTMGobject.absorber_vmr['CO2'] * np.ones_like(RRTMGobject.Tatm))
    ch4vmr = _climlab_to_rrtm(RRTMGobject.absorber_vmr['CH4'] * np.ones_like(RRTMGobject.Tatm))
    n2ovmr = _climlab_to_rrtm(RRTMGobject.absorber_vmr['N2O'] * np.ones_like(RRTMGobject.Tatm))
    o2vmr = _climlab_to_rrtm(RRTMGobject.absorber_vmr['O2'] * np.ones_like(RRTMGobject.Tatm))
    cfc11vmr = _climlab_to_rrtm(RRTMGobject.absorber_vmr['CFC11'] * np.ones_like(RRTMGobject.Tatm))
    cfc12vmr = _climlab_to_rrtm(RRTMGobject.absorber_vmr['CFC12'] * np.ones_like(RRTMGobject.Tatm))
    cfc22vmr = _climlab_to_rrtm(RRTMGobject.absorber_vmr['CFC22'] * np.ones_like(RRTMGobject.Tatm))
    ccl4vmr = _climlab_to_rrtm(RRTMGobject.absorber_vmr['CCL4'] * np.ones_like(RRTMGobject.Tatm))
    # Cloud parameters
    cldfrac = _climlab_to_rrtm(RRTMGobject.cldfrac * np.ones_like(RRTMGobject.Tatm))
    ciwp = _climlab_to_rrtm(RRTMGobject.ciwp * np.ones_like(RRTMGobject.Tatm))
    clwp = _climlab_to_rrtm(RRTMGobject.clwp * np.ones_like(RRTMGobject.Tatm))
    relq = _climlab_to_rrtm(RRTMGobject.r_liq * np.ones_like(RRTMGobject.Tatm))
    reic = _climlab_to_rrtm(RRTMGobject.r_ice * np.ones_like(RRTMGobject.Tatm))
    # NOTE(review): ``cfc12vmr`` appears twice in the returned tuple. Callers
    # unpack this tuple positionally, so the duplication may be deliberate
    # (matching the RRTMG driver's argument list) — confirm against the
    # callers before changing the tuple layout.
    return (ncol, nlay, play, plev, tlay, tlev, tsfc,
            h2ovmr, o3vmr, co2vmr, ch4vmr, n2ovmr, o2vmr, cfc11vmr,
            cfc12vmr, cfc12vmr, cfc22vmr, ccl4vmr,
            cldfrac, ciwp, clwp, relq, reic)
def interface_temperature(Ts, Tatm, **kwargs):
    '''Compute temperature at the model layer interfaces.

    Linearly interpolates the layer-centre temperatures ``Tatm`` onto the
    interior pressure bounds, repeats the topmost layer value at the TOA
    boundary, and uses the surface temperature ``Ts`` at the bottom.
    Any extra state variables arriving via ``**kwargs`` are ignored.
    '''
    # Actually it's not clear to me how the RRTM code uses these values.
    lev_axis = Tatm.domain.axes['lev']
    interpolator = interp1d(lev_axis.points, Tatm, axis=-1)
    interior = interpolator(lev_axis.bounds[1:-1])
    # Repeat the top-layer temperature at TOA; assume Ts at the bottom.
    toa = Tatm[..., 0]
    return np.concatenate((toa[..., np.newaxis], interior, Ts), axis=-1)
def _climlab_to_rrtm(field):
'''Prepare field with proper dimension order.
RRTM code expects arrays with (ncol, nlay)
and with pressure decreasing from surface at element 0
climlab grid dimensions are any of:
- (num_lev,) --> (1, num_lev)
- (num_lat, num_lev) --> (num_lat, num_lev)
- (num_lat, num_lon, num_lev) --> (num_lat*num_lon, num_lev)
But lat-lon grids not yet supported here!
Case single column
'''
# Make this work just with 1D (KM,) arrays
# (KM,) --> (1, nlay)
try:
# Flip along the last axis to reverse the pressure order
field = field[..., ::-1]
except:
if np.isscalar(field):
return field
else:
raise ValueError('field must be array_like or scalar.')
shape = field.shape
if len(shape)==1: # (num_lev)
# Need to append an extra dimension for singleton horizontal ncol
return field[np.newaxis, ...]
elif len(shape)==2: # (num_lat, num_lev)
return field
elif len(shape) > 2:
raise ValueError('lat-lon grids not yet supported here.')
#elif len(shape)==3: # (num_lat, num_lon, num_lev)
# Need to reshape this array
def _rrtm_to_climlab(field):
try:
# Flip along the last axis to reverse the pressure order
field = field[..., ::-1]
except:
if np.isscalar(field):
return field
else:
raise ValueError('field must be array_like or scalar.')
return np.squeeze(field)
def _climlab_to_rrtm_sfc(field, Ts):
'''Return an array of size np.squeeze(Ts) to remove the singleton depth dimension'''
fieldsqueeze = np.squeeze(field)
Tsqueeze = np.squeeze(Ts)
return fieldsqueeze * np.ones_like(Tsqueeze)
| 45.27619 | 99 | 0.690997 |
ace1eacc56c7af8b3773020d88ea371dfb978b1e | 9,044 | py | Python | efficientnet_pytorch/model.py | nesara/TRAFFIC-SIGN-RECOGNITION-SYSTEM-USING-EFFICIENTNET | e24f50a41f029e339edc35b3ff66e650474d0bf5 | [
"Apache-2.0"
] | null | null | null | efficientnet_pytorch/model.py | nesara/TRAFFIC-SIGN-RECOGNITION-SYSTEM-USING-EFFICIENTNET | e24f50a41f029e339edc35b3ff66e650474d0bf5 | [
"Apache-2.0"
] | null | null | null | efficientnet_pytorch/model.py | nesara/TRAFFIC-SIGN-RECOGNITION-SYSTEM-USING-EFFICIENTNET | e24f50a41f029e339edc35b3ff66e650474d0bf5 | [
"Apache-2.0"
] | null | null | null | import torch
from torch import nn
from torch.nn import functional as F
from .utils import (
relu_fn,
round_filters,
round_repeats,
drop_connect,
get_same_padding_conv2d,
get_model_params,
efficientnet_params,
load_pretrained_weights,
_tf_weight_init,
)
class MBConvBlock(nn.Module):
    """
    Mobile Inverted Residual Bottleneck Block:
    expand (1x1) -> depthwise (kxk) -> squeeze-and-excite -> project (1x1),
    with an identity skip connection when the shapes allow it.
    Args:
        block_args (namedtuple): BlockArgs, see above
        global_params (namedtuple): GlobalParam, see above
    Attributes:
        has_se (bool): Whether the block contains a Squeeze and Excitation layer.
    """
    def __init__(self, block_args, global_params):
        super().__init__()
        self._block_args = block_args
        self._bn_mom = 1 - global_params.batch_norm_momentum
        self._bn_eps = global_params.batch_norm_epsilon
        # Squeeze-and-excite is enabled only for a ratio in (0, 1].
        self.has_se = (self._block_args.se_ratio is not None) and (0 < self._block_args.se_ratio <= 1)
        self.id_skip = block_args.id_skip  # skip connection and drop connect
        # Get static or dynamic convolution depending on image size
        Conv2d = get_same_padding_conv2d(image_size=global_params.image_size)
        # Expansion phase (skipped entirely when expand_ratio == 1)
        inp = self._block_args.input_filters  # number of input channels
        oup = self._block_args.input_filters * self._block_args.expand_ratio  # number of output channels
        if self._block_args.expand_ratio != 1:
            # bias=False: the BatchNorm that follows supplies the affine shift.
            self._expand_conv = Conv2d(in_channels=inp, out_channels=oup, kernel_size=1, bias=False)
            self._bn0 = nn.BatchNorm2d(num_features=oup, momentum=self._bn_mom, eps=self._bn_eps)
        # Depthwise convolution phase
        k = self._block_args.kernel_size
        s = self._block_args.stride
        self._depthwise_conv = Conv2d(
            in_channels=oup, out_channels=oup, groups=oup,  # groups makes it depthwise
            kernel_size=k, stride=s, bias=False)
        self._bn1 = nn.BatchNorm2d(num_features=oup, momentum=self._bn_mom, eps=self._bn_eps)
        # Squeeze and Excitation layer, if desired. Note the squeezed width
        # is computed from the *input* filters, not the expanded width.
        if self.has_se:
            num_squeezed_channels = max(1, int(self._block_args.input_filters * self._block_args.se_ratio))
            self._se_reduce = Conv2d(in_channels=oup, out_channels=num_squeezed_channels, kernel_size=1)
            self._se_expand = Conv2d(in_channels=num_squeezed_channels, out_channels=oup, kernel_size=1)
        # Output phase: 1x1 projection with no activation afterwards.
        final_oup = self._block_args.output_filters
        self._project_conv = Conv2d(in_channels=oup, out_channels=final_oup, kernel_size=1, bias=False)
        self._bn2 = nn.BatchNorm2d(num_features=final_oup, momentum=self._bn_mom, eps=self._bn_eps)
    def forward(self, inputs, drop_connect_rate=None):
        """
        :param inputs: input tensor
        :param drop_connect_rate: drop connect rate (float, between 0 and 1);
            falsy values (None or 0) disable drop connect
        :return: output of block
        """
        # Expansion and Depthwise Convolution
        x = inputs
        if self._block_args.expand_ratio != 1:
            x = relu_fn(self._bn0(self._expand_conv(inputs)))
        x = relu_fn(self._bn1(self._depthwise_conv(x)))
        # Squeeze and Excitation: global-average-pool to 1x1, two 1x1 convs,
        # then sigmoid-gate the channels of x.
        if self.has_se:
            x_squeezed = F.adaptive_avg_pool2d(x, 1)
            x_squeezed = self._se_expand(relu_fn(self._se_reduce(x_squeezed)))
            x = torch.sigmoid(x_squeezed) * x
        x = self._bn2(self._project_conv(x))
        # Skip connection and drop connect: only when the block preserves
        # both spatial size (stride 1) and channel count.
        input_filters, output_filters = self._block_args.input_filters, self._block_args.output_filters
        if self.id_skip and self._block_args.stride == 1 and input_filters == output_filters:
            if drop_connect_rate:
                x = drop_connect(x, p=drop_connect_rate, training=self.training)
            x = x + inputs  # skip connection
        return x
class EfficientNet(nn.Module):
    """
    An EfficientNet model. Most easily loaded with the .from_name or .from_pretrained methods
    Args:
        blocks_args (list): A list of BlockArgs to construct blocks
        global_params (namedtuple): A set of GlobalParams shared between blocks
    Example:
        model = EfficientNet.from_pretrained('efficientnet-b0')
    """
    def __init__(self, blocks_args=None, global_params=None):
        super().__init__()
        assert isinstance(blocks_args, list), 'blocks_args should be a list'
        assert len(blocks_args) > 0, 'block args must be greater than 0'
        self._global_params = global_params
        self._blocks_args = blocks_args
        # Get static or dynamic convolution depending on image size
        Conv2d = get_same_padding_conv2d(image_size=global_params.image_size)
        # Batch norm parameters
        bn_mom = 1 - self._global_params.batch_norm_momentum
        bn_eps = self._global_params.batch_norm_epsilon
        # Stem: 3x3 stride-2 conv from RGB to the width-scaled stem channels
        in_channels = 3  # rgb
        out_channels = round_filters(32, self._global_params)  # number of output channels
        self._conv_stem = Conv2d(in_channels, out_channels, kernel_size=3, stride=2, bias=False)
        self._bn0 = nn.BatchNorm2d(num_features=out_channels, momentum=bn_mom, eps=bn_eps)
        # Build blocks
        self._blocks = nn.ModuleList([])
        for block_args in self._blocks_args:
            # Update block input and output filters based on depth multiplier.
            block_args = block_args._replace(
                input_filters=round_filters(block_args.input_filters, self._global_params),
                output_filters=round_filters(block_args.output_filters, self._global_params),
                num_repeat=round_repeats(block_args.num_repeat, self._global_params)
            )
            # The first block needs to take care of stride and filter size increase.
            self._blocks.append(MBConvBlock(block_args, self._global_params))
            if block_args.num_repeat > 1:
                # Remaining repeats keep the channel count and use stride 1.
                block_args = block_args._replace(input_filters=block_args.output_filters, stride=1)
            for _ in range(block_args.num_repeat - 1):
                self._blocks.append(MBConvBlock(block_args, self._global_params))
        # Head
        in_channels = block_args.output_filters  # output of final block
        out_channels = round_filters(1280, self._global_params)
        self._conv_head = Conv2d(in_channels, out_channels, kernel_size=1, bias=False)
        self._bn1 = nn.BatchNorm2d(num_features=out_channels, momentum=bn_mom, eps=bn_eps)
        # Final linear layer
        self._dropout = self._global_params.dropout_rate
        self._fc = nn.Linear(out_channels, self._global_params.num_classes)
        # Weight initialization
        for module in self.modules():
            _tf_weight_init(module)
    def extract_features(self, inputs):
        """ Returns output of the final convolution layer """
        # Stem
        x = relu_fn(self._bn0(self._conv_stem(inputs)))
        # Blocks
        for idx, block in enumerate(self._blocks):
            drop_connect_rate = self._global_params.drop_connect_rate
            if drop_connect_rate:
                # Scale drop-connect linearly with depth: 0 for the first
                # block, approaching the configured rate for the last.
                drop_connect_rate *= float(idx) / len(self._blocks)
            x = block(x, drop_connect_rate=drop_connect_rate)
        # Head
        x = relu_fn(self._bn1(self._conv_head(x)))
        return x
    def forward(self, inputs):
        """ Calls extract_features to extract features, applies final linear layer, and returns logits. """
        # Convolution layers
        x = self.extract_features(inputs)
        # Pooling and final linear layer
        x = F.adaptive_avg_pool2d(x, 1).squeeze(-1).squeeze(-1)
        if self._dropout:
            x = F.dropout(x, p=self._dropout, training=self.training)
        x = self._fc(x)
        return x
    @classmethod
    def from_name(cls, model_name, override_params=None):
        """ Build an (untrained) model from its name, e.g. 'efficientnet-b0'. """
        cls._check_model_name_is_valid(model_name)
        blocks_args, global_params = get_model_params(model_name, override_params)
        return EfficientNet(blocks_args, global_params)
    @classmethod
    def from_pretrained(cls, model_name, num_classes=1000):
        """ Build a model and load pretrained weights (the FC layer only when num_classes == 1000). """
        model = EfficientNet.from_name(model_name, override_params={'num_classes': num_classes})
        load_pretrained_weights(model, model_name, load_fc=(num_classes == 1000))
        return model
    @classmethod
    def get_image_size(cls, model_name):
        """ Return the native input image size for the named model. """
        cls._check_model_name_is_valid(model_name)
        _, _, res, _ = efficientnet_params(model_name)
        return res
    @classmethod
    def _check_model_name_is_valid(cls, model_name, also_need_pretrained_weights=False):
        """ Validates model name. Note that pretrained weights are only available for
        the first four models (efficientnet-b{i} for i in 0,1,2,3) at the moment. """
        num_models = 4 if also_need_pretrained_weights else 8
        valid_models = ['efficientnet_b'+str(i) for i in range(num_models)]
        # '-' and '_' are accepted interchangeably in model names.
        if model_name.replace('-','_') not in valid_models:
            raise ValueError('model_name should be one of: ' + ', '.join(valid_models))
| 41.296804 | 107 | 0.677576 |
ace1ead928aaecd4880a70696df63525823beffd | 5,077 | py | Python | learningdjango/settings.py | itsregalo/RetechBiz | 393d8337db28a6d21bed988a590c252f0f9906cd | [
"MIT"
] | 2 | 2020-10-15T15:31:57.000Z | 2021-06-17T10:00:40.000Z | learningdjango/settings.py | itsregalo/RetechBiz | 393d8337db28a6d21bed988a590c252f0f9906cd | [
"MIT"
] | null | null | null | learningdjango/settings.py | itsregalo/RetechBiz | 393d8337db28a6d21bed988a590c252f0f9906cd | [
"MIT"
] | null | null | null | """
Django settings for learningdjango project.
Generated by 'django-admin startproject' using Django 3.0.6.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
from decouple import config
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config('SECRET_KEY')  # read from the environment/.env via python-decouple
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# BUG FIX: this was ``ALLOWED_HOSTS = [*]``, which is a SyntaxError (a bare
# ``*`` is only legal when unpacking an iterable). The wildcard entry must be
# the string '*' — it matches any Host header; tighten for production.
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
    # material admin theme (the stock django.contrib.admin is disabled below)
    'material.admin',
    'material.admin.default',
    # 'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # third-party packages
    'debug_toolbar',
    'crispy_forms',
    'imagekit',
    'tinymce',
    # project apps
    'accounts',
    'mainbiz',
    # allauth
    'django.contrib.sites',
    'allauth',
    'allauth.account',
    'allauth.socialaccount',
]
# Override allauth's default signup form with the project's custom form.
ACCOUNT_FORMS = {
    'signup': 'accounts.forms.CustomSignupForm',
}
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'debug_toolbar.middleware.DebugToolbarMiddleware',
]
# django-debug-toolbar only renders for requests from these IPs.
INTERNAL_IPS = ['127.0.0.1']
ROOT_URLCONF = 'learningdjango.urls'
CRISPY_TEMPLATE_PACK = "bootstrap4"
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # Project-level template directories, searched before app templates.
        'DIRS': [
            os.path.join(BASE_DIR,'mainbiz/templates'),
            os.path.join(BASE_DIR,'accounts/templates'),
            os.path.join(BASE_DIR,'accounts/templates/accounts'),
        ],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'learningdjango.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
    'default': {
        # SQLite file in the project root — fine for development; switch to a
        # server database for production.
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
# Optional stronger password hashers (left disabled; Django's default is used):
# PASSWORD_HASHERS = [
#     'django.contrib.auth.hashers.Argon2PasswordHasher',
#     'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
#     'django.contrib.auth.hashers.BCryptPasswordHasher',
#     'django.contrib.auth.hashers.PBKDF2PasswordHasher',
#     'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
# ]
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
        'OPTIONS':{'min_length':7} # sets min number of chars to be used
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
    os.path.join(BASE_DIR,'mainbiz/static'),
]
AUTHENTICATION_BACKENDS = [
    # Needed to login by username in Django admin, regardless of `allauth`
    'django.contrib.auth.backends.ModelBackend',
    # `allauth` specific authentication methods, such as login by e-mail
    'allauth.account.auth_backends.AuthenticationBackend',
]
# django.contrib.sites site id (used by allauth)
SITE_ID = 1
# Outgoing mail: SMTP through Gmail. Port 587 + EMAIL_USE_TLS is Gmail's
# STARTTLS configuration; the password comes from the environment/.env.
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# BUG FIX: Django's setting is named DEFAULT_FROM_EMAIL — the previous
# DEFAULT_FROM_MAIL was silently ignored by django.core.mail. The old name
# is kept as an alias in case project code reads settings.DEFAULT_FROM_MAIL.
DEFAULT_FROM_EMAIL = 'itsregalo047@gmail.com'
DEFAULT_FROM_MAIL = DEFAULT_FROM_EMAIL
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_USER = 'retechempire@gmail.com'
EMAIL_HOST_PASSWORD = config('EMAIL_HOST_PASSWORD')
EMAIL_PORT = 587
EMAIL_USE_TLS = True
# django-allauth account behaviour
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_LOGIN_ATTEMPTS_LIMIT = 5
ACCOUNT_LOGIN_ATTEMPTS_TIMEOUT = 1000  # lock-out window after too many failures (seconds, per allauth docs)
ACCOUNT_LOGOUT_ON_PASSWORD_CHANGE = True
# User-uploaded files
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# Post-login/logout redirect targets
LOGIN_REDIRECT_URL = '/'
LOGOUT_REDIRECT_URL = '/'
| 25.903061 | 91 | 0.703959 |
ace1ec7864400c658bfa13f6e893200316c613f0 | 5,807 | py | Python | chemberta/masked-lm/train_roberta_mlm.py | BogdanDidenko/bert-loves-chemistry | 87cdddc1303c7d07f81613dbb3cb28c8bf910592 | [
"MIT"
] | 183 | 2020-04-07T18:57:16.000Z | 2022-03-25T08:10:28.000Z | chemberta/masked-lm/train_roberta_mlm.py | BogdanDidenko/bert-loves-chemistry | 87cdddc1303c7d07f81613dbb3cb28c8bf910592 | [
"MIT"
] | 11 | 2020-05-12T00:04:20.000Z | 2021-07-14T12:39:38.000Z | chemberta/masked-lm/train_roberta_mlm.py | BogdanDidenko/bert-loves-chemistry | 87cdddc1303c7d07f81613dbb3cb28c8bf910592 | [
"MIT"
] | 35 | 2020-04-03T01:24:06.000Z | 2022-03-26T12:30:55.000Z | """ Script for training a Roberta Masked-Language Model
Usage [SMILES tokenizer]:
python train_roberta_mlm.py --dataset_path=<DATASET_PATH> --output_dir=<OUTPUT_DIR> --run_name=<RUN_NAME> --tokenizer_type=smiles --tokenizer_path="seyonec/SMILES_tokenized_PubChem_shard00_160k"
Usage [BPE tokenizer]:
python train_roberta_mlm.py --dataset_path=<DATASET_PATH> --output_dir=<OUTPUT_DIR> --run_name=<RUN_NAME> --tokenizer_type=bpe
"""
import os
from absl import app
from absl import flags
import transformers
from transformers.trainer_callback import EarlyStoppingCallback
import torch
from torch.utils.data import random_split
import wandb
from transformers import RobertaConfig
from transformers import RobertaTokenizerFast
from transformers import RobertaForMaskedLM
from chemberta.utils.raw_text_dataset import RawTextDataset
from transformers import DataCollatorForLanguageModeling
from transformers import Trainer, TrainingArguments
from tokenizers import ByteLevelBPETokenizer
FLAGS = flags.FLAGS
# RobertaConfig params
flags.DEFINE_integer(name="vocab_size", default=600, help="")
flags.DEFINE_integer(name="max_position_embeddings", default=515, help="") # This needs to be longer than max_tokenizer_len. max_len is currently 514 in seyonec/SMILES_tokenized_PubChem_shard00_160k
flags.DEFINE_integer(name="num_attention_heads", default=1, help="")
flags.DEFINE_integer(name="num_hidden_layers", default=1, help="")
flags.DEFINE_integer(name="type_vocab_size", default=1, help="")
flags.DEFINE_bool(name="fp16", default=True, help="Mixed precision.")
# Tokenizer params. tokenizer_path is required for the SMILES tokenizer; for
# BPE a tokenizer is trained on the dataset and saved to output_tokenizer_dir.
flags.DEFINE_enum(name="tokenizer_type", default="smiles", enum_values=["smiles", "bpe", "SMILES", "BPE"], help="")
flags.DEFINE_string(name="tokenizer_path", default="", help="")
flags.DEFINE_integer(name="BPE_min_frequency", default=2, help="")
flags.DEFINE_string(name="output_tokenizer_dir", default="tokenizer_dir", help="")
flags.DEFINE_integer(name="max_tokenizer_len", default=512, help="")
flags.DEFINE_integer(name="tokenizer_block_size", default=512, help="")
# Dataset params
flags.DEFINE_string(name="dataset_path", default=None, help="")
flags.DEFINE_string(name="output_dir", default="default_dir", help="")
flags.DEFINE_string(name="run_name", default="default_run", help="")
# MLM params
flags.DEFINE_float(name="mlm_probability", default=0.15, lower_bound=0.0, upper_bound=1.0, help="")
# Train params
flags.DEFINE_float(name="frac_train", default=0.95, help="")
flags.DEFINE_integer(name="eval_steps", default=1000, help="")
flags.DEFINE_integer(name="logging_steps", default=100, help="")
flags.DEFINE_boolean(name="overwrite_output_dir", default=True, help="")
flags.DEFINE_integer(name="num_train_epochs", default=1, help="")
flags.DEFINE_integer(name="per_device_train_batch_size", default=64, help="")
flags.DEFINE_integer(name="save_steps", default=10000, help="")
flags.DEFINE_integer(name="save_total_limit", default=2, help="")
# dataset_path is the only mandatory flag.
flags.mark_flag_as_required("dataset_path")
def main(argv):
    """Train a RoBERTa masked-language model on a SMILES text dataset.

    All configuration comes from the module-level absl FLAGS. The tokenizer
    is either loaded from --tokenizer_path, or (BPE only) trained from
    scratch on the dataset. Metrics are reported to Weights & Biases and the
    final model is saved under <output_dir>/<run_name>/final.

    Raises:
        ValueError: if no --tokenizer_path is given for the SMILES tokenizer.
    """
    torch.manual_seed(0)  # fixed seed so the random train/eval split is reproducible
    wandb.login()
    is_gpu = torch.cuda.is_available()
    config = RobertaConfig(
        vocab_size=FLAGS.vocab_size,
        max_position_embeddings=FLAGS.max_position_embeddings,
        num_attention_heads=FLAGS.num_attention_heads,
        num_hidden_layers=FLAGS.num_hidden_layers,
        type_vocab_size=FLAGS.type_vocab_size,
    )
    # Resolve the tokenizer location. A BPE tokenizer can be trained on the
    # fly; the SMILES tokenizer must be supplied via --tokenizer_path.
    if FLAGS.tokenizer_path:
        tokenizer_path = FLAGS.tokenizer_path
    elif FLAGS.tokenizer_type.upper() == "BPE":
        tokenizer_path = FLAGS.output_tokenizer_dir
        if not os.path.isdir(tokenizer_path):
            os.makedirs(tokenizer_path)
        tokenizer = ByteLevelBPETokenizer()
        tokenizer.train(files=FLAGS.dataset_path, vocab_size=FLAGS.vocab_size, min_frequency=FLAGS.BPE_min_frequency, special_tokens=["<s>","<pad>","</s>","<unk>","<mask>"])
        tokenizer.save_model(tokenizer_path)
    else:
        # BUG FIX: this branch used to only print a warning and then crash
        # below with a NameError because tokenizer_path was never assigned.
        raise ValueError("Please provide a tokenizer path if using the SMILES tokenizer")
    tokenizer = RobertaTokenizerFast.from_pretrained(tokenizer_path, max_len=FLAGS.max_tokenizer_len)
    model = RobertaForMaskedLM(config=config)
    print(f"Model size: {model.num_parameters()} parameters.")
    dataset = RawTextDataset(tokenizer=tokenizer, file_path=FLAGS.dataset_path, block_size=FLAGS.tokenizer_block_size)
    # Random train/eval split; guarantee at least one training example.
    train_size = max(int(FLAGS.frac_train * len(dataset)), 1)
    eval_size = len(dataset) - train_size
    print(f"Train size: {train_size}")
    print(f"Eval size: {eval_size}")
    train_dataset, eval_dataset = random_split(dataset, [train_size, eval_size])
    data_collator = DataCollatorForLanguageModeling(
        tokenizer=tokenizer, mlm=True, mlm_probability=FLAGS.mlm_probability
    )
    training_args = TrainingArguments(
        evaluation_strategy="steps",
        eval_steps=FLAGS.eval_steps,
        load_best_model_at_end=True,
        logging_steps=FLAGS.logging_steps,
        output_dir=os.path.join(FLAGS.output_dir, FLAGS.run_name),
        overwrite_output_dir=FLAGS.overwrite_output_dir,
        num_train_epochs=FLAGS.num_train_epochs,
        per_device_train_batch_size=FLAGS.per_device_train_batch_size,
        save_steps=FLAGS.save_steps,
        save_total_limit=FLAGS.save_total_limit,
        fp16 = is_gpu and FLAGS.fp16, # fp16 only works on CUDA devices
        report_to="wandb",
        run_name=FLAGS.run_name,
    )
    trainer = Trainer(
        model=model,
        args=training_args,
        data_collator=data_collator,
        train_dataset=train_dataset,
        eval_dataset=eval_dataset,
        callbacks=[EarlyStoppingCallback(early_stopping_patience=1)],
    )
    trainer.train()
    trainer.save_model(os.path.join(FLAGS.output_dir, FLAGS.run_name, "final"))
# Entry point: absl parses the FLAGS from argv before dispatching to main().
if __name__ == '__main__':
    app.run(main)
| 39.503401 | 198 | 0.753401 |
ace1eccd6e35583ec4968f9b2020861427220223 | 521,853 | py | Python | clarifai_grpc/grpc/api/service_pb2.py | johntorcivia/clarifai-python-grpc | cc37f0b3eee374dfcc9912de4d0303dab3a28635 | [
"Apache-2.0"
] | null | null | null | clarifai_grpc/grpc/api/service_pb2.py | johntorcivia/clarifai-python-grpc | cc37f0b3eee374dfcc9912de4d0303dab3a28635 | [
"Apache-2.0"
] | null | null | null | clarifai_grpc/grpc/api/service_pb2.py | johntorcivia/clarifai-python-grpc | cc37f0b3eee374dfcc9912de4d0303dab3a28635 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: proto/clarifai/api/service.proto
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from clarifai_grpc.grpc.api import resources_pb2 as proto_dot_clarifai_dot_api_dot_resources__pb2
from clarifai_grpc.grpc.api.status import status_pb2 as proto_dot_clarifai_dot_api_dot_status_dot_status__pb2
from clarifai_grpc.grpc.api.utils import extensions_pb2 as proto_dot_clarifai_dot_api_dot_utils_dot_extensions__pb2
from clarifai_grpc.grpc.auth.scope import scope_pb2 as proto_dot_clarifai_dot_auth_dot_scope_dot_scope__pb2
from clarifai_grpc.grpc.auth.util import extension_pb2 as proto_dot_clarifai_dot_auth_dot_util_dot_extension__pb2
from clarifai_grpc.grpc.api.status import status_code_pb2 as proto_dot_clarifai_dot_api_dot_status_dot_status__code__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2
from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='proto/clarifai/api/service.proto',
package='clarifai.api',
syntax='proto3',
serialized_options=b'\n\025com.clarifai.grpc.apiP\001Z;github.com/Clarifai/clarifai-go-grpc/proto/clarifai/api/api\242\002\004CAIP',
serialized_pb=b'\n proto/clarifai/api/service.proto\x12\x0c\x63larifai.api\x1a\"proto/clarifai/api/resources.proto\x1a&proto/clarifai/api/status/status.proto\x1a)proto/clarifai/api/utils/extensions.proto\x1a%proto/clarifai/auth/scope/scope.proto\x1a(proto/clarifai/auth/util/extension.proto\x1a+proto/clarifai/api/status/status_code.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\",\n\nPagination\x12\x0c\n\x04page\x18\x01 \x01(\r\x12\x10\n\x08per_page\x18\x02 \x01(\r\"p\n\x14GetAnnotationRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x15\n\rannotation_id\x18\x02 \x01(\t\x12\x10\n\x08input_id\x18\x03 \x01(\t\"\x9a\x02\n\x16ListAnnotationsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0b\n\x03ids\x18\x02 \x03(\t\x12\x11\n\tinput_ids\x18\x03 \x03(\t\x12\x10\n\x08user_ids\x18\t \x03(\t\x12\x19\n\x11model_version_ids\x18\n \x03(\t\x12-\n\x08statuses\x18\x05 \x03(\x0b\x32\x1b.clarifai.api.status.Status\x12\x1c\n\x14list_all_annotations\x18\x06 \x01(\x08\x12\x0c\n\x04page\x18\x07 \x01(\r\x12\x10\n\x08per_page\x18\x08 \x01(\r\x12\x0f\n\x07task_id\x18\x0b \x01(\tJ\x04\x08\x04\x10\x05\"x\n\x16PostAnnotationsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12-\n\x0b\x61nnotations\x18\x02 \x03(\x0b\x32\x18.clarifai.api.Annotation\"\x89\x01\n\x17PatchAnnotationsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12-\n\x0b\x61nnotations\x18\x02 \x03(\x0b\x32\x18.clarifai.api.Annotation\x12\x0e\n\x06\x61\x63tion\x18\x03 \x01(\t\"\xb9\x01\n\x1dPatchAnnotationsStatusRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x34\n\x0bstatus_code\x18\x02 \x01(\x0e\x32\x1f.clarifai.api.status.StatusCode\x12\x10\n\x08user_ids\x18\x03 \x03(\t\x12\x0f\n\x07task_id\x18\x04 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x05 
\x01(\t\"v\n\x1ePatchAnnotationsStatusResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\x10\n\x08user_ids\x18\x02 \x03(\t\x12\x15\n\rupdated_count\x18\x03 \x01(\r\"s\n\x17\x44\x65leteAnnotationRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x10\n\x08input_id\x18\x02 \x01(\t\x12\x15\n\rannotation_id\x18\x03 \x01(\t\"k\n\x18\x44\x65leteAnnotationsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0b\n\x03ids\x18\x02 \x03(\t\x12\x11\n\tinput_ids\x18\x03 \x03(\t\"u\n\x18SingleAnnotationResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12,\n\nannotation\x18\x02 \x01(\x0b\x32\x18.clarifai.api.Annotation\"{\n\x17MultiAnnotationResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\x33\n\x0b\x61nnotations\x18\x02 \x03(\x0b\x32\x18.clarifai.api.AnnotationB\x04\x80\xb5\x18\x01\"@\n\rGetAppRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\"\xf4\x01\n\x0fListAppsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0c\n\x04page\x18\x02 \x01(\r\x12\x10\n\x08per_page\x18\x03 \x01(\r\x12\x16\n\x0esort_ascending\x18\x05 \x01(\x08\x12\x16\n\x0csort_by_name\x18\x06 \x01(\x08H\x00\x12\x1d\n\x13sort_by_modified_at\x18\x07 \x01(\x08H\x00\x12\r\n\x05query\x18\x08 \x01(\t\x12\x10\n\x04name\x18\x04 \x01(\tB\x02\x18\x01\x12\x15\n\rfeatured_only\x18\t \x01(\x08\x42\t\n\x07sort_by\"c\n\x0fPostAppsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x1f\n\x04\x61pps\x18\x02 \x03(\x0b\x32\x11.clarifai.api.App\"C\n\x10\x44\x65leteAppRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\"\xb9\x01\n\x10PatchAppsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x1f\n\x04\x61pps\x18\x02 \x03(\x0b\x32\x11.clarifai.api.App\x12\x0e\n\x06\x61\x63tion\x18\x03 
\x01(\t\x12\x32\n\x0fmetadata_action\x18\x04 \x01(\x0b\x32\x19.clarifai.api.PatchAction\x12\x0f\n\x07reindex\x18\x05 \x01(\x08\"\xa3\x01\n\x17PostAppsSearchesRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12)\n\tapp_query\x18\x02 \x01(\x0b\x32\x16.clarifai.api.AppQuery\x12,\n\npagination\x18\x03 \x01(\x0b\x32\x18.clarifai.api.Pagination\"`\n\x11SingleAppResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\x1e\n\x03\x61pp\x18\x02 \x01(\x0b\x32\x11.clarifai.api.App\"f\n\x10MultiAppResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12%\n\x04\x61pps\x18\x02 \x03(\x0b\x32\x11.clarifai.api.AppB\x04\x80\xb5\x18\x01\"k\n\x18ListCollaboratorsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x1e\n\x16list_all_collaborators\x18\x02 \x01(\x08\"~\n\x18PostCollaboratorsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x31\n\rcollaborators\x18\x02 \x03(\x0b\x32\x1a.clarifai.api.Collaborator\"\x8f\x01\n\x19PatchCollaboratorsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x31\n\rcollaborators\x18\x02 \x03(\x0b\x32\x1a.clarifai.api.Collaborator\x12\x0e\n\x06\x61\x63tion\x18\x03 \x01(\t\"|\n\x1a\x44\x65leteCollaboratorsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x18\n\x10\x63ollaborator_ids\x18\x02 \x03(\t\x12\x13\n\x0buser_emails\x18\x03 \x03(\t\"\x82\x01\n\x1aMultiCollaboratorsResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\x37\n\rcollaborators\x18\x02 \x03(\x0b\x32\x1a.clarifai.api.CollaboratorB\x04\x80\xb5\x18\x01\"l\n\x19ListCollaborationsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0c\n\x04page\x18\x02 \x01(\r\x12\x10\n\x08per_page\x18\x03 \x01(\r\"\x85\x01\n\x1bMultiCollaborationsResponse\x12+\n\x06status\x18\x01 
\x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\x39\n\x0e\x63ollaborations\x18\x02 \x03(\x0b\x32\x1b.clarifai.api.CollaborationB\x04\x80\xb5\x18\x01\".\n\x14GetStatusCodeRequest\x12\x16\n\x0estatus_code_id\x18\x01 \x01(\t\"\x18\n\x16ListStatusCodesRequest\"G\n\x18SingleStatusCodeResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\"u\n\x17MultiStatusCodeResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12-\n\x08statuses\x18\x02 \x03(\x0b\x32\x1b.clarifai.api.status.Status\"X\n\x11GetConceptRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x12\n\nconcept_id\x18\x02 \x01(\t\"f\n\x13ListConceptsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0c\n\x04page\x18\x02 \x01(\r\x12\x10\n\x08per_page\x18\x03 \x01(\r\"\xaf\x01\n\x1bPostConceptsSearchesRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x31\n\rconcept_query\x18\x02 \x01(\x0b\x32\x1a.clarifai.api.ConceptQuery\x12,\n\npagination\x18\x03 \x01(\x0b\x32\x18.clarifai.api.Pagination\"o\n\x13PostConceptsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\'\n\x08\x63oncepts\x18\x02 \x03(\x0b\x32\x15.clarifai.api.Concept\"\x80\x01\n\x14PatchConceptsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\'\n\x08\x63oncepts\x18\x02 \x03(\x0b\x32\x15.clarifai.api.Concept\x12\x0e\n\x06\x61\x63tion\x18\x03 \x01(\t\"j\n\x17GetConceptCountsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0c\n\x04page\x18\x02 \x01(\r\x12\x10\n\x08per_page\x18\x03 \x01(\r\"l\n\x15SingleConceptResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12&\n\x07\x63oncept\x18\x02 \x01(\x0b\x32\x15.clarifai.api.Concept\"r\n\x14MultiConceptResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12-\n\x08\x63oncepts\x18\x02 
\x03(\x0b\x32\x15.clarifai.api.ConceptB\x04\x80\xb5\x18\x01\"\x82\x01\n\x19MultiConceptCountResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\x38\n\x0e\x63oncept_counts\x18\x02 \x03(\x0b\x32\x1a.clarifai.api.ConceptCountB\x04\x80\xb5\x18\x01\"\xb1\x01\n\x1bListConceptRelationsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x12\n\nconcept_id\x18\x02 \x01(\t\x12\x11\n\tpredicate\x18\x03 \x01(\t\x12\x1a\n\x12knowledge_graph_id\x18\x04 \x01(\t\x12\x0c\n\x04page\x18\x05 \x01(\r\x12\x10\n\x08per_page\x18\x06 \x01(\r\"\x9c\x01\n\x1bPostConceptRelationsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x12\n\nconcept_id\x18\x02 \x01(\t\x12\x38\n\x11\x63oncept_relations\x18\x03 \x03(\x0b\x32\x1d.clarifai.api.ConceptRelation\"q\n\x1d\x44\x65leteConceptRelationsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x12\n\nconcept_id\x18\x02 \x01(\t\x12\x0b\n\x03ids\x18\x03 \x03(\t\"M\n\x1aListKnowledgeGraphsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\"\x85\x01\n\x1aPostKnowledgeGraphsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x36\n\x10knowledge_graphs\x18\x02 \x03(\x0b\x32\x1c.clarifai.api.KnowledgeGraph\"\x8f\x01\n\x1dPostConceptMappingJobsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12=\n\x14\x63oncept_mapping_jobs\x18\x02 \x03(\x0b\x32\x1f.clarifai.api.ConceptMappingJob\"\x8b\x01\n\x1cMultiConceptRelationResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12>\n\x11\x63oncept_relations\x18\x02 \x03(\x0b\x32\x1d.clarifai.api.ConceptRelationB\x04\x80\xb5\x18\x01\"\x88\x01\n\x1bMultiKnowledgeGraphResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12<\n\x10knowledge_graphs\x18\x02 
\x03(\x0b\x32\x1c.clarifai.api.KnowledgeGraphB\x04\x80\xb5\x18\x01\"Z\n\x1eMultiConceptMappingJobResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\x0b\n\x03ids\x18\x02 \x03(\t\"r\n\x19GetConceptLanguageRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x12\n\nconcept_id\x18\x02 \x01(\t\x12\x10\n\x08language\x18\x03 \x01(\t\"\x82\x01\n\x1bListConceptLanguagesRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x12\n\nconcept_id\x18\x02 \x01(\t\x12\x0c\n\x04page\x18\x03 \x01(\r\x12\x10\n\x08per_page\x18\x04 \x01(\r\"\xad\x01\n\x1cPatchConceptLanguagesRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x12\n\nconcept_id\x18\x02 \x01(\t\x12\x38\n\x11\x63oncept_languages\x18\x03 \x03(\x0b\x32\x1d.clarifai.api.ConceptLanguage\x12\x0e\n\x06\x61\x63tion\x18\x04 \x01(\t\"\x9c\x01\n\x1bPostConceptLanguagesRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x12\n\nconcept_id\x18\x02 \x01(\t\x12\x38\n\x11\x63oncept_languages\x18\x03 \x03(\x0b\x32\x1d.clarifai.api.ConceptLanguage\"\x85\x01\n\x1dSingleConceptLanguageResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\x37\n\x10\x63oncept_language\x18\x02 \x01(\x0b\x32\x1d.clarifai.api.ConceptLanguage\"\x8b\x01\n\x1cMultiConceptLanguageResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12>\n\x11\x63oncept_languages\x18\x02 \x03(\x0b\x32\x1d.clarifai.api.ConceptLanguageB\x04\x80\xb5\x18\x01\"T\n\x0fGetInputRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x10\n\x08input_id\x18\x02 \x01(\t\"l\n\x16GetInputSamplesRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0f\n\x07task_id\x18\x02 \x01(\t\x12\x10\n\x08user_ids\x18\x03 \x03(\t\"\x9e\x01\n\x11ListInputsRequest\x12/\n\x0buser_app_id\x18\x01 
\x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0c\n\x04page\x18\x02 \x01(\r\x12\x10\n\x08per_page\x18\x03 \x01(\r\x12+\n\x06status\x18\x05 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\x0b\n\x03ids\x18\x04 \x03(\t\"}\n\x13StreamInputsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x10\n\x08per_page\x18\x02 \x01(\r\x12\x0f\n\x07last_id\x18\x03 \x01(\t\x12\x12\n\ndescending\x18\x04 \x01(\x08\"i\n\x11PostInputsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12#\n\x06inputs\x18\x02 \x03(\x0b\x32\x13.clarifai.api.Input\"z\n\x12PatchInputsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12#\n\x06inputs\x18\x02 \x03(\x0b\x32\x13.clarifai.api.Input\x12\x0e\n\x06\x61\x63tion\x18\x03 \x01(\t\"W\n\x12\x44\x65leteInputRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x10\n\x08input_id\x18\x02 \x01(\t\"Y\n\x13\x44\x65leteInputsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0b\n\x03ids\x18\x02 \x03(\tJ\x04\x08\x03\x10\x04\"f\n\x13SingleInputResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\"\n\x05input\x18\x02 \x01(\x0b\x32\x13.clarifai.api.Input\"l\n\x12MultiInputResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12)\n\x06inputs\x18\x02 \x03(\x0b\x32\x13.clarifai.api.InputB\x04\x80\xb5\x18\x01\"r\n\x1cMultiInputAnnotationResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12%\n\x04hits\x18\x03 \x03(\x0b\x32\x11.clarifai.api.HitB\x04\x80\xb5\x18\x01\"q\n\x18SingleInputCountResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12(\n\x06\x63ounts\x18\x02 \x01(\x0b\x32\x18.clarifai.api.InputCount\"G\n\x14GetInputCountRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\"\xb9\x01\n\x17PostModelOutputsRequest\x12/\n\x0buser_app_id\x18\x01 
\x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x10\n\x08model_id\x18\x02 \x01(\t\x12\x12\n\nversion_id\x18\x03 \x01(\t\x12#\n\x06inputs\x18\x04 \x03(\x0b\x32\x13.clarifai.api.Input\x12\"\n\x05model\x18\x05 \x01(\x0b\x32\x13.clarifai.api.Model\"\x8f\x01\n\x16ListModelInputsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x10\n\x08model_id\x18\x02 \x01(\t\x12\x12\n\nversion_id\x18\x03 \x01(\t\x12\x0c\n\x04page\x18\x04 \x01(\r\x12\x10\n\x08per_page\x18\x05 \x01(\r\"P\n\rGetKeyRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0e\n\x06key_id\x18\x02 \x01(\t\"b\n\x0fListKeysRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0c\n\x04page\x18\x02 \x01(\r\x12\x10\n\x08per_page\x18\x03 \x01(\r\"e\n\x12ListAppKeysRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0c\n\x04page\x18\x02 \x01(\r\x12\x10\n\x08per_page\x18\x03 \x01(\r\"c\n\x0fPostKeysRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x1f\n\x04keys\x18\x02 \x03(\x0b\x32\x11.clarifai.api.Key\"S\n\x10\x44\x65leteKeyRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0e\n\x06key_id\x18\x02 \x01(\t\"t\n\x10PatchKeysRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x1f\n\x04keys\x18\x02 \x03(\x0b\x32\x11.clarifai.api.Key\x12\x0e\n\x06\x61\x63tion\x18\x03 \x01(\t\"`\n\x11SingleKeyResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\x1e\n\x03key\x18\x02 \x01(\x0b\x32\x11.clarifai.api.Key\"f\n\x10MultiKeyResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12%\n\x04keys\x18\x02 \x03(\x0b\x32\x11.clarifai.api.KeyB\x04\x80\xb5\x18\x01\"\x92\x01\n\x0fGetModelRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x10\n\x08model_id\x18\x02 \x01(\t\x12\x12\n\nversion_id\x18\x03 
\x01(\t\x12\x10\n\x08language\x18\x04 \x01(\t\x12\x16\n\x0etrained_before\x18\x05 \x01(\x08\"\x85\x03\n\x11ListModelsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0c\n\x04page\x18\x02 \x01(\r\x12\x10\n\x08per_page\x18\x03 \x01(\r\x12\x16\n\x0esort_ascending\x18\n \x01(\x08\x12\x16\n\x0csort_by_name\x18\x0b \x01(\x08H\x00\x12\x1c\n\x12sort_by_num_inputs\x18\x0c \x01(\x08H\x00\x12\x1d\n\x13sort_by_modified_at\x18\r \x01(\x08H\x00\x12\r\n\x05query\x18\x0e \x01(\t\x12\x10\n\x04name\x18\x05 \x01(\tB\x02\x18\x01\x12\x15\n\rmodel_type_id\x18\x06 \x01(\t\x12\x14\n\x0ctrained_only\x18\x07 \x01(\x08\x12\x14\n\x0cinput_fields\x18\x08 \x03(\t\x12\x15\n\routput_fields\x18\t \x03(\t\x12\x0f\n\x07license\x18\x0f \x01(\t\x12\x15\n\rfeatured_only\x18\x10 \x01(\x08\x42\t\n\x07sort_byJ\x04\x08\x04\x10\x05\"\x91\x01\n\x11PostModelsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12&\n\x05model\x18\x02 \x01(\x0b\x32\x13.clarifai.api.ModelB\x02\x18\x01\x12#\n\x06models\x18\x03 \x03(\x0b\x32\x13.clarifai.api.Model\"z\n\x12PatchModelsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12#\n\x06models\x18\x02 \x03(\x0b\x32\x13.clarifai.api.Model\x12\x0e\n\x06\x61\x63tion\x18\x03 \x01(\t\"W\n\x12\x44\x65leteModelRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x10\n\x08model_id\x18\x02 \x01(\t\"g\n\x13\x44\x65leteModelsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0b\n\x03ids\x18\x02 \x03(\t\x12\x12\n\ndelete_all\x18\x03 \x01(\x08\"\xa9\x01\n\x19PostModelsSearchesRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12-\n\x0bmodel_query\x18\x02 \x01(\x0b\x32\x18.clarifai.api.ModelQuery\x12,\n\npagination\x18\x03 \x01(\x0b\x32\x18.clarifai.api.Pagination\"f\n\x13SingleModelResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\"\n\x05model\x18\x02 
\x01(\x0b\x32\x13.clarifai.api.Model\"l\n\x12MultiModelResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12)\n\x06models\x18\x02 \x03(\x0b\x32\x13.clarifai.api.ModelB\x04\x80\xb5\x18\x01\"\xa2\x01\n\x19PatchModelVersionsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x10\n\x08model_id\x18\x02 \x01(\t\x12\x32\n\x0emodel_versions\x18\x03 \x03(\x0b\x32\x1a.clarifai.api.ModelVersion\x12\x0e\n\x06\x61\x63tion\x18\x04 \x01(\t\"o\n\x16GetModelVersionRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x10\n\x08model_id\x18\x02 \x01(\t\x12\x12\n\nversion_id\x18\x03 \x01(\t\"\x92\x01\n\x18ListModelVersionsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x10\n\x08model_id\x18\x02 \x01(\t\x12\x0c\n\x04page\x18\x03 \x01(\r\x12\x10\n\x08per_page\x18\x04 \x01(\r\x12\x13\n\x0b\x63oncept_ids\x18\x05 \x03(\t\"r\n\x19\x44\x65leteModelVersionRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x10\n\x08model_id\x18\x03 \x01(\t\x12\x12\n\nversion_id\x18\x04 \x01(\t\"|\n\x1aSingleModelVersionResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\x31\n\rmodel_version\x18\x02 \x01(\x0b\x32\x1a.clarifai.api.ModelVersion\"\x82\x01\n\x19MultiModelVersionResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\x38\n\x0emodel_versions\x18\x02 \x03(\x0b\x32\x1a.clarifai.api.ModelVersionB\x04\x80\xb5\x18\x01\"\xc8\x02\n\x18PostModelVersionsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x10\n\x08model_id\x18\x02 \x01(\t\x12\x32\n\x0emodel_versions\x18\x03 \x03(\x0b\x32\x1a.clarifai.api.ModelVersion\x12(\n\x06search\x18\x04 \x01(\x0b\x32\x14.clarifai.api.SearchB\x02\x18\x01\x12*\n\x0ctrain_search\x18\x05 \x01(\x0b\x32\x14.clarifai.api.Search\x12)\n\x0btest_search\x18\x06 
\x01(\x0b\x32\x14.clarifai.api.Search\x12\x1f\n\x17\x65valuate_after_training\x18\x07 \x01(\x08\x12\x13\n\x0b\x64\x65scription\x18\x08 \x01(\t\"\xb8\x01\n\x1ePostModelVersionMetricsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x10\n\x08model_id\x18\x02 \x01(\t\x12\x12\n\nversion_id\x18\x03 \x01(\t\x12\x14\n\x0cmax_examples\x18\x04 \x01(\r\x12)\n\x0btest_search\x18\x05 \x01(\x0b\x32\x14.clarifai.api.Search\"\xa1\x01\n\x1dGetModelVersionMetricsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x10\n\x08model_id\x18\x02 \x01(\t\x12\x12\n\nversion_id\x18\x03 \x01(\t\x12)\n\x06\x66ields\x18\x04 \x01(\x0b\x32\x19.clarifai.api.FieldsValue\"]\n\x13GetModelTypeRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x15\n\rmodel_type_id\x18\x02 \x01(\t\"h\n\x15ListModelTypesRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0c\n\x04page\x18\x02 \x01(\r\x12\x10\n\x08per_page\x18\x03 \x01(\r\"\x1f\n\x1dListOpenSourceLicensesRequest\"_\n\x1eListOpenSourceLicensesResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\x10\n\x08licenses\x18\x02 \x03(\t\"y\n\x17SingleModelTypeResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\x31\n\nmodel_type\x18\x02 \x01(\x0b\x32\x17.clarifai.api.ModelTypeB\x04\x80\xb5\x18\x01\"y\n\x16MultiModelTypeResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\x32\n\x0bmodel_types\x18\x02 \x03(\x0b\x32\x17.clarifai.api.ModelTypeB\x04\x80\xb5\x18\x01\"\x95\x01\n\"GetModelVersionInputExampleRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x10\n\x08model_id\x18\x02 \x01(\t\x12\x18\n\x10model_version_id\x18\x03 \x01(\t\x12\x12\n\nexample_id\x18\x04 \x01(\t\"\xa3\x01\n$ListModelVersionInputExamplesRequest\x12/\n\x0buser_app_id\x18\x01 
\x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x10\n\x08model_id\x18\x02 \x01(\t\x12\x18\n\x10model_version_id\x18\x03 \x01(\t\x12\x0c\n\x04page\x18\x04 \x01(\r\x12\x10\n\x08per_page\x18\x05 \x01(\r\"\xa2\x01\n&SingleModelVersionInputExampleResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12K\n\x1bmodel_version_input_example\x18\x02 \x01(\x0b\x32&.clarifai.api.ModelVersionInputExample\"\xa2\x01\n%MultiModelVersionInputExampleResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12L\n\x1cmodel_version_input_examples\x18\x02 \x03(\x0b\x32&.clarifai.api.ModelVersionInputExample\"\x7f\n\x1aListModelReferencesRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x10\n\x08model_id\x18\x02 \x01(\t\x12\x0c\n\x04page\x18\x03 \x01(\r\x12\x10\n\x08per_page\x18\x04 \x01(\r\"\x82\x01\n\x1bMultiModelReferenceResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\x36\n\x10model_references\x18\x02 \x03(\x0b\x32\x1c.clarifai.api.ModelReference\"o\n\x13MultiOutputResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12+\n\x07outputs\x18\x02 \x03(\x0b\x32\x14.clarifai.api.OutputB\x04\x80\xb5\x18\x01\"V\n\x11ListScopesRequest\x12\x10\n\x08key_type\x18\x01 \x01(\t\x12/\n\x0buser_app_id\x18\x02 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\"B\n\x0fMyScopesRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\"\xa5\x01\n\x16MultiScopeDepsResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12+\n\nscope_deps\x18\x02 \x03(\x0b\x32\x17.clarifai.api.ScopeDeps\x12\x31\n\rendpoint_deps\x18\x03 \x03(\x0b\x32\x1a.clarifai.api.EndpointDeps\"\x84\x01\n\x12MultiScopeResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\x0e\n\x06scopes\x18\x02 \x03(\t\x12\x1e\n\x03\x61pp\x18\x03 \x01(\x0b\x32\x11.clarifai.api.App\x12\x11\n\tendpoints\x18\x04 
\x03(\t\"O\n\x10GetSearchRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\n\n\x02id\x18\x02 \x01(\t\"f\n\x13ListSearchesRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0c\n\x04page\x18\x02 \x01(\r\x12\x10\n\x08per_page\x18\x03 \x01(\r\"\xc4\x01\n\x13PostSearchesRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12&\n\x05query\x18\x02 \x01(\x0b\x32\x13.clarifai.api.QueryB\x02\x18\x01\x12&\n\x08searches\x18\x03 \x03(\x0b\x32\x14.clarifai.api.Search\x12,\n\npagination\x18\x04 \x01(\x0b\x32\x18.clarifai.api.Pagination\"\x84\x01\n\x17PostSearchesByIDRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\n\n\x02id\x18\x02 \x01(\t\x12,\n\npagination\x18\x03 \x01(\x0b\x32\x18.clarifai.api.Pagination\"R\n\x13\x44\x65leteSearchRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\n\n\x02id\x18\x02 \x01(\t\"\xa7\x01\n\x1ePostAnnotationsSearchesRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12&\n\x08searches\x18\x02 \x03(\x0b\x32\x14.clarifai.api.Search\x12,\n\npagination\x18\x03 \x01(\x0b\x32\x18.clarifai.api.Pagination\"c\n$DeleteAnnotationSearchMetricsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\n\n\x02id\x18\x02 \x01(\t\"\xa2\x01\n\x19PostInputsSearchesRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12&\n\x08searches\x18\x02 \x03(\x0b\x32\x14.clarifai.api.Search\x12,\n\npagination\x18\x03 \x01(\x0b\x32\x18.clarifai.api.Pagination\"i\n\x14SingleSearchResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12$\n\x06search\x18\x05 \x01(\x0b\x32\x14.clarifai.api.Search\"\xc1\x01\n\x13MultiSearchResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\n\n\x02id\x18\x02 \x01(\t\x12%\n\x04hits\x18\x03 
\x03(\x0b\x32\x11.clarifai.api.HitB\x04\x80\xb5\x18\x01\x12\"\n\x05query\x18\x04 \x01(\x0b\x32\x13.clarifai.api.Query\x12&\n\x08searches\x18\x05 \x03(\x0b\x32\x14.clarifai.api.Search\"\x94\x02\n\"PostAnnotationSearchMetricsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\n\n\x02id\x18\x02 \x01(\t\x12*\n\x0cground_truth\x18\x03 \x01(\x0b\x32\x14.clarifai.api.Search\x12,\n\x0esearch_to_eval\x18\x04 \x01(\x0b\x32\x14.clarifai.api.Search\x12 \n\x04\x64\x61ta\x18\x05 \x01(\x0b\x32\x12.clarifai.api.Data\x12\x35\n\x0f\x65valuation_type\x18\x06 \x01(\x0e\x32\x1c.clarifai.api.EvaluationType\"`\n!GetAnnotationSearchMetricsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\n\n\x02id\x18\x02 \x01(\t\"U\n\"ListAnnotationSearchMetricsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\"\x9d\x01\n$MultiAnnotationSearchMetricsResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12H\n\x19\x61nnotation_search_metrics\x18\x02 \x03(\x0b\x32%.clarifai.api.AnnotationSearchMetrics\"x\n\x1bPostValidatePasswordRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12(\n\x08password\x18\x02 \x01(\x0b\x32\x16.clarifai.api.Password\"\x8e\x01\n SinglePasswordValidationResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12=\n\x13password_violations\x18\x02 \x01(\x0b\x32 .clarifai.api.PasswordViolations\"|\n\x12GetWorkflowRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x13\n\x0bworkflow_id\x18\x02 \x01(\t\x12 \n\x18\x66\x61vor_clarifai_workflows\x18\x03 \x01(\x08\"\xf5\x01\n\x14ListWorkflowsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0c\n\x04page\x18\x02 \x01(\r\x12\x10\n\x08per_page\x18\x03 \x01(\r\x12\x16\n\x0esort_ascending\x18\x05 \x01(\x08\x12\x14\n\nsort_by_id\x18\x06 \x01(\x08H\x00\x12\x1d\n\x13sort_by_modified_at\x18\x07 
\x01(\x08H\x00\x12\r\n\x05query\x18\x08 \x01(\t\x12\x0e\n\x02id\x18\x04 \x01(\tB\x02\x18\x01\x12\x15\n\rfeatured_only\x18\t \x01(\x08\x42\t\n\x07sort_by\"r\n\x14PostWorkflowsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12)\n\tworkflows\x18\x02 \x03(\x0b\x32\x16.clarifai.api.Workflow\"\x83\x01\n\x15PatchWorkflowsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12)\n\tworkflows\x18\x02 \x03(\x0b\x32\x16.clarifai.api.Workflow\x12\x0e\n\x06\x61\x63tion\x18\x03 \x01(\t\"]\n\x15\x44\x65leteWorkflowRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x13\n\x0bworkflow_id\x18\x02 \x01(\t\"j\n\x16\x44\x65leteWorkflowsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0b\n\x03ids\x18\x02 \x03(\t\x12\x12\n\ndelete_all\x18\x03 \x01(\x08\"o\n\x16SingleWorkflowResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12(\n\x08workflow\x18\x02 \x01(\x0b\x32\x16.clarifai.api.Workflow\"u\n\x15MultiWorkflowResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12/\n\tworkflows\x18\x02 \x03(\x0b\x32\x16.clarifai.api.WorkflowB\x04\x80\xb5\x18\x01\"\xa5\x02\n\x1aPostWorkflowResultsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x13\n\x0bworkflow_id\x18\x02 \x01(\t\x12\x12\n\nversion_id\x18\x07 \x01(\t\x12#\n\x06inputs\x18\x03 \x03(\x0b\x32\x13.clarifai.api.Input\x12\x31\n\routput_config\x18\x04 \x01(\x0b\x32\x1a.clarifai.api.OutputConfig\x12 \n\x18\x66\x61vor_clarifai_workflows\x18\x05 \x01(\x08\x12\x33\n\x0eworkflow_state\x18\x06 \x01(\x0b\x32\x1b.clarifai.api.WorkflowState\"\xd8\x01\n\x1bPostWorkflowResultsResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12(\n\x08workflow\x18\x02 \x01(\x0b\x32\x16.clarifai.api.Workflow\x12-\n\x07results\x18\x03 \x03(\x0b\x32\x1c.clarifai.api.WorkflowResult\x12\x33\n\x0eworkflow_state\x18\x04 
\x01(\x0b\x32\x1b.clarifai.api.WorkflowState\"\x91\x02\n$PostWorkflowResultsSimilarityRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x13\n\x0bworkflow_id\x18\x02 \x01(\t\x12\x12\n\nversion_id\x18\x07 \x01(\t\x12\x18\n\x10model_version_id\x18\x03 \x01(\t\x12)\n\x0cprobe_inputs\x18\x04 \x03(\x0b\x32\x13.clarifai.api.Input\x12(\n\x0bpool_inputs\x18\x05 \x03(\x0b\x32\x13.clarifai.api.Input\x12 \n\x18\x66\x61vor_clarifai_workflows\x18\x06 \x01(\x08\"\x8e\x01\n%PostWorkflowResultsSimilarityResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\x38\n\x07results\x18\x02 \x03(\x0b\x32\'.clarifai.api.WorkflowResultsSimilarity\"\x83\x01\n\x1bListWorkflowVersionsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x13\n\x0bworkflow_id\x18\x02 \x01(\t\x12\x0c\n\x04page\x18\x03 \x01(\r\x12\x10\n\x08per_page\x18\x04 \x01(\r\"~\n\x19GetWorkflowVersionRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x13\n\x0bworkflow_id\x18\x02 \x01(\t\x12\x1b\n\x13workflow_version_id\x18\x03 \x01(\t\"\x83\x01\n\x1d\x44\x65leteWorkflowVersionsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x13\n\x0bworkflow_id\x18\x02 \x01(\t\x12\x1c\n\x14workflow_version_ids\x18\x03 \x03(\t\"\xae\x01\n\x1cPatchWorkflowVersionsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x13\n\x0bworkflow_id\x18\x02 \x01(\t\x12\x38\n\x11workflow_versions\x18\x03 \x03(\x0b\x32\x1d.clarifai.api.WorkflowVersion\x12\x0e\n\x06\x61\x63tion\x18\x04 \x01(\t\"\x8b\x01\n\x1cMultiWorkflowVersionResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12>\n\x11workflow_versions\x18\x02 \x03(\x0b\x32\x1d.clarifai.api.WorkflowVersionB\x04\x80\xb5\x18\x01\"\x85\x01\n\x1dSingleWorkflowVersionResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\x37\n\x10workflow_version\x18\x02 
\x01(\x0b\x32\x1d.clarifai.api.WorkflowVersion\"\x85\x01\n\x1aPostAppDuplicationsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x36\n\x10\x61pp_duplications\x18\x02 \x03(\x0b\x32\x1c.clarifai.api.AppDuplication\"g\n\x18GetAppDuplicationRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x1a\n\x12\x61pp_duplication_id\x18\x02 \x01(\t\"m\n\x1aListAppDuplicationsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0c\n\x04page\x18\x02 \x01(\r\x12\x10\n\x08per_page\x18\x03 \x01(\r\"\x83\x01\n\x1cMultiAppDuplicationsResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\x36\n\x10\x61pp_duplications\x18\x02 \x03(\x0b\x32\x1c.clarifai.api.AppDuplication\"\x82\x01\n\x1cSingleAppDuplicationResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\x35\n\x0f\x61pp_duplication\x18\x02 \x01(\x0b\x32\x1c.clarifai.api.AppDuplication\"f\n\x10PostTasksRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12!\n\x05tasks\x18\x02 \x03(\x0b\x32\x12.clarifai.api.Task\"R\n\x0eGetTaskRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0f\n\x07task_id\x18\x02 \x01(\t\"\xba\x01\n\x10ListTasksRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0c\n\x04page\x18\x02 \x01(\r\x12\x10\n\x08per_page\x18\x03 \x01(\r\x12\x17\n\x0fworker_user_ids\x18\x04 \x03(\t\x12\x17\n\x0freview_user_ids\x18\x05 \x03(\t\x12#\n\x1bincluding_label_order_tasks\x18\x06 \x01(\x08\"w\n\x11PatchTasksRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12!\n\x05tasks\x18\x02 \x03(\x0b\x32\x12.clarifai.api.Task\x12\x0e\n\x06\x61\x63tion\x18\x03 \x01(\t\"R\n\x12\x44\x65leteTasksRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0b\n\x03ids\x18\x02 \x03(\t\"i\n\x11MultiTaskResponse\x12+\n\x06status\x18\x01 
\x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\'\n\x05tasks\x18\x02 \x03(\x0b\x32\x12.clarifai.api.TaskB\x04\x80\xb5\x18\x01\"c\n\x12SingleTaskResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12 \n\x04task\x18\x02 \x01(\x0b\x32\x12.clarifai.api.Task\"i\n\x13GetTaskCountRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0f\n\x07task_id\x18\x02 \x01(\t\x12\x10\n\x08user_ids\x18\x03 \x03(\t\"\x9d\x01\n\x17SingleTaskCountResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\x0e\n\x06\x61pp_id\x18\x02 \x01(\t\x12\x0f\n\x07task_id\x18\x03 \x01(\t\x12\x34\n\x06\x63ounts\x18\x04 \x03(\x0b\x32$.clarifai.api.TaskStatusCountPerUser\"u\n\x15PostCollectorsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12+\n\ncollectors\x18\x02 \x03(\x0b\x32\x17.clarifai.api.Collector\"\x86\x01\n\x16PatchCollectorsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12+\n\ncollectors\x18\x02 \x03(\x0b\x32\x17.clarifai.api.Collector\x12\x0e\n\x06\x61\x63tion\x18\x03 \x01(\t\"k\n\x17\x44\x65leteCollectorsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0b\n\x03ids\x18\x02 \x03(\t\x12\x12\n\ndelete_all\x18\x03 \x01(\x08\"\\\n\x13GetCollectorRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x14\n\x0c\x63ollector_id\x18\x02 \x01(\t\"h\n\x15ListCollectorsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x0c\n\x04page\x18\x02 \x01(\r\x12\x10\n\x08per_page\x18\x03 \x01(\r\"r\n\x16MultiCollectorResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12+\n\ncollectors\x18\x02 \x03(\x0b\x32\x17.clarifai.api.Collector\"r\n\x17SingleCollectorResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12*\n\tcollector\x18\x02 
\x01(\x0b\x32\x17.clarifai.api.Collector\"v\n\x15PostStatValuesRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12,\n\x0bstat_values\x18\x02 \x03(\x0b\x32\x17.clarifai.api.StatValue\"y\n\x16MultiStatValueResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12\x32\n\x0bstat_values\x18\x02 \x03(\x0b\x32\x17.clarifai.api.StatValueB\x04\x80\xb5\x18\x01\"\x9e\x01\n\x1ePostStatValuesAggregateRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12K\n\x1cstat_value_aggregate_queries\x18\x02 \x03(\x0b\x32%.clarifai.api.StatValueAggregateQuery\"\x9c\x01\n\x1fMultiStatValueAggregateResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12L\n\x1cstat_value_aggregate_results\x18\x02 \x03(\x0b\x32&.clarifai.api.StatValueAggregateResult\"w\n\x1ePostTrendingMetricsViewRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x11\n\tview_type\x18\x02 \x01(\t\x12\x11\n\tobject_id\x18\x03 \x01(\t\"\x85\x01\n\x1fListTrendingMetricsViewsRequest\x12/\n\x0buser_app_id\x18\x01 \x01(\x0b\x32\x1a.clarifai.api.UserAppIDSet\x12\x11\n\tview_type\x18\x02 \x01(\t\x12\x0c\n\x04page\x18\x03 \x01(\r\x12\x10\n\x08per_page\x18\x04 \x01(\r\"~\n MultiTrendingMetricsViewResponse\x12+\n\x06status\x18\x01 \x01(\x0b\x32\x1b.clarifai.api.status.Status\x12-\n\x07metrics\x18\x02 
\x03(\x0b\x32\x1c.clarifai.api.TrendingMetric*p\n\x1cOrganizationInvitationStatus\x12\x0b\n\x07NOT_SET\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0c\n\x08\x41\x43\x43\x45PTED\x10\x02\x12\r\n\tCANCELLED\x10\x03\x12\x0c\n\x08\x44\x45\x43LINED\x10\x04\x12\x0b\n\x07\x45XPIRED\x10\x05\x32\xe8\xd5\x01\n\x02V2\x12\xaa\x02\n\x14ListConceptRelations\x12).clarifai.api.ListConceptRelationsRequest\x1a*.clarifai.api.MultiConceptRelationResponse\"\xba\x01\x82\xd3\xe4\x93\x02\xab\x01\x12Y/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/{concept_id}/relationsZN\x12L/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/relations\x98\x9c\'\x05\x90\x9c\'\x0b\x12\xdf\x01\n\x14PostConceptRelations\x12).clarifai.api.PostConceptRelationsRequest\x1a*.clarifai.api.MultiConceptRelationResponse\"p\x82\xd3\xe4\x93\x02^\"Y/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/{concept_id}/relations:\x01*\x98\x9c\'\x05\x90\x9c\'\n\x90\x9c\'\x0b\x12\xde\x01\n\x16\x44\x65leteConceptRelations\x12+.clarifai.api.DeleteConceptRelationsRequest\x1a!.clarifai.api.status.BaseResponse\"t\x82\xd3\xe4\x93\x02^*Y/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/{concept_id}/relations:\x01*\x98\x9c\'\x05\x90\x9c\'\r\x90\x9c\'\n\x90\x9c\'\x0b\x12\xdc\x01\n\x10GetConceptCounts\x12%.clarifai.api.GetConceptCountsRequest\x1a\'.clarifai.api.MultiConceptCountResponse\"x\x82\xd3\xe4\x93\x02\x62\x12I/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/statusZ\x15\x12\x13/v2/concepts/status\x98\x9c\'\x02\x90\x9c\'&\x90\x9c\'\x0b\x90\x9c\'\x05\x12\xd0\x01\n\nGetConcept\x12\x1f.clarifai.api.GetConceptRequest\x1a#.clarifai.api.SingleConceptResponse\"|\x82\xd3\xe4\x93\x02n\x12O/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/{concept_id}Z\x1b\x12\x19/v2/concepts/{concept_id}\x98\x9c\'\x02\x90\x9c\'\x0b\x12\xb9\x01\n\x0cListConcepts\x12!.clarifai.api.ListConceptsRequest\x1a\".clarifai.api.MultiConceptResponse\"b\x82\xd3\xe4\x93\x02T\x1
2\x42/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/conceptsZ\x0e\x12\x0c/v2/concepts\x98\x9c\'\x02\x90\x9c\'\x0b\x12\xea\x01\n\x14PostConceptsSearches\x12).clarifai.api.PostConceptsSearchesRequest\x1a\".clarifai.api.MultiConceptResponse\"\x82\x01\x82\xd3\xe4\x93\x02l\"K/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/searches:\x01*Z\x1a\"\x15/v2/concepts/searches:\x01*\x98\x9c\'\x02\x90\x9c\'\x0b\x90\x9c\'\x0f\x90\x9c\'\x13\x12\xc3\x01\n\x0cPostConcepts\x12!.clarifai.api.PostConceptsRequest\x1a\".clarifai.api.MultiConceptResponse\"l\x82\xd3\xe4\x93\x02Z\"B/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts:\x01*Z\x11\"\x0c/v2/concepts:\x01*\x98\x9c\'\x02\x90\x9c\'\n\x90\x9c\'\x0b\x12\xc5\x01\n\rPatchConcepts\x12\".clarifai.api.PatchConceptsRequest\x1a\".clarifai.api.MultiConceptResponse\"l\x82\xd3\xe4\x93\x02Z2B/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts:\x01*Z\x11\x32\x0c/v2/concepts:\x01*\x98\x9c\'\x02\x90\x9c\'\n\x90\x9c\'\x0b\x12\x94\x02\n\x12GetConceptLanguage\x12\'.clarifai.api.GetConceptLanguageRequest\x1a+.clarifai.api.SingleConceptLanguageResponse\"\xa7\x01\x82\xd3\xe4\x93\x02\x98\x01\x12\x64/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/{concept_id}/languages/{language}Z0\x12./v2/concepts/{concept_id}/languages/{language}\x98\x9c\'\x02\x90\x9c\'\x0b\x12\x81\x02\n\x14ListConceptLanguages\x12).clarifai.api.ListConceptLanguagesRequest\x1a*.clarifai.api.MultiConceptLanguageResponse\"\x91\x01\x82\xd3\xe4\x93\x02\x82\x01\x12Y/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/{concept_id}/languagesZ%\x12#/v2/concepts/{concept_id}/languages\x98\x9c\'\x02\x90\x9c\'\x0b\x12\x8b\x02\n\x14PostConceptLanguages\x12).clarifai.api.PostConceptLanguagesRequest\x1a*.clarifai.api.MultiConceptLanguageResponse\"\x9b\x01\x82\xd3\xe4\x93\x02\x88\x01\"Y/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/{concept_id}/languages:\x01*Z(\"#/v2/concepts/{concept_id}/la
nguages:\x01*\x98\x9c\'\x02\x90\x9c\'\n\x90\x9c\'\x0b\x12\x8d\x02\n\x15PatchConceptLanguages\x12*.clarifai.api.PatchConceptLanguagesRequest\x1a*.clarifai.api.MultiConceptLanguageResponse\"\x9b\x01\x82\xd3\xe4\x93\x02\x88\x01\x32Y/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/{concept_id}/languages:\x01*Z(2#/v2/concepts/{concept_id}/languages:\x01*\x98\x9c\'\x02\x90\x9c\'\n\x90\x9c\'\x0b\x12\xf1\x01\n\x13ListKnowledgeGraphs\x12(.clarifai.api.ListKnowledgeGraphsRequest\x1a).clarifai.api.MultiKnowledgeGraphResponse\"\x84\x01\x82\xd3\xe4\x93\x02v\x12S/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/knowledge_graphsZ\x1f\x12\x1d/v2/concepts/knowledge_graphs\x98\x9c\'\x02\x90\x9c\'\x0b\x12\xfb\x01\n\x13PostKnowledgeGraphs\x12(.clarifai.api.PostKnowledgeGraphsRequest\x1a).clarifai.api.MultiKnowledgeGraphResponse\"\x8e\x01\x82\xd3\xe4\x93\x02|\"S/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/knowledge_graphs:\x01*Z\"\"\x1d/v2/concepts/knowledge_graphs:\x01*\x98\x9c\'\x02\x90\x9c\'\n\x90\x9c\'\x0b\x12\xfe\x01\n\x16PostConceptMappingJobs\x12+.clarifai.api.PostConceptMappingJobsRequest\x1a,.clarifai.api.MultiConceptMappingJobResponse\"\x88\x01\x82\xd3\xe4\x93\x02v\"P/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/mappings/jobs:\x01*Z\x1f\"\x1a/v2/concepts/mappings/jobs:\x01*\x98\x9c\'\x02\x90\x9c\'\n\x90\x9c\'\x0b\x12\x93\x02\n\rGetAnnotation\x12\".clarifai.api.GetAnnotationRequest\x1a&.clarifai.api.SingleAnnotationResponse\"\xb5\x01\x82\xd3\xe4\x93\x02\x9e\x01\x12g/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/inputs/{input_id}/annotations/{annotation_id}Z3\x12\x31/v2/inputs/{input_id}/annotations/{annotation_id}\x98\x9c\'\x02\x90\x9c\'&\x90\x9c\'\x0b\x90\x9c\'\x0f\x12\xd0\x01\n\x0fListAnnotations\x12$.clarifai.api.ListAnnotationsRequest\x1a%.clarifai.api.MultiAnnotationResponse\"p\x82\xd3\xe4\x93\x02Z\x12\x45/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/annotationsZ\x11\x12\x0
f/v2/annotations\x98\x9c\'\x02\x90\x9c\'&\x90\x9c\'\x0b\x90\x9c\'\x0f\x12\xe3\x01\n\x0fPostAnnotations\x12$.clarifai.api.PostAnnotationsRequest\x1a%.clarifai.api.MultiAnnotationResponse\"\x82\x01\x82\xd3\xe4\x93\x02`\"E/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/annotations:\x01*Z\x14\"\x0f/v2/annotations:\x01*\x98\x9c\'\x02\x90\x9c\'\x05\x90\x9c\'%\x90\x9c\'&\x90\x9c\'\x0b\x90\x9c\'\x0f\x90\x9c\'\x13\x12\xe5\x01\n\x10PatchAnnotations\x12%.clarifai.api.PatchAnnotationsRequest\x1a%.clarifai.api.MultiAnnotationResponse\"\x82\x01\x82\xd3\xe4\x93\x02`2E/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/annotations:\x01*Z\x14\x32\x0f/v2/annotations:\x01*\x98\x9c\'\x02\x90\x9c\'\x05\x90\x9c\'%\x90\x9c\'&\x90\x9c\'\x0b\x90\x9c\'\x13\x90\x9c\'\x0f\x12\xf3\x01\n\x16PatchAnnotationsStatus\x12+.clarifai.api.PatchAnnotationsStatusRequest\x1a,.clarifai.api.PatchAnnotationsStatusResponse\"~\x82\xd3\xe4\x93\x02`2[/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/task/{task_id}/annotations/status:\x01*\x98\x9c\'\x02\x90\x9c\'%\x90\x9c\'&\x90\x9c\'\x0b\x90\x9c\'\x13\x90\x9c\'\x0f\x12\x9c\x02\n\x10\x44\x65leteAnnotation\x12%.clarifai.api.DeleteAnnotationRequest\x1a!.clarifai.api.status.BaseResponse\"\xbd\x01\x82\xd3\xe4\x93\x02\x9e\x01*g/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/inputs/{input_id}/annotations/{annotation_id}Z3*1/v2/inputs/{input_id}/annotations/{annotation_id}\x98\x9c\'\x02\x90\x9c\'%\x90\x9c\'(\x90\x9c\'&\x90\x9c\'\x13\x90\x9c\'\x0f\x12\xd0\x02\n\x11\x44\x65leteAnnotations\x12&.clarifai.api.DeleteAnnotationsRequest\x1a!.clarifai.api.status.BaseResponse\"\xef\x01\x82\xd3\xe4\x93\x02\xd0\x01*L/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/inputs/annotations:\x01*Z\x1b*\x16/v2/inputs/annotations:\x01*ZJ*E/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/annotations:\x01*Z\x14*\x0f/v2/annotations:\x01*\x98\x9c\'\x02\x90\x9c\'%\x90\x9c\'(\x90\x9c\'&\x90\x9c\'\x13\x90\x9c\'\x0f\x12\x81\x02\n\x17PostAnnotationsS
earches\x12,.clarifai.api.PostAnnotationsSearchesRequest\x1a!.clarifai.api.MultiSearchResponse\"\x94\x01\x82\xd3\xe4\x93\x02r\"N/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/annotations/searches:\x01*Z\x1d\"\x18/v2/annotations/searches:\x01*\x98\x9c\'\x02\x90\x9c\'&\x90\x9c\'\x0b\x90\x9c\'\x05\x90\x9c\'\x0f\x90\x9c\'\x03\x90\x9c\'\x13\x12\xd1\x01\n\rGetInputCount\x12\".clarifai.api.GetInputCountRequest\x1a&.clarifai.api.SingleInputCountResponse\"t\x82\xd3\xe4\x93\x02^\x12G/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/inputs/statusZ\x13\x12\x11/v2/inputs/status\x98\x9c\'\x02\x90\x9c\'&\x90\x9c\'\x0b\x90\x9c\'\x05\x12\xc9\x01\n\x0cStreamInputs\x12!.clarifai.api.StreamInputsRequest\x1a .clarifai.api.MultiInputResponse\"t\x82\xd3\xe4\x93\x02^\x12G/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/inputs/streamZ\x13\x12\x11/v2/inputs/stream\x98\x9c\'\x02\x90\x9c\'&\x90\x9c\'\x0b\x90\x9c\'\x05\x12\xfd\x01\n\x0fGetInputSamples\x12$.clarifai.api.GetInputSamplesRequest\x1a*.clarifai.api.MultiInputAnnotationResponse\"\x97\x01\x82\xd3\xe4\x93\x02\x80\x01\x12X/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/tasks/{task_id}/inputs/samplesZ$\x12\"/v2/tasks/{task_id}/inputs/samples\x98\x9c\'\x02\x90\x9c\'&\x90\x9c\'\x0b\x90\x9c\'\x05\x12\xca\x01\n\x08GetInput\x12\x1d.clarifai.api.GetInputRequest\x1a!.clarifai.api.SingleInputResponse\"|\x82\xd3\xe4\x93\x02\x66\x12K/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/inputs/{input_id}Z\x17\x12\x15/v2/inputs/{input_id}\x98\x9c\'\x02\x90\x9c\'&\x90\x9c\'\x0b\x90\x9c\'\x05\x12\xb7\x01\n\nListInputs\x12\x1f.clarifai.api.ListInputsRequest\x1a .clarifai.api.MultiInputResponse\"f\x82\xd3\xe4\x93\x02P\x12@/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/inputsZ\x0c\x12\n/v2/inputs\x98\x9c\'\x02\x90\x9c\'&\x90\x9c\'\x0b\x90\x9c\'\x05\x12\xd2\x01\n\nPostInputs\x12\x1f.clarifai.api.PostInputsRequest\x1a 
.clarifai.api.MultiInputResponse\"\x80\x01\x82\xd3\xe4\x93\x02V\"@/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/inputs:\x01*Z\x0f\"\n/v2/inputs:\x01*\x98\x9c\'\x02\x90\x9c\'%\x90\x9c\'&\x90\x9c\'\x0b\x90\x9c\'\x04\x90\x9c\'\x05\x90\x9c\'\x0f\x90\x9c\'\x02\x90\x9c\'\x13\x12\xcb\x01\n\x0bPatchInputs\x12 .clarifai.api.PatchInputsRequest\x1a .clarifai.api.MultiInputResponse\"x\x82\xd3\xe4\x93\x02V2@/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/inputs:\x01*Z\x0f\x32\n/v2/inputs:\x01*\x98\x9c\'\x02\x90\x9c\'%\x90\x9c\'&\x90\x9c\'\x0b\x90\x9c\'\x04\x90\x9c\'\x05\x90\x9c\'\x0f\x12\xe5\x01\n\x0b\x44\x65leteInput\x12 .clarifai.api.DeleteInputRequest\x1a!.clarifai.api.status.BaseResponse\"\x90\x01\x82\xd3\xe4\x93\x02\x66*K/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/inputs/{input_id}Z\x17*\x15/v2/inputs/{input_id}\x98\x9c\'\x02\x90\x9c\'%\x90\x9c\'(\x90\x9c\'&\x90\x9c\'\x04\x90\x9c\'\x08\x90\x9c\'\x05\x90\x9c\'\x0f\x90\x9c\'\x13\x12\xd7\x01\n\x0c\x44\x65leteInputs\x12!.clarifai.api.DeleteInputsRequest\x1a!.clarifai.api.status.BaseResponse\"\x80\x01\x82\xd3\xe4\x93\x02V*@/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/inputs:\x01*Z\x0f*\n/v2/inputs:\x01*\x98\x9c\'\x02\x90\x9c\'%\x90\x9c\'(\x90\x9c\'&\x90\x9c\'\x04\x90\x9c\'\x08\x90\x9c\'\x05\x90\x9c\'\x0f\x90\x9c\'\x13\x12\xed\x01\n\x12PostInputsSearches\x12\'.clarifai.api.PostInputsSearchesRequest\x1a!.clarifai.api.MultiSearchResponse\"\x8a\x01\x82\xd3\xe4\x93\x02h\"I/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/inputs/searches:\x01*Z\x18\"\x13/v2/inputs/searches:\x01*\x98\x9c\'\x02\x90\x9c\'&\x90\x9c\'\x0b\x90\x9c\'\x05\x90\x9c\'\x0f\x90\x9c\'\x03\x90\x9c\'\x13\x12\x9c\x03\n\x10PostModelOutputs\x12%.clarifai.api.PostModelOutputsRequest\x1a!.clarifai.api.MultiOutputResponse\"\xbd\x02\x82\xd3\xe4\x93\x02\xa6\x02\"i/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/versions/{version_id}/outputs:\x01*Z8\"3/v2/models/{model_id}/versions/{version_id
}/outputs:\x01*ZX\"S/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/outputs:\x01*Z\"\"\x1d/v2/models/{model_id}/outputs:\x01*\x98\x9c\'\x02\x90\x9c\'\x0b\x90\x9c\'\x0f\x90\x9c\'\x02\x12\xe5\x01\n\x0cGetModelType\x12!.clarifai.api.GetModelTypeRequest\x1a%.clarifai.api.SingleModelTypeResponse\"\x8a\x01\x82\xd3\xe4\x93\x02|\x12V/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/types/{model_type_id}Z\"\x12 /v2/models/types/{model_type_id}\x98\x9c\'\x02\x90\x9c\'\x0f\x12\x99\x01\n\x16ListOpenSourceLicenses\x12+.clarifai.api.ListOpenSourceLicensesRequest\x1a,.clarifai.api.ListOpenSourceLicensesResponse\"$\x82\xd3\xe4\x93\x02\x1a\x12\x18/v2/open_source_licenses\x98\x9c\'\x01\x12\xc7\x01\n\x0eListModelTypes\x12#.clarifai.api.ListModelTypesRequest\x1a$.clarifai.api.MultiModelTypeResponse\"j\x82\xd3\xe4\x93\x02\\\x12\x46/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/typesZ\x12\x12\x10/v2/models/types\x98\x9c\'\x02\x90\x9c\'\x0f\x12\xc6\x01\n\x08GetModel\x12\x1d.clarifai.api.GetModelRequest\x1a!.clarifai.api.SingleModelResponse\"x\x82\xd3\xe4\x93\x02\x66\x12K/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}Z\x17\x12\x15/v2/models/{model_id}\x98\x9c\'\x02\x90\x9c\'\x0b\x90\x9c\'\x0f\x12\x96\x03\n\x12GetModelOutputInfo\x12\x1d.clarifai.api.GetModelRequest\x1a!.clarifai.api.SingleModelResponse\"\xbd\x02\x82\xd3\xe4\x93\x02\xaa\x02\x12W/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/output_infoZ#\x12!/v2/models/{model_id}/output_infoZo\x12m/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/versions/{version_id}/output_infoZ9\x12\x37/v2/models/{model_id}/versions/{version_id}/output_info\x98\x9c\'\x02\x90\x9c\'\x0b\x90\x9c\'\x0f\x12\xb3\x01\n\nListModels\x12\x1f.clarifai.api.ListModelsRequest\x1a 
.clarifai.api.MultiModelResponse\"b\x82\xd3\xe4\x93\x02P\x12@/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/modelsZ\x0c\x12\n/v2/models\x98\x9c\'\x02\x90\x9c\'\x0b\x90\x9c\'\x0f\x12\xdb\x01\n\x12PostModelsSearches\x12\'.clarifai.api.PostModelsSearchesRequest\x1a .clarifai.api.MultiModelResponse\"z\x82\xd3\xe4\x93\x02h\"I/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/searches:\x01*Z\x18\"\x13/v2/models/searches:\x01*\x98\x9c\'\x02\x90\x9c\'\x0b\x90\x9c\'\x0f\x12\xc6\x01\n\nPostModels\x12\x1f.clarifai.api.PostModelsRequest\x1a!.clarifai.api.SingleModelResponse\"t\x82\xd3\xe4\x93\x02V\"@/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models:\x01*Z\x0f\"\n/v2/models:\x01*\x98\x9c\'\x02\x90\x9c\'\x0b\x90\x9c\'\x0e\x90\x9c\'\x0f\x90\x9c\'\x1a\x90\x9c\'\x13\x12\xc3\x01\n\x0bPatchModels\x12 .clarifai.api.PatchModelsRequest\x1a .clarifai.api.MultiModelResponse\"p\x82\xd3\xe4\x93\x02V2@/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models:\x01*Z\x0f\x32\n/v2/models:\x01*\x98\x9c\'\x02\x90\x9c\'\x0b\x90\x9c\'\x0e\x90\x9c\'\x0f\x90\x9c\'\x1a\x12\xd9\x01\n\x0b\x44\x65leteModel\x12 .clarifai.api.DeleteModelRequest\x1a!.clarifai.api.status.BaseResponse\"\x84\x01\x82\xd3\xe4\x93\x02\x66*K/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}Z\x17*\x15/v2/models/{model_id}\x98\x9c\'\x02\x90\x9c\'\x0b\x90\x9c\'\x0e\x90\x9c\'\x11\x90\x9c\'\x0f\x90\x9c\'\x13\x12\xca\x01\n\x0c\x44\x65leteModels\x12!.clarifai.api.DeleteModelsRequest\x1a!.clarifai.api.status.BaseResponse\"t\x82\xd3\xe4\x93\x02V*@/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models:\x01*Z\x0f*\n/v2/models:\x01*\x98\x9c\'\x02\x90\x9c\'\x0b\x90\x9c\'\x0e\x90\x9c\'\x11\x90\x9c\'\x0f\x90\x9c\'\x13\x12\x8d\x03\n\x0fListModelInputs\x12$.clarifai.api.ListModelInputsRequest\x1a 
.clarifai.api.MultiInputResponse\"\xb1\x02\x82\xd3\xe4\x93\x02\x96\x02\x12R/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/inputsZ\x1e\x12\x1c/v2/models/{model_id}/inputsZj\x12h/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/versions/{version_id}/inputsZ4\x12\x32/v2/models/{model_id}/versions/{version_id}/inputs\x98\x9c\'\x02\x90\x9c\'&\x90\x9c\'\x0b\x90\x9c\'\x05\x90\x9c\'\x0f\x12\x89\x02\n\x0fGetModelVersion\x12$.clarifai.api.GetModelVersionRequest\x1a(.clarifai.api.SingleModelVersionResponse\"\xa5\x01\x82\xd3\xe4\x93\x02\x92\x01\x12\x61/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/versions/{version_id}Z-\x12+/v2/models/{model_id}/versions/{version_id}\x98\x9c\'\x02\x90\x9c\'\x0b\x90\x9c\'\x0f\x12\xf1\x01\n\x11ListModelVersions\x12&.clarifai.api.ListModelVersionsRequest\x1a\'.clarifai.api.MultiModelVersionResponse\"\x8a\x01\x82\xd3\xe4\x93\x02x\x12T/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/versionsZ 
\x12\x1e/v2/models/{model_id}/versions\x98\x9c\'\x02\x90\x9c\'\x0b\x90\x9c\'\x0f\x12\x8d\x02\n\x11PostModelVersions\x12&.clarifai.api.PostModelVersionsRequest\x1a!.clarifai.api.SingleModelResponse\"\xac\x01\x82\xd3\xe4\x93\x02~\"T/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/versions:\x01*Z#\"\x1e/v2/models/{model_id}/versions:\x01*\x98\x9c\'\x02\x90\x9c\'&\x90\x9c\'\x0b\x90\x9c\'\x05\x90\x9c\'\x0e\x90\x9c\'\x0f\x90\x9c\'\x1a\x90\x9c\'\x13\x90\x9c\'5\x90\x9c\'6\x12\xdb\x01\n\x12PatchModelVersions\x12\'.clarifai.api.PatchModelVersionsRequest\x1a\'.clarifai.api.MultiModelVersionResponse\"s\x82\xd3\xe4\x93\x02Y2T/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/versions:\x01*\x98\x9c\'\x02\x90\x9c\'\x0b\x90\x9c\'\x0f\x90\x9c\'\x0e\x90\x9c\'\x1a\x12\x94\x02\n\x12\x44\x65leteModelVersion\x12\'.clarifai.api.DeleteModelVersionRequest\x1a!.clarifai.api.status.BaseResponse\"\xb1\x01\x82\xd3\xe4\x93\x02\x92\x01*a/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/versions/{version_id}Z-*+/v2/models/{model_id}/versions/{version_id}\x98\x9c\'\x02\x90\x9c\'\x0b\x90\x9c\'\x0e\x90\x9c\'\x11\x90\x9c\'\x0f\x90\x9c\'\x13\x12\xb3\x02\n\x16GetModelVersionMetrics\x12+.clarifai.api.GetModelVersionMetricsRequest\x1a(.clarifai.api.SingleModelVersionResponse\"\xc1\x01\x82\xd3\xe4\x93\x02\xa2\x01\x12i/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/versions/{version_id}/metricsZ5\x12\x33/v2/models/{model_id}/versions/{version_id}/metrics\x98\x9c\'\x02\x90\x9c\'&\x90\x9c\'\x0b\x90\x9c\'\x0f\x90\x9c\'\x05\x90\x9c\'5\x12\xcf\x02\n\x17PostModelVersionMetrics\x12,.clarifai.api.PostModelVersionMetricsRequest\x1a(.clarifai.api.SingleModelVersionResponse\"\xdb\x01\x82\xd3\xe4\x93\x02\xa8\x01\"i/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/versions/{version_id}/metrics:\x01*Z8\"3/v2/models/{model_id}/versions/{version_id}/metrics:\x01*\x98\x9c\'\x02\x90\x9c\'&\x90\x
9c\'\x0b\x90\x9c\'\x05\x90\x9c\'\x0e\x90\x9c\'\x0f\x90\x9c\'\x1a\x90\x9c\'\x02\x90\x9c\'\x13\x90\x9c\'5\x90\x9c\'6\x12\xf7\x01\n\x13ListModelReferences\x12(.clarifai.api.ListModelReferencesRequest\x1a).clarifai.api.MultiModelReferenceResponse\"\x8a\x01\x82\xd3\xe4\x93\x02|\x12V/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/referencesZ\"\x12 /v2/models/{model_id}/references\x98\x9c\'\x02\x90\x9c\'\x0f\x12\xee\x02\n\x1bGetModelVersionInputExample\x12\x30.clarifai.api.GetModelVersionInputExampleRequest\x1a\x34.clarifai.api.SingleModelVersionInputExampleResponse\"\xe6\x01\x82\xd3\xe4\x93\x02\xd7\x01\x12\x83\x01/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/versions/{model_version_id}/input_examples/{example_id}ZO\x12M/v2/models/{model_id}/versions/{model_version_id}/input_examples/{example_id}\x98\x9c\'\x02\x90\x9c\'\x0f\x12\xd6\x02\n\x1dListModelVersionInputExamples\x12\x32.clarifai.api.ListModelVersionInputExamplesRequest\x1a\x33.clarifai.api.MultiModelVersionInputExampleResponse\"\xcb\x01\x82\xd3\xe4\x93\x02\xbc\x01\x12v/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/versions/{model_version_id}/input_examplesZB\x12@/v2/models/{model_id}/versions/{model_version_id}/input_examples\x98\x9c\'\x02\x90\x9c\'\x0f\x12\xdc\x01\n\x0bGetWorkflow\x12 
.clarifai.api.GetWorkflowRequest\x1a$.clarifai.api.SingleWorkflowResponse\"\x84\x01\x82\xd3\xe4\x93\x02r\x12Q/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows/{workflow_id}Z\x1d\x12\x1b/v2/workflows/{workflow_id}\x98\x9c\'\x02\x90\x9c\'\x0f\x90\x9c\'\x13\x12\xc2\x01\n\rListWorkflows\x12\".clarifai.api.ListWorkflowsRequest\x1a#.clarifai.api.MultiWorkflowResponse\"h\x82\xd3\xe4\x93\x02V\x12\x43/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflowsZ\x0f\x12\r/v2/workflows\x98\x9c\'\x02\x90\x9c\'\x0f\x90\x9c\'\x13\x12\xcc\x01\n\rPostWorkflows\x12\".clarifai.api.PostWorkflowsRequest\x1a#.clarifai.api.MultiWorkflowResponse\"r\x82\xd3\xe4\x93\x02\\\"C/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows:\x01*Z\x12\"\r/v2/workflows:\x01*\x98\x9c\'\x02\x90\x9c\'\x0f\x90\x9c\'\x12\x90\x9c\'\x13\x12\xce\x01\n\x0ePatchWorkflows\x12#.clarifai.api.PatchWorkflowsRequest\x1a#.clarifai.api.MultiWorkflowResponse\"r\x82\xd3\xe4\x93\x02\\2C/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows:\x01*Z\x12\x32\r/v2/workflows:\x01*\x98\x9c\'\x02\x90\x9c\'\x0f\x90\x9c\'\x12\x90\x9c\'\x13\x12\xe3\x01\n\x0e\x44\x65leteWorkflow\x12#.clarifai.api.DeleteWorkflowRequest\x1a!.clarifai.api.status.BaseResponse\"\x88\x01\x82\xd3\xe4\x93\x02r*Q/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows/{workflow_id}Z\x1d*\x1b/v2/workflows/{workflow_id}\x98\x9c\'\x02\x90\x9c\'\x12\x90\x9c\'\x15\x90\x9c\'\x13\x12\xce\x01\n\x0f\x44\x65leteWorkflows\x12$.clarifai.api.DeleteWorkflowsRequest\x1a!.clarifai.api.status.BaseResponse\"r\x82\xd3\xe4\x93\x02\\*C/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows:\x01*Z\x12*\r/v2/workflows:\x01*\x98\x9c\'\x02\x90\x9c\'\x12\x90\x9c\'\x15\x90\x9c\'\x13\x12\x86\x03\n\x13PostWorkflowResults\x12(.clarifai.api.PostWorkflowResultsRequest\x1a).clarifai.api.PostWorkflowResultsResponse\"\x99\x02\x82\xd3\xe4\x93\x02\xfe\x01\"o/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows/{
workflow_id}/versions/{version_id}/results:\x01*Z^\"Y/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows/{workflow_id}/results:\x01*Z(\"#/v2/workflows/{workflow_id}/results:\x01*\x98\x9c\'\x02\x90\x9c\'\x0f\x90\x9c\'\x0b\x90\x9c\'\x02\x90\x9c\'\x13\x12\xc5\x03\n\x1dPostWorkflowResultsSimilarity\x12\x32.clarifai.api.PostWorkflowResultsSimilarityRequest\x1a\x33.clarifai.api.PostWorkflowResultsSimilarityResponse\"\xba\x02\x82\xd3\xe4\x93\x02\x9f\x02\"z/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows/{workflow_id}/versions/{version_id}/results/similarity:\x01*Zi\"d/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows/{workflow_id}/results/similarity:\x01*Z3\"./v2/workflows/{workflow_id}/results/similarity:\x01*\x98\x9c\'\x02\x90\x9c\'\x0f\x90\x9c\'\x0b\x90\x9c\'\x02\x90\x9c\'\x13\x12\x87\x02\n\x14ListWorkflowVersions\x12).clarifai.api.ListWorkflowVersionsRequest\x1a*.clarifai.api.MultiWorkflowVersionResponse\"\x97\x01\x82\xd3\xe4\x93\x02\x84\x01\x12Z/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows/{workflow_id}/versionsZ&\x12$/v2/workflows/{workflow_id}/versions\x98\x9c\'\x02\x90\x9c\'\x0f\x90\x9c\'\x13\x12\xb0\x02\n\x12GetWorkflowVersion\x12\'.clarifai.api.GetWorkflowVersionRequest\x1a+.clarifai.api.SingleWorkflowVersionResponse\"\xc3\x01\x82\xd3\xe4\x93\x02\xb0\x01\x12p/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows/{workflow_id}/versions/{workflow_version_id}Z<\x12:/v2/workflows/{workflow_id}/versions/{workflow_version_id}\x98\x9c\'\x02\x90\x9c\'\x0f\x90\x9c\'\x13\x12\x8c\x02\n\x16\x44\x65leteWorkflowVersions\x12+.clarifai.api.DeleteWorkflowVersionsRequest\x1a!.clarifai.api.status.BaseResponse\"\xa1\x01\x82\xd3\xe4\x93\x02\x8a\x01*Z/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows/{workflow_id}/versions:\x01*Z)*$/v2/workflows/{workflow_id}/versions:\x01*\x98\x9c\'\x02\x90\x9c\'\x13\x90\x9c\'\x12\x90\x9c\'\x15\x12\x93\x02\n\x15PatchWorkflowVersions\x12*.clarifai.a
pi.PatchWorkflowVersionsRequest\x1a*.clarifai.api.MultiWorkflowVersionResponse\"\xa1\x01\x82\xd3\xe4\x93\x02\x8a\x01\x32Z/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows/{workflow_id}/versions:\x01*Z)2$/v2/workflows/{workflow_id}/versions:\x01*\x98\x9c\'\x02\x90\x9c\'\x0f\x90\x9c\'\x13\x90\x9c\'\x12\x12\x85\x01\n\x06GetKey\x12\x1b.clarifai.api.GetKeyRequest\x1a\x1f.clarifai.api.SingleKeyResponse\"=\x82\xd3\xe4\x93\x02/\x12-/v2/users/{user_app_id.user_id}/keys/{key_id}\x98\x9c\'\x05\x90\x9c\'0\x12\x7f\n\x08ListKeys\x12\x1d.clarifai.api.ListKeysRequest\x1a\x1e.clarifai.api.MultiKeyResponse\"4\x82\xd3\xe4\x93\x02&\x12$/v2/users/{user_app_id.user_id}/keys\x98\x9c\'\x05\x90\x9c\'0\x12\x9f\x01\n\x0bListAppKeys\x12 .clarifai.api.ListAppKeysRequest\x1a\x1e.clarifai.api.MultiKeyResponse\"N\x82\xd3\xe4\x93\x02@\x12>/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/keys\x98\x9c\'\x05\x90\x9c\'0\x12\x95\x01\n\tDeleteKey\x12\x1e.clarifai.api.DeleteKeyRequest\x1a!.clarifai.api.status.BaseResponse\"E\x82\xd3\xe4\x93\x02/*-/v2/users/{user_app_id.user_id}/keys/{key_id}\x98\x9c\'\x05\x90\x9c\'/\x90\x9c\'1\x90\x9c\'0\x12\x8a\x01\n\x08PostKeys\x12\x1d.clarifai.api.PostKeysRequest\x1a\x1e.clarifai.api.MultiKeyResponse\"?\x82\xd3\xe4\x93\x02)\"$/v2/users/{user_app_id.user_id}/keys:\x01*\x98\x9c\'\x05\x90\x9c\'-\x90\x9c\'/\x90\x9c\'0\x12\x88\x01\n\tPatchKeys\x12\x1e.clarifai.api.PatchKeysRequest\x1a\x1e.clarifai.api.MultiKeyResponse\";\x82\xd3\xe4\x93\x02)2$/v2/users/{user_app_id.user_id}/keys:\x01*\x98\x9c\'\x05\x90\x9c\'/\x90\x9c\'0\x12\xbc\x01\n\x08MyScopes\x12\x1d.clarifai.api.MyScopesRequest\x1a 
.clarifai.api.MultiScopeResponse\"o\x82\xd3\xe4\x93\x02\x65\x12\x42/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/myscopesZ\x0e\x12\x0c/v2/myscopesZ\x0f\x12\r/v2/my_scopes\x98\x9c\'\x02\x12\x87\x01\n\nListScopes\x12\x1f.clarifai.api.ListScopesRequest\x1a$.clarifai.api.MultiScopeDepsResponse\"2\x82\xd3\xe4\x93\x02(\x12&/v2/users/{user_app_id.user_id}/scopes\x98\x9c\'\x03\x12\x95\x01\n\x06GetApp\x12\x1b.clarifai.api.GetAppRequest\x1a\x1f.clarifai.api.SingleAppResponse\"M\x82\xd3\xe4\x93\x02;\x12\x39/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}\x98\x9c\'\x05\x90\x9c\'-\x90\x9c\'\x13\x12\x8f\x01\n\x08ListApps\x12\x1d.clarifai.api.ListAppsRequest\x1a\x1e.clarifai.api.MultiAppResponse\"D\x82\xd3\xe4\x93\x02\x32\x12$/v2/users/{user_app_id.user_id}/appsZ\n\x12\x08/v2/apps\x98\x9c\'\x05\x90\x9c\'-\x90\x9c\'\x13\x12\xa5\x01\n\tDeleteApp\x12\x1e.clarifai.api.DeleteAppRequest\x1a!.clarifai.api.status.BaseResponse\"U\x82\xd3\xe4\x93\x02;*9/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}\x98\x9c\'\x05\x90\x9c\',\x90\x9c\'.\x90\x9c\'-\x90\x9c\'\x13\x12\x92\x01\n\x08PostApps\x12\x1d.clarifai.api.PostAppsRequest\x1a\x1e.clarifai.api.MultiAppResponse\"G\x82\xd3\xe4\x93\x02)\"$/v2/users/{user_app_id.user_id}/apps:\x01*\x98\x9c\'\x05\x90\x9c\',\x90\x9c\'-\x90\x9c\'\x0f\x90\x9c\'\x13\x90\x9c\'\x12\x12\x88\x01\n\tPatchApps\x12\x1e.clarifai.api.PatchAppsRequest\x1a\x1e.clarifai.api.MultiAppResponse\";\x82\xd3\xe4\x93\x02)2$/v2/users/{user_app_id.user_id}/apps:\x01*\x98\x9c\'\x05\x90\x9c\',\x90\x9c\'-\x12\x9f\x01\n\x10PostAppsSearches\x12%.clarifai.api.PostAppsSearchesRequest\x1a\x1e.clarifai.api.MultiAppResponse\"D\x82\xd3\xe4\x93\x02\x32\"-/v2/users/{user_app_id.user_id}/apps/searches:\x01*\x98\x9c\'\x05\x90\x9c\'-\x90\x9c\'\x13\x12\xd3\x01\n\x14PostValidatePassword\x12).clarifai.api.PostValidatePasswordRequest\x1a..clarifai.api.SinglePasswordValidationResponse\"`\x82\xd3\xe4\x93\x02R\"1/v2/users/{user_app_id.user_id}/validate_password:\x01*Z\x1a\"\x15/v2/
validate_password:\x01*\x98\x9c\'\x05\x90\x9c\':\x12\xbd\x01\n\tGetSearch\x12\x1e.clarifai.api.GetSearchRequest\x1a\".clarifai.api.SingleSearchResponse\"l\x82\xd3\xe4\x93\x02^\x12G/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/searches/{id}Z\x13\x12\x11/v2/searches/{id}\x98\x9c\'\x02\x90\x9c\'\x03\x12\xb8\x01\n\x0cListSearches\x12!.clarifai.api.ListSearchesRequest\x1a!.clarifai.api.MultiSearchResponse\"b\x82\xd3\xe4\x93\x02T\x12\x42/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/searchesZ\x0e\x12\x0c/v2/searches\x98\x9c\'\x02\x90\x9c\'\x03\x12\xd2\x01\n\x0cPostSearches\x12!.clarifai.api.PostSearchesRequest\x1a!.clarifai.api.MultiSearchResponse\"|\x82\xd3\xe4\x93\x02Z\"B/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/searches:\x01*Z\x11\"\x0c/v2/searches:\x01*\x98\x9c\'\x02\x90\x9c\'&\x90\x9c\'\x0b\x90\x9c\'\x05\x90\x9c\'\x0f\x90\x9c\'\x03\x90\x9c\'\x13\x12\xd0\x01\n\x10PostSearchesByID\x12%.clarifai.api.PostSearchesByIDRequest\x1a!.clarifai.api.MultiSearchResponse\"r\x82\xd3\xe4\x93\x02\x64\"G/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/searches/{id}:\x01*Z\x16\"\x11/v2/searches/{id}:\x01*\x98\x9c\'\x02\x90\x9c\'\x03\x12\xaf\x02\n\x1bPostAnnotationSearchMetrics\x12\x30.clarifai.api.PostAnnotationSearchMetricsRequest\x1a\x32.clarifai.api.MultiAnnotationSearchMetricsResponse\"\xa9\x01\x82\xd3\xe4\x93\x02\x82\x01\"V/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/annotations/searches/metrics:\x01*Z%\" 
/v2/annotations/searches/metrics:\x01*\x98\x9c\'\x02\x90\x9c\'&\x90\x9c\'\x0b\x90\x9c\'\x05\x90\x9c\'5\x90\x9c\'6\x90\x9c\'\x0f\x90\x9c\'\x13\x12\xa5\x02\n\x1aGetAnnotationSearchMetrics\x12/.clarifai.api.GetAnnotationSearchMetricsRequest\x1a\x32.clarifai.api.MultiAnnotationSearchMetricsResponse\"\xa1\x01\x82\xd3\xe4\x93\x02\x86\x01\x12[/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/annotations/searches/metrics/{id}Z\'\x12%/v2/annotations/searches/metrics/{id}\x98\x9c\'\x02\x90\x9c\'&\x90\x9c\'\x0b\x90\x9c\'\x05\x90\x9c\'5\x12\x9c\x02\n\x1bListAnnotationSearchMetrics\x12\x30.clarifai.api.ListAnnotationSearchMetricsRequest\x1a\x32.clarifai.api.MultiAnnotationSearchMetricsResponse\"\x96\x01\x82\xd3\xe4\x93\x02|\x12V/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/annotations/searches/metricsZ\"\x12 /v2/annotations/searches/metrics\x98\x9c\'\x02\x90\x9c\'&\x90\x9c\'\x0b\x90\x9c\'\x05\x90\x9c\'5\x12\x96\x02\n\x1d\x44\x65leteAnnotationSearchMetrics\x12\x32.clarifai.api.DeleteAnnotationSearchMetricsRequest\x1a!.clarifai.api.status.BaseResponse\"\x9d\x01\x82\xd3\xe4\x93\x02\x86\x01*[/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/annotations/searches/metrics/{id}Z\'*%/v2/annotations/searches/metrics/{id}\x98\x9c\'\x02\x90\x9c\'5\x90\x9c\'6\x90\x9c\'?\x12\xc2\x01\n\x0c\x44\x65leteSearch\x12!.clarifai.api.DeleteSearchRequest\x1a!.clarifai.api.status.BaseResponse\"l\x82\xd3\xe4\x93\x02^*G/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/searches/{id}Z\x13*\x11/v2/searches/{id}\x98\x9c\'\x02\x90\x9c\'\x03\x12|\n\x0fListStatusCodes\x12$.clarifai.api.ListStatusCodesRequest\x1a%.clarifai.api.MultiStatusCodeResponse\"\x1c\x82\xd3\xe4\x93\x02\x12\x12\x10/v2/status_codes\x98\x9c\'\x01\x12\x8a\x01\n\rGetStatusCode\x12\".clarifai.api.GetStatusCodeRequest\x1a&.clarifai.api.SingleStatusCodeResponse\"-\x82\xd3\xe4\x93\x02#\x12!/v2/status_codes/{status_code_id}\x98\x9c\'\x01\x12\xbe\x01\n\x11ListCollaborators\x12&.clarifai.api.ListCollaboratorsRequest\
x1a(.clarifai.api.MultiCollaboratorsResponse\"W\x82\xd3\xe4\x93\x02I\x12G/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/collaborators\x98\x9c\'\x02\x90\x9c\'2\x12\xc5\x01\n\x11PostCollaborators\x12&.clarifai.api.PostCollaboratorsRequest\x1a(.clarifai.api.MultiCollaboratorsResponse\"^\x82\xd3\xe4\x93\x02L\"G/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/collaborators:\x01*\x98\x9c\'\x02\x90\x9c\'3\x90\x9c\'2\x12\xcb\x01\n\x12PatchCollaborators\x12\'.clarifai.api.PatchCollaboratorsRequest\x1a(.clarifai.api.MultiCollaboratorsResponse\"b\x82\xd3\xe4\x93\x02L2G/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/collaborators:\x01*\x98\x9c\'\x02\x90\x9c\'3\x90\x9c\'2\x90\x9c\'4\x12\xce\x01\n\x13\x44\x65leteCollaborators\x12(.clarifai.api.DeleteCollaboratorsRequest\x1a!.clarifai.api.status.BaseResponse\"j\x82\xd3\xe4\x93\x02L*G/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/collaborators:\x01*\x98\x9c\'\x02\x90\x9c\'3\x90\x9c\'2\x90\x9c\'4\x90\x9c\'7\x90\x9c\'8\x12\xa4\x01\n\x12ListCollaborations\x12\'.clarifai.api.ListCollaborationsRequest\x1a).clarifai.api.MultiCollaborationsResponse\":\x82\xd3\xe4\x93\x02\x30\x12./v2/users/{user_app_id.user_id}/collaborations\x98\x9c\'\x03\x12\xef\x01\n\x13PostAppDuplications\x12(.clarifai.api.PostAppDuplicationsRequest\x1a*.clarifai.api.MultiAppDuplicationsResponse\"\x81\x01\x82\xd3\xe4\x93\x02K\"F/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/duplications:\x01*\x98\x9c\'\x02\x90\x9c\'\x0e\x90\x9c\'\x0f\x90\x9c\'\x1a\x90\x9c\'\n\x90\x9c\'\x0b\x90\x9c\'\x04\x90\x9c\'\x05\x90\x9c\'%\x90\x9c\'&\x90\x9c\'\x12\x90\x9c\'\x13\x12\xa5\x01\n\x13ListAppDuplications\x12(.clarifai.api.ListAppDuplicationsRequest\x1a*.clarifai.api.MultiAppDuplicationsResponse\"8\x82\xd3\xe4\x93\x02.\x12,/v2/users/{user_app_id.user_id}/duplications\x98\x9c\'\x03\x12\xb6\x01\n\x11GetAppDuplication\x12&.clarifai.api.GetAppDuplicationRequest\x1a*.clarifai.api.SingleAppDuplicationResponse\"M\x82\xd3\xe4\x93\x02\x43\x12\x41
/v2/users/{user_app_id.user_id}/duplications/{app_duplication_id}\x98\x9c\'\x03\x12\xd1\x01\n\tPostTasks\x12\x1e.clarifai.api.PostTasksRequest\x1a\x1f.clarifai.api.MultiTaskResponse\"\x82\x01\x82\xd3\xe4\x93\x02T\"?/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/tasks:\x01*Z\x0e\"\t/v2/tasks:\x01*\x98\x9c\'\x02\x90\x9c\'7\x90\x9c\'8\x90\x9c\'\x05\x90\x9c\'\x03\x90\x9c\'\x0b\x90\x9c\'\x13\x90\x9c\'\x0f\x90\x9c\'%\x90\x9c\'&\x12\xfa\x01\n\x16GetTaskAnnotationCount\x12!.clarifai.api.GetTaskCountRequest\x1a%.clarifai.api.SingleTaskCountResponse\"\x95\x01\x82\xd3\xe4\x93\x02\x86\x01\x12[/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/tasks/{task_id}/annotations/countZ\'\x12%/v2/tasks/{task_id}/annotations/count\x98\x9c\'\x02\x90\x9c\'8\x12\xea\x01\n\x11GetTaskInputCount\x12!.clarifai.api.GetTaskCountRequest\x1a%.clarifai.api.SingleTaskCountResponse\"\x8a\x01\x82\xd3\xe4\x93\x02|\x12V/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/tasks/{task_id}/inputs/countZ\"\x12 /v2/tasks/{task_id}/inputs/count\x98\x9c\'\x02\x90\x9c\'8\x12\xcc\x01\n\x07GetTask\x12\x1c.clarifai.api.GetTaskRequest\x1a 
.clarifai.api.SingleTaskResponse\"\x80\x01\x82\xd3\xe4\x93\x02\x62\x12I/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/tasks/{task_id}Z\x15\x12\x13/v2/tasks/{task_id}\x98\x9c\'\x02\x90\x9c\'8\x90\x9c\'\x03\x90\x9c\'\x0b\x90\x9c\'\x13\x90\x9c\'\x0f\x12\xba\x01\n\tListTasks\x12\x1e.clarifai.api.ListTasksRequest\x1a\x1f.clarifai.api.MultiTaskResponse\"l\x82\xd3\xe4\x93\x02N\x12?/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/tasksZ\x0b\x12\t/v2/tasks\x98\x9c\'\x02\x90\x9c\'8\x90\x9c\'\x03\x90\x9c\'\x0b\x90\x9c\'\x13\x90\x9c\'\x0f\x12\xd3\x01\n\nPatchTasks\x12\x1f.clarifai.api.PatchTasksRequest\x1a\x1f.clarifai.api.MultiTaskResponse\"\x82\x01\x82\xd3\xe4\x93\x02T2?/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/tasks:\x01*Z\x0e\x32\t/v2/tasks:\x01*\x98\x9c\'\x02\x90\x9c\'7\x90\x9c\'8\x90\x9c\'\x05\x90\x9c\'\x03\x90\x9c\'\x0b\x90\x9c\'\x13\x90\x9c\'\x0f\x90\x9c\'%\x90\x9c\'&\x12\xbe\x01\n\x0b\x44\x65leteTasks\x12 .clarifai.api.DeleteTasksRequest\x1a!.clarifai.api.status.BaseResponse\"j\x82\xd3\xe4\x93\x02T*?/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/tasks:\x01*Z\x0e*\t/v2/tasks:\x01*\x98\x9c\'\x02\x90\x9c\'7\x90\x9c\'8\x90\x9c\'F\x12\xee\x01\n\x0ePostCollectors\x12#.clarifai.api.PostCollectorsRequest\x1a$.clarifai.api.MultiCollectorResponse\"\x90\x01\x82\xd3\xe4\x93\x02^\"D/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/collectors:\x01*Z\x13\"\x0e/v2/collectors:\x01*\x98\x9c\'\x02\x90\x9c\'\x04\x90\x9c\'%\x90\x9c\'&\x90\x9c\')\x90\x9c\'\x0b\x90\x9c\'\x04\x90\x9c\'\x05\x90\x9c\'\x0f\x90\x9c\'\x02\x90\x9c\'\x13\x12\xdf\x01\n\x0cGetCollector\x12!.clarifai.api.GetCollectorRequest\x1a%.clarifai.api.SingleCollectorResponse\"\x84\x01\x82\xd3\xe4\x93\x02v\x12S/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/collectors/{collector_id}Z\x1f\x12\x1d/v2/collectors/{collector_id}\x98\x9c\'\x02\x90\x9c\'*\x12\xc3\x01\n\x0eListCollectors\x12#.clarifai.api.ListCollectorsRequest\x1a$.clarifai.api.MultiCollectorResponse\"f\x82
\xd3\xe4\x93\x02X\x12\x44/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/collectorsZ\x10\x12\x0e/v2/collectors\x98\x9c\'\x02\x90\x9c\'*\x12\xcf\x01\n\x0fPatchCollectors\x12$.clarifai.api.PatchCollectorsRequest\x1a$.clarifai.api.MultiCollectorResponse\"p\x82\xd3\xe4\x93\x02^2D/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/collectors:\x01*Z\x13\x32\x0e/v2/collectors:\x01*\x98\x9c\'\x02\x90\x9c\')\x90\x9c\'*\x12\xd2\x01\n\x10\x44\x65leteCollectors\x12%.clarifai.api.DeleteCollectorsRequest\x1a!.clarifai.api.status.BaseResponse\"t\x82\xd3\xe4\x93\x02^*D/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/collectors:\x01*Z\x13*\x0e/v2/collectors:\x01*\x98\x9c\'\x02\x90\x9c\')\x90\x9c\'+\x90\x9c\'*\x12\xc9\x01\n\x0ePostStatValues\x12#.clarifai.api.PostStatValuesRequest\x1a$.clarifai.api.MultiStatValueResponse\"l\x82\xd3\xe4\x93\x02\x62\"F/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/stats/values:\x01*Z\x15\"\x10/v2/stats/values:\x01*\x98\x9c\'\x02\x12\xf9\x01\n\x17PostStatValuesAggregate\x12,.clarifai.api.PostStatValuesAggregateRequest\x1a-.clarifai.api.MultiStatValueAggregateResponse\"\x80\x01\x82\xd3\xe4\x93\x02v\"P/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/stats/values/aggregate:\x01*Z\x1f\"\x1a/v2/stats/values/aggregate:\x01*\x98\x9c\'\x02\x12\xe3\x01\n\x17PostTrendingMetricsView\x12,.clarifai.api.PostTrendingMetricsViewRequest\x1a!.clarifai.api.status.BaseResponse\"w\x82\xd3\xe4\x93\x02m\"h/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/trending_metrics/views/{view_type}/{object_id}:\x01*\x98\x9c\'\x02\x12\x8f\x02\n\x18ListTrendingMetricsViews\x12-.clarifai.api.ListTrendingMetricsViewsRequest\x1a..clarifai.api.MultiTrendingMetricsViewResponse\"\x93\x01\x82\xd3\xe4\x93\x02\x88\x01\x12\\/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/trending_metrics/views/{view_type}Z(\x12&/v2/trending_metrics/views/{view_type}\x98\x9c\'\x02\x42]\n\x15\x63om.clarifai.grpc.apiP\x01Z;github.com/Clarifai/clarifai-go-
grpc/proto/clarifai/api/api\xa2\x02\x04\x43\x41IPb\x06proto3'
,
dependencies=[proto_dot_clarifai_dot_api_dot_resources__pb2.DESCRIPTOR,proto_dot_clarifai_dot_api_dot_status_dot_status__pb2.DESCRIPTOR,proto_dot_clarifai_dot_api_dot_utils_dot_extensions__pb2.DESCRIPTOR,proto_dot_clarifai_dot_auth_dot_scope_dot_scope__pb2.DESCRIPTOR,proto_dot_clarifai_dot_auth_dot_util_dot_extension__pb2.DESCRIPTOR,proto_dot_clarifai_dot_api_dot_status_dot_status__code__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,])
_ORGANIZATIONINVITATIONSTATUS = _descriptor.EnumDescriptor(
name='OrganizationInvitationStatus',
full_name='clarifai.api.OrganizationInvitationStatus',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='NOT_SET', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='PENDING', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ACCEPTED', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CANCELLED', index=3, number=3,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DECLINED', index=4, number=4,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EXPIRED', index=5, number=5,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=23205,
serialized_end=23317,
)
_sym_db.RegisterEnumDescriptor(_ORGANIZATIONINVITATIONSTATUS)
OrganizationInvitationStatus = enum_type_wrapper.EnumTypeWrapper(_ORGANIZATIONINVITATIONSTATUS)
NOT_SET = 0
PENDING = 1
ACCEPTED = 2
CANCELLED = 3
DECLINED = 4
EXPIRED = 5
_PAGINATION = _descriptor.Descriptor(
name='Pagination',
full_name='clarifai.api.Pagination',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='page', full_name='clarifai.api.Pagination.page', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='per_page', full_name='clarifai.api.Pagination.per_page', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=420,
serialized_end=464,
)
_GETANNOTATIONREQUEST = _descriptor.Descriptor(
name='GetAnnotationRequest',
full_name='clarifai.api.GetAnnotationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.GetAnnotationRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='annotation_id', full_name='clarifai.api.GetAnnotationRequest.annotation_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='input_id', full_name='clarifai.api.GetAnnotationRequest.input_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=466,
serialized_end=578,
)
_LISTANNOTATIONSREQUEST = _descriptor.Descriptor(
name='ListAnnotationsRequest',
full_name='clarifai.api.ListAnnotationsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.ListAnnotationsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ids', full_name='clarifai.api.ListAnnotationsRequest.ids', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='input_ids', full_name='clarifai.api.ListAnnotationsRequest.input_ids', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_ids', full_name='clarifai.api.ListAnnotationsRequest.user_ids', index=3,
number=9, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='model_version_ids', full_name='clarifai.api.ListAnnotationsRequest.model_version_ids', index=4,
number=10, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='statuses', full_name='clarifai.api.ListAnnotationsRequest.statuses', index=5,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='list_all_annotations', full_name='clarifai.api.ListAnnotationsRequest.list_all_annotations', index=6,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='page', full_name='clarifai.api.ListAnnotationsRequest.page', index=7,
number=7, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='per_page', full_name='clarifai.api.ListAnnotationsRequest.per_page', index=8,
number=8, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='task_id', full_name='clarifai.api.ListAnnotationsRequest.task_id', index=9,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=581,
serialized_end=863,
)
_POSTANNOTATIONSREQUEST = _descriptor.Descriptor(
name='PostAnnotationsRequest',
full_name='clarifai.api.PostAnnotationsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PostAnnotationsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='annotations', full_name='clarifai.api.PostAnnotationsRequest.annotations', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=865,
serialized_end=985,
)
_PATCHANNOTATIONSREQUEST = _descriptor.Descriptor(
name='PatchAnnotationsRequest',
full_name='clarifai.api.PatchAnnotationsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PatchAnnotationsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='annotations', full_name='clarifai.api.PatchAnnotationsRequest.annotations', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='action', full_name='clarifai.api.PatchAnnotationsRequest.action', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=988,
serialized_end=1125,
)
_PATCHANNOTATIONSSTATUSREQUEST = _descriptor.Descriptor(
name='PatchAnnotationsStatusRequest',
full_name='clarifai.api.PatchAnnotationsStatusRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PatchAnnotationsStatusRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status_code', full_name='clarifai.api.PatchAnnotationsStatusRequest.status_code', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_ids', full_name='clarifai.api.PatchAnnotationsStatusRequest.user_ids', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='task_id', full_name='clarifai.api.PatchAnnotationsStatusRequest.task_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='action', full_name='clarifai.api.PatchAnnotationsStatusRequest.action', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1128,
serialized_end=1313,
)
_PATCHANNOTATIONSSTATUSRESPONSE = _descriptor.Descriptor(
name='PatchAnnotationsStatusResponse',
full_name='clarifai.api.PatchAnnotationsStatusResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.PatchAnnotationsStatusResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_ids', full_name='clarifai.api.PatchAnnotationsStatusResponse.user_ids', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='updated_count', full_name='clarifai.api.PatchAnnotationsStatusResponse.updated_count', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1315,
serialized_end=1433,
)
_DELETEANNOTATIONREQUEST = _descriptor.Descriptor(
name='DeleteAnnotationRequest',
full_name='clarifai.api.DeleteAnnotationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.DeleteAnnotationRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='input_id', full_name='clarifai.api.DeleteAnnotationRequest.input_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='annotation_id', full_name='clarifai.api.DeleteAnnotationRequest.annotation_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1435,
serialized_end=1550,
)
_DELETEANNOTATIONSREQUEST = _descriptor.Descriptor(
name='DeleteAnnotationsRequest',
full_name='clarifai.api.DeleteAnnotationsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.DeleteAnnotationsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ids', full_name='clarifai.api.DeleteAnnotationsRequest.ids', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='input_ids', full_name='clarifai.api.DeleteAnnotationsRequest.input_ids', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1552,
serialized_end=1659,
)
_SINGLEANNOTATIONRESPONSE = _descriptor.Descriptor(
name='SingleAnnotationResponse',
full_name='clarifai.api.SingleAnnotationResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.SingleAnnotationResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='annotation', full_name='clarifai.api.SingleAnnotationResponse.annotation', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1661,
serialized_end=1778,
)
_MULTIANNOTATIONRESPONSE = _descriptor.Descriptor(
name='MultiAnnotationResponse',
full_name='clarifai.api.MultiAnnotationResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.MultiAnnotationResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='annotations', full_name='clarifai.api.MultiAnnotationResponse.annotations', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\200\265\030\001', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1780,
serialized_end=1903,
)
_GETAPPREQUEST = _descriptor.Descriptor(
name='GetAppRequest',
full_name='clarifai.api.GetAppRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.GetAppRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1905,
serialized_end=1969,
)
_LISTAPPSREQUEST = _descriptor.Descriptor(
name='ListAppsRequest',
full_name='clarifai.api.ListAppsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.ListAppsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='page', full_name='clarifai.api.ListAppsRequest.page', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='per_page', full_name='clarifai.api.ListAppsRequest.per_page', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sort_ascending', full_name='clarifai.api.ListAppsRequest.sort_ascending', index=3,
number=5, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sort_by_name', full_name='clarifai.api.ListAppsRequest.sort_by_name', index=4,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sort_by_modified_at', full_name='clarifai.api.ListAppsRequest.sort_by_modified_at', index=5,
number=7, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='query', full_name='clarifai.api.ListAppsRequest.query', index=6,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='clarifai.api.ListAppsRequest.name', index=7,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\030\001', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='featured_only', full_name='clarifai.api.ListAppsRequest.featured_only', index=8,
number=9, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='sort_by', full_name='clarifai.api.ListAppsRequest.sort_by',
index=0, containing_type=None, fields=[]),
],
serialized_start=1972,
serialized_end=2216,
)
_POSTAPPSREQUEST = _descriptor.Descriptor(
name='PostAppsRequest',
full_name='clarifai.api.PostAppsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PostAppsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='apps', full_name='clarifai.api.PostAppsRequest.apps', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2218,
serialized_end=2317,
)
_DELETEAPPREQUEST = _descriptor.Descriptor(
name='DeleteAppRequest',
full_name='clarifai.api.DeleteAppRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.DeleteAppRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2319,
serialized_end=2386,
)
_PATCHAPPSREQUEST = _descriptor.Descriptor(
name='PatchAppsRequest',
full_name='clarifai.api.PatchAppsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PatchAppsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='apps', full_name='clarifai.api.PatchAppsRequest.apps', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='action', full_name='clarifai.api.PatchAppsRequest.action', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='metadata_action', full_name='clarifai.api.PatchAppsRequest.metadata_action', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='reindex', full_name='clarifai.api.PatchAppsRequest.reindex', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2389,
serialized_end=2574,
)
_POSTAPPSSEARCHESREQUEST = _descriptor.Descriptor(
name='PostAppsSearchesRequest',
full_name='clarifai.api.PostAppsSearchesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PostAppsSearchesRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='app_query', full_name='clarifai.api.PostAppsSearchesRequest.app_query', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='clarifai.api.PostAppsSearchesRequest.pagination', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2577,
serialized_end=2740,
)
_SINGLEAPPRESPONSE = _descriptor.Descriptor(
name='SingleAppResponse',
full_name='clarifai.api.SingleAppResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.SingleAppResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='app', full_name='clarifai.api.SingleAppResponse.app', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2742,
serialized_end=2838,
)
_MULTIAPPRESPONSE = _descriptor.Descriptor(
name='MultiAppResponse',
full_name='clarifai.api.MultiAppResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.MultiAppResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='apps', full_name='clarifai.api.MultiAppResponse.apps', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\200\265\030\001', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2840,
serialized_end=2942,
)
_LISTCOLLABORATORSREQUEST = _descriptor.Descriptor(
name='ListCollaboratorsRequest',
full_name='clarifai.api.ListCollaboratorsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.ListCollaboratorsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='list_all_collaborators', full_name='clarifai.api.ListCollaboratorsRequest.list_all_collaborators', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2944,
serialized_end=3051,
)
_POSTCOLLABORATORSREQUEST = _descriptor.Descriptor(
name='PostCollaboratorsRequest',
full_name='clarifai.api.PostCollaboratorsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PostCollaboratorsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='collaborators', full_name='clarifai.api.PostCollaboratorsRequest.collaborators', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3053,
serialized_end=3179,
)
_PATCHCOLLABORATORSREQUEST = _descriptor.Descriptor(
name='PatchCollaboratorsRequest',
full_name='clarifai.api.PatchCollaboratorsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PatchCollaboratorsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='collaborators', full_name='clarifai.api.PatchCollaboratorsRequest.collaborators', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='action', full_name='clarifai.api.PatchCollaboratorsRequest.action', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3182,
serialized_end=3325,
)
_DELETECOLLABORATORSREQUEST = _descriptor.Descriptor(
name='DeleteCollaboratorsRequest',
full_name='clarifai.api.DeleteCollaboratorsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.DeleteCollaboratorsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='collaborator_ids', full_name='clarifai.api.DeleteCollaboratorsRequest.collaborator_ids', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_emails', full_name='clarifai.api.DeleteCollaboratorsRequest.user_emails', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3327,
serialized_end=3451,
)
_MULTICOLLABORATORSRESPONSE = _descriptor.Descriptor(
name='MultiCollaboratorsResponse',
full_name='clarifai.api.MultiCollaboratorsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.MultiCollaboratorsResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='collaborators', full_name='clarifai.api.MultiCollaboratorsResponse.collaborators', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\200\265\030\001', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3454,
serialized_end=3584,
)
_LISTCOLLABORATIONSREQUEST = _descriptor.Descriptor(
name='ListCollaborationsRequest',
full_name='clarifai.api.ListCollaborationsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.ListCollaborationsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='page', full_name='clarifai.api.ListCollaborationsRequest.page', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='per_page', full_name='clarifai.api.ListCollaborationsRequest.per_page', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3586,
serialized_end=3694,
)
_MULTICOLLABORATIONSRESPONSE = _descriptor.Descriptor(
name='MultiCollaborationsResponse',
full_name='clarifai.api.MultiCollaborationsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.MultiCollaborationsResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='collaborations', full_name='clarifai.api.MultiCollaborationsResponse.collaborations', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\200\265\030\001', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3697,
serialized_end=3830,
)
_GETSTATUSCODEREQUEST = _descriptor.Descriptor(
name='GetStatusCodeRequest',
full_name='clarifai.api.GetStatusCodeRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status_code_id', full_name='clarifai.api.GetStatusCodeRequest.status_code_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3832,
serialized_end=3878,
)
_LISTSTATUSCODESREQUEST = _descriptor.Descriptor(
name='ListStatusCodesRequest',
full_name='clarifai.api.ListStatusCodesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3880,
serialized_end=3904,
)
_SINGLESTATUSCODERESPONSE = _descriptor.Descriptor(
name='SingleStatusCodeResponse',
full_name='clarifai.api.SingleStatusCodeResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.SingleStatusCodeResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3906,
serialized_end=3977,
)
_MULTISTATUSCODERESPONSE = _descriptor.Descriptor(
name='MultiStatusCodeResponse',
full_name='clarifai.api.MultiStatusCodeResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.MultiStatusCodeResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='statuses', full_name='clarifai.api.MultiStatusCodeResponse.statuses', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3979,
serialized_end=4096,
)
_GETCONCEPTREQUEST = _descriptor.Descriptor(
name='GetConceptRequest',
full_name='clarifai.api.GetConceptRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.GetConceptRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='concept_id', full_name='clarifai.api.GetConceptRequest.concept_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4098,
serialized_end=4186,
)
_LISTCONCEPTSREQUEST = _descriptor.Descriptor(
name='ListConceptsRequest',
full_name='clarifai.api.ListConceptsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.ListConceptsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='page', full_name='clarifai.api.ListConceptsRequest.page', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='per_page', full_name='clarifai.api.ListConceptsRequest.per_page', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4188,
serialized_end=4290,
)
_POSTCONCEPTSSEARCHESREQUEST = _descriptor.Descriptor(
name='PostConceptsSearchesRequest',
full_name='clarifai.api.PostConceptsSearchesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PostConceptsSearchesRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='concept_query', full_name='clarifai.api.PostConceptsSearchesRequest.concept_query', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='clarifai.api.PostConceptsSearchesRequest.pagination', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4293,
serialized_end=4468,
)
_POSTCONCEPTSREQUEST = _descriptor.Descriptor(
name='PostConceptsRequest',
full_name='clarifai.api.PostConceptsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PostConceptsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='concepts', full_name='clarifai.api.PostConceptsRequest.concepts', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4470,
serialized_end=4581,
)
_PATCHCONCEPTSREQUEST = _descriptor.Descriptor(
name='PatchConceptsRequest',
full_name='clarifai.api.PatchConceptsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PatchConceptsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='concepts', full_name='clarifai.api.PatchConceptsRequest.concepts', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='action', full_name='clarifai.api.PatchConceptsRequest.action', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4584,
serialized_end=4712,
)
_GETCONCEPTCOUNTSREQUEST = _descriptor.Descriptor(
name='GetConceptCountsRequest',
full_name='clarifai.api.GetConceptCountsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.GetConceptCountsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='page', full_name='clarifai.api.GetConceptCountsRequest.page', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='per_page', full_name='clarifai.api.GetConceptCountsRequest.per_page', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4714,
serialized_end=4820,
)
_SINGLECONCEPTRESPONSE = _descriptor.Descriptor(
name='SingleConceptResponse',
full_name='clarifai.api.SingleConceptResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.SingleConceptResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='concept', full_name='clarifai.api.SingleConceptResponse.concept', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4822,
serialized_end=4930,
)
_MULTICONCEPTRESPONSE = _descriptor.Descriptor(
name='MultiConceptResponse',
full_name='clarifai.api.MultiConceptResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.MultiConceptResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='concepts', full_name='clarifai.api.MultiConceptResponse.concepts', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\200\265\030\001', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4932,
serialized_end=5046,
)
_MULTICONCEPTCOUNTRESPONSE = _descriptor.Descriptor(
name='MultiConceptCountResponse',
full_name='clarifai.api.MultiConceptCountResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.MultiConceptCountResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='concept_counts', full_name='clarifai.api.MultiConceptCountResponse.concept_counts', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\200\265\030\001', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5049,
serialized_end=5179,
)
# Auto-generated Descriptor for clarifai.api.ListConceptRelationsRequest
# (protoc output -- do not edit by hand). Fields: user_app_id (message),
# concept_id/predicate/knowledge_graph_id (strings), page/per_page (uint32).
_LISTCONCEPTRELATIONSREQUEST = _descriptor.Descriptor(
  name='ListConceptRelationsRequest',
  full_name='clarifai.api.ListConceptRelationsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.ListConceptRelationsRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='concept_id', full_name='clarifai.api.ListConceptRelationsRequest.concept_id', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='predicate', full_name='clarifai.api.ListConceptRelationsRequest.predicate', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='knowledge_graph_id', full_name='clarifai.api.ListConceptRelationsRequest.knowledge_graph_id', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='page', full_name='clarifai.api.ListConceptRelationsRequest.page', index=4,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='per_page', full_name='clarifai.api.ListConceptRelationsRequest.per_page', index=5,
      number=6, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5182,
  serialized_end=5359,
)
# Auto-generated Descriptor for clarifai.api.PostConceptRelationsRequest
# (protoc output -- do not edit by hand). Fields: user_app_id (message),
# concept_id (string), concept_relations (repeated message).
_POSTCONCEPTRELATIONSREQUEST = _descriptor.Descriptor(
  name='PostConceptRelationsRequest',
  full_name='clarifai.api.PostConceptRelationsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.PostConceptRelationsRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='concept_id', full_name='clarifai.api.PostConceptRelationsRequest.concept_id', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='concept_relations', full_name='clarifai.api.PostConceptRelationsRequest.concept_relations', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5362,
  serialized_end=5518,
)
# Auto-generated Descriptor for clarifai.api.DeleteConceptRelationsRequest
# (protoc output -- do not edit by hand). Fields: user_app_id (message),
# concept_id (string), ids (repeated string).
_DELETECONCEPTRELATIONSREQUEST = _descriptor.Descriptor(
  name='DeleteConceptRelationsRequest',
  full_name='clarifai.api.DeleteConceptRelationsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.DeleteConceptRelationsRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='concept_id', full_name='clarifai.api.DeleteConceptRelationsRequest.concept_id', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='ids', full_name='clarifai.api.DeleteConceptRelationsRequest.ids', index=2,
      number=3, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5520,
  serialized_end=5633,
)
# Auto-generated Descriptor for clarifai.api.ListKnowledgeGraphsRequest
# (protoc output -- do not edit by hand). Single field: user_app_id (message).
_LISTKNOWLEDGEGRAPHSREQUEST = _descriptor.Descriptor(
  name='ListKnowledgeGraphsRequest',
  full_name='clarifai.api.ListKnowledgeGraphsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.ListKnowledgeGraphsRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5635,
  serialized_end=5712,
)
# Auto-generated Descriptor for clarifai.api.PostKnowledgeGraphsRequest
# (protoc output -- do not edit by hand). Fields: user_app_id (message),
# knowledge_graphs (repeated message).
_POSTKNOWLEDGEGRAPHSREQUEST = _descriptor.Descriptor(
  name='PostKnowledgeGraphsRequest',
  full_name='clarifai.api.PostKnowledgeGraphsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.PostKnowledgeGraphsRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='knowledge_graphs', full_name='clarifai.api.PostKnowledgeGraphsRequest.knowledge_graphs', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5715,
  serialized_end=5848,
)
# Auto-generated Descriptor for clarifai.api.PostConceptMappingJobsRequest
# (protoc output -- do not edit by hand). Fields: user_app_id (message),
# concept_mapping_jobs (repeated message).
_POSTCONCEPTMAPPINGJOBSREQUEST = _descriptor.Descriptor(
  name='PostConceptMappingJobsRequest',
  full_name='clarifai.api.PostConceptMappingJobsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.PostConceptMappingJobsRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='concept_mapping_jobs', full_name='clarifai.api.PostConceptMappingJobsRequest.concept_mapping_jobs', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5851,
  serialized_end=5994,
)
# Auto-generated Descriptor for clarifai.api.MultiConceptRelationResponse
# (protoc output -- do not edit by hand). Fields: status (message),
# concept_relations (repeated message; carries field-level serialized options).
_MULTICONCEPTRELATIONRESPONSE = _descriptor.Descriptor(
  name='MultiConceptRelationResponse',
  full_name='clarifai.api.MultiConceptRelationResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='status', full_name='clarifai.api.MultiConceptRelationResponse.status', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='concept_relations', full_name='clarifai.api.MultiConceptRelationResponse.concept_relations', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=b'\200\265\030\001', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5997,
  serialized_end=6136,
)
# Auto-generated Descriptor for clarifai.api.MultiKnowledgeGraphResponse
# (protoc output -- do not edit by hand). Fields: status (message),
# knowledge_graphs (repeated message; carries field-level serialized options).
_MULTIKNOWLEDGEGRAPHRESPONSE = _descriptor.Descriptor(
  name='MultiKnowledgeGraphResponse',
  full_name='clarifai.api.MultiKnowledgeGraphResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='status', full_name='clarifai.api.MultiKnowledgeGraphResponse.status', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='knowledge_graphs', full_name='clarifai.api.MultiKnowledgeGraphResponse.knowledge_graphs', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=b'\200\265\030\001', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6139,
  serialized_end=6275,
)
# Auto-generated Descriptor for clarifai.api.MultiConceptMappingJobResponse
# (protoc output -- do not edit by hand). Fields: status (message),
# ids (repeated string).
_MULTICONCEPTMAPPINGJOBRESPONSE = _descriptor.Descriptor(
  name='MultiConceptMappingJobResponse',
  full_name='clarifai.api.MultiConceptMappingJobResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='status', full_name='clarifai.api.MultiConceptMappingJobResponse.status', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='ids', full_name='clarifai.api.MultiConceptMappingJobResponse.ids', index=1,
      number=2, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6277,
  serialized_end=6367,
)
# Auto-generated Descriptor for clarifai.api.GetConceptLanguageRequest
# (protoc output -- do not edit by hand). Fields: user_app_id (message),
# concept_id and language (strings).
_GETCONCEPTLANGUAGEREQUEST = _descriptor.Descriptor(
  name='GetConceptLanguageRequest',
  full_name='clarifai.api.GetConceptLanguageRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.GetConceptLanguageRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='concept_id', full_name='clarifai.api.GetConceptLanguageRequest.concept_id', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='language', full_name='clarifai.api.GetConceptLanguageRequest.language', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6369,
  serialized_end=6483,
)
# Auto-generated Descriptor for clarifai.api.ListConceptLanguagesRequest
# (protoc output -- do not edit by hand). Fields: user_app_id (message),
# concept_id (string), page/per_page (uint32 pagination).
_LISTCONCEPTLANGUAGESREQUEST = _descriptor.Descriptor(
  name='ListConceptLanguagesRequest',
  full_name='clarifai.api.ListConceptLanguagesRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.ListConceptLanguagesRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='concept_id', full_name='clarifai.api.ListConceptLanguagesRequest.concept_id', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='page', full_name='clarifai.api.ListConceptLanguagesRequest.page', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='per_page', full_name='clarifai.api.ListConceptLanguagesRequest.per_page', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6486,
  serialized_end=6616,
)
# Auto-generated Descriptor for clarifai.api.PatchConceptLanguagesRequest
# (protoc output -- do not edit by hand). Fields: user_app_id (message),
# concept_id (string), concept_languages (repeated message), action (string).
_PATCHCONCEPTLANGUAGESREQUEST = _descriptor.Descriptor(
  name='PatchConceptLanguagesRequest',
  full_name='clarifai.api.PatchConceptLanguagesRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.PatchConceptLanguagesRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='concept_id', full_name='clarifai.api.PatchConceptLanguagesRequest.concept_id', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='concept_languages', full_name='clarifai.api.PatchConceptLanguagesRequest.concept_languages', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='action', full_name='clarifai.api.PatchConceptLanguagesRequest.action', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6619,
  serialized_end=6792,
)
# Auto-generated Descriptor for clarifai.api.PostConceptLanguagesRequest
# (protoc output -- do not edit by hand). Fields: user_app_id (message),
# concept_id (string), concept_languages (repeated message).
_POSTCONCEPTLANGUAGESREQUEST = _descriptor.Descriptor(
  name='PostConceptLanguagesRequest',
  full_name='clarifai.api.PostConceptLanguagesRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.PostConceptLanguagesRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='concept_id', full_name='clarifai.api.PostConceptLanguagesRequest.concept_id', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='concept_languages', full_name='clarifai.api.PostConceptLanguagesRequest.concept_languages', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6795,
  serialized_end=6951,
)
# Auto-generated Descriptor for clarifai.api.SingleConceptLanguageResponse
# (protoc output -- do not edit by hand). Fields: status (message),
# concept_language (singular message).
_SINGLECONCEPTLANGUAGERESPONSE = _descriptor.Descriptor(
  name='SingleConceptLanguageResponse',
  full_name='clarifai.api.SingleConceptLanguageResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='status', full_name='clarifai.api.SingleConceptLanguageResponse.status', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='concept_language', full_name='clarifai.api.SingleConceptLanguageResponse.concept_language', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6954,
  serialized_end=7087,
)
# Auto-generated Descriptor for clarifai.api.MultiConceptLanguageResponse
# (protoc output -- do not edit by hand). Fields: status (message),
# concept_languages (repeated message; carries field-level serialized options).
_MULTICONCEPTLANGUAGERESPONSE = _descriptor.Descriptor(
  name='MultiConceptLanguageResponse',
  full_name='clarifai.api.MultiConceptLanguageResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='status', full_name='clarifai.api.MultiConceptLanguageResponse.status', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='concept_languages', full_name='clarifai.api.MultiConceptLanguageResponse.concept_languages', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=b'\200\265\030\001', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7090,
  serialized_end=7229,
)
# Auto-generated Descriptor for clarifai.api.GetInputRequest
# (protoc output -- do not edit by hand). Fields: user_app_id (message),
# input_id (string).
_GETINPUTREQUEST = _descriptor.Descriptor(
  name='GetInputRequest',
  full_name='clarifai.api.GetInputRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.GetInputRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='input_id', full_name='clarifai.api.GetInputRequest.input_id', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7231,
  serialized_end=7315,
)
# Auto-generated Descriptor for clarifai.api.GetInputSamplesRequest
# (protoc output -- do not edit by hand). Fields: user_app_id (message),
# task_id (string), user_ids (repeated string).
_GETINPUTSAMPLESREQUEST = _descriptor.Descriptor(
  name='GetInputSamplesRequest',
  full_name='clarifai.api.GetInputSamplesRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.GetInputSamplesRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='task_id', full_name='clarifai.api.GetInputSamplesRequest.task_id', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='user_ids', full_name='clarifai.api.GetInputSamplesRequest.user_ids', index=2,
      number=3, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7317,
  serialized_end=7425,
)
# Auto-generated Descriptor for clarifai.api.ListInputsRequest
# (protoc output -- do not edit by hand). Fields: user_app_id (message),
# page/per_page (uint32), status (message, wire number 5), ids (repeated
# string, wire number 4) -- note declaration order differs from wire numbers.
_LISTINPUTSREQUEST = _descriptor.Descriptor(
  name='ListInputsRequest',
  full_name='clarifai.api.ListInputsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.ListInputsRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='page', full_name='clarifai.api.ListInputsRequest.page', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='per_page', full_name='clarifai.api.ListInputsRequest.per_page', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='status', full_name='clarifai.api.ListInputsRequest.status', index=3,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='ids', full_name='clarifai.api.ListInputsRequest.ids', index=4,
      number=4, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7428,
  serialized_end=7586,
)
# Auto-generated Descriptor for clarifai.api.StreamInputsRequest
# (protoc output -- do not edit by hand). Fields: user_app_id (message),
# per_page (uint32), last_id (string cursor), descending (bool).
_STREAMINPUTSREQUEST = _descriptor.Descriptor(
  name='StreamInputsRequest',
  full_name='clarifai.api.StreamInputsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.StreamInputsRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='per_page', full_name='clarifai.api.StreamInputsRequest.per_page', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='last_id', full_name='clarifai.api.StreamInputsRequest.last_id', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='descending', full_name='clarifai.api.StreamInputsRequest.descending', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7588,
  serialized_end=7713,
)
# Auto-generated Descriptor for clarifai.api.PostInputsRequest
# (protoc output -- do not edit by hand). Fields: user_app_id (message),
# inputs (repeated message).
_POSTINPUTSREQUEST = _descriptor.Descriptor(
  name='PostInputsRequest',
  full_name='clarifai.api.PostInputsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.PostInputsRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='inputs', full_name='clarifai.api.PostInputsRequest.inputs', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7715,
  serialized_end=7820,
)
# Auto-generated Descriptor for clarifai.api.PatchInputsRequest
# (protoc output -- do not edit by hand). Fields: user_app_id (message),
# inputs (repeated message), action (string).
_PATCHINPUTSREQUEST = _descriptor.Descriptor(
  name='PatchInputsRequest',
  full_name='clarifai.api.PatchInputsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.PatchInputsRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='inputs', full_name='clarifai.api.PatchInputsRequest.inputs', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='action', full_name='clarifai.api.PatchInputsRequest.action', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7822,
  serialized_end=7944,
)
# Auto-generated Descriptor for clarifai.api.DeleteInputRequest
# (protoc output -- do not edit by hand). Fields: user_app_id (message),
# input_id (string).
_DELETEINPUTREQUEST = _descriptor.Descriptor(
  name='DeleteInputRequest',
  full_name='clarifai.api.DeleteInputRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.DeleteInputRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='input_id', full_name='clarifai.api.DeleteInputRequest.input_id', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7946,
  serialized_end=8033,
)
# Auto-generated Descriptor for clarifai.api.DeleteInputsRequest
# (protoc output -- do not edit by hand). Fields: user_app_id (message),
# ids (repeated string).
_DELETEINPUTSREQUEST = _descriptor.Descriptor(
  name='DeleteInputsRequest',
  full_name='clarifai.api.DeleteInputsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.DeleteInputsRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='ids', full_name='clarifai.api.DeleteInputsRequest.ids', index=1,
      number=2, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=8035,
  serialized_end=8124,
)
# Auto-generated Descriptor for clarifai.api.SingleInputResponse
# (protoc output -- do not edit by hand). Fields: status (message),
# input (singular message).
_SINGLEINPUTRESPONSE = _descriptor.Descriptor(
  name='SingleInputResponse',
  full_name='clarifai.api.SingleInputResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='status', full_name='clarifai.api.SingleInputResponse.status', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='input', full_name='clarifai.api.SingleInputResponse.input', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=8126,
  serialized_end=8228,
)
# Auto-generated Descriptor for clarifai.api.MultiInputResponse
# (protoc output -- do not edit by hand). Fields: status (message),
# inputs (repeated message; carries field-level serialized options).
_MULTIINPUTRESPONSE = _descriptor.Descriptor(
  name='MultiInputResponse',
  full_name='clarifai.api.MultiInputResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='status', full_name='clarifai.api.MultiInputResponse.status', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='inputs', full_name='clarifai.api.MultiInputResponse.inputs', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=b'\200\265\030\001', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=8230,
  serialized_end=8338,
)
_MULTIINPUTANNOTATIONRESPONSE = _descriptor.Descriptor(
name='MultiInputAnnotationResponse',
full_name='clarifai.api.MultiInputAnnotationResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.MultiInputAnnotationResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='hits', full_name='clarifai.api.MultiInputAnnotationResponse.hits', index=1,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\200\265\030\001', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=8340,
serialized_end=8454,
)
_SINGLEINPUTCOUNTRESPONSE = _descriptor.Descriptor(
name='SingleInputCountResponse',
full_name='clarifai.api.SingleInputCountResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.SingleInputCountResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='counts', full_name='clarifai.api.SingleInputCountResponse.counts', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=8456,
serialized_end=8569,
)
_GETINPUTCOUNTREQUEST = _descriptor.Descriptor(
name='GetInputCountRequest',
full_name='clarifai.api.GetInputCountRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.GetInputCountRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=8571,
serialized_end=8642,
)
# NOTE(review): machine-generated protobuf descriptors (protoc output) — do not
# hand-edit; regenerate from the .proto instead.

# clarifai.api.PostModelOutputsRequest: user_app_id (1), model_id (2, string),
# version_id (3, string), repeated inputs (4), model (5).
_POSTMODELOUTPUTSREQUEST = _descriptor.Descriptor(
  name='PostModelOutputsRequest',
  full_name='clarifai.api.PostModelOutputsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.PostModelOutputsRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='model_id', full_name='clarifai.api.PostModelOutputsRequest.model_id', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='version_id', full_name='clarifai.api.PostModelOutputsRequest.version_id', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='inputs', full_name='clarifai.api.PostModelOutputsRequest.inputs', index=3,
      number=4, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='model', full_name='clarifai.api.PostModelOutputsRequest.model', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=8645,
  serialized_end=8830,
)

# clarifai.api.ListModelInputsRequest: user_app_id (1), model_id (2),
# version_id (3), page (4, uint32), per_page (5, uint32).
_LISTMODELINPUTSREQUEST = _descriptor.Descriptor(
  name='ListModelInputsRequest',
  full_name='clarifai.api.ListModelInputsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.ListModelInputsRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='model_id', full_name='clarifai.api.ListModelInputsRequest.model_id', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='version_id', full_name='clarifai.api.ListModelInputsRequest.version_id', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='page', full_name='clarifai.api.ListModelInputsRequest.page', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='per_page', full_name='clarifai.api.ListModelInputsRequest.per_page', index=4,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=8833,
  serialized_end=8976,
)
# NOTE(review): machine-generated protobuf descriptors (protoc output) — do not
# hand-edit; regenerate from the .proto instead. This section covers the API-key
# (Key) CRUD request messages.

# clarifai.api.GetKeyRequest: user_app_id (1) + key_id (2, string).
_GETKEYREQUEST = _descriptor.Descriptor(
  name='GetKeyRequest',
  full_name='clarifai.api.GetKeyRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.GetKeyRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='key_id', full_name='clarifai.api.GetKeyRequest.key_id', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=8978,
  serialized_end=9058,
)

# clarifai.api.ListKeysRequest: user_app_id (1) + page (2) + per_page (3).
_LISTKEYSREQUEST = _descriptor.Descriptor(
  name='ListKeysRequest',
  full_name='clarifai.api.ListKeysRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.ListKeysRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='page', full_name='clarifai.api.ListKeysRequest.page', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='per_page', full_name='clarifai.api.ListKeysRequest.per_page', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=9060,
  serialized_end=9158,
)

# clarifai.api.ListAppKeysRequest: same field layout as ListKeysRequest
# (user_app_id, page, per_page) under a distinct message name.
_LISTAPPKEYSREQUEST = _descriptor.Descriptor(
  name='ListAppKeysRequest',
  full_name='clarifai.api.ListAppKeysRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.ListAppKeysRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='page', full_name='clarifai.api.ListAppKeysRequest.page', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='per_page', full_name='clarifai.api.ListAppKeysRequest.per_page', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=9160,
  serialized_end=9261,
)

# clarifai.api.PostKeysRequest: user_app_id (1) + repeated keys (2).
_POSTKEYSREQUEST = _descriptor.Descriptor(
  name='PostKeysRequest',
  full_name='clarifai.api.PostKeysRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.PostKeysRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='keys', full_name='clarifai.api.PostKeysRequest.keys', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=9263,
  serialized_end=9362,
)

# clarifai.api.DeleteKeyRequest: user_app_id (1) + key_id (2, string).
_DELETEKEYREQUEST = _descriptor.Descriptor(
  name='DeleteKeyRequest',
  full_name='clarifai.api.DeleteKeyRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.DeleteKeyRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='key_id', full_name='clarifai.api.DeleteKeyRequest.key_id', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=9364,
  serialized_end=9447,
)

# clarifai.api.PatchKeysRequest: user_app_id (1), repeated keys (2),
# action (3, string).
_PATCHKEYSREQUEST = _descriptor.Descriptor(
  name='PatchKeysRequest',
  full_name='clarifai.api.PatchKeysRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.PatchKeysRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='keys', full_name='clarifai.api.PatchKeysRequest.keys', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='action', full_name='clarifai.api.PatchKeysRequest.action', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=9449,
  serialized_end=9565,
)
# NOTE(review): machine-generated protobuf descriptors (protoc output) — do not
# hand-edit; regenerate from the .proto instead.

# clarifai.api.SingleKeyResponse: status (1) + key (2).
_SINGLEKEYRESPONSE = _descriptor.Descriptor(
  name='SingleKeyResponse',
  full_name='clarifai.api.SingleKeyResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='status', full_name='clarifai.api.SingleKeyResponse.status', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='key', full_name='clarifai.api.SingleKeyResponse.key', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=9567,
  serialized_end=9663,
)

# clarifai.api.MultiKeyResponse: status (1) + repeated keys (2).
# keys carries option bytes b'\200\265\030\001' — presumably a Clarifai custom
# field option; confirm against the .proto.
_MULTIKEYRESPONSE = _descriptor.Descriptor(
  name='MultiKeyResponse',
  full_name='clarifai.api.MultiKeyResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='status', full_name='clarifai.api.MultiKeyResponse.status', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='keys', full_name='clarifai.api.MultiKeyResponse.keys', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=b'\200\265\030\001', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=9665,
  serialized_end=9767,
)

# clarifai.api.GetModelRequest: user_app_id (1), model_id (2), version_id (3),
# language (4, string), trained_before (5, bool).
_GETMODELREQUEST = _descriptor.Descriptor(
  name='GetModelRequest',
  full_name='clarifai.api.GetModelRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.GetModelRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='model_id', full_name='clarifai.api.GetModelRequest.model_id', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='version_id', full_name='clarifai.api.GetModelRequest.version_id', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='language', full_name='clarifai.api.GetModelRequest.language', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='trained_before', full_name='clarifai.api.GetModelRequest.trained_before', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=9770,
  serialized_end=9916,
)
# NOTE(review): machine-generated protobuf descriptor (protoc output) — do not
# hand-edit; regenerate from the .proto instead.

# clarifai.api.ListModelsRequest: paging (page=2, per_page=3), sorting flags
# (sort_ascending=10, sort_by_name=11, sort_by_num_inputs=12,
# sort_by_modified_at=13 — the last three belong to the 'sort_by' oneof below),
# query/name filters (query=14, name=5), model_type_id=6, trained_only=7,
# input_fields=8, output_fields=9, license=15, featured_only=16.
# 'name' carries option bytes b'\030\001' — likely FieldOptions.deprecated=true;
# confirm against the .proto.
_LISTMODELSREQUEST = _descriptor.Descriptor(
  name='ListModelsRequest',
  full_name='clarifai.api.ListModelsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.ListModelsRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='page', full_name='clarifai.api.ListModelsRequest.page', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='per_page', full_name='clarifai.api.ListModelsRequest.per_page', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='sort_ascending', full_name='clarifai.api.ListModelsRequest.sort_ascending', index=3,
      number=10, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='sort_by_name', full_name='clarifai.api.ListModelsRequest.sort_by_name', index=4,
      number=11, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='sort_by_num_inputs', full_name='clarifai.api.ListModelsRequest.sort_by_num_inputs', index=5,
      number=12, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='sort_by_modified_at', full_name='clarifai.api.ListModelsRequest.sort_by_modified_at', index=6,
      number=13, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='query', full_name='clarifai.api.ListModelsRequest.query', index=7,
      number=14, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='name', full_name='clarifai.api.ListModelsRequest.name', index=8,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=b'\030\001', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='model_type_id', full_name='clarifai.api.ListModelsRequest.model_type_id', index=9,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='trained_only', full_name='clarifai.api.ListModelsRequest.trained_only', index=10,
      number=7, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='input_fields', full_name='clarifai.api.ListModelsRequest.input_fields', index=11,
      number=8, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='output_fields', full_name='clarifai.api.ListModelsRequest.output_fields', index=12,
      number=9, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='license', full_name='clarifai.api.ListModelsRequest.license', index=13,
      number=15, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='featured_only', full_name='clarifai.api.ListModelsRequest.featured_only', index=14,
      number=16, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='sort_by', full_name='clarifai.api.ListModelsRequest.sort_by',
      index=0, containing_type=None, fields=[]),
  ],
  serialized_start=9919,
  serialized_end=10308,
)
# NOTE(review): machine-generated protobuf descriptors (protoc output) — do not
# hand-edit; regenerate from the .proto instead. This section covers the Model
# CRUD / search request and response messages.

# clarifai.api.PostModelsRequest: user_app_id (1), model (2), repeated models (3).
# 'model' carries option bytes b'\030\001' — likely FieldOptions.deprecated=true
# in favor of 'models'; confirm against the .proto.
_POSTMODELSREQUEST = _descriptor.Descriptor(
  name='PostModelsRequest',
  full_name='clarifai.api.PostModelsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.PostModelsRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='model', full_name='clarifai.api.PostModelsRequest.model', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=b'\030\001', file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='models', full_name='clarifai.api.PostModelsRequest.models', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10311,
  serialized_end=10456,
)

# clarifai.api.PatchModelsRequest: user_app_id (1), repeated models (2),
# action (3, string).
_PATCHMODELSREQUEST = _descriptor.Descriptor(
  name='PatchModelsRequest',
  full_name='clarifai.api.PatchModelsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.PatchModelsRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='models', full_name='clarifai.api.PatchModelsRequest.models', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='action', full_name='clarifai.api.PatchModelsRequest.action', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10458,
  serialized_end=10580,
)

# clarifai.api.DeleteModelRequest: user_app_id (1) + model_id (2, string).
_DELETEMODELREQUEST = _descriptor.Descriptor(
  name='DeleteModelRequest',
  full_name='clarifai.api.DeleteModelRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.DeleteModelRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='model_id', full_name='clarifai.api.DeleteModelRequest.model_id', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10582,
  serialized_end=10669,
)

# clarifai.api.DeleteModelsRequest: user_app_id (1), repeated ids (2, string),
# delete_all (3, bool).
_DELETEMODELSREQUEST = _descriptor.Descriptor(
  name='DeleteModelsRequest',
  full_name='clarifai.api.DeleteModelsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.DeleteModelsRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='ids', full_name='clarifai.api.DeleteModelsRequest.ids', index=1,
      number=2, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='delete_all', full_name='clarifai.api.DeleteModelsRequest.delete_all', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10671,
  serialized_end=10774,
)

# clarifai.api.PostModelsSearchesRequest: user_app_id (1), model_query (2),
# pagination (3).
_POSTMODELSSEARCHESREQUEST = _descriptor.Descriptor(
  name='PostModelsSearchesRequest',
  full_name='clarifai.api.PostModelsSearchesRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='user_app_id', full_name='clarifai.api.PostModelsSearchesRequest.user_app_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='model_query', full_name='clarifai.api.PostModelsSearchesRequest.model_query', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='pagination', full_name='clarifai.api.PostModelsSearchesRequest.pagination', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10777,
  serialized_end=10946,
)

# clarifai.api.SingleModelResponse: status (1) + model (2).
_SINGLEMODELRESPONSE = _descriptor.Descriptor(
  name='SingleModelResponse',
  full_name='clarifai.api.SingleModelResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='status', full_name='clarifai.api.SingleModelResponse.status', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='model', full_name='clarifai.api.SingleModelResponse.model', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10948,
  serialized_end=11050,
)

# clarifai.api.MultiModelResponse: status (1) + repeated models (2).
# models carries option bytes b'\200\265\030\001' — presumably a Clarifai custom
# field option; confirm against the .proto.
_MULTIMODELRESPONSE = _descriptor.Descriptor(
  name='MultiModelResponse',
  full_name='clarifai.api.MultiModelResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='status', full_name='clarifai.api.MultiModelResponse.status', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='models', full_name='clarifai.api.MultiModelResponse.models', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=b'\200\265\030\001', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=11052,
  serialized_end=11160,
)
_PATCHMODELVERSIONSREQUEST = _descriptor.Descriptor(
name='PatchModelVersionsRequest',
full_name='clarifai.api.PatchModelVersionsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PatchModelVersionsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='model_id', full_name='clarifai.api.PatchModelVersionsRequest.model_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='model_versions', full_name='clarifai.api.PatchModelVersionsRequest.model_versions', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='action', full_name='clarifai.api.PatchModelVersionsRequest.action', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=11163,
serialized_end=11325,
)
_GETMODELVERSIONREQUEST = _descriptor.Descriptor(
name='GetModelVersionRequest',
full_name='clarifai.api.GetModelVersionRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.GetModelVersionRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='model_id', full_name='clarifai.api.GetModelVersionRequest.model_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='version_id', full_name='clarifai.api.GetModelVersionRequest.version_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=11327,
serialized_end=11438,
)
_LISTMODELVERSIONSREQUEST = _descriptor.Descriptor(
name='ListModelVersionsRequest',
full_name='clarifai.api.ListModelVersionsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.ListModelVersionsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='model_id', full_name='clarifai.api.ListModelVersionsRequest.model_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='page', full_name='clarifai.api.ListModelVersionsRequest.page', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='per_page', full_name='clarifai.api.ListModelVersionsRequest.per_page', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='concept_ids', full_name='clarifai.api.ListModelVersionsRequest.concept_ids', index=4,
number=5, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=11441,
serialized_end=11587,
)
_DELETEMODELVERSIONREQUEST = _descriptor.Descriptor(
name='DeleteModelVersionRequest',
full_name='clarifai.api.DeleteModelVersionRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.DeleteModelVersionRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='model_id', full_name='clarifai.api.DeleteModelVersionRequest.model_id', index=1,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='version_id', full_name='clarifai.api.DeleteModelVersionRequest.version_id', index=2,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=11589,
serialized_end=11703,
)
_SINGLEMODELVERSIONRESPONSE = _descriptor.Descriptor(
name='SingleModelVersionResponse',
full_name='clarifai.api.SingleModelVersionResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.SingleModelVersionResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='model_version', full_name='clarifai.api.SingleModelVersionResponse.model_version', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=11705,
serialized_end=11829,
)
_MULTIMODELVERSIONRESPONSE = _descriptor.Descriptor(
name='MultiModelVersionResponse',
full_name='clarifai.api.MultiModelVersionResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.MultiModelVersionResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='model_versions', full_name='clarifai.api.MultiModelVersionResponse.model_versions', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\200\265\030\001', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=11832,
serialized_end=11962,
)
_POSTMODELVERSIONSREQUEST = _descriptor.Descriptor(
name='PostModelVersionsRequest',
full_name='clarifai.api.PostModelVersionsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PostModelVersionsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='model_id', full_name='clarifai.api.PostModelVersionsRequest.model_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='model_versions', full_name='clarifai.api.PostModelVersionsRequest.model_versions', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='search', full_name='clarifai.api.PostModelVersionsRequest.search', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\030\001', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='train_search', full_name='clarifai.api.PostModelVersionsRequest.train_search', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='test_search', full_name='clarifai.api.PostModelVersionsRequest.test_search', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='evaluate_after_training', full_name='clarifai.api.PostModelVersionsRequest.evaluate_after_training', index=6,
number=7, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='description', full_name='clarifai.api.PostModelVersionsRequest.description', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=11965,
serialized_end=12293,
)
_POSTMODELVERSIONMETRICSREQUEST = _descriptor.Descriptor(
name='PostModelVersionMetricsRequest',
full_name='clarifai.api.PostModelVersionMetricsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PostModelVersionMetricsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='model_id', full_name='clarifai.api.PostModelVersionMetricsRequest.model_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='version_id', full_name='clarifai.api.PostModelVersionMetricsRequest.version_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='max_examples', full_name='clarifai.api.PostModelVersionMetricsRequest.max_examples', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='test_search', full_name='clarifai.api.PostModelVersionMetricsRequest.test_search', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=12296,
serialized_end=12480,
)
_GETMODELVERSIONMETRICSREQUEST = _descriptor.Descriptor(
name='GetModelVersionMetricsRequest',
full_name='clarifai.api.GetModelVersionMetricsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.GetModelVersionMetricsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='model_id', full_name='clarifai.api.GetModelVersionMetricsRequest.model_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='version_id', full_name='clarifai.api.GetModelVersionMetricsRequest.version_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fields', full_name='clarifai.api.GetModelVersionMetricsRequest.fields', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=12483,
serialized_end=12644,
)
_GETMODELTYPEREQUEST = _descriptor.Descriptor(
name='GetModelTypeRequest',
full_name='clarifai.api.GetModelTypeRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.GetModelTypeRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='model_type_id', full_name='clarifai.api.GetModelTypeRequest.model_type_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=12646,
serialized_end=12739,
)
_LISTMODELTYPESREQUEST = _descriptor.Descriptor(
name='ListModelTypesRequest',
full_name='clarifai.api.ListModelTypesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.ListModelTypesRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='page', full_name='clarifai.api.ListModelTypesRequest.page', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='per_page', full_name='clarifai.api.ListModelTypesRequest.per_page', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=12741,
serialized_end=12845,
)
_LISTOPENSOURCELICENSESREQUEST = _descriptor.Descriptor(
name='ListOpenSourceLicensesRequest',
full_name='clarifai.api.ListOpenSourceLicensesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=12847,
serialized_end=12878,
)
_LISTOPENSOURCELICENSESRESPONSE = _descriptor.Descriptor(
name='ListOpenSourceLicensesResponse',
full_name='clarifai.api.ListOpenSourceLicensesResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.ListOpenSourceLicensesResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='licenses', full_name='clarifai.api.ListOpenSourceLicensesResponse.licenses', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=12880,
serialized_end=12975,
)
_SINGLEMODELTYPERESPONSE = _descriptor.Descriptor(
name='SingleModelTypeResponse',
full_name='clarifai.api.SingleModelTypeResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.SingleModelTypeResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='model_type', full_name='clarifai.api.SingleModelTypeResponse.model_type', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\200\265\030\001', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=12977,
serialized_end=13098,
)
_MULTIMODELTYPERESPONSE = _descriptor.Descriptor(
name='MultiModelTypeResponse',
full_name='clarifai.api.MultiModelTypeResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.MultiModelTypeResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='model_types', full_name='clarifai.api.MultiModelTypeResponse.model_types', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\200\265\030\001', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=13100,
serialized_end=13221,
)
_GETMODELVERSIONINPUTEXAMPLEREQUEST = _descriptor.Descriptor(
name='GetModelVersionInputExampleRequest',
full_name='clarifai.api.GetModelVersionInputExampleRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.GetModelVersionInputExampleRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='model_id', full_name='clarifai.api.GetModelVersionInputExampleRequest.model_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='model_version_id', full_name='clarifai.api.GetModelVersionInputExampleRequest.model_version_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='example_id', full_name='clarifai.api.GetModelVersionInputExampleRequest.example_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=13224,
serialized_end=13373,
)
_LISTMODELVERSIONINPUTEXAMPLESREQUEST = _descriptor.Descriptor(
name='ListModelVersionInputExamplesRequest',
full_name='clarifai.api.ListModelVersionInputExamplesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.ListModelVersionInputExamplesRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='model_id', full_name='clarifai.api.ListModelVersionInputExamplesRequest.model_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='model_version_id', full_name='clarifai.api.ListModelVersionInputExamplesRequest.model_version_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='page', full_name='clarifai.api.ListModelVersionInputExamplesRequest.page', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='per_page', full_name='clarifai.api.ListModelVersionInputExamplesRequest.per_page', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=13376,
serialized_end=13539,
)
_SINGLEMODELVERSIONINPUTEXAMPLERESPONSE = _descriptor.Descriptor(
name='SingleModelVersionInputExampleResponse',
full_name='clarifai.api.SingleModelVersionInputExampleResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.SingleModelVersionInputExampleResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='model_version_input_example', full_name='clarifai.api.SingleModelVersionInputExampleResponse.model_version_input_example', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=13542,
serialized_end=13704,
)
_MULTIMODELVERSIONINPUTEXAMPLERESPONSE = _descriptor.Descriptor(
name='MultiModelVersionInputExampleResponse',
full_name='clarifai.api.MultiModelVersionInputExampleResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.MultiModelVersionInputExampleResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='model_version_input_examples', full_name='clarifai.api.MultiModelVersionInputExampleResponse.model_version_input_examples', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=13707,
serialized_end=13869,
)
_LISTMODELREFERENCESREQUEST = _descriptor.Descriptor(
name='ListModelReferencesRequest',
full_name='clarifai.api.ListModelReferencesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.ListModelReferencesRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='model_id', full_name='clarifai.api.ListModelReferencesRequest.model_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='page', full_name='clarifai.api.ListModelReferencesRequest.page', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='per_page', full_name='clarifai.api.ListModelReferencesRequest.per_page', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=13871,
serialized_end=13998,
)
_MULTIMODELREFERENCERESPONSE = _descriptor.Descriptor(
name='MultiModelReferenceResponse',
full_name='clarifai.api.MultiModelReferenceResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.MultiModelReferenceResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='model_references', full_name='clarifai.api.MultiModelReferenceResponse.model_references', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=14001,
serialized_end=14131,
)
_MULTIOUTPUTRESPONSE = _descriptor.Descriptor(
name='MultiOutputResponse',
full_name='clarifai.api.MultiOutputResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.MultiOutputResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='outputs', full_name='clarifai.api.MultiOutputResponse.outputs', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\200\265\030\001', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=14133,
serialized_end=14244,
)
_LISTSCOPESREQUEST = _descriptor.Descriptor(
name='ListScopesRequest',
full_name='clarifai.api.ListScopesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key_type', full_name='clarifai.api.ListScopesRequest.key_type', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.ListScopesRequest.user_app_id', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=14246,
serialized_end=14332,
)
_MYSCOPESREQUEST = _descriptor.Descriptor(
name='MyScopesRequest',
full_name='clarifai.api.MyScopesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.MyScopesRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=14334,
serialized_end=14400,
)
_MULTISCOPEDEPSRESPONSE = _descriptor.Descriptor(
name='MultiScopeDepsResponse',
full_name='clarifai.api.MultiScopeDepsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.MultiScopeDepsResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='scope_deps', full_name='clarifai.api.MultiScopeDepsResponse.scope_deps', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='endpoint_deps', full_name='clarifai.api.MultiScopeDepsResponse.endpoint_deps', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=14403,
serialized_end=14568,
)
_MULTISCOPERESPONSE = _descriptor.Descriptor(
name='MultiScopeResponse',
full_name='clarifai.api.MultiScopeResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.MultiScopeResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='scopes', full_name='clarifai.api.MultiScopeResponse.scopes', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='app', full_name='clarifai.api.MultiScopeResponse.app', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='endpoints', full_name='clarifai.api.MultiScopeResponse.endpoints', index=3,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=14571,
serialized_end=14703,
)
_GETSEARCHREQUEST = _descriptor.Descriptor(
name='GetSearchRequest',
full_name='clarifai.api.GetSearchRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.GetSearchRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='id', full_name='clarifai.api.GetSearchRequest.id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=14705,
serialized_end=14784,
)
_LISTSEARCHESREQUEST = _descriptor.Descriptor(
name='ListSearchesRequest',
full_name='clarifai.api.ListSearchesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.ListSearchesRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='page', full_name='clarifai.api.ListSearchesRequest.page', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='per_page', full_name='clarifai.api.ListSearchesRequest.per_page', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=14786,
serialized_end=14888,
)
_POSTSEARCHESREQUEST = _descriptor.Descriptor(
name='PostSearchesRequest',
full_name='clarifai.api.PostSearchesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PostSearchesRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='query', full_name='clarifai.api.PostSearchesRequest.query', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\030\001', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='searches', full_name='clarifai.api.PostSearchesRequest.searches', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='clarifai.api.PostSearchesRequest.pagination', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=14891,
serialized_end=15087,
)
_POSTSEARCHESBYIDREQUEST = _descriptor.Descriptor(
name='PostSearchesByIDRequest',
full_name='clarifai.api.PostSearchesByIDRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PostSearchesByIDRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='id', full_name='clarifai.api.PostSearchesByIDRequest.id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='clarifai.api.PostSearchesByIDRequest.pagination', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=15090,
serialized_end=15222,
)
_DELETESEARCHREQUEST = _descriptor.Descriptor(
name='DeleteSearchRequest',
full_name='clarifai.api.DeleteSearchRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.DeleteSearchRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='id', full_name='clarifai.api.DeleteSearchRequest.id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=15224,
serialized_end=15306,
)
_POSTANNOTATIONSSEARCHESREQUEST = _descriptor.Descriptor(
name='PostAnnotationsSearchesRequest',
full_name='clarifai.api.PostAnnotationsSearchesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PostAnnotationsSearchesRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='searches', full_name='clarifai.api.PostAnnotationsSearchesRequest.searches', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='clarifai.api.PostAnnotationsSearchesRequest.pagination', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=15309,
serialized_end=15476,
)
_DELETEANNOTATIONSEARCHMETRICSREQUEST = _descriptor.Descriptor(
name='DeleteAnnotationSearchMetricsRequest',
full_name='clarifai.api.DeleteAnnotationSearchMetricsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.DeleteAnnotationSearchMetricsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='id', full_name='clarifai.api.DeleteAnnotationSearchMetricsRequest.id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=15478,
serialized_end=15577,
)
_POSTINPUTSSEARCHESREQUEST = _descriptor.Descriptor(
name='PostInputsSearchesRequest',
full_name='clarifai.api.PostInputsSearchesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PostInputsSearchesRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='searches', full_name='clarifai.api.PostInputsSearchesRequest.searches', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pagination', full_name='clarifai.api.PostInputsSearchesRequest.pagination', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=15580,
serialized_end=15742,
)
_SINGLESEARCHRESPONSE = _descriptor.Descriptor(
name='SingleSearchResponse',
full_name='clarifai.api.SingleSearchResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.SingleSearchResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='search', full_name='clarifai.api.SingleSearchResponse.search', index=1,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=15744,
serialized_end=15849,
)
_MULTISEARCHRESPONSE = _descriptor.Descriptor(
name='MultiSearchResponse',
full_name='clarifai.api.MultiSearchResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.MultiSearchResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='id', full_name='clarifai.api.MultiSearchResponse.id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='hits', full_name='clarifai.api.MultiSearchResponse.hits', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\200\265\030\001', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='query', full_name='clarifai.api.MultiSearchResponse.query', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='searches', full_name='clarifai.api.MultiSearchResponse.searches', index=4,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=15852,
serialized_end=16045,
)
_POSTANNOTATIONSEARCHMETRICSREQUEST = _descriptor.Descriptor(
name='PostAnnotationSearchMetricsRequest',
full_name='clarifai.api.PostAnnotationSearchMetricsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PostAnnotationSearchMetricsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='id', full_name='clarifai.api.PostAnnotationSearchMetricsRequest.id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ground_truth', full_name='clarifai.api.PostAnnotationSearchMetricsRequest.ground_truth', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='search_to_eval', full_name='clarifai.api.PostAnnotationSearchMetricsRequest.search_to_eval', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='data', full_name='clarifai.api.PostAnnotationSearchMetricsRequest.data', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='evaluation_type', full_name='clarifai.api.PostAnnotationSearchMetricsRequest.evaluation_type', index=5,
number=6, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=16048,
serialized_end=16324,
)
_GETANNOTATIONSEARCHMETRICSREQUEST = _descriptor.Descriptor(
name='GetAnnotationSearchMetricsRequest',
full_name='clarifai.api.GetAnnotationSearchMetricsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.GetAnnotationSearchMetricsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='id', full_name='clarifai.api.GetAnnotationSearchMetricsRequest.id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=16326,
serialized_end=16422,
)
_LISTANNOTATIONSEARCHMETRICSREQUEST = _descriptor.Descriptor(
name='ListAnnotationSearchMetricsRequest',
full_name='clarifai.api.ListAnnotationSearchMetricsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.ListAnnotationSearchMetricsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=16424,
serialized_end=16509,
)
_MULTIANNOTATIONSEARCHMETRICSRESPONSE = _descriptor.Descriptor(
name='MultiAnnotationSearchMetricsResponse',
full_name='clarifai.api.MultiAnnotationSearchMetricsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.MultiAnnotationSearchMetricsResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='annotation_search_metrics', full_name='clarifai.api.MultiAnnotationSearchMetricsResponse.annotation_search_metrics', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=16512,
serialized_end=16669,
)
_POSTVALIDATEPASSWORDREQUEST = _descriptor.Descriptor(
name='PostValidatePasswordRequest',
full_name='clarifai.api.PostValidatePasswordRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PostValidatePasswordRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='password', full_name='clarifai.api.PostValidatePasswordRequest.password', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=16671,
serialized_end=16791,
)
_SINGLEPASSWORDVALIDATIONRESPONSE = _descriptor.Descriptor(
name='SinglePasswordValidationResponse',
full_name='clarifai.api.SinglePasswordValidationResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.SinglePasswordValidationResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='password_violations', full_name='clarifai.api.SinglePasswordValidationResponse.password_violations', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=16794,
serialized_end=16936,
)
_GETWORKFLOWREQUEST = _descriptor.Descriptor(
name='GetWorkflowRequest',
full_name='clarifai.api.GetWorkflowRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.GetWorkflowRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='workflow_id', full_name='clarifai.api.GetWorkflowRequest.workflow_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='favor_clarifai_workflows', full_name='clarifai.api.GetWorkflowRequest.favor_clarifai_workflows', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=16938,
serialized_end=17062,
)
_LISTWORKFLOWSREQUEST = _descriptor.Descriptor(
name='ListWorkflowsRequest',
full_name='clarifai.api.ListWorkflowsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.ListWorkflowsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='page', full_name='clarifai.api.ListWorkflowsRequest.page', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='per_page', full_name='clarifai.api.ListWorkflowsRequest.per_page', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sort_ascending', full_name='clarifai.api.ListWorkflowsRequest.sort_ascending', index=3,
number=5, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sort_by_id', full_name='clarifai.api.ListWorkflowsRequest.sort_by_id', index=4,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sort_by_modified_at', full_name='clarifai.api.ListWorkflowsRequest.sort_by_modified_at', index=5,
number=7, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='query', full_name='clarifai.api.ListWorkflowsRequest.query', index=6,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='id', full_name='clarifai.api.ListWorkflowsRequest.id', index=7,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\030\001', file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='featured_only', full_name='clarifai.api.ListWorkflowsRequest.featured_only', index=8,
number=9, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='sort_by', full_name='clarifai.api.ListWorkflowsRequest.sort_by',
index=0, containing_type=None, fields=[]),
],
serialized_start=17065,
serialized_end=17310,
)
_POSTWORKFLOWSREQUEST = _descriptor.Descriptor(
name='PostWorkflowsRequest',
full_name='clarifai.api.PostWorkflowsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PostWorkflowsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='workflows', full_name='clarifai.api.PostWorkflowsRequest.workflows', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=17312,
serialized_end=17426,
)
_PATCHWORKFLOWSREQUEST = _descriptor.Descriptor(
name='PatchWorkflowsRequest',
full_name='clarifai.api.PatchWorkflowsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PatchWorkflowsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='workflows', full_name='clarifai.api.PatchWorkflowsRequest.workflows', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='action', full_name='clarifai.api.PatchWorkflowsRequest.action', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=17429,
serialized_end=17560,
)
_DELETEWORKFLOWREQUEST = _descriptor.Descriptor(
name='DeleteWorkflowRequest',
full_name='clarifai.api.DeleteWorkflowRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.DeleteWorkflowRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='workflow_id', full_name='clarifai.api.DeleteWorkflowRequest.workflow_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=17562,
serialized_end=17655,
)
_DELETEWORKFLOWSREQUEST = _descriptor.Descriptor(
name='DeleteWorkflowsRequest',
full_name='clarifai.api.DeleteWorkflowsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.DeleteWorkflowsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ids', full_name='clarifai.api.DeleteWorkflowsRequest.ids', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='delete_all', full_name='clarifai.api.DeleteWorkflowsRequest.delete_all', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=17657,
serialized_end=17763,
)
_SINGLEWORKFLOWRESPONSE = _descriptor.Descriptor(
name='SingleWorkflowResponse',
full_name='clarifai.api.SingleWorkflowResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.SingleWorkflowResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='workflow', full_name='clarifai.api.SingleWorkflowResponse.workflow', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=17765,
serialized_end=17876,
)
_MULTIWORKFLOWRESPONSE = _descriptor.Descriptor(
name='MultiWorkflowResponse',
full_name='clarifai.api.MultiWorkflowResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.MultiWorkflowResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='workflows', full_name='clarifai.api.MultiWorkflowResponse.workflows', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\200\265\030\001', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=17878,
serialized_end=17995,
)
_POSTWORKFLOWRESULTSREQUEST = _descriptor.Descriptor(
name='PostWorkflowResultsRequest',
full_name='clarifai.api.PostWorkflowResultsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PostWorkflowResultsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='workflow_id', full_name='clarifai.api.PostWorkflowResultsRequest.workflow_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='version_id', full_name='clarifai.api.PostWorkflowResultsRequest.version_id', index=2,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='inputs', full_name='clarifai.api.PostWorkflowResultsRequest.inputs', index=3,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='output_config', full_name='clarifai.api.PostWorkflowResultsRequest.output_config', index=4,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='favor_clarifai_workflows', full_name='clarifai.api.PostWorkflowResultsRequest.favor_clarifai_workflows', index=5,
number=5, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='workflow_state', full_name='clarifai.api.PostWorkflowResultsRequest.workflow_state', index=6,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=17998,
serialized_end=18291,
)
_POSTWORKFLOWRESULTSRESPONSE = _descriptor.Descriptor(
name='PostWorkflowResultsResponse',
full_name='clarifai.api.PostWorkflowResultsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.PostWorkflowResultsResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='workflow', full_name='clarifai.api.PostWorkflowResultsResponse.workflow', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='results', full_name='clarifai.api.PostWorkflowResultsResponse.results', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='workflow_state', full_name='clarifai.api.PostWorkflowResultsResponse.workflow_state', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=18294,
serialized_end=18510,
)
_POSTWORKFLOWRESULTSSIMILARITYREQUEST = _descriptor.Descriptor(
name='PostWorkflowResultsSimilarityRequest',
full_name='clarifai.api.PostWorkflowResultsSimilarityRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PostWorkflowResultsSimilarityRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='workflow_id', full_name='clarifai.api.PostWorkflowResultsSimilarityRequest.workflow_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='version_id', full_name='clarifai.api.PostWorkflowResultsSimilarityRequest.version_id', index=2,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='model_version_id', full_name='clarifai.api.PostWorkflowResultsSimilarityRequest.model_version_id', index=3,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='probe_inputs', full_name='clarifai.api.PostWorkflowResultsSimilarityRequest.probe_inputs', index=4,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pool_inputs', full_name='clarifai.api.PostWorkflowResultsSimilarityRequest.pool_inputs', index=5,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='favor_clarifai_workflows', full_name='clarifai.api.PostWorkflowResultsSimilarityRequest.favor_clarifai_workflows', index=6,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=18513,
serialized_end=18786,
)
_POSTWORKFLOWRESULTSSIMILARITYRESPONSE = _descriptor.Descriptor(
name='PostWorkflowResultsSimilarityResponse',
full_name='clarifai.api.PostWorkflowResultsSimilarityResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.PostWorkflowResultsSimilarityResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='results', full_name='clarifai.api.PostWorkflowResultsSimilarityResponse.results', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=18789,
serialized_end=18931,
)
_LISTWORKFLOWVERSIONSREQUEST = _descriptor.Descriptor(
name='ListWorkflowVersionsRequest',
full_name='clarifai.api.ListWorkflowVersionsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.ListWorkflowVersionsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='workflow_id', full_name='clarifai.api.ListWorkflowVersionsRequest.workflow_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='page', full_name='clarifai.api.ListWorkflowVersionsRequest.page', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='per_page', full_name='clarifai.api.ListWorkflowVersionsRequest.per_page', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=18934,
serialized_end=19065,
)
_GETWORKFLOWVERSIONREQUEST = _descriptor.Descriptor(
name='GetWorkflowVersionRequest',
full_name='clarifai.api.GetWorkflowVersionRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.GetWorkflowVersionRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='workflow_id', full_name='clarifai.api.GetWorkflowVersionRequest.workflow_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='workflow_version_id', full_name='clarifai.api.GetWorkflowVersionRequest.workflow_version_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=19067,
serialized_end=19193,
)
_DELETEWORKFLOWVERSIONSREQUEST = _descriptor.Descriptor(
name='DeleteWorkflowVersionsRequest',
full_name='clarifai.api.DeleteWorkflowVersionsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.DeleteWorkflowVersionsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='workflow_id', full_name='clarifai.api.DeleteWorkflowVersionsRequest.workflow_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='workflow_version_ids', full_name='clarifai.api.DeleteWorkflowVersionsRequest.workflow_version_ids', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=19196,
serialized_end=19327,
)
_PATCHWORKFLOWVERSIONSREQUEST = _descriptor.Descriptor(
name='PatchWorkflowVersionsRequest',
full_name='clarifai.api.PatchWorkflowVersionsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PatchWorkflowVersionsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='workflow_id', full_name='clarifai.api.PatchWorkflowVersionsRequest.workflow_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='workflow_versions', full_name='clarifai.api.PatchWorkflowVersionsRequest.workflow_versions', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='action', full_name='clarifai.api.PatchWorkflowVersionsRequest.action', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=19330,
serialized_end=19504,
)
_MULTIWORKFLOWVERSIONRESPONSE = _descriptor.Descriptor(
name='MultiWorkflowVersionResponse',
full_name='clarifai.api.MultiWorkflowVersionResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.MultiWorkflowVersionResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='workflow_versions', full_name='clarifai.api.MultiWorkflowVersionResponse.workflow_versions', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\200\265\030\001', file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=19507,
serialized_end=19646,
)
_SINGLEWORKFLOWVERSIONRESPONSE = _descriptor.Descriptor(
name='SingleWorkflowVersionResponse',
full_name='clarifai.api.SingleWorkflowVersionResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='clarifai.api.SingleWorkflowVersionResponse.status', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='workflow_version', full_name='clarifai.api.SingleWorkflowVersionResponse.workflow_version', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=19649,
serialized_end=19782,
)
_POSTAPPDUPLICATIONSREQUEST = _descriptor.Descriptor(
name='PostAppDuplicationsRequest',
full_name='clarifai.api.PostAppDuplicationsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.PostAppDuplicationsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='app_duplications', full_name='clarifai.api.PostAppDuplicationsRequest.app_duplications', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=19785,
serialized_end=19918,
)
_GETAPPDUPLICATIONREQUEST = _descriptor.Descriptor(
name='GetAppDuplicationRequest',
full_name='clarifai.api.GetAppDuplicationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.GetAppDuplicationRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='app_duplication_id', full_name='clarifai.api.GetAppDuplicationRequest.app_duplication_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=19920,
serialized_end=20023,
)
_LISTAPPDUPLICATIONSREQUEST = _descriptor.Descriptor(
name='ListAppDuplicationsRequest',
full_name='clarifai.api.ListAppDuplicationsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_app_id', full_name='clarifai.api.ListAppDuplicationsRequest.user_app_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='page', full_name='clarifai.api.ListAppDuplicationsRequest.page', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='per_page', full_name='clarifai.api.ListAppDuplicationsRequest.per_page', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=20025,
serialized_end=20134,
)
# -----------------------------------------------------------------------------
# Machine-generated protobuf Descriptor tables (protoc output for the
# clarifai.api service messages). DO NOT EDIT BY HAND: each descriptor's
# serialized_start/serialized_end pair indexes into the file-level
# DESCRIPTOR's serialized proto blob, so any manual change here will
# desynchronize the tables — regenerate from the .proto source instead.
# NOTE(review): this is the legacy explicit-Descriptor code-gen style
# (pre-protobuf-3.20 output) — confirm against the pinned protobuf version.
# -----------------------------------------------------------------------------

# MultiAppDuplicationsResponse: status + repeated app_duplications.
_MULTIAPPDUPLICATIONSRESPONSE = _descriptor.Descriptor(
  name='MultiAppDuplicationsResponse',
  full_name='clarifai.api.MultiAppDuplicationsResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='status', full_name='clarifai.api.MultiAppDuplicationsResponse.status', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='app_duplications', full_name='clarifai.api.MultiAppDuplicationsResponse.app_duplications', index=1,
    number=2, type=11, cpp_type=10, label=3,
    has_default_value=False, default_value=[],
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=20137,
  serialized_end=20268,
)

# SingleAppDuplicationResponse: status + one app_duplication.
_SINGLEAPPDUPLICATIONRESPONSE = _descriptor.Descriptor(
  name='SingleAppDuplicationResponse',
  full_name='clarifai.api.SingleAppDuplicationResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='status', full_name='clarifai.api.SingleAppDuplicationResponse.status', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='app_duplication', full_name='clarifai.api.SingleAppDuplicationResponse.app_duplication', index=1,
    number=2, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=20271,
  serialized_end=20401,
)

# PostTasksRequest: user_app_id + repeated tasks to create.
_POSTTASKSREQUEST = _descriptor.Descriptor(
  name='PostTasksRequest',
  full_name='clarifai.api.PostTasksRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='user_app_id', full_name='clarifai.api.PostTasksRequest.user_app_id', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='tasks', full_name='clarifai.api.PostTasksRequest.tasks', index=1,
    number=2, type=11, cpp_type=10, label=3,
    has_default_value=False, default_value=[],
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=20403,
  serialized_end=20505,
)

# GetTaskRequest: user_app_id + string task_id.
_GETTASKREQUEST = _descriptor.Descriptor(
  name='GetTaskRequest',
  full_name='clarifai.api.GetTaskRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='user_app_id', full_name='clarifai.api.GetTaskRequest.user_app_id', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='task_id', full_name='clarifai.api.GetTaskRequest.task_id', index=1,
    number=2, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=b"".decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=20507,
  serialized_end=20589,
)

# ListTasksRequest: pagination plus worker/review user-id filters and an
# including_label_order_tasks toggle.
_LISTTASKSREQUEST = _descriptor.Descriptor(
  name='ListTasksRequest',
  full_name='clarifai.api.ListTasksRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='user_app_id', full_name='clarifai.api.ListTasksRequest.user_app_id', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='page', full_name='clarifai.api.ListTasksRequest.page', index=1,
    number=2, type=13, cpp_type=3, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='per_page', full_name='clarifai.api.ListTasksRequest.per_page', index=2,
    number=3, type=13, cpp_type=3, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='worker_user_ids', full_name='clarifai.api.ListTasksRequest.worker_user_ids', index=3,
    number=4, type=9, cpp_type=9, label=3,
    has_default_value=False, default_value=[],
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='review_user_ids', full_name='clarifai.api.ListTasksRequest.review_user_ids', index=4,
    number=5, type=9, cpp_type=9, label=3,
    has_default_value=False, default_value=[],
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='including_label_order_tasks', full_name='clarifai.api.ListTasksRequest.including_label_order_tasks', index=5,
    number=6, type=8, cpp_type=7, label=1,
    has_default_value=False, default_value=False,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=20592,
  serialized_end=20778,
)

# PatchTasksRequest: repeated tasks plus a string patch `action`.
_PATCHTASKSREQUEST = _descriptor.Descriptor(
  name='PatchTasksRequest',
  full_name='clarifai.api.PatchTasksRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='user_app_id', full_name='clarifai.api.PatchTasksRequest.user_app_id', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='tasks', full_name='clarifai.api.PatchTasksRequest.tasks', index=1,
    number=2, type=11, cpp_type=10, label=3,
    has_default_value=False, default_value=[],
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='action', full_name='clarifai.api.PatchTasksRequest.action', index=2,
    number=3, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=b"".decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=20780,
  serialized_end=20899,
)

# DeleteTasksRequest: delete tasks by repeated string ids.
_DELETETASKSREQUEST = _descriptor.Descriptor(
  name='DeleteTasksRequest',
  full_name='clarifai.api.DeleteTasksRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='user_app_id', full_name='clarifai.api.DeleteTasksRequest.user_app_id', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='ids', full_name='clarifai.api.DeleteTasksRequest.ids', index=1,
    number=2, type=9, cpp_type=9, label=3,
    has_default_value=False, default_value=[],
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=20901,
  serialized_end=20983,
)

# MultiTaskResponse: status + repeated tasks. The `tasks` field carries a
# generated field-options blob; its meaning is defined in the .proto source.
_MULTITASKRESPONSE = _descriptor.Descriptor(
  name='MultiTaskResponse',
  full_name='clarifai.api.MultiTaskResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='status', full_name='clarifai.api.MultiTaskResponse.status', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='tasks', full_name='clarifai.api.MultiTaskResponse.tasks', index=1,
    number=2, type=11, cpp_type=10, label=3,
    has_default_value=False, default_value=[],
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=b'\200\265\030\001', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=20985,
  serialized_end=21090,
)

# SingleTaskResponse: status + one task.
_SINGLETASKRESPONSE = _descriptor.Descriptor(
  name='SingleTaskResponse',
  full_name='clarifai.api.SingleTaskResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='status', full_name='clarifai.api.SingleTaskResponse.status', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='task', full_name='clarifai.api.SingleTaskResponse.task', index=1,
    number=2, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=21092,
  serialized_end=21191,
)

# GetTaskCountRequest: task_id plus optional user_ids filter.
_GETTASKCOUNTREQUEST = _descriptor.Descriptor(
  name='GetTaskCountRequest',
  full_name='clarifai.api.GetTaskCountRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='user_app_id', full_name='clarifai.api.GetTaskCountRequest.user_app_id', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='task_id', full_name='clarifai.api.GetTaskCountRequest.task_id', index=1,
    number=2, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=b"".decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='user_ids', full_name='clarifai.api.GetTaskCountRequest.user_ids', index=2,
    number=3, type=9, cpp_type=9, label=3,
    has_default_value=False, default_value=[],
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=21193,
  serialized_end=21298,
)

# SingleTaskCountResponse: status, app_id, task_id + repeated counts.
_SINGLETASKCOUNTRESPONSE = _descriptor.Descriptor(
  name='SingleTaskCountResponse',
  full_name='clarifai.api.SingleTaskCountResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='status', full_name='clarifai.api.SingleTaskCountResponse.status', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='app_id', full_name='clarifai.api.SingleTaskCountResponse.app_id', index=1,
    number=2, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=b"".decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='task_id', full_name='clarifai.api.SingleTaskCountResponse.task_id', index=2,
    number=3, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=b"".decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='counts', full_name='clarifai.api.SingleTaskCountResponse.counts', index=3,
    number=4, type=11, cpp_type=10, label=3,
    has_default_value=False, default_value=[],
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=21301,
  serialized_end=21458,
)

# PostCollectorsRequest: user_app_id + repeated collectors to create.
_POSTCOLLECTORSREQUEST = _descriptor.Descriptor(
  name='PostCollectorsRequest',
  full_name='clarifai.api.PostCollectorsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='user_app_id', full_name='clarifai.api.PostCollectorsRequest.user_app_id', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='collectors', full_name='clarifai.api.PostCollectorsRequest.collectors', index=1,
    number=2, type=11, cpp_type=10, label=3,
    has_default_value=False, default_value=[],
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=21460,
  serialized_end=21577,
)

# PatchCollectorsRequest: repeated collectors plus a string patch `action`.
_PATCHCOLLECTORSREQUEST = _descriptor.Descriptor(
  name='PatchCollectorsRequest',
  full_name='clarifai.api.PatchCollectorsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='user_app_id', full_name='clarifai.api.PatchCollectorsRequest.user_app_id', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='collectors', full_name='clarifai.api.PatchCollectorsRequest.collectors', index=1,
    number=2, type=11, cpp_type=10, label=3,
    has_default_value=False, default_value=[],
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='action', full_name='clarifai.api.PatchCollectorsRequest.action', index=2,
    number=3, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=b"".decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=21580,
  serialized_end=21714,
)

# DeleteCollectorsRequest: delete by repeated ids or delete_all flag.
_DELETECOLLECTORSREQUEST = _descriptor.Descriptor(
  name='DeleteCollectorsRequest',
  full_name='clarifai.api.DeleteCollectorsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='user_app_id', full_name='clarifai.api.DeleteCollectorsRequest.user_app_id', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='ids', full_name='clarifai.api.DeleteCollectorsRequest.ids', index=1,
    number=2, type=9, cpp_type=9, label=3,
    has_default_value=False, default_value=[],
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='delete_all', full_name='clarifai.api.DeleteCollectorsRequest.delete_all', index=2,
    number=3, type=8, cpp_type=7, label=1,
    has_default_value=False, default_value=False,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=21716,
  serialized_end=21823,
)

# GetCollectorRequest: user_app_id + string collector_id.
_GETCOLLECTORREQUEST = _descriptor.Descriptor(
  name='GetCollectorRequest',
  full_name='clarifai.api.GetCollectorRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='user_app_id', full_name='clarifai.api.GetCollectorRequest.user_app_id', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='collector_id', full_name='clarifai.api.GetCollectorRequest.collector_id', index=1,
    number=2, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=b"".decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=21825,
  serialized_end=21917,
)

# ListCollectorsRequest: user_app_id + page / per_page pagination.
_LISTCOLLECTORSREQUEST = _descriptor.Descriptor(
  name='ListCollectorsRequest',
  full_name='clarifai.api.ListCollectorsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='user_app_id', full_name='clarifai.api.ListCollectorsRequest.user_app_id', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='page', full_name='clarifai.api.ListCollectorsRequest.page', index=1,
    number=2, type=13, cpp_type=3, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='per_page', full_name='clarifai.api.ListCollectorsRequest.per_page', index=2,
    number=3, type=13, cpp_type=3, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=21919,
  serialized_end=22023,
)

# MultiCollectorResponse: status + repeated collectors.
_MULTICOLLECTORRESPONSE = _descriptor.Descriptor(
  name='MultiCollectorResponse',
  full_name='clarifai.api.MultiCollectorResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='status', full_name='clarifai.api.MultiCollectorResponse.status', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='collectors', full_name='clarifai.api.MultiCollectorResponse.collectors', index=1,
    number=2, type=11, cpp_type=10, label=3,
    has_default_value=False, default_value=[],
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=22025,
  serialized_end=22139,
)

# SingleCollectorResponse: status + one collector.
_SINGLECOLLECTORRESPONSE = _descriptor.Descriptor(
  name='SingleCollectorResponse',
  full_name='clarifai.api.SingleCollectorResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='status', full_name='clarifai.api.SingleCollectorResponse.status', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='collector', full_name='clarifai.api.SingleCollectorResponse.collector', index=1,
    number=2, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=22141,
  serialized_end=22255,
)

# PostStatValuesRequest: user_app_id + repeated stat_values.
_POSTSTATVALUESREQUEST = _descriptor.Descriptor(
  name='PostStatValuesRequest',
  full_name='clarifai.api.PostStatValuesRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='user_app_id', full_name='clarifai.api.PostStatValuesRequest.user_app_id', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='stat_values', full_name='clarifai.api.PostStatValuesRequest.stat_values', index=1,
    number=2, type=11, cpp_type=10, label=3,
    has_default_value=False, default_value=[],
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=22257,
  serialized_end=22375,
)

# MultiStatValueResponse: status + repeated stat_values (field carries a
# generated field-options blob; semantics live in the .proto source).
_MULTISTATVALUERESPONSE = _descriptor.Descriptor(
  name='MultiStatValueResponse',
  full_name='clarifai.api.MultiStatValueResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='status', full_name='clarifai.api.MultiStatValueResponse.status', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='stat_values', full_name='clarifai.api.MultiStatValueResponse.stat_values', index=1,
    number=2, type=11, cpp_type=10, label=3,
    has_default_value=False, default_value=[],
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=b'\200\265\030\001', file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=22377,
  serialized_end=22498,
)

# PostStatValuesAggregateRequest: repeated stat_value_aggregate_queries.
_POSTSTATVALUESAGGREGATEREQUEST = _descriptor.Descriptor(
  name='PostStatValuesAggregateRequest',
  full_name='clarifai.api.PostStatValuesAggregateRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='user_app_id', full_name='clarifai.api.PostStatValuesAggregateRequest.user_app_id', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='stat_value_aggregate_queries', full_name='clarifai.api.PostStatValuesAggregateRequest.stat_value_aggregate_queries', index=1,
    number=2, type=11, cpp_type=10, label=3,
    has_default_value=False, default_value=[],
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=22501,
  serialized_end=22659,
)

# MultiStatValueAggregateResponse: status + repeated aggregate results.
_MULTISTATVALUEAGGREGATERESPONSE = _descriptor.Descriptor(
  name='MultiStatValueAggregateResponse',
  full_name='clarifai.api.MultiStatValueAggregateResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='status', full_name='clarifai.api.MultiStatValueAggregateResponse.status', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='stat_value_aggregate_results', full_name='clarifai.api.MultiStatValueAggregateResponse.stat_value_aggregate_results', index=1,
    number=2, type=11, cpp_type=10, label=3,
    has_default_value=False, default_value=[],
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=22662,
  serialized_end=22818,
)

# PostTrendingMetricsViewRequest: view_type + object_id strings.
_POSTTRENDINGMETRICSVIEWREQUEST = _descriptor.Descriptor(
  name='PostTrendingMetricsViewRequest',
  full_name='clarifai.api.PostTrendingMetricsViewRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='user_app_id', full_name='clarifai.api.PostTrendingMetricsViewRequest.user_app_id', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='view_type', full_name='clarifai.api.PostTrendingMetricsViewRequest.view_type', index=1,
    number=2, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=b"".decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='object_id', full_name='clarifai.api.PostTrendingMetricsViewRequest.object_id', index=2,
    number=3, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=b"".decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=22820,
  serialized_end=22939,
)

# ListTrendingMetricsViewsRequest: view_type filter + pagination.
_LISTTRENDINGMETRICSVIEWSREQUEST = _descriptor.Descriptor(
  name='ListTrendingMetricsViewsRequest',
  full_name='clarifai.api.ListTrendingMetricsViewsRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='user_app_id', full_name='clarifai.api.ListTrendingMetricsViewsRequest.user_app_id', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='view_type', full_name='clarifai.api.ListTrendingMetricsViewsRequest.view_type', index=1,
    number=2, type=9, cpp_type=9, label=1,
    has_default_value=False, default_value=b"".decode('utf-8'),
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='page', full_name='clarifai.api.ListTrendingMetricsViewsRequest.page', index=2,
    number=3, type=13, cpp_type=3, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='per_page', full_name='clarifai.api.ListTrendingMetricsViewsRequest.per_page', index=3,
    number=4, type=13, cpp_type=3, label=1,
    has_default_value=False, default_value=0,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=22942,
  serialized_end=23075,
)

# MultiTrendingMetricsViewResponse: status + repeated metrics.
_MULTITRENDINGMETRICSVIEWRESPONSE = _descriptor.Descriptor(
  name='MultiTrendingMetricsViewResponse',
  full_name='clarifai.api.MultiTrendingMetricsViewResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  _descriptor.FieldDescriptor(
    name='status', full_name='clarifai.api.MultiTrendingMetricsViewResponse.status', index=0,
    number=1, type=11, cpp_type=10, label=1,
    has_default_value=False, default_value=None,
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  _descriptor.FieldDescriptor(
    name='metrics', full_name='clarifai.api.MultiTrendingMetricsViewResponse.metrics', index=1,
    number=2, type=11, cpp_type=10, label=3,
    has_default_value=False, default_value=[],
    message_type=None, enum_type=None, containing_type=None,
    is_extension=False, extension_scope=None,
    serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=23077,
  serialized_end=23203,
)
# -----------------------------------------------------------------------------
# Post-construction wiring (also protoc-generated — do not edit by hand):
# the Descriptor tables above are built with message_type/enum_type left as
# None; these statements resolve each field's cross-message references to the
# descriptors from the imported resources/status modules, and attach oneof
# membership (e.g. ListAppsRequest.sort_by). Statement order is part of the
# generated output.
# -----------------------------------------------------------------------------
_GETANNOTATIONREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_LISTANNOTATIONSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_LISTANNOTATIONSREQUEST.fields_by_name['statuses'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_POSTANNOTATIONSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTANNOTATIONSREQUEST.fields_by_name['annotations'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._ANNOTATION
_PATCHANNOTATIONSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_PATCHANNOTATIONSREQUEST.fields_by_name['annotations'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._ANNOTATION
_PATCHANNOTATIONSSTATUSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_PATCHANNOTATIONSSTATUSREQUEST.fields_by_name['status_code'].enum_type = proto_dot_clarifai_dot_api_dot_status_dot_status__code__pb2._STATUSCODE
_PATCHANNOTATIONSSTATUSRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_DELETEANNOTATIONREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_DELETEANNOTATIONSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_SINGLEANNOTATIONRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_SINGLEANNOTATIONRESPONSE.fields_by_name['annotation'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._ANNOTATION
_MULTIANNOTATIONRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTIANNOTATIONRESPONSE.fields_by_name['annotations'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._ANNOTATION
_GETAPPREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_LISTAPPSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
# Oneof wiring: both sort_by_* fields are alternatives of the sort_by oneof.
_LISTAPPSREQUEST.oneofs_by_name['sort_by'].fields.append(
  _LISTAPPSREQUEST.fields_by_name['sort_by_name'])
_LISTAPPSREQUEST.fields_by_name['sort_by_name'].containing_oneof = _LISTAPPSREQUEST.oneofs_by_name['sort_by']
_LISTAPPSREQUEST.oneofs_by_name['sort_by'].fields.append(
  _LISTAPPSREQUEST.fields_by_name['sort_by_modified_at'])
_LISTAPPSREQUEST.fields_by_name['sort_by_modified_at'].containing_oneof = _LISTAPPSREQUEST.oneofs_by_name['sort_by']
_POSTAPPSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTAPPSREQUEST.fields_by_name['apps'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._APP
_DELETEAPPREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_PATCHAPPSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_PATCHAPPSREQUEST.fields_by_name['apps'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._APP
_PATCHAPPSREQUEST.fields_by_name['metadata_action'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._PATCHACTION
_POSTAPPSSEARCHESREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTAPPSSEARCHESREQUEST.fields_by_name['app_query'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._APPQUERY
_POSTAPPSSEARCHESREQUEST.fields_by_name['pagination'].message_type = _PAGINATION
_SINGLEAPPRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_SINGLEAPPRESPONSE.fields_by_name['app'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._APP
_MULTIAPPRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTIAPPRESPONSE.fields_by_name['apps'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._APP
_LISTCOLLABORATORSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTCOLLABORATORSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTCOLLABORATORSREQUEST.fields_by_name['collaborators'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._COLLABORATOR
_PATCHCOLLABORATORSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_PATCHCOLLABORATORSREQUEST.fields_by_name['collaborators'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._COLLABORATOR
_DELETECOLLABORATORSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_MULTICOLLABORATORSRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTICOLLABORATORSRESPONSE.fields_by_name['collaborators'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._COLLABORATOR
_LISTCOLLABORATIONSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_MULTICOLLABORATIONSRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTICOLLABORATIONSRESPONSE.fields_by_name['collaborations'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._COLLABORATION
_SINGLESTATUSCODERESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTISTATUSCODERESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTISTATUSCODERESPONSE.fields_by_name['statuses'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_GETCONCEPTREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_LISTCONCEPTSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTCONCEPTSSEARCHESREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTCONCEPTSSEARCHESREQUEST.fields_by_name['concept_query'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._CONCEPTQUERY
_POSTCONCEPTSSEARCHESREQUEST.fields_by_name['pagination'].message_type = _PAGINATION
_POSTCONCEPTSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTCONCEPTSREQUEST.fields_by_name['concepts'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._CONCEPT
_PATCHCONCEPTSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_PATCHCONCEPTSREQUEST.fields_by_name['concepts'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._CONCEPT
_GETCONCEPTCOUNTSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_SINGLECONCEPTRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_SINGLECONCEPTRESPONSE.fields_by_name['concept'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._CONCEPT
_MULTICONCEPTRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTICONCEPTRESPONSE.fields_by_name['concepts'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._CONCEPT
_MULTICONCEPTCOUNTRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTICONCEPTCOUNTRESPONSE.fields_by_name['concept_counts'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._CONCEPTCOUNT
_LISTCONCEPTRELATIONSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTCONCEPTRELATIONSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTCONCEPTRELATIONSREQUEST.fields_by_name['concept_relations'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._CONCEPTRELATION
_DELETECONCEPTRELATIONSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_LISTKNOWLEDGEGRAPHSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTKNOWLEDGEGRAPHSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTKNOWLEDGEGRAPHSREQUEST.fields_by_name['knowledge_graphs'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._KNOWLEDGEGRAPH
_POSTCONCEPTMAPPINGJOBSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTCONCEPTMAPPINGJOBSREQUEST.fields_by_name['concept_mapping_jobs'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._CONCEPTMAPPINGJOB
_MULTICONCEPTRELATIONRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTICONCEPTRELATIONRESPONSE.fields_by_name['concept_relations'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._CONCEPTRELATION
_MULTIKNOWLEDGEGRAPHRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTIKNOWLEDGEGRAPHRESPONSE.fields_by_name['knowledge_graphs'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._KNOWLEDGEGRAPH
_MULTICONCEPTMAPPINGJOBRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_GETCONCEPTLANGUAGEREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_LISTCONCEPTLANGUAGESREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_PATCHCONCEPTLANGUAGESREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_PATCHCONCEPTLANGUAGESREQUEST.fields_by_name['concept_languages'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._CONCEPTLANGUAGE
_POSTCONCEPTLANGUAGESREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTCONCEPTLANGUAGESREQUEST.fields_by_name['concept_languages'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._CONCEPTLANGUAGE
_SINGLECONCEPTLANGUAGERESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_SINGLECONCEPTLANGUAGERESPONSE.fields_by_name['concept_language'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._CONCEPTLANGUAGE
_MULTICONCEPTLANGUAGERESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTICONCEPTLANGUAGERESPONSE.fields_by_name['concept_languages'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._CONCEPTLANGUAGE
_GETINPUTREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_GETINPUTSAMPLESREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_LISTINPUTSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_LISTINPUTSREQUEST.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_STREAMINPUTSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTINPUTSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTINPUTSREQUEST.fields_by_name['inputs'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._INPUT
_PATCHINPUTSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_PATCHINPUTSREQUEST.fields_by_name['inputs'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._INPUT
_DELETEINPUTREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_DELETEINPUTSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_SINGLEINPUTRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_SINGLEINPUTRESPONSE.fields_by_name['input'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._INPUT
_MULTIINPUTRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTIINPUTRESPONSE.fields_by_name['inputs'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._INPUT
_MULTIINPUTANNOTATIONRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTIINPUTANNOTATIONRESPONSE.fields_by_name['hits'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._HIT
_SINGLEINPUTCOUNTRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_SINGLEINPUTCOUNTRESPONSE.fields_by_name['counts'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._INPUTCOUNT
_GETINPUTCOUNTREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTMODELOUTPUTSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTMODELOUTPUTSREQUEST.fields_by_name['inputs'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._INPUT
_POSTMODELOUTPUTSREQUEST.fields_by_name['model'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._MODEL
_LISTMODELINPUTSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_GETKEYREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_LISTKEYSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_LISTAPPKEYSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTKEYSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTKEYSREQUEST.fields_by_name['keys'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._KEY
_DELETEKEYREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_PATCHKEYSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_PATCHKEYSREQUEST.fields_by_name['keys'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._KEY
_SINGLEKEYRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_SINGLEKEYRESPONSE.fields_by_name['key'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._KEY
_MULTIKEYRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTIKEYRESPONSE.fields_by_name['keys'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._KEY
_GETMODELREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_LISTMODELSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_LISTMODELSREQUEST.oneofs_by_name['sort_by'].fields.append(
_LISTMODELSREQUEST.fields_by_name['sort_by_name'])
_LISTMODELSREQUEST.fields_by_name['sort_by_name'].containing_oneof = _LISTMODELSREQUEST.oneofs_by_name['sort_by']
_LISTMODELSREQUEST.oneofs_by_name['sort_by'].fields.append(
_LISTMODELSREQUEST.fields_by_name['sort_by_num_inputs'])
_LISTMODELSREQUEST.fields_by_name['sort_by_num_inputs'].containing_oneof = _LISTMODELSREQUEST.oneofs_by_name['sort_by']
_LISTMODELSREQUEST.oneofs_by_name['sort_by'].fields.append(
_LISTMODELSREQUEST.fields_by_name['sort_by_modified_at'])
_LISTMODELSREQUEST.fields_by_name['sort_by_modified_at'].containing_oneof = _LISTMODELSREQUEST.oneofs_by_name['sort_by']
_POSTMODELSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTMODELSREQUEST.fields_by_name['model'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._MODEL
_POSTMODELSREQUEST.fields_by_name['models'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._MODEL
_PATCHMODELSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_PATCHMODELSREQUEST.fields_by_name['models'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._MODEL
_DELETEMODELREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_DELETEMODELSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTMODELSSEARCHESREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTMODELSSEARCHESREQUEST.fields_by_name['model_query'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._MODELQUERY
_POSTMODELSSEARCHESREQUEST.fields_by_name['pagination'].message_type = _PAGINATION
_SINGLEMODELRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_SINGLEMODELRESPONSE.fields_by_name['model'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._MODEL
_MULTIMODELRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTIMODELRESPONSE.fields_by_name['models'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._MODEL
_PATCHMODELVERSIONSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_PATCHMODELVERSIONSREQUEST.fields_by_name['model_versions'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._MODELVERSION
_GETMODELVERSIONREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_LISTMODELVERSIONSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_DELETEMODELVERSIONREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_SINGLEMODELVERSIONRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_SINGLEMODELVERSIONRESPONSE.fields_by_name['model_version'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._MODELVERSION
_MULTIMODELVERSIONRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTIMODELVERSIONRESPONSE.fields_by_name['model_versions'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._MODELVERSION
_POSTMODELVERSIONSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTMODELVERSIONSREQUEST.fields_by_name['model_versions'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._MODELVERSION
_POSTMODELVERSIONSREQUEST.fields_by_name['search'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._SEARCH
_POSTMODELVERSIONSREQUEST.fields_by_name['train_search'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._SEARCH
_POSTMODELVERSIONSREQUEST.fields_by_name['test_search'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._SEARCH
_POSTMODELVERSIONMETRICSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTMODELVERSIONMETRICSREQUEST.fields_by_name['test_search'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._SEARCH
_GETMODELVERSIONMETRICSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_GETMODELVERSIONMETRICSREQUEST.fields_by_name['fields'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._FIELDSVALUE
_GETMODELTYPEREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_LISTMODELTYPESREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_LISTOPENSOURCELICENSESRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_SINGLEMODELTYPERESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_SINGLEMODELTYPERESPONSE.fields_by_name['model_type'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._MODELTYPE
_MULTIMODELTYPERESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTIMODELTYPERESPONSE.fields_by_name['model_types'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._MODELTYPE
_GETMODELVERSIONINPUTEXAMPLEREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_LISTMODELVERSIONINPUTEXAMPLESREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_SINGLEMODELVERSIONINPUTEXAMPLERESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_SINGLEMODELVERSIONINPUTEXAMPLERESPONSE.fields_by_name['model_version_input_example'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._MODELVERSIONINPUTEXAMPLE
_MULTIMODELVERSIONINPUTEXAMPLERESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTIMODELVERSIONINPUTEXAMPLERESPONSE.fields_by_name['model_version_input_examples'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._MODELVERSIONINPUTEXAMPLE
_LISTMODELREFERENCESREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_MULTIMODELREFERENCERESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTIMODELREFERENCERESPONSE.fields_by_name['model_references'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._MODELREFERENCE
_MULTIOUTPUTRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTIOUTPUTRESPONSE.fields_by_name['outputs'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._OUTPUT
_LISTSCOPESREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_MYSCOPESREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_MULTISCOPEDEPSRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTISCOPEDEPSRESPONSE.fields_by_name['scope_deps'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._SCOPEDEPS
_MULTISCOPEDEPSRESPONSE.fields_by_name['endpoint_deps'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._ENDPOINTDEPS
_MULTISCOPERESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTISCOPERESPONSE.fields_by_name['app'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._APP
_GETSEARCHREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_LISTSEARCHESREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTSEARCHESREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTSEARCHESREQUEST.fields_by_name['query'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._QUERY
_POSTSEARCHESREQUEST.fields_by_name['searches'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._SEARCH
_POSTSEARCHESREQUEST.fields_by_name['pagination'].message_type = _PAGINATION
_POSTSEARCHESBYIDREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTSEARCHESBYIDREQUEST.fields_by_name['pagination'].message_type = _PAGINATION
_DELETESEARCHREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTANNOTATIONSSEARCHESREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTANNOTATIONSSEARCHESREQUEST.fields_by_name['searches'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._SEARCH
_POSTANNOTATIONSSEARCHESREQUEST.fields_by_name['pagination'].message_type = _PAGINATION
_DELETEANNOTATIONSEARCHMETRICSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTINPUTSSEARCHESREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTINPUTSSEARCHESREQUEST.fields_by_name['searches'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._SEARCH
_POSTINPUTSSEARCHESREQUEST.fields_by_name['pagination'].message_type = _PAGINATION
_SINGLESEARCHRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_SINGLESEARCHRESPONSE.fields_by_name['search'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._SEARCH
_MULTISEARCHRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTISEARCHRESPONSE.fields_by_name['hits'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._HIT
_MULTISEARCHRESPONSE.fields_by_name['query'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._QUERY
_MULTISEARCHRESPONSE.fields_by_name['searches'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._SEARCH
_POSTANNOTATIONSEARCHMETRICSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTANNOTATIONSEARCHMETRICSREQUEST.fields_by_name['ground_truth'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._SEARCH
_POSTANNOTATIONSEARCHMETRICSREQUEST.fields_by_name['search_to_eval'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._SEARCH
_POSTANNOTATIONSEARCHMETRICSREQUEST.fields_by_name['data'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._DATA
_POSTANNOTATIONSEARCHMETRICSREQUEST.fields_by_name['evaluation_type'].enum_type = proto_dot_clarifai_dot_api_dot_resources__pb2._EVALUATIONTYPE
_GETANNOTATIONSEARCHMETRICSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_LISTANNOTATIONSEARCHMETRICSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_MULTIANNOTATIONSEARCHMETRICSRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTIANNOTATIONSEARCHMETRICSRESPONSE.fields_by_name['annotation_search_metrics'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._ANNOTATIONSEARCHMETRICS
_POSTVALIDATEPASSWORDREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTVALIDATEPASSWORDREQUEST.fields_by_name['password'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._PASSWORD
_SINGLEPASSWORDVALIDATIONRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_SINGLEPASSWORDVALIDATIONRESPONSE.fields_by_name['password_violations'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._PASSWORDVIOLATIONS
_GETWORKFLOWREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_LISTWORKFLOWSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_LISTWORKFLOWSREQUEST.oneofs_by_name['sort_by'].fields.append(
_LISTWORKFLOWSREQUEST.fields_by_name['sort_by_id'])
_LISTWORKFLOWSREQUEST.fields_by_name['sort_by_id'].containing_oneof = _LISTWORKFLOWSREQUEST.oneofs_by_name['sort_by']
_LISTWORKFLOWSREQUEST.oneofs_by_name['sort_by'].fields.append(
_LISTWORKFLOWSREQUEST.fields_by_name['sort_by_modified_at'])
_LISTWORKFLOWSREQUEST.fields_by_name['sort_by_modified_at'].containing_oneof = _LISTWORKFLOWSREQUEST.oneofs_by_name['sort_by']
_POSTWORKFLOWSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTWORKFLOWSREQUEST.fields_by_name['workflows'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._WORKFLOW
_PATCHWORKFLOWSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_PATCHWORKFLOWSREQUEST.fields_by_name['workflows'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._WORKFLOW
_DELETEWORKFLOWREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_DELETEWORKFLOWSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_SINGLEWORKFLOWRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_SINGLEWORKFLOWRESPONSE.fields_by_name['workflow'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._WORKFLOW
_MULTIWORKFLOWRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTIWORKFLOWRESPONSE.fields_by_name['workflows'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._WORKFLOW
_POSTWORKFLOWRESULTSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTWORKFLOWRESULTSREQUEST.fields_by_name['inputs'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._INPUT
_POSTWORKFLOWRESULTSREQUEST.fields_by_name['output_config'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._OUTPUTCONFIG
_POSTWORKFLOWRESULTSREQUEST.fields_by_name['workflow_state'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._WORKFLOWSTATE
_POSTWORKFLOWRESULTSRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_POSTWORKFLOWRESULTSRESPONSE.fields_by_name['workflow'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._WORKFLOW
_POSTWORKFLOWRESULTSRESPONSE.fields_by_name['results'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._WORKFLOWRESULT
_POSTWORKFLOWRESULTSRESPONSE.fields_by_name['workflow_state'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._WORKFLOWSTATE
_POSTWORKFLOWRESULTSSIMILARITYREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTWORKFLOWRESULTSSIMILARITYREQUEST.fields_by_name['probe_inputs'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._INPUT
_POSTWORKFLOWRESULTSSIMILARITYREQUEST.fields_by_name['pool_inputs'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._INPUT
_POSTWORKFLOWRESULTSSIMILARITYRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_POSTWORKFLOWRESULTSSIMILARITYRESPONSE.fields_by_name['results'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._WORKFLOWRESULTSSIMILARITY
# ---------------------------------------------------------------------------
# Cross-file field type wiring (protoc-generated -- do not edit by hand).
# Each statement resolves a message-typed field's `message_type` slot to the
# descriptor of the message it references.  Those descriptors live in the
# imported modules (resources_pb2 for resource messages, status_pb2 for
# Status), so this linking can only happen here, after all descriptors from
# the dependent .proto files have been built.
# ---------------------------------------------------------------------------
# Workflow version request/response fields.
_LISTWORKFLOWVERSIONSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_GETWORKFLOWVERSIONREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_DELETEWORKFLOWVERSIONSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_PATCHWORKFLOWVERSIONSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_PATCHWORKFLOWVERSIONSREQUEST.fields_by_name['workflow_versions'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._WORKFLOWVERSION
_MULTIWORKFLOWVERSIONRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTIWORKFLOWVERSIONRESPONSE.fields_by_name['workflow_versions'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._WORKFLOWVERSION
_SINGLEWORKFLOWVERSIONRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_SINGLEWORKFLOWVERSIONRESPONSE.fields_by_name['workflow_version'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._WORKFLOWVERSION
# App duplication request/response fields.
_POSTAPPDUPLICATIONSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTAPPDUPLICATIONSREQUEST.fields_by_name['app_duplications'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._APPDUPLICATION
_GETAPPDUPLICATIONREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_LISTAPPDUPLICATIONSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_MULTIAPPDUPLICATIONSRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTIAPPDUPLICATIONSRESPONSE.fields_by_name['app_duplications'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._APPDUPLICATION
_SINGLEAPPDUPLICATIONRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_SINGLEAPPDUPLICATIONRESPONSE.fields_by_name['app_duplication'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._APPDUPLICATION
# Task request/response fields.
_POSTTASKSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTTASKSREQUEST.fields_by_name['tasks'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._TASK
_GETTASKREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_LISTTASKSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_PATCHTASKSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_PATCHTASKSREQUEST.fields_by_name['tasks'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._TASK
_DELETETASKSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_MULTITASKRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTITASKRESPONSE.fields_by_name['tasks'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._TASK
_SINGLETASKRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_SINGLETASKRESPONSE.fields_by_name['task'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._TASK
_GETTASKCOUNTREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_SINGLETASKCOUNTRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_SINGLETASKCOUNTRESPONSE.fields_by_name['counts'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._TASKSTATUSCOUNTPERUSER
# Collector request/response fields.
_POSTCOLLECTORSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTCOLLECTORSREQUEST.fields_by_name['collectors'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._COLLECTOR
_PATCHCOLLECTORSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_PATCHCOLLECTORSREQUEST.fields_by_name['collectors'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._COLLECTOR
_DELETECOLLECTORSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_GETCOLLECTORREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_LISTCOLLECTORSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_MULTICOLLECTORRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTICOLLECTORRESPONSE.fields_by_name['collectors'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._COLLECTOR
_SINGLECOLLECTORRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_SINGLECOLLECTORRESPONSE.fields_by_name['collector'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._COLLECTOR
# Stat-value and trending-metrics request/response fields.
_POSTSTATVALUESREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTSTATVALUESREQUEST.fields_by_name['stat_values'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._STATVALUE
_MULTISTATVALUERESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTISTATVALUERESPONSE.fields_by_name['stat_values'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._STATVALUE
_POSTSTATVALUESAGGREGATEREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_POSTSTATVALUESAGGREGATEREQUEST.fields_by_name['stat_value_aggregate_queries'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._STATVALUEAGGREGATEQUERY
_MULTISTATVALUEAGGREGATERESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTISTATVALUEAGGREGATERESPONSE.fields_by_name['stat_value_aggregate_results'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._STATVALUEAGGREGATERESULT
_POSTTRENDINGMETRICSVIEWREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_LISTTRENDINGMETRICSVIEWSREQUEST.fields_by_name['user_app_id'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._USERAPPIDSET
_MULTITRENDINGMETRICSVIEWRESPONSE.fields_by_name['status'].message_type = proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._STATUS
_MULTITRENDINGMETRICSVIEWRESPONSE.fields_by_name['metrics'].message_type = proto_dot_clarifai_dot_api_dot_resources__pb2._TRENDINGMETRIC
# ---------------------------------------------------------------------------
# File-descriptor registration (protoc-generated -- do not edit by hand).
# Each statement makes a message descriptor discoverable by its proto name
# on this file's DESCRIPTOR (used by reflection and by other generated
# modules that import this one).  The enum registration and the final
# RegisterFileDescriptor call complete the file's entry in the process-wide
# symbol database (_sym_db).
# ---------------------------------------------------------------------------
DESCRIPTOR.message_types_by_name['Pagination'] = _PAGINATION
DESCRIPTOR.message_types_by_name['GetAnnotationRequest'] = _GETANNOTATIONREQUEST
DESCRIPTOR.message_types_by_name['ListAnnotationsRequest'] = _LISTANNOTATIONSREQUEST
DESCRIPTOR.message_types_by_name['PostAnnotationsRequest'] = _POSTANNOTATIONSREQUEST
DESCRIPTOR.message_types_by_name['PatchAnnotationsRequest'] = _PATCHANNOTATIONSREQUEST
DESCRIPTOR.message_types_by_name['PatchAnnotationsStatusRequest'] = _PATCHANNOTATIONSSTATUSREQUEST
DESCRIPTOR.message_types_by_name['PatchAnnotationsStatusResponse'] = _PATCHANNOTATIONSSTATUSRESPONSE
DESCRIPTOR.message_types_by_name['DeleteAnnotationRequest'] = _DELETEANNOTATIONREQUEST
DESCRIPTOR.message_types_by_name['DeleteAnnotationsRequest'] = _DELETEANNOTATIONSREQUEST
DESCRIPTOR.message_types_by_name['SingleAnnotationResponse'] = _SINGLEANNOTATIONRESPONSE
DESCRIPTOR.message_types_by_name['MultiAnnotationResponse'] = _MULTIANNOTATIONRESPONSE
DESCRIPTOR.message_types_by_name['GetAppRequest'] = _GETAPPREQUEST
DESCRIPTOR.message_types_by_name['ListAppsRequest'] = _LISTAPPSREQUEST
DESCRIPTOR.message_types_by_name['PostAppsRequest'] = _POSTAPPSREQUEST
DESCRIPTOR.message_types_by_name['DeleteAppRequest'] = _DELETEAPPREQUEST
DESCRIPTOR.message_types_by_name['PatchAppsRequest'] = _PATCHAPPSREQUEST
DESCRIPTOR.message_types_by_name['PostAppsSearchesRequest'] = _POSTAPPSSEARCHESREQUEST
DESCRIPTOR.message_types_by_name['SingleAppResponse'] = _SINGLEAPPRESPONSE
DESCRIPTOR.message_types_by_name['MultiAppResponse'] = _MULTIAPPRESPONSE
DESCRIPTOR.message_types_by_name['ListCollaboratorsRequest'] = _LISTCOLLABORATORSREQUEST
DESCRIPTOR.message_types_by_name['PostCollaboratorsRequest'] = _POSTCOLLABORATORSREQUEST
DESCRIPTOR.message_types_by_name['PatchCollaboratorsRequest'] = _PATCHCOLLABORATORSREQUEST
DESCRIPTOR.message_types_by_name['DeleteCollaboratorsRequest'] = _DELETECOLLABORATORSREQUEST
DESCRIPTOR.message_types_by_name['MultiCollaboratorsResponse'] = _MULTICOLLABORATORSRESPONSE
DESCRIPTOR.message_types_by_name['ListCollaborationsRequest'] = _LISTCOLLABORATIONSREQUEST
DESCRIPTOR.message_types_by_name['MultiCollaborationsResponse'] = _MULTICOLLABORATIONSRESPONSE
DESCRIPTOR.message_types_by_name['GetStatusCodeRequest'] = _GETSTATUSCODEREQUEST
DESCRIPTOR.message_types_by_name['ListStatusCodesRequest'] = _LISTSTATUSCODESREQUEST
DESCRIPTOR.message_types_by_name['SingleStatusCodeResponse'] = _SINGLESTATUSCODERESPONSE
DESCRIPTOR.message_types_by_name['MultiStatusCodeResponse'] = _MULTISTATUSCODERESPONSE
DESCRIPTOR.message_types_by_name['GetConceptRequest'] = _GETCONCEPTREQUEST
DESCRIPTOR.message_types_by_name['ListConceptsRequest'] = _LISTCONCEPTSREQUEST
DESCRIPTOR.message_types_by_name['PostConceptsSearchesRequest'] = _POSTCONCEPTSSEARCHESREQUEST
DESCRIPTOR.message_types_by_name['PostConceptsRequest'] = _POSTCONCEPTSREQUEST
DESCRIPTOR.message_types_by_name['PatchConceptsRequest'] = _PATCHCONCEPTSREQUEST
DESCRIPTOR.message_types_by_name['GetConceptCountsRequest'] = _GETCONCEPTCOUNTSREQUEST
DESCRIPTOR.message_types_by_name['SingleConceptResponse'] = _SINGLECONCEPTRESPONSE
DESCRIPTOR.message_types_by_name['MultiConceptResponse'] = _MULTICONCEPTRESPONSE
DESCRIPTOR.message_types_by_name['MultiConceptCountResponse'] = _MULTICONCEPTCOUNTRESPONSE
DESCRIPTOR.message_types_by_name['ListConceptRelationsRequest'] = _LISTCONCEPTRELATIONSREQUEST
DESCRIPTOR.message_types_by_name['PostConceptRelationsRequest'] = _POSTCONCEPTRELATIONSREQUEST
DESCRIPTOR.message_types_by_name['DeleteConceptRelationsRequest'] = _DELETECONCEPTRELATIONSREQUEST
DESCRIPTOR.message_types_by_name['ListKnowledgeGraphsRequest'] = _LISTKNOWLEDGEGRAPHSREQUEST
DESCRIPTOR.message_types_by_name['PostKnowledgeGraphsRequest'] = _POSTKNOWLEDGEGRAPHSREQUEST
DESCRIPTOR.message_types_by_name['PostConceptMappingJobsRequest'] = _POSTCONCEPTMAPPINGJOBSREQUEST
DESCRIPTOR.message_types_by_name['MultiConceptRelationResponse'] = _MULTICONCEPTRELATIONRESPONSE
DESCRIPTOR.message_types_by_name['MultiKnowledgeGraphResponse'] = _MULTIKNOWLEDGEGRAPHRESPONSE
DESCRIPTOR.message_types_by_name['MultiConceptMappingJobResponse'] = _MULTICONCEPTMAPPINGJOBRESPONSE
DESCRIPTOR.message_types_by_name['GetConceptLanguageRequest'] = _GETCONCEPTLANGUAGEREQUEST
DESCRIPTOR.message_types_by_name['ListConceptLanguagesRequest'] = _LISTCONCEPTLANGUAGESREQUEST
DESCRIPTOR.message_types_by_name['PatchConceptLanguagesRequest'] = _PATCHCONCEPTLANGUAGESREQUEST
DESCRIPTOR.message_types_by_name['PostConceptLanguagesRequest'] = _POSTCONCEPTLANGUAGESREQUEST
DESCRIPTOR.message_types_by_name['SingleConceptLanguageResponse'] = _SINGLECONCEPTLANGUAGERESPONSE
DESCRIPTOR.message_types_by_name['MultiConceptLanguageResponse'] = _MULTICONCEPTLANGUAGERESPONSE
DESCRIPTOR.message_types_by_name['GetInputRequest'] = _GETINPUTREQUEST
DESCRIPTOR.message_types_by_name['GetInputSamplesRequest'] = _GETINPUTSAMPLESREQUEST
DESCRIPTOR.message_types_by_name['ListInputsRequest'] = _LISTINPUTSREQUEST
DESCRIPTOR.message_types_by_name['StreamInputsRequest'] = _STREAMINPUTSREQUEST
DESCRIPTOR.message_types_by_name['PostInputsRequest'] = _POSTINPUTSREQUEST
DESCRIPTOR.message_types_by_name['PatchInputsRequest'] = _PATCHINPUTSREQUEST
DESCRIPTOR.message_types_by_name['DeleteInputRequest'] = _DELETEINPUTREQUEST
DESCRIPTOR.message_types_by_name['DeleteInputsRequest'] = _DELETEINPUTSREQUEST
DESCRIPTOR.message_types_by_name['SingleInputResponse'] = _SINGLEINPUTRESPONSE
DESCRIPTOR.message_types_by_name['MultiInputResponse'] = _MULTIINPUTRESPONSE
DESCRIPTOR.message_types_by_name['MultiInputAnnotationResponse'] = _MULTIINPUTANNOTATIONRESPONSE
DESCRIPTOR.message_types_by_name['SingleInputCountResponse'] = _SINGLEINPUTCOUNTRESPONSE
DESCRIPTOR.message_types_by_name['GetInputCountRequest'] = _GETINPUTCOUNTREQUEST
DESCRIPTOR.message_types_by_name['PostModelOutputsRequest'] = _POSTMODELOUTPUTSREQUEST
DESCRIPTOR.message_types_by_name['ListModelInputsRequest'] = _LISTMODELINPUTSREQUEST
DESCRIPTOR.message_types_by_name['GetKeyRequest'] = _GETKEYREQUEST
DESCRIPTOR.message_types_by_name['ListKeysRequest'] = _LISTKEYSREQUEST
DESCRIPTOR.message_types_by_name['ListAppKeysRequest'] = _LISTAPPKEYSREQUEST
DESCRIPTOR.message_types_by_name['PostKeysRequest'] = _POSTKEYSREQUEST
DESCRIPTOR.message_types_by_name['DeleteKeyRequest'] = _DELETEKEYREQUEST
DESCRIPTOR.message_types_by_name['PatchKeysRequest'] = _PATCHKEYSREQUEST
DESCRIPTOR.message_types_by_name['SingleKeyResponse'] = _SINGLEKEYRESPONSE
DESCRIPTOR.message_types_by_name['MultiKeyResponse'] = _MULTIKEYRESPONSE
DESCRIPTOR.message_types_by_name['GetModelRequest'] = _GETMODELREQUEST
DESCRIPTOR.message_types_by_name['ListModelsRequest'] = _LISTMODELSREQUEST
DESCRIPTOR.message_types_by_name['PostModelsRequest'] = _POSTMODELSREQUEST
DESCRIPTOR.message_types_by_name['PatchModelsRequest'] = _PATCHMODELSREQUEST
DESCRIPTOR.message_types_by_name['DeleteModelRequest'] = _DELETEMODELREQUEST
DESCRIPTOR.message_types_by_name['DeleteModelsRequest'] = _DELETEMODELSREQUEST
DESCRIPTOR.message_types_by_name['PostModelsSearchesRequest'] = _POSTMODELSSEARCHESREQUEST
DESCRIPTOR.message_types_by_name['SingleModelResponse'] = _SINGLEMODELRESPONSE
DESCRIPTOR.message_types_by_name['MultiModelResponse'] = _MULTIMODELRESPONSE
DESCRIPTOR.message_types_by_name['PatchModelVersionsRequest'] = _PATCHMODELVERSIONSREQUEST
DESCRIPTOR.message_types_by_name['GetModelVersionRequest'] = _GETMODELVERSIONREQUEST
DESCRIPTOR.message_types_by_name['ListModelVersionsRequest'] = _LISTMODELVERSIONSREQUEST
DESCRIPTOR.message_types_by_name['DeleteModelVersionRequest'] = _DELETEMODELVERSIONREQUEST
DESCRIPTOR.message_types_by_name['SingleModelVersionResponse'] = _SINGLEMODELVERSIONRESPONSE
DESCRIPTOR.message_types_by_name['MultiModelVersionResponse'] = _MULTIMODELVERSIONRESPONSE
DESCRIPTOR.message_types_by_name['PostModelVersionsRequest'] = _POSTMODELVERSIONSREQUEST
DESCRIPTOR.message_types_by_name['PostModelVersionMetricsRequest'] = _POSTMODELVERSIONMETRICSREQUEST
DESCRIPTOR.message_types_by_name['GetModelVersionMetricsRequest'] = _GETMODELVERSIONMETRICSREQUEST
DESCRIPTOR.message_types_by_name['GetModelTypeRequest'] = _GETMODELTYPEREQUEST
DESCRIPTOR.message_types_by_name['ListModelTypesRequest'] = _LISTMODELTYPESREQUEST
DESCRIPTOR.message_types_by_name['ListOpenSourceLicensesRequest'] = _LISTOPENSOURCELICENSESREQUEST
DESCRIPTOR.message_types_by_name['ListOpenSourceLicensesResponse'] = _LISTOPENSOURCELICENSESRESPONSE
DESCRIPTOR.message_types_by_name['SingleModelTypeResponse'] = _SINGLEMODELTYPERESPONSE
DESCRIPTOR.message_types_by_name['MultiModelTypeResponse'] = _MULTIMODELTYPERESPONSE
DESCRIPTOR.message_types_by_name['GetModelVersionInputExampleRequest'] = _GETMODELVERSIONINPUTEXAMPLEREQUEST
DESCRIPTOR.message_types_by_name['ListModelVersionInputExamplesRequest'] = _LISTMODELVERSIONINPUTEXAMPLESREQUEST
DESCRIPTOR.message_types_by_name['SingleModelVersionInputExampleResponse'] = _SINGLEMODELVERSIONINPUTEXAMPLERESPONSE
DESCRIPTOR.message_types_by_name['MultiModelVersionInputExampleResponse'] = _MULTIMODELVERSIONINPUTEXAMPLERESPONSE
DESCRIPTOR.message_types_by_name['ListModelReferencesRequest'] = _LISTMODELREFERENCESREQUEST
DESCRIPTOR.message_types_by_name['MultiModelReferenceResponse'] = _MULTIMODELREFERENCERESPONSE
DESCRIPTOR.message_types_by_name['MultiOutputResponse'] = _MULTIOUTPUTRESPONSE
DESCRIPTOR.message_types_by_name['ListScopesRequest'] = _LISTSCOPESREQUEST
DESCRIPTOR.message_types_by_name['MyScopesRequest'] = _MYSCOPESREQUEST
DESCRIPTOR.message_types_by_name['MultiScopeDepsResponse'] = _MULTISCOPEDEPSRESPONSE
DESCRIPTOR.message_types_by_name['MultiScopeResponse'] = _MULTISCOPERESPONSE
DESCRIPTOR.message_types_by_name['GetSearchRequest'] = _GETSEARCHREQUEST
DESCRIPTOR.message_types_by_name['ListSearchesRequest'] = _LISTSEARCHESREQUEST
DESCRIPTOR.message_types_by_name['PostSearchesRequest'] = _POSTSEARCHESREQUEST
DESCRIPTOR.message_types_by_name['PostSearchesByIDRequest'] = _POSTSEARCHESBYIDREQUEST
DESCRIPTOR.message_types_by_name['DeleteSearchRequest'] = _DELETESEARCHREQUEST
DESCRIPTOR.message_types_by_name['PostAnnotationsSearchesRequest'] = _POSTANNOTATIONSSEARCHESREQUEST
DESCRIPTOR.message_types_by_name['DeleteAnnotationSearchMetricsRequest'] = _DELETEANNOTATIONSEARCHMETRICSREQUEST
DESCRIPTOR.message_types_by_name['PostInputsSearchesRequest'] = _POSTINPUTSSEARCHESREQUEST
DESCRIPTOR.message_types_by_name['SingleSearchResponse'] = _SINGLESEARCHRESPONSE
DESCRIPTOR.message_types_by_name['MultiSearchResponse'] = _MULTISEARCHRESPONSE
DESCRIPTOR.message_types_by_name['PostAnnotationSearchMetricsRequest'] = _POSTANNOTATIONSEARCHMETRICSREQUEST
DESCRIPTOR.message_types_by_name['GetAnnotationSearchMetricsRequest'] = _GETANNOTATIONSEARCHMETRICSREQUEST
DESCRIPTOR.message_types_by_name['ListAnnotationSearchMetricsRequest'] = _LISTANNOTATIONSEARCHMETRICSREQUEST
DESCRIPTOR.message_types_by_name['MultiAnnotationSearchMetricsResponse'] = _MULTIANNOTATIONSEARCHMETRICSRESPONSE
DESCRIPTOR.message_types_by_name['PostValidatePasswordRequest'] = _POSTVALIDATEPASSWORDREQUEST
DESCRIPTOR.message_types_by_name['SinglePasswordValidationResponse'] = _SINGLEPASSWORDVALIDATIONRESPONSE
DESCRIPTOR.message_types_by_name['GetWorkflowRequest'] = _GETWORKFLOWREQUEST
DESCRIPTOR.message_types_by_name['ListWorkflowsRequest'] = _LISTWORKFLOWSREQUEST
DESCRIPTOR.message_types_by_name['PostWorkflowsRequest'] = _POSTWORKFLOWSREQUEST
DESCRIPTOR.message_types_by_name['PatchWorkflowsRequest'] = _PATCHWORKFLOWSREQUEST
DESCRIPTOR.message_types_by_name['DeleteWorkflowRequest'] = _DELETEWORKFLOWREQUEST
DESCRIPTOR.message_types_by_name['DeleteWorkflowsRequest'] = _DELETEWORKFLOWSREQUEST
DESCRIPTOR.message_types_by_name['SingleWorkflowResponse'] = _SINGLEWORKFLOWRESPONSE
DESCRIPTOR.message_types_by_name['MultiWorkflowResponse'] = _MULTIWORKFLOWRESPONSE
DESCRIPTOR.message_types_by_name['PostWorkflowResultsRequest'] = _POSTWORKFLOWRESULTSREQUEST
DESCRIPTOR.message_types_by_name['PostWorkflowResultsResponse'] = _POSTWORKFLOWRESULTSRESPONSE
DESCRIPTOR.message_types_by_name['PostWorkflowResultsSimilarityRequest'] = _POSTWORKFLOWRESULTSSIMILARITYREQUEST
DESCRIPTOR.message_types_by_name['PostWorkflowResultsSimilarityResponse'] = _POSTWORKFLOWRESULTSSIMILARITYRESPONSE
DESCRIPTOR.message_types_by_name['ListWorkflowVersionsRequest'] = _LISTWORKFLOWVERSIONSREQUEST
DESCRIPTOR.message_types_by_name['GetWorkflowVersionRequest'] = _GETWORKFLOWVERSIONREQUEST
DESCRIPTOR.message_types_by_name['DeleteWorkflowVersionsRequest'] = _DELETEWORKFLOWVERSIONSREQUEST
DESCRIPTOR.message_types_by_name['PatchWorkflowVersionsRequest'] = _PATCHWORKFLOWVERSIONSREQUEST
DESCRIPTOR.message_types_by_name['MultiWorkflowVersionResponse'] = _MULTIWORKFLOWVERSIONRESPONSE
DESCRIPTOR.message_types_by_name['SingleWorkflowVersionResponse'] = _SINGLEWORKFLOWVERSIONRESPONSE
DESCRIPTOR.message_types_by_name['PostAppDuplicationsRequest'] = _POSTAPPDUPLICATIONSREQUEST
DESCRIPTOR.message_types_by_name['GetAppDuplicationRequest'] = _GETAPPDUPLICATIONREQUEST
DESCRIPTOR.message_types_by_name['ListAppDuplicationsRequest'] = _LISTAPPDUPLICATIONSREQUEST
DESCRIPTOR.message_types_by_name['MultiAppDuplicationsResponse'] = _MULTIAPPDUPLICATIONSRESPONSE
DESCRIPTOR.message_types_by_name['SingleAppDuplicationResponse'] = _SINGLEAPPDUPLICATIONRESPONSE
DESCRIPTOR.message_types_by_name['PostTasksRequest'] = _POSTTASKSREQUEST
DESCRIPTOR.message_types_by_name['GetTaskRequest'] = _GETTASKREQUEST
DESCRIPTOR.message_types_by_name['ListTasksRequest'] = _LISTTASKSREQUEST
DESCRIPTOR.message_types_by_name['PatchTasksRequest'] = _PATCHTASKSREQUEST
DESCRIPTOR.message_types_by_name['DeleteTasksRequest'] = _DELETETASKSREQUEST
DESCRIPTOR.message_types_by_name['MultiTaskResponse'] = _MULTITASKRESPONSE
DESCRIPTOR.message_types_by_name['SingleTaskResponse'] = _SINGLETASKRESPONSE
DESCRIPTOR.message_types_by_name['GetTaskCountRequest'] = _GETTASKCOUNTREQUEST
DESCRIPTOR.message_types_by_name['SingleTaskCountResponse'] = _SINGLETASKCOUNTRESPONSE
DESCRIPTOR.message_types_by_name['PostCollectorsRequest'] = _POSTCOLLECTORSREQUEST
DESCRIPTOR.message_types_by_name['PatchCollectorsRequest'] = _PATCHCOLLECTORSREQUEST
DESCRIPTOR.message_types_by_name['DeleteCollectorsRequest'] = _DELETECOLLECTORSREQUEST
DESCRIPTOR.message_types_by_name['GetCollectorRequest'] = _GETCOLLECTORREQUEST
DESCRIPTOR.message_types_by_name['ListCollectorsRequest'] = _LISTCOLLECTORSREQUEST
DESCRIPTOR.message_types_by_name['MultiCollectorResponse'] = _MULTICOLLECTORRESPONSE
DESCRIPTOR.message_types_by_name['SingleCollectorResponse'] = _SINGLECOLLECTORRESPONSE
DESCRIPTOR.message_types_by_name['PostStatValuesRequest'] = _POSTSTATVALUESREQUEST
DESCRIPTOR.message_types_by_name['MultiStatValueResponse'] = _MULTISTATVALUERESPONSE
DESCRIPTOR.message_types_by_name['PostStatValuesAggregateRequest'] = _POSTSTATVALUESAGGREGATEREQUEST
DESCRIPTOR.message_types_by_name['MultiStatValueAggregateResponse'] = _MULTISTATVALUEAGGREGATERESPONSE
DESCRIPTOR.message_types_by_name['PostTrendingMetricsViewRequest'] = _POSTTRENDINGMETRICSVIEWREQUEST
DESCRIPTOR.message_types_by_name['ListTrendingMetricsViewsRequest'] = _LISTTRENDINGMETRICSVIEWSREQUEST
DESCRIPTOR.message_types_by_name['MultiTrendingMetricsViewResponse'] = _MULTITRENDINGMETRICSVIEWRESPONSE
# Top-level enums are registered separately from messages.
DESCRIPTOR.enum_types_by_name['OrganizationInvitationStatus'] = _ORGANIZATIONINVITATIONSTATUS
# Publish the fully-populated file descriptor to the default symbol database.
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# ---------------------------------------------------------------------------
# Concrete message class generation (protoc-generated -- do not edit by hand).
# For each descriptor, GeneratedProtocolMessageType builds a usable Python
# message class (via the protobuf metaclass) and RegisterMessage records it
# in the default symbol database so it can be looked up by full proto name.
# The '__module__' entry controls pickling/repr module attribution; the
# @@protoc_insertion_point comments are markers for protoc plugins.
# ---------------------------------------------------------------------------
Pagination = _reflection.GeneratedProtocolMessageType('Pagination', (_message.Message,), {
  'DESCRIPTOR' : _PAGINATION,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.Pagination)
  })
_sym_db.RegisterMessage(Pagination)
GetAnnotationRequest = _reflection.GeneratedProtocolMessageType('GetAnnotationRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETANNOTATIONREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.GetAnnotationRequest)
  })
_sym_db.RegisterMessage(GetAnnotationRequest)
ListAnnotationsRequest = _reflection.GeneratedProtocolMessageType('ListAnnotationsRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTANNOTATIONSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListAnnotationsRequest)
  })
_sym_db.RegisterMessage(ListAnnotationsRequest)
PostAnnotationsRequest = _reflection.GeneratedProtocolMessageType('PostAnnotationsRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTANNOTATIONSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostAnnotationsRequest)
  })
_sym_db.RegisterMessage(PostAnnotationsRequest)
PatchAnnotationsRequest = _reflection.GeneratedProtocolMessageType('PatchAnnotationsRequest', (_message.Message,), {
  'DESCRIPTOR' : _PATCHANNOTATIONSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PatchAnnotationsRequest)
  })
_sym_db.RegisterMessage(PatchAnnotationsRequest)
PatchAnnotationsStatusRequest = _reflection.GeneratedProtocolMessageType('PatchAnnotationsStatusRequest', (_message.Message,), {
  'DESCRIPTOR' : _PATCHANNOTATIONSSTATUSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PatchAnnotationsStatusRequest)
  })
_sym_db.RegisterMessage(PatchAnnotationsStatusRequest)
PatchAnnotationsStatusResponse = _reflection.GeneratedProtocolMessageType('PatchAnnotationsStatusResponse', (_message.Message,), {
  'DESCRIPTOR' : _PATCHANNOTATIONSSTATUSRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PatchAnnotationsStatusResponse)
  })
_sym_db.RegisterMessage(PatchAnnotationsStatusResponse)
DeleteAnnotationRequest = _reflection.GeneratedProtocolMessageType('DeleteAnnotationRequest', (_message.Message,), {
  'DESCRIPTOR' : _DELETEANNOTATIONREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.DeleteAnnotationRequest)
  })
_sym_db.RegisterMessage(DeleteAnnotationRequest)
DeleteAnnotationsRequest = _reflection.GeneratedProtocolMessageType('DeleteAnnotationsRequest', (_message.Message,), {
  'DESCRIPTOR' : _DELETEANNOTATIONSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.DeleteAnnotationsRequest)
  })
_sym_db.RegisterMessage(DeleteAnnotationsRequest)
SingleAnnotationResponse = _reflection.GeneratedProtocolMessageType('SingleAnnotationResponse', (_message.Message,), {
  'DESCRIPTOR' : _SINGLEANNOTATIONRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.SingleAnnotationResponse)
  })
_sym_db.RegisterMessage(SingleAnnotationResponse)
MultiAnnotationResponse = _reflection.GeneratedProtocolMessageType('MultiAnnotationResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTIANNOTATIONRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiAnnotationResponse)
  })
_sym_db.RegisterMessage(MultiAnnotationResponse)
GetAppRequest = _reflection.GeneratedProtocolMessageType('GetAppRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETAPPREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.GetAppRequest)
  })
_sym_db.RegisterMessage(GetAppRequest)
ListAppsRequest = _reflection.GeneratedProtocolMessageType('ListAppsRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTAPPSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListAppsRequest)
  })
_sym_db.RegisterMessage(ListAppsRequest)
PostAppsRequest = _reflection.GeneratedProtocolMessageType('PostAppsRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTAPPSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostAppsRequest)
  })
_sym_db.RegisterMessage(PostAppsRequest)
DeleteAppRequest = _reflection.GeneratedProtocolMessageType('DeleteAppRequest', (_message.Message,), {
  'DESCRIPTOR' : _DELETEAPPREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.DeleteAppRequest)
  })
_sym_db.RegisterMessage(DeleteAppRequest)
PatchAppsRequest = _reflection.GeneratedProtocolMessageType('PatchAppsRequest', (_message.Message,), {
  'DESCRIPTOR' : _PATCHAPPSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PatchAppsRequest)
  })
_sym_db.RegisterMessage(PatchAppsRequest)
PostAppsSearchesRequest = _reflection.GeneratedProtocolMessageType('PostAppsSearchesRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTAPPSSEARCHESREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostAppsSearchesRequest)
  })
_sym_db.RegisterMessage(PostAppsSearchesRequest)
SingleAppResponse = _reflection.GeneratedProtocolMessageType('SingleAppResponse', (_message.Message,), {
  'DESCRIPTOR' : _SINGLEAPPRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.SingleAppResponse)
  })
_sym_db.RegisterMessage(SingleAppResponse)
MultiAppResponse = _reflection.GeneratedProtocolMessageType('MultiAppResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTIAPPRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiAppResponse)
  })
_sym_db.RegisterMessage(MultiAppResponse)
ListCollaboratorsRequest = _reflection.GeneratedProtocolMessageType('ListCollaboratorsRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTCOLLABORATORSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListCollaboratorsRequest)
  })
_sym_db.RegisterMessage(ListCollaboratorsRequest)
PostCollaboratorsRequest = _reflection.GeneratedProtocolMessageType('PostCollaboratorsRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTCOLLABORATORSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostCollaboratorsRequest)
  })
_sym_db.RegisterMessage(PostCollaboratorsRequest)
PatchCollaboratorsRequest = _reflection.GeneratedProtocolMessageType('PatchCollaboratorsRequest', (_message.Message,), {
  'DESCRIPTOR' : _PATCHCOLLABORATORSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PatchCollaboratorsRequest)
  })
_sym_db.RegisterMessage(PatchCollaboratorsRequest)
DeleteCollaboratorsRequest = _reflection.GeneratedProtocolMessageType('DeleteCollaboratorsRequest', (_message.Message,), {
  'DESCRIPTOR' : _DELETECOLLABORATORSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.DeleteCollaboratorsRequest)
  })
_sym_db.RegisterMessage(DeleteCollaboratorsRequest)
MultiCollaboratorsResponse = _reflection.GeneratedProtocolMessageType('MultiCollaboratorsResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTICOLLABORATORSRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiCollaboratorsResponse)
  })
_sym_db.RegisterMessage(MultiCollaboratorsResponse)
ListCollaborationsRequest = _reflection.GeneratedProtocolMessageType('ListCollaborationsRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTCOLLABORATIONSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListCollaborationsRequest)
  })
_sym_db.RegisterMessage(ListCollaborationsRequest)
MultiCollaborationsResponse = _reflection.GeneratedProtocolMessageType('MultiCollaborationsResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTICOLLABORATIONSRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiCollaborationsResponse)
  })
_sym_db.RegisterMessage(MultiCollaborationsResponse)
GetStatusCodeRequest = _reflection.GeneratedProtocolMessageType('GetStatusCodeRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETSTATUSCODEREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.GetStatusCodeRequest)
  })
_sym_db.RegisterMessage(GetStatusCodeRequest)
ListStatusCodesRequest = _reflection.GeneratedProtocolMessageType('ListStatusCodesRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTSTATUSCODESREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListStatusCodesRequest)
  })
_sym_db.RegisterMessage(ListStatusCodesRequest)
SingleStatusCodeResponse = _reflection.GeneratedProtocolMessageType('SingleStatusCodeResponse', (_message.Message,), {
  'DESCRIPTOR' : _SINGLESTATUSCODERESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.SingleStatusCodeResponse)
  })
_sym_db.RegisterMessage(SingleStatusCodeResponse)
MultiStatusCodeResponse = _reflection.GeneratedProtocolMessageType('MultiStatusCodeResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTISTATUSCODERESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiStatusCodeResponse)
  })
_sym_db.RegisterMessage(MultiStatusCodeResponse)
GetConceptRequest = _reflection.GeneratedProtocolMessageType('GetConceptRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETCONCEPTREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.GetConceptRequest)
  })
_sym_db.RegisterMessage(GetConceptRequest)
ListConceptsRequest = _reflection.GeneratedProtocolMessageType('ListConceptsRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTCONCEPTSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListConceptsRequest)
  })
_sym_db.RegisterMessage(ListConceptsRequest)
PostConceptsSearchesRequest = _reflection.GeneratedProtocolMessageType('PostConceptsSearchesRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTCONCEPTSSEARCHESREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostConceptsSearchesRequest)
  })
_sym_db.RegisterMessage(PostConceptsSearchesRequest)
PostConceptsRequest = _reflection.GeneratedProtocolMessageType('PostConceptsRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTCONCEPTSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostConceptsRequest)
  })
_sym_db.RegisterMessage(PostConceptsRequest)
PatchConceptsRequest = _reflection.GeneratedProtocolMessageType('PatchConceptsRequest', (_message.Message,), {
  'DESCRIPTOR' : _PATCHCONCEPTSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PatchConceptsRequest)
  })
_sym_db.RegisterMessage(PatchConceptsRequest)
GetConceptCountsRequest = _reflection.GeneratedProtocolMessageType('GetConceptCountsRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETCONCEPTCOUNTSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.GetConceptCountsRequest)
  })
_sym_db.RegisterMessage(GetConceptCountsRequest)
SingleConceptResponse = _reflection.GeneratedProtocolMessageType('SingleConceptResponse', (_message.Message,), {
  'DESCRIPTOR' : _SINGLECONCEPTRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.SingleConceptResponse)
  })
_sym_db.RegisterMessage(SingleConceptResponse)
MultiConceptResponse = _reflection.GeneratedProtocolMessageType('MultiConceptResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTICONCEPTRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiConceptResponse)
  })
_sym_db.RegisterMessage(MultiConceptResponse)
MultiConceptCountResponse = _reflection.GeneratedProtocolMessageType('MultiConceptCountResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTICONCEPTCOUNTRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiConceptCountResponse)
  })
_sym_db.RegisterMessage(MultiConceptCountResponse)
ListConceptRelationsRequest = _reflection.GeneratedProtocolMessageType('ListConceptRelationsRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTCONCEPTRELATIONSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListConceptRelationsRequest)
  })
_sym_db.RegisterMessage(ListConceptRelationsRequest)
PostConceptRelationsRequest = _reflection.GeneratedProtocolMessageType('PostConceptRelationsRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTCONCEPTRELATIONSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostConceptRelationsRequest)
  })
_sym_db.RegisterMessage(PostConceptRelationsRequest)
DeleteConceptRelationsRequest = _reflection.GeneratedProtocolMessageType('DeleteConceptRelationsRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETECONCEPTRELATIONSREQUEST,
'__module__' : 'proto.clarifai.api.service_pb2'
# @@protoc_insertion_point(class_scope:clarifai.api.DeleteConceptRelationsRequest)
})
_sym_db.RegisterMessage(DeleteConceptRelationsRequest)
ListKnowledgeGraphsRequest = _reflection.GeneratedProtocolMessageType('ListKnowledgeGraphsRequest', (_message.Message,), {
'DESCRIPTOR' : _LISTKNOWLEDGEGRAPHSREQUEST,
'__module__' : 'proto.clarifai.api.service_pb2'
# @@protoc_insertion_point(class_scope:clarifai.api.ListKnowledgeGraphsRequest)
})
_sym_db.RegisterMessage(ListKnowledgeGraphsRequest)
PostKnowledgeGraphsRequest = _reflection.GeneratedProtocolMessageType('PostKnowledgeGraphsRequest', (_message.Message,), {
'DESCRIPTOR' : _POSTKNOWLEDGEGRAPHSREQUEST,
'__module__' : 'proto.clarifai.api.service_pb2'
# @@protoc_insertion_point(class_scope:clarifai.api.PostKnowledgeGraphsRequest)
})
_sym_db.RegisterMessage(PostKnowledgeGraphsRequest)
PostConceptMappingJobsRequest = _reflection.GeneratedProtocolMessageType('PostConceptMappingJobsRequest', (_message.Message,), {
'DESCRIPTOR' : _POSTCONCEPTMAPPINGJOBSREQUEST,
'__module__' : 'proto.clarifai.api.service_pb2'
# @@protoc_insertion_point(class_scope:clarifai.api.PostConceptMappingJobsRequest)
})
_sym_db.RegisterMessage(PostConceptMappingJobsRequest)
MultiConceptRelationResponse = _reflection.GeneratedProtocolMessageType('MultiConceptRelationResponse', (_message.Message,), {
'DESCRIPTOR' : _MULTICONCEPTRELATIONRESPONSE,
'__module__' : 'proto.clarifai.api.service_pb2'
# @@protoc_insertion_point(class_scope:clarifai.api.MultiConceptRelationResponse)
})
_sym_db.RegisterMessage(MultiConceptRelationResponse)
MultiKnowledgeGraphResponse = _reflection.GeneratedProtocolMessageType('MultiKnowledgeGraphResponse', (_message.Message,), {
'DESCRIPTOR' : _MULTIKNOWLEDGEGRAPHRESPONSE,
'__module__' : 'proto.clarifai.api.service_pb2'
# @@protoc_insertion_point(class_scope:clarifai.api.MultiKnowledgeGraphResponse)
})
_sym_db.RegisterMessage(MultiKnowledgeGraphResponse)
MultiConceptMappingJobResponse = _reflection.GeneratedProtocolMessageType('MultiConceptMappingJobResponse', (_message.Message,), {
'DESCRIPTOR' : _MULTICONCEPTMAPPINGJOBRESPONSE,
'__module__' : 'proto.clarifai.api.service_pb2'
# @@protoc_insertion_point(class_scope:clarifai.api.MultiConceptMappingJobResponse)
})
_sym_db.RegisterMessage(MultiConceptMappingJobResponse)
GetConceptLanguageRequest = _reflection.GeneratedProtocolMessageType('GetConceptLanguageRequest', (_message.Message,), {
'DESCRIPTOR' : _GETCONCEPTLANGUAGEREQUEST,
'__module__' : 'proto.clarifai.api.service_pb2'
# @@protoc_insertion_point(class_scope:clarifai.api.GetConceptLanguageRequest)
})
_sym_db.RegisterMessage(GetConceptLanguageRequest)
ListConceptLanguagesRequest = _reflection.GeneratedProtocolMessageType('ListConceptLanguagesRequest', (_message.Message,), {
'DESCRIPTOR' : _LISTCONCEPTLANGUAGESREQUEST,
'__module__' : 'proto.clarifai.api.service_pb2'
# @@protoc_insertion_point(class_scope:clarifai.api.ListConceptLanguagesRequest)
})
_sym_db.RegisterMessage(ListConceptLanguagesRequest)
PatchConceptLanguagesRequest = _reflection.GeneratedProtocolMessageType('PatchConceptLanguagesRequest', (_message.Message,), {
'DESCRIPTOR' : _PATCHCONCEPTLANGUAGESREQUEST,
'__module__' : 'proto.clarifai.api.service_pb2'
# @@protoc_insertion_point(class_scope:clarifai.api.PatchConceptLanguagesRequest)
})
_sym_db.RegisterMessage(PatchConceptLanguagesRequest)
PostConceptLanguagesRequest = _reflection.GeneratedProtocolMessageType('PostConceptLanguagesRequest', (_message.Message,), {
'DESCRIPTOR' : _POSTCONCEPTLANGUAGESREQUEST,
'__module__' : 'proto.clarifai.api.service_pb2'
# @@protoc_insertion_point(class_scope:clarifai.api.PostConceptLanguagesRequest)
})
_sym_db.RegisterMessage(PostConceptLanguagesRequest)
SingleConceptLanguageResponse = _reflection.GeneratedProtocolMessageType('SingleConceptLanguageResponse', (_message.Message,), {
'DESCRIPTOR' : _SINGLECONCEPTLANGUAGERESPONSE,
'__module__' : 'proto.clarifai.api.service_pb2'
# @@protoc_insertion_point(class_scope:clarifai.api.SingleConceptLanguageResponse)
})
_sym_db.RegisterMessage(SingleConceptLanguageResponse)
MultiConceptLanguageResponse = _reflection.GeneratedProtocolMessageType('MultiConceptLanguageResponse', (_message.Message,), {
'DESCRIPTOR' : _MULTICONCEPTLANGUAGERESPONSE,
'__module__' : 'proto.clarifai.api.service_pb2'
# @@protoc_insertion_point(class_scope:clarifai.api.MultiConceptLanguageResponse)
})
_sym_db.RegisterMessage(MultiConceptLanguageResponse)
# --- Generated protobuf registrations: input messages (get/list/stream/post/patch/
# --- delete requests and their responses). protoc-generated code: do not edit by hand.
GetInputRequest = _reflection.GeneratedProtocolMessageType('GetInputRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETINPUTREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.GetInputRequest)
  })
_sym_db.RegisterMessage(GetInputRequest)
GetInputSamplesRequest = _reflection.GeneratedProtocolMessageType('GetInputSamplesRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETINPUTSAMPLESREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.GetInputSamplesRequest)
  })
_sym_db.RegisterMessage(GetInputSamplesRequest)
ListInputsRequest = _reflection.GeneratedProtocolMessageType('ListInputsRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTINPUTSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListInputsRequest)
  })
_sym_db.RegisterMessage(ListInputsRequest)
StreamInputsRequest = _reflection.GeneratedProtocolMessageType('StreamInputsRequest', (_message.Message,), {
  'DESCRIPTOR' : _STREAMINPUTSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.StreamInputsRequest)
  })
_sym_db.RegisterMessage(StreamInputsRequest)
PostInputsRequest = _reflection.GeneratedProtocolMessageType('PostInputsRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTINPUTSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostInputsRequest)
  })
_sym_db.RegisterMessage(PostInputsRequest)
PatchInputsRequest = _reflection.GeneratedProtocolMessageType('PatchInputsRequest', (_message.Message,), {
  'DESCRIPTOR' : _PATCHINPUTSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PatchInputsRequest)
  })
_sym_db.RegisterMessage(PatchInputsRequest)
DeleteInputRequest = _reflection.GeneratedProtocolMessageType('DeleteInputRequest', (_message.Message,), {
  'DESCRIPTOR' : _DELETEINPUTREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.DeleteInputRequest)
  })
_sym_db.RegisterMessage(DeleteInputRequest)
DeleteInputsRequest = _reflection.GeneratedProtocolMessageType('DeleteInputsRequest', (_message.Message,), {
  'DESCRIPTOR' : _DELETEINPUTSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.DeleteInputsRequest)
  })
_sym_db.RegisterMessage(DeleteInputsRequest)
SingleInputResponse = _reflection.GeneratedProtocolMessageType('SingleInputResponse', (_message.Message,), {
  'DESCRIPTOR' : _SINGLEINPUTRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.SingleInputResponse)
  })
_sym_db.RegisterMessage(SingleInputResponse)
MultiInputResponse = _reflection.GeneratedProtocolMessageType('MultiInputResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTIINPUTRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiInputResponse)
  })
_sym_db.RegisterMessage(MultiInputResponse)
MultiInputAnnotationResponse = _reflection.GeneratedProtocolMessageType('MultiInputAnnotationResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTIINPUTANNOTATIONRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiInputAnnotationResponse)
  })
_sym_db.RegisterMessage(MultiInputAnnotationResponse)
SingleInputCountResponse = _reflection.GeneratedProtocolMessageType('SingleInputCountResponse', (_message.Message,), {
  'DESCRIPTOR' : _SINGLEINPUTCOUNTRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.SingleInputCountResponse)
  })
_sym_db.RegisterMessage(SingleInputCountResponse)
GetInputCountRequest = _reflection.GeneratedProtocolMessageType('GetInputCountRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETINPUTCOUNTREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.GetInputCountRequest)
  })
_sym_db.RegisterMessage(GetInputCountRequest)
# --- Generated protobuf registrations: model-output and API-key messages.
# --- protoc-generated code: do not edit by hand.
PostModelOutputsRequest = _reflection.GeneratedProtocolMessageType('PostModelOutputsRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTMODELOUTPUTSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostModelOutputsRequest)
  })
_sym_db.RegisterMessage(PostModelOutputsRequest)
ListModelInputsRequest = _reflection.GeneratedProtocolMessageType('ListModelInputsRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTMODELINPUTSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListModelInputsRequest)
  })
_sym_db.RegisterMessage(ListModelInputsRequest)
GetKeyRequest = _reflection.GeneratedProtocolMessageType('GetKeyRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETKEYREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.GetKeyRequest)
  })
_sym_db.RegisterMessage(GetKeyRequest)
ListKeysRequest = _reflection.GeneratedProtocolMessageType('ListKeysRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTKEYSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListKeysRequest)
  })
_sym_db.RegisterMessage(ListKeysRequest)
ListAppKeysRequest = _reflection.GeneratedProtocolMessageType('ListAppKeysRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTAPPKEYSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListAppKeysRequest)
  })
_sym_db.RegisterMessage(ListAppKeysRequest)
PostKeysRequest = _reflection.GeneratedProtocolMessageType('PostKeysRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTKEYSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostKeysRequest)
  })
_sym_db.RegisterMessage(PostKeysRequest)
DeleteKeyRequest = _reflection.GeneratedProtocolMessageType('DeleteKeyRequest', (_message.Message,), {
  'DESCRIPTOR' : _DELETEKEYREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.DeleteKeyRequest)
  })
_sym_db.RegisterMessage(DeleteKeyRequest)
PatchKeysRequest = _reflection.GeneratedProtocolMessageType('PatchKeysRequest', (_message.Message,), {
  'DESCRIPTOR' : _PATCHKEYSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PatchKeysRequest)
  })
_sym_db.RegisterMessage(PatchKeysRequest)
SingleKeyResponse = _reflection.GeneratedProtocolMessageType('SingleKeyResponse', (_message.Message,), {
  'DESCRIPTOR' : _SINGLEKEYRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.SingleKeyResponse)
  })
_sym_db.RegisterMessage(SingleKeyResponse)
MultiKeyResponse = _reflection.GeneratedProtocolMessageType('MultiKeyResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTIKEYRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiKeyResponse)
  })
_sym_db.RegisterMessage(MultiKeyResponse)
# --- Generated protobuf registrations: model, model-version, model-type,
# --- model-reference, and output messages. protoc-generated code: do not edit by hand.
GetModelRequest = _reflection.GeneratedProtocolMessageType('GetModelRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETMODELREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.GetModelRequest)
  })
_sym_db.RegisterMessage(GetModelRequest)
ListModelsRequest = _reflection.GeneratedProtocolMessageType('ListModelsRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTMODELSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListModelsRequest)
  })
_sym_db.RegisterMessage(ListModelsRequest)
PostModelsRequest = _reflection.GeneratedProtocolMessageType('PostModelsRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTMODELSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostModelsRequest)
  })
_sym_db.RegisterMessage(PostModelsRequest)
PatchModelsRequest = _reflection.GeneratedProtocolMessageType('PatchModelsRequest', (_message.Message,), {
  'DESCRIPTOR' : _PATCHMODELSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PatchModelsRequest)
  })
_sym_db.RegisterMessage(PatchModelsRequest)
DeleteModelRequest = _reflection.GeneratedProtocolMessageType('DeleteModelRequest', (_message.Message,), {
  'DESCRIPTOR' : _DELETEMODELREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.DeleteModelRequest)
  })
_sym_db.RegisterMessage(DeleteModelRequest)
DeleteModelsRequest = _reflection.GeneratedProtocolMessageType('DeleteModelsRequest', (_message.Message,), {
  'DESCRIPTOR' : _DELETEMODELSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.DeleteModelsRequest)
  })
_sym_db.RegisterMessage(DeleteModelsRequest)
PostModelsSearchesRequest = _reflection.GeneratedProtocolMessageType('PostModelsSearchesRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTMODELSSEARCHESREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostModelsSearchesRequest)
  })
_sym_db.RegisterMessage(PostModelsSearchesRequest)
SingleModelResponse = _reflection.GeneratedProtocolMessageType('SingleModelResponse', (_message.Message,), {
  'DESCRIPTOR' : _SINGLEMODELRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.SingleModelResponse)
  })
_sym_db.RegisterMessage(SingleModelResponse)
MultiModelResponse = _reflection.GeneratedProtocolMessageType('MultiModelResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTIMODELRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiModelResponse)
  })
_sym_db.RegisterMessage(MultiModelResponse)
PatchModelVersionsRequest = _reflection.GeneratedProtocolMessageType('PatchModelVersionsRequest', (_message.Message,), {
  'DESCRIPTOR' : _PATCHMODELVERSIONSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PatchModelVersionsRequest)
  })
_sym_db.RegisterMessage(PatchModelVersionsRequest)
GetModelVersionRequest = _reflection.GeneratedProtocolMessageType('GetModelVersionRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETMODELVERSIONREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.GetModelVersionRequest)
  })
_sym_db.RegisterMessage(GetModelVersionRequest)
ListModelVersionsRequest = _reflection.GeneratedProtocolMessageType('ListModelVersionsRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTMODELVERSIONSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListModelVersionsRequest)
  })
_sym_db.RegisterMessage(ListModelVersionsRequest)
DeleteModelVersionRequest = _reflection.GeneratedProtocolMessageType('DeleteModelVersionRequest', (_message.Message,), {
  'DESCRIPTOR' : _DELETEMODELVERSIONREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.DeleteModelVersionRequest)
  })
_sym_db.RegisterMessage(DeleteModelVersionRequest)
SingleModelVersionResponse = _reflection.GeneratedProtocolMessageType('SingleModelVersionResponse', (_message.Message,), {
  'DESCRIPTOR' : _SINGLEMODELVERSIONRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.SingleModelVersionResponse)
  })
_sym_db.RegisterMessage(SingleModelVersionResponse)
MultiModelVersionResponse = _reflection.GeneratedProtocolMessageType('MultiModelVersionResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTIMODELVERSIONRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiModelVersionResponse)
  })
_sym_db.RegisterMessage(MultiModelVersionResponse)
PostModelVersionsRequest = _reflection.GeneratedProtocolMessageType('PostModelVersionsRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTMODELVERSIONSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostModelVersionsRequest)
  })
_sym_db.RegisterMessage(PostModelVersionsRequest)
PostModelVersionMetricsRequest = _reflection.GeneratedProtocolMessageType('PostModelVersionMetricsRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTMODELVERSIONMETRICSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostModelVersionMetricsRequest)
  })
_sym_db.RegisterMessage(PostModelVersionMetricsRequest)
GetModelVersionMetricsRequest = _reflection.GeneratedProtocolMessageType('GetModelVersionMetricsRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETMODELVERSIONMETRICSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.GetModelVersionMetricsRequest)
  })
_sym_db.RegisterMessage(GetModelVersionMetricsRequest)
GetModelTypeRequest = _reflection.GeneratedProtocolMessageType('GetModelTypeRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETMODELTYPEREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.GetModelTypeRequest)
  })
_sym_db.RegisterMessage(GetModelTypeRequest)
ListModelTypesRequest = _reflection.GeneratedProtocolMessageType('ListModelTypesRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTMODELTYPESREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListModelTypesRequest)
  })
_sym_db.RegisterMessage(ListModelTypesRequest)
ListOpenSourceLicensesRequest = _reflection.GeneratedProtocolMessageType('ListOpenSourceLicensesRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTOPENSOURCELICENSESREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListOpenSourceLicensesRequest)
  })
_sym_db.RegisterMessage(ListOpenSourceLicensesRequest)
ListOpenSourceLicensesResponse = _reflection.GeneratedProtocolMessageType('ListOpenSourceLicensesResponse', (_message.Message,), {
  'DESCRIPTOR' : _LISTOPENSOURCELICENSESRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListOpenSourceLicensesResponse)
  })
_sym_db.RegisterMessage(ListOpenSourceLicensesResponse)
SingleModelTypeResponse = _reflection.GeneratedProtocolMessageType('SingleModelTypeResponse', (_message.Message,), {
  'DESCRIPTOR' : _SINGLEMODELTYPERESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.SingleModelTypeResponse)
  })
_sym_db.RegisterMessage(SingleModelTypeResponse)
MultiModelTypeResponse = _reflection.GeneratedProtocolMessageType('MultiModelTypeResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTIMODELTYPERESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiModelTypeResponse)
  })
_sym_db.RegisterMessage(MultiModelTypeResponse)
GetModelVersionInputExampleRequest = _reflection.GeneratedProtocolMessageType('GetModelVersionInputExampleRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETMODELVERSIONINPUTEXAMPLEREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.GetModelVersionInputExampleRequest)
  })
_sym_db.RegisterMessage(GetModelVersionInputExampleRequest)
ListModelVersionInputExamplesRequest = _reflection.GeneratedProtocolMessageType('ListModelVersionInputExamplesRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTMODELVERSIONINPUTEXAMPLESREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListModelVersionInputExamplesRequest)
  })
_sym_db.RegisterMessage(ListModelVersionInputExamplesRequest)
SingleModelVersionInputExampleResponse = _reflection.GeneratedProtocolMessageType('SingleModelVersionInputExampleResponse', (_message.Message,), {
  'DESCRIPTOR' : _SINGLEMODELVERSIONINPUTEXAMPLERESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.SingleModelVersionInputExampleResponse)
  })
_sym_db.RegisterMessage(SingleModelVersionInputExampleResponse)
MultiModelVersionInputExampleResponse = _reflection.GeneratedProtocolMessageType('MultiModelVersionInputExampleResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTIMODELVERSIONINPUTEXAMPLERESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiModelVersionInputExampleResponse)
  })
_sym_db.RegisterMessage(MultiModelVersionInputExampleResponse)
ListModelReferencesRequest = _reflection.GeneratedProtocolMessageType('ListModelReferencesRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTMODELREFERENCESREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListModelReferencesRequest)
  })
_sym_db.RegisterMessage(ListModelReferencesRequest)
MultiModelReferenceResponse = _reflection.GeneratedProtocolMessageType('MultiModelReferenceResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTIMODELREFERENCERESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiModelReferenceResponse)
  })
_sym_db.RegisterMessage(MultiModelReferenceResponse)
MultiOutputResponse = _reflection.GeneratedProtocolMessageType('MultiOutputResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTIOUTPUTRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiOutputResponse)
  })
_sym_db.RegisterMessage(MultiOutputResponse)
# --- Generated protobuf registrations: scope and search messages (including
# --- annotation-search-metrics). protoc-generated code: do not edit by hand.
ListScopesRequest = _reflection.GeneratedProtocolMessageType('ListScopesRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTSCOPESREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListScopesRequest)
  })
_sym_db.RegisterMessage(ListScopesRequest)
MyScopesRequest = _reflection.GeneratedProtocolMessageType('MyScopesRequest', (_message.Message,), {
  'DESCRIPTOR' : _MYSCOPESREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MyScopesRequest)
  })
_sym_db.RegisterMessage(MyScopesRequest)
MultiScopeDepsResponse = _reflection.GeneratedProtocolMessageType('MultiScopeDepsResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTISCOPEDEPSRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiScopeDepsResponse)
  })
_sym_db.RegisterMessage(MultiScopeDepsResponse)
MultiScopeResponse = _reflection.GeneratedProtocolMessageType('MultiScopeResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTISCOPERESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiScopeResponse)
  })
_sym_db.RegisterMessage(MultiScopeResponse)
GetSearchRequest = _reflection.GeneratedProtocolMessageType('GetSearchRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETSEARCHREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.GetSearchRequest)
  })
_sym_db.RegisterMessage(GetSearchRequest)
ListSearchesRequest = _reflection.GeneratedProtocolMessageType('ListSearchesRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTSEARCHESREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListSearchesRequest)
  })
_sym_db.RegisterMessage(ListSearchesRequest)
PostSearchesRequest = _reflection.GeneratedProtocolMessageType('PostSearchesRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTSEARCHESREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostSearchesRequest)
  })
_sym_db.RegisterMessage(PostSearchesRequest)
PostSearchesByIDRequest = _reflection.GeneratedProtocolMessageType('PostSearchesByIDRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTSEARCHESBYIDREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostSearchesByIDRequest)
  })
_sym_db.RegisterMessage(PostSearchesByIDRequest)
DeleteSearchRequest = _reflection.GeneratedProtocolMessageType('DeleteSearchRequest', (_message.Message,), {
  'DESCRIPTOR' : _DELETESEARCHREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.DeleteSearchRequest)
  })
_sym_db.RegisterMessage(DeleteSearchRequest)
PostAnnotationsSearchesRequest = _reflection.GeneratedProtocolMessageType('PostAnnotationsSearchesRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTANNOTATIONSSEARCHESREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostAnnotationsSearchesRequest)
  })
_sym_db.RegisterMessage(PostAnnotationsSearchesRequest)
DeleteAnnotationSearchMetricsRequest = _reflection.GeneratedProtocolMessageType('DeleteAnnotationSearchMetricsRequest', (_message.Message,), {
  'DESCRIPTOR' : _DELETEANNOTATIONSEARCHMETRICSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.DeleteAnnotationSearchMetricsRequest)
  })
_sym_db.RegisterMessage(DeleteAnnotationSearchMetricsRequest)
PostInputsSearchesRequest = _reflection.GeneratedProtocolMessageType('PostInputsSearchesRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTINPUTSSEARCHESREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostInputsSearchesRequest)
  })
_sym_db.RegisterMessage(PostInputsSearchesRequest)
SingleSearchResponse = _reflection.GeneratedProtocolMessageType('SingleSearchResponse', (_message.Message,), {
  'DESCRIPTOR' : _SINGLESEARCHRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.SingleSearchResponse)
  })
_sym_db.RegisterMessage(SingleSearchResponse)
MultiSearchResponse = _reflection.GeneratedProtocolMessageType('MultiSearchResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTISEARCHRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiSearchResponse)
  })
_sym_db.RegisterMessage(MultiSearchResponse)
PostAnnotationSearchMetricsRequest = _reflection.GeneratedProtocolMessageType('PostAnnotationSearchMetricsRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTANNOTATIONSEARCHMETRICSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostAnnotationSearchMetricsRequest)
  })
_sym_db.RegisterMessage(PostAnnotationSearchMetricsRequest)
GetAnnotationSearchMetricsRequest = _reflection.GeneratedProtocolMessageType('GetAnnotationSearchMetricsRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETANNOTATIONSEARCHMETRICSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.GetAnnotationSearchMetricsRequest)
  })
_sym_db.RegisterMessage(GetAnnotationSearchMetricsRequest)
ListAnnotationSearchMetricsRequest = _reflection.GeneratedProtocolMessageType('ListAnnotationSearchMetricsRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTANNOTATIONSEARCHMETRICSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListAnnotationSearchMetricsRequest)
  })
_sym_db.RegisterMessage(ListAnnotationSearchMetricsRequest)
MultiAnnotationSearchMetricsResponse = _reflection.GeneratedProtocolMessageType('MultiAnnotationSearchMetricsResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTIANNOTATIONSEARCHMETRICSRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiAnnotationSearchMetricsResponse)
  })
_sym_db.RegisterMessage(MultiAnnotationSearchMetricsResponse)
# --- Generated protobuf registrations: password-validation and workflow messages.
# --- protoc-generated code: do not edit by hand.
PostValidatePasswordRequest = _reflection.GeneratedProtocolMessageType('PostValidatePasswordRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTVALIDATEPASSWORDREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostValidatePasswordRequest)
  })
_sym_db.RegisterMessage(PostValidatePasswordRequest)
SinglePasswordValidationResponse = _reflection.GeneratedProtocolMessageType('SinglePasswordValidationResponse', (_message.Message,), {
  'DESCRIPTOR' : _SINGLEPASSWORDVALIDATIONRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.SinglePasswordValidationResponse)
  })
_sym_db.RegisterMessage(SinglePasswordValidationResponse)
GetWorkflowRequest = _reflection.GeneratedProtocolMessageType('GetWorkflowRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETWORKFLOWREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.GetWorkflowRequest)
  })
_sym_db.RegisterMessage(GetWorkflowRequest)
ListWorkflowsRequest = _reflection.GeneratedProtocolMessageType('ListWorkflowsRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTWORKFLOWSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListWorkflowsRequest)
  })
_sym_db.RegisterMessage(ListWorkflowsRequest)
PostWorkflowsRequest = _reflection.GeneratedProtocolMessageType('PostWorkflowsRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTWORKFLOWSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostWorkflowsRequest)
  })
_sym_db.RegisterMessage(PostWorkflowsRequest)
PatchWorkflowsRequest = _reflection.GeneratedProtocolMessageType('PatchWorkflowsRequest', (_message.Message,), {
  'DESCRIPTOR' : _PATCHWORKFLOWSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PatchWorkflowsRequest)
  })
_sym_db.RegisterMessage(PatchWorkflowsRequest)
DeleteWorkflowRequest = _reflection.GeneratedProtocolMessageType('DeleteWorkflowRequest', (_message.Message,), {
  'DESCRIPTOR' : _DELETEWORKFLOWREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.DeleteWorkflowRequest)
  })
_sym_db.RegisterMessage(DeleteWorkflowRequest)
DeleteWorkflowsRequest = _reflection.GeneratedProtocolMessageType('DeleteWorkflowsRequest', (_message.Message,), {
  'DESCRIPTOR' : _DELETEWORKFLOWSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.DeleteWorkflowsRequest)
  })
_sym_db.RegisterMessage(DeleteWorkflowsRequest)
SingleWorkflowResponse = _reflection.GeneratedProtocolMessageType('SingleWorkflowResponse', (_message.Message,), {
  'DESCRIPTOR' : _SINGLEWORKFLOWRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.SingleWorkflowResponse)
  })
_sym_db.RegisterMessage(SingleWorkflowResponse)
MultiWorkflowResponse = _reflection.GeneratedProtocolMessageType('MultiWorkflowResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTIWORKFLOWRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiWorkflowResponse)
  })
_sym_db.RegisterMessage(MultiWorkflowResponse)
PostWorkflowResultsRequest = _reflection.GeneratedProtocolMessageType('PostWorkflowResultsRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTWORKFLOWRESULTSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostWorkflowResultsRequest)
  })
_sym_db.RegisterMessage(PostWorkflowResultsRequest)
PostWorkflowResultsResponse = _reflection.GeneratedProtocolMessageType('PostWorkflowResultsResponse', (_message.Message,), {
'DESCRIPTOR' : _POSTWORKFLOWRESULTSRESPONSE,
'__module__' : 'proto.clarifai.api.service_pb2'
# @@protoc_insertion_point(class_scope:clarifai.api.PostWorkflowResultsResponse)
})
_sym_db.RegisterMessage(PostWorkflowResultsResponse)
PostWorkflowResultsSimilarityRequest = _reflection.GeneratedProtocolMessageType('PostWorkflowResultsSimilarityRequest', (_message.Message,), {
'DESCRIPTOR' : _POSTWORKFLOWRESULTSSIMILARITYREQUEST,
'__module__' : 'proto.clarifai.api.service_pb2'
# @@protoc_insertion_point(class_scope:clarifai.api.PostWorkflowResultsSimilarityRequest)
})
_sym_db.RegisterMessage(PostWorkflowResultsSimilarityRequest)
PostWorkflowResultsSimilarityResponse = _reflection.GeneratedProtocolMessageType('PostWorkflowResultsSimilarityResponse', (_message.Message,), {
'DESCRIPTOR' : _POSTWORKFLOWRESULTSSIMILARITYRESPONSE,
'__module__' : 'proto.clarifai.api.service_pb2'
# @@protoc_insertion_point(class_scope:clarifai.api.PostWorkflowResultsSimilarityResponse)
})
_sym_db.RegisterMessage(PostWorkflowResultsSimilarityResponse)
ListWorkflowVersionsRequest = _reflection.GeneratedProtocolMessageType('ListWorkflowVersionsRequest', (_message.Message,), {
'DESCRIPTOR' : _LISTWORKFLOWVERSIONSREQUEST,
'__module__' : 'proto.clarifai.api.service_pb2'
# @@protoc_insertion_point(class_scope:clarifai.api.ListWorkflowVersionsRequest)
})
_sym_db.RegisterMessage(ListWorkflowVersionsRequest)
GetWorkflowVersionRequest = _reflection.GeneratedProtocolMessageType('GetWorkflowVersionRequest', (_message.Message,), {
'DESCRIPTOR' : _GETWORKFLOWVERSIONREQUEST,
'__module__' : 'proto.clarifai.api.service_pb2'
# @@protoc_insertion_point(class_scope:clarifai.api.GetWorkflowVersionRequest)
})
_sym_db.RegisterMessage(GetWorkflowVersionRequest)
DeleteWorkflowVersionsRequest = _reflection.GeneratedProtocolMessageType('DeleteWorkflowVersionsRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETEWORKFLOWVERSIONSREQUEST,
'__module__' : 'proto.clarifai.api.service_pb2'
# @@protoc_insertion_point(class_scope:clarifai.api.DeleteWorkflowVersionsRequest)
})
_sym_db.RegisterMessage(DeleteWorkflowVersionsRequest)
PatchWorkflowVersionsRequest = _reflection.GeneratedProtocolMessageType('PatchWorkflowVersionsRequest', (_message.Message,), {
'DESCRIPTOR' : _PATCHWORKFLOWVERSIONSREQUEST,
'__module__' : 'proto.clarifai.api.service_pb2'
# @@protoc_insertion_point(class_scope:clarifai.api.PatchWorkflowVersionsRequest)
})
_sym_db.RegisterMessage(PatchWorkflowVersionsRequest)
MultiWorkflowVersionResponse = _reflection.GeneratedProtocolMessageType('MultiWorkflowVersionResponse', (_message.Message,), {
'DESCRIPTOR' : _MULTIWORKFLOWVERSIONRESPONSE,
'__module__' : 'proto.clarifai.api.service_pb2'
# @@protoc_insertion_point(class_scope:clarifai.api.MultiWorkflowVersionResponse)
})
_sym_db.RegisterMessage(MultiWorkflowVersionResponse)
SingleWorkflowVersionResponse = _reflection.GeneratedProtocolMessageType('SingleWorkflowVersionResponse', (_message.Message,), {
'DESCRIPTOR' : _SINGLEWORKFLOWVERSIONRESPONSE,
'__module__' : 'proto.clarifai.api.service_pb2'
# @@protoc_insertion_point(class_scope:clarifai.api.SingleWorkflowVersionResponse)
})
_sym_db.RegisterMessage(SingleWorkflowVersionResponse)
# App-duplication request/response message classes (protoc-generated
# reflection bindings; do not hand-edit).
PostAppDuplicationsRequest = _reflection.GeneratedProtocolMessageType('PostAppDuplicationsRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTAPPDUPLICATIONSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostAppDuplicationsRequest)
  })
_sym_db.RegisterMessage(PostAppDuplicationsRequest)
GetAppDuplicationRequest = _reflection.GeneratedProtocolMessageType('GetAppDuplicationRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETAPPDUPLICATIONREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.GetAppDuplicationRequest)
  })
_sym_db.RegisterMessage(GetAppDuplicationRequest)
ListAppDuplicationsRequest = _reflection.GeneratedProtocolMessageType('ListAppDuplicationsRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTAPPDUPLICATIONSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListAppDuplicationsRequest)
  })
_sym_db.RegisterMessage(ListAppDuplicationsRequest)
MultiAppDuplicationsResponse = _reflection.GeneratedProtocolMessageType('MultiAppDuplicationsResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTIAPPDUPLICATIONSRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiAppDuplicationsResponse)
  })
_sym_db.RegisterMessage(MultiAppDuplicationsResponse)
SingleAppDuplicationResponse = _reflection.GeneratedProtocolMessageType('SingleAppDuplicationResponse', (_message.Message,), {
  'DESCRIPTOR' : _SINGLEAPPDUPLICATIONRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.SingleAppDuplicationResponse)
  })
_sym_db.RegisterMessage(SingleAppDuplicationResponse)
# Task CRUD and task-count request/response message classes
# (protoc-generated reflection bindings; do not hand-edit).
PostTasksRequest = _reflection.GeneratedProtocolMessageType('PostTasksRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTTASKSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostTasksRequest)
  })
_sym_db.RegisterMessage(PostTasksRequest)
GetTaskRequest = _reflection.GeneratedProtocolMessageType('GetTaskRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETTASKREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.GetTaskRequest)
  })
_sym_db.RegisterMessage(GetTaskRequest)
ListTasksRequest = _reflection.GeneratedProtocolMessageType('ListTasksRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTTASKSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListTasksRequest)
  })
_sym_db.RegisterMessage(ListTasksRequest)
PatchTasksRequest = _reflection.GeneratedProtocolMessageType('PatchTasksRequest', (_message.Message,), {
  'DESCRIPTOR' : _PATCHTASKSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PatchTasksRequest)
  })
_sym_db.RegisterMessage(PatchTasksRequest)
DeleteTasksRequest = _reflection.GeneratedProtocolMessageType('DeleteTasksRequest', (_message.Message,), {
  'DESCRIPTOR' : _DELETETASKSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.DeleteTasksRequest)
  })
_sym_db.RegisterMessage(DeleteTasksRequest)
MultiTaskResponse = _reflection.GeneratedProtocolMessageType('MultiTaskResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTITASKRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiTaskResponse)
  })
_sym_db.RegisterMessage(MultiTaskResponse)
SingleTaskResponse = _reflection.GeneratedProtocolMessageType('SingleTaskResponse', (_message.Message,), {
  'DESCRIPTOR' : _SINGLETASKRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.SingleTaskResponse)
  })
_sym_db.RegisterMessage(SingleTaskResponse)
GetTaskCountRequest = _reflection.GeneratedProtocolMessageType('GetTaskCountRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETTASKCOUNTREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.GetTaskCountRequest)
  })
_sym_db.RegisterMessage(GetTaskCountRequest)
SingleTaskCountResponse = _reflection.GeneratedProtocolMessageType('SingleTaskCountResponse', (_message.Message,), {
  'DESCRIPTOR' : _SINGLETASKCOUNTRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.SingleTaskCountResponse)
  })
_sym_db.RegisterMessage(SingleTaskCountResponse)
# Collector CRUD request/response message classes (protoc-generated
# reflection bindings; do not hand-edit).
PostCollectorsRequest = _reflection.GeneratedProtocolMessageType('PostCollectorsRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTCOLLECTORSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostCollectorsRequest)
  })
_sym_db.RegisterMessage(PostCollectorsRequest)
PatchCollectorsRequest = _reflection.GeneratedProtocolMessageType('PatchCollectorsRequest', (_message.Message,), {
  'DESCRIPTOR' : _PATCHCOLLECTORSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PatchCollectorsRequest)
  })
_sym_db.RegisterMessage(PatchCollectorsRequest)
DeleteCollectorsRequest = _reflection.GeneratedProtocolMessageType('DeleteCollectorsRequest', (_message.Message,), {
  'DESCRIPTOR' : _DELETECOLLECTORSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.DeleteCollectorsRequest)
  })
_sym_db.RegisterMessage(DeleteCollectorsRequest)
GetCollectorRequest = _reflection.GeneratedProtocolMessageType('GetCollectorRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETCOLLECTORREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.GetCollectorRequest)
  })
_sym_db.RegisterMessage(GetCollectorRequest)
ListCollectorsRequest = _reflection.GeneratedProtocolMessageType('ListCollectorsRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTCOLLECTORSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListCollectorsRequest)
  })
_sym_db.RegisterMessage(ListCollectorsRequest)
MultiCollectorResponse = _reflection.GeneratedProtocolMessageType('MultiCollectorResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTICOLLECTORRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiCollectorResponse)
  })
_sym_db.RegisterMessage(MultiCollectorResponse)
SingleCollectorResponse = _reflection.GeneratedProtocolMessageType('SingleCollectorResponse', (_message.Message,), {
  'DESCRIPTOR' : _SINGLECOLLECTORRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.SingleCollectorResponse)
  })
_sym_db.RegisterMessage(SingleCollectorResponse)
# Stat-value and stat-value-aggregate request/response message classes
# (protoc-generated reflection bindings; do not hand-edit).
PostStatValuesRequest = _reflection.GeneratedProtocolMessageType('PostStatValuesRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTSTATVALUESREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostStatValuesRequest)
  })
_sym_db.RegisterMessage(PostStatValuesRequest)
MultiStatValueResponse = _reflection.GeneratedProtocolMessageType('MultiStatValueResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTISTATVALUERESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiStatValueResponse)
  })
_sym_db.RegisterMessage(MultiStatValueResponse)
PostStatValuesAggregateRequest = _reflection.GeneratedProtocolMessageType('PostStatValuesAggregateRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTSTATVALUESAGGREGATEREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostStatValuesAggregateRequest)
  })
_sym_db.RegisterMessage(PostStatValuesAggregateRequest)
MultiStatValueAggregateResponse = _reflection.GeneratedProtocolMessageType('MultiStatValueAggregateResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTISTATVALUEAGGREGATERESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiStatValueAggregateResponse)
  })
_sym_db.RegisterMessage(MultiStatValueAggregateResponse)
# Trending-metrics-view request/response message classes (protoc-generated
# reflection bindings; do not hand-edit).
PostTrendingMetricsViewRequest = _reflection.GeneratedProtocolMessageType('PostTrendingMetricsViewRequest', (_message.Message,), {
  'DESCRIPTOR' : _POSTTRENDINGMETRICSVIEWREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.PostTrendingMetricsViewRequest)
  })
_sym_db.RegisterMessage(PostTrendingMetricsViewRequest)
ListTrendingMetricsViewsRequest = _reflection.GeneratedProtocolMessageType('ListTrendingMetricsViewsRequest', (_message.Message,), {
  'DESCRIPTOR' : _LISTTRENDINGMETRICSVIEWSREQUEST,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.ListTrendingMetricsViewsRequest)
  })
_sym_db.RegisterMessage(ListTrendingMetricsViewsRequest)
MultiTrendingMetricsViewResponse = _reflection.GeneratedProtocolMessageType('MultiTrendingMetricsViewResponse', (_message.Message,), {
  'DESCRIPTOR' : _MULTITRENDINGMETRICSVIEWRESPONSE,
  '__module__' : 'proto.clarifai.api.service_pb2'
  # @@protoc_insertion_point(class_scope:clarifai.api.MultiTrendingMetricsViewResponse)
  })
_sym_db.RegisterMessage(MultiTrendingMetricsViewResponse)
# Standard protoc output: clearing the eagerly-built `_options` on the file
# descriptor and on selected field descriptors. NOTE(review): presumably this
# makes the runtime fall back to each descriptor's serialized options blob
# (parsed lazily on first access) — confirm against the installed
# google.protobuf version's generated-code conventions.
DESCRIPTOR._options = None
_MULTIANNOTATIONRESPONSE.fields_by_name['annotations']._options = None
_LISTAPPSREQUEST.fields_by_name['name']._options = None
_MULTIAPPRESPONSE.fields_by_name['apps']._options = None
_MULTICOLLABORATORSRESPONSE.fields_by_name['collaborators']._options = None
_MULTICOLLABORATIONSRESPONSE.fields_by_name['collaborations']._options = None
_MULTICONCEPTRESPONSE.fields_by_name['concepts']._options = None
_MULTICONCEPTCOUNTRESPONSE.fields_by_name['concept_counts']._options = None
_MULTICONCEPTRELATIONRESPONSE.fields_by_name['concept_relations']._options = None
_MULTIKNOWLEDGEGRAPHRESPONSE.fields_by_name['knowledge_graphs']._options = None
_MULTICONCEPTLANGUAGERESPONSE.fields_by_name['concept_languages']._options = None
_MULTIINPUTRESPONSE.fields_by_name['inputs']._options = None
_MULTIINPUTANNOTATIONRESPONSE.fields_by_name['hits']._options = None
_MULTIKEYRESPONSE.fields_by_name['keys']._options = None
_LISTMODELSREQUEST.fields_by_name['name']._options = None
_POSTMODELSREQUEST.fields_by_name['model']._options = None
_MULTIMODELRESPONSE.fields_by_name['models']._options = None
_MULTIMODELVERSIONRESPONSE.fields_by_name['model_versions']._options = None
_POSTMODELVERSIONSREQUEST.fields_by_name['search']._options = None
_SINGLEMODELTYPERESPONSE.fields_by_name['model_type']._options = None
_MULTIMODELTYPERESPONSE.fields_by_name['model_types']._options = None
_MULTIOUTPUTRESPONSE.fields_by_name['outputs']._options = None
_POSTSEARCHESREQUEST.fields_by_name['query']._options = None
_MULTISEARCHRESPONSE.fields_by_name['hits']._options = None
_LISTWORKFLOWSREQUEST.fields_by_name['id']._options = None
_MULTIWORKFLOWRESPONSE.fields_by_name['workflows']._options = None
_MULTIWORKFLOWVERSIONRESPONSE.fields_by_name['workflow_versions']._options = None
_MULTITASKRESPONSE.fields_by_name['tasks']._options = None
_MULTISTATVALUERESPONSE.fields_by_name['stat_values']._options = None
_V2 = _descriptor.ServiceDescriptor(
name='V2',
full_name='clarifai.api.V2',
file=DESCRIPTOR,
index=0,
serialized_options=None,
serialized_start=23321,
serialized_end=50689,
methods=[
_descriptor.MethodDescriptor(
name='ListConceptRelations',
full_name='clarifai.api.V2.ListConceptRelations',
index=0,
containing_service=None,
input_type=_LISTCONCEPTRELATIONSREQUEST,
output_type=_MULTICONCEPTRELATIONRESPONSE,
serialized_options=b'\202\323\344\223\002\253\001\022Y/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/{concept_id}/relationsZN\022L/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/relations\230\234\'\005\220\234\'\013',
),
_descriptor.MethodDescriptor(
name='PostConceptRelations',
full_name='clarifai.api.V2.PostConceptRelations',
index=1,
containing_service=None,
input_type=_POSTCONCEPTRELATIONSREQUEST,
output_type=_MULTICONCEPTRELATIONRESPONSE,
serialized_options=b'\202\323\344\223\002^\"Y/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/{concept_id}/relations:\001*\230\234\'\005\220\234\'\n\220\234\'\013',
),
_descriptor.MethodDescriptor(
name='DeleteConceptRelations',
full_name='clarifai.api.V2.DeleteConceptRelations',
index=2,
containing_service=None,
input_type=_DELETECONCEPTRELATIONSREQUEST,
output_type=proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._BASERESPONSE,
serialized_options=b'\202\323\344\223\002^*Y/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/{concept_id}/relations:\001*\230\234\'\005\220\234\'\r\220\234\'\n\220\234\'\013',
),
_descriptor.MethodDescriptor(
name='GetConceptCounts',
full_name='clarifai.api.V2.GetConceptCounts',
index=3,
containing_service=None,
input_type=_GETCONCEPTCOUNTSREQUEST,
output_type=_MULTICONCEPTCOUNTRESPONSE,
serialized_options=b'\202\323\344\223\002b\022I/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/statusZ\025\022\023/v2/concepts/status\230\234\'\002\220\234\'&\220\234\'\013\220\234\'\005',
),
_descriptor.MethodDescriptor(
name='GetConcept',
full_name='clarifai.api.V2.GetConcept',
index=4,
containing_service=None,
input_type=_GETCONCEPTREQUEST,
output_type=_SINGLECONCEPTRESPONSE,
serialized_options=b'\202\323\344\223\002n\022O/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/{concept_id}Z\033\022\031/v2/concepts/{concept_id}\230\234\'\002\220\234\'\013',
),
_descriptor.MethodDescriptor(
name='ListConcepts',
full_name='clarifai.api.V2.ListConcepts',
index=5,
containing_service=None,
input_type=_LISTCONCEPTSREQUEST,
output_type=_MULTICONCEPTRESPONSE,
serialized_options=b'\202\323\344\223\002T\022B/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/conceptsZ\016\022\014/v2/concepts\230\234\'\002\220\234\'\013',
),
_descriptor.MethodDescriptor(
name='PostConceptsSearches',
full_name='clarifai.api.V2.PostConceptsSearches',
index=6,
containing_service=None,
input_type=_POSTCONCEPTSSEARCHESREQUEST,
output_type=_MULTICONCEPTRESPONSE,
serialized_options=b'\202\323\344\223\002l\"K/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/searches:\001*Z\032\"\025/v2/concepts/searches:\001*\230\234\'\002\220\234\'\013\220\234\'\017\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='PostConcepts',
full_name='clarifai.api.V2.PostConcepts',
index=7,
containing_service=None,
input_type=_POSTCONCEPTSREQUEST,
output_type=_MULTICONCEPTRESPONSE,
serialized_options=b'\202\323\344\223\002Z\"B/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts:\001*Z\021\"\014/v2/concepts:\001*\230\234\'\002\220\234\'\n\220\234\'\013',
),
_descriptor.MethodDescriptor(
name='PatchConcepts',
full_name='clarifai.api.V2.PatchConcepts',
index=8,
containing_service=None,
input_type=_PATCHCONCEPTSREQUEST,
output_type=_MULTICONCEPTRESPONSE,
serialized_options=b'\202\323\344\223\002Z2B/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts:\001*Z\0212\014/v2/concepts:\001*\230\234\'\002\220\234\'\n\220\234\'\013',
),
_descriptor.MethodDescriptor(
name='GetConceptLanguage',
full_name='clarifai.api.V2.GetConceptLanguage',
index=9,
containing_service=None,
input_type=_GETCONCEPTLANGUAGEREQUEST,
output_type=_SINGLECONCEPTLANGUAGERESPONSE,
serialized_options=b'\202\323\344\223\002\230\001\022d/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/{concept_id}/languages/{language}Z0\022./v2/concepts/{concept_id}/languages/{language}\230\234\'\002\220\234\'\013',
),
_descriptor.MethodDescriptor(
name='ListConceptLanguages',
full_name='clarifai.api.V2.ListConceptLanguages',
index=10,
containing_service=None,
input_type=_LISTCONCEPTLANGUAGESREQUEST,
output_type=_MULTICONCEPTLANGUAGERESPONSE,
serialized_options=b'\202\323\344\223\002\202\001\022Y/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/{concept_id}/languagesZ%\022#/v2/concepts/{concept_id}/languages\230\234\'\002\220\234\'\013',
),
_descriptor.MethodDescriptor(
name='PostConceptLanguages',
full_name='clarifai.api.V2.PostConceptLanguages',
index=11,
containing_service=None,
input_type=_POSTCONCEPTLANGUAGESREQUEST,
output_type=_MULTICONCEPTLANGUAGERESPONSE,
serialized_options=b'\202\323\344\223\002\210\001\"Y/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/{concept_id}/languages:\001*Z(\"#/v2/concepts/{concept_id}/languages:\001*\230\234\'\002\220\234\'\n\220\234\'\013',
),
_descriptor.MethodDescriptor(
name='PatchConceptLanguages',
full_name='clarifai.api.V2.PatchConceptLanguages',
index=12,
containing_service=None,
input_type=_PATCHCONCEPTLANGUAGESREQUEST,
output_type=_MULTICONCEPTLANGUAGERESPONSE,
serialized_options=b'\202\323\344\223\002\210\0012Y/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/{concept_id}/languages:\001*Z(2#/v2/concepts/{concept_id}/languages:\001*\230\234\'\002\220\234\'\n\220\234\'\013',
),
_descriptor.MethodDescriptor(
name='ListKnowledgeGraphs',
full_name='clarifai.api.V2.ListKnowledgeGraphs',
index=13,
containing_service=None,
input_type=_LISTKNOWLEDGEGRAPHSREQUEST,
output_type=_MULTIKNOWLEDGEGRAPHRESPONSE,
serialized_options=b'\202\323\344\223\002v\022S/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/knowledge_graphsZ\037\022\035/v2/concepts/knowledge_graphs\230\234\'\002\220\234\'\013',
),
_descriptor.MethodDescriptor(
name='PostKnowledgeGraphs',
full_name='clarifai.api.V2.PostKnowledgeGraphs',
index=14,
containing_service=None,
input_type=_POSTKNOWLEDGEGRAPHSREQUEST,
output_type=_MULTIKNOWLEDGEGRAPHRESPONSE,
serialized_options=b'\202\323\344\223\002|\"S/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/knowledge_graphs:\001*Z\"\"\035/v2/concepts/knowledge_graphs:\001*\230\234\'\002\220\234\'\n\220\234\'\013',
),
_descriptor.MethodDescriptor(
name='PostConceptMappingJobs',
full_name='clarifai.api.V2.PostConceptMappingJobs',
index=15,
containing_service=None,
input_type=_POSTCONCEPTMAPPINGJOBSREQUEST,
output_type=_MULTICONCEPTMAPPINGJOBRESPONSE,
serialized_options=b'\202\323\344\223\002v\"P/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/concepts/mappings/jobs:\001*Z\037\"\032/v2/concepts/mappings/jobs:\001*\230\234\'\002\220\234\'\n\220\234\'\013',
),
_descriptor.MethodDescriptor(
name='GetAnnotation',
full_name='clarifai.api.V2.GetAnnotation',
index=16,
containing_service=None,
input_type=_GETANNOTATIONREQUEST,
output_type=_SINGLEANNOTATIONRESPONSE,
serialized_options=b'\202\323\344\223\002\236\001\022g/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/inputs/{input_id}/annotations/{annotation_id}Z3\0221/v2/inputs/{input_id}/annotations/{annotation_id}\230\234\'\002\220\234\'&\220\234\'\013\220\234\'\017',
),
_descriptor.MethodDescriptor(
name='ListAnnotations',
full_name='clarifai.api.V2.ListAnnotations',
index=17,
containing_service=None,
input_type=_LISTANNOTATIONSREQUEST,
output_type=_MULTIANNOTATIONRESPONSE,
serialized_options=b'\202\323\344\223\002Z\022E/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/annotationsZ\021\022\017/v2/annotations\230\234\'\002\220\234\'&\220\234\'\013\220\234\'\017',
),
_descriptor.MethodDescriptor(
name='PostAnnotations',
full_name='clarifai.api.V2.PostAnnotations',
index=18,
containing_service=None,
input_type=_POSTANNOTATIONSREQUEST,
output_type=_MULTIANNOTATIONRESPONSE,
serialized_options=b'\202\323\344\223\002`\"E/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/annotations:\001*Z\024\"\017/v2/annotations:\001*\230\234\'\002\220\234\'\005\220\234\'%\220\234\'&\220\234\'\013\220\234\'\017\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='PatchAnnotations',
full_name='clarifai.api.V2.PatchAnnotations',
index=19,
containing_service=None,
input_type=_PATCHANNOTATIONSREQUEST,
output_type=_MULTIANNOTATIONRESPONSE,
serialized_options=b'\202\323\344\223\002`2E/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/annotations:\001*Z\0242\017/v2/annotations:\001*\230\234\'\002\220\234\'\005\220\234\'%\220\234\'&\220\234\'\013\220\234\'\023\220\234\'\017',
),
_descriptor.MethodDescriptor(
name='PatchAnnotationsStatus',
full_name='clarifai.api.V2.PatchAnnotationsStatus',
index=20,
containing_service=None,
input_type=_PATCHANNOTATIONSSTATUSREQUEST,
output_type=_PATCHANNOTATIONSSTATUSRESPONSE,
serialized_options=b'\202\323\344\223\002`2[/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/task/{task_id}/annotations/status:\001*\230\234\'\002\220\234\'%\220\234\'&\220\234\'\013\220\234\'\023\220\234\'\017',
),
_descriptor.MethodDescriptor(
name='DeleteAnnotation',
full_name='clarifai.api.V2.DeleteAnnotation',
index=21,
containing_service=None,
input_type=_DELETEANNOTATIONREQUEST,
output_type=proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._BASERESPONSE,
serialized_options=b'\202\323\344\223\002\236\001*g/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/inputs/{input_id}/annotations/{annotation_id}Z3*1/v2/inputs/{input_id}/annotations/{annotation_id}\230\234\'\002\220\234\'%\220\234\'(\220\234\'&\220\234\'\023\220\234\'\017',
),
_descriptor.MethodDescriptor(
name='DeleteAnnotations',
full_name='clarifai.api.V2.DeleteAnnotations',
index=22,
containing_service=None,
input_type=_DELETEANNOTATIONSREQUEST,
output_type=proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._BASERESPONSE,
serialized_options=b'\202\323\344\223\002\320\001*L/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/inputs/annotations:\001*Z\033*\026/v2/inputs/annotations:\001*ZJ*E/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/annotations:\001*Z\024*\017/v2/annotations:\001*\230\234\'\002\220\234\'%\220\234\'(\220\234\'&\220\234\'\023\220\234\'\017',
),
_descriptor.MethodDescriptor(
name='PostAnnotationsSearches',
full_name='clarifai.api.V2.PostAnnotationsSearches',
index=23,
containing_service=None,
input_type=_POSTANNOTATIONSSEARCHESREQUEST,
output_type=_MULTISEARCHRESPONSE,
serialized_options=b'\202\323\344\223\002r\"N/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/annotations/searches:\001*Z\035\"\030/v2/annotations/searches:\001*\230\234\'\002\220\234\'&\220\234\'\013\220\234\'\005\220\234\'\017\220\234\'\003\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='GetInputCount',
full_name='clarifai.api.V2.GetInputCount',
index=24,
containing_service=None,
input_type=_GETINPUTCOUNTREQUEST,
output_type=_SINGLEINPUTCOUNTRESPONSE,
serialized_options=b'\202\323\344\223\002^\022G/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/inputs/statusZ\023\022\021/v2/inputs/status\230\234\'\002\220\234\'&\220\234\'\013\220\234\'\005',
),
_descriptor.MethodDescriptor(
name='StreamInputs',
full_name='clarifai.api.V2.StreamInputs',
index=25,
containing_service=None,
input_type=_STREAMINPUTSREQUEST,
output_type=_MULTIINPUTRESPONSE,
serialized_options=b'\202\323\344\223\002^\022G/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/inputs/streamZ\023\022\021/v2/inputs/stream\230\234\'\002\220\234\'&\220\234\'\013\220\234\'\005',
),
_descriptor.MethodDescriptor(
name='GetInputSamples',
full_name='clarifai.api.V2.GetInputSamples',
index=26,
containing_service=None,
input_type=_GETINPUTSAMPLESREQUEST,
output_type=_MULTIINPUTANNOTATIONRESPONSE,
serialized_options=b'\202\323\344\223\002\200\001\022X/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/tasks/{task_id}/inputs/samplesZ$\022\"/v2/tasks/{task_id}/inputs/samples\230\234\'\002\220\234\'&\220\234\'\013\220\234\'\005',
),
_descriptor.MethodDescriptor(
name='GetInput',
full_name='clarifai.api.V2.GetInput',
index=27,
containing_service=None,
input_type=_GETINPUTREQUEST,
output_type=_SINGLEINPUTRESPONSE,
serialized_options=b'\202\323\344\223\002f\022K/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/inputs/{input_id}Z\027\022\025/v2/inputs/{input_id}\230\234\'\002\220\234\'&\220\234\'\013\220\234\'\005',
),
_descriptor.MethodDescriptor(
name='ListInputs',
full_name='clarifai.api.V2.ListInputs',
index=28,
containing_service=None,
input_type=_LISTINPUTSREQUEST,
output_type=_MULTIINPUTRESPONSE,
serialized_options=b'\202\323\344\223\002P\022@/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/inputsZ\014\022\n/v2/inputs\230\234\'\002\220\234\'&\220\234\'\013\220\234\'\005',
),
_descriptor.MethodDescriptor(
name='PostInputs',
full_name='clarifai.api.V2.PostInputs',
index=29,
containing_service=None,
input_type=_POSTINPUTSREQUEST,
output_type=_MULTIINPUTRESPONSE,
serialized_options=b'\202\323\344\223\002V\"@/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/inputs:\001*Z\017\"\n/v2/inputs:\001*\230\234\'\002\220\234\'%\220\234\'&\220\234\'\013\220\234\'\004\220\234\'\005\220\234\'\017\220\234\'\002\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='PatchInputs',
full_name='clarifai.api.V2.PatchInputs',
index=30,
containing_service=None,
input_type=_PATCHINPUTSREQUEST,
output_type=_MULTIINPUTRESPONSE,
serialized_options=b'\202\323\344\223\002V2@/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/inputs:\001*Z\0172\n/v2/inputs:\001*\230\234\'\002\220\234\'%\220\234\'&\220\234\'\013\220\234\'\004\220\234\'\005\220\234\'\017',
),
_descriptor.MethodDescriptor(
name='DeleteInput',
full_name='clarifai.api.V2.DeleteInput',
index=31,
containing_service=None,
input_type=_DELETEINPUTREQUEST,
output_type=proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._BASERESPONSE,
serialized_options=b'\202\323\344\223\002f*K/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/inputs/{input_id}Z\027*\025/v2/inputs/{input_id}\230\234\'\002\220\234\'%\220\234\'(\220\234\'&\220\234\'\004\220\234\'\010\220\234\'\005\220\234\'\017\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='DeleteInputs',
full_name='clarifai.api.V2.DeleteInputs',
index=32,
containing_service=None,
input_type=_DELETEINPUTSREQUEST,
output_type=proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._BASERESPONSE,
serialized_options=b'\202\323\344\223\002V*@/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/inputs:\001*Z\017*\n/v2/inputs:\001*\230\234\'\002\220\234\'%\220\234\'(\220\234\'&\220\234\'\004\220\234\'\010\220\234\'\005\220\234\'\017\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='PostInputsSearches',
full_name='clarifai.api.V2.PostInputsSearches',
index=33,
containing_service=None,
input_type=_POSTINPUTSSEARCHESREQUEST,
output_type=_MULTISEARCHRESPONSE,
serialized_options=b'\202\323\344\223\002h\"I/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/inputs/searches:\001*Z\030\"\023/v2/inputs/searches:\001*\230\234\'\002\220\234\'&\220\234\'\013\220\234\'\005\220\234\'\017\220\234\'\003\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='PostModelOutputs',
full_name='clarifai.api.V2.PostModelOutputs',
index=34,
containing_service=None,
input_type=_POSTMODELOUTPUTSREQUEST,
output_type=_MULTIOUTPUTRESPONSE,
serialized_options=b'\202\323\344\223\002\246\002\"i/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/versions/{version_id}/outputs:\001*Z8\"3/v2/models/{model_id}/versions/{version_id}/outputs:\001*ZX\"S/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/outputs:\001*Z\"\"\035/v2/models/{model_id}/outputs:\001*\230\234\'\002\220\234\'\013\220\234\'\017\220\234\'\002',
),
_descriptor.MethodDescriptor(
name='GetModelType',
full_name='clarifai.api.V2.GetModelType',
index=35,
containing_service=None,
input_type=_GETMODELTYPEREQUEST,
output_type=_SINGLEMODELTYPERESPONSE,
serialized_options=b'\202\323\344\223\002|\022V/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/types/{model_type_id}Z\"\022 /v2/models/types/{model_type_id}\230\234\'\002\220\234\'\017',
),
_descriptor.MethodDescriptor(
name='ListOpenSourceLicenses',
full_name='clarifai.api.V2.ListOpenSourceLicenses',
index=36,
containing_service=None,
input_type=_LISTOPENSOURCELICENSESREQUEST,
output_type=_LISTOPENSOURCELICENSESRESPONSE,
serialized_options=b'\202\323\344\223\002\032\022\030/v2/open_source_licenses\230\234\'\001',
),
_descriptor.MethodDescriptor(
name='ListModelTypes',
full_name='clarifai.api.V2.ListModelTypes',
index=37,
containing_service=None,
input_type=_LISTMODELTYPESREQUEST,
output_type=_MULTIMODELTYPERESPONSE,
serialized_options=b'\202\323\344\223\002\\\022F/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/typesZ\022\022\020/v2/models/types\230\234\'\002\220\234\'\017',
),
_descriptor.MethodDescriptor(
name='GetModel',
full_name='clarifai.api.V2.GetModel',
index=38,
containing_service=None,
input_type=_GETMODELREQUEST,
output_type=_SINGLEMODELRESPONSE,
serialized_options=b'\202\323\344\223\002f\022K/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}Z\027\022\025/v2/models/{model_id}\230\234\'\002\220\234\'\013\220\234\'\017',
),
_descriptor.MethodDescriptor(
name='GetModelOutputInfo',
full_name='clarifai.api.V2.GetModelOutputInfo',
index=39,
containing_service=None,
input_type=_GETMODELREQUEST,
output_type=_SINGLEMODELRESPONSE,
serialized_options=b'\202\323\344\223\002\252\002\022W/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/output_infoZ#\022!/v2/models/{model_id}/output_infoZo\022m/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/versions/{version_id}/output_infoZ9\0227/v2/models/{model_id}/versions/{version_id}/output_info\230\234\'\002\220\234\'\013\220\234\'\017',
),
_descriptor.MethodDescriptor(
name='ListModels',
full_name='clarifai.api.V2.ListModels',
index=40,
containing_service=None,
input_type=_LISTMODELSREQUEST,
output_type=_MULTIMODELRESPONSE,
serialized_options=b'\202\323\344\223\002P\022@/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/modelsZ\014\022\n/v2/models\230\234\'\002\220\234\'\013\220\234\'\017',
),
_descriptor.MethodDescriptor(
name='PostModelsSearches',
full_name='clarifai.api.V2.PostModelsSearches',
index=41,
containing_service=None,
input_type=_POSTMODELSSEARCHESREQUEST,
output_type=_MULTIMODELRESPONSE,
serialized_options=b'\202\323\344\223\002h\"I/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/searches:\001*Z\030\"\023/v2/models/searches:\001*\230\234\'\002\220\234\'\013\220\234\'\017',
),
_descriptor.MethodDescriptor(
name='PostModels',
full_name='clarifai.api.V2.PostModels',
index=42,
containing_service=None,
input_type=_POSTMODELSREQUEST,
output_type=_SINGLEMODELRESPONSE,
serialized_options=b'\202\323\344\223\002V\"@/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models:\001*Z\017\"\n/v2/models:\001*\230\234\'\002\220\234\'\013\220\234\'\016\220\234\'\017\220\234\'\032\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='PatchModels',
full_name='clarifai.api.V2.PatchModels',
index=43,
containing_service=None,
input_type=_PATCHMODELSREQUEST,
output_type=_MULTIMODELRESPONSE,
serialized_options=b'\202\323\344\223\002V2@/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models:\001*Z\0172\n/v2/models:\001*\230\234\'\002\220\234\'\013\220\234\'\016\220\234\'\017\220\234\'\032',
),
_descriptor.MethodDescriptor(
name='DeleteModel',
full_name='clarifai.api.V2.DeleteModel',
index=44,
containing_service=None,
input_type=_DELETEMODELREQUEST,
output_type=proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._BASERESPONSE,
serialized_options=b'\202\323\344\223\002f*K/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}Z\027*\025/v2/models/{model_id}\230\234\'\002\220\234\'\013\220\234\'\016\220\234\'\021\220\234\'\017\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='DeleteModels',
full_name='clarifai.api.V2.DeleteModels',
index=45,
containing_service=None,
input_type=_DELETEMODELSREQUEST,
output_type=proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._BASERESPONSE,
serialized_options=b'\202\323\344\223\002V*@/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models:\001*Z\017*\n/v2/models:\001*\230\234\'\002\220\234\'\013\220\234\'\016\220\234\'\021\220\234\'\017\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='ListModelInputs',
full_name='clarifai.api.V2.ListModelInputs',
index=46,
containing_service=None,
input_type=_LISTMODELINPUTSREQUEST,
output_type=_MULTIINPUTRESPONSE,
serialized_options=b'\202\323\344\223\002\226\002\022R/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/inputsZ\036\022\034/v2/models/{model_id}/inputsZj\022h/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/versions/{version_id}/inputsZ4\0222/v2/models/{model_id}/versions/{version_id}/inputs\230\234\'\002\220\234\'&\220\234\'\013\220\234\'\005\220\234\'\017',
),
_descriptor.MethodDescriptor(
name='GetModelVersion',
full_name='clarifai.api.V2.GetModelVersion',
index=47,
containing_service=None,
input_type=_GETMODELVERSIONREQUEST,
output_type=_SINGLEMODELVERSIONRESPONSE,
serialized_options=b'\202\323\344\223\002\222\001\022a/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/versions/{version_id}Z-\022+/v2/models/{model_id}/versions/{version_id}\230\234\'\002\220\234\'\013\220\234\'\017',
),
_descriptor.MethodDescriptor(
name='ListModelVersions',
full_name='clarifai.api.V2.ListModelVersions',
index=48,
containing_service=None,
input_type=_LISTMODELVERSIONSREQUEST,
output_type=_MULTIMODELVERSIONRESPONSE,
serialized_options=b'\202\323\344\223\002x\022T/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/versionsZ \022\036/v2/models/{model_id}/versions\230\234\'\002\220\234\'\013\220\234\'\017',
),
_descriptor.MethodDescriptor(
name='PostModelVersions',
full_name='clarifai.api.V2.PostModelVersions',
index=49,
containing_service=None,
input_type=_POSTMODELVERSIONSREQUEST,
output_type=_SINGLEMODELRESPONSE,
serialized_options=b'\202\323\344\223\002~\"T/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/versions:\001*Z#\"\036/v2/models/{model_id}/versions:\001*\230\234\'\002\220\234\'&\220\234\'\013\220\234\'\005\220\234\'\016\220\234\'\017\220\234\'\032\220\234\'\023\220\234\'5\220\234\'6',
),
_descriptor.MethodDescriptor(
name='PatchModelVersions',
full_name='clarifai.api.V2.PatchModelVersions',
index=50,
containing_service=None,
input_type=_PATCHMODELVERSIONSREQUEST,
output_type=_MULTIMODELVERSIONRESPONSE,
serialized_options=b'\202\323\344\223\002Y2T/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/versions:\001*\230\234\'\002\220\234\'\013\220\234\'\017\220\234\'\016\220\234\'\032',
),
_descriptor.MethodDescriptor(
name='DeleteModelVersion',
full_name='clarifai.api.V2.DeleteModelVersion',
index=51,
containing_service=None,
input_type=_DELETEMODELVERSIONREQUEST,
output_type=proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._BASERESPONSE,
serialized_options=b'\202\323\344\223\002\222\001*a/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/versions/{version_id}Z-*+/v2/models/{model_id}/versions/{version_id}\230\234\'\002\220\234\'\013\220\234\'\016\220\234\'\021\220\234\'\017\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='GetModelVersionMetrics',
full_name='clarifai.api.V2.GetModelVersionMetrics',
index=52,
containing_service=None,
input_type=_GETMODELVERSIONMETRICSREQUEST,
output_type=_SINGLEMODELVERSIONRESPONSE,
serialized_options=b'\202\323\344\223\002\242\001\022i/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/versions/{version_id}/metricsZ5\0223/v2/models/{model_id}/versions/{version_id}/metrics\230\234\'\002\220\234\'&\220\234\'\013\220\234\'\017\220\234\'\005\220\234\'5',
),
_descriptor.MethodDescriptor(
name='PostModelVersionMetrics',
full_name='clarifai.api.V2.PostModelVersionMetrics',
index=53,
containing_service=None,
input_type=_POSTMODELVERSIONMETRICSREQUEST,
output_type=_SINGLEMODELVERSIONRESPONSE,
serialized_options=b'\202\323\344\223\002\250\001\"i/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/versions/{version_id}/metrics:\001*Z8\"3/v2/models/{model_id}/versions/{version_id}/metrics:\001*\230\234\'\002\220\234\'&\220\234\'\013\220\234\'\005\220\234\'\016\220\234\'\017\220\234\'\032\220\234\'\002\220\234\'\023\220\234\'5\220\234\'6',
),
_descriptor.MethodDescriptor(
name='ListModelReferences',
full_name='clarifai.api.V2.ListModelReferences',
index=54,
containing_service=None,
input_type=_LISTMODELREFERENCESREQUEST,
output_type=_MULTIMODELREFERENCERESPONSE,
serialized_options=b'\202\323\344\223\002|\022V/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/referencesZ\"\022 /v2/models/{model_id}/references\230\234\'\002\220\234\'\017',
),
_descriptor.MethodDescriptor(
name='GetModelVersionInputExample',
full_name='clarifai.api.V2.GetModelVersionInputExample',
index=55,
containing_service=None,
input_type=_GETMODELVERSIONINPUTEXAMPLEREQUEST,
output_type=_SINGLEMODELVERSIONINPUTEXAMPLERESPONSE,
serialized_options=b'\202\323\344\223\002\327\001\022\203\001/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/versions/{model_version_id}/input_examples/{example_id}ZO\022M/v2/models/{model_id}/versions/{model_version_id}/input_examples/{example_id}\230\234\'\002\220\234\'\017',
),
_descriptor.MethodDescriptor(
name='ListModelVersionInputExamples',
full_name='clarifai.api.V2.ListModelVersionInputExamples',
index=56,
containing_service=None,
input_type=_LISTMODELVERSIONINPUTEXAMPLESREQUEST,
output_type=_MULTIMODELVERSIONINPUTEXAMPLERESPONSE,
serialized_options=b'\202\323\344\223\002\274\001\022v/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/models/{model_id}/versions/{model_version_id}/input_examplesZB\022@/v2/models/{model_id}/versions/{model_version_id}/input_examples\230\234\'\002\220\234\'\017',
),
_descriptor.MethodDescriptor(
name='GetWorkflow',
full_name='clarifai.api.V2.GetWorkflow',
index=57,
containing_service=None,
input_type=_GETWORKFLOWREQUEST,
output_type=_SINGLEWORKFLOWRESPONSE,
serialized_options=b'\202\323\344\223\002r\022Q/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows/{workflow_id}Z\035\022\033/v2/workflows/{workflow_id}\230\234\'\002\220\234\'\017\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='ListWorkflows',
full_name='clarifai.api.V2.ListWorkflows',
index=58,
containing_service=None,
input_type=_LISTWORKFLOWSREQUEST,
output_type=_MULTIWORKFLOWRESPONSE,
serialized_options=b'\202\323\344\223\002V\022C/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflowsZ\017\022\r/v2/workflows\230\234\'\002\220\234\'\017\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='PostWorkflows',
full_name='clarifai.api.V2.PostWorkflows',
index=59,
containing_service=None,
input_type=_POSTWORKFLOWSREQUEST,
output_type=_MULTIWORKFLOWRESPONSE,
serialized_options=b'\202\323\344\223\002\\\"C/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows:\001*Z\022\"\r/v2/workflows:\001*\230\234\'\002\220\234\'\017\220\234\'\022\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='PatchWorkflows',
full_name='clarifai.api.V2.PatchWorkflows',
index=60,
containing_service=None,
input_type=_PATCHWORKFLOWSREQUEST,
output_type=_MULTIWORKFLOWRESPONSE,
serialized_options=b'\202\323\344\223\002\\2C/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows:\001*Z\0222\r/v2/workflows:\001*\230\234\'\002\220\234\'\017\220\234\'\022\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='DeleteWorkflow',
full_name='clarifai.api.V2.DeleteWorkflow',
index=61,
containing_service=None,
input_type=_DELETEWORKFLOWREQUEST,
output_type=proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._BASERESPONSE,
serialized_options=b'\202\323\344\223\002r*Q/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows/{workflow_id}Z\035*\033/v2/workflows/{workflow_id}\230\234\'\002\220\234\'\022\220\234\'\025\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='DeleteWorkflows',
full_name='clarifai.api.V2.DeleteWorkflows',
index=62,
containing_service=None,
input_type=_DELETEWORKFLOWSREQUEST,
output_type=proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._BASERESPONSE,
serialized_options=b'\202\323\344\223\002\\*C/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows:\001*Z\022*\r/v2/workflows:\001*\230\234\'\002\220\234\'\022\220\234\'\025\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='PostWorkflowResults',
full_name='clarifai.api.V2.PostWorkflowResults',
index=63,
containing_service=None,
input_type=_POSTWORKFLOWRESULTSREQUEST,
output_type=_POSTWORKFLOWRESULTSRESPONSE,
serialized_options=b'\202\323\344\223\002\376\001\"o/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows/{workflow_id}/versions/{version_id}/results:\001*Z^\"Y/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows/{workflow_id}/results:\001*Z(\"#/v2/workflows/{workflow_id}/results:\001*\230\234\'\002\220\234\'\017\220\234\'\013\220\234\'\002\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='PostWorkflowResultsSimilarity',
full_name='clarifai.api.V2.PostWorkflowResultsSimilarity',
index=64,
containing_service=None,
input_type=_POSTWORKFLOWRESULTSSIMILARITYREQUEST,
output_type=_POSTWORKFLOWRESULTSSIMILARITYRESPONSE,
serialized_options=b'\202\323\344\223\002\237\002\"z/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows/{workflow_id}/versions/{version_id}/results/similarity:\001*Zi\"d/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows/{workflow_id}/results/similarity:\001*Z3\"./v2/workflows/{workflow_id}/results/similarity:\001*\230\234\'\002\220\234\'\017\220\234\'\013\220\234\'\002\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='ListWorkflowVersions',
full_name='clarifai.api.V2.ListWorkflowVersions',
index=65,
containing_service=None,
input_type=_LISTWORKFLOWVERSIONSREQUEST,
output_type=_MULTIWORKFLOWVERSIONRESPONSE,
serialized_options=b'\202\323\344\223\002\204\001\022Z/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows/{workflow_id}/versionsZ&\022$/v2/workflows/{workflow_id}/versions\230\234\'\002\220\234\'\017\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='GetWorkflowVersion',
full_name='clarifai.api.V2.GetWorkflowVersion',
index=66,
containing_service=None,
input_type=_GETWORKFLOWVERSIONREQUEST,
output_type=_SINGLEWORKFLOWVERSIONRESPONSE,
serialized_options=b'\202\323\344\223\002\260\001\022p/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows/{workflow_id}/versions/{workflow_version_id}Z<\022:/v2/workflows/{workflow_id}/versions/{workflow_version_id}\230\234\'\002\220\234\'\017\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='DeleteWorkflowVersions',
full_name='clarifai.api.V2.DeleteWorkflowVersions',
index=67,
containing_service=None,
input_type=_DELETEWORKFLOWVERSIONSREQUEST,
output_type=proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._BASERESPONSE,
serialized_options=b'\202\323\344\223\002\212\001*Z/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows/{workflow_id}/versions:\001*Z)*$/v2/workflows/{workflow_id}/versions:\001*\230\234\'\002\220\234\'\023\220\234\'\022\220\234\'\025',
),
_descriptor.MethodDescriptor(
name='PatchWorkflowVersions',
full_name='clarifai.api.V2.PatchWorkflowVersions',
index=68,
containing_service=None,
input_type=_PATCHWORKFLOWVERSIONSREQUEST,
output_type=_MULTIWORKFLOWVERSIONRESPONSE,
serialized_options=b'\202\323\344\223\002\212\0012Z/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/workflows/{workflow_id}/versions:\001*Z)2$/v2/workflows/{workflow_id}/versions:\001*\230\234\'\002\220\234\'\017\220\234\'\023\220\234\'\022',
),
_descriptor.MethodDescriptor(
name='GetKey',
full_name='clarifai.api.V2.GetKey',
index=69,
containing_service=None,
input_type=_GETKEYREQUEST,
output_type=_SINGLEKEYRESPONSE,
serialized_options=b'\202\323\344\223\002/\022-/v2/users/{user_app_id.user_id}/keys/{key_id}\230\234\'\005\220\234\'0',
),
_descriptor.MethodDescriptor(
name='ListKeys',
full_name='clarifai.api.V2.ListKeys',
index=70,
containing_service=None,
input_type=_LISTKEYSREQUEST,
output_type=_MULTIKEYRESPONSE,
serialized_options=b'\202\323\344\223\002&\022$/v2/users/{user_app_id.user_id}/keys\230\234\'\005\220\234\'0',
),
_descriptor.MethodDescriptor(
name='ListAppKeys',
full_name='clarifai.api.V2.ListAppKeys',
index=71,
containing_service=None,
input_type=_LISTAPPKEYSREQUEST,
output_type=_MULTIKEYRESPONSE,
serialized_options=b'\202\323\344\223\002@\022>/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/keys\230\234\'\005\220\234\'0',
),
_descriptor.MethodDescriptor(
name='DeleteKey',
full_name='clarifai.api.V2.DeleteKey',
index=72,
containing_service=None,
input_type=_DELETEKEYREQUEST,
output_type=proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._BASERESPONSE,
serialized_options=b'\202\323\344\223\002/*-/v2/users/{user_app_id.user_id}/keys/{key_id}\230\234\'\005\220\234\'/\220\234\'1\220\234\'0',
),
_descriptor.MethodDescriptor(
name='PostKeys',
full_name='clarifai.api.V2.PostKeys',
index=73,
containing_service=None,
input_type=_POSTKEYSREQUEST,
output_type=_MULTIKEYRESPONSE,
serialized_options=b'\202\323\344\223\002)\"$/v2/users/{user_app_id.user_id}/keys:\001*\230\234\'\005\220\234\'-\220\234\'/\220\234\'0',
),
_descriptor.MethodDescriptor(
name='PatchKeys',
full_name='clarifai.api.V2.PatchKeys',
index=74,
containing_service=None,
input_type=_PATCHKEYSREQUEST,
output_type=_MULTIKEYRESPONSE,
serialized_options=b'\202\323\344\223\002)2$/v2/users/{user_app_id.user_id}/keys:\001*\230\234\'\005\220\234\'/\220\234\'0',
),
_descriptor.MethodDescriptor(
name='MyScopes',
full_name='clarifai.api.V2.MyScopes',
index=75,
containing_service=None,
input_type=_MYSCOPESREQUEST,
output_type=_MULTISCOPERESPONSE,
serialized_options=b'\202\323\344\223\002e\022B/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/myscopesZ\016\022\014/v2/myscopesZ\017\022\r/v2/my_scopes\230\234\'\002',
),
_descriptor.MethodDescriptor(
name='ListScopes',
full_name='clarifai.api.V2.ListScopes',
index=76,
containing_service=None,
input_type=_LISTSCOPESREQUEST,
output_type=_MULTISCOPEDEPSRESPONSE,
serialized_options=b'\202\323\344\223\002(\022&/v2/users/{user_app_id.user_id}/scopes\230\234\'\003',
),
_descriptor.MethodDescriptor(
name='GetApp',
full_name='clarifai.api.V2.GetApp',
index=77,
containing_service=None,
input_type=_GETAPPREQUEST,
output_type=_SINGLEAPPRESPONSE,
serialized_options=b'\202\323\344\223\002;\0229/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}\230\234\'\005\220\234\'-\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='ListApps',
full_name='clarifai.api.V2.ListApps',
index=78,
containing_service=None,
input_type=_LISTAPPSREQUEST,
output_type=_MULTIAPPRESPONSE,
serialized_options=b'\202\323\344\223\0022\022$/v2/users/{user_app_id.user_id}/appsZ\n\022\010/v2/apps\230\234\'\005\220\234\'-\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='DeleteApp',
full_name='clarifai.api.V2.DeleteApp',
index=79,
containing_service=None,
input_type=_DELETEAPPREQUEST,
output_type=proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._BASERESPONSE,
serialized_options=b'\202\323\344\223\002;*9/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}\230\234\'\005\220\234\',\220\234\'.\220\234\'-\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='PostApps',
full_name='clarifai.api.V2.PostApps',
index=80,
containing_service=None,
input_type=_POSTAPPSREQUEST,
output_type=_MULTIAPPRESPONSE,
serialized_options=b'\202\323\344\223\002)\"$/v2/users/{user_app_id.user_id}/apps:\001*\230\234\'\005\220\234\',\220\234\'-\220\234\'\017\220\234\'\023\220\234\'\022',
),
_descriptor.MethodDescriptor(
name='PatchApps',
full_name='clarifai.api.V2.PatchApps',
index=81,
containing_service=None,
input_type=_PATCHAPPSREQUEST,
output_type=_MULTIAPPRESPONSE,
serialized_options=b'\202\323\344\223\002)2$/v2/users/{user_app_id.user_id}/apps:\001*\230\234\'\005\220\234\',\220\234\'-',
),
_descriptor.MethodDescriptor(
name='PostAppsSearches',
full_name='clarifai.api.V2.PostAppsSearches',
index=82,
containing_service=None,
input_type=_POSTAPPSSEARCHESREQUEST,
output_type=_MULTIAPPRESPONSE,
serialized_options=b'\202\323\344\223\0022\"-/v2/users/{user_app_id.user_id}/apps/searches:\001*\230\234\'\005\220\234\'-\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='PostValidatePassword',
full_name='clarifai.api.V2.PostValidatePassword',
index=83,
containing_service=None,
input_type=_POSTVALIDATEPASSWORDREQUEST,
output_type=_SINGLEPASSWORDVALIDATIONRESPONSE,
serialized_options=b'\202\323\344\223\002R\"1/v2/users/{user_app_id.user_id}/validate_password:\001*Z\032\"\025/v2/validate_password:\001*\230\234\'\005\220\234\':',
),
_descriptor.MethodDescriptor(
name='GetSearch',
full_name='clarifai.api.V2.GetSearch',
index=84,
containing_service=None,
input_type=_GETSEARCHREQUEST,
output_type=_SINGLESEARCHRESPONSE,
serialized_options=b'\202\323\344\223\002^\022G/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/searches/{id}Z\023\022\021/v2/searches/{id}\230\234\'\002\220\234\'\003',
),
_descriptor.MethodDescriptor(
name='ListSearches',
full_name='clarifai.api.V2.ListSearches',
index=85,
containing_service=None,
input_type=_LISTSEARCHESREQUEST,
output_type=_MULTISEARCHRESPONSE,
serialized_options=b'\202\323\344\223\002T\022B/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/searchesZ\016\022\014/v2/searches\230\234\'\002\220\234\'\003',
),
_descriptor.MethodDescriptor(
name='PostSearches',
full_name='clarifai.api.V2.PostSearches',
index=86,
containing_service=None,
input_type=_POSTSEARCHESREQUEST,
output_type=_MULTISEARCHRESPONSE,
serialized_options=b'\202\323\344\223\002Z\"B/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/searches:\001*Z\021\"\014/v2/searches:\001*\230\234\'\002\220\234\'&\220\234\'\013\220\234\'\005\220\234\'\017\220\234\'\003\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='PostSearchesByID',
full_name='clarifai.api.V2.PostSearchesByID',
index=87,
containing_service=None,
input_type=_POSTSEARCHESBYIDREQUEST,
output_type=_MULTISEARCHRESPONSE,
serialized_options=b'\202\323\344\223\002d\"G/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/searches/{id}:\001*Z\026\"\021/v2/searches/{id}:\001*\230\234\'\002\220\234\'\003',
),
_descriptor.MethodDescriptor(
name='PostAnnotationSearchMetrics',
full_name='clarifai.api.V2.PostAnnotationSearchMetrics',
index=88,
containing_service=None,
input_type=_POSTANNOTATIONSEARCHMETRICSREQUEST,
output_type=_MULTIANNOTATIONSEARCHMETRICSRESPONSE,
serialized_options=b'\202\323\344\223\002\202\001\"V/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/annotations/searches/metrics:\001*Z%\" /v2/annotations/searches/metrics:\001*\230\234\'\002\220\234\'&\220\234\'\013\220\234\'\005\220\234\'5\220\234\'6\220\234\'\017\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='GetAnnotationSearchMetrics',
full_name='clarifai.api.V2.GetAnnotationSearchMetrics',
index=89,
containing_service=None,
input_type=_GETANNOTATIONSEARCHMETRICSREQUEST,
output_type=_MULTIANNOTATIONSEARCHMETRICSRESPONSE,
serialized_options=b'\202\323\344\223\002\206\001\022[/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/annotations/searches/metrics/{id}Z\'\022%/v2/annotations/searches/metrics/{id}\230\234\'\002\220\234\'&\220\234\'\013\220\234\'\005\220\234\'5',
),
_descriptor.MethodDescriptor(
name='ListAnnotationSearchMetrics',
full_name='clarifai.api.V2.ListAnnotationSearchMetrics',
index=90,
containing_service=None,
input_type=_LISTANNOTATIONSEARCHMETRICSREQUEST,
output_type=_MULTIANNOTATIONSEARCHMETRICSRESPONSE,
serialized_options=b'\202\323\344\223\002|\022V/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/annotations/searches/metricsZ\"\022 /v2/annotations/searches/metrics\230\234\'\002\220\234\'&\220\234\'\013\220\234\'\005\220\234\'5',
),
_descriptor.MethodDescriptor(
name='DeleteAnnotationSearchMetrics',
full_name='clarifai.api.V2.DeleteAnnotationSearchMetrics',
index=91,
containing_service=None,
input_type=_DELETEANNOTATIONSEARCHMETRICSREQUEST,
output_type=proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._BASERESPONSE,
serialized_options=b'\202\323\344\223\002\206\001*[/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/annotations/searches/metrics/{id}Z\'*%/v2/annotations/searches/metrics/{id}\230\234\'\002\220\234\'5\220\234\'6\220\234\'?',
),
_descriptor.MethodDescriptor(
name='DeleteSearch',
full_name='clarifai.api.V2.DeleteSearch',
index=92,
containing_service=None,
input_type=_DELETESEARCHREQUEST,
output_type=proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._BASERESPONSE,
serialized_options=b'\202\323\344\223\002^*G/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/searches/{id}Z\023*\021/v2/searches/{id}\230\234\'\002\220\234\'\003',
),
_descriptor.MethodDescriptor(
name='ListStatusCodes',
full_name='clarifai.api.V2.ListStatusCodes',
index=93,
containing_service=None,
input_type=_LISTSTATUSCODESREQUEST,
output_type=_MULTISTATUSCODERESPONSE,
serialized_options=b'\202\323\344\223\002\022\022\020/v2/status_codes\230\234\'\001',
),
_descriptor.MethodDescriptor(
name='GetStatusCode',
full_name='clarifai.api.V2.GetStatusCode',
index=94,
containing_service=None,
input_type=_GETSTATUSCODEREQUEST,
output_type=_SINGLESTATUSCODERESPONSE,
serialized_options=b'\202\323\344\223\002#\022!/v2/status_codes/{status_code_id}\230\234\'\001',
),
_descriptor.MethodDescriptor(
name='ListCollaborators',
full_name='clarifai.api.V2.ListCollaborators',
index=95,
containing_service=None,
input_type=_LISTCOLLABORATORSREQUEST,
output_type=_MULTICOLLABORATORSRESPONSE,
serialized_options=b'\202\323\344\223\002I\022G/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/collaborators\230\234\'\002\220\234\'2',
),
_descriptor.MethodDescriptor(
name='PostCollaborators',
full_name='clarifai.api.V2.PostCollaborators',
index=96,
containing_service=None,
input_type=_POSTCOLLABORATORSREQUEST,
output_type=_MULTICOLLABORATORSRESPONSE,
serialized_options=b'\202\323\344\223\002L\"G/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/collaborators:\001*\230\234\'\002\220\234\'3\220\234\'2',
),
_descriptor.MethodDescriptor(
name='PatchCollaborators',
full_name='clarifai.api.V2.PatchCollaborators',
index=97,
containing_service=None,
input_type=_PATCHCOLLABORATORSREQUEST,
output_type=_MULTICOLLABORATORSRESPONSE,
serialized_options=b'\202\323\344\223\002L2G/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/collaborators:\001*\230\234\'\002\220\234\'3\220\234\'2\220\234\'4',
),
_descriptor.MethodDescriptor(
name='DeleteCollaborators',
full_name='clarifai.api.V2.DeleteCollaborators',
index=98,
containing_service=None,
input_type=_DELETECOLLABORATORSREQUEST,
output_type=proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._BASERESPONSE,
serialized_options=b'\202\323\344\223\002L*G/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/collaborators:\001*\230\234\'\002\220\234\'3\220\234\'2\220\234\'4\220\234\'7\220\234\'8',
),
_descriptor.MethodDescriptor(
name='ListCollaborations',
full_name='clarifai.api.V2.ListCollaborations',
index=99,
containing_service=None,
input_type=_LISTCOLLABORATIONSREQUEST,
output_type=_MULTICOLLABORATIONSRESPONSE,
serialized_options=b'\202\323\344\223\0020\022./v2/users/{user_app_id.user_id}/collaborations\230\234\'\003',
),
_descriptor.MethodDescriptor(
name='PostAppDuplications',
full_name='clarifai.api.V2.PostAppDuplications',
index=100,
containing_service=None,
input_type=_POSTAPPDUPLICATIONSREQUEST,
output_type=_MULTIAPPDUPLICATIONSRESPONSE,
serialized_options=b'\202\323\344\223\002K\"F/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/duplications:\001*\230\234\'\002\220\234\'\016\220\234\'\017\220\234\'\032\220\234\'\n\220\234\'\013\220\234\'\004\220\234\'\005\220\234\'%\220\234\'&\220\234\'\022\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='ListAppDuplications',
full_name='clarifai.api.V2.ListAppDuplications',
index=101,
containing_service=None,
input_type=_LISTAPPDUPLICATIONSREQUEST,
output_type=_MULTIAPPDUPLICATIONSRESPONSE,
serialized_options=b'\202\323\344\223\002.\022,/v2/users/{user_app_id.user_id}/duplications\230\234\'\003',
),
_descriptor.MethodDescriptor(
name='GetAppDuplication',
full_name='clarifai.api.V2.GetAppDuplication',
index=102,
containing_service=None,
input_type=_GETAPPDUPLICATIONREQUEST,
output_type=_SINGLEAPPDUPLICATIONRESPONSE,
serialized_options=b'\202\323\344\223\002C\022A/v2/users/{user_app_id.user_id}/duplications/{app_duplication_id}\230\234\'\003',
),
_descriptor.MethodDescriptor(
name='PostTasks',
full_name='clarifai.api.V2.PostTasks',
index=103,
containing_service=None,
input_type=_POSTTASKSREQUEST,
output_type=_MULTITASKRESPONSE,
serialized_options=b'\202\323\344\223\002T\"?/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/tasks:\001*Z\016\"\t/v2/tasks:\001*\230\234\'\002\220\234\'7\220\234\'8\220\234\'\005\220\234\'\003\220\234\'\013\220\234\'\023\220\234\'\017\220\234\'%\220\234\'&',
),
_descriptor.MethodDescriptor(
name='GetTaskAnnotationCount',
full_name='clarifai.api.V2.GetTaskAnnotationCount',
index=104,
containing_service=None,
input_type=_GETTASKCOUNTREQUEST,
output_type=_SINGLETASKCOUNTRESPONSE,
serialized_options=b'\202\323\344\223\002\206\001\022[/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/tasks/{task_id}/annotations/countZ\'\022%/v2/tasks/{task_id}/annotations/count\230\234\'\002\220\234\'8',
),
_descriptor.MethodDescriptor(
name='GetTaskInputCount',
full_name='clarifai.api.V2.GetTaskInputCount',
index=105,
containing_service=None,
input_type=_GETTASKCOUNTREQUEST,
output_type=_SINGLETASKCOUNTRESPONSE,
serialized_options=b'\202\323\344\223\002|\022V/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/tasks/{task_id}/inputs/countZ\"\022 /v2/tasks/{task_id}/inputs/count\230\234\'\002\220\234\'8',
),
_descriptor.MethodDescriptor(
name='GetTask',
full_name='clarifai.api.V2.GetTask',
index=106,
containing_service=None,
input_type=_GETTASKREQUEST,
output_type=_SINGLETASKRESPONSE,
serialized_options=b'\202\323\344\223\002b\022I/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/tasks/{task_id}Z\025\022\023/v2/tasks/{task_id}\230\234\'\002\220\234\'8\220\234\'\003\220\234\'\013\220\234\'\023\220\234\'\017',
),
_descriptor.MethodDescriptor(
name='ListTasks',
full_name='clarifai.api.V2.ListTasks',
index=107,
containing_service=None,
input_type=_LISTTASKSREQUEST,
output_type=_MULTITASKRESPONSE,
serialized_options=b'\202\323\344\223\002N\022?/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/tasksZ\013\022\t/v2/tasks\230\234\'\002\220\234\'8\220\234\'\003\220\234\'\013\220\234\'\023\220\234\'\017',
),
_descriptor.MethodDescriptor(
name='PatchTasks',
full_name='clarifai.api.V2.PatchTasks',
index=108,
containing_service=None,
input_type=_PATCHTASKSREQUEST,
output_type=_MULTITASKRESPONSE,
serialized_options=b'\202\323\344\223\002T2?/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/tasks:\001*Z\0162\t/v2/tasks:\001*\230\234\'\002\220\234\'7\220\234\'8\220\234\'\005\220\234\'\003\220\234\'\013\220\234\'\023\220\234\'\017\220\234\'%\220\234\'&',
),
_descriptor.MethodDescriptor(
name='DeleteTasks',
full_name='clarifai.api.V2.DeleteTasks',
index=109,
containing_service=None,
input_type=_DELETETASKSREQUEST,
output_type=proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._BASERESPONSE,
serialized_options=b'\202\323\344\223\002T*?/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/tasks:\001*Z\016*\t/v2/tasks:\001*\230\234\'\002\220\234\'7\220\234\'8\220\234\'F',
),
_descriptor.MethodDescriptor(
name='PostCollectors',
full_name='clarifai.api.V2.PostCollectors',
index=110,
containing_service=None,
input_type=_POSTCOLLECTORSREQUEST,
output_type=_MULTICOLLECTORRESPONSE,
serialized_options=b'\202\323\344\223\002^\"D/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/collectors:\001*Z\023\"\016/v2/collectors:\001*\230\234\'\002\220\234\'\004\220\234\'%\220\234\'&\220\234\')\220\234\'\013\220\234\'\004\220\234\'\005\220\234\'\017\220\234\'\002\220\234\'\023',
),
_descriptor.MethodDescriptor(
name='GetCollector',
full_name='clarifai.api.V2.GetCollector',
index=111,
containing_service=None,
input_type=_GETCOLLECTORREQUEST,
output_type=_SINGLECOLLECTORRESPONSE,
serialized_options=b'\202\323\344\223\002v\022S/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/collectors/{collector_id}Z\037\022\035/v2/collectors/{collector_id}\230\234\'\002\220\234\'*',
),
_descriptor.MethodDescriptor(
name='ListCollectors',
full_name='clarifai.api.V2.ListCollectors',
index=112,
containing_service=None,
input_type=_LISTCOLLECTORSREQUEST,
output_type=_MULTICOLLECTORRESPONSE,
serialized_options=b'\202\323\344\223\002X\022D/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/collectorsZ\020\022\016/v2/collectors\230\234\'\002\220\234\'*',
),
_descriptor.MethodDescriptor(
name='PatchCollectors',
full_name='clarifai.api.V2.PatchCollectors',
index=113,
containing_service=None,
input_type=_PATCHCOLLECTORSREQUEST,
output_type=_MULTICOLLECTORRESPONSE,
serialized_options=b'\202\323\344\223\002^2D/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/collectors:\001*Z\0232\016/v2/collectors:\001*\230\234\'\002\220\234\')\220\234\'*',
),
_descriptor.MethodDescriptor(
name='DeleteCollectors',
full_name='clarifai.api.V2.DeleteCollectors',
index=114,
containing_service=None,
input_type=_DELETECOLLECTORSREQUEST,
output_type=proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._BASERESPONSE,
serialized_options=b'\202\323\344\223\002^*D/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/collectors:\001*Z\023*\016/v2/collectors:\001*\230\234\'\002\220\234\')\220\234\'+\220\234\'*',
),
_descriptor.MethodDescriptor(
name='PostStatValues',
full_name='clarifai.api.V2.PostStatValues',
index=115,
containing_service=None,
input_type=_POSTSTATVALUESREQUEST,
output_type=_MULTISTATVALUERESPONSE,
serialized_options=b'\202\323\344\223\002b\"F/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/stats/values:\001*Z\025\"\020/v2/stats/values:\001*\230\234\'\002',
),
_descriptor.MethodDescriptor(
name='PostStatValuesAggregate',
full_name='clarifai.api.V2.PostStatValuesAggregate',
index=116,
containing_service=None,
input_type=_POSTSTATVALUESAGGREGATEREQUEST,
output_type=_MULTISTATVALUEAGGREGATERESPONSE,
serialized_options=b'\202\323\344\223\002v\"P/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/stats/values/aggregate:\001*Z\037\"\032/v2/stats/values/aggregate:\001*\230\234\'\002',
),
_descriptor.MethodDescriptor(
name='PostTrendingMetricsView',
full_name='clarifai.api.V2.PostTrendingMetricsView',
index=117,
containing_service=None,
input_type=_POSTTRENDINGMETRICSVIEWREQUEST,
output_type=proto_dot_clarifai_dot_api_dot_status_dot_status__pb2._BASERESPONSE,
serialized_options=b'\202\323\344\223\002m\"h/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/trending_metrics/views/{view_type}/{object_id}:\001*\230\234\'\002',
),
_descriptor.MethodDescriptor(
name='ListTrendingMetricsViews',
full_name='clarifai.api.V2.ListTrendingMetricsViews',
index=118,
containing_service=None,
input_type=_LISTTRENDINGMETRICSVIEWSREQUEST,
output_type=_MULTITRENDINGMETRICSVIEWRESPONSE,
serialized_options=b'\202\323\344\223\002\210\001\022\\/v2/users/{user_app_id.user_id}/apps/{user_app_id.app_id}/trending_metrics/views/{view_type}Z(\022&/v2/trending_metrics/views/{view_type}\230\234\'\002',
),
])
_sym_db.RegisterServiceDescriptor(_V2)
DESCRIPTOR.services_by_name['V2'] = _V2
# @@protoc_insertion_point(module_scope)
| 49.157216 | 75,068 | 0.771696 |
ace1ef26ab8c700ffb8d43fa62ed6c941598aefd | 11,648 | py | Python | code/make_snp_file.py | kchennen/TRAPD | c86d70f1fbe1cfa3f410a7d4ef93f5e3c7dea7a6 | [
"MIT"
] | 1 | 2021-02-08T06:40:15.000Z | 2021-02-08T06:40:15.000Z | code/make_snp_file.py | kchennen/TRAPD | c86d70f1fbe1cfa3f410a7d4ef93f5e3c7dea7a6 | [
"MIT"
] | null | null | null | code/make_snp_file.py | kchennen/TRAPD | c86d70f1fbe1cfa3f410a7d4ef93f5e3c7dea7a6 | [
"MIT"
] | null | null | null | #!/usr/bin/python
import optparse
import operator
import re
import sys
import gzip
import pybedtools
from pybedtools import BedTool
#Parse options
# Command-line interface: a gzipped, annotated VCF in, a gene->SNPs table out.
opt_parser = optparse.OptionParser()
opt_parser.add_option("-v", "--vcffile", action="store", dest="vcffilename")
opt_parser.add_option("-o", "--outfile", action="store", dest="outfilename", default="snpfile.txt")
opt_parser.add_option("--genecolname", action="store", dest="genecolname")
#Filters
opt_parser.add_option("--includeinfo", action="append", dest="includeinfo")
opt_parser.add_option("--excludeinfo", action="append", dest="excludeinfo")
opt_parser.add_option("--includevep", action="append", dest="includevep")
opt_parser.add_option("--excludevep", action="append", dest="excludevep")
opt_parser.add_option("--pass", action="store_true", dest="passfilter")
opt_parser.add_option("--vep", action="store_true", dest="vep")
opt_parser.add_option("--snponly", action="store_true", dest="snponly")
opt_parser.add_option("--indelonly", action="store_true", dest="indelonly")
opt_parser.add_option("--bedfile", action="store", dest="bedfilename")
opt_parser.add_option("--snpformat", action="store", dest="snpformat", default="VCFID")
opt_parser.add_option("--genenull", action="store", dest="genenull", default=".,NA")
options, args = opt_parser.parse_args()

# Fail fast on obviously bad invocations.  OptionParser.error() already exits
# the process; the explicit sys.exit() calls below are kept from the original
# as a belt-and-braces measure.
if not options.vcffilename:
    opt_parser.error('A vcf file is needed')
    sys.exit()
if not options.vcffilename.endswith(".gz"):
    opt_parser.error('Is your vcf file gzipped?')
    sys.exit()
if not options.genecolname:
    opt_parser.error('An INFO field with the gene names to use must be provided')
    sys.exit()
if (options.includevep is not None or options.excludevep is not None) and not options.vep:
    opt_parser.error('--vep option must be supplied if using VEP annotations')
    sys.exit()
if options.snpformat not in ("VCFID", "CHRPOSREFALT"):
    opt_parser.error('SNP format must be "VCFID" or "CHRPOSREFALT"')
    sys.exit()
if options.snponly and options.indelonly:
    opt_parser.error('Please select only --snponly or --indelonly')
    sys.exit()
#Check to make sure all the filters seem well formed
def checkfilter(infofilter):
    """Return 1 when *infofilter* looks like FIELD[op]VALUE with a supported op, else 0."""
    supported_ops = ["<", ">", "<=", ">=", "=", "!=", "in", "%"]
    if "[" not in infofilter or infofilter.startswith("]") or infofilter.endswith("]"):
        return 0
    op_token = str(infofilter.split("[")[1].split("]")[0])
    if op_token in supported_ops:
        return 1
    return 0
#Read in vcf header and extract all INFO fields
# First header pass: collect every declared INFO field name and detect whether
# the contigs are named with a "chr" prefix.
info_fields = []
chrformat = "number"
vcffile = gzip.open(options.vcffilename, "rb")
for header_line in vcffile:
    if header_line[0] != "#":
        break  # end of header
    if "##INFO=<ID=" in header_line:
        info_fields.append(header_line.split("##INFO=<ID=")[1].split(",")[0])
    elif "##contig" in header_line:
        if "ID=chr" in header_line:
            chrformat = "chr"
vcffile.close()
#Read in vcf header to get VEP CSQ fields
# Second header pass (only with --vep): pull the pipe-separated CSQ column
# names out of the "ID=CSQ" description line.
if options.vep:
    vcffile = gzip.open(options.vcffilename, "rb")
    csq_found = 0
    for header_line in vcffile:
        if header_line[0] == "#" and ("ID=CSQ" in header_line):
            csq_anno = header_line.rstrip('\n').replace('"', '').strip('>').split("Format: ")[1].split("|")
            csq_found = 1
            break
    if csq_found == 0:
        sys.stdout.write("VEP CSQ annotations not found in vcf header\n")
        sys.exit()
    vcffile.close()
if options.vep:
    if options.genecolname not in csq_anno:
        sys.stdout.write("Gene column name not found in VEP annotations\n")
        sys.exit()
#Run through all filters to make sure they're okay
def _validate_filters(filters, known_fields):
    """Exit with a message if any filter is malformed or names an unknown field.

    *filters* is a list of FIELD[op]VALUE strings (or None for "no filters");
    *known_fields* are the field names declared in the VCF header.
    """
    if filters is None:
        return
    for flt in filters:
        if checkfilter(flt) == 0:
            sys.stdout.write(str(flt) + " is malformed\n")
            sys.exit()
        if flt.split("[")[0] not in known_fields:
            # BUGFIX: the VEP branches of the original printed the entry from
            # options.includeinfo/options.excludeinfo here instead of the
            # offending VEP filter (wrong message, and a crash when the INFO
            # list was None or shorter than the VEP list).
            sys.stdout.write(str(flt) + " is not in VCF file\n")
            sys.exit()

# INFO filters are checked against the header's INFO field names; VEP filters
# against the CSQ columns (csq_anno exists whenever --includevep/--excludevep
# are given, because the earlier option checks require --vep for them).
_validate_filters(options.includeinfo, info_fields)
_validate_filters(options.excludeinfo, info_fields)
if options.includevep is not None:
    _validate_filters(options.includevep, csq_anno)
if options.excludevep is not None:
    _validate_filters(options.excludevep, csq_anno)
#Test if something is a number
def is_number(s):
    """Return True when *s* parses as a float, False on ValueError."""
    try:
        float(s)
    except ValueError:
        return False
    return True
def test_include_info(filter, vcfline):
    """Return 1 if the INFO string *vcfline* passes inclusion *filter*, else 0.

    A variant whose INFO does not contain the filter's field at all is treated
    as passing.  Comparisons are made on the raw strings; only the first
    comma-separated value of the field is examined.
    """
    field = filter.split("[")[0]
    wanted = filter.split("]")[1]
    if field not in vcfline:
        return 1
    observed = (";" + vcfline).split(";" + field + "=")[1].split(";")[0].split(",")[0]
    op = filter.split("[")[1].split("]")[0]
    if op == "in":
        allowed = wanted.lstrip("(").rstrip(")").split(',')
        return 1 if any(observed == v for v in allowed) else 0
    return 1 if get_operator_fn(op)(observed, wanted) else 0
def test_exclude_info(filter, vcfline):
    """Return 0 if the INFO string *vcfline* matches exclusion *filter*, else 1.

    A variant whose INFO does not contain the filter's field at all passes
    (returns 1).  Comparisons are made on the raw strings; only the first
    comma-separated value of the field is examined.
    """
    field = filter.split("[")[0]
    unwanted = filter.split("]")[1]
    if field not in vcfline:
        return 1
    observed = (";" + vcfline).split(";" + field + "=")[1].split(";")[0].split(",")[0]
    op = filter.split("[")[1].split("]")[0]
    if op == "in":
        banned = unwanted.lstrip("(").rstrip(")").split(',')
        return 0 if any(observed == v for v in banned) else 1
    return 0 if get_operator_fn(op)(observed, unwanted) else 1
def test_include_vep(filter, vcfline, csq_anno):
    """Return 1 if the first CSQ annotation in *vcfline* passes inclusion *filter*, else 0.

    Lines with no CSQ= block pass.  Only the first transcript's annotation is
    inspected; comparisons are made on the raw strings.
    """
    field = filter.split("[")[0]
    csq_index = csq_anno.index(field)
    wanted = filter.split("]")[1]
    if "CSQ=" not in vcfline:
        return 1
    csq_fields = (";" + vcfline).split(";CSQ=")[1].split(";")[0].split("|")
    # NOTE(review): this bounds check mirrors the original and looks off by one
    # (it still allows csq_index == len(csq_fields), which would IndexError below).
    if (csq_index - 1) > len(csq_fields):
        return 1
    observed = csq_fields[csq_index]
    op = filter.split("[")[1].split("]")[0]
    if op == "in":
        allowed = wanted.lstrip("(").rstrip(")").split(',')
        return 1 if any(observed == v for v in allowed) else 0
    return 1 if get_operator_fn(op)(observed, wanted) else 0
def test_exclude_vep(filter, vcfline, csq_anno):
    """Return 0 if the first CSQ annotation in *vcfline* matches exclusion *filter*, else 1.

    Lines with no CSQ= block pass (return 1).  Only the first transcript's
    annotation is inspected; comparisons are made on the raw strings.
    """
    field = filter.split("[")[0]
    csq_index = csq_anno.index(field)
    unwanted = filter.split("]")[1]
    if "CSQ=" not in vcfline:
        return 1
    csq_fields = (";" + vcfline).split(";CSQ=")[1].split(";")[0].split("|")
    # NOTE(review): this bounds check mirrors the original and looks off by one
    # (it still allows csq_index == len(csq_fields), which would IndexError below).
    if (csq_index - 1) > len(csq_fields):
        return 1
    observed = csq_fields[csq_index]
    op = filter.split("[")[1].split("]")[0]
    if op == "in":
        banned = unwanted.lstrip("(").rstrip(")").split(',')
        return 0 if any(observed == v for v in banned) else 1
    return 0 if get_operator_fn(op)(observed, unwanted) else 1
def find_vep_gene(genecolname, vcfline, csq_anno):
    """Return the *genecolname* column of the first CSQ entry in *vcfline*, or "" if no CSQ."""
    csq_index = csq_anno.index(genecolname)
    if "CSQ" not in vcfline:
        return ""
    first_transcript = (";" + vcfline).split(";CSQ=")[1].split(";")[0].split(",")[0]
    return first_transcript.split("|")[csq_index]
def find_info_gene(genecolname, vcfline):
    """Return the value of INFO key *genecolname* in *vcfline*, or "" when absent.

    BUGFIX: the original fell through to ``return genename`` with *genename*
    unbound when the key was missing, raising UnboundLocalError.  Returning ""
    mirrors find_vep_gene; empty gene names are skipped when the SNP table is
    written out, so the overall behavior stays safe.
    """
    if genecolname in vcfline:
        return (";" + vcfline).split(";" + genecolname + "=")[1].split(";")[0]
    return ""
#Function to match operator strings
def get_operator_fn(op):
    """Map a filter operator token to its comparison function.

    BUGFIX: '>=' previously mapped to ``operator.gt``, so ">=" filters silently
    behaved like ">"; it now maps to ``operator.ge``.

    Note the comparisons are applied to the raw VCF strings, and '%' means
    "left operand contains right operand" (``operator.contains``).
    Raises KeyError for an unsupported token.
    """
    return {
        '<': operator.lt,
        '<=': operator.le,
        '>': operator.gt,
        '>=': operator.ge,
        '=': operator.eq,
        '!=': operator.ne,
        '%': operator.contains,
    }[op]
#Create empty snptable
# Maps gene name -> [gene, [snp_id, ...]]; written to the output file at the end.
snptable = {}

#Open vcf file
# Restrict the VCF to the BED regions if one was given; otherwise subtract a
# dummy interval on a non-existent contig, which leaves the VCF unchanged but
# still yields a concrete temp file from pybedtools either way.
vcffile = BedTool(options.vcffilename)
if options.bedfilename is not None:
    bed = BedTool(options.bedfilename)
    vcffile_temp = vcffile.intersect(bed)
else:
    if chrformat == "chr":
        dummy_bed = BedTool('chr1000 100000000 100000001', from_string=True)
    else:
        dummy_bed = BedTool('1000 100000000 100000001', from_string=True)
    vcffile_temp = vcffile.subtract(dummy_bed)

for line_vcf1 in open(vcffile_temp.fn):
    line_vcf = line_vcf1.rstrip().split('\t')
    keep = 1  # 1 while the variant is still a candidate, 0 once any filter fails
    if line_vcf[0][0] != "#":
        if keep == 1 and options.passfilter:
            if line_vcf[6] != "PASS":
                keep = 0
        if keep == 1 and options.snponly:
            if len(line_vcf[3]) > 1 or len(line_vcf[4]) > 1:
                # BUGFIX: was "keep==0" (a no-op comparison), so --snponly
                # never actually excluded indels.
                keep = 0
        if keep == 1 and options.indelonly:
            if len(line_vcf[3]) == 1 and len(line_vcf[4]) == 1:
                # BUGFIX: was "keep==0"; --indelonly never excluded SNVs.
                keep = 0
        #Go through INFO field filters
        if keep == 1 and options.includeinfo is not None:
            for flt in options.includeinfo:
                keep = test_include_info(flt, line_vcf[7])
                if keep != 1:
                    break
        if keep == 1 and options.excludeinfo is not None:
            for flt in options.excludeinfo:
                keep = test_exclude_info(flt, line_vcf[7])
                if keep != 1:
                    break
        #Go through INFO/VEP field filters
        if keep == 1 and options.includevep is not None:
            for flt in options.includevep:
                keep = test_include_vep(flt, line_vcf[7], csq_anno)
                if keep != 1:
                    break
        if keep == 1 and options.excludevep is not None:
            for flt in options.excludevep:
                keep = test_exclude_vep(flt, line_vcf[7], csq_anno)
                if keep != 1:
                    break
        #If variant meets all filters, then extract gene name
        if keep == 1:
            if options.vep:
                gene = find_vep_gene(options.genecolname, line_vcf[7], csq_anno)
            else:
                gene = find_info_gene(options.genecolname, line_vcf[7])
            if gene not in options.genenull.split(","):
                if options.snpformat == "VCFID":
                    snpid = str(line_vcf[2])
                else:
                    # CHRPOSREFALT; lstrip("chr") strips any of the characters
                    # c/h/r from the left of the contig name, as in the original.
                    snpid = str(line_vcf[0].lstrip("chr")) + ":" + str(line_vcf[1]) + ":" + str(line_vcf[3]) + ":" + str(line_vcf[4])
                if gene not in snptable:
                    snptable[gene] = [gene, [snpid]]
                else:
                    snptable[gene][1].append(snpid)

pybedtools.cleanup()

#Write Output
# One "GENE<TAB>comma-separated-SNP-ids" row per non-empty gene name.
outfile = open(options.outfilename, "w")
outfile.write("#GENE\tSNPS\n")
for x in snptable:
    if len(x) > 0:
        snp_out = ','.join(snptable[x][1])
        outfile.write(str(x) + "\t" + snp_out + "\n")
outfile.close()
#python make_snp_file.py -o test.out.txt -v gnomad.test.vcf.gz --vep --genecolname SYMBOL --snpformat CHRPOSREFALT --pass --includeinfo "AC[<]5"
| 32.903955 | 189 | 0.655821 |
ace1efb294f815230c1ebc90ae5b138680d63e30 | 8,359 | py | Python | cdsdashboards/app.py | MarcSkovMadsen/cdsdashboards | 80e1d8a91e5a9cf119ce4b62f8bc58a54461a32b | [
"BSD-3-Clause"
] | null | null | null | cdsdashboards/app.py | MarcSkovMadsen/cdsdashboards | 80e1d8a91e5a9cf119ce4b62f8bc58a54461a32b | [
"BSD-3-Clause"
] | null | null | null | cdsdashboards/app.py | MarcSkovMadsen/cdsdashboards | 80e1d8a91e5a9cf119ce4b62f8bc58a54461a32b | [
"BSD-3-Clause"
] | null | null | null | """
Application for configuring and building the app environments.
"""
import os, re, sys
import logging
from concurrent.futures import ThreadPoolExecutor
from urllib.parse import urlparse
from traitlets import Unicode, Integer, Bool, Dict, validate, Any, default, observe, List, TraitError
from traitlets.config import Application, catch_config_error, SingletonConfigurable
from tornado.httpclient import AsyncHTTPClient
from tornado.httpserver import HTTPServer
import tornado.ioloop
import tornado.options
import tornado.log
from sqlalchemy.exc import OperationalError
from jinja2 import Environment, FileSystemLoader, PrefixLoader, ChoiceLoader
from jupyterhub.services.auth import HubOAuthCallbackHandler
from jupyterhub import __version__ as __jh_version__
from jupyterhub import dbutil
from jupyterhub.traitlets import EntryPointType
from .dashboard import DashboardRepr
from .util import url_path_join
from jupyterhub import orm as jhorm
from .builder.builders import BuildersDict, Builder
from ._data import DATA_FILES_PATH
from .pluggymanager import pm
from . import hookimpl
# On-disk template directories shipped with the package (under DATA_FILES_PATH).
_TEMPLATE_PATH_OPEN = os.path.join(DATA_FILES_PATH, 'templates-open')
_TEMPLATE_PATH_RESTRICTED = os.path.join(DATA_FILES_PATH, 'templates-restricted')
_TEMPLATE_PATH_COMMON = os.path.join(DATA_FILES_PATH, 'templates-common')
# Template search paths for the open vs restricted variants; both end with the
# shared "common" templates as a fallback.
CDS_TEMPLATE_PATHS = [_TEMPLATE_PATH_OPEN, _TEMPLATE_PATH_COMMON]
CDS_TEMPLATE_PATHS_RESTRICTED = [_TEMPLATE_PATH_RESTRICTED, _TEMPLATE_PATH_COMMON]
# Presentation frameworks supported out of the box (the default for
# CDSDashboardsConfig.presentation_types below).
_all_allowed_presentation_types = ['voila', 'streamlit', 'plotlydash', 'bokeh', 'rshiny']
class CDSDashboardsConfig(SingletonConfigurable):
    """Configuration options for cdsdashboards.

    Every trait is tagged ``config=True`` so it can be set through the
    application's traitlets config (``c.CDSDashboardsConfig.<name> = ...``).
    """
    builder_class = EntryPointType(
        default_value='cdsdashboards.builder.builders.Builder',
        klass=Builder,
        entry_point_group="cdsdashboards.builders",
        help="""The class to use for building dashboard servers.
        Should be a subclass of :class:`cdsdashboards.builder.builders.Builder`.
        May be registered via entry points,
        e.g. `c.cdsdashboards.builders = 'localprocess'`
        """,
    ).tag(config=True)
    server_name_template = Unicode(
        'dash-{urlname}',
        help="""
        How to name the final user server that runs the dashboard. Template vars will be expanded:
        {urlname} : dashboard URL-safe name
        {date} : <current date in YYmmdd format>
        {time} : <current date in HHMMSS format>
        """
    ).tag(config=True)
    presentation_types = List(
        trait=Unicode,
        default_value=_all_allowed_presentation_types,
        minlen=1,
        help="""
        Allowed presentation types for Dashboards. A list, allowed strings are: {}.
        There must be at least one valid entry.
        Default value is all the allowed presentation types.
        Add any custom frameworks to the extra_presentation_types config if you want to augment instead of overwrite the default list.
        """.format(_all_allowed_presentation_types)
    ).tag(config=True)
    extra_presentation_types = List(
        trait=Unicode,
        default_value=[],
        minlen=0,
        help="""
        Extra custom presentation types for Dashboards, to be added to the presentation_types list.
        A list.
        Default value is the empty list.
        """
    ).tag(config=True)
    @property
    def merged_presentation_types(self):
        """All usable presentation types: ``presentation_types`` plus ``extra_presentation_types``."""
        return self.presentation_types + self.extra_presentation_types
    show_source_servers = Bool(
        False,
        help="""
        Allow the user to select a source server when creating a Dashboard (currently only relevant for DockerSpawner).
        """
    ).tag(config=True)
    require_source_server = Bool(
        False,
        help="""
        Require the user to select a source server when creating a Dashboard (currently only relevant for DockerSpawner).
        You must set show_source_servers to True if you set require_source_server to True.
        """
    ).tag(config=True)
    show_source_git = Bool(
        True,
        help="""
        Allow the user to enter a git repo to fetch files for a dashboard.
        """
    ).tag(config=True)
    default_allow_all = Bool(
        True,
        help="""
        If True (default) then newly-created Dashboards will be accessible to all authenticated JupyterHub users.
        If False, only members of the dashboard's own group will be allowed to access it.
        This flag is passed on to the Dashboard object's allow_all field so can be subsequently overridden in the database.
        """
    ).tag(config=True)
    conda_envs = List(
        trait=Unicode,
        default_value=[],
        minlen=0,
        help="""
        A list of Conda env names for the dashboard creator to select.
        A list.
        Default value is the empty list.
        """
    ).tag(config=True)
    allow_custom_conda_env = Bool(
        False,
        help="""
        If True then dashboard creators can type any value for the Conda env to use, even if none is provided in the conda_envs list of pre-defined env names.
        If False (default), only Conda env names listed in the conda_envs setting can be selected - if any are present.
        """
    ).tag(config=True)
    spawn_allow_group = Unicode(
        '',
        help="""
        Name of a JupyterHub group whose users should be allowed to spawn servers and create dashboards.
        See also spawn_block_group.
        """
    ).tag(config=True)
    spawn_block_group = Unicode(
        '',
        help="""
        Name of a JupyterHub group whose users should be blocked from spawning servers and creating dashboards.
        If blank, spawn_allow_group will determine which users should be able to spawn.
        If both settings are blank, all users will be allowed to spawn.
        If both are non-blank, spawn_block_group will take priority.
        """
    ).tag(config=True)
    include_auth_state = Bool(
        False,
        help="""
        Whether to include `auth_state` as part of the user information returned by
        dashboards-api/hub-info/user endpoint; default False.
        Warning: `auth_state` can contained sensitive information about the user
        (like credentials).
        """
    ).tag(config=True)
    include_servers = Bool(
        False,
        help="""
        Whether to include the user servers as part of the user information returned by
        dashboards-api/hub-info/user endpoint; default False.
        """
    ).tag(config=True)
    include_servers_state = Bool(
        False,
        help="""
        Whether to include the user servers state as part of the user information returned by
        dashboards-api/hub-info/user endpoint; default False.
        """
    ).tag(config=True)
    spawn_default_options = Bool(
        True,
        help="""
        This applies when the spawner may display options to the user before starting a server.
        If True, a Dashboard will always be started with the default spawner options, so no form will be displayed
        and the dashboard will start without waiting.
        If False (and form options are available in the spawner) then the Dashboard will not start until the
        dashboard creator has accessed the dashboard and provided options to the spawner.
        """
    ).tag(config=True)
class CDSConfigStore():
    """Process-wide holder for the single CDSDashboardsConfig instance."""

    _instance = None

    @classmethod
    def get_instance(cls, config):
        """Return the singleton CDSDashboardsConfig, building it from *config* on first use.

        Only the first caller's *config* is used; later callers get the cached
        instance regardless of the config they pass.
        """
        if not cls._instance:
            cls._instance = CDSDashboardsConfig(config=config)
        return cls._instance
class BuildersStore():
    """Process-wide holder for the single BuildersDict of dashboard builders."""

    _instance = None

    @classmethod
    def get_instance(cls, config):
        """Return the singleton BuildersDict, building it from *config* on first use."""
        if not cls._instance:
            cdsconfig = CDSConfigStore.get_instance(config)
            configured_builder = cdsconfig.builder_class

            def _make_builder(dashboard):
                # One builder per dashboard, all sharing the global config.
                return configured_builder(dashboard=dashboard, cdsconfig=cdsconfig)

            cls._instance = BuildersDict(_make_builder)
        return cls._instance
# Extra settings for the Tornado application; empty here and presumably
# populated or consumed elsewhere in the package -- TODO confirm its users.
cds_tornado_settings = {}
| 34.118367 | 158 | 0.682737 |
ace1f062bffae14bcd27e58acd024687156f2e92 | 1,176 | py | Python | scripts/print_tx_info.py | Matthew-Jennings/anchorman | f5e26668e70acbf7eee037bf3414cfb6d8ace433 | [
"Apache-2.0"
] | 1 | 2022-02-20T13:36:48.000Z | 2022-02-20T13:36:48.000Z | scripts/print_tx_info.py | Matthew-Jennings/anchorpy | f5e26668e70acbf7eee037bf3414cfb6d8ace433 | [
"Apache-2.0"
] | null | null | null | scripts/print_tx_info.py | Matthew-Jennings/anchorpy | f5e26668e70acbf7eee037bf3414cfb6d8ace433 | [
"Apache-2.0"
] | null | null | null | import json
from terra_sdk.client import lcd
import anchorpy
if __name__ == "__main__":
    # Scenario name -> transaction hash to look up on the CHAIN_ID chain.
    # NOTE(review): "invalid base64 msg" and "invalid uint28 amount" share the
    # same hash, and "success" repeats the "invalid type" hash -- looks
    # copy-pasted; confirm the intended hashes.
    TX_HASH_TESTS = {
        "invalid type": (
            "71B74D813CFC48585CF758D01C73F49CBAA5864B9FB43715C9D464A73A1D0B0B"
        ),
        "invalid base64 msg": (
            "44A2D644D1F92F3D373107014C0DD50CC357BB63CFD2CB8F4E14D5B6647EC476"
        ),
        "invalid uint28 amount": (
            "44A2D644D1F92F3D373107014C0DD50CC357BB63CFD2CB8F4E14D5B6647EC476"
        ),
        "underflow": (
            "47B9FD82FC76CBC9CACC346658629262813EC5D33724A4338F509168EF168056"
        ),
        # "insufficient funds for fees": (
        #     "CDD975460DBE52D4153B81AFCB4EBDC914B6377796EEFC1BB252EA2B87257DEC"
        # ),
        "success": ("71B74D813CFC48585CF758D01C73F49CBAA5864B9FB43715C9D464A73A1D0B0B"),
    }
    CHAIN_ID = "tequila-0004"
    # LCD client pointed at the public node URL registered for this chain id.
    LCD = lcd.LCDClient(
        chain_id=CHAIN_ID, url=anchorpy.settings.PUBLIC_NODE_URLS[CHAIN_ID]
    )
    # Fetch each transaction and pretty-print its info as sorted JSON.
    for k, tx_hash in TX_HASH_TESTS.items():
        print(f"\n{k}")
        print(
            json.dumps(
                LCD.tx.tx_info(tx_hash=tx_hash).to_data(), sort_keys=True, indent=2
            )
        )
| 30.153846 | 88 | 0.639456 |
ace1f0df18ff614579c4f8e82c967e71570aafcf | 454 | py | Python | diagrams/aws/enablement.py | bry-c/diagrams | 4c377a073e0aa8fe41934195da7a0869f31c58eb | [
"MIT"
] | 4 | 2020-11-16T05:56:59.000Z | 2022-02-27T15:57:12.000Z | diagrams/aws/enablement.py | bry-c/diagrams | 4c377a073e0aa8fe41934195da7a0869f31c58eb | [
"MIT"
] | 54 | 2020-12-14T08:37:05.000Z | 2022-01-14T14:51:59.000Z | diagrams/aws/enablement.py | bry-c/diagrams | 4c377a073e0aa8fe41934195da7a0869f31c58eb | [
"MIT"
] | 1 | 2020-09-11T02:10:16.000Z | 2020-09-11T02:10:16.000Z | # This module is automatically generated by autogen.sh. DO NOT EDIT.
from . import _AWS
# Generated diagram-node classes: each maps a node type to its icon file.
class _Enablement(_AWS):
    # Category name and icon directory shared by every node class below.
    _type = "enablement"
    _icon_dir = "resources/aws/enablement"
class Iq(_Enablement):
    _icon = "iq.png"
class ManagedServices(_Enablement):
    _icon = "managed-services.png"
class ProfessionalServices(_Enablement):
    _icon = "professional-services.png"
class Support(_Enablement):
    _icon = "support.png"
# Aliases
| 16.214286 | 68 | 0.715859 |
ace1f1e46847c5a51aaaff96f8748f3b0f1934fc | 800 | py | Python | footmark/rds/rdsobject.py | xzq-joy/footmark | 6fb749e35e94b7832f4c2b2bc93ac19c83e18cd4 | [
"Apache-2.0"
] | 18 | 2017-04-29T10:11:41.000Z | 2021-11-18T03:44:22.000Z | footmark/rds/rdsobject.py | xzq-joy/footmark | 6fb749e35e94b7832f4c2b2bc93ac19c83e18cd4 | [
"Apache-2.0"
] | 25 | 2017-06-01T09:35:57.000Z | 2021-06-24T08:09:26.000Z | footmark/rds/rdsobject.py | xzq-joy/footmark | 6fb749e35e94b7832f4c2b2bc93ac19c83e18cd4 | [
"Apache-2.0"
] | 47 | 2017-05-05T08:02:59.000Z | 2021-07-05T12:44:56.000Z | class RDSObject(object):
def __init__(self, connection=None):
self.connection = connection
if self.connection and hasattr(self.connection, 'region'):
self.region = connection.region
else:
self.region = None
class TaggedRDSObject(RDSObject):
    """Base class for RDS resources that can carry tags.

    Subclasses represent resources whose Describe* responses may include a
    tagSet element; when tags are found they are parsed and collected into a
    dict stored in the object's "tags" attribute.
    """

    def __init__(self, connection=None):
        super(TaggedRDSObject, self).__init__(connection)
| 34.782609 | 66 | 0.68625 |
ace1f221baff4c898bcd4959b6a7b7d0b25dcb9a | 14,231 | py | Python | tensorlayer/layers/convolution/expert_deconv.py | Howdy-Personally/tensorlayer-master | bb92e4e187419d5e7ded8331d5c7cbf5615ee744 | [
"Apache-2.0"
] | 4,484 | 2017-12-27T03:28:35.000Z | 2021-12-02T14:42:58.000Z | tensorlayer/layers/convolution/expert_deconv.py | Howdy-Personally/tensorlayer-master | bb92e4e187419d5e7ded8331d5c7cbf5615ee744 | [
"Apache-2.0"
] | 549 | 2017-12-28T07:19:52.000Z | 2021-11-05T02:34:20.000Z | tensorlayer/layers/convolution/expert_deconv.py | Howdy-Personally/tensorlayer-master | bb92e4e187419d5e7ded8331d5c7cbf5615ee744 | [
"Apache-2.0"
] | 1,076 | 2017-12-27T12:25:46.000Z | 2021-11-24T09:12:36.000Z | #! /usr/bin/python
# -*- coding: utf-8 -*-
import tensorflow as tf
import tensorlayer as tl
from tensorlayer import logging
from tensorlayer.decorators import deprecated_alias
from tensorlayer.layers.core import Layer
# from tensorlayer.layers.core import LayersConfig
__all__ = [
'DeConv1dLayer',
'DeConv2dLayer',
'DeConv3dLayer',
]
class DeConv1dLayer(Layer):
    """A de-convolution 1D layer.
    See `tf.nn.conv1d_transpose <https://tensorflow.google.cn/versions/r2.0/api_docs/python/tf/nn/conv1d_transpose>`__.
    Parameters
    ----------
    act : activation function or None
        The activation function of this layer.
    shape : tuple of int
        Shape of the filters: (height, width, output_channels, in_channels).
        The filter's ``in_channels`` dimension must match that of value.
    outputs_shape : tuple of int
        Output shape of the deconvolution,
    strides : tuple of int
        The sliding window strides for corresponding input dimensions.
    padding : str
        The padding algorithm type: "SAME" or "VALID".
    data_format : str
        "NWC" or "NCW", default is "NWC".
    dilation_rate : int
        Filter up-sampling/input down-sampling rate.
    W_init : initializer
        The initializer for the weight matrix.
    b_init : initializer or None
        The initializer for the bias vector. If None, skip biases.
    name : None or str
        A unique layer name.
    Notes
    -----
    - shape = [w, the number of output channels of this layer, the number of output channel of the previous layer].
    - outputs_shape = [batch_size, any, the number of output channels of this layer].
    - the number of output channel of a layer is its last dimension.
    Examples
    --------
    >>> input_layer = Input([8, 25, 32], name='input_layer')
    >>> deconv1d = tl.layers.DeConv1dLayer(
    ...     shape=(5, 64, 32), outputs_shape=(8, 50, 64), strides=(1, 2, 1), name='deconv1dlayer'
    ... )
    >>> print(deconv1d)
    >>> tensor = tl.layers.DeConv1dLayer(
    ...     shape=(5, 64, 32), outputs_shape=(8, 50, 64), strides=(1, 2, 1), name='deconv1dlayer'
    ... )(input_layer)
    >>> print(tensor)
    >>> output shape : (8, 50, 64)
    """
    def __init__(
        self,
        act=None,
        shape=(3, 128, 256),
        outputs_shape=(1, 256, 128),
        strides=(1, 2, 1),
        padding='SAME',
        data_format='NWC',
        dilation_rate=(1, 1, 1),
        W_init=tl.initializers.truncated_normal(stddev=0.02),
        b_init=tl.initializers.constant(value=0.0),
        name=None  # 'decnn1d_layer',
    ):
        super().__init__(name, act=act)
        self.shape = shape
        self.outputs_shape = outputs_shape
        self.strides = strides
        self.padding = padding
        self.data_format = data_format
        self.dilation_rate = dilation_rate
        self.W_init = W_init
        self.b_init = b_init
        # The filter's input-channel count is the last entry of ``shape``.
        self.in_channels = self.shape[-1]
        # The filter shape does not depend on the input tensor, so the weights
        # are created eagerly here via build(None).
        self.build(None)
        self._built = True
        logging.info(
            "DeConv1dLayer %s: shape: %s out_shape: %s strides: %s pad: %s act: %s" % (
                self.name, str(shape), str(outputs_shape), str(strides), padding,
                self.act.__name__ if self.act is not None else 'No Activation'
            )
        )
    def __repr__(self):
        # Human-readable summary mirroring the constructor arguments.
        actstr = self.act.__name__ if self.act is not None else 'No Activation'
        s = (
            '{classname}(in_channels={in_channels}, out_channels={n_filter}, kernel_size={filter_size}'
            ', strides={strides}, padding={padding}'
        )
        if self.dilation_rate != (1, ) * len(self.dilation_rate):
            s += ', dilation={dilation_rate}'
        if self.b_init is None:
            s += ', bias=False'
        s += (', ' + actstr)
        if self.name is not None:
            s += ', name=\'{name}\''
        s += ')'
        return s.format(
            classname=self.__class__.__name__, n_filter=self.shape[-2], filter_size=self.shape[0], **self.__dict__
        )
    def build(self, inputs):
        # Transposed-conv filters use ``self.shape`` directly.
        self.W = self._get_weights("filters", shape=self.shape, init=self.W_init)
        if self.b_init:
            # NOTE(review): ``(self.shape[-2])`` is a plain int, not a 1-tuple
            # (missing trailing comma) -- confirm _get_weights accepts a scalar shape.
            self.b = self._get_weights("biases", shape=(self.shape[-2]), init=self.b_init)
    def forward(self, inputs):
        # Transposed 1D convolution producing ``self.outputs_shape``, followed
        # by an optional bias add and optional activation.
        outputs = tf.nn.conv1d_transpose(
            input=inputs,
            filters=self.W,
            output_shape=self.outputs_shape,
            strides=list(self.strides),
            padding=self.padding,
            data_format=self.data_format,
            dilations=list(self.dilation_rate),
            name=self.name,
        )
        if self.b_init:
            outputs = tf.nn.bias_add(outputs, self.b, data_format=self.data_format, name='bias_add')
        if self.act:
            outputs = self.act(outputs)
        return outputs
class DeConv2dLayer(Layer):
    """A de-convolution 2D layer.
    See `tf.nn.conv2d_transpose <https://tensorflow.google.cn/versions/r2.0/api_docs/python/tf/nn/conv2d_transpose>`__.
    Parameters
    ----------
    act : activation function or None
        The activation function of this layer.
    shape : tuple of int
        Shape of the filters: (height, width, output_channels, in_channels).
        The filter's ``in_channels`` dimension must match that of value.
    outputs_shape : tuple of int
        Output shape of the deconvolution.
    strides : tuple of int
        The sliding window strides for corresponding input dimensions.
    padding : str
        The padding algorithm type: "SAME" or "VALID".
    data_format : str
        "NHWC" or "NCHW", default is "NHWC".
    dilation_rate : tuple of int
        Filter up-sampling/input down-sampling rate.
    W_init : initializer
        The initializer for the weight matrix.
    b_init : initializer or None
        The initializer for the bias vector. If None, skip biases.
    name : None or str
        A unique layer name.
    Notes
    -----
    - shape = [h, w, the number of output channels of this layer, the number of output channel of the previous layer].
    - outputs_shape = [batch_size, any, any, the number of output channels of this layer].
    - the number of output channel of a layer is its last dimension.
    Examples
    --------
    With TensorLayer
    TODO: Add the example code of a part of the generator in DCGAN example
    U-Net
    >>> ....
    >>> conv10 = tl.layers.Conv2dLayer(
    ...     act=tf.nn.relu,
    ...     shape=(3, 3, 1024, 1024), strides=(1, 1, 1, 1), padding='SAME',
    ...     W_init=w_init, b_init=b_init, name='conv10'
    ... )(conv9)
    >>> print(conv10)
    (batch_size, 32, 32, 1024)
    >>> deconv1 = tl.layers.DeConv2dLayer(
    ...     act=tf.nn.relu,
    ...     shape=(3, 3, 512, 1024), strides=(1, 2, 2, 1), outputs_shape=(batch_size, 64, 64, 512),
    ...     padding='SAME', W_init=w_init, b_init=b_init, name='deconv1_1'
    ... )(conv10)
    """
    def __init__(
        self,
        act=None,
        shape=(3, 3, 128, 256),
        outputs_shape=(1, 256, 256, 128),
        strides=(1, 2, 2, 1),
        padding='SAME',
        data_format='NHWC',
        dilation_rate=(1, 1, 1, 1),
        W_init=tl.initializers.truncated_normal(stddev=0.02),
        b_init=tl.initializers.constant(value=0.0),
        name=None # 'decnn2d_layer',
    ):
        super().__init__(name, act=act)
        self.shape = shape
        self.outputs_shape = outputs_shape
        self.strides = strides
        self.padding = padding
        self.data_format = data_format
        self.dilation_rate = dilation_rate
        self.W_init = W_init
        self.b_init = b_init
        # Last filter dimension is the expected input channel count.
        self.in_channels = self.shape[-1]
        # Weights depend only on `shape`, so the layer is built eagerly.
        self.build(None)
        self._built = True
        logging.info(
            "DeConv2dLayer %s: shape: %s out_shape: %s strides: %s pad: %s act: %s" % (
                self.name, str(shape), str(outputs_shape), str(strides), padding,
                self.act.__name__ if self.act is not None else 'No Activation'
            )
        )
    def __repr__(self):
        """Return a constructor-style summary; optional parts are appended only
        when they differ from their defaults."""
        actstr = self.act.__name__ if self.act is not None else 'No Activation'
        s = (
            '{classname}(in_channels={in_channels}, out_channels={n_filter}, kernel_size={filter_size}'
            ', strides={strides}, padding={padding}'
        )
        if self.dilation_rate != (1, ) * len(self.dilation_rate):
            s += ', dilation={dilation_rate}'
        if self.b_init is None:
            s += ', bias=False'
        s += (', ' + actstr)
        if self.name is not None:
            s += ', name=\'{name}\''
        s += ')'
        return s.format(
            classname=self.__class__.__name__, n_filter=self.shape[-2], filter_size=(self.shape[0], self.shape[1]),
            **self.__dict__
        )
    def build(self, inputs):
        """Create the filter variable (and optional bias); *inputs* is unused."""
        self.W = self._get_weights("filters", shape=self.shape, init=self.W_init)
        if self.b_init:
            # NOTE(review): (self.shape[-2]) is a plain int, not a 1-tuple —
            # presumably _get_weights accepts a scalar shape; confirm upstream.
            self.b = self._get_weights("biases", shape=(self.shape[-2]), init=self.b_init)
    def forward(self, inputs):
        """Apply the transposed 2-D convolution, then optional bias and activation."""
        outputs = tf.nn.conv2d_transpose(
            input=inputs,
            filters=self.W,
            output_shape=self.outputs_shape,
            strides=self.strides,
            padding=self.padding,
            data_format=self.data_format,
            dilations=list(self.dilation_rate),
            name=self.name,
        )
        if self.b_init:
            outputs = tf.nn.bias_add(outputs, self.b, data_format=self.data_format, name='bias_add')
        if self.act:
            outputs = self.act(outputs)
        return outputs
class DeConv3dLayer(Layer):
    """A de-convolution 3D layer.
    See `tf.nn.conv3d_transpose <https://tensorflow.google.cn/versions/r2.0/api_docs/python/tf/nn/conv3d_transpose>`__.
    Parameters
    ----------
    act : activation function or None
        The activation function of this layer.
    shape : tuple of int
        The shape of the filters: (depth, height, width, output_channels, in_channels).
        The filter's in_channels dimension must match that of value.
    outputs_shape : tuple of int
        The output shape of the deconvolution.
    strides : tuple of int
        The sliding window strides for corresponding input dimensions.
    padding : str
        The padding algorithm type: "SAME" or "VALID".
    data_format : str
        "NDHWC" or "NCDHW", default is "NDHWC".
    dilation_rate : tuple of int
        Filter up-sampling/input down-sampling rate.
    W_init : initializer
        The initializer for the weight matrix.
    b_init : initializer or None
        The initializer for the bias vector. If None, skip biases.
    name : None or str
        A unique layer name.
    Notes
    -----
    - shape = [d, h, w, the number of output channels of this layer, the number of output channel of the previous layer].
    - outputs_shape = [batch_size, any, any, any, the number of output channels of this layer].
    - the number of output channel of a layer is its last dimension.
    Examples
    --------
    >>> input_layer = Input([8, 10, 10, 10, 32], name='input_layer')
    >>> deconv3d = tl.layers.DeConv3dLayer(
    ...     shape=(2, 2, 2, 128, 32), outputs_shape=(8, 20, 20, 20, 128), strides=(1, 2, 2, 2, 1), name='deconv3dlayer'
    ... )
    >>> print(deconv3d)
    >>> tensor = tl.layers.DeConv3dLayer(
    ...     shape=(2, 2, 2, 128, 32), outputs_shape=(8, 20, 20, 20, 128), strides=(1, 2, 2, 2, 1), name='deconv3dlayer'
    ... )(input_layer)
    >>> print(tensor)
    >>> output shape : (8, 20, 20, 20, 128)
    """
    def __init__(
        self,
        act=None,
        shape=(2, 2, 2, 128, 256),
        outputs_shape=(1, 12, 32, 32, 128),
        strides=(1, 2, 2, 2, 1),
        padding='SAME',
        data_format='NDHWC',
        dilation_rate=(1, 1, 1, 1, 1),
        W_init=tl.initializers.truncated_normal(stddev=0.02),
        b_init=tl.initializers.constant(value=0.0),
        name=None # 'decnn3d_layer',
    ):
        super().__init__(name, act=act)
        self.shape = shape
        self.outputs_shape = outputs_shape
        self.strides = strides
        self.padding = padding
        self.data_format = data_format
        self.dilation_rate = dilation_rate
        self.W_init = W_init
        self.b_init = b_init
        # Last filter dimension is the expected input channel count.
        self.in_channels = self.shape[-1]
        # Weights depend only on `shape`, so the layer is built eagerly.
        self.build(None)
        self._built = True
        logging.info(
            "DeConv3dLayer %s: shape: %s out_shape: %s strides: %s pad: %s act: %s" % (
                self.name, str(shape), str(outputs_shape), str(strides), padding,
                self.act.__name__ if self.act is not None else 'No Activation'
            )
        )
    def __repr__(self):
        """Return a constructor-style summary; optional parts are appended only
        when they differ from their defaults."""
        actstr = self.act.__name__ if self.act is not None else 'No Activation'
        s = (
            '{classname}(in_channels={in_channels}, out_channels={n_filter}, kernel_size={filter_size}'
            ', strides={strides}, padding={padding}'
        )
        if self.dilation_rate != (1, ) * len(self.dilation_rate):
            s += ', dilation={dilation_rate}'
        if self.b_init is None:
            s += ', bias=False'
        s += (', ' + actstr)
        if self.name is not None:
            s += ', name=\'{name}\''
        s += ')'
        return s.format(
            classname=self.__class__.__name__, n_filter=self.shape[-2],
            filter_size=(self.shape[0], self.shape[1], self.shape[2]), **self.__dict__
        )
    def build(self, inputs):
        """Create the filter variable (and optional bias); *inputs* is unused."""
        self.W = self._get_weights("filters", shape=self.shape, init=self.W_init)
        if self.b_init:
            # NOTE(review): (self.shape[-2]) is a plain int, not a 1-tuple —
            # presumably _get_weights accepts a scalar shape; confirm upstream.
            self.b = self._get_weights("biases", shape=(self.shape[-2]), init=self.b_init)
    def forward(self, inputs):
        """Apply the transposed 3-D convolution, then optional bias and activation."""
        outputs = tf.nn.conv3d_transpose(
            input=inputs, filters=self.W, output_shape=self.outputs_shape, strides=self.strides, padding=self.padding,
            data_format=self.data_format, dilations=list(self.dilation_rate), name=self.name
        )
        if self.b_init:
            outputs = tf.nn.bias_add(outputs, self.b, data_format=self.data_format, name='bias_add')
        if self.act:
            outputs = self.act(outputs)
        return outputs
| 35.756281 | 121 | 0.594477 |
ace1f37d4417548fc70a9b90d459327cc1997718 | 834 | py | Python | 02. Conditional Statements/P14 Shopping.py | KrisBestTech/Python-Basics | 10bd961bf16d15ddb94bbea53327b4fc5bfdba4c | [
"MIT"
] | null | null | null | 02. Conditional Statements/P14 Shopping.py | KrisBestTech/Python-Basics | 10bd961bf16d15ddb94bbea53327b4fc5bfdba4c | [
"MIT"
] | null | null | null | 02. Conditional Statements/P14 Shopping.py | KrisBestTech/Python-Basics | 10bd961bf16d15ddb94bbea53327b4fc5bfdba4c | [
"MIT"
] | null | null | null | import math
# Read the shopping budget and the number of each component to buy.
budget = float(input())
video_cards = int(input())
processors = int(input())
ram_memory = int(input())
# Pricing: a video card costs 250 leva; each processor costs 35% of the total
# video-card price; each RAM stick costs 10% of it.
video_cards_cost = 250 * video_cards
processors_cost = processors * (video_cards_cost * 0.35)
ram_cost = ram_memory * (video_cards_cost * 0.10)
total_price = video_cards_cost + processors_cost + ram_cost
# Buying more video cards than processors earns a 15% discount on the total.
if video_cards > processors:
    total_price = total_price - (total_price * 0.15)
money_left = budget - total_price
if total_price <= budget:
    print(f'You have {math.fabs(money_left):.2f} leva left!')
else:
    print(f'Not enough money! You need {math.fabs(money_left):.2f} leva more!')
ace1f41b0682bc64a2def369b9faca7ce7da8f04 | 283 | py | Python | plugins/keepkey/__init__.py | johnlito123/electrum-xuez | 4eb35889f95e31f0a08d5488082df9ab94b4c3ca | [
"MIT"
] | null | null | null | plugins/keepkey/__init__.py | johnlito123/electrum-xuez | 4eb35889f95e31f0a08d5488082df9ab94b4c3ca | [
"MIT"
] | null | null | null | plugins/keepkey/__init__.py | johnlito123/electrum-xuez | 4eb35889f95e31f0a08d5488082df9ab94b4c3ca | [
"MIT"
] | 4 | 2018-07-07T16:35:50.000Z | 2018-12-25T16:02:52.000Z | from electrum_xuez.i18n import _
fullname = 'KeepKey'
description = _('Provides support for KeepKey hardware wallet')
requires = [('keepkeylib','github.com/keepkey/python-keepkey')]
registers_keystore = ('hardware', 'keepkey', _("KeepKey wallet"))
available_for = ['qt', 'cmdline']
| 35.375 | 65 | 0.742049 |
ace1f44d763073cadbae2dcf880a88318cbb1117 | 1,807 | py | Python | examples/tf2_nan_CTL.py | DEKHTIARJonathan/TF_HVD_Stability_Test | d6c5d0427caccd936c70791bfa368249081addb7 | [
"Apache-2.0"
] | 2 | 2020-04-23T20:32:26.000Z | 2020-04-24T01:26:01.000Z | examples/tf2_nan_CTL.py | DEKHTIARJonathan/TF_HVD_Stability_Test | d6c5d0427caccd936c70791bfa368249081addb7 | [
"Apache-2.0"
] | null | null | null | examples/tf2_nan_CTL.py | DEKHTIARJonathan/TF_HVD_Stability_Test | d6c5d0427caccd936c70791bfa368249081addb7 | [
"Apache-2.0"
] | 1 | 2020-06-11T21:57:08.000Z | 2020-06-11T21:57:08.000Z | import logging, os
logging.disable(logging.WARNING)
import tensorflow as tf
import horovod.tensorflow as hvd
import argparse
import numpy as np
import time
parser = argparse.ArgumentParser()
parser.add_argument("--use_amp", action="store_true")
args = parser.parse_args()
# Startup Horovod
hvd.init()
gpus = tf.config.experimental.list_physical_devices('GPU')
if gpus:
tf.config.experimental.set_visible_devices(gpus[hvd.local_rank()], 'GPU')
# Enable AMP
if args.use_amp:
policy = tf.keras.mixed_precision.experimental.Policy('mixed_float16')
tf.keras.mixed_precision.experimental.set_policy(policy)
# Create optimizer
opt = tf.keras.optimizers.SGD(learning_rate=1e-6)
opt = hvd.DistributedOptimizer(opt)
if args.use_amp:
opt = tf.keras.mixed_precision.LossScaleOptimizer(opt)
# Create dummy model
model = tf.keras.models.Sequential()
model.add(tf.keras.layers.Dense(1, use_bias=False))
for step in range(5):
## On rank 0, set input to NaN to generate NaN gradient
if hvd.rank() == 0 and step % 2 == 0:
x = np.ones((1, 1), dtype=np.float32)
x[:, :] = np.nan
else:
x = np.ones((1, 1), dtype=np.float32)
with tf.GradientTape() as tape:
predictions = model(x)
loss = tf.reduce_sum(predictions)
print("rank {} loss {}".format(hvd.rank(), loss))
# Scale the losses
if args.use_amp:
loss = opt.get_scaled_loss(loss)
tape = hvd.DistributedGradientTape(tape)
grads = tape.gradient(loss, model.trainable_variables)
# Unscale the grads
if args.use_amp:
grads = opt.get_unscaled_gradients(grads)
opt.apply_gradients(zip(grads, model.trainable_variables))
print("rank {} completed step...".format(hvd.rank()))
# Sleep to keep ranks from killing job prematurely
time.sleep(10)
| 26.188406 | 77 | 0.703376 |
ace1f456dcefb64eba1fb0e943552d3e43bacea4 | 2,682 | py | Python | testTFConvVAE.py | sankhaMukherjee/vae | 628d0bde29a465544f76599826fe9223ac850304 | [
"MIT"
] | null | null | null | testTFConvVAE.py | sankhaMukherjee/vae | 628d0bde29a465544f76599826fe9223ac850304 | [
"MIT"
] | null | null | null | testTFConvVAE.py | sankhaMukherjee/vae | 628d0bde29a465544f76599826fe9223ac850304 | [
"MIT"
] | 1 | 2021-12-12T10:01:22.000Z | 2021-12-12T10:01:22.000Z | import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import tensorflow as tf
physical_devices = tf.config.list_physical_devices('GPU')
tf.config.experimental.set_memory_growth(physical_devices[0], enable=True)
from models.TF import ConvVAE
from datetime import datetime as dt
import numpy as np
from utils import plotUtils as pU
from utils import dataUtils as dU
def main():
    """Train a small convolutional VAE on MNIST and write plots to results/."""
    # Timestamped output folder for this run's artifacts.
    now = dt.now().strftime('%Y-%m-%d--%H-%M-%S-ConvVAE')
    os.makedirs(f'results/{now}')
    batchSize = 1024
    EPOCHS = 100
    # --------- [ Generate the data ] ---------------------
    (x_train, y_train), (x_test, y_test) = dU.getMNISTData(reshape=False)
    # Reshape to NHWC: (N, 28, 28, 1) grayscale images.
    x_train1 = x_train.reshape( -1, 28, 28, 1 )
    x_test1 = x_test.reshape( -1, 28, 28, 1 )
    train_dataset = tf.data.Dataset.from_tensor_slices(x_train1)
    train_dataset = train_dataset.shuffle(buffer_size=2048).batch(batchSize)
    # --------- [ Generate the model ] ---------------------
    nInpX = 28
    nInpY = 28
    nInpCh = 1
    nLatent = 2
    encoderSpecs = {
        'nFilters' : [2, 5, 10],
        'kernelSizes' : [3, 3, 3],
        'strideSizes' : [1, 1, 1],
        'activations' : [tf.nn.tanh, tf.nn.tanh, tf.nn.tanh],
        'paddings' : ['same', 'same', 'same'],
    }
    decoderSpecs = {
        'nFilters' : [10, 5, 5, 5, 1],
        'kernelSizes' : [5, 7, 7, 6, 6],
        'strideSizes' : [1, 1, 1, 1, 1],
        'activations' : [tf.nn.tanh, tf.nn.tanh, tf.nn.tanh, tf.nn.tanh, tf.nn.tanh],
        'paddings' : ['valid', 'valid', 'valid', 'valid', 'valid'],
    }
    vae = ConvVAE.ConvVAE(nInpX, nInpY, nInpCh, nLatent, encoderSpecs, decoderSpecs)
    # --------- [ Train the model ] ---------------------
    losses = []
    for epoch in range(EPOCHS):
        print('Start of epoch %d' % (epoch,), end='-> ')
        # Iterate over the batches of the dataset.
        for step, x in enumerate(train_dataset):
            reconLoss, klLoss, loss = vae.step( x )
            losses.append([reconLoss, klLoss, loss])
            if step % 100 == 0:
                print(reconLoss, klLoss, loss)
    # ------------- [plot everything] -----------------
    # Transpose so each row is one loss series over all steps.
    losses = np.array(losses).T
    losses = {
        'reconstruction' : losses[0],
        'KL Divergence' : losses[1],
        'Total' : losses[2]}
    pU.plotLosses(losses, folder=now)
    pU.plotMNISTLatentSpace(epoch, vae, x_test1, y_test, folder=now)
    pU.plotMNISTImages(epoch, vae, x_test1, y_test, logits=True, folder=now)
    pU.plotMNISTLatentReconstruction(epoch, vae, extent=(-3, 3), nSteps=21, logits=True, folder=now)
    return
# Script entry point: only train when run directly, not on import.
if __name__ == "__main__":
    main()
| 31.928571 | 100 | 0.567114 |
ace1f54973ae1d6f13a3e5c87383524bbbe4ae22 | 715 | py | Python | StudentWork/Mohammed/Python/Main/Main.py | mickknutson/SITE_BOOTCAMP_QA | adbd8014bcbbd5363e61eaad80b8eea002dd42ee | [
"MIT"
] | 12 | 2020-02-25T07:49:49.000Z | 2021-11-16T12:20:17.000Z | StudentWork/Mohammed/Python/Main/Main.py | mickknutson/SITE_BOOTCAMP_QA | adbd8014bcbbd5363e61eaad80b8eea002dd42ee | [
"MIT"
] | null | null | null | StudentWork/Mohammed/Python/Main/Main.py | mickknutson/SITE_BOOTCAMP_QA | adbd8014bcbbd5363e61eaad80b8eea002dd42ee | [
"MIT"
] | 11 | 2020-02-25T08:34:28.000Z | 2021-07-05T20:56:16.000Z | def evenOdd(x):
if (x % 2 == 0):
print("even")
else :
print ("odd")
evenOdd(2)
evenOdd(3)
##############
#---------------
#- String Lab
######
fName = "mohammed"
lName = "Aldawsari"
print(fName + " " + lName)
#-------------
#- for Loob/If statments
x = 19
print("Multiplication Table of", x)
for i in range(1, 11):
print(x,"X",i,"=",x * i)
# --- Lists & Tuples
list1 = ["apple", "Orange"]
print(list1)
# tupl1 = ("moe" , "Jack" , "Khalied")
# print(tupl1)
# Indexing!
print(list1[0:1])
## homework -- Exception divide by zero // chnuk norris
x = 500
y = 0
try:
print(x/y)
except:
print("You cannot divide By 0 ")
finally:
print("Exception Check has been done!")
| 14.019608 | 55 | 0.537063 |
ace1f5e8fb1a5470abb5e4f5a39654c73f861022 | 8,731 | py | Python | test/integration/app/controller/test_playlist_monitor.py | Tastyep/RaspberryCast | 8ae8cf986b373b04c43d248ee72c77b1a29daa43 | [
"MIT"
] | 1 | 2018-11-08T09:50:35.000Z | 2018-11-08T09:50:35.000Z | test/integration/app/controller/test_playlist_monitor.py | Tastyep/RaspberryCast | 8ae8cf986b373b04c43d248ee72c77b1a29daa43 | [
"MIT"
] | null | null | null | test/integration/app/controller/test_playlist_monitor.py | Tastyep/RaspberryCast | 8ae8cf986b373b04c43d248ee72c77b1a29daa43 | [
"MIT"
] | null | null | null | from aiohttp.test_utils import unittest_run_loop
from OpenCast.app.command import make_cmd
from OpenCast.app.command import playlist as PlaylistCmd
from OpenCast.app.service.error import OperationError
from OpenCast.domain.constant import HOME_PLAYLIST
from OpenCast.domain.event import playlist as PlaylistEvt
from OpenCast.domain.service.identity import IdentityService
from .util import MonitorControllerTestCase
class PlaylistMonitorControllerTest(MonitorControllerTestCase):
    """Integration tests for the /api/playlists HTTP endpoints.

    Commands emitted by the controller are intercepted (hooked or expected)
    instead of being executed, and the matching domain events are dispatched
    by hand, so only the HTTP layer is really exercised.
    """
    def setUp(self):
        super(PlaylistMonitorControllerTest, self).setUp()
        # Seed one playlist with two videos plus a second, empty playlist.
        self.playlist_id = IdentityService.id_playlist()
        self.data_producer.playlist(self.playlist_id, "playlist1").video(
            "source1"
        ).video("source2").playlist(
            IdentityService.id_playlist(), "playlist2"
        ).populate(
            self.data_facade
        )
        self.playlist_repo = self.data_facade.playlist_repo
        self.video_repo = self.data_facade.video_repo
        self.playlists = self.playlist_repo.list()
    @unittest_run_loop
    async def test_create(self):
        playlist_id = None
        # Fake handler: persist the playlist and dispatch the success event,
        # capturing the id generated by the command.
        def make_and_respond(cmd):
            nonlocal playlist_id
            playlist_id = cmd.model_id
            self.data_producer.playlist(
                id=playlist_id, name=cmd.name, ids=cmd.ids
            ).populate(self.data_facade)
            self.app_facade.evt_dispatcher.dispatch(
                PlaylistEvt.PlaylistCreated(
                    cmd.id, playlist_id, cmd.name, cmd.ids, cmd.generated
                )
            )
        self.hook_cmd(PlaylistCmd.CreatePlaylist, make_and_respond)
        resp = await self.client.post("/api/playlists/", json={"name": "test_playlist"})
        body = await resp.json()
        playlist = self.playlist_repo.get(playlist_id)
        self.assertEqual(200, resp.status)
        self.assertEqual(playlist.to_dict(), body)
    @unittest_run_loop
    async def test_create_validation_error(self):
        # Non-string name must be rejected by schema validation (422).
        resp = await self.client.post("/api/playlists/", json={"name": 5})
        body = await resp.json()
        self.assertEqual(422, resp.status)
        self.assertEqual(
            {"name": ["Not a valid string."]},
            body,
        )
    @unittest_run_loop
    async def test_create_service_error(self):
        # A service-level failure surfaces as a 500 with the error message.
        self.error_on(PlaylistCmd.CreatePlaylist, "Error message")
        resp = await self.client.post("/api/playlists/", json={"name": "test_playlist"})
        body = await resp.json()
        self.assertEqual(500, resp.status)
        self.assertEqual(
            {
                "message": "Error message",
                "details": {},
            },
            body,
        )
    @unittest_run_loop
    async def test_list(self):
        resp = await self.client.get("/api/playlists/")
        body = await resp.json()
        playlists = self.data_facade.playlist_repo.list()
        self.assertEqual(200, resp.status)
        self.assertEqual(
            {"playlists": [playlist.to_dict() for playlist in playlists]}, body
        )
    @unittest_run_loop
    async def test_get(self):
        playlist = self.playlist_repo.get(self.playlist_id)
        resp = await self.client.get(f"/api/playlists/{playlist.id}")
        body = await resp.json()
        self.assertEqual(200, resp.status)
        self.assertEqual(playlist.to_dict(), body)
    @unittest_run_loop
    async def test_get_not_found(self):
        # A fresh id is not in the repository, so the lookup must 404.
        playlist_id = IdentityService.id_playlist()
        resp = await self.client.get(f"/api/playlists/{playlist_id}")
        self.assertEqual(404, resp.status)
    @unittest_run_loop
    async def test_list_videos(self):
        playlist = self.playlist_repo.get(self.playlist_id)
        resp = await self.client.get(f"/api/playlists/{playlist.id}/videos")
        body = await resp.json()
        videos = self.video_repo.list(playlist.ids)
        self.assertEqual(200, resp.status)
        self.assertEqual({"videos": [video.to_dict() for video in videos]}, body)
    @unittest_run_loop
    async def test_list_videos_not_found(self):
        playlist_id = IdentityService.id_playlist()
        resp = await self.client.get(f"/api/playlists/{playlist_id}/videos")
        self.assertEqual(404, resp.status)
    @unittest_run_loop
    async def test_update(self):
        # PATCH triggers a rename followed by a content update; both succeed.
        req_body = {"name": "test_playlist", "ids": [str(IdentityService.random())]}
        playlist = self.playlist_repo.get(self.playlist_id)
        self.expect_and_raise_l(
            [
                {
                    "cmd": make_cmd(
                        PlaylistCmd.RenamePlaylist, playlist.id, req_body["name"]
                    ),
                    "evt": PlaylistEvt.PlaylistRenamed,
                    "args": {"name": req_body["name"]},
                },
                {
                    "cmd": make_cmd(
                        PlaylistCmd.UpdatePlaylistContent, playlist.id, req_body["ids"]
                    ),
                    "evt": PlaylistEvt.PlaylistContentUpdated,
                    "args": {"ids": req_body["ids"]},
                },
            ]
        )
        resp = await self.client.patch(f"/api/playlists/{playlist.id}", json=req_body)
        body = await resp.json()
        self.assertEqual(200, resp.status)
        self.assertEqual(playlist.to_dict(), body)
    @unittest_run_loop
    async def test_update_not_found(self):
        playlist_id = IdentityService.id_playlist()
        req_body = {"name": "test_playlist", "ids": [str(IdentityService.random())]}
        resp = await self.client.patch(f"/api/playlists/{playlist_id}", json=req_body)
        self.assertEqual(404, resp.status)
    @unittest_run_loop
    async def test_update_validation_error(self):
        req_body = {"name": 2, "ids": [str(IdentityService.random())]}
        playlist = self.playlist_repo.get(self.playlist_id)
        resp = await self.client.patch(f"/api/playlists/{playlist.id}", json=req_body)
        body = await resp.json()
        self.assertEqual(422, resp.status)
        self.assertEqual(
            {"name": ["Not a valid string."]},
            body,
        )
    @unittest_run_loop
    async def test_update_error(self):
        # Rename succeeds but the content update fails -> the whole PATCH is 500.
        req_body = {"name": "test_playlist", "ids": [str(IdentityService.random())]}
        playlist = self.playlist_repo.get(self.playlist_id)
        self.expect_and_raise_l(
            [
                {
                    "cmd": make_cmd(
                        PlaylistCmd.RenamePlaylist, playlist.id, req_body["name"]
                    ),
                    "evt": PlaylistEvt.PlaylistRenamed,
                    "args": {"name": req_body["name"]},
                },
                {
                    "cmd": make_cmd(
                        PlaylistCmd.UpdatePlaylistContent, playlist.id, req_body["ids"]
                    ),
                    "evt": OperationError,
                    "args": {"error": "Error message"},
                },
            ]
        )
        resp = await self.client.patch(f"/api/playlists/{playlist.id}", json=req_body)
        body = await resp.json()
        self.assertEqual(500, resp.status)
        self.assertEqual(
            {
                "message": "Error message",
                "details": {},
            },
            body,
        )
    @unittest_run_loop
    async def test_delete(self):
        playlist = self.playlist_repo.get(self.playlist_id)
        self.queueing_service.queue.return_value = [] # Ignore the queueing action
        self.expect_and_raise(
            make_cmd(PlaylistCmd.DeletePlaylist, playlist.id),
            [
                {
                    "type": PlaylistEvt.PlaylistDeleted,
                    "args": {"name": playlist.name, "ids": playlist.ids},
                }
            ],
        )
        resp = await self.client.delete(f"/api/playlists/{playlist.id}")
        self.assertEqual(204, resp.status)
    @unittest_run_loop
    async def test_delete_not_found(self):
        playlist_id = IdentityService.id_playlist()
        resp = await self.client.delete(f"/api/playlists/{playlist_id}")
        self.assertEqual(404, resp.status)
    @unittest_run_loop
    async def test_delete_forbidden(self):
        # The generated home playlist may never be deleted -> 403.
        self.expect_and_error(
            make_cmd(PlaylistCmd.DeletePlaylist, HOME_PLAYLIST.id),
            error="cannot delete generated playlists",
        )
        resp = await self.client.delete(f"/api/playlists/{HOME_PLAYLIST.id}")
        body = await resp.json()
        self.assertEqual(403, resp.status)
        self.assertEqual(
            {
                "message": "cannot delete generated playlists",
                "details": {},
            },
            body,
        )
| 36.684874 | 88 | 0.588821 |
ace1f60c45446f5c9d279f55167871249c913571 | 285 | py | Python | task06/banki/banki/pipelines.py | StasBel/pracanalytics-hw | a12bb6bf5d96ddbe8c036b0c8095a11e7fdc5d02 | [
"MIT"
] | 1 | 2018-04-17T21:59:35.000Z | 2018-04-17T21:59:35.000Z | task06/banki/banki/pipelines.py | stasbel/pracanalytics-hw | a12bb6bf5d96ddbe8c036b0c8095a11e7fdc5d02 | [
"MIT"
] | null | null | null | task06/banki/banki/pipelines.py | stasbel/pracanalytics-hw | a12bb6bf5d96ddbe8c036b0c8095a11e7fdc5d02 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
class BankiPipeline(object):
    """Default Scrapy item pipeline: passes every item through unchanged."""
    def process_item(self, item, spider):
        # No filtering or transformation; returning the item keeps it flowing
        # to any later pipeline stage.
        return item
| 23.75 | 65 | 0.708772 |
ace1f73d8632e20217f61dd8b9c55c30cff7abaf | 216 | py | Python | django_scoops/scoops/urls.py | movingheart/django_scoops | e2f3e384b810dc920538cb655977033c69e59568 | [
"BSD-2-Clause"
] | null | null | null | django_scoops/scoops/urls.py | movingheart/django_scoops | e2f3e384b810dc920538cb655977033c69e59568 | [
"BSD-2-Clause"
] | null | null | null | django_scoops/scoops/urls.py | movingheart/django_scoops | e2f3e384b810dc920538cb655977033c69e59568 | [
"BSD-2-Clause"
] | null | null | null | from django.conf.urls import url
import views
urlpatterns = [
url(r'^current_date/$',views.current_date,name='current_date'),
url(r'^hours_ahead/$',views.hours_ahead,name='hours_ahead'),
]
| 21.6 | 67 | 0.666667 |
ace1f79c0aaebf444100aa772f3f8bcb0b5a2ee9 | 6,537 | py | Python | applications/CoSimulationApplication/python_scripts/coupling_operations/compute_boundary_force.py | clazaro/Kratos | b947b82c90dfcbf13d60511427f85990d36b90be | [
"BSD-4-Clause"
] | null | null | null | applications/CoSimulationApplication/python_scripts/coupling_operations/compute_boundary_force.py | clazaro/Kratos | b947b82c90dfcbf13d60511427f85990d36b90be | [
"BSD-4-Clause"
] | null | null | null | applications/CoSimulationApplication/python_scripts/coupling_operations/compute_boundary_force.py | clazaro/Kratos | b947b82c90dfcbf13d60511427f85990d36b90be | [
"BSD-4-Clause"
] | null | null | null | # Importing the Kratos Library
import KratosMultiphysics as KM
# Importing the base class
from KratosMultiphysics.CoSimulationApplication.base_classes.co_simulation_coupling_operation import CoSimulationCouplingOperation
# Additional imports
from KratosMultiphysics.time_based_ascii_file_writer_utility import TimeBasedAsciiFileWriterUtility
# CoSimulation imports
import KratosMultiphysics.CoSimulationApplication.co_simulation_tools as cs_tools
def Create(*args):
    """Factory hook: forward all arguments to the ComputeBoundaryForce constructor."""
    return ComputeBoundaryForce(*args)
class ComputeBoundaryForce(CoSimulationCouplingOperation):
    """This operation is used to compute forces in a boundary, based on the pressure.

    Integrates the nodal PRESSURE over the boundary elements of the configured
    model part, accumulates the resulting nodal forces into REACTION, and
    (on rank 0) appends a line per step to "<model_part>_global_force.dat".
    TODO:
    - add messages with different echo-levels
    - add tests
    - more cleanup
    """
    def __init__(self, settings, solver_wrappers, process_info, data_communicator):
        super().__init__(settings, process_info, data_communicator)
        self.model = solver_wrappers[self.settings["solver"].GetString()].model
        self.model_part_name = self.settings["model_part_name"].GetString()
        self.model_part = self.model[self.model_part_name]
        self.write_output_file = self.settings['write_output_file'].GetBool()
        self.format = self.settings["print_format"].GetString()
        self.width = self.settings["width"].GetDouble()
        # If 2D case: width from parameters is used
        # If 3D case: width is not used (forced to 1 below)
        domain_size = self.model_part.ProcessInfo[KM.DOMAIN_SIZE]
        if domain_size == 3:
            self.width = 1
        self.interval = KM.IntervalUtility(settings)
        # Only rank 0 owns the ASCII output file.
        if(self.model_part.GetCommunicator().MyPID() == 0):
            if(self.write_output_file):
                output_file_name = self.model_part_name + "_global_force.dat"
                file_handler_settings = KM.Parameters(self.settings["output_file_settings"])
                if file_handler_settings.Has("file_name"):
                    warn_msg = 'Unexpected user-specified entry found in "output_file_settings": {"file_name": '
                    warn_msg += '"' + file_handler_settings["file_name"].GetString() + '"}\n'
                    warn_msg += 'Using this specififed file name instead of the default "' + output_file_name + '"'
                    cs_tools.cs_print_info(self._ClassName(), warn_msg)
                else:
                    file_handler_settings.AddEmptyValue("file_name")
                    file_handler_settings["file_name"].SetString(output_file_name)
                file_header = self._GetFileHeader()
                self.output_file = TimeBasedAsciiFileWriterUtility(self.model_part, file_handler_settings, file_header).file
    def Execute(self):
        """Evaluate and report the boundary forces if TIME is inside the interval."""
        current_time = self.model_part.ProcessInfo[KM.TIME]
        if(self.interval.IsInInterval(current_time)):
            results = self._EvaluateGlobalForces()
            if(self.model_part.GetCommunicator().MyPID() == 0):
                output = []
                output.extend(results)
                output_values = [format(val, self.format) for val in output]
                # not formatting time in order to not lead to problems with time recognition
                # in the file writer when restarting
                output_values.insert(0, str(current_time))
                if(self.echo_level > 2):
                    # print to screen the results at echo level 3 or higher
                    res_labels = ['time: ', 'vel_x: ', 'vel_y: ', 'vel_z: ', 'f_x: ', 'f_y: ', 'f_z: ', 'p: ',]
                    result_msg = 'Boundary Force force evaluation for model part ' + self.model_part_name + '\n'
                    result_msg += ', '.join([a + b for a, b in zip(res_labels, output_values)])
                    cs_tools.cs_print_info(self._ClassName(), result_msg)
                if(self.write_output_file):
                    self.output_file.write(' '.join(output_values) + '\n')
    def _EvaluateGlobalForces(self):
        """Return [vel_x, vel_y, vel_z, f_x, f_y, f_z, p] for the boundary.

        NOTE(review): the velocity entries are a placeholder and are always
        zero — only forces and pressure are actually computed here.
        """
        # vel_x, vel_y, vel_z (placeholder, see note above)
        velocity = [0.0, 0.0, 0.0]
        sum_forces = [0.0, 0.0, 0.0]
        pressure_list = [0.0]
        # Reset the nodal REACTION components before accumulating new values.
        utils = KM.VariableUtils()
        utils.SetVariable(KM.REACTION_X, 0, self.model_part.Nodes)
        utils.SetVariable(KM.REACTION_Y, 0, self.model_part.Nodes)
        utils.SetVariable(KM.REACTION_Z, 0, self.model_part.Nodes)
        for element in self.model_part.Elements:
            geometry = element.GetGeometry()
            nodes = element.GetNodes()
            shape_functions_values = geometry.ShapeFunctionsValues()
            area = geometry.Area()
            unit_normal = geometry.UnitNormal()
            pressure = 0
            c = 0
            for node in nodes:
                pressure_node = node.GetSolutionStepValue(KM.PRESSURE, 0)
                # Nodal force: pressure acts against the outward normal.
                force_node = unit_normal * (-1) * pressure_node * area * shape_functions_values[0, c] * self.width
                pressure += pressure_node * shape_functions_values[0, c]
                node.SetSolutionStepValue(KM.REACTION_X, 0, node.GetSolutionStepValue(KM.REACTION_X, 0) + force_node[0])
                node.SetSolutionStepValue(KM.REACTION_Y, 0, node.GetSolutionStepValue(KM.REACTION_Y, 0) + force_node[1])
                node.SetSolutionStepValue(KM.REACTION_Z, 0, node.GetSolutionStepValue(KM.REACTION_Z, 0) + force_node[2])
                c += 1
            force = unit_normal * pressure * area * self.width
            for i in range(3):
                sum_forces[i] += force[i]
            pressure_list[0] += pressure
        if self.echo_level > 1:
            info_msg = "Computed boundary forces for model part \"" + self.model_part_name + "\" in solver: \"" + self.settings["solver"].GetString() + "\""
            cs_tools.cs_print_info(self._ClassName(), info_msg)
        return velocity + sum_forces + pressure_list
    def _GetFileHeader(self):
        """Return the two-line ASCII header written at the top of the output file."""
        header = '# Global force for model part ' + self.model_part_name + '\n'
        header += '# Time vel_x vel_y vel_z f_x f_y f_z p\n'
        return header
    @classmethod
    def _GetDefaultParameters(cls):
        """Defaults merged with the user settings and the base-class defaults."""
        this_defaults = KM.Parameters("""{
            "solver" : "UNSPECIFIED",
            "model_part_name" : "",
            "interval" : [0.0, 1e30],
            "print_format" : ".8f",
            "width" : 1.0,
            "write_output_file" : true,
            "output_file_settings" : {}
        }""")
        this_defaults.AddMissingParameters(super()._GetDefaultParameters())
        return this_defaults
| 46.692857 | 157 | 0.62261 |
ace1f7c63118bc09d97121206936172bc1f26536 | 1,530 | py | Python | tests/ssg_test_suite/template.py | rhmdnd/content | 478c60314b7a1692920a4031b51f4b6b3a6f25a0 | [
"BSD-3-Clause"
] | null | null | null | tests/ssg_test_suite/template.py | rhmdnd/content | 478c60314b7a1692920a4031b51f4b6b3a6f25a0 | [
"BSD-3-Clause"
] | null | null | null | tests/ssg_test_suite/template.py | rhmdnd/content | 478c60314b7a1692920a4031b51f4b6b3a6f25a0 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/python3
from __future__ import print_function
from ssg_test_suite import rule
class TemplateChecker(rule.RuleChecker):
    """
    The template mode tests every rule that uses the specified
    target template.
    If the target template doesn't have tests, only the tests
    from the rules are executed.
    If there are templated test scenarios they are executed together
    with any extra tests available for the rule.
    """
    def __init__(self, test_env):
        super(TemplateChecker, self).__init__(test_env)
        # Tells the base RuleChecker that rules are selected by template.
        self.target_type = "template"
    def _rule_matches_rule_spec(self, rule_short_id):
        # Template mode never filters by rule id — every rule is a candidate.
        return True
    def _rule_matches_template_spec(self, template):
        # A rule matches when it is built from one of the requested templates.
        return (template in self.template_spec)
def perform_template_check(options):
    """Run the template test mode, configured from the parsed *options*."""
    checker = TemplateChecker(options.test_env)

    # forward the run configuration from the parsed options onto the checker
    for attr in (
        "datastream", "benchmark_id", "remediate_using", "dont_clean", "no_reports",
        "benchmark_cpes", "scenarios_regex", "slice_current", "slice_total",
        "scenarios_profile",
    ):
        setattr(checker, attr, getattr(options, attr))

    # no debug option is provided for template mode
    checker.manual_debug = False
    # rules are selected through the template spec, not through a rule spec
    checker.rule_spec = None
    checker.template_spec = options.target

    checker.test_target()
| 31.22449 | 68 | 0.75098 |
ace1f83c7896c73bf810a0702395a1836e55ad1c | 1,448 | py | Python | utils.py | osiriszjq/RND-pytorch | 17582a90cfcb84b6e0dc8c5cfba8b9c5472c7fb3 | [
"MIT"
] | 43 | 2018-12-11T22:41:12.000Z | 2022-01-05T14:45:41.000Z | utils.py | osiriszjq/RND-pytorch | 17582a90cfcb84b6e0dc8c5cfba8b9c5472c7fb3 | [
"MIT"
] | 5 | 2019-03-19T01:35:59.000Z | 2019-10-08T16:57:48.000Z | utils.py | osiriszjq/RND-pytorch | 17582a90cfcb84b6e0dc8c5cfba8b9c5472c7fb3 | [
"MIT"
] | 7 | 2019-01-31T13:18:17.000Z | 2022-02-08T13:04:06.000Z | import numpy as np
import torch
from torch._six import inf
class RunningMeanStd(object):
    """Tracks a running mean and variance over batches of samples.

    Uses the parallel moment-combination formula, see
    https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Parallel_algorithm
    """

    def __init__(self, epsilon=1e-4, shape=()):
        # start from a tiny pseudo-count so the very first update is well defined
        self.mean = np.zeros(shape, 'float64')
        self.var = np.ones(shape, 'float64')
        self.count = epsilon

    def update(self, x):
        # reduce the batch to its moments, then merge them into the running state
        self.update_from_moments(np.mean(x, axis=0), np.var(x, axis=0), x.shape[0])

    def update_from_moments(self, batch_mean, batch_var, batch_count):
        delta = batch_mean - self.mean
        total = self.count + batch_count

        # combined mean: shift by the weighted difference of the two means
        merged_mean = self.mean + delta * batch_count / total

        # combined second central moment (Chan et al. parallel formula)
        m2 = (
            self.var * (self.count)
            + batch_var * (batch_count)
            + np.square(delta) * self.count * batch_count / (self.count + batch_count)
        )

        self.mean = merged_mean
        self.var = m2 / (self.count + batch_count)
        self.count = batch_count + self.count
class RewardForwardFilter(object):
    """Maintains a discounted running sum of rewards across successive updates."""

    def __init__(self, gamma):
        # running discounted sum, lazily seeded on the first update
        self.rewems = None
        self.gamma = gamma

    def update(self, rews):
        if self.rewems is None:
            # first call: seed the running sum with the raw rewards
            self.rewems = rews
            return self.rewems
        self.rewems = self.rewems * self.gamma + rews
        return self.rewems
ace1f85a783583a353fb4fc217a69c035e60b679 | 263 | py | Python | acred/reviewer/credibility/__init__.py | rdenaux/acred | ffe44953a96338acfe3860a9898e7f0b70b5c9cb | [
"Apache-2.0"
] | 8 | 2020-08-31T04:14:22.000Z | 2021-09-29T06:00:31.000Z | acred/reviewer/credibility/__init__.py | expertailab/acred | ee45840c942ef2fac4f26da8d756b7c47e42847c | [
"Apache-2.0"
] | null | null | null | acred/reviewer/credibility/__init__.py | expertailab/acred | ee45840c942ef2fac4f26da8d756b7c47e42847c | [
"Apache-2.0"
] | 1 | 2020-10-07T08:09:29.000Z | 2020-10-07T08:09:29.000Z | #
# Copyright (c) 2020 Expert System Iberia
#
"""Implements reviewer Bots for *credibility* reviewAspect
Each Bot has a method `review` which accepts some `item` to review and a
`config`uration map.
"""
# actual init?
# circular imports and visible submodules
| 21.916667 | 72 | 0.745247 |
ace1f86f755866814f48c61c7d15bfeb18f87edf | 2,872 | py | Python | python/hybrid/hybrid_decrypt_wrapper.py | taymonbeal/tink | aedd6d6bbe8dc93cba0c501f8608ba6d84162d0b | [
"Apache-2.0"
] | 1 | 2020-02-20T17:42:08.000Z | 2020-02-20T17:42:08.000Z | python/hybrid/hybrid_decrypt_wrapper.py | taymonbeal/tink | aedd6d6bbe8dc93cba0c501f8608ba6d84162d0b | [
"Apache-2.0"
] | null | null | null | python/hybrid/hybrid_decrypt_wrapper.py | taymonbeal/tink | aedd6d6bbe8dc93cba0c501f8608ba6d84162d0b | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""HybridDecrypt wrapper."""
from __future__ import absolute_import
from __future__ import division
# Placeholder for import for type annotations
from __future__ import print_function
from absl import logging
from typing import Type
from tink.python.core import crypto_format
from tink.python.core import primitive_set
from tink.python.core import primitive_wrapper
from tink.python.core import tink_error
from tink.python.hybrid import hybrid_decrypt
class _WrappedHybridDecrypt(hybrid_decrypt.HybridDecrypt):
  """Implements HybridDecrypt for a set of HybridDecrypt primitives.

  Decryption first tries all primitives whose key prefix matches the
  ciphertext prefix, then falls back to all RAW-prefixed primitives.
  """

  def __init__(self, pset: primitive_set.PrimitiveSet):
    self._primitive_set = pset

  def decrypt(self, ciphertext: bytes, context_info: bytes) -> bytes:
    """Decrypts ciphertext with the first key in the set that succeeds.

    Args:
      ciphertext: the data to decrypt, possibly carrying a key prefix.
      context_info: context info that was bound to the ciphertext.

    Returns:
      The decrypted plaintext.

    Raises:
      tink_error.TinkError: if no key in the set can decrypt the ciphertext.
    """
    if len(ciphertext) > crypto_format.NON_RAW_PREFIX_SIZE:
      prefix = ciphertext[:crypto_format.NON_RAW_PREFIX_SIZE]
      ciphertext_no_prefix = ciphertext[crypto_format.NON_RAW_PREFIX_SIZE:]
      for entry in self._primitive_set.primitive_from_identifier(prefix):
        try:
          return entry.primitive.decrypt(ciphertext_no_prefix,
                                         context_info)
        except tink_error.TinkError as e:
          logging.info(
              'ciphertext prefix matches a key, but cannot decrypt: %s', e)
    # Let's try all RAW keys.
    for entry in self._primitive_set.raw_primitives():
      try:
        return entry.primitive.decrypt(ciphertext, context_info)
      except tink_error.TinkError:
        # this key cannot decrypt the ciphertext, silently try the next one
        pass
    # nothing works.
    raise tink_error.TinkError('Decryption failed.')
class HybridDecryptWrapper(
    primitive_wrapper.PrimitiveWrapper[hybrid_decrypt.HybridDecrypt]):
  """HybridDecryptWrapper is the PrimitiveWrapper for HybridDecrypt.
  The returned primitive works with a keyset (rather than a single key). To
  decrypt, the primitive uses the prefix of the ciphertext to efficiently select
  the right key in the set. If the keys associated with the prefix do not work,
  the primitive tries all keys with OutputPrefixType RAW.
  """
  def wrap(self,
           pset: primitive_set.PrimitiveSet) -> hybrid_decrypt.HybridDecrypt:
    # expose the whole primitive set behind a single HybridDecrypt facade
    return _WrappedHybridDecrypt(pset)
  def primitive_class(self) -> Type[hybrid_decrypt.HybridDecrypt]:
    # the primitive type this wrapper produces and consumes
    return hybrid_decrypt.HybridDecrypt
| 38.293333 | 80 | 0.752786 |
ace1f8a73d4f8504ec90c5720fa96e52d5a4fb7e | 9,369 | py | Python | airflow_client/test/test_connection_collection.py | sptsakcg/airflow-client-python | 38a14e8641988f8cc88a5b27632ef4198bdb2843 | [
"Apache-2.0"
] | null | null | null | airflow_client/test/test_connection_collection.py | sptsakcg/airflow-client-python | 38a14e8641988f8cc88a5b27632ef4198bdb2843 | [
"Apache-2.0"
] | null | null | null | airflow_client/test/test_connection_collection.py | sptsakcg/airflow-client-python | 38a14e8641988f8cc88a5b27632ef4198bdb2843 | [
"Apache-2.0"
] | null | null | null | """
Airflow API (Stable)
# Overview To facilitate management, Apache Airflow supports a range of REST API endpoints across its objects. This section provides an overview of the API design, methods, and supported use cases. Most of the endpoints accept `JSON` as input and return `JSON` responses. This means that you must usually add the following headers to your request: ``` Content-type: application/json Accept: application/json ``` ## Resources The term `resource` refers to a single type of object in the Airflow metadata. An API is broken up by its endpoint's corresponding resource. The name of a resource is typically plural and expressed in camelCase. Example: `dagRuns`. Resource names are used as part of endpoint URLs, as well as in API parameters and responses. ## CRUD Operations The platform supports **C**reate, **R**ead, **U**pdate, and **D**elete operations on most resources. You can review the standards for these operations and their standard parameters below. Some endpoints have special behavior as exceptions. ### Create To create a resource, you typically submit an HTTP `POST` request with the resource's required metadata in the request body. The response returns a `201 Created` response code upon success with the resource's metadata, including its internal `id`, in the response body. ### Read The HTTP `GET` request can be used to read a resource or to list a number of resources. A resource's `id` can be submitted in the request parameters to read a specific resource. The response usually returns a `200 OK` response code upon success, with the resource's metadata in the response body. If a `GET` request does not include a specific resource `id`, it is treated as a list request. The response usually returns a `200 OK` response code upon success, with an object containing a list of resources' metadata in the response body. When reading resources, some common query parameters are usually available. 
e.g.: ``` v1/connections?limit=25&offset=25 ``` |Query Parameter|Type|Description| |---------------|----|-----------| |limit|integer|Maximum number of objects to fetch. Usually 25 by default| |offset|integer|Offset after which to start returning objects. For use with limit query parameter.| ### Update Updating a resource requires the resource `id`, and is typically done using an HTTP `PATCH` request, with the fields to modify in the request body. The response usually returns a `200 OK` response code upon success, with information about the modified resource in the response body. ### Delete Deleting a resource requires the resource `id` and is typically executing via an HTTP `DELETE` request. The response usually returns a `204 No Content` response code upon success. ## Conventions - Resource names are plural and expressed in camelCase. - Names are consistent between URL parameter name and field name. - Field names are in snake_case. ```json { \"name\": \"string\", \"slots\": 0, \"occupied_slots\": 0, \"used_slots\": 0, \"queued_slots\": 0, \"open_slots\": 0 } ``` ### Update Mask Update mask is available as a query parameter in patch endpoints. It is used to notify the API which fields you want to update. Using `update_mask` makes it easier to update objects by helping the server know which fields to update in an object instead of updating all fields. The update request ignores any fields that aren't specified in the field mask, leaving them with their current values. Example: ``` resource = request.get('/resource/my-id').json() resource['my_field'] = 'new-value' request.patch('/resource/my-id?update_mask=my_field', data=json.dumps(resource)) ``` ## Versioning and Endpoint Lifecycle - API versioning is not synchronized to specific releases of the Apache Airflow. - APIs are designed to be backward compatible. - Any changes to the API will first go through a deprecation phase. 
# Summary of Changes | Airflow version | Description | |-|-| | v2.0 | Initial release | | v2.0.2 | Added /plugins endpoint | | v2.1 | New providers endpoint | # Trying the API You can use a third party airflow_client.client, such as [curl](https://curl.haxx.se/), [HTTPie](https://httpie.org/), [Postman](https://www.postman.com/) or [the Insomnia rest airflow_client.client](https://insomnia.rest/) to test the Apache Airflow API. Note that you will need to pass credentials data. For e.g., here is how to pause a DAG with [curl](https://curl.haxx.se/), when basic authorization is used: ```bash curl -X PATCH 'https://example.com/api/v1/dags/{dag_id}?update_mask=is_paused' \\ -H 'Content-Type: application/json' \\ --user \"username:password\" \\ -d '{ \"is_paused\": true }' ``` Using a graphical tool such as [Postman](https://www.postman.com/) or [Insomnia](https://insomnia.rest/), it is possible to import the API specifications directly: 1. Download the API specification by clicking the **Download** button at top of this document 2. Import the JSON specification in the graphical tool of your choice. - In *Postman*, you can click the **import** button at the top - With *Insomnia*, you can just drag-and-drop the file on the UI Note that with *Postman*, you can also generate code snippets by selecting a request and clicking on the **Code** button. ## Enabling CORS [Cross-origin resource sharing (CORS)](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS) is a browser security feature that restricts HTTP requests that are initiated from scripts running in the browser. For details on enabling/configuring CORS, see [Enabling CORS](https://airflow.apache.org/docs/apache-airflow/stable/security/api.html). # Authentication To be able to meet the requirements of many organizations, Airflow supports many authentication methods, and it is even possible to add your own method. 
If you want to check which auth backend is currently set, you can use `airflow config get-value api auth_backend` command as in the example below. ```bash $ airflow config get-value api auth_backend airflow.api.auth.backend.basic_auth ``` The default is to deny all requests. For details on configuring the authentication, see [API Authorization](https://airflow.apache.org/docs/apache-airflow/stable/security/api.html). # Errors We follow the error response format proposed in [RFC 7807](https://tools.ietf.org/html/rfc7807) also known as Problem Details for HTTP APIs. As with our normal API responses, your airflow_client.client must be prepared to gracefully handle additional members of the response. ## Unauthenticated This indicates that the request has not been applied because it lacks valid authentication credentials for the target resource. Please check that you have valid credentials. ## PermissionDenied This response means that the server understood the request but refuses to authorize it because it lacks sufficient rights to the resource. It happens when you do not have the necessary permission to execute the action you performed. You need to get the appropriate permissions in other to resolve this error. ## BadRequest This response means that the server cannot or will not process the request due to something that is perceived to be a airflow_client.client error (e.g., malformed request syntax, invalid request message framing, or deceptive request routing). To resolve this, please ensure that your syntax is correct. ## NotFound This airflow_client.client error response indicates that the server cannot find the requested resource. ## MethodNotAllowed Indicates that the request method is known by the server but is not supported by the target resource. 
## NotAcceptable The target resource does not have a current representation that would be acceptable to the user agent, according to the proactive negotiation header fields received in the request, and the server is unwilling to supply a default representation. ## AlreadyExists The request could not be completed due to a conflict with the current state of the target resource, e.g. the resource it tries to create already exists. ## Unknown This means that the server encountered an unexpected condition that prevented it from fulfilling the request. # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: dev@airflow.apache.org
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import airflow_client.client
from airflow_client.client.model.collection_info import CollectionInfo
from airflow_client.client.model.connection_collection_all_of import ConnectionCollectionAllOf
from airflow_client.client.model.connection_collection_item import ConnectionCollectionItem
# NOTE(review): generated code — presumably makes the composed-schema models
# resolvable by name at runtime during deserialization; confirm against the
# openapi-generator templates
globals()['CollectionInfo'] = CollectionInfo
globals()['ConnectionCollectionAllOf'] = ConnectionCollectionAllOf
globals()['ConnectionCollectionItem'] = ConnectionCollectionItem
from airflow_client.client.model.connection_collection import ConnectionCollection
class TestConnectionCollection(unittest.TestCase):
    """ConnectionCollection unit test stubs"""
    def setUp(self):
        # no fixtures required for these generated stubs
        pass
    def tearDown(self):
        # nothing to clean up
        pass
    def testConnectionCollection(self):
        """Test ConnectionCollection"""
        # FIXME: construct object with mandatory attributes with example values
        # model = ConnectionCollection() # noqa: E501
        pass
if __name__ == '__main__':
    # run the test stubs above when this module is executed directly
    unittest.main()
| 217.883721 | 8,173 | 0.765823 |
ace1f91ec6aa303209ffa7e599924509e0141523 | 352 | py | Python | loopchain/blockchain/blocks/__init__.py | windies21/loopchain | 6e96c8a7e006747af04187155678f2fae59e1389 | [
"Apache-2.0"
] | 105 | 2018-04-03T05:29:08.000Z | 2022-01-28T17:33:20.000Z | loopchain/blockchain/blocks/__init__.py | laurenceyoon/loopchain | e87032779be4715c135c2c91d2757d9c63bf4e31 | [
"Apache-2.0"
] | 135 | 2018-09-04T07:11:02.000Z | 2021-12-15T06:25:47.000Z | loopchain/blockchain/blocks/__init__.py | laurenceyoon/loopchain | e87032779be4715c135c2c91d2757d9c63bf4e31 | [
"Apache-2.0"
] | 46 | 2018-05-07T09:12:07.000Z | 2022-02-23T09:58:37.000Z | from .block import Block, BlockHeader, BlockBody, _dict__str__, NextRepsChangeReason
from .block_builder import BlockBuilder
from .block_serializer import BlockSerializer
from .block_verifier import BlockVerifier
from .block_prover import BlockProver, BlockProverType
from .block_versioner import BlockVersioner
from . import v0_1a
from . import v0_3
| 35.2 | 84 | 0.855114 |
ace1faa8d765711d110fa71e483c0408f365612e | 30,987 | py | Python | law/workflow/base.py | HerrHorizontal/law | c31091d3bf39a25e79b3796ed5742346ddff8b77 | [
"BSD-3-Clause"
] | null | null | null | law/workflow/base.py | HerrHorizontal/law | c31091d3bf39a25e79b3796ed5742346ddff8b77 | [
"BSD-3-Clause"
] | null | null | null | law/workflow/base.py | HerrHorizontal/law | c31091d3bf39a25e79b3796ed5742346ddff8b77 | [
"BSD-3-Clause"
] | null | null | null | # coding: utf-8
"""
Workflow and workflow proxy base class definitions.
"""
__all__ = ["BaseWorkflow", "workflow_property", "cached_workflow_property"]
import sys
import re
import functools
import logging
from collections import OrderedDict
from abc import abstractmethod
import luigi
import six
from law.task.base import Task, Register
from law.task.proxy import ProxyTask, get_proxy_attribute
from law.target.collection import TargetCollection
from law.parameter import NO_STR, NO_INT, MultiRangeParameter
from law.util import (
no_value, make_list, iter_chunks, range_expand, range_join, create_hash, DotDict,
)
logger = logging.getLogger(__name__)
class BaseWorkflowProxy(ProxyTask):
    """
    Base class of all workflow proxies.
    .. py:classattribute:: workflow_type
       type: string
       The named type of the workflow. This attribute refers to the value of the ``--workflow``
       parameter on the command line to select a particular workflow.
    .. py:attribute:: task
       type: Task
       Reference to the actual *workflow* task.
    """
    # to be set by subclasses; matched against the --workflow parameter value
    workflow_type = None
    # when True, the proxy's run method is wrapped with the task's configured decorators
    add_workflow_run_decorators = True
    def __init__(self, *args, **kwargs):
        super(BaseWorkflowProxy, self).__init__(*args, **kwargs)
        # find decorators for this proxy's run method that can be configured on the actual task,
        # preferring "<workflow_type>_workflow_run_decorators" over plain
        # "workflow_run_decorators"
        if self.add_workflow_run_decorators:
            for prefix in [self.workflow_type + "_", ""]:
                attr = "{}workflow_run_decorators".format(prefix)
                decorators = getattr(self.task, attr, None)
                if decorators is not None:
                    # found decorators, so unbound, decorate and re-bound
                    run_func = self.run.__func__
                    for decorator in decorators:
                        run_func = decorator(run_func)
                    # re-bind the decorated function to this instance via the descriptor protocol
                    self.run = run_func.__get__(self)
                    break
        # flag used by run() to make sure the branch map is reset at most once
        self._workflow_has_reset_branch_map = False
    def _get_task_attribute(self, name, fallback=False):
        """
        Return an attribute of the actual task named ``<workflow_type>_<name>``. When the attribute
        does not exist and *fallback* is *True*, try to return the task attribute simply named
        *name*. In any case, if a requested task attribute is eventually not found, an
        AttributeError is raised.
        """
        attr = "{}_{}".format(self.workflow_type, name)
        if fallback:
            value = getattr(self.task, attr, no_value)
            # only fall back to the plain attribute when the specific one is missing
            if value != no_value:
                return value
            else:
                return getattr(self.task, name)
        else:
            return getattr(self.task, attr)
    def complete(self):
        """
        Custom completion check that invokes the task's *workflow_complete* if it is callable, or
        just does the default completion check otherwise.
        """
        if callable(self.task.workflow_complete):
            return self.task.workflow_complete()
        else:
            return super(BaseWorkflowProxy, self).complete()
    def requires(self):
        """
        Returns the default workflow requirements in an ordered dictionary, which is updated with
        the return value of the task's *workflow_requires* method.
        """
        reqs = DotDict()
        workflow_reqs = self.task.workflow_requires()
        if workflow_reqs:
            reqs.update(workflow_reqs)
        return reqs
    def output(self):
        """
        Returns the default workflow outputs in an ordered dictionary. At the moment this is just
        the collection of outputs of the branch tasks, stored with the key ``"collection"``.
        """
        cls = self.task.output_collection_cls or TargetCollection
        targets = luigi.task.getpaths(self.task.get_branch_tasks())
        collection = cls(targets, threshold=self.threshold(len(targets)))
        return DotDict([("collection", collection)])
    def threshold(self, n=None):
        """
        Returns the threshold number of tasks that need to be complete in order to consider the
        workflow as being complete itself. This takes into account the
        :py:attr:`law.BaseWorkflow.acceptance` parameter of the workflow. The threshold is passed
        to the :py:class:`law.TargetCollection` (or :py:class:`law.SiblingFileCollection`) within
        :py:meth:`output`. By default, the maximum number of tasks is taken from the length of the
        branch map. For performance purposes, you can set this value, *n*, directly.
        """
        if n is None:
            n = len(self.task.get_branch_map())
        acceptance = self.task.acceptance
        # acceptance <= 1 is interpreted as a fraction of n, larger values as absolute counts
        return (acceptance * n) if acceptance <= 1 else acceptance
    def run(self):
        """
        Default run implementation that resets the branch map once if requested.
        """
        if self.task.reset_branch_map_before_run and not self._workflow_has_reset_branch_map:
            self._workflow_has_reset_branch_map = True
            # reset cached branch map, branch tasks and boundaries
            self.task._branch_map = None
            self.task._branch_tasks = None
            self.task.start_branch = self.task._initial_start_branch
            self.task.end_branch = self.task._initial_end_branch
            self.task.branches = self.task._initial_branches
def workflow_property(func):
    """
    Decorator to declare a property that is stored only on a workflow but is also accessible
    from branch tasks. Internally, the owning task's *as_workflow* method is used to obtain the
    workflow task, and the decorated method is invoked on that workflow. Useful for a property
    that is common (and mutable) to a workflow and all its branch tasks, e.g. static data.
    Example:

    .. code-block:: python

        class MyTask(Workflow):

            def __init__(self, *args, **kwargs):
                super(MyTask, self).__init__(*args, **kwargs)

                if self.is_workflow():
                    self._common_data = some_demanding_computation()

            @workflow_property
            def common_data(self):
                # this method is always called with *self* being the *workflow*
                return self._common_data
    """
    @functools.wraps(func)
    def getter(self):
        # always evaluate the decorated method on the workflow task
        return func(self.as_workflow())

    return property(getter)
def cached_workflow_property(func=None, empty_value=no_value, attr=None, setter=True):
    """
    Decorator to declare an attribute that is stored only on a workflow and cached for subsequent
    calls, so the decorated method is a (lazy) provider of the value. When the provided value
    equals *empty_value*, it is not cached and the next access invokes the method again. The value
    is stored as ``_workflow_cached_<func.__name__>`` on the workflow unless *attr* overrides the
    name. Unless *setter* is *False*, a property setter is added that overwrites the cached value.
    Example:

    .. code-block:: python

        class MyTask(Workflow):

            @cached_workflow_property
            def common_data(self):
                # this method is always called with *self* being the *workflow*
                return some_demanding_computation()

            @cached_workflow_property(attr="my_own_property", setter=False)
            def common_data2(self):
                return some_other_computation()
    """
    def decorator(func):
        cache_attr = attr or "_workflow_cached_" + func.__name__

        @functools.wraps(func)
        def fget(self):
            wf = self.as_workflow()
            # (re)compute and cache when the stored value is missing or still "empty"
            if getattr(wf, cache_attr, empty_value) == empty_value:
                setattr(wf, cache_attr, func(wf))
            return getattr(wf, cache_attr)

        fset = None
        if setter:
            def fset(self, value):
                # overwrite the cached value on the workflow
                wf = self.as_workflow()
                setattr(wf, cache_attr, value)

            fset.__name__ = func.__name__

        return property(fget=fget, fset=fset)

    # support usage both with and without parentheses
    return decorator(func) if func else decorator
class WorkflowRegister(Register):
    """
    Task register metaclass for workflow classes.
    """
    def __init__(cls, name, bases, classdict):
        super(WorkflowRegister, cls).__init__(name, bases, classdict)
        # store a flag on the created class whether it defined a new workflow_proxy_cls
        # this flag will define the classes in the mro to consider for instantiating the proxy
        cls._defined_workflow_proxy = "workflow_proxy_cls" in classdict
class BaseWorkflow(six.with_metaclass(WorkflowRegister, Task)):
"""
Base class of all workflows.
.. py:classattribute:: workflow
type: luigi.Parameter
Workflow type that refers to the workflow proxy implementation at instantiation / execution
time. Empty default value.
.. py:classattribute:: acceptance
type: luigi.FloatParameter
Number of complete tasks to consider the workflow successful. Values larger than one are
interpreted as absolute numbers, and as fractions otherwise. Defaults to *1.0*.
.. py:classattribute:: tolerance
type: luigi.FloatParameter
Number of failed tasks to still consider the workflow successful. Values larger than one are
interpreted as absolute numbers, and as fractions otherwise. Defaults to *0.0*.
.. py:classattribute:: branch
type: luigi.IntParameter
The branch number to run this task for. *-1* means that this task is the actual *workflow*,
rather than a *branch* task. Defaults to *-1*.
.. py:classattribute:: start_branch
type: luigi.IntParameter
First branch to process. Defaults to *0*.
.. py:classattribute:: end_branch
type: luigi.IntParameter
First branch that is *not* processed (pythonic). Defaults to *-1*.
.. py:classattribute:: branches
type: law.MultiRangeParameter
Explicit list of branches to process. Empty default value.
.. py:classattribute:: workflow_proxy_cls
type: BaseWorkflowProxy
Reference to the workflow proxy class associated to this workflow.
.. py:classattribute:: workflow_complete
type: None, callable
Custom completion check that is used by the workflow's proxy when callable.
.. py:classattribute:: output_collection_cls
type: TargetCollection
Configurable target collection class to use, such as
:py:class:`target.collection.TargetCollection`, :py:class:`target.collection.FileCollection`
or :py:class:`target.collection.SiblingFileCollection`.
.. py:classattribute:: force_contiguous_branches
type: bool
Flag that denotes if this workflow is forced to use contiguous branch numbers, starting from
0. If *False*, an exception is raised otherwise.
.. py:classattribute:: reset_branch_map_before_run
type: bool
Flag that denotes whether the branch map should be recreated from scratch before the run
method of the underlying workflow proxy is called.
.. py:classattribute:: create_branch_map_before_repr
type: bool
Flag that denotes whether the branch map should be created (if not already done) before the
task representation is created via :py:meth:`repr`.
.. py:classattribute:: workflow_property
type: function
Reference to :py:func:`workflow_property`.
.. py:classattribute:: cached_workflow_property
type: function
Reference to :py:func:`cached_workflow_property`.
.. py:classattribute:: workflow_run_decorators
type: sequence, None
Sequence of decorator functions that will be conveniently used to decorate the workflow
proxy's run method. This way, there is no need to subclass and reset the
:py:attr:`workflow_proxy_cls` just to add a decorator. The value is *None* by default.
.. py:attribute:: workflow_cls
type: law.task.Register
Reference to the class of the realized workflow. This is especially helpful in case your
derived class inherits from multiple workflows.
.. py:attribute:: workflow_proxy
type: BaseWorkflowProxy
Reference to the underlying workflow proxy instance.
.. py:attribute:: branch_map
read-only
type: dict
Shorthand for :py:meth:`get_branch_map`.
.. py:attribute:: branch_data
read-only
Shorthand for ``self.branch_map[self.branch]``.
"""
workflow = luigi.Parameter(default=NO_STR, significant=False, description="the type of the "
"workflow to use; uses the first workflow type in the MRO when empty; default: empty")
acceptance = luigi.FloatParameter(default=1.0, significant=False, description="number of "
"finished tasks to consider the task successful; relative fraction (<= 1) or absolute "
"value (> 1); default: 1.0")
tolerance = luigi.FloatParameter(default=0.0, significant=False, description="number of failed "
"tasks to still consider the task successful; relative fraction (<= 1) or absolute value "
"(> 1); default: 0.0")
pilot = luigi.BoolParameter(default=False, significant=False, description="disable "
"requirements of the workflow to let branch tasks resolve requirements on their own; "
"default: False")
branch = luigi.IntParameter(default=-1, description="the branch number/index to run this "
"task for; -1 means this task is the workflow; default: -1")
start_branch = luigi.IntParameter(default=NO_INT, description="the branch to start at; empty "
"value means first; default: empty")
end_branch = luigi.IntParameter(default=NO_INT, description="the branch to end at; the end "
"itself is not included; empty value means last; default: empty")
branches = MultiRangeParameter(default=(), require_start=False, require_end=False,
single_value=True, description="comma-separated list of branches to select; each value can "
"have the format 'start:stop' (inclusive) to support range syntax; has precedence over "
"--startBranch and --endBranch when set; default: empty")
# configuration members
workflow_proxy_cls = BaseWorkflowProxy
output_collection_cls = None
force_contiguous_branches = False
reset_branch_map_before_run = False
create_branch_map_before_repr = True
workflow_run_decorators = None
workflow_complete = None
# accessible properties
workflow_property = None
cached_workflow_property = None
exclude_index = True
exclude_params_branch = {
"workflow", "acceptance", "tolerance", "pilot", "start_branch", "end_branch", "branches",
}
exclude_params_workflow = {"branch"}
    def __init__(self, *args, **kwargs):
        super(BaseWorkflow, self).__init__(*args, **kwargs)
        # cached attributes for the workflow
        self._branch_map = None
        self._branch_tasks = None
        self._cache_branches = True
        # cached attributes for branches
        self._workflow_task = None
        # store original branch boundaries, so run() can restore them after a reset
        self._initial_start_branch = self.start_branch
        self._initial_end_branch = self.end_branch
        self._initial_branches = tuple(self.branches)
        # determine workflow proxy class to instantiate
        if self.is_workflow():
            classes = self.__class__.mro()
            for cls in classes:
                if not issubclass(cls, BaseWorkflow):
                    continue
                # only classes that defined their own workflow_proxy_cls are considered
                if not cls._defined_workflow_proxy:
                    continue
                # select the first class whose proxy type matches the --workflow value,
                # or simply the first candidate when no explicit type was requested
                if self.workflow in (NO_STR, cls.workflow_proxy_cls.workflow_type):
                    self.workflow = cls.workflow_proxy_cls.workflow_type
                    self.workflow_cls = cls
                    self.workflow_proxy = cls.workflow_proxy_cls(task=self)
                    logger.debug("created workflow proxy instance of type '{}'".format(
                        cls.workflow_proxy_cls.workflow_type))
                    break
            else:
                raise ValueError("unknown workflow type {}".format(self.workflow))
    def __getattribute__(self, attr, proxy=True):
        # route attribute access through the workflow proxy machinery when applicable
        return get_proxy_attribute(self, attr, proxy=proxy, super_cls=Task)
    def repr(self, *args, **kwargs):
        # optionally build the branch map first — presumably so the representation reflects
        # resolved branch parameters; confirm against get_branch_map
        if self.create_branch_map_before_repr:
            self.get_branch_map()
        return super(BaseWorkflow, self).repr(*args, **kwargs)
def cli_args(self, exclude=None, replace=None):
exclude = set() if exclude is None else set(make_list(exclude))
if self.is_branch():
exclude |= self.exclude_params_branch
else:
exclude |= self.exclude_params_workflow
return super(BaseWorkflow, self).cli_args(exclude=exclude, replace=replace)
def _repr_params(self, *args, **kwargs):
params = super(BaseWorkflow, self)._repr_params(*args, **kwargs)
if self.is_workflow():
# when this is a workflow, add the workflow type
if "workflow" not in params:
params["workflow"] = self.workflow
else:
# when this is a branch, remove workflow parameters
for param in self.exclude_params_branch:
params.pop(param, None)
return params
def is_branch(self):
"""
Returns whether or not this task refers to a *branch*.
"""
return self.branch != -1
def is_workflow(self):
"""
Returns whether or not this task refers to the *workflow*.
"""
return not self.is_branch()
def as_branch(self, branch=None):
"""
When this task refers to the workflow, a re-instantiated task with identical parameters and
a certain *branch* value, defaulting to 0, is returned. When this task is already a branch
task, the task itself is returned when *branch* is *None* or matches this task's branch
value. Otherwise, a new branch task with that value and identical parameters is created and
returned.
"""
if branch == -1:
raise ValueError("branch must not be -1 when selecting a branch task")
if self.is_branch():
if branch is None or branch == self.branch:
return self
else:
return self.req(self, branch=branch, _skip_task_excludes=True)
return self.req(self, branch=branch or 0, _exclude=self.exclude_params_branch,
_skip_task_excludes=True)
def as_workflow(self):
"""
When this task refers to a branch task, a re-instantiated task with ``branch=-1`` and
identical parameters is returned. Otherwise, the workflow itself is returned.
"""
if self.is_workflow():
return self
if self._workflow_task is None:
self._workflow_task = self.req(self, branch=-1, _exclude=self.exclude_params_workflow,
_skip_task_excludes=True)
return self._workflow_task
@abstractmethod
def create_branch_map(self):
"""
Abstract method that must be overwritten by inheriting tasks to define the branch map.
"""
return
def _reset_branch_boundaries(self, branch_map):
if self.is_branch():
raise Exception("calls to _reset_branch_boundaries are forbidden for branch tasks")
# get minimum and maximum branches
min_branch = min(branch_map.keys())
max_branch = max(branch_map.keys())
# reset start_branch, starting from the initial value
self.start_branch = max(min_branch, min(max_branch, self._initial_start_branch))
# reset end_branch, starting from the initial value
self.end_branch = sys.maxsize if self._initial_end_branch < 0 else self._initial_end_branch
self.end_branch = max(self.start_branch, min(max_branch + 1, self.end_branch))
# rejoin branch ranges when given
if self.branches:
branches = range_expand(self.branches, min_value=min_branch, max_value=max_branch)
self.branches = tuple(range_join(branches))
def _reduce_branch_map(self, branch_map):
if self.is_branch():
raise Exception("calls to _reduce_branch_map are forbidden for branch tasks")
# create a set of branches to remove
branches = set(branch_map.keys())
min_branch = min(branches)
max_branch = max(branches)
remove_branches = set()
# apply branch ranges
if self.branches:
requested = set(range_expand(self.branches, min_value=min_branch, max_value=max_branch))
remove_branches |= branches - requested
# apply {start,end}_branch
if 0 <= self.start_branch <= self.end_branch:
remove_branches |= set(range(min_branch, self.start_branch))
remove_branches |= set(range(self.end_branch, max_branch + 1))
# remove from branch map
for b in remove_branches:
del branch_map[b]
def get_branch_map(self, reset_boundaries=True, reduce_branches=True):
"""
Creates and returns the branch map defined in :py:meth:`create_branch_map`. If
*reset_boundaries* is *True*, the *start_branch* and *end_branch* attributes are rearranged
to not exceed the actual branch map length. If *reduce_branches* is *True* and an explicit
list of branch numbers was set, the branch map is filtered accordingly. The branch map is
cached.
"""
if self.is_branch():
return self.as_workflow().get_branch_map(reset_boundaries=reset_boundaries,
reduce_branches=reduce_branches)
if self._branch_map is None:
# create a new branch map
branch_map = self.create_branch_map()
# some type and sanity checks
if isinstance(branch_map, (list, tuple)):
branch_map = dict(enumerate(branch_map))
elif isinstance(branch_map, six.integer_types):
branch_map = dict(enumerate(range(branch_map)))
elif self.force_contiguous_branches:
n = len(branch_map)
if set(branch_map.keys()) != set(range(n)):
raise ValueError("branch map keys must constitute contiguous range "
"[0, {})".format(n))
else:
for branch in branch_map:
if not isinstance(branch, six.integer_types) or branch < 0:
raise ValueError("branch map keys must be non-negative integers, got "
"'{}' ({})".format(branch, type(branch).__name__))
# post-process
if reset_boundaries:
self._reset_branch_boundaries(branch_map)
if reduce_branches:
self._reduce_branch_map(branch_map)
# return the map when we are not going to cache it
if not self._cache_branches:
return branch_map
# cache it
self._branch_map = branch_map
return self._branch_map
@property
def branch_map(self):
return self.get_branch_map()
@property
def branch_data(self):
if self.is_workflow():
raise Exception("calls to branch_data are forbidden for workflow tasks")
branch_map = self.get_branch_map()
if self.branch not in branch_map:
raise ValueError("invalid branch '{}', not found in branch map".format(self.branch))
return branch_map[self.branch]
def get_branch_tasks(self):
"""
Returns a dictionary that maps branch numbers to instantiated branch tasks. As this might be
computationally intensive, the return value is cached.
"""
if self.is_branch():
return self.as_workflow().get_branch_tasks()
if self._branch_tasks is None:
# get all branch tasks according to the map
branch_tasks = OrderedDict()
for b in self.get_branch_map():
branch_tasks[b] = self.as_branch(branch=b)
# return the task when we are not going to cache it
if not self._cache_branches:
return branch_tasks
# cache it
self._branch_tasks = branch_tasks
return self._branch_tasks
def get_branch_chunks(self, chunk_size):
"""
Returns a list of chunks of branch numbers defined in this workflow with a certain
*chunk_size*. Example:
.. code-block:: python
wf = SomeWorkflowTask() # has 8 branches
print(wf.get_branch_chunks(3))
# -> [[0, 1, 2], [3, 4, 5], [6, 7]]
wf2 = SomeWorkflowTask(end_branch=5) # has 5 branches
print(wf2.get_branch_chunks(3))
# -> [[0, 1, 2], [3, 4]]
"""
if self.is_branch():
return self.as_workflow().get_branch_chunks(chunk_size)
# get the branch map and create chunks of its branch values
branch_chunks = iter_chunks(self.get_branch_map().keys(), chunk_size)
return list(branch_chunks)
def get_all_branch_chunks(self, chunk_size, **kwargs):
"""
Returns a list of chunks of all branch numbers of this workflow (i.e. without
*start_branch*, *end_branch* and *branches* parameters applied) with a certain *chunk_size*.
Internally, a new instance of this workflow is created using :py:meth:`BaseTask.req`,
forwarding all *kwargs*. Its *_exclude* list will contain ``["start_branch", "end_branch",
"branches"]`` in order to use all possible branch values. Example:
.. code-block:: python
wf = SomeWorkflowTask() # has 8 branches
print(wf.get_all_branch_chunks(3))
# -> [[0, 1, 2], [3, 4, 5], [6, 7]]
wf2 = SomeWorkflowTask(end_branch=5) # has 5 branches
print(wf2.get_all_branch_chunks(3))
# -> [[0, 1, 2], [3, 4, 5], [6, 7]]
"""
if self.is_branch():
return self.as_workflow().get_all_branch_chunks(chunk_size, **kwargs)
# create a new instance
_exclude = set(kwargs.get("_exclude", set()))
_exclude |= {"start_branch", "end_branch", "branches"}
kwargs["_exclude"] = _exclude
kwargs["_skip_task_excludes"] = True
inst = self.req(self, **kwargs)
# return its branch chunks
return inst.get_branch_chunks(chunk_size)
def get_branches_repr(self, max_ranges=10):
"""
Creates a string representation of the selected branches that can be used as a readable
description or postfix in output paths. When the branches of this workflow are configured
via the *branches* parameter, and there are more than *max_ranges* identified ranges, the
string will contain a unique hash describing those ranges.
"""
branch_map = self.get_branch_map()
if self.branches:
ranges = range_join(list(branch_map.keys()))
if len(ranges) > max_ranges:
return "{}_ranges_{}".format(len(ranges), create_hash(ranges))
else:
return "_".join(("{}" if len(r) == 1 else "{}To{}").format(*r) for r in ranges)
else:
return "{}To{}".format(self.start_branch, self.end_branch)
def workflow_requires(self):
"""
Hook to add workflow requirements. This method is expected to return a dictionary. When
this method is called from a branch task, an exception is raised.
"""
if self.is_branch():
return self.as_workflow().workflow_requires()
return DotDict()
def workflow_input(self):
"""
Returns the output targets if all workflow requirements, comparable to the normal
``input()`` method of plain tasks. When this method is called from a branch task, an
exception is raised.
"""
if self.is_branch():
raise Exception("calls to workflow_input are forbidden for branch tasks")
return luigi.task.getpaths(self.workflow_proxy.requires())
def requires_from_branch(self):
"""
Returns the requirements defined in the standard ``requires()`` method, but called in the
context of the workflow. This method is only recommended in case all required tasks that
would normally take a branch number, are intended to be instantiated with ``branch=-1``.
When this method is called from a branch task, an exception is raised.
"""
if self.is_branch():
raise Exception("calls to requires_from_branch are forbidden for branch tasks")
return self.__class__.requires(self)
def _handle_scheduler_messages(self):
if self.scheduler_messages:
while not self.scheduler_messages.empty():
msg = self.scheduler_messages.get()
self.handle_scheduler_message(msg)
def handle_scheduler_message(self, msg, _attr_value=None):
""" handle_scheduler_message(msg)
Hook that is called when a scheduler message *msg* is received. Returns *True* when the
messages was handled, and *False* otherwise.
Handled messages:
- ``tolerance = <int/float>``
- ``acceptance = <int/float>``
"""
attr, value = _attr_value or (None, None)
# handle "tolerance"
if attr is None:
m = re.match(r"^\s*(tolerance)\s*(\=|\:)\s*(.*)\s*$", str(msg))
if m:
attr = "tolerance"
try:
self.tolerance = float(m.group(3))
value = self.tolerance
except ValueError as e:
value = e
# handle "acceptance"
if attr is None:
m = re.match(r"^\s*(acceptance)\s*(\=|\:)\s*(.*)\s*$", str(msg))
if m:
attr = "acceptance"
try:
self.acceptance = float(m.group(3))
value = self.acceptance
except ValueError as e:
value = e
# respond
if attr:
if isinstance(value, Exception):
msg.respond("cannot set {}: {}".format(attr, value))
logger.info("cannot set {} of task {}: {}".format(attr, self, value))
else:
msg.respond("{} set to {}".format(attr, value))
logger.info("{} of task {} set to {}".format(attr, self, value))
return True
else:
msg.respond("task cannot handle scheduler message: {}".format(msg))
return False
BaseWorkflow.workflow_property = workflow_property
BaseWorkflow.cached_workflow_property = cached_workflow_property
| 38.589041 | 100 | 0.637138 |
ace1fb33791dbaf0e2b7b4da2136a93c41a3362c | 1,378 | py | Python | test/test_versioned_event.py | beli-sk/k8sclient | 3a6102405ee1a65933e328298964f282329fb1f0 | [
"Apache-2.0"
] | null | null | null | test/test_versioned_event.py | beli-sk/k8sclient | 3a6102405ee1a65933e328298964f282329fb1f0 | [
"Apache-2.0"
] | null | null | null | test/test_versioned_event.py | beli-sk/k8sclient | 3a6102405ee1a65933e328298964f282329fb1f0 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
No descripton provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: extensions/v1beta1
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import os
import sys
import unittest
import k8sclient
from k8sclient.rest import ApiException
from k8sclient.models.versioned_event import VersionedEvent
class TestVersionedEvent(unittest.TestCase):
""" VersionedEvent unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testVersionedEvent(self):
"""
Test VersionedEvent
"""
model = k8sclient.models.versioned_event.VersionedEvent()
if __name__ == '__main__':
unittest.main()
| 25.518519 | 104 | 0.717707 |
ace1fb7304d19758d0230771ff5367efba783788 | 2,530 | py | Python | saleor/dashboard/group/views.py | jslegend/python3-django-saleor | 4b93add64e6f612ee9ce4ea3108effab65c2ad31 | [
"BSD-3-Clause"
] | 1 | 2021-01-29T13:28:29.000Z | 2021-01-29T13:28:29.000Z | saleor/dashboard/group/views.py | jslegend/python3-django-saleor | 4b93add64e6f612ee9ce4ea3108effab65c2ad31 | [
"BSD-3-Clause"
] | null | null | null | saleor/dashboard/group/views.py | jslegend/python3-django-saleor | 4b93add64e6f612ee9ce4ea3108effab65c2ad31 | [
"BSD-3-Clause"
] | null | null | null | from __future__ import unicode_literals
from django.contrib import messages
from django.contrib.auth.decorators import permission_required
from django.contrib.auth.models import Group
from django.shortcuts import get_object_or_404, redirect
from django.template.response import TemplateResponse
from django.utils.translation import pgettext_lazy
from ...core.utils import get_paginator_items
from ...settings import DASHBOARD_PAGINATE_BY
from ..views import staff_member_required
from .forms import GroupPermissionsForm
@staff_member_required
@permission_required('userprofile.view_group')
def group_list(request):
groups = [{'name': group, 'permissions': group.permissions.all()}
for group in Group.objects.all().prefetch_related('permissions')]
groups = get_paginator_items(
groups, DASHBOARD_PAGINATE_BY, request.GET.get('page'))
ctx = {'groups': groups}
return TemplateResponse(request, 'dashboard/group/list.html', ctx)
@staff_member_required
@permission_required('userprofile.edit_group')
def group_create(request):
group = Group()
form = GroupPermissionsForm(request.POST or None)
if form.is_valid():
form.save()
messages.success(
request,
pgettext_lazy('Dashboard message',
'Created group'))
return redirect('dashboard:group-list')
ctx = {'group': group, 'form': form}
return TemplateResponse(request, 'dashboard/group/detail.html', ctx)
@staff_member_required
@permission_required('userprofile.edit_group')
def group_details(request, pk):
group = Group.objects.get(pk=pk)
form = GroupPermissionsForm(request.POST or None, instance=group)
if form.is_valid():
form.save()
messages.success(
request,
pgettext_lazy('Dashboard message', 'Updated group %s') % group.name
)
return redirect('dashboard:group-list')
ctx = {'group': group, 'form': form}
return TemplateResponse(request, 'dashboard/group/detail.html', ctx)
@staff_member_required
@permission_required('userprofile.edit_group')
def group_delete(request, pk):
group = get_object_or_404(Group, pk=pk)
if request.method == 'POST':
group.delete()
messages.success(
request,
pgettext_lazy('Dashboard message', 'Deleted group %s') % group
)
return redirect('dashboard:group-list')
return TemplateResponse(
request, 'dashboard/group/modal/confirm_delete.html', {'group': group})
| 35.138889 | 79 | 0.706324 |
ace1fc02020af74cb1e047fd945b526931e7ab32 | 188 | py | Python | web/config.py | pwh19920920/spiders | b0543a2583a296a80dfaaa63ed7f80a00d51cd07 | [
"MIT"
] | 390 | 2020-07-20T05:21:45.000Z | 2022-03-12T14:55:59.000Z | web/config.py | pwh19920920/spiders | b0543a2583a296a80dfaaa63ed7f80a00d51cd07 | [
"MIT"
] | 21 | 2020-04-07T02:06:36.000Z | 2020-07-20T02:10:38.000Z | web/config.py | pwh19920920/spiders | b0543a2583a296a80dfaaa63ed7f80a00d51cd07 | [
"MIT"
] | 108 | 2020-07-23T03:35:54.000Z | 2022-03-12T14:56:04.000Z | import os
from dotenv import load_dotenv
load_dotenv()
ENV = os.getenv("FLASK_ENV") or "production"
SECRET_KEY = os.getenv("SECRET_KEY") or "wongxy"
DEBUG = os.getenv("DEBUG") or False
| 18.8 | 48 | 0.739362 |
ace1fc2dff08970dddc4d82f8faa966385810538 | 9,060 | py | Python | mass_calc/chemical.py | pFindStudio/pGlyco3 | 135b1ff90c47f84d3bdc1096326629fdaab28154 | [
"Apache-2.0"
] | 5 | 2021-03-27T22:21:29.000Z | 2021-11-12T18:36:37.000Z | mass_calc/chemical.py | pFindStudio/pGlyco3 | 135b1ff90c47f84d3bdc1096326629fdaab28154 | [
"Apache-2.0"
] | 13 | 2021-03-06T07:34:30.000Z | 2022-01-20T11:52:22.000Z | mass_calc/chemical.py | pFindStudio/pGlyco3 | 135b1ff90c47f84d3bdc1096326629fdaab28154 | [
"Apache-2.0"
] | null | null | null | def _get():
str_dict = {}
str_dict['X']='1,2;0.9,0.1'
str_dict['H']='1.0078246,2.0141021;0.99985,0.00015'
str_dict['He']='3.01603,4.00260;0.00000138,0.99999862'
str_dict['Li']='6.015121,7.016003;0.075,0.925'
str_dict['Be']='9.012182;1.0'
str_dict['B']='10.012937,11.009305;0.199,0.801'
str_dict['C']='12.0000000,13.0033554;0.988930,0.011070'
str_dict['N']='14.0030732,15.0001088;0.996337,0.003663'
str_dict['O']='15.9949141,16.9991322,17.9991616;0.997590,0.000374,0.002036'
str_dict['F']='18.9984032;1.0'
str_dict['Ne']='19.992435,20.993843,21.991383;0.9048,0.0027,0.0925'
str_dict['Na']='22.989767;1.0'
str_dict['Mg']='23.985042,24.985837,25.982593;0.7899,0.1000,0.1101'
str_dict['Al']='26.981539;1.0'
str_dict['Si']='27.976927,28.976495,29.973770;0.9223,0.0467,0.0310'
str_dict['P']='30.973762;1.0'
str_dict['S']='31.972070,32.971456,33.967866,35.967080;0.9502,0.0075,0.0421,0.0002'
str_dict['Cl']='34.9688531,36.9659034;0.755290,0.244710'
str_dict['Ar']='35.967545,37.962732,39.962384;0.00337,0.00063,0.99600'
str_dict['K']='38.963707,39.963999,40.961825;0.932581,0.000117,0.067302'
str_dict['Ca']='39.962591,41.958618,42.958766,43.955480,45.953689,47.952533;0.96941,0.00647,0.00135,0.02086,0.00004,0.00187'
str_dict['Sc']='44.955910;1.0'
str_dict['Ti']='45.952629,46.951764,47.947947,48.947871,49.944792;0.080,0.073,0.738,0.055,0.054'
str_dict['V']='49.947161,50.943962;0.00250,0.99750'
str_dict['Cr']='49.946046,51.940509,52.940651,53.938882;0.04345,0.83790,0.09500,0.02365'
str_dict['Mn']='54.938047;1.0'
str_dict['Fe']='53.939612,55.934939,56.935396,57.933277;0.0590,0.9172,0.0210,0.0028'
str_dict['Co']='58.933198;1.0'
str_dict['Ni']='57.935346,59.930788,60.931058,61.928346,63.927968;0.6827,0.2610,0.0113,0.0359,0.0091'
str_dict['Cu']='62.939598,64.927793;0.6917,0.3083'
str_dict['Zn']='63.929145,65.926034,66.927129,67.924846,69.925325;0.486,0.279,0.041,0.188,0.006'
str_dict['Ga']='68.925580,70.924700;0.60108,0.39892'
str_dict['Ge']='69.924250,71.922079,72.923463,73.921177,75.921401;0.205,0.274,0.078,0.365,0.078'
str_dict['As']='74.921594;1.0'
str_dict['Se']='73.922475,75.919212,76.919912,77.9190,79.916520,81.916698;0.009,0.091,0.076,0.236,0.499,0.089'
str_dict['Br']='78.918336,80.916289;0.5069,0.4931'
str_dict['Kr']='77.914,79.916380,81.913482,82.914135,83.911507,85.910616;0.0035,0.0225,0.116,0.115,0.570,0.173'
str_dict['Rb']='84.911794,86.909187;0.7217,0.2783'
str_dict['Sr']='83.913430,85.909267,86.908884,87.905619;0.0056,0.0986,0.0700,0.8258'
str_dict['Y']='88.905849;1.0'
str_dict['Zr']='89.904703,90.905644,91.905039,93.906314,95.908275;0.5145,0.1122,0.1715,0.1738,0.0280'
str_dict['Nb']='92.906377;1.0'
str_dict['Mo']='91.906808,93.905085,94.905840,95.904678,96.906020,97.905406,99.907477;0.1484,0.0925,0.1592,0.1668,0.0955,0.2413,0.0963'
str_dict['Tc']='98.0;1.0'
str_dict['Ru']='95.907599,97.905287,98.905939,99.904219,100.905582,101.904348,103.905424;0.0554,0.0186,0.127,0.126,0.171,0.316,0.186'
str_dict['Rh']='102.905500;1.0'
str_dict['Pd']='101.905634,103.904029,104.905079,105.903478,107.903895,109.905167;0.0102,0.1114,0.2233,0.2733,0.2646,0.1172'
str_dict['Ag']='106.905092,108.904757;0.51839,0.48161'
str_dict['Cd']='105.906461,107.904176,109.903005,110.904182,111.902758,112.904400,113.903357,115.904754;0.0125,0.0089,0.1249,0.1280,0.2413,0.1222,0.2873,0.0749'
str_dict['In']='112.904061,114.903880;0.043,0.957'
str_dict['Sn']='111.904826,113.902784,114.903348,115.901747,116.902956,117.901609,118.903310,119.902200,121.903440,123.905274;0.0097,0.0065,0.0036,0.1453,0.0768,0.2422,0.0858,0.3259,0.0463,0.0579'
str_dict['Sb']='120.903821,122.904216;0.574,0.426'
str_dict['Te']='119.904048,121.903054,122.904271,123.902823,124.904433,125.903314,127.904463,129.906229;0.00095,0.0259,0.00905,0.0479,0.0712,0.1893,0.3170,0.3387'
str_dict['I']='126.904473;1.0'
str_dict['Xe']='123.905894,125.904281,127.903531,128.904780,129.903509,130.905072,131.904144,133.905395,135.907214;0.0010,0.0009,0.0191,0.264,0.041,0.212,0.269,0.104,0.089'
str_dict['Cs']='132.905429;1.0'
str_dict['Ba']='129.906282,131.905042,133.904486,134.905665,135.904553,136.905812,137.905232;0.00106,0.00101,0.0242,0.06593,0.0785,0.1123,0.7170'
str_dict['La']='137.90711,138.906347;0.00090,0.99910'
str_dict['Ce']='135.907140,137.905985,139.905433,141.909241;0.0019,0.0025,0.8843,0.1113'
str_dict['Pr']='140.907647;1.0'
str_dict['Nd']='141.907719,142.909810,143.910083,144.912570,145.913113,147.916889,149.920887;0.2713,0.1218,0.2380,0.0830,0.1719,0.0576,0.0564'
str_dict['Pm']='145.0;1.0'
str_dict['Sm']='143.911998,146.914895,147.914820,148.917181,149.917273,151.919729,153.922206;0.031,0.150,0.113,0.138,0.074,0.267,0.227'
str_dict['Eu']='150.919847,152.921225;0.478,0.522'
str_dict['Gd']='151.919786,153.920861,154.922618,155.922118,156.923956,157.924099,159.927049;0.0020,0.0218,0.1480,0.2047,0.1565,0.2484,0.2186'
str_dict['Tb']='158.925342;1.0'
str_dict['Dy']='155.925277,157.924403,159.925193,160.926930,161.926795,162.928728,163.929171;0.0006,0.0010,0.0234,0.189,0.255,0.249,0.282'
str_dict['Ho']='164.930319;1.0'
str_dict['Er']='161.928775,163.929198,165.930290,166.932046,167.932368,169.935461;0.0014,0.0161,0.336,0.2295,0.268,0.149'
str_dict['Tm']='168.934212;1.0'
str_dict['Yb']='167.933894,169.934759,170.936323,171.936378,172.938208,173.938859,175.942564;0.0013,0.0305,0.143,0.219,0.1612,0.318,0.127'
str_dict['Lu']='174.940770,175.942679;0.9741,0.0259'
str_dict['Hf']='173.940044,175.941406,176.943217,177.943696,178.945812,179.946545;0.00162,0.05206,0.18606,0.27297,0.13629,0.35100'
str_dict['Ta']='179.947462,180.947992;0.00012,0.99988'
str_dict['W']='179.946701,181.948202,182.950220,183.950928,185.954357;0.0012,0.263,0.1428,0.307,0.286'
str_dict['Re']='184.952951,186.955744;0.3740,0.6260'
str_dict['Os']='183.952488,185.953830,186.955741,187.955860,188.958137,189.958436,191.961467;0.0002,0.0158,0.016,0.133,0.161,0.264,0.410'
str_dict['Ir']='190.960584,192.962917;0.373,0.627'
str_dict['Pt']='189.959917,191.961019,193.962655,194.964766,195.964926,197.967869;0.0001,0.0079,0.329,0.338,0.253,0.072'
str_dict['Au']='196.966543;1.0'
str_dict['Hg']='195.965807,197.966743,198.968254,199.968300,200.970277,201.970617,203.973467;0.0015,0.100,0.169,0.231,0.132,0.298,0.0685'
str_dict['Tl']='202.972320,204.974401;0.29524,0.70476'
str_dict['Pb']='203.973020,205.974440,206.975872,207.976627;0.014,0.241,0.221,0.524'
str_dict['Bi']='208.980374;1.0'
str_dict['Po']='209.0;1.0'
str_dict['At']='210.0;1.0'
str_dict['Rn']='222.0;1.0'
str_dict['Fr']='223.0;1.0'
str_dict['Ra']='226.025;1.0'
str_dict['Ac']='227.028;1.0'
str_dict['Th']='232.038054;1.0'
str_dict['Pa']='231.0359;1.0'
str_dict['U']='234.040946,235.043924,238.050784;0.000055,0.00720,0.992745'
str_dict['Np']='237.048;1.0'
str_dict['Pu']='244.0;1.0'
str_dict['Am']='243.0;1.0'
str_dict['Cm']='247.0;1.0'
str_dict['Bk']='247.0;1.0'
str_dict['Cf']='251.0;1.0'
str_dict['Es']='252.0;1.0'
str_dict['Fm']='257.0;1.0'
str_dict['Md']='258.0;1.0'
str_dict['No']='259.0;1.0'
str_dict['Lr']='260.0;1.0'
str_dict['15N']='14.0030732,15.0001088;0.01,0.99'
str_dict['14N']='14.0030732,15.0001088;0.996337,0.003663'
str_dict['18O']='15.9949141,16.9991322,17.9991616;0.005,0.005,0.99'
str_dict['2H']='1.0078246,2.0141021;0.01,0.99'
str_dict['13C']='12.0000000,13.0033554;0.01,0.99'
chemical_dict = {}
for elem, value in str_dict.items():
items = value.split(";")
items = [item.split(',') for item in items]
items = [(float(items[0][i]), float(items[1][i])) for i in range(len(items[0]))]
chemical_dict[elem] = items
return chemical_dict
chem_dict = _get()
def _get_mono(chem_dict):
chem_mono_mass = {}
for elem, items in chem_dict.items():
mono_mass, mono_inten = items[0]
for i in range(1, len(items)):
if mono_inten < items[i][1]:
mono_mass, mono_inten = items[i]
chem_mono_mass[elem] = mono_mass
return chem_mono_mass
chem_mono_mass = _get_mono(chem_dict)
def calc_formula_mass(formula):
def _chem_tuples(formula):
items = formula.strip(')').split(')')
items = [item.split('(') for item in items]
return [(elem, int(n)) for elem, n in items]
chem_tuples = _chem_tuples(formula)
return sum([chem_mono_mass[elem]*n for elem, n in chem_tuples])
def replace_element_and_calc_mass(formula, replacement):
for from_elem, to_elem in replacement:
if formula[:len(from_elem)+1] == from_elem + "(":
formula = to_elem + formula[len(from_elem):]
else:
formula = formula.replace(')%s('%from_elem, ')%s('%to_elem)
return formula, calc_formula_mass(formula)
| 59.605263 | 200 | 0.666115 |
ace1fd948eb53f2e5e3485fcb586c8b48044b2f1 | 2,761 | py | Python | ooobuild/dyn/ui/dialogs/wizard_button.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | ooobuild/dyn/ui/dialogs/wizard_button.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | ooobuild/dyn/ui/dialogs/wizard_button.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
#
# Copyright 2022 :Barry-Thomas-Paul: Moss
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http: // www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Const Class
# this is a auto generated file generated by Cheetah
# Libre Office Version: 7.3
# Namespace: com.sun.star.ui.dialogs
from enum import IntEnum
from typing import TYPE_CHECKING
from ooo.oenv.env_const import UNO_ENVIRONMENT, UNO_RUNTIME
_DYNAMIC = False
if (not TYPE_CHECKING) and UNO_RUNTIME and UNO_ENVIRONMENT:
_DYNAMIC = True
if not TYPE_CHECKING and _DYNAMIC:
from com.sun.star.ui.dialogs import WizardButton as WizardButton
if hasattr(WizardButton, '_constants') and isinstance(WizardButton._constants, dict):
WizardButton._constants['__ooo_ns__'] = 'com.sun.star.ui.dialogs'
WizardButton._constants['__ooo_full_ns__'] = 'com.sun.star.ui.dialogs.WizardButton'
WizardButton._constants['__ooo_type_name__'] = 'const'
def build_enum():
global WizardButtonEnum
ls = [f for f in dir(WizardButton) if not callable(getattr(WizardButton, f)) and not f.startswith('__')]
_dict = {}
for name in ls:
_dict[name] = getattr(WizardButton, name)
WizardButtonEnum = IntEnum('WizardButtonEnum', _dict)
build_enum()
else:
from ....lo.ui.dialogs.wizard_button import WizardButton as WizardButton
class WizardButtonEnum(IntEnum):
"""
Enum of Const Class WizardButton
denotes the buttons found in a Wizard
**since**
OOo 3.3
"""
NONE = WizardButton.NONE
"""
denotes none of the buttons in the wizard
"""
NEXT = WizardButton.NEXT
"""
denotes the button used to travel forward through the wizard
"""
PREVIOUS = WizardButton.PREVIOUS
"""
denotes the button used to travel backward through the wizard
"""
FINISH = WizardButton.FINISH
"""
denotes the button used to finish the wizard
"""
CANCEL = WizardButton.CANCEL
"""
denotes the button used to cancel the wizard
"""
HELP = WizardButton.HELP
"""
denotes the button used to request help
"""
__all__ = ['WizardButton', 'WizardButtonEnum']
| 34.08642 | 112 | 0.666787 |
ace1fe330f7437d5bc5fcaff7b448cb7b8cb37e9 | 3,472 | py | Python | chainer/links/cnet/link_cnet_convolution.py | asrlabncku/RAP | 11fab37c8d98257ec0aed1b306aa9709a3a51328 | [
"MIT"
] | null | null | null | chainer/links/cnet/link_cnet_convolution.py | asrlabncku/RAP | 11fab37c8d98257ec0aed1b306aa9709a3a51328 | [
"MIT"
] | null | null | null | chainer/links/cnet/link_cnet_convolution.py | asrlabncku/RAP | 11fab37c8d98257ec0aed1b306aa9709a3a51328 | [
"MIT"
] | null | null | null | from __future__ import absolute_import
import numpy
import os
from chainer import link
from chainer.functions.cnet import function_cnet_convolution_2d
def _pair(x):
if hasattr(x, '__getitem__'):
return x
return x, x
class CnetConvolution2D(link.Link):
"""Two-dimensional binarized convolutional layer.
This link wraps the :func:`function_binary_convolution_2d.binary_convolution_2d` function and
holds the filter weight and bias vector as parameters.
Args:
in_channels (int): Number of channels of input arrays.
out_channels (int): Number of channels of output arrays.
ksize (int or pair of ints): Size of filters (a.k.a. kernels).
``ksize=k`` and ``ksize=(k, k)`` are equivalent.
stride (int or pair of ints): Stride of filter applications.
``stride=s`` and ``stride=(s, s)`` are equivalent.
pad (int or pair of ints): Spatial padding width for input arrays.
``pad=p`` and ``pad=(p, p)`` are equivalent.
wscale (float): Scaling factor of the initial weight.
bias (float): Initial bias value.
nobias (bool): If ``True``, then this link does not use the bias term.
use_cudnn (bool): If ``True``, then this link uses cuDNN if available.
initialW (4-D array): Initial weight value. If ``None``, then this
function uses to initialize ``wscale``.
initial_bias (1-D array): Initial bias value. If ``None``, then this
function uses to initialize ``bias``.
.. seealso::
See :func:`chainer.functions.convolution_2d` for the definition of
two-dimensional convolution.
Attributes:
W (~chainer.Variable): Weight parameter.
b (~chainer.Variable): Bias parameter.
"""
def __init__(self, in_channels, out_channels, ksize, stride=1, pad=0,
wscale=1, bias=0, nobias=False, use_cudnn=True,
initialW=None, initial_bias=None):
self.cname = "l_d_conv"
kh, kw = _pair(ksize)
self.stride = _pair(stride)
self.pad = _pair(pad)
self.use_cudnn = use_cudnn
W_shape = (out_channels, in_channels, kh, kw)
super(CnetConvolution2D, self).__init__(W=W_shape)
if initialW is not None:
self.W.data[...] = initialW
else:
std = wscale * numpy.sqrt(1. / (kh * kw * in_channels))
self.W.data[...] = numpy.random.normal(0, std, W_shape)
# Wd = numpy.full(W_shape, 0.5, dtype=numpy.float64)
# print(Wd)
# print("ori")
# print(self.W.data)
# self.W.data[...] = Wd
# print("after")
# print(self.W.data)
if nobias:
self.b = None
else:
self.add_param('b', out_channels)
if initial_bias is None:
initial_bias = bias
self.b.data[...] = initial_bias
def __call__(self, x):
"""Applies the convolution layer.
Args:
x (~chainer.Variable): Input image.
Returns:
~chainer.Variable: Output of the convolution.
"""
# a = numpy.copy(x.data)
# print(a.shape)
# numpy.savetxt('cnet_conv_for_in_link_c_convolution.txt', a.flatten(), fmt='%f', delimiter=',')
return function_cnet_convolution_2d.cnet_convolution_2d(
x, self.W, self.b, self.stride, self.pad, self.use_cudnn)
| 34.72 | 104 | 0.598502 |
ace1ff3e4ddc4930b03599608a541a4959fac7dd | 16,698 | py | Python | code/manifolder/helper.py | babel-publishing/manifolder | 38ba3a6e426dd51a572a108d452df74f5139921f | [
"MIT"
] | 1 | 2020-07-12T22:22:15.000Z | 2020-07-12T22:22:15.000Z | code/manifolder/helper.py | babel-publishing/manifolder | 38ba3a6e426dd51a572a108d452df74f5139921f | [
"MIT"
] | 7 | 2020-06-24T20:00:46.000Z | 2020-07-01T21:15:51.000Z | code/manifolder/helper.py | babel-publishing/manifolder | 38ba3a6e426dd51a572a108d452df74f5139921f | [
"MIT"
] | 1 | 2020-06-26T17:50:32.000Z | 2020-06-26T17:50:32.000Z | __all__ = (
'count_cluster_lengths',
'show_cluster_lengths',
'print_cluster_lengths',
'make_transition_matrix',
'make_matrix_markov',
'reorder_cluster',
'image_M',
)
import math
import collections
import numpy as np
from numpy import linalg as LA
from scipy.sparse import linalg as LAs
from scipy.stats import norm, kurtosis
from scipy import stats
from scipy.stats import skew
import matplotlib.pyplot as plt
def histogram_bins_centered(data, nbins):
    """Return ``nbins + 1`` evenly spaced bin edges spanning ``data``.

    numpy histograms take explicit bin boundaries (unlike MATLAB's
    ``hist``, which takes centers), so ``nbins`` bins need ``nbins + 1``
    boundary values ranging from the minimum to the maximum of the data.
    """
    lo = np.min(data)
    hi = np.max(data)
    return np.linspace(lo, hi, nbins + 1)
def histogram_bins_all_snips(data, nbins):
    """Per-dimension bin edges computed across a list of snippets.

    ``data`` is a list of 2-D arrays, one per snippet, each shaped
    (dimensions, samples).  For every dimension the minimum and maximum
    are taken over *all* snippets, so every snippet can be histogrammed
    against the same edges.  Returns a list of ``nbins + 1``-length edge
    arrays, one per dimension.
    """
    num_dims = data[0].shape[0]
    hist_bins = []
    for dim in range(num_dims):
        # Range of this dimension across every snippet.
        lo = min(np.min(snip[dim, :]) for snip in data)
        hi = max(np.max(snip[dim, :]) for snip in data)
        hist_bins.append(np.linspace(lo, hi, nbins + 1))
    return hist_bins
# svd_like_matlab used by histograms overlap
def svd_like_matlab(A):
    """Singular value decomposition with MATLAB's return conventions.

    MATLAB's ``[U, S, V] = svd(A)`` satisfies ``A = U*S*V'`` with ``S`` a
    diagonal matrix, while numpy's ``u, s, vh = np.linalg.svd(A)``
    satisfies ``A = u @ np.diag(s) @ vh`` with ``s`` a flat vector.  This
    wrapper converts the numpy outputs so ``A = U @ S @ V.T`` holds.
    """
    u, s, vh = np.linalg.svd(A)
    # u needs no change; diagonalize s and transpose vh so the MATLAB
    # identity A = U @ S @ V.T applies.
    return u, np.diag(s), vh.T
def svds_like_matlab(A, k=None):
    """Partial (top-k) SVD with MATLAB's return conventions.

    Mimics MATLAB's ``[U, S, V] = svds(A, k)``: returns ``U``, a diagonal
    ``S`` and ``V`` such that ``U @ S @ V.T`` is the best rank-k
    approximation of ``A``, with singular values sorted in decreasing
    order along the diagonal of ``S``.

    NOTE: scipy's sparse ``svds`` requires ``0 < k < min(A.shape)``, so
    callers should pass ``k`` explicitly; the ``A.shape[0]`` default is
    kept only for interface compatibility.
    """
    if k is None:
        k = A.shape[0]
    u, s, vh = LAs.svds(A, k)
    # scipy returns the k singular values in no guaranteed (typically
    # ascending) order; MATLAB returns them descending, so reorder all
    # three factors consistently.
    order = np.argsort(s)[::-1]
    U = u[:, order]
    S = np.diag(s[order])
    V = vh.T[:, order]
    return U, S, V
# eig_like_matlab used by embeddings
def eig_like_matlab(A, k=None):
    """Mimic MATLAB's ``[V, D] = eigs(A, k)`` with numpy's dense solver.

    Returns ``V`` (eigenvectors as columns) and ``D`` (a diagonal matrix
    of eigenvalues).  NOTE: numpy's ``eig`` gives no ordering guarantee,
    so the first ``k`` pairs returned here are simply the first ``k`` that
    numpy produced, not necessarily the largest.
    """
    print('Using full eigensolver from numpy')
    if k is None:
        k = A.shape[0]
    eigvals, eigvecs = LA.eig(A)
    # MATLAB packs eigenvalues on the diagonal of a matrix; numpy returns
    # a flat vector, so diagonalize the first k of them.
    return eigvecs[:, :k], np.diag(eigvals[:k])
def eigs_like_matlab(A, k=None):
    """Largest-magnitude eigenpairs of a symmetric matrix, MATLAB-style.

    Mimics ``[V, D] = eigs(A, k)``: returns eigenvector columns ``V`` and
    a diagonal eigenvalue matrix ``D`` sorted by decreasing eigenvalue.

    NOTE: scipy's ``eigsh`` requires ``k < A.shape[0]``, so callers should
    pass ``k`` explicitly; the full-size default is kept only for
    interface compatibility.
    """
    print('Using partial symmetric eigensolver from scipy')
    if k is None:
        k = A.shape[0]
    vals, vecs = LAs.eigsh(A, k, which='LM', ncv=min(A.shape[0], max(5 * k + 1, 20)))
    # eigsh makes no ordering promise; sort descending to match MATLAB.
    order = np.argsort(vals)[::-1]
    return vecs[:, order], np.diag(vals[order])
def eig_like_matlab_test():
    """Smoke-test eig_like_matlab on a random 3x3 matrix; prints V and E
    so the output can be compared by eye against MATLAB's eigs."""
    # Python eigenvalues
    # https://docs.scipy.org/doc/numpy/reference/generated/numpy.linalg.eig.html
    #
    # w,v = eig(a)
    #
    # v[:,i] is the RIGHT eigenvector corresponding to values w[i]
    #
    # (appears that in python, w[0] ≈ 1,
    #  meaning eigenvectors have been normed and sorted)
    #
    # MATLAB eigs
    # https://www.mathworks.com/help/matlab/ref/eigs.html
    # [V, D] = eigs(A,k)
    #
    # (to make matters more confusing, they use [V, E] = eigs(W2,10))
    #
    # V[:,i] and D[i,i]
    # V columns are eigenvectors
    # D is the diagonal matrix of the eigen values
    #
    # returns the k biggest (???) eigenvectors """
    a = np.random.rand(3, 3)
    V, E = eig_like_matlab(a, 2)
    print('\n original matrix a:\n' + str(a))
    print('\n V:\n' + str(V))
    print('\n E:\n' + str(E))
    # w, v = LA.eig(a)
    # print('\n w (as diags):\n' + str(np.diag(w)))
    # print('\n v\n' + str(v))
    # i = 0
    # eigvec = v[:,i]
    # eigval = w[i]
    # print('\n eigval\n' + str(w[i]))
    # print('\n eigvec\n' + str(eigvec))
    # print('A * eigvec / eigval ')
    # print('\n multilpy:' + str(a @ eigvec / eigval))
import pandas as pd
def simplify_data(z_shape=(8, 87660)):
    """Generate a simple synthetic multichannel signal and save it as CSV.

    Builds a ``z_shape`` array of low-level noise, overlays a repeating
    sequence of easy-to-recognize segments (silence, gaussian noise,
    uniform noise, cos, sin, root-of-abs-sin) on row 0 and its first
    difference on row 1, normalizes each row to [0, 1], writes the
    transposed result (time as rows) to data/simple_data.csv, and
    returns it.
    """
    # set at era to me about 200*5 points ...
    # this will give about 10 eras over 10k points,
    # which can be downsampled by 5, and viewed as 2000 points
    era = 1000  # how long is each sub-section (cos, gaussian noise, etc.)
    total_length = z_shape[1]  # total length of the z (and z_mod)
    # create a datastructure, containing low-level noise, to contain the data
    z_mod = 1e-3 * np.random.randn(z_shape[0], z_shape[1])  # fill with low-level noise
    # this will hold the actual signal (only for the first row)
    sig = np.array([])
    while (sig.size < total_length):
        # print('appending some more data ...')
        sig_zero = np.zeros(era)
        # gaussian noise
        sig_gauss = np.random.randn(era)
        # uniform noise
        sig_unif = np.random.rand(era)
        # cos, 10 cycles over the era
        sig_cos = np.cos(2 * np.pi * np.arange(era) / era * 10.)
        # sin, 5 cycles over the era
        sig_sin = .1 * np.sin(2 * np.pi * np.arange(era) / era * 5.)
        sig_root_sin = np.sqrt(np.abs(np.sin((2 * np.pi * np.arange(era) / era * 10.))))
        # append all together ...
        sig = np.concatenate((sig, sig_zero, sig_gauss, sig_unif, sig_cos, sig_sin, sig_root_sin))
    # signal may be too long, cut to correct length
    sig = sig[:total_length]
    # fade in and out each section, using a cosine wave
    sin_mask = np.sin(2 * np.pi * np.arange(total_length) / (2 * era))
    sig = sig * sin_mask
    # add to the final dataset
    z_mod[0, :] += sig
    z_mod[1, :-1] += np.diff(sig)  # add the diff to the second row
    # optional? Normalize each row from 0 to 1
    z_mod = z_mod - np.min(z_mod, axis=1).reshape(-1, 1)
    z_mod = z_mod / np.max(z_mod, axis=1).reshape(-1, 1)
    z_mod = np.round(z_mod, 6)
    # save the data
    # note that it was originally created with time as columns,
    # the standard python format is time as rows (and features as columns),
    # so transpose
    # the fmt command suppress scientific notation / small number-junk
    # NOTE(review): assumes a 'data/' directory exists relative to the cwd.
    np.savetxt('data/simple_data.csv', z_mod.T, delimiter=',', fmt='%1.6f')
    return z_mod.T
def test_moms():
    """Print-and-compare numpy/scipy calculations of the first four moments; see
    https://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.skew.html#scipy.stats.skew
    https://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.kurtosis.html#scipy.stats.kurtosis
    https://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.moment.html
    """
    # create some test data: gaussian with a small random positive offset
    x = np.random.randn(100) + (.1 + .2 * np.random.rand(100))
    print('*** calculating moments, using numpy')
    print('np.mean(x)', np.mean(x))
    print('np.var(x)', np.var(x))
    print('*** higher moments, using scipy')
    print('skew', skew(x))
    print('kurtosis (Fisher is default)', kurtosis(x))
    print('scipy describe', stats.describe((x)))
    # des = stats.describe(x)
    # note, all the values are the same, except var, which is 1/(n-1) in describe, but 1/n for numpy ... weird ...
def get_log_spaced_bins(max_value=350.1):
    """Compute log-spaced histogram bin boundaries for use with np.histogram.

    The CENTER bin values follow a 1-2-5 decade sequence (1, 2, 5, 10, 20, ...);
    the BOUNDARY values fall halfway between them (0, 1.5, 3.5, 7.5, 15, ...).

    NOTE(review): as written this only prints bin_values / bin_boundaries
    and returns None; a return statement would be needed for callers to use
    the arrays.
    """
    assert max_value > 0, 'max_value must be positive'
    bin_values = [0]
    mult = 1
    # grow the 1-2-5 sequence until it comfortably exceeds max_value
    while (True):
        bin_values.append(mult * 1)
        if bin_values[-1] > max_value * 10: break
        bin_values.append(mult * 2)
        if bin_values[-1] > max_value * 10: break
        bin_values.append(mult * 5)
        if bin_values[-1] > max_value * 10: break
        mult *= 10
    bin_values = np.array(bin_values)
    # [0 1 2 5 10 20 50 100 200 500]
    bin_boundaries = np.diff(bin_values) / 2 + bin_values[:-1]
    # [0.5 1.5 3.5 7.5 15. 35. 75. 150. 350.]
    bin_boundaries[0] = 0
    # [0 1.5 3.5 7.5 15. 35. 75. 150. 350.]
    # made extra bins - now, cut down the bin boundaries so that the data just fits
    while (bin_boundaries[-2] > max_value):
        bin_boundaries = bin_boundaries[:-1]  # cut out last element
    # cut down the bin values, to match
    # (do not need first element, which is zero, and should be one less elements that the bin_boundaries
    bin_values = bin_values[1:bin_boundaries.size]
    print(bin_values)
    print(bin_boundaries)
def count_cluster_lengths(x):
    """Collect run lengths per cluster label from a label sequence.

    ``x`` is an array of cluster labels over time.  Returns an
    OrderedDict mapping each label to the list of consecutive-run
    lengths for that label, in order of occurrence.
    """
    # the unique values are the cluster names (np.unique returns them sorted)
    keys = np.unique(x)
    np.sort(keys)  # kept from original; np.sort returns a copy, so this is a no-op
    print('cluster names (keys)', keys)

    # one (initially empty) run-length list per cluster label
    cluster_lens = collections.OrderedDict((key, []) for key in keys)

    pos = 0
    while pos < x.size:
        label = x[pos]
        # distance to the first position where the label changes
        changed = np.where(x[pos:] != label)[0]
        if changed.size > 0:
            run = changed[0]
        else:
            run = x.size - pos  # final run extends to the end
        cluster_lens[label].append(run)
        pos += run
    return cluster_lens
def print_cluster_lengths(cluster_lens):
    """Print every cluster's run lengths, sorted ascending.

    Side effect: each length list in ``cluster_lens`` is sorted in place.
    """
    for label, lengths in cluster_lens.items():
        lengths.sort()  # in-place ascending sort
        print('key', label, 'value', lengths, '\n')
def show_cluster_lengths(cluster_lens, sharey=True):
    """Plot, one subplot per cluster, vertical bars at each observed run length.

    sharey=False allows each subplot to have different y-axis limits.
    """
    keys = list(cluster_lens.keys())
    nkeys = len(keys)
    plt.figure()
    fig, axes = plt.subplots(nkeys, 1, sharex=True, sharey=sharey, figsize=[7, nkeys * 1 + 1])
    # loop through, and make all the histograms, as subplots
    for k in range(nkeys):
        key = keys[k]
        different_lengths_this_cluster = np.unique(cluster_lens[key])
        for l in different_lengths_this_cluster:
            # NOTE(review): np.sum(np.where(...)[0]) sums the match *indices*,
            # not the match count — len(np.where(...)[0]) may have been
            # intended; confirm before relying on the bar heights.
            number_of_occurrences = np.sum(np.where(cluster_lens[key] == l)[0])
            # print('length', l, 'number_of_occurrences', number_of_occurrences)
            # plot as a vertical bar
            axes[k].plot([l, l], [0, number_of_occurrences])
        axes[k].set_ylabel('cl ' + str(k))
    plt.tight_layout()
    plt.suptitle('cluster length histograms')
    plt.xlabel('cluster lengths')
    plt.show()
###
### transition matrix
###
def make_transition_matrix(states):
    """Count state-to-state transitions in a label sequence.

    Returns a square count matrix with STATES AS COLUMNS (physics
    convention): ``tmat[to, from]`` is the number of observed
    from -> to transitions, so evolution is ``|state+1> = tmat @ |state>``
    once the matrix is normalized (see make_matrix_markov).
    """
    n_states = np.max(states) + 1
    tmat = np.zeros((n_states, n_states))
    # tally each consecutive pair (states[step] -> states[step + 1])
    for step in range(states.size - 1):
        tmat[states[step + 1], states[step]] += 1
    return tmat
def make_matrix_markov(A):
    """Column-normalize a matrix so each column sums to one.

    Assumes the entries of ``A`` are already nonnegative (e.g. a count
    matrix from make_transition_matrix), making the result column-stochastic.
    """
    totals = np.sum(A, axis=0).reshape(1, -1)
    # broadcasting divides every column by its own total
    return A / totals
def image_M(data, vmax=None):
    """Display a transition matrix as a heat-map ('M' for matrix).

    vmax defaults to the largest absolute value in the data, so the color
    scale always covers the full range.
    """
    plt.figure(figsize=(7, 7))
    # create scaled data
    if vmax is None:
        vmax = np.max(np.abs(data))
    # cmap = 'gist_heat'
    # cmap = 'bone'
    cmap = 'hot'
    # cmap = 'binary'
    plt.imshow(data, vmin=0, vmax=vmax, cmap=cmap)
    plt.grid(b=None)
    plt.xlabel('from')
    plt.ylabel('to')
    plt.colorbar()
    plt.title('transition matrix')
    plt.show()
def reorder_cluster(IDX, M):
    """Rename clusters so the diagonal of M is in decreasing order.

    Cluster labels in IDX are remapped so that label 0 is the cluster with
    the largest self-transition count (M's diagonal), label 1 the next
    largest, and so on.  Note, M must be regenerated from the new_IDX that
    is returned.
    """
    print('NOTE, need to fix bug, sometimes orders backwards')
    # self-transition counts serve as the per-cluster "frequency"
    idx_freq = M.diagonal()
    new_idx = np.zeros_like(IDX)
    # sort the values of the index, from largest to smallest
    new_order = np.argsort(idx_freq)
    # so weird ... new_order is alternately lowest-to-highest, and highest-to-lowest
    # just reorder, if needed
    # if idx_freq[new_order[-1]] > idx_freq[new_order[-1]]:
    #     # frequency INCREASES at the end ... reorder!
    #     print('yup!')
    #     new_order = new_order[::-1]
    # else:
    #     print('nerp!!')
    # NOTE(review): np.argsort is deterministic (ascending), so reversing
    # here should always give descending order; the "sometimes backwards"
    # observation above is unconfirmed from this code alone.
    new_order = new_order[::-1]
    for i in range(len(new_order)):
        # find all the locations matching next index needed
        loc = np.where(IDX == new_order[i])
        new_idx[loc] = i  # reorder, starting with i
    return new_idx
| 28.889273 | 114 | 0.613846 |
ace20164f6757f44e79cca02c021916d26921645 | 4,988 | py | Python | twittoff/new_routes.py | npgeorge/inclass-twittoff | 5198cf572af05ebd2c1cc267077cf4b83559f8c2 | [
"MIT"
] | null | null | null | twittoff/new_routes.py | npgeorge/inclass-twittoff | 5198cf572af05ebd2c1cc267077cf4b83559f8c2 | [
"MIT"
] | 2 | 2021-09-08T01:46:23.000Z | 2022-01-13T02:19:51.000Z | twittoff/new_routes.py | npgeorge/inclass-twittoff | 5198cf572af05ebd2c1cc267077cf4b83559f8c2 | [
"MIT"
] | null | null | null | from flask import Blueprint, jsonify, request, render_template, flash
from sklearn.linear_model import LogisticRegression
import numpy as np
from twittoff.models import User, Tweet, db
from twittoff.twitter_service import twitter_api_client
from twittoff.basilica_service import basilica_connection
#from twittoff.classifier import load_model
# Flask blueprint grouping all routes defined in this module.
new_routes = Blueprint("new_routes", __name__)
# Module-level API clients, constructed once at import time.
client = twitter_api_client()
basilica_client = basilica_connection()
#classifier_model = load_model()
@new_routes.route("/")
def index():
    """Render the landing page."""
    return render_template("homepage.html")
#
# DATABASE STUFF
#
@new_routes.route("/users")
@new_routes.route("/users.json")
def list_users():
    """Return all stored users as a JSON list of attribute dicts."""
    print("LISTING USERS...")
    users = User.query.all()  # returns a list of <class 'alchemy.User'>
    print(len(users))
    users_response = []
    for u in users:
        user_dict = u.__dict__
        # strip SQLAlchemy's internal state so the dict is JSON-serializable
        del user_dict["_sa_instance_state"]
        users_response.append(user_dict)
    return jsonify(users_response)
@new_routes.route("/users/<string:screen_name>")
def show_user(screen_name=None):
    """Fetch a Twitter user and recent tweets, upsert them into the database
    (computing a Basilica embedding per tweet), then render the profile page.

    On any failure (unknown user, API error, DB error) returns a generic
    JSON error message instead of raising.
    """
    print("SHOWING USER:", screen_name)
    try:
        # Get user info from twitter:
        twitter_user = client.get_user(screen_name)
        print(type(twitter_user))
        # Find or create database user:
        db_user = User.query.get(twitter_user.id) or User(id=twitter_user.id)
        print(db_user)
        # Update database user:
        db_user.screen_name = twitter_user.screen_name
        db_user.followers_count = twitter_user.followers_count
        db.session.add(db_user)
        db.session.commit()
        # Get Tweets:
        statuses = client.user_timeline(screen_name, tweet_mode="extended", count=50, exclude_replies=True, include_rts=False)
        for status in statuses:
            print(status.full_text)
            # Find or create database tweet:
            db_tweet = Tweet.query.get(status.id) or Tweet(id=status.id)
            print(db_tweet)
            # Update database tweet:
            db_tweet.user_id = status.author.id  # or db_user.id
            db_tweet.full_text = status.full_text
            embedding = basilica_client.embed_sentence(status.full_text, model="twitter")  # todo: prefer to make a single request to basilica with all the tweet texts, instead of a request per tweet
            print(len(embedding))
            db_tweet.embedding = embedding
            db.session.add(db_tweet)
        db.session.commit()
        return render_template("user_profile.html", user=db_user, tweets=db_user.tweets)
    except Exception as e:
        print(e)
        return jsonify({"message": "OOPS THERE WAS AN ERROR. PLEASE TRY ANOTHER USER."})
@new_routes.route("/reset")
def reset():
    """Drop and recreate all database tables.  DESTRUCTIVE: wipes all data."""
    db.drop_all()
    db.create_all()
    return jsonify({"message": "Database Reset OK"})
#
# MODEL STUFF
#
@new_routes.route("/predict", methods=["POST"])
def predict():
    """
    Determines which of two users are more likely to say a given tweet.
    Assumes users and their tweets have already been stored in the database.

    Trains a fresh logistic regression per request on the two users' stored
    tweet embeddings (label 1 = first user, 0 = second user), embeds the
    submitted text, and renders the prediction.

    Adapted from: https://github.com/LambdaSchool/TwitOff/blob/master/twitoff/predict.py
    """
    print("PREDICTION REQUEST...")
    print("FORM DATA:", dict(request.form))
    sn1 = request.form["first_screen_name"]
    sn2 = request.form["second_screen_name"]
    tweet_text = request.form["tweet_text"]
    print("FETCHING TWEETS FROM THE DATABASE...")
    user1 = User.query.filter(User.screen_name == sn1).one()
    user2 = User.query.filter(User.screen_name == sn2).one()
    print("TRAINING THE MODEL...")
    user1_embeddings = np.array([tweet.embedding for tweet in user1.tweets])
    print(type(user1_embeddings), user1_embeddings.shape)  #> <class 'numpy.ndarray'> (7, 768)
    user2_embeddings = np.array([tweet.embedding for tweet in user2.tweets])
    print(type(user2_embeddings), user2_embeddings.shape)  #> <class 'numpy.ndarray'> (20, 768)
    embeddings = np.vstack([user1_embeddings, user2_embeddings])
    #> ValueError: all the input array dimensions for the concatenation axis must match exactly, but along dimension 1,
    #  the array at index 0 has size 41 and the array at index 1 has size 768
    #print("EMBEDDINGS", type(embeddings))
    #breakpoint()
    # labels: 1.0 for user1's tweets, 0.0 for user2's, in vstack order
    labels = np.concatenate([np.ones(len(user1.tweets)), np.zeros(len(user2.tweets))])
    print("LABELS", type(labels))
    classifier = LogisticRegression().fit(embeddings, labels)
    print("GETTING EMBEDDINGS FOR THE EXAMPLE TEXT...")
    tweet_embedding = basilica_client.embed_sentence(tweet_text, model="twitter")
    print("PREDICTING...")
    results = classifier.predict(np.array(tweet_embedding).reshape(1, -1))
    print(type(results), results.shape)  #> <class 'numpy.ndarray'> (7, 768)
    print(results)
    #> [1.] for first user
    #> [0.] for second user
    return render_template("results.html",
        screen_name1=sn1,
        screen_name2=sn2,
        tweet_text=tweet_text,
        prediction_results=results
    )
ace201dcce252303a8a54d55bc54f7a332a797ca | 1,379 | py | Python | practice_questions/geeks_for_geeks/zero_summing_triplets.py | vaishnavprachi98/technical-interviews | 7e61aaf08774d67090f022bd288394ffce7c437d | [
"MIT"
] | 65 | 2017-11-17T09:08:36.000Z | 2021-12-25T03:20:06.000Z | practice_questions/geeks_for_geeks/zero_summing_triplets.py | vaishnavprachi98/technical-interviews | 7e61aaf08774d67090f022bd288394ffce7c437d | [
"MIT"
] | null | null | null | practice_questions/geeks_for_geeks/zero_summing_triplets.py | vaishnavprachi98/technical-interviews | 7e61aaf08774d67090f022bd288394ffce7c437d | [
"MIT"
] | 14 | 2018-03-08T03:06:26.000Z | 2021-09-14T13:50:53.000Z | """
@author: David Lei
@since: 8/11/2017
http://www.geeksforgeeks.org/find-triplets-array-whose-sum-equal-zero/
"""
array = [0, -1, 2, -3 ,1]
def zero_summing_triples(array):
    """Print every triple of array values that sums to zero.

    O(n^2) time and O(n) extra space (the naive approach is O(n^3); an
    O(n^2) time / O(1) space variant exists using sorting).  For each
    anchor index i we scan j > i, remembering values already seen; when
    the complement -(array[i] + array[j]) was seen among earlier j's, a
    zero-summing triple exists, because any triple must have two of its
    non-anchor members appear in the same j-scan.
    """
    n = len(array)
    for i in range(n):
        seen = set()
        anchor = array[i]
        for j in range(i + 1, n):
            partner = array[j]
            # value that would complete a zero-summing triple
            needed = -1 * (anchor + partner)
            if needed in seen:
                print("This triple sums to 0: %s, %s, %s" % (anchor, partner, needed))
            else:
                seen.add(partner)
zero_summing_triples(array) | 39.4 | 126 | 0.591008 |
ace2031983dc11c8c5174c7e55df3d9601b8eee7 | 6,262 | py | Python | experiments/densenet169_imagenet_transfer_medico/_sources/evaluate_05eed469fba42a2a6a4ea27a51f47568.py | Stevenah/keras-training-system | ef15519d84335621f3e8f73db68cd54134e723fe | [
"MIT"
] | 2 | 2018-09-19T14:53:15.000Z | 2021-09-30T21:46:26.000Z | experiments/densenet169_imagenet_transfer_medico/_sources/evaluate_05eed469fba42a2a6a4ea27a51f47568.py | Stevenah/keras-training-system | ef15519d84335621f3e8f73db68cd54134e723fe | [
"MIT"
] | null | null | null | experiments/densenet169_imagenet_transfer_medico/_sources/evaluate_05eed469fba42a2a6a4ea27a51f47568.py | Stevenah/keras-training-system | ef15519d84335621f3e8f73db68cd54134e723fe | [
"MIT"
] | null | null | null | from utils.util import get_sub_dirs, pad_string
from utils.metrics import *
from utils.logging import *
from scipy.misc import imread, imsave, imresize
import tensorflow as tf
import numpy as np
import os
import time
# file paths
kfold_split_file_path = ''
def evaluate(model, config, experiment, validation_directory, file_identifier=''):
    """Evaluate a trained image classifier on a directory of labelled images.

    Walks ``validation_directory`` (one sub-directory per ground-truth
    class), runs ``model`` on every image, accumulates a confusion table,
    derives per-class metrics, writes several report files, runs the model
    over the unlabelled test set at ``config['evaluation']['path']``,
    attaches all report files to the sacred ``experiment``, and returns the
    mean metrics.

    :param model: trained keras-style model exposing ``predict``
    :param config: (dict) experiment configuration; uses the ``dataset``,
        ``image_processing`` and ``evaluation`` sections
    :param experiment: sacred experiment that receives the report artifacts
    :param validation_directory: (str) directory whose sub-directories are
        the class names
    :param file_identifier: (str) prefix for all report file names
    :return: (dict) mean f1, rec, acc, prec, spec and mcc over all classes
    """
    missclassified = {}

    # get number of classes in model
    number_of_classes = config['dataset']['number_of_classes']

    # image dimensions
    image_width = config['image_processing']['image_width']
    image_height = config['image_processing']['image_height']
    image_channels = config['image_processing']['image_channels']

    # get class directory names from validation directory
    class_names = get_sub_dirs(validation_directory)
    class_names.sort()

    # get keras labels in label-index format
    label_index = { class_name: index for index, class_name in enumerate(class_names) }
    index_label = { index: class_name for index, class_name in enumerate(class_names) }

    # prepare confusion table (rows = predicted class, columns = true class)
    confusion = np.zeros((number_of_classes, number_of_classes))

    # iterate over each class name
    for class_name in class_names:

        print(f'Starting {class_name}')

        # set path to class directory
        class_dir = os.path.join(validation_directory, class_name)

        # iterate over each image in class directory
        for file_name in os.listdir(class_dir):

            # models class prediction for image
            prediction = None

            # process image before passing it through the network:
            # resize to the model's input shape, add a batch axis, scale to [0, 1]
            image = imread(os.path.join(class_dir, file_name), mode='RGB')
            image = imresize(image, (image_width, image_height, image_channels))
            image = image.reshape(1, image_width, image_height, image_channels)
            image = np.true_divide(image, 255.)

            with tf.get_default_graph().as_default():
                predictions = model.predict(image)[0]
                prediction = np.argmax(predictions)

            # check prediction against ground truth, i.e, if it equals the class directory name
            if (prediction != label_index[class_name]):

                # initialize empty dict on first missclassification of class
                if class_name not in missclassified:
                    missclassified[class_name] = { }

                # BUG FIX: this was `pred in enumerate(predictions)` (a plain
                # dict display referencing undefined names), which raised a
                # NameError on the first misclassification; store the full
                # per-class probability map via a dict comprehension instead.
                missclassified[class_name][file_name] = {
                    'prediction': index_label[prediction],
                    'predictions': { index_label[class_index]: pred for class_index, pred in enumerate(predictions) }
                }

            # update confusion table
            confusion[prediction][label_index[class_name]] += 1

    # calculate FP, FN, TP and TN based on confusion table
    FP = confusion.sum(axis=0) - np.diag(confusion)
    FN = confusion.sum(axis=1) - np.diag(confusion)
    TP = np.diag(confusion)
    TN = confusion.sum() - (FP + FN + TP)

    print ( f"True Positives: { TP }" )
    print ( f"True Negatives: { TN }" )
    print ( f"False Positives: { FP }" )
    # BUG FIX: this line was mislabelled "False Positives"
    print ( f"False Negatives: { FN }" )

    # calculate metrics based on FP, FN, TP and TN
    # (nan_to_num guards against zero-division producing NaNs)
    f1 = np.nan_to_num(f1score(TP, TN, FP, FN))
    rec = np.nan_to_num(recall(TP, TN, FP, FN))
    acc = np.nan_to_num(accuracy(TP, TN, FP, FN))
    prec = np.nan_to_num(precision(TP, TN, FP, FN))
    spec = np.nan_to_num(specificity(TP, TN, FP, FN))
    mcc = np.nan_to_num(matthews_correlation_coefficient(TP, TN, FP, FN))

    # bundle metrics into dictionary
    metrics = { 'FP': FP, 'FN': FN, 'TP': TP, 'TN': TN, 'f1': f1, 'rec': rec, 'acc': acc, 'prec': prec, 'spec': spec, 'mcc': mcc }

    # save missclassified images to file together with class
    for class_name in missclassified:
        log_misclassifications( f'{file_identifier}_class_misclassifications.txt', missclassified[class_name], class_name )

    # write kvasir legend to results file
    log_class_legend(f'{file_identifier}_split_evaluation_summary.txt', class_names)

    # write confusion table to results file
    log_confusion_table(f'{file_identifier}_split_evaluation_summary.txt', confusion)

    # write model summary to results file
    log_model_results(f'{file_identifier}_split_evaluation_summary.txt', metrics, file_identifier)

    # write summaries for each class
    for class_name in class_names:

        # class index
        class_index = label_index[class_name]

        class_metrics = { key: value[class_index] for key, value in metrics.items() }

        # write class summary to results file
        log_class_results( f'{file_identifier}_class_results.txt', class_metrics, class_name, class_index)

    evaluation_path = config['evaluation']['path']

    print ("starting test validation...")

    # run the model over the unlabelled test set, timing each prediction
    for file_name in os.listdir(evaluation_path):

        prediction = None
        prediction_time = None

        image = imread(os.path.join(evaluation_path, file_name), mode='RGB')
        image = imresize(image, (image_width, image_height, image_channels))
        image = image.reshape(1, image_width, image_height, image_channels)
        image = np.true_divide(image, 255.)

        with tf.get_default_graph().as_default():
            start_time = time.time()
            prediction = model.predict(image)[0]
            prediction_time = time.time() - start_time

        prediction_index = np.argmax(prediction)
        prediction_label = index_label[prediction_index]

        log_file_evaluation( f'{file_identifier}_test_evaluation_results.txt', file_name, prediction_label, prediction[prediction_index], prediction_time )

    # add evaluation files to experiment
    experiment.add_artifact( f'../tmp/{file_identifier}_split_evaluation_summary.txt' )
    experiment.add_artifact( f'../tmp/{file_identifier}_class_misclassifications.txt' )
    experiment.add_artifact( f'../tmp/{file_identifier}_class_results.txt' )
    experiment.add_artifact( f'../tmp/{file_identifier}_test_evaluation_results.txt' )

    # return evaluation metrics
    return {
        'f1': np.mean(f1),
        'rec': np.mean(rec),
        'acc': np.mean(acc),
        'prec': np.mean(prec),
        'spec': np.mean(spec),
        'mcc': np.mean(mcc)
    }
| 38.654321 | 155 | 0.666081 |
ace203e21be4bb1bffcbddd9f6c7eb4e0f64f498 | 918 | py | Python | var/spack/repos/builtin/packages/davix/package.py | RemoteConnectionManager/spack | f2967b6c16effd26ce007cf86cadbb645c574f50 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 3 | 2019-06-27T13:26:50.000Z | 2019-07-01T16:24:54.000Z | var/spack/repos/builtin/packages/davix/package.py | openbiox/spack | bb6ec7fb40c14b37e094a860e3625af53f633174 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 75 | 2016-07-27T11:43:00.000Z | 2020-12-08T15:56:53.000Z | var/spack/repos/builtin/packages/davix/package.py | openbiox/spack | bb6ec7fb40c14b37e094a860e3625af53f633174 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 8 | 2015-10-16T13:51:49.000Z | 2021-10-18T13:58:03.000Z | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Davix(CMakePackage):
    """High-performance file management over WebDAV/HTTP."""

    # Project page and default download URL used by Spack's fetcher.
    homepage = "https://dmc.web.cern.ch/projects/davix"
    url = "http://grid-deployment.web.cern.ch/grid-deployment/dms/lcgutil/tar/davix/0.6.7/davix-0.6.7.tar.gz"
    # Directory listing scanned by `spack versions`; versions live one level deep.
    list_url = "http://grid-deployment.web.cern.ch/grid-deployment/dms/lcgutil/tar/davix/"
    list_depth = 1

    # sha256 checksum of the 0.6.8 release tarball.
    version('0.6.8', 'e1820f4cc3fc44858ae97197a3922cce2a1130ff553b080ba19e06eb8383ddf7',
            url='http://grid-deployment.web.cern.ch/grid-deployment/dms/lcgutil/tar/davix/0.6.8/davix-0.6.8.tar.gz')

    depends_on('pkgconfig', type='build')
    depends_on('libxml2')
    depends_on('libuuid')
    depends_on('openssl')
| 38.25 | 116 | 0.717865 |
ace20547b6794b549b7ec790476d70df207fa358 | 11,583 | py | Python | domain/estimators/logistic.py | rcap107/holoclean | d4f5929a8e4d92d4f41eb058c04c96cdcb0af767 | [
"Apache-2.0"
] | 468 | 2018-11-11T15:40:12.000Z | 2022-03-30T13:21:48.000Z | domain/estimators/logistic.py | rcap107/holoclean | d4f5929a8e4d92d4f41eb058c04c96cdcb0af767 | [
"Apache-2.0"
] | 43 | 2018-11-10T20:03:49.000Z | 2020-10-20T16:39:03.000Z | domain/estimators/logistic.py | rcap107/holoclean | d4f5929a8e4d92d4f41eb058c04c96cdcb0af767 | [
"Apache-2.0"
] | 118 | 2018-11-12T19:11:42.000Z | 2022-03-23T18:25:29.000Z | from abc import ABCMeta, abstractmethod
import logging
import time
import torch
from torch.optim import Adam, SGD
from torch.utils.data import TensorDataset, DataLoader
from tqdm import tqdm
from ..estimator import Estimator
from utils import NULL_REPR, NA_COOCCUR_FV
class Logistic(Estimator, torch.nn.Module):
"""
Logistic is an Estimator that approximates posterior of
p(v_cur | v_init) by training a logistic regression model to predict the current
value in a cell given all other initial values using features
of the other initial values such as co-occurrence.
"""
# We should not use weight decay for this posterior model since we'd
# like to overfit as much as possible to the co-occurrence features.
# This is fine since we take only samples with high predicted probabilities.
WEIGHT_DECAY = 0
    def __init__(self, env, dataset, domain_df, active_attrs):
        """
        :param env: (dict) HoloClean environment config; reads 'optimizer',
            'learning_rate' and 'momentum' here
        :param dataset: (Dataset) original dataset
        :param domain_df: (DataFrame) currently populated domain dataframe.
            Required columns are: _vid_, _tid_, attribute, domain, domain_size, init_value
        :param active_attrs: (list[str]) attributes that have random values
        """
        torch.nn.Module.__init__(self)
        Estimator.__init__(self, env, dataset)
        self.active_attrs = active_attrs
        # Sorted records of the currently populated domain. This helps us
        # align the final predicted probabilities.
        self.domain_records = domain_df.sort_values('_vid_')[['_vid_', '_tid_', 'attribute', 'domain', 'init_value']].to_records()
        # self.dom maps tid --> attr --> list of domain values
        # we need to find the number of domain values we will be generating
        # a training sample for.
        self.n_samples = int(domain_df['domain_size'].sum())
        # Create and initialize featurizers.
        self.featurizers = [CooccurAttrFeaturizer(self.ds)]
        for f in self.featurizers:
            f.setup()
        # Total feature-vector width across all featurizers.
        self.num_features = sum(feat.num_features() for feat in self.featurizers)
        # Construct the X and Y tensors.
        self._gen_training_data()
        # Use pytorch logistic regression model: a single linear layer
        # (weights _W, bias _B) trained with binary cross-entropy.
        self._W = torch.nn.Parameter(torch.zeros(self.num_features, 1))
        torch.nn.init.xavier_uniform_(self._W)
        self._B = torch.nn.Parameter(torch.Tensor([1e-6]))
        self._loss = torch.nn.BCELoss()
        if self.env['optimizer'] == 'sgd':
            self._optimizer = SGD(self.parameters(), lr=self.env['learning_rate'], momentum=self.env['momentum'],
                                  weight_decay=self.WEIGHT_DECAY)
        else:
            self._optimizer = Adam(self.parameters(), lr=self.env['learning_rate'], weight_decay=self.WEIGHT_DECAY)
    def _gen_training_data(self):
        """
        _gen_training_data memoizes the self._X and self._Y tensors
        used for training and prediction.

        Layout: one row per (cell, candidate value) pair, concatenated in
        _vid_ order; self.vid_to_idxs maps each vid to its (start, end)
        slice so prediction can reuse self._X without re-featurizing.
        """
        logging.debug('Logistic: featurizing training data...')
        tic = time.clock()
        # Each row corresponds to a possible value for a given attribute
        # and given TID
        self._X = torch.zeros(self.n_samples, self.num_features)
        self._Y = torch.zeros(self.n_samples)
        # Keeps track of cells with NULL init_value to ignore in training.
        # We only train on cells when train_idx[idx] == 1.
        self._train_idx = torch.zeros(self.n_samples)
        """
        Iterate through the domain for every cell and create a sample
        to use in training. We assign Y as 1 if the value is the initial value.
        """
        sample_idx = 0
        raw_data_dict = self.ds.raw_data.df.set_index('_tid_').to_dict('index')
        # Keep track of which indices correspond to a VID so we can re-use
        # self._X in prediction.
        self.vid_to_idxs = {}
        for rec in tqdm(list(self.domain_records)):
            init_row = raw_data_dict[rec['_tid_']]
            # Domain values are stored as a '|||'-separated string.
            domain_vals = rec['domain'].split('|||')
            # Generate the feature tensor for all the domain values for this
            # cell.
            feat_tensor = self._gen_feat_tensor(init_row, rec['attribute'], domain_vals)
            assert(feat_tensor.shape[0] == len(domain_vals))
            self._X[sample_idx:sample_idx+len(domain_vals)] = feat_tensor
            self.vid_to_idxs[rec['_vid_']] = (sample_idx, sample_idx+len(domain_vals))
            # If the initial value is NULL, we do not want to train on it
            # nor assign it a weak label.
            if rec['init_value'] == NULL_REPR:
                sample_idx += len(domain_vals)
                continue
            # If the init value is not NULL, then we want to use these possible
            # value samples during training.
            self._train_idx[sample_idx:sample_idx + len(domain_vals)] = 1
            # Assign the tensor corresponding to the initial value with
            # a target label of 1.
            init_idx = domain_vals.index(rec['init_value'])
            self._Y[sample_idx + init_idx] = 1
            sample_idx += len(domain_vals)
        # Convert this to a vector of indices rather than a vector mask.
        self._train_idx = (self._train_idx == 1).nonzero()[:,0]
        logging.debug('Logistic: DONE featurization in %.2fs', time.clock() - tic)
    def _gen_feat_tensor(self, init_row, attr, domain_vals):
        """
        Generates the feature tensor for the list of :param`domain_vals` from
        all featurizers.

        :param init_row: (namedtuple or dict) current initial values
        :param attr: (str) attribute of row (i.e. cell) the :param values: correspond to
            and the cell to generate a feature tensor for.
        :param domain_vals: (list[str]) domain values to featurize for

        :return: Tensor with dimensions (len(values), total # of features across all featurizers)
        """
        # Each featurizer contributes a (len(domain_vals), f.num_features())
        # block; concatenate them along the feature dimension.
        return torch.cat([f.create_tensor(init_row, attr, domain_vals) for f in self.featurizers], dim=1)
def forward(self, X):
linear = X.matmul(self._W) + self._B
return torch.sigmoid(linear)
    def train(self, num_epochs=3, batch_size=32):
        """
        Trains the LR model.

        :param num_epochs: (int) number of epochs.
        :param batch_size: (int) minibatch size for the DataLoader.
        :return: (list[float]) per-batch losses across all epochs.

        NOTE(review): this shadows torch.nn.Module.train(mode) since the
        class inherits from nn.Module -- confirm callers never rely on the
        Module semantics.
        """
        batch_losses = []
        # We train only on cells that do not have their initial value as NULL.
        X_train, Y_train = self._X.index_select(0, self._train_idx), self._Y.index_select(0, self._train_idx)
        torch_ds = TensorDataset(X_train, Y_train)
        # Main training loop.
        for epoch_idx in range(1, num_epochs+1):
            logging.debug("Logistic: epoch %d", epoch_idx)
            batch_cnt = 0
            for batch_X, batch_Y in tqdm(DataLoader(torch_ds, batch_size=batch_size)):
                batch_pred = self.forward(batch_X)
                # BCELoss expects matching shapes; predictions are (n, 1).
                batch_loss = self._loss(batch_pred, batch_Y.reshape(-1,1))
                batch_losses.append(float(batch_loss))
                self.zero_grad()
                batch_loss.backward()
                self._optimizer.step()
                batch_cnt += 1
            logging.debug('Logistic: average batch loss: %f', sum(batch_losses[-1 * batch_cnt:]) / batch_cnt)
        return batch_losses
    def predict_pp(self, row, attr=None, values=None):
        """
        predict_pp generates posterior probabilities for the domain values
        corresponding to the cell/random variable row['_vid_'].

        That is: :param`attr` and :param`values` are ignored.

        predict_pp_batch is much faster for Logistic since it simply does
        a one-pass of the batch feature tensor.

        :return: (list[2-tuple]) 2-tuples corresponding to (value, proba)
        """
        # Re-use the slice of self._X featurized in _gen_training_data.
        start_idx, end_idx = self.vid_to_idxs[row['_vid_']]
        pred_X = self._X[start_idx:end_idx]
        pred_Y = self.forward(pred_X)
        # NOTE(review): indexing domain_records by _vid_ assumes vids are
        # contiguous 0..n-1 after the sort in __init__ -- confirm.
        values = self.domain_records[row['_vid_']]['domain'].split('|||')
        return zip(values, map(float, pred_Y))
    def predict_pp_batch(self):
        """
        Performs batch prediction.

        Runs one forward pass over the full feature tensor and yields, per
        domain record (in _vid_ order), an iterator of (value, proba) pairs.
        """
        pred_Y = self.forward(self._X)
        for rec in self.domain_records:
            values = rec['domain'].split('|||')
            start_idx, end_idx = self.vid_to_idxs[rec['_vid_']]
            yield zip(values, map(float, pred_Y[start_idx:end_idx]))
class Featurizer(metaclass=ABCMeta):
    """
    Featurizer is an abstract class for featurizers that are able to generate
    real-valued tensors (features) for a row from raw data.
    Used in Logistic model.

    Note: the metaclass must be declared in the class header; the old
    Python-2 ``__metaclass__`` class attribute is silently ignored in
    Python 3, which left the @abstractmethod decorators unenforced.
    """
    @abstractmethod
    def setup(self):
        """Perform one-time initialization (e.g. fetch statistics)."""
        raise NotImplementedError

    @abstractmethod
    def num_features(self):
        """Return the number of features generated per candidate value."""
        raise NotImplementedError

    @abstractmethod
    def create_tensor(self, row, attr, values):
        """Return a (len(values), num_features()) feature tensor."""
        raise NotImplementedError
class CooccurAttrFeaturizer(Featurizer):
    """
    CooccurAttrFeaturizer computes the co-occurrence statistics for a cell
    and its possible domain values with the other initial values in the tuple.
    It breaks down each co-occurrence feature on a pairwise attr1 X attr2 basis.
    """
    name = 'CooccurAttrFeaturizer'

    def __init__(self, dataset):
        """
        :param dataset: (Dataset) dataset to compute co-occurrence features
            for; frequency statistics are fetched from it in :meth:`setup`.
        """
        self.ds = dataset
        self.attrs = self.ds.get_attributes()
        # Map attribute name -> column-block index used in create_tensor.
        self.attr_to_idx = {attr: idx for idx, attr in enumerate(self.attrs)}
        self.n_attrs = len(self.attrs)

    def num_features(self):
        # One feature per ordered (attr, other-attr) pair.
        return len(self.attrs) * len(self.attrs)

    def setup(self):
        # Single-attribute value frequencies and pairwise co-occurrence
        # counts, precomputed by the dataset.
        _, self.freq, self.cooccur_freq = self.ds.get_statistics()

    def create_tensor(self, row, attr, values):
        """
        :param row: (namedtuple or dict) current initial values
        :param attr: (str) attribute of row (i.e. cell) the :param values: correspond to
            and the cell to generate a feature tensor for.
        :param values: (list[str]) values to generate

        :return: Tensor with dimensions (len(values), # of features)
        """
        tensor = torch.zeros(len(values), self.num_features())
        for val_idx, val in enumerate(values):
            for other_attr_idx, other_attr in enumerate(self.attrs):
                # Skip the cell's own attribute: no self co-occurrence.
                if attr == other_attr:
                    continue
                other_val = row[other_attr]
                # calculate p(val | other_val)
                # there may not be co-occurrence frequencies for some value pairs since
                # our possible values were from correlation with only
                # one other attribute
                if val == NULL_REPR or other_val == NULL_REPR:
                    fv = NA_COOCCUR_FV
                else:
                    cooccur = self.cooccur_freq[attr][other_attr].get(val, {}).get(other_val, NA_COOCCUR_FV)
                    freq = self.freq[other_attr][row[other_attr]]
                    fv = float(cooccur) / float(freq)
                # Feature layout: block per attr, one slot per other attr.
                feat_idx = self.attr_to_idx[attr] * self.n_attrs + other_attr_idx
                tensor[val_idx, feat_idx] = fv
        return tensor
| 41.516129 | 130 | 0.63222 |
ace206058d0ca1b41cb158529643bd1a4df3784c | 14,640 | py | Python | lib/reda/containers/ERT.py | j-gallistl/reda | 13b1f9e1cda92bbbbafc5c28be2c691d3b722740 | [
"MIT"
] | null | null | null | lib/reda/containers/ERT.py | j-gallistl/reda | 13b1f9e1cda92bbbbafc5c28be2c691d3b722740 | [
"MIT"
] | null | null | null | lib/reda/containers/ERT.py | j-gallistl/reda | 13b1f9e1cda92bbbbafc5c28be2c691d3b722740 | [
"MIT"
] | null | null | null | import functools
import os
import pandas as pd
import reda
from reda.main.logger import LoggingClass
import reda.exporters.bert as reda_bert_export
import reda.exporters.crtomo as reda_crtomo_export
import reda.importers.bert as reda_bert_import
import reda.importers.iris_syscal_pro as reda_syscal
import reda.plotters.histograms as HS
import reda.plotters.pseudoplots as PS
import reda.utils.fix_sign_with_K as redafixK
import reda.utils.geometric_factors as redaK
from reda.utils import has_multiple_timesteps
from reda.utils.norrec import assign_norrec_to_df, average_repetitions
from reda.utils.norrec import assign_norrec_diffs
from reda.utils.decorators_and_managers import append_doc_of
from reda.utils.decorators_and_managers import LogDataChanges
class ImportersBase(object):
    """Base class for all importer classes"""

    def _add_to_container(self, df):
        """Add a given DataFrame to the container

        Parameters
        ----------
        df : pandas.DataFrame
            DataFrame, must adhere to the container constraints (i.e., must
            have all required columns)
        """
        if self.data is None:
            self.data = df
        else:
            self.data = pd.concat(
                (self.data, df), ignore_index=True, sort=True
            )
        # Clean any previous norrec-assignments.
        # Bugfix: the previous expression `'norrec' and 'id' in columns`
        # only tested for 'id', so drop() could raise a KeyError when
        # 'norrec' was missing. Test both columns explicitly.
        if 'norrec' in self.data.columns and 'id' in self.data.columns:
            self.data.drop(['norrec', 'id'], axis=1, inplace=True)
        self.data = assign_norrec_to_df(self.data)
        # note that columns not in the DataFrames are ignored, thus no problem
        # to include rho_a and rpha
        self.data = assign_norrec_diffs(self.data, ['r', 'rho_a', 'rpha'])
        # Put a, b, m, n in the front and ensure integers.
        # Iterating "nmba" in reverse insertion order yields a, b, m, n
        # as the final column order.
        for col in tuple("nmba"):
            cols = list(self.data)
            cols.insert(0, cols.pop(cols.index(col)))
            # .loc replaces DataFrame.ix, which was deprecated in pandas
            # 0.20 and removed in pandas 1.0.
            self.data = self.data.loc[:, cols]
            self.data[col] = self.data[col].astype(int)
        if 'timestep' in self.data:
            # make sure the timestep column is in the fifth position
            col_order = ['a', 'b', 'm', 'n', 'timestep']
            self.data = self.data.reindex(columns=(
                col_order +
                [key for key in self.data.columns if key not in col_order]
            ))

    def _describe_data(self, df=None):
        """Print statistics on a DataFrame by calling its .describe() function

        Parameters
        ----------
        df : None|pandas.DataFrame, optional
            if not None, use this DataFrame. Otherwise use self.data
        """
        df_to_use = self.data if df is None else df
        # Restrict the summary to the required columns actually present.
        cols = [c for c in self.required_data_columns if c in df_to_use.columns]
        print(df_to_use[cols].describe())
class Importers(ImportersBase):
    """This class provides wrappers for most of the importer functions and is
    meant to be inherited by the ERT data container.

    See Also
    --------
    Exporters
    """

    @append_doc_of(reda_syscal.import_bin)
    def import_syscal_bin(self, filename, **kwargs):
        """Syscal import

        timestep: int or :class:`datetime.datetime`
            if provided use this value to set the 'timestep' column of the
            produced dataframe. Default: 0
        """
        # Pop 'timestep' so it is not forwarded to the importer function.
        timestep = kwargs.get('timestep', None)
        if 'timestep' in kwargs:
            del (kwargs['timestep'])
        self.logger.info('IRIS Syscal Pro bin import')
        # LogDataChanges records how the import changed the container data.
        with LogDataChanges(self, filter_action='import'):
            data, electrodes, topography = reda_syscal.import_bin(
                filename, **kwargs
            )
            if timestep is not None:
                data['timestep'] = timestep
            self._add_to_container(data)
        if kwargs.get('verbose', False):
            print('Summary:')
            self._describe_data(data)

    @append_doc_of(reda_syscal.import_txt)
    def import_syscal_txt(self, filename, **kwargs):
        """Syscal import

        timestep: int or :class:`datetime.datetime`
            if provided use this value to set the 'timestep' column of the
            produced dataframe. Default: 0
        """
        # Same flow as import_syscal_bin, only the parser differs.
        timestep = kwargs.get('timestep', None)
        if 'timestep' in kwargs:
            del (kwargs['timestep'])
        self.logger.info('IRIS Syscal Pro text import')
        with LogDataChanges(self, filter_action='import'):
            data, electrodes, topography = reda_syscal.import_txt(
                filename, **kwargs)
            if timestep is not None:
                data['timestep'] = timestep
            self._add_to_container(data)
        if kwargs.get('verbose', False):
            print('Summary:')
            self._describe_data(data)

    @append_doc_of(reda_bert_import.import_ohm)
    def import_bert(self, filename, **kwargs):
        """BERT .ohm file import"""
        timestep = kwargs.get('timestep', None)
        if 'timestep' in kwargs:
            del (kwargs['timestep'])
        self.logger.info('Unified data format (BERT/pyGIMLi) file import')
        with LogDataChanges(self, filter_action='import',
                            filter_query=os.path.basename(filename)):
            data, electrodes, topography = reda_bert_import.import_ohm(
                filename, **kwargs)
            if timestep is not None:
                data['timestep'] = timestep
            self._add_to_container(data)
            self.electrode_positions = electrodes  # See issue #22
        if kwargs.get('verbose', False):
            print('Summary:')
            self._describe_data(data)

    @functools.wraps(import_bert)
    def import_pygimli(self, *args, **kargs):
        # Alias: pyGIMLi uses the same unified data format as BERT.
        self.import_bert(*args, **kargs)
class Exporters(object):
    """This class provides wrappers for most of the exporter functions and is
    meant to be inherited by the ERT data container.

    See Also
    --------
    Importers
    """

    @functools.wraps(reda_bert_export.export_bert)
    def export_bert(self, filename):
        # Write data + electrode positions in the unified BERT/pyGIMLi
        # .ohm format.
        reda_bert_export.export_bert(self.data, self.electrode_positions,
                                     filename)

    @functools.wraps(export_bert)
    def export_pygimli(self, *args, **kargs):
        """Same as .export_bert"""
        self.export_bert(*args, **kargs)

    @functools.wraps(reda_crtomo_export.save_block_to_crt)
    def export_crtomo(self, filename, norrec='all', store_errors=False):
        """Export to CRTomo-compatible file"""
        reda_crtomo_export.save_block_to_crt(
            filename, self.data, norrec, store_errors
        )
class ERT(LoggingClass, Importers, Exporters):
    """Container for electrical resistivity tomography (ERT) data.

    Combines logging, importers and exporters with filtering, geometric
    factor computation, reciprocal error analysis and plotting helpers.
    """

    def __init__(self, data=None, electrode_positions=None, topography=None):
        """
        Parameters
        ----------
        data : :py:class:`pandas.DataFrame`
            If not None, then the provided DataFrame is assumed to contain
            valid data previously prepared elsewhere. Please refer to the
            documentation for required columns.
        electrode_positions : :py:class:`pandas.DataFrame`
            If set, this is expected to be a DataFrame which contains electrode
            positions with columns: "x", "y", "z".
        topography : :py:class:`pandas.DataFrame`
            If set, this is expected to a DataFrame which contains topography
            information with columns: "x", "y", "z".
        """
        self.setup_logger()
        self.data = self.check_dataframe(data)
        self.electrode_positions = electrode_positions
        self.topography = topography

    def to_ip(self):
        """Return of copy of the data inside a TDIP container
        """
        # Only meaningful if chargeability data was imported alongside.
        if 'chargeability' in self.data.columns:
            tdip = reda.TDIP(data=self.data)
        else:
            raise Exception('Missing column "chargeability"')
        return tdip

    def check_dataframe(self, dataframe):
        """Check the given dataframe for the required type and columns
        """
        if dataframe is None:
            return None
        # is this a DataFrame
        if not isinstance(dataframe, pd.DataFrame):
            raise Exception(
                'The provided dataframe object is not a pandas.DataFrame')
        # a, b, m, n electrode numbers plus resistance r are mandatory
        required_columns = tuple("abmnr")
        for column in required_columns:
            if column not in dataframe:
                raise Exception(
                    'Required column not in dataframe: {0}'.format(column))
        return dataframe

    def sub_filter(self, subset, filter, inplace=True):
        """Apply a filter to subset of the data

        Examples
        --------

        ::

            .subquery(
                'timestep == 2',
                'R > 4',
            )

        """
        # build the full query: remove rows matching BOTH subset and filter
        full_query = ''.join(('not (', subset, ') or not (', filter, ')'))
        with LogDataChanges(self, filter_action='filter', filter_query=filter):
            result = self.data.query(full_query, inplace=inplace)
        return result

    def filter(self, query, inplace=True):
        """Use a query statement to filter data. Note that you specify the data
        to be removed!

        Parameters
        ----------
        query : string
            The query string to be evaluated. Is directly provided to
            pandas.DataFrame.query
        inplace : bool
            if True, change the container dataframe in place (defaults to True)

        Returns
        -------
        result : :py:class:`pandas.DataFrame`
            DataFrame that contains the result of the filter application

        """
        with LogDataChanges(self, filter_action='filter', filter_query=query):
            # negate the query: rows matching it are dropped, not kept
            result = self.data.query(
                'not ({0})'.format(query),
                inplace=inplace,
            )
        return result

    def compute_K_analytical(self, spacing):
        """Compute geometrical factors over the homogeneous half-space with a
        constant electrode spacing
        """
        K = redaK.compute_K_analytical(self.data, spacing=spacing)
        self.data = redaK.apply_K(self.data, K)
        # fix signs of resistances that are negative only due to geometry
        redafixK.fix_sign_with_K(self.data)

    def compute_reciprocal_errors(self, key="r"):
        r"""
        Compute reciprocal erros following LaBrecque et al. (1996) according
        to:

        .. math::

            \epsilon = \left|\frac{2(|R_n| - |R_r|)}{|R_n| + |R_r|}\right|

        Parameters
        ----------
        key : str
            Parameter to calculate the reciprocal error for (default is "r").

        Examples
        --------
        >>> import reda
        >>> ert = reda.ERT()
        >>> ert.data = reda.utils.norrec.get_test_df()
        >>> ert.data = pd.DataFrame([
        ...     [1,2,3,4,95],
        ...     [3,4,2,1,-105]], columns=list("abmnr")
        ... )
        >>> ert.compute_reciprocal_errors()
        generating ids
        assigning ids
        >>> ert.data["error"].mean() == 0.1
        True
        """
        # Assign norrec ids if not already present
        if "id" not in self.data.keys():
            self.data = assign_norrec_to_df(self.data)

        # Average repetitions
        data = average_repetitions(self.data, "r")

        # Get configurations with reciprocals
        data = data.groupby("id").filter(lambda b: not b.shape[0] == 1)
        n = self.data.shape[0] - data.shape[0]
        if n > 0:
            print("Could not find reciprocals for %d configurations" % n)

        # Calc reciprocal error
        grouped = data.groupby("id")

        def _error(group):
            # LaBrecque et al. (1996) relative reciprocal error
            R_n = group["r"].iloc[0]
            R_r = group["r"].iloc[1]
            return abs(2 * (abs(R_n) - abs(R_r)) / (abs(R_n) + abs(R_r)))

        error = grouped.apply(_error)
        error.name = "error"
        self.data = pd.merge(
            self.data,
            error.to_frame().reset_index(), how='outer',
            on='id'
        )

    def pseudosection(self, column='r', filename=None, log10=False, **kwargs):
        """Plot a pseudosection of the given column. Note that this function
        only works with dipole-dipole data at the moment.

        Parameters
        ----------
        column : string, optional
            Column to plot into the pseudosection, default: r
        filename : string, optional
            if not None, save the resulting figure directory to disc
        log10 : bool, optional
            if True, then plot values in log10, default: False
        **kwargs : dict
            all additional parameters are directly provided to
            :py:func:`reda.plotters.pseudoplots.PS.plot_pseudosection_type2`

        Returns
        -------
        fig : :class:`matplotlib.Figure`
            matplotlib figure object
        ax : :class:`matplotlib.axes`
            matplotlib axes object
        cb : colorbar object
            matplotlib colorbar object
        """
        fig, ax, cb = PS.plot_pseudosection_type2(
            self.data, column=column, log10=log10, **kwargs
        )
        if filename is not None:
            fig.savefig(filename, dpi=300)
        return fig, ax, cb

    def histogram(self, column='r', filename=None, log10=False, **kwargs):
        """Plot a histogram of one data column"""
        # NOTE(review): log10 and **kwargs are accepted but never forwarded
        # to HS.plot_histograms -- confirm whether this is intended.
        return_dict = HS.plot_histograms(self.data, column)
        if filename is not None:
            return_dict['all'].savefig(filename, dpi=300)
        return return_dict

    def has_multiple_timesteps(self):
        """Return True if container has multiple timesteps."""
        return has_multiple_timesteps(self.data)

    def delete_measurements(self, row_or_rows):
        """Delete one or more measurements by index of the DataFrame.

        Resets the DataFrame index.

        Parameters
        ----------
        row_or_rows : int or list of ints
            Row numbers (starting with zero) of the data DataFrame (ert.data)
            to delete

        Returns
        -------
        None
        """
        self.data.drop(self.data.index[row_or_rows], inplace=True)
        # NOTE(review): reset_index() without drop=True keeps the old index
        # as a new 'index' column -- confirm this is intended.
        self.data = self.data.reset_index()

    def to_configs(self):
        """Return a config object that contains the measurement configurations
        (a,b,m,n) from the data

        Returns
        -------
        config_obj : reda.ConfigManager
        """
        config_obj = reda.configs.configManager.ConfigManager()
        config_obj.add_to_configs(self.data[['a', 'b', 'm', 'n']].values)
        return config_obj
| 34.046512 | 79 | 0.591393 |
ace206448ebb92f7888f9f97253135a703707641 | 4,865 | py | Python | test/functional/wallet-dump.py | paicoin/paicoin | 09f8029112e7a57548ee5b5202c260f8aee7f2e9 | [
"MIT"
] | 77 | 2018-07-04T14:03:42.000Z | 2021-12-19T17:11:14.000Z | test/functional/wallet-dump.py | paicoin/paicoin | 09f8029112e7a57548ee5b5202c260f8aee7f2e9 | [
"MIT"
] | 89 | 2018-06-20T03:48:55.000Z | 2022-02-10T06:50:56.000Z | test/functional/wallet-dump.py | paicoin/paicoin | 09f8029112e7a57548ee5b5202c260f8aee7f2e9 | [
"MIT"
] | 40 | 2018-06-15T12:59:35.000Z | 2021-12-12T15:49:56.000Z | #!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the dumpwallet RPC."""
import os
from test_framework.test_framework import PAIcoinTestFramework
from test_framework.util import (assert_equal, assert_raises_rpc_error)
def read_dump(file_name, addrs, hd_master_addr_old):
    """
    Read the given dump, count the addrs that match, count change and reserve.
    Also check that the old hd_master is inactive
    """
    found_addr = 0
    found_addr_chg = 0
    found_addr_rsv = 0
    hd_master_addr_ret = None
    with open(file_name, encoding='utf8') as dump_file:
        for line in dump_file:
            # Skip comments and lines too short to be key records.
            if line[0] == "#" or len(line) <= 10:
                continue
            # Record format: "<key> <date> <keytype> # addr=... hdkeypath=..."
            key_label, comment = line.split("#")
            keytype = key_label.split(" ")[2]
            if len(comment) <= 1:
                continue
            addr_keypath = comment.split(" addr=")[1]
            addr = addr_keypath.split(" ")[0]
            keypath = None
            if keytype == "inactivehdmaster=1":
                # ensure the old master is still available
                assert hd_master_addr_old == addr
            elif keytype == "hdmaster=1":
                # ensure we have generated a new hd master key
                assert hd_master_addr_old != addr
                hd_master_addr_ret = addr
            else:
                keypath = addr_keypath.rstrip().split("hdkeypath=")[1]
            # count key types
            for addr_obj in addrs:
                if (addr_obj['address'] == addr
                        and addr_obj['hdkeypath'] == keypath
                        and keytype == "label="):
                    found_addr += 1
                    break
                elif keytype == "change=1":
                    found_addr_chg += 1
                    break
                elif keytype == "reserve=1":
                    found_addr_rsv += 1
                    break
    return found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_ret
class WalletDumpTest(PAIcoinTestFramework):
    """Exercise the dumpwallet RPC on an unencrypted and encrypted wallet."""

    def set_test_params(self):
        self.num_nodes = 1
        # small keypool so the dump stays cheap to parse
        self.extra_args = [["-keypool=90"]]

    def setup_network(self, split=False):
        # Use 1 minute timeout because the initial getnewaddress RPC can take
        # longer than the default 30 seconds due to an expensive
        # CWallet::TopUpKeyPool call, and the encryptwallet RPC made later in
        # the test often takes even longer.
        self.add_nodes(self.num_nodes, self.extra_args, timewait=60)
        self.start_nodes()

    def run_test (self):
        tmpdir = self.options.tmpdir

        # generate 20 addresses to compare against the dump
        test_addr_count = 20
        addrs = []
        for i in range(0,test_addr_count):
            addr = self.nodes[0].getnewaddress()
            vaddr= self.nodes[0].validateaddress(addr) #required to get hd keypath
            addrs.append(vaddr)
        # Should be a no-op:
        self.nodes[0].keypoolrefill()

        # dump unencrypted wallet
        result = self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.unencrypted.dump")
        assert_equal(result['filename'], os.path.abspath(tmpdir + "/node0/wallet.unencrypted.dump"))

        found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_unenc = \
            read_dump(tmpdir + "/node0/wallet.unencrypted.dump", addrs, None)
        assert_equal(found_addr, test_addr_count)  # all keys must be in the dump
        assert_equal(found_addr_chg, 50)  # 50 blocks where mined
        assert_equal(found_addr_rsv, 90*2)  # 90 keys plus 100% internal keys

        #encrypt wallet, restart, unlock and dump
        self.nodes[0].node_encrypt_wallet('test')
        self.start_node(0)
        self.nodes[0].walletpassphrase('test', 10)
        # Should be a no-op:
        self.nodes[0].keypoolrefill()
        self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.encrypted.dump")

        # the old master key must still appear (inactive) in the new dump
        found_addr, found_addr_chg, found_addr_rsv, _ = \
            read_dump(tmpdir + "/node0/wallet.encrypted.dump", addrs, hd_master_addr_unenc)
        assert_equal(found_addr, test_addr_count)
        assert_equal(found_addr_chg, 90*2 + 50)  # old reserve keys are marked as change now
        assert_equal(found_addr_rsv, 90*2)

        # Overwriting should fail
        assert_raises_rpc_error(-8, "already exists", self.nodes[0].dumpwallet, tmpdir + "/node0/wallet.unencrypted.dump")

# standard test-framework entry point
if __name__ == '__main__':
    WalletDumpTest().main ()
| 43.053097 | 122 | 0.594861 |
ace206a35a6a397516db50ca250f86b37f0ee177 | 21,347 | py | Python | pypy/interpreter/test/apptest_coroutine.py | hollmmax/zig | d80baa5a5fcbc82b3e2294b398edc20a98737a52 | [
"MIT"
] | null | null | null | pypy/interpreter/test/apptest_coroutine.py | hollmmax/zig | d80baa5a5fcbc82b3e2294b398edc20a98737a52 | [
"MIT"
] | null | null | null | pypy/interpreter/test/apptest_coroutine.py | hollmmax/zig | d80baa5a5fcbc82b3e2294b398edc20a98737a52 | [
"MIT"
] | 1 | 2022-03-30T11:42:37.000Z | 2022-03-30T11:42:37.000Z | import pytest
from pytest import raises
import sys
class suspend:
    """
    A simple awaitable that yields control back to the driving "event
    loop" exactly once, surfacing ``msg`` as the yielded value.
    """
    def __init__(self, msg=None):
        self.msg = msg

    def __await__(self):
        # A one-shot generator: suspends the awaiting coroutine with
        # self.msg, then finishes (the await expression evaluates to None).
        def _pause(value):
            yield value
        return _pause(self.msg)
def test_cannot_iterate():
    # A coroutine object is not iterable: 'for', iter() and next() must
    # all raise TypeError.  Uses the context-manager form of
    # pytest.raises; the string-eval form used previously was deprecated
    # in pytest 4.0 and removed in pytest 5.0.
    async def f(x):
        pass
    with pytest.raises(TypeError):
        for i in f(5):
            pass
    pytest.raises(TypeError, iter, f(5))
    pytest.raises(TypeError, next, f(5))
def test_async_for():
    # 'async for' over an object whose __anext__ never raises
    # StopAsyncIteration: the loop body terminates itself via 'break'.
    class X:
        def __aiter__(self):
            return MyAIter()
    class MyAIter:
        async def __anext__(self):
            return 42
    async def f(x):
        sum = 0
        async for a in x:
            sum += a
            if sum > 100:
                break
        return sum
    cr = f(X())
    try:
        cr.send(None)
    except StopIteration as e:
        assert e.value == 42 * 3
    else:
        assert False, "should have raised"


def test_StopAsyncIteration():
    # StopAsyncIteration raised from __anext__ terminates 'async for'.
    class X:
        def __aiter__(self):
            return MyAIter()
    class MyAIter:
        count = 0
        async def __anext__(self):
            if self.count == 3:
                raise StopAsyncIteration
            self.count += 1
            return 42
    async def f(x):
        sum = 0
        async for a in x:
            sum += a
        return sum
    cr = f(X())
    try:
        cr.send(None)
    except StopIteration as e:
        assert e.value == 42 * 3
    else:
        assert False, "should have raised"
def test_for_error_cause():
    # __await__ raising: the TypeError reported for the invalid __anext__
    # result must chain the original error via __cause__.
    class F:
        def __aiter__(self):
            return self
        def __anext__(self):
            return self
        def __await__(self):
            1 / 0

    async def main():
        async for _ in F():
            pass

    c = pytest.raises(TypeError, main().send, None)
    assert 'an invalid object from __anext__' in c.value.args[0], c.value
    assert isinstance(c.value.__cause__, ZeroDivisionError)


def test_async_with():
    # 'async with' awaits __aenter__ and __aexit__ in order.
    seen = []
    class X:
        async def __aenter__(self):
            seen.append('aenter')
        async def __aexit__(self, *args):
            seen.append('aexit')
    async def f(x):
        async with x:
            return 42
    c = f(X())
    try:
        c.send(None)
    except StopIteration as e:
        assert e.value == 42
    else:
        assert False, "should have raised"
    assert seen == ['aenter', 'aexit']


def test_async_with_exit_True():
    # A truthy return from __aexit__ (would suppress an exception) must
    # not disturb the normal return path.
    seen = []
    class X:
        async def __aenter__(self):
            seen.append('aenter')
        async def __aexit__(self, *args):
            seen.append('aexit')
            return True
    async def f(x):
        async with x:
            return 42
    c = f(X())
    try:
        c.send(None)
    except StopIteration as e:
        assert e.value == 42
    else:
        assert False, "should have raised"
    assert seen == ['aenter', 'aexit']
def test_await():
    # Values sent into the coroutine are routed into the awaited object's
    # __await__ generator; two sequential awaits keep the protocol going.
    class X:
        def __await__(self):
            i1 = yield 40
            assert i1 == 82
            i2 = yield 41
            assert i2 == 93
    async def f():
        await X()
        await X()
    c = f()
    assert c.send(None) == 40
    assert c.send(82) == 41
    assert c.send(93) == 40
    assert c.send(82) == 41
    pytest.raises(StopIteration, c.send, 93)


def test_await_error():
    # Awaiting a non-awaitable raises TypeError with an exact message.
    async def f():
        await [42]
    c = f()
    try:
        c.send(None)
    except TypeError as e:
        assert str(e) == "object list can't be used in 'await' expression"
    else:
        assert False, "should have raised"


def test_async_with_exception_context():
    # An exception raised inside __aexit__ keeps the in-flight body
    # exception as its implicit __context__.
    class CM:
        async def __aenter__(self):
            pass
        async def __aexit__(self, *e):
            1/0
    async def f():
        async with CM():
            raise ValueError
    c = f()
    try:
        c.send(None)
    except ZeroDivisionError as e:
        assert e.__context__ is not None
        assert isinstance(e.__context__, ValueError)
    else:
        assert False, "should have raised"
def test_runtime_warning():
    # A coroutine that is created but never awaited must emit exactly one
    # RuntimeWarning ("... was never awaited") once it is collected.
    import gc, warnings  # XXX: importing warnings is expensive untranslated
    async def foobaz():
        pass
    gc.collect()  # emit warnings from unrelated older tests
    with warnings.catch_warnings(record=True) as l:
        foobaz()
        # several collections to make sure the coroutine object is freed
        gc.collect()
        gc.collect()
        gc.collect()
    assert len(l) == 1, repr(l)
    w = l[0].message
    assert isinstance(w, RuntimeWarning)
    assert str(w).startswith("coroutine ")
    assert str(w).endswith("foobaz' was never awaited")
def test_async_for_with_tuple_subclass():
    # __aiter__/__anext__ defined on a tuple subclass work with async for.
    class Done(Exception): pass

    class AIter(tuple):
        i = 0
        def __aiter__(self):
            return self
        async def __anext__(self):
            if self.i >= len(self):
                raise StopAsyncIteration
            self.i += 1
            return self[self.i - 1]

    result = []
    async def foo():
        async for i in AIter([42]):
            result.append(i)
        raise Done

    try:
        foo().send(None)
    except Done:
        pass
    assert result == [42]


def test_async_yield():
    # Minimal async generator consumed by 'async for'.
    class Done(Exception): pass

    async def mygen():
        yield 5

    result = []
    async def foo():
        async for i in mygen():
            result.append(i)
        raise Done

    try:
        foo().send(None)
    except Done:
        pass
    assert result == [5]


def test_async_yield_already_finished():
    # Iterating an exhausted async generator yields nothing more.
    class Done(Exception): pass

    async def mygen():
        yield 5

    result = []
    async def foo():
        g = mygen()
        async for i in g:
            result.append(i)
        async for i in g:
            assert False  # should not be reached
        raise Done

    try:
        foo().send(None)
    except Done:
        pass
    assert result == [5]
def test_async_yield_with_await():
    # An await inside an async generator suspends the consuming coroutine;
    # sent values are routed all the way into __await__.
    class Done(Exception): pass

    class X:
        def __await__(self):
            i1 = yield 40
            assert i1 == 82
            i2 = yield 41
            assert i2 == 93

    async def mygen():
        yield 5
        await X()
        yield 6

    result = []
    async def foo():
        async for i in mygen():
            result.append(i)
        raise Done

    co = foo()
    x = co.send(None)
    assert x == 40
    assert result == [5]
    x = co.send(82)
    assert x == 41
    assert result == [5]
    raises(Done, co.send, 93)
    assert result == [5, 6]


def test_async_yield_with_explicit_send():
    # asend() both resumes the generator with a value and awaits the next
    # yielded item; exhaustion surfaces as StopAsyncIteration.
    class X:
        def __await__(self):
            i1 = yield 40
            assert i1 == 82
            i2 = yield 41
            assert i2 == 93

    async def mygen():
        x = yield 5
        assert x == 2189
        await X()
        y = yield 6
        assert y == 319

    result = []
    async def foo():
        gen = mygen()
        result.append(await gen.asend(None))
        result.append(await gen.asend(2189))
        try:
            await gen.asend(319)
        except StopAsyncIteration:
            return 42
        else:
            raise AssertionError

    co = foo()
    x = co.send(None)
    assert x == 40
    assert result == [5]
    x = co.send(82)
    assert x == 41
    assert result == [5]
    e = raises(StopIteration, co.send, 93)
    assert e.value.args == (42,)
    assert result == [5, 6]


def test_async_yield_explicit_asend_and_next():
    # asend(value) on a just-started async generator must reject a
    # non-None value, analogous to generator.send().
    async def mygen(y):
        assert y == 4983
        x = yield 5
        assert x == 2189
        yield "ok"

    g = mygen(4983)
    raises(TypeError, g.asend(42).__next__)
    e = raises(StopIteration, g.asend(None).__next__)
    assert e.value.args == (5,)
    e = raises(StopIteration, g.asend(2189).__next__)
    assert e.value.args == ("ok",)


def test_async_yield_explicit_asend_and_send():
    # Sending into the asend() coroutine itself: the first send must be
    # None, with an exact error message otherwise.
    async def mygen(y):
        assert y == 4983
        x = yield 5
        assert x == 2189
        yield "ok"

    g = mygen(4983)
    e = raises(TypeError, g.asend(None).send, 42)
    assert str(e.value) == ("can't send non-None value to a just-started "
                            "async generator")
    e = raises(StopIteration, g.asend(None).send, None)
    assert e.value.args == (5,)
    e = raises(StopIteration, g.asend("IGNORED").send, 2189)  # xxx
    assert e.value.args == ("ok",)
def test_async_yield_explicit_asend_used_several_times():
    # A single asend() coroutine is driven through several suspensions of
    # an inner await; resuming it after completion raises RuntimeError.
    class X:
        def __await__(self):
            r = yield -2
            assert r == "cont1"
            r = yield -3
            assert r == "cont2"
            return -4

    async def mygen(y):
        x = await X()
        assert x == -4
        r = yield -5
        assert r == "foo"
        r = yield -6
        assert r == "bar"

    g = mygen(4983)
    gs = g.asend(None)
    r = gs.send(None)
    assert r == -2
    r = gs.send("cont1")
    assert r == -3
    e = raises(StopIteration, gs.send, "cont2")
    assert e.value.args == (-5,)
    e = raises(RuntimeError, gs.send, None)
    e = raises(RuntimeError, gs.send, None)
    #
    gs = g.asend("foo")
    e = raises(StopIteration, gs.send, None)
    assert e.value.args == (-6,)
    e = raises(RuntimeError, gs.send, "bar")


def test_async_yield_asend_notnone_throw():
    # throw() into a not-yet-started asend() coroutine propagates.
    async def f():
        yield 123
    raises(ValueError, f().asend(42).throw, ValueError)


def test_async_yield_asend_none_throw():
    # Same with a None-valued asend().
    async def f():
        yield 123
    raises(ValueError, f().asend(None).throw, ValueError)


def test_async_yield_athrow_send_none():
    # The exception given to athrow() surfaces on the first send(None).
    async def ag():
        yield 42
    raises(ValueError, ag().athrow(ValueError).send, None)


def test_async_yield_athrow_send_notnone():
    # Sending a non-None value into a just-started athrow() coroutine is
    # itself an error, with an exact message.
    async def ag():
        yield 42
    ex = raises(RuntimeError, ag().athrow(ValueError).send, 42)
    expected = ("can't send non-None value to a just-started coroutine", )
    assert ex.value.args == expected


def test_async_yield_athrow_send_after_exception():
    # Resuming the athrow() coroutine after its exception already fired
    # raises RuntimeError.
    async def ag():
        yield 42
    athrow_coro = ag().athrow(ValueError)
    raises(ValueError, athrow_coro.send, None)
    raises(RuntimeError, athrow_coro.send, None)


def test_async_yield_athrow_throw():
    # throw() into a just-started athrow() coroutine.
    async def ag():
        yield 42
    raises(RuntimeError, ag().athrow(ValueError).throw, LookupError)
    # CPython's message makes little sense; PyPy's message is different
def test_async_yield_athrow_while_running():
    # athrow() into a generator suspended at a yield: the except clause in
    # the generator runs and the generator returns, so the driving task
    # observes StopAsyncIteration.
    values = []
    async def ag():
        try:
            received = yield 1
        except ValueError:
            values.append(42)
            return
        yield 2
    async def run():
        running = ag()
        x = await running.asend(None)
        assert x == 1
        try:
            await running.athrow(ValueError)
        except StopAsyncIteration:
            pass
    try:
        run().send(None)
    except StopIteration:
        assert values == [42]
def test_async_aclose():
    # aclose() throws GeneratorExit into the suspended async generator.
    raises_generator_exit = False
    async def ag():
        nonlocal raises_generator_exit
        try:
            yield
        except GeneratorExit:
            raises_generator_exit = True
            raise
    async def run():
        a = ag()
        async for i in a:
            break
        await a.aclose()
    try:
        run().send(None)
    except StopIteration:
        pass
    assert raises_generator_exit
def test_async_aclose_ignore_generator_exit():
    # Yielding again from inside the GeneratorExit handler is a
    # RuntimeError ("async generator ignored GeneratorExit").
    async def ag():
        try:
            yield
        except GeneratorExit:
            yield
    async def run():
        a = ag()
        async for i in a:
            break
        await a.aclose()
    raises(RuntimeError, run().send, None)
def test_async_aclose_await_in_finally():
    # aclose() must drive awaits that happen inside the generator's
    # finally block; the outer task suspends on them.
    state = 0
    async def ag():
        nonlocal state
        try:
            yield
        finally:
            state = 1
            await suspend('coro')
            state = 2
    async def run():
        a = ag()
        async for i in a:
            break
        await a.aclose()
    a = run()
    assert state == 0
    # First send reaches the await inside finally and suspends there.
    assert a.send(None) == 'coro'
    assert state == 1
    try:
        a.send(None)
    except StopIteration:
        pass
    assert state == 2
def test_async_aclose_await_in_finally_with_exception():
    # Like the previous test, but throw() into the await suspended inside
    # the finally block; the generator catches it and records it.
    state = 0
    async def ag():
        nonlocal state
        try:
            yield
        finally:
            state = 1
            try:
                await suspend('coro')
            except Exception as exc:
                state = exc
    async def run():
        a = ag()
        async for i in a:
            break
        await a.aclose()
    a = run()
    assert state == 0
    assert a.send(None) == 'coro'
    assert state == 1
    exc = RuntimeError()
    try:
        a.throw(exc)
    except StopIteration:
        pass
    # The exact exception instance was caught inside the finally block.
    assert state == exc
def test_agen_aclose_await_and_yield_in_finally():
    # During aclose(), an await inside finally is allowed (and suspends
    # the driver), but a subsequent yield is a RuntimeError.
    async def foo():
        try:
            yield 1
            1 / 0
        finally:
            await suspend(42)
            yield 12
    async def run():
        gen = foo()
        it = gen.__aiter__()
        await it.__anext__()
        await gen.aclose()
    coro = run()
    assert coro.send(None) == 42
    with pytest.raises(RuntimeError):
        coro.send(None)
def test_async_aclose_in_finalize_hook_await_in_finally():
    # When an async generator is garbage-collected, the registered
    # finalizer hook (sys.set_asyncgen_hooks) is called; the aclose()
    # coroutine it obtains must still drive the await in finally.
    import gc
    state = 0
    async def ag():
        nonlocal state
        try:
            yield
        finally:
            state = 1
            await suspend('coro')
            state = 2
    async def run():
        a = ag()
        async for i in a:
            break
        del a
        gc.collect()
        gc.collect()
        gc.collect()
    a = run()
    a2 = None
    assert sys.get_asyncgen_hooks() == (None, None)
    def _finalize(g):
        nonlocal a2
        a2 = g.aclose()
    sys.set_asyncgen_hooks(finalizer=_finalize)
    assert state == 0
    with pytest.raises(StopIteration):
        a.send(None)
    # a2 is the aclose() coroutine captured by the finalizer hook.
    assert a2.send(None) == 'coro'
    assert state == 1
    with pytest.raises(StopIteration):
        a2.send(None)
    assert state == 2
    sys.set_asyncgen_hooks(None, None)
def test_async_anext_close():
    # A closed __anext__() coroutine cannot be iterated afterwards.
    async def ag():
        yield 42
    an = ag().__anext__()
    an.close()
    try:
        next(an)
    except RuntimeError:
        pass
    else:
        assert False, "didn't raise"
def run_async(coro):
    """Drive *coro* to completion synchronously, with no event loop.

    Returns a tuple ``(yielded, result)``: the list of values the
    coroutine yielded out to its (absent) scheduler, and the value it
    returned (``None`` if it returned nothing).
    """
    yielded = []
    while True:
        try:
            step = coro.send(None)
        except StopIteration as stop:
            final = stop.args[0] if stop.args else None
            return yielded, final
        yielded.append(step)
def test_async_generator():
    # Await inside a list comprehension in a coroutine.
    async def f(i):
        return i
    async def run_list():
        return [await c for c in [f(1), f(41)]]
    assert run_async(run_list()) == ([], [1, 41])
def test_async_genexpr():
    # Async generator expressions compose with async comprehensions.
    async def f(it):
        for i in it:
            yield i
    async def run_gen():
        gen = (i + 1 async for i in f([10, 20]))
        return [g + 100 async for g in gen]
    assert run_async(run_gen()) == ([], [111, 121])
def test_anext_tuple():
    # A yielded tuple must come back intact through __anext__'s
    # StopIteration plumbing (tuples need special-casing there).
    async def foo():
        try:
            yield (1,)
        except ZeroDivisionError:
            yield (2,)
    async def run():
        it = foo().__aiter__()
        return await it.__anext__()
    assert run_async(run()) == ([], (1,))
def test_async_genexpr_in_regular_function():
    async def arange(n):
        for i in range(n):
            yield i
    def make_arange(n):
        # This syntax is legal starting with Python 3.7
        return (i * 2 async for i in arange(n))
    async def run():
        return [i async for i in make_arange(10)]
    res = run_async(run())
    assert res[1] == [i * 2 for i in range(10)]
# Helpers for test_async_gen_exception_11() below
def sync_iterate(g):
    """Exhaust generator *g*, collecting its values into a list.

    Any exception raised while stepping is recorded as ``str(type(exc))``
    and iteration continues; normal exhaustion appends a final ``'STOP'``
    marker.
    """
    seen = []
    while True:
        try:
            value = g.__next__()
        except StopIteration:
            seen.append('STOP')
            return seen
        except Exception as exc:
            seen.append(str(type(exc)))
        else:
            seen.append(value)
def async_iterate(g):
    # Async analogue of sync_iterate(): step the async generator by
    # driving each __anext__() coroutine a single tick at a time.
    res = []
    while True:
        try:
            g.__anext__().__next__()
        except StopAsyncIteration:
            res.append('STOP')
            break
        except StopIteration as ex:
            if ex.args:
                # The value produced by the async generator arrives as
                # StopIteration(value) from the __anext__() coroutine.
                res.append(ex.args[0])
            else:
                # An argument-less StopIteration is anomalous; record it
                # and stop iterating.
                res.append('EMPTY StopIteration')
                break
        except Exception as ex:
            res.append(str(type(ex)))
    return res
def test_async_gen_exception_11():
    # bpo-33786: throwing GeneratorExit into an inner (a)generator and
    # catching it must behave identically for sync and async generators.
    def sync_gen():
        yield 10
        yield 20
    def sync_gen_wrapper():
        yield 1
        sg = sync_gen()
        sg.send(None)
        try:
            sg.throw(GeneratorExit())
        except GeneratorExit:
            yield 2
        yield 3
    async def async_gen():
        yield 10
        yield 20
    async def async_gen_wrapper():
        yield 1
        asg = async_gen()
        await asg.asend(None)
        try:
            await asg.athrow(GeneratorExit())
        except GeneratorExit:
            yield 2
        yield 3
    sync_gen_result = sync_iterate(sync_gen_wrapper())
    async_gen_result = async_iterate(async_gen_wrapper())
    assert sync_gen_result == async_gen_result
def test_asyncgen_yield_stopiteration():
    # A StopIteration *instance* yielded by an async generator must come
    # through as a value, not terminate iteration.
    async def foo():
        yield 1
        yield StopIteration(2)
    async def run():
        it = foo().__aiter__()
        val1 = await it.__anext__()
        assert val1 == 1
        val2 = await it.__anext__()
        assert isinstance(val2, StopIteration)
        assert val2.value == 2
    run_async(run())
def test_asyncgen_hooks_shutdown():
    # The firstiter hook registers each async generator on first
    # iteration; explicitly aclosing them must run their finally blocks.
    finalized = 0
    asyncgens = []
    def register_agen(agen):
        asyncgens.append(agen)
    async def waiter(timeout):
        nonlocal finalized
        try:
            await suspend('running waiter')
            yield 1
        finally:
            await suspend('closing waiter')
            finalized += 1
    async def wait():
        async for _ in waiter(1):
            pass
    task1 = wait()
    task2 = wait()
    old_hooks = sys.get_asyncgen_hooks()
    try:
        sys.set_asyncgen_hooks(firstiter=register_agen)
        assert task1.send(None) == 'running waiter'
        assert task2.send(None) == 'running waiter'
        assert len(asyncgens) == 2
        assert run_async(asyncgens[0].aclose()) == (['closing waiter'], None)
        assert run_async(asyncgens[1].aclose()) == (['closing waiter'], None)
        assert finalized == 2
    finally:
        sys.set_asyncgen_hooks(*old_hooks)
def test_coroutine_capture_origin():
    # sys.set_coroutine_origin_tracking_depth() makes coroutines record
    # the call site(s) that created them in cr_origin.
    # NOTE: this test is line-number sensitive -- each here() call must
    # stay immediately above the call whose line it measures.
    import contextlib
    def here():
        f = sys._getframe().f_back
        return (f.f_code.co_filename, f.f_lineno)
    try:
        async def corofn():
            pass
        with contextlib.closing(corofn()) as coro:
            assert coro.cr_origin is None
        sys.set_coroutine_origin_tracking_depth(1)
        fname, lineno = here()
        with contextlib.closing(corofn()) as coro:
            print(coro.cr_origin)
            assert coro.cr_origin == (
                (fname, lineno + 1, "test_coroutine_capture_origin"),)
        sys.set_coroutine_origin_tracking_depth(2)
        def nested():
            return (here(), corofn())
        fname, lineno = here()
        ((nested_fname, nested_lineno), coro) = nested()
        with contextlib.closing(coro):
            print(coro.cr_origin)
            assert coro.cr_origin == (
                (nested_fname, nested_lineno, "nested"),
                (fname, lineno + 1, "test_coroutine_capture_origin"))
        # Check we handle running out of frames correctly
        sys.set_coroutine_origin_tracking_depth(1000)
        with contextlib.closing(corofn()) as coro:
            print(coro.cr_origin)
            assert 1 <= len(coro.cr_origin) < 1000
    finally:
        sys.set_coroutine_origin_tracking_depth(0)
def test_runtime_warning_origin_tracking():
    # A never-awaited coroutine emits a RuntimeWarning when collected,
    # naming the coroutine and (with tracking) where it was created.
    import gc, warnings  # XXX: importing warnings is expensive untranslated
    async def foobaz():
        pass
    gc.collect()  # emit warnings from unrelated older tests
    with warnings.catch_warnings(record=True) as l:
        foobaz()
        gc.collect()
        gc.collect()
        gc.collect()
    assert len(l) == 1, repr(l)
    w = l[0].message
    assert isinstance(w, RuntimeWarning)
    assert str(w).startswith("coroutine ")
    assert str(w).endswith("foobaz' was never awaited")
    assert "test_runtime_warning_origin_tracking" in str(w)
def test_await_multiple_times_same_gen():
    # Awaiting the same __anext__()/aclose() coroutine twice is an error.
    async def async_iterate():
        yield 1
        yield 2
    async def run():
        it = async_iterate()
        nxt = it.__anext__()
        await nxt
        with pytest.raises(RuntimeError):
            await nxt
        coro = it.aclose()
        await coro
        with pytest.raises(RuntimeError):
            await coro
    run_async(run())
def test_async_generator_wrapped_value_is_real_type():
    # Regression test: values travelling through an async generator must
    # be real objects when seen from a trace function.
    def tracer(frame, evt, *args):
        str(args)  # used to crash when seeing the AsyncGenValueWrapper
        return tracer
    async def async_gen():
        yield -2
    async def async_test():
        a = 2
        async for i in async_gen():
            a = 4
        else:
            a = 6
    def run():
        x = async_test()
        try:
            sys.settrace(tracer)
            x.send(None)
        finally:
            sys.settrace(None)
    raises(StopIteration, run)
| 23.771715 | 77 | 0.552584 |
ace2070528cdced1c6f69847cc83f30dcbbe366e | 921 | py | Python | src/etc/print-new-snapshot.py | carols10cents/cargo | 3f6af3dcfa1875990a27aae377836a9c6541e1e6 | [
"Apache-2.0",
"MIT"
] | null | null | null | src/etc/print-new-snapshot.py | carols10cents/cargo | 3f6af3dcfa1875990a27aae377836a9c6541e1e6 | [
"Apache-2.0",
"MIT"
] | null | null | null | src/etc/print-new-snapshot.py | carols10cents/cargo | 3f6af3dcfa1875990a27aae377836a9c6541e1e6 | [
"Apache-2.0",
"MIT"
] | null | null | null | # When updating snapshots, run this file and pipe it into `src/snapshots.txt`
import os
import subprocess
import sys
import hashlib
date = sys.argv[1]
print(date)
if not os.path.isdir('target/dl'):
os.makedirs('target/dl')
snaps = {
'macos-i386': 'i686-apple-darwin',
'macos-x86_64': 'x86_64-apple-darwin',
'linux-i386': 'i686-unknown-linux-gnu',
'linux-x86_64': 'x86_64-unknown-linux-gnu',
'winnt-i386': 'i686-pc-mingw32',
}
for platform in sorted(snaps):
triple = snaps[platform]
tarball = 'cargo-nightly-' + triple + '.tar.gz'
url = 'https://static-rust-lang-org.s3.amazonaws.com/cargo-dist/' + date + '/' + tarball
dl_path = "target/dl/" + tarball
ret = subprocess.call(["curl", "-s", "-o", dl_path, url])
if ret != 0:
raise Exception("failed to fetch url")
h = hashlib.sha1(open(dl_path, 'rb').read()).hexdigest()
print(' ' + platform + ' ' + h)
| 28.78125 | 92 | 0.633008 |
ace20899b23e597f399d9d3aefd3e0299f0cef4c | 1,742 | py | Python | evaluation.py | muzudho/event-placement-ai | 48c24f44136cda11316219a140606be53b3043d2 | [
"MIT"
] | null | null | null | evaluation.py | muzudho/event-placement-ai | 48c24f44136cda11316219a140606be53b3043d2 | [
"MIT"
] | null | null | null | evaluation.py | muzudho/event-placement-ai | 48c24f44136cda11316219a140606be53b3043d2 | [
"MIT"
] | null | null | null | def evaluate(pos_df):
"""
position.csv
------------
X,Y,BLOCK,TABLE,PARTICIPANT,GENRE_CODE
0,0,C,27,2,Red
1,0,C,26,1,Red
2,0,C,25,37,Red
"""
# 評価値
value = 0
# block_dict[block][genre_code] = value
block_dict = {}
for _index, row in pos_df.iterrows():
# x = row["X"]
# y = row["Y"]
block = row["BLOCK"]
# table_id = row["TABLE"]
# participant_id = row["PARTICIPANT"]
genre_code = row["GENRE_CODE"]
if not(block in block_dict):
block_dict[block] = {}
if not(genre_code in block_dict[block]):
block_dict[block][genre_code] = 0
block_dict[block][genre_code] += 1
# 集計。ブロックに同じ色が集まっているほど高評価。
for _block_name, genre_code_dict in block_dict.items():
for _genre_code_name, count in genre_code_dict.items():
value += count ** 2
break
# 集計。テーブル番号順にして、同じ色が連続したら、連続した数だけ加点。
# ただし、ブロックの切れ目は連続しない。
continue_bonus = 0
sorted_pos_df = pos_df.sort_values(by=["TABLE"], ascending=True)
# print(sorted_pos_df.head(5))
table_ordered_list = sorted_pos_df[[
"TABLE", "BLOCK", "GENRE_CODE"]].values.tolist()
# print("table_ordered_list: {}".format(table_ordered_list))
prev_block = None
prev_genre_code = None
for entry in table_ordered_list:
if prev_genre_code == entry[2] and prev_block == entry[1]:
continue_bonus += 1
value += continue_bonus
# print("prev_genre_code: {}, entry[2]: {}, value: {}".format(
# prev_genre_code, entry[2], value))
else:
prev_genre_code = entry[2]
continue_bonus = 0
prev_block = entry[1]
return value
| 28.096774 | 74 | 0.58496 |
ace208f968047e5a8eafa28a86ac180a115c7b44 | 5,480 | py | Python | project/api/lines.py | mycognosist/mycofile-api | d38efef7e9c256e046e9c5ff3ddf89b686e43377 | [
"MIT"
] | null | null | null | project/api/lines.py | mycognosist/mycofile-api | d38efef7e9c256e046e9c5ff3ddf89b686e43377 | [
"MIT"
] | null | null | null | project/api/lines.py | mycognosist/mycofile-api | d38efef7e9c256e046e9c5ff3ddf89b686e43377 | [
"MIT"
] | null | null | null | # project/api/lines.py
from flask import Blueprint, jsonify, make_response, request
from project.api.models import Line
from project import db
from sqlalchemy import exc
lines_blueprint = Blueprint('lines', __name__, template_folder='./templates')
@lines_blueprint.errorhandler(404)
def not_found(error):
    """Return a JSON body (instead of Flask's default HTML page) for 404s."""
    return make_response(jsonify({'error': 'Not found.'}), 404)
# add a line activity
@lines_blueprint.route('/api/v1/lines', methods=['POST'])
def add_line_activity():
    """Create a new Line from the posted JSON payload.

    Expects the keys: container, substrate, user_id, culture_id.
    Returns 201 on success; 400 when the payload is missing/empty or the
    insert violates a database constraint (e.g. unknown foreign keys).
    """
    post_data = request.get_json()
    if not post_data:
        return jsonify({
            'status': 'fail',
            'message': 'Invalid payload.'
        }), 400
    try:
        line = Line(
            container=post_data.get('container'),
            substrate=post_data.get('substrate'),
            user_id=post_data.get('user_id'),
            culture_id=post_data.get('culture_id'),
        )
        line.save()
        return jsonify({
            'status': 'success',
            'message': 'Line object was added!'
        }), 201
    except exc.IntegrityError:
        # Roll back so the session stays usable after the failed insert.
        db.session.rollback()
        return jsonify({
            'status': 'fail',
            'message': 'Invalid payload.'
        }), 400
# display a single line object
@lines_blueprint.route('/api/v1/users/<user_id>/lines/<line_id>', methods=['GET'])
def get_single_line_object(user_id, line_id):
    """Get single line object details.

    Returns 200 with the serialized line, or 404 when no line with this
    id exists for the given user.
    """
    # Default response, reused by both "not found" exits below.
    response_object = {
        'status': 'fail',
        'message': 'Line object does not exist.'
    }
    try:
        line = Line.query.filter_by(user_id=user_id).filter_by(id=line_id).first()
        if not line:
            return jsonify(response_object), 404
        else:
            response_object = {
                'status': 'success',
                'data': {
                    'id': line.id,
                    'culture_id': line.culture_id,
                    'container': line.container,
                    'substrate': line.substrate,
                    'timestamp': line.timestamp,
                    'user_id': line.user_id
                }
            }
            return jsonify(response_object), 200
    except ValueError:
        # NOTE(review): presumably guards against a non-integer line_id
        # in the URL -- confirm the query actually raises ValueError here.
        return jsonify(response_object), 404
# display all lines in the library for a specified user
@lines_blueprint.route('/api/v1/users/<user_id>/lines', methods=['GET'])
def get_all_lines(user_id):
    """Get all line details for user."""
    # Serialize every line belonging to this user.
    lines_list = [
        {
            'id': line.id,
            'culture_id': line.culture_id,
            'container': line.container,
            'substrate': line.substrate,
            'timestamp': line.timestamp,
            'user_id': line.user_id,
        }
        for line in Line.query.filter_by(user_id=user_id).all()
    ]
    return jsonify({
        'status': 'success',
        'data': {
            'lines': lines_list
        }
    }), 200
# delete a line object
@lines_blueprint.route('/api/v1/users/<user_id>/lines/<line_object_id>', methods=['DELETE'])
def delete_single_line_object(user_id, line_object_id):
    """Delete a line object.

    Returns 200 on success, 404 when the object does not exist for this
    user, 400 on a database integrity failure.
    """
    try:
        line = Line.query.filter_by(user_id=user_id).filter_by(id=line_object_id).first()
        if not line:
            response_object = {
                'status': 'fail',
                'message': f'{line_object_id} does not exist.'
            }
            return jsonify(response_object), 404
        else:
            db.session.delete(line)
            db.session.commit()
            response_object = {
                'status': 'success',
                'message': f'{line_object_id} was deleted.'
            }
            return jsonify(response_object), 200
    except exc.IntegrityError as e:
        # Keep the session usable after the failed delete.
        db.session.rollback()
        response_object = {
            'status': 'fail',
            'message': 'Invalid payload.'
        }
        return jsonify(response_object), 400
# update a line object
@lines_blueprint.route('/api/v1/users/<user_id>/lines/<line_object_id>', methods=['PUT'])
def update_single_line_object(user_id, line_object_id):
    """Update an existing line object.

    Only the ``active`` flag is updatable. Returns 201 on success, 404
    when the object does not exist, 400 on a missing payload or a
    database integrity failure.
    """
    post_data = request.get_json()
    if not post_data:
        response_object = {
            'status': 'fail',
            'message': 'Invalid payload.'
        }
        return jsonify(response_object), 400
    active = post_data.get('active')
    try:
        line = Line.query.filter_by(user_id=user_id).filter_by(id=line_object_id).first()
        if not line:
            response_object = {
                'status': 'fail',
                'message': f'{line_object_id} does not exist.'
            }
            return jsonify(response_object), 404
        else:
            line.active = active
            db.session.commit()
            response_object = {
                'status': 'success',
                'message': f'{line_object_id} was updated.'
            }
            return jsonify(response_object), 201
    except exc.IntegrityError as e:
        # Keep the session usable after the failed update.
        db.session.rollback()
        response_object = {
            'status': 'fail',
            'message': 'Invalid payload.'
        }
        return jsonify(response_object), 400
| 32.619048 | 92 | 0.574088 |
ace2097f3303013cb012e8117e19bf47b273d21a | 2,749 | py | Python | abstract_model.py | cemoody/simple_mf | 95ec1d5592742cc9a167eda873021f4a881ce569 | [
"MIT"
] | 121 | 2018-08-19T20:04:44.000Z | 2022-03-14T21:52:44.000Z | abstract_model.py | cemoody/simple_mf | 95ec1d5592742cc9a167eda873021f4a881ce569 | [
"MIT"
] | 5 | 2018-08-23T07:57:38.000Z | 2020-11-13T17:41:45.000Z | abstract_model.py | cemoody/simple_mf | 95ec1d5592742cc9a167eda873021f4a881ce569 | [
"MIT"
] | 39 | 2018-08-23T05:41:58.000Z | 2021-09-12T00:21:29.000Z | import torch
import numpy as np
from random import shuffle
from torch import from_numpy
import pytorch_lightning as pl
from torch.utils.data import DataLoader
from torch.utils.data import TensorDataset
from torch.utils.data import BatchSampler
from torch.utils.data import RandomSampler
class AbstractModel(pl.LightningModule):
    """Shared PyTorch-Lightning scaffolding for the factorization models.

    Provides data storage, train/val/test steps and epoch-end
    aggregation. Subclasses must implement ``forward(input)``,
    ``loss(prediction, target) -> (loss, log_dict)`` and
    ``reg() -> (loss, log_dict)``, and set ``self.batch_size``.
    """

    def save_data(self, train_x, train_y, test_x, test_y, train_d=None, test_d=None):
        # Convert the numpy train/test arrays to tensors for the loaders.
        # NOTE(review): train_d/test_d are appended *without* from_numpy();
        # presumably they are already torch tensors -- confirm at call sites.
        if train_d is None:
            self.train_arrs = [from_numpy(x) for x in [train_x, train_y]]
            self.test_arrs = [from_numpy(x) for x in [test_x, test_y]]
        else:
            self.train_arrs = [from_numpy(x) for x in [train_x, train_y]] + [train_d]
            self.test_arrs = [from_numpy(x) for x in [test_x, test_y]] +[test_d]

    def step(self, batch, batch_nb, prefix='train', add_reg=True):
        # Shared step logic: forward pass, loss, and (optionally) the
        # regularization term. Returns both 'loss' (required by Lightning)
        # and a prefixed copy for per-phase logging.
        input, target = batch
        prediction = self.forward(input)
        loss, log = self.loss(prediction, target)
        if add_reg:
            loss_reg, log_ = self.reg()
            loss = loss + loss_reg
            log.update(log_)
        log[f'{prefix}_loss'] = loss
        return {f'{prefix}_loss': loss, 'loss':loss, 'log': log}

    def training_step(self, batch, batch_nb):
        return self.step(batch, batch_nb, 'train')

    def test_step(self, batch, batch_nb):
        # Note that we do *not* include the regularization loss
        # at test time
        return self.step(batch, batch_nb, 'test', add_reg=False)

    def validation_step(self, batch, batch_nb):
        return self.step(batch, batch_nb, 'val', add_reg=False)

    def test_epoch_end(self, outputs):
        # NOTE(review): the aggregated test loss is logged under the key
        # 'val_loss' (sic) -- confirm whether this is intentional before
        # renaming, as monitors/checkpoints may key off it.
        test_loss_mean = torch.stack([x['test_loss'] for x in outputs]).mean()
        log = {'val_loss': test_loss_mean}
        return {'avg_test_loss': test_loss_mean, 'log': log}

    def validation_epoch_end(self, outputs):
        test_loss_mean = torch.stack([x['val_loss'] for x in outputs]).mean()
        log = {'val_loss': test_loss_mean}
        return {'avg_val_loss': test_loss_mean, 'log': log}

    def dataloader(self, is_train=True):
        # Build a shuffling DataLoader over the saved train or test arrays.
        if is_train:
            dataset = TensorDataset(*self.train_arrs)
        else:
            dataset = TensorDataset(*self.test_arrs)
        bs = BatchSampler(RandomSampler(dataset),
                          batch_size=self.batch_size, drop_last=False)
        return DataLoader(dataset, batch_sampler=bs, num_workers=8)

    def train_dataloader(self):
        return self.dataloader(is_train=True)

    def test_dataloader(self):
        return self.dataloader(is_train=False)

    def val_dataloader(self):
        # Validation reuses the test split (no separate val set is stored).
        return self.dataloader(is_train=False)

    def configure_optimizers(self):
        return torch.optim.Adam(self.parameters(), lr=1e-3)
ace20aed10ddcd36adb3d867fa1bcb15827abc94 | 2,476 | py | Python | scripts/pipeline.py | wmglab-duke/ascent | 2ca8c39a4462a728108038294ddac27488e9758b | [
"MIT"
] | 13 | 2021-07-21T18:03:28.000Z | 2022-02-14T17:25:33.000Z | scripts/pipeline.py | wmglab-duke/ascent | 2ca8c39a4462a728108038294ddac27488e9758b | [
"MIT"
] | 6 | 2021-09-29T17:01:59.000Z | 2022-02-02T18:06:34.000Z | scripts/pipeline.py | wmglab-duke/ascent | 2ca8c39a4462a728108038294ddac27488e9758b | [
"MIT"
] | 4 | 2021-09-02T09:39:21.000Z | 2021-11-04T22:42:01.000Z | #!/usr/bin/env python3.7
"""
The copyrights of this software are owned by Duke University.
Please refer to the LICENSE.txt and README.txt files for licensing instructions.
The source code can be found on the following GitHub repository: https://github.com/wmglab-duke/ascent
"""
# builtins
import os
import time
import sys
# ascent
from src.runner import Runner
from src.utils.enums import SetupMode, Config
from .env_setup import run as env_setup
def run(args):
    """Run the ASCENT pipeline for each run index in ``args.run_indices``.

    Each index is validated, its run configuration is located under
    config/user/runs/, the env configuration is created interactively if
    missing, and a ``Runner`` is built and executed. Exits the process
    (status 1) on an unsupported Python version or an invalid argument /
    missing run configuration.
    """
    # test
    if not (sys.version_info.major == 3 and sys.version_info.minor >= 7):
        print('You are running Python {}.{}, but 3.7 or later required'.format(sys.version_info.major,
                                                                               sys.version_info.minor))
        exit(1)

    # create bin/ directory for storing compiled Java files if it does not yet exist
    if not (os.path.exists('bin')):
        os.mkdir('bin')

    for argument in args.run_indices:
        # START timer
        start = time.time()

        # Convert once (the original called int(argument) separately for
        # the type check and the sign check).
        try:
            run_index = int(argument)
        except ValueError:
            print('Invalid type for argument: {}\n'
                  'All arguments must be positive integers.'.format(argument))
            exit(1)

        if run_index < 0:
            print('Invalid sign for argument: {}\n'
                  'All arguments must be positive integers.'.format(argument))
            exit(1)

        print('\n\n########## STARTING RUN {} ##########\n\n'.format(argument))

        run_path = os.path.join('config', 'user', 'runs', '{}.json'.format(argument))
        if not os.path.exists(run_path):
            print('Invalid run configuration path: {}'.format(run_path))
            exit(1)

        env_path = os.path.join('config', 'system', 'env.json')
        if not os.path.exists(env_path):
            print('Missing env configuration file: {}'.format(env_path))
            env_setup(env_path)

        # initialize Runner (loads in parameters)
        runner = Runner(run_index)
        runner.add(SetupMode.NEW, Config.RUN, run_path)
        runner.add(SetupMode.NEW, Config.ENV, env_path)
        runner.add(SetupMode.OLD, Config.CLI_ARGS, vars(args))

        # populate environment variables
        runner.populate_env_vars()

        # ready, set, GO!
        runner.run()

        # END timer
        end = time.time()
        print('\nruntime: {}'.format(end - start))
    # Bug fix: the trailing ``del start, end`` was removed -- it raised
    # NameError whenever args.run_indices was empty.
ace20bb6c8ca70830add0ecc586bcede4aff7429 | 9,807 | py | Python | deploy/utils.py | rasalt/healthcare | 2c4324c0ad54b31b8e3ba20ed7bf3acddd8b0819 | [
"Apache-2.0"
] | null | null | null | deploy/utils.py | rasalt/healthcare | 2c4324c0ad54b31b8e3ba20ed7bf3acddd8b0819 | [
"Apache-2.0"
] | null | null | null | deploy/utils.py | rasalt/healthcare | 2c4324c0ad54b31b8e3ba20ed7bf3acddd8b0819 | [
"Apache-2.0"
] | null | null | null | """Utility functions for the project deployment scripts."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import logging
import os
import string
import subprocess
import sys
import tempfile
import jsonschema
import yaml
# Options for running GCloud and shell commands in this module.
GcloudOptions = collections.namedtuple('GcloudOptions', [
'dry_run', # If True, no gcloud commands will be executed.
'gcloud_bin', # Location of the gcloud binary.
])
GCLOUD_OPTIONS = GcloudOptions(dry_run=True, gcloud_bin='gcloud')
# Schema file for project configuration YAML files.
_PROJECT_CONFIG_SCHEMA = os.path.join(
os.path.dirname(__file__), 'project_config.yaml.schema')
def wait_for_yes_no(text):
"""Prompt user for Yes/No and return true if Yes/Y. Default to No."""
if GCLOUD_OPTIONS.dry_run:
return True
while True:
# For compatibility with both Python 2 and 3.
if sys.version_info[0] < 3:
prompt = raw_input(text)
else:
prompt = input(text)
if not prompt or prompt[0] in 'nN':
# Default to No.
return False
if prompt[0] in 'yY':
return True
# Not Y or N, Keep trying.
def read_yaml_file(path):
"""Reads and parses a YAML file.
Args:
path (string): The path to the YAML file.
Returns:
A dict holding the parsed contents of the YAML file, or None if the file
could not be read or parsed.
"""
try:
with open(path, 'r') as stream:
return yaml.load(stream)
except (yaml.YAMLError, IOError) as e:
logging.error('Error reading YAML file: %s', e)
return None
def write_yaml_file(contents, path):
"""Saves a dictionary as a YAML file.
Args:
contents (dict): The contents to write to the YAML file.
path (string): The path to the YAML file.
"""
if GCLOUD_OPTIONS.dry_run:
# If using dry_run mode, don't create the file, just print the contents.
print('Contents of {}:'.format(path))
print('===================================================================')
print(yaml.safe_dump(contents, default_flow_style=False))
print('===================================================================')
return
with open(path, 'w') as outfile:
yaml.safe_dump(contents, outfile, default_flow_style=False)
def validate_config_yaml(config):
"""Validates a Project config YAML against the schema.
Args:
config (dict): The parsed contents of the project config YAML file.
Raises:
jsonschema.exceptions.ValidationError: if the YAML contents do not match the
schema.
"""
schema_file_path = os.path.join(
os.path.dirname(os.path.realpath(sys.argv[0])), _PROJECT_CONFIG_SCHEMA)
schema = read_yaml_file(schema_file_path)
jsonschema.validate(config, schema)
class GcloudRuntimeError(Exception):
"""Runtime exception raised when gcloud return code is non-zero."""
def run_gcloud_command(cmd, project_id):
"""Execute a gcloud command and return the output.
Args:
cmd (list): a list of strings representing the gcloud command to run
project_id (string): append `--project {project_id}` to the command. Most
commands should specify the project ID, for those that don't, explicitly
set this to None.
Returns:
A string, the output from the command execution.
Raises:
GcloudRuntimeError: when command execution returns a non-zero return code.
"""
cmd = [GCLOUD_OPTIONS.gcloud_bin] + cmd
if project_id is not None:
cmd.extend(['--project', project_id])
logging.info('Executing command: %s', ' '.join(cmd))
if GCLOUD_OPTIONS.dry_run:
# Don't run the command, just return a place-holder value
print('>>>> {}'.format(' '.join(cmd)))
return '__DRY_RUN_MODE__:__DRY_RUN_MODE__'
p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, shell=False, bufsize=-1,
close_fds=True)
out, err = p.communicate()
logging.info('Command returned stdout: %s', out)
if err:
logging.info('Command returned stderr: %s', err)
if p.returncode != 0:
raise GcloudRuntimeError(
'Command %s returned non-zero exit code: %s' % (cmd, p.returncode))
return out.decode()
def create_new_deployment(deployment_template, deployment_name, project_id):
"""Creates a new Deployment Manager deployment from a template.
Args:
deployment_template (dict): The dictionary representation of a deployment
manager YAML template.
deployment_name (string): The name for the deployment.
project_id (string): The project under which to create the deployment.
"""
# Save the deployment manager template to a temporary file in the same
# directory as the deployment manager templates.
dm_template_dir = os.path.join(os.path.dirname(__file__), 'templates')
dm_template_file = tempfile.NamedTemporaryFile(suffix='.yaml',
dir=dm_template_dir)
write_yaml_file(deployment_template, dm_template_file.name)
# Create the deployment.
run_gcloud_command(['deployment-manager', 'deployments', 'create',
deployment_name,
'--config', dm_template_file.name,
'--automatic-rollback-on-error'],
project_id)
# Check deployment exists (and wasn't automcatically rolled back
run_gcloud_command(['deployment-manager', 'deployments', 'describe',
deployment_name], project_id)
def create_notification_channel(alert_email, project_id):
"""Creates a new Stackdriver email notification channel.
Args:
alert_email (string): The email address to send alerts to.
project_id (string): The project under which to create the channel.
Returns:
A string, the name of the notification channel
Raises:
GcloudRuntimeError: when the channel cannot be created.
"""
# Create a config file for the new Email notification channel.
config_file = tempfile.NamedTemporaryFile(suffix='.yaml')
channel_config = {
'type': 'email',
'displayName': 'Email',
'labels': {
'email_address': alert_email
}
}
write_yaml_file(channel_config, config_file.name)
# Create the new channel and get its name.
channel_name = run_gcloud_command(
['alpha', 'monitoring', 'channels', 'create',
'--channel-content-from-file', config_file.name,
'--format', 'value(name)'], project_id).strip()
return channel_name
def create_alert_policy(
resource_type, metric_name, policy_name, description, channel, project_id):
"""Creates a new Stackdriver alert policy for a logs-based metric.
Args:
resource_type (string): The resource type for the metric.
metric_name (string): The name of the logs-based metric.
policy_name (string): The name for the newly created alert policy.
description (string): A description of the alert policy.
channel (string): The Stackdriver notification channel to send alerts on.
project_id (string): The project under which to create the alert.
Raises:
GcloudRuntimeError: when command execution returns a non-zero return code.
"""
# Create a config file for the new alert policy.
config_file = tempfile.NamedTemporaryFile(suffix='.yaml')
# Send an alert if the metric goes above zero.
alert_config = {
'displayName': policy_name,
'documentation': {
'content': description,
'mimeType': 'text/markdown',
},
'conditions': [{
'conditionThreshold': {
'comparison': 'COMPARISON_GT',
'thresholdValue': 0,
'filter': ('resource.type="{}" AND '
'metric.type="logging.googleapis.com/user/{}"'.format(
resource_type, metric_name)),
'duration': '0s'
},
'displayName': 'No tolerance on {}!'.format(metric_name),
}],
'combiner': 'AND',
'enabled': True,
'notificationChannels': [channel],
}
write_yaml_file(alert_config, config_file.name)
# Create the new alert policy.
run_gcloud_command(['alpha', 'monitoring', 'policies', 'create',
'--policy-from-file', config_file.name], project_id)
def get_gcloud_user():
"""Returns the active authenticated gcloud account."""
return run_gcloud_command(
['config', 'list', 'account', '--format', 'value(core.account)'],
project_id=None).strip()
def get_project_number(project_id):
"""Returns the project number the given project."""
return run_gcloud_command(
['projects', 'describe', project_id, '--format', 'value(projectNumber)'],
project_id=None).strip()
def get_deployment_manager_service_account(project_id):
"""Returns the deployment manager service account for the given project."""
return 'serviceAccount:{}@cloudservices.gserviceaccount.com'.format(
get_project_number(project_id))
def get_log_sink_service_account(log_sink_name, project_id):
"""Gets the service account name for the given log sink."""
sink_service_account = run_gcloud_command([
'logging', 'sinks', 'describe', log_sink_name,
'--format', 'value(writerIdentity)'], project_id).strip()
# The name returned has a 'serviceAccount:' prefix, so remove this.
return sink_service_account.split(':')[1]
def resolve_env_vars(config):
"""Recursively resolves environment variables in config values."""
if isinstance(config, str):
return string.Template(config).substitute(os.environ)
elif isinstance(config, dict):
return {k: resolve_env_vars(v) for k, v in config.items()}
elif isinstance(config, list):
return [resolve_env_vars(i) for i in config]
else:
return config
| 34.65371 | 80 | 0.676761 |
ace20c58e952a24cb7e1f3aae4530dd450160ded | 999 | py | Python | frappe/desk/doctype/tag/test_tag.py | alijasim/frappe | 17803d5408d0a0257ab6968acb1a847a582b07ce | [
"MIT"
] | null | null | null | frappe/desk/doctype/tag/test_tag.py | alijasim/frappe | 17803d5408d0a0257ab6968acb1a847a582b07ce | [
"MIT"
] | 17 | 2021-03-22T18:47:14.000Z | 2022-03-15T12:21:00.000Z | frappe/desk/doctype/tag/test_tag.py | alijasim/frappe | 17803d5408d0a0257ab6968acb1a847a582b07ce | [
"MIT"
] | null | null | null | from __future__ import unicode_literals
import unittest
import frappe
from frappe.desk.reportview import get_stats
from frappe.desk.doctype.tag.tag import add_tag
class TestTag(unittest.TestCase):
    """Checks the tag counts reported by ``get_stats`` for DocType records."""

    def setUp(self) -> None:
        # Start from a clean slate: no Tag documents, no user tags.
        frappe.db.sql("DELETE from `tabTag`")
        frappe.db.sql("UPDATE `tabDocType` set _user_tags=''")

    def test_tag_count_query(self):
        # With no tags assigned, every DocType is counted under 'No Tags'.
        self.assertDictEqual(
            get_stats('["_user_tags"]', 'DocType'),
            {'_user_tags': [['No Tags', frappe.db.count('DocType')]]})

        add_tag('Standard', 'DocType', 'User')
        add_tag('Standard', 'DocType', 'ToDo')

        # Count with no filter: the two tagged DocTypes move to 'Standard'.
        self.assertDictEqual(
            get_stats('["_user_tags"]', 'DocType'),
            {'_user_tags': [['Standard', 2],
                            ['No Tags', frappe.db.count('DocType') - 2]]})

        # Count with a child-table field filter: only 'User' matches both
        # the DocField and DocType name conditions.
        filters = ('[["DocField", "fieldname", "like", "%last_name%"], '
                   '["DocType", "name", "like", "%use%"]]')
        self.assertDictEqual(
            get_stats('["_user_tags"]', 'DocType', filters=filters),
            {'_user_tags': [['Standard', 1], ['No Tags', 0]]})
ace20db230776c4e7d52ca9f45083b2e1493b2ad | 4,971 | py | Python | uhd_restpy/testplatform/sessions/ixnetwork/traffic/trafficitem/highlevelstream/udf/valuelist/valuelist.py | Vibaswan/ixnetwork_restpy | 239fedc7050890746cbabd71ea1e91c68d9e5cad | [
"MIT"
] | null | null | null | uhd_restpy/testplatform/sessions/ixnetwork/traffic/trafficitem/highlevelstream/udf/valuelist/valuelist.py | Vibaswan/ixnetwork_restpy | 239fedc7050890746cbabd71ea1e91c68d9e5cad | [
"MIT"
] | null | null | null | uhd_restpy/testplatform/sessions/ixnetwork/traffic/trafficitem/highlevelstream/udf/valuelist/valuelist.py | Vibaswan/ixnetwork_restpy | 239fedc7050890746cbabd71ea1e91c68d9e5cad | [
"MIT"
] | null | null | null | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from uhd_restpy.base import Base
from uhd_restpy.files import Files
class ValueList(Base):
    """This object specifies the value list properties.

    The ValueList class encapsulates a list of valueList resources that are managed by the system.
    A list of resources can be retrieved from the server using the ValueList.find() method.
    """
    __slots__ = ()
    _SDM_NAME = 'valueList'
    # Maps the Python-visible property names to the attribute names used by
    # the IxNetwork server data model (SDM) REST API.
    _SDM_ATT_MAP = {
        'AvailableWidths': 'availableWidths',
        'StartValueList': 'startValueList',
        'Width': 'width',
    }
    def __init__(self, parent):
        super(ValueList, self).__init__(parent)
    @property
    def AvailableWidths(self):
        """Read-only list of widths the server allows for this UDF type.

        Returns
        -------
        - list(str): Specifies all the possible widths available for a UDF in particular Type.
        """
        return self._get_attribute(self._SDM_ATT_MAP['AvailableWidths'])
    @property
    def StartValueList(self):
        """
        Returns
        -------
        - list(number): Specifies the starting value for a particular UDF.
        """
        return self._get_attribute(self._SDM_ATT_MAP['StartValueList'])
    @StartValueList.setter
    def StartValueList(self, value):
        self._set_attribute(self._SDM_ATT_MAP['StartValueList'], value)
    @property
    def Width(self):
        """
        Returns
        -------
        - str(16 | 24 | 32 | 8): Specifies the width of the UDF.
        """
        return self._get_attribute(self._SDM_ATT_MAP['Width'])
    @Width.setter
    def Width(self, value):
        self._set_attribute(self._SDM_ATT_MAP['Width'], value)
    def update(self, StartValueList=None, Width=None):
        """Updates valueList resource on the server.

        Args
        ----
        - StartValueList (list(number)): Specifies the starting value for a particular UDF.
        - Width (str(16 | 24 | 32 | 8)): Specifies the width of the UDF.

        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
    def find(self, AvailableWidths=None, StartValueList=None, Width=None):
        """Finds and retrieves valueList resources from the server.

        All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve valueList resources from the server.
        To retrieve an exact match ensure the parameter value starts with ^ and ends with $
        By default the find method takes no parameters and will retrieve all valueList resources from the server.

        Args
        ----
        - AvailableWidths (list(str)): Specifies all the possible widths available for a UDF in particular Type.
        - StartValueList (list(number)): Specifies the starting value for a particular UDF.
        - Width (str(16 | 24 | 32 | 8)): Specifies the width of the UDF.

        Returns
        -------
        - self: This instance with matching valueList resources retrieved from the server available through an iterator or index

        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
    def read(self, href):
        """Retrieves a single instance of valueList data from the server.

        Args
        ----
        - href (str): An href to the instance to be retrieved

        Returns
        -------
        - self: This instance with the valueList resources from the server available through an iterator or index

        Raises
        ------
        - NotFoundError: The requested resource does not exist on the server
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._read(href)
| 38.238462 | 163 | 0.671495 |
ace20e5d216eab3a68caa50edff592e4da8f472f | 1,667 | py | Python | mapping/sandbox/graphslam/view_merge.py | sameeptandon/sail-car-log | 0ee3d598bb09d389bcbd2ebf73cd4b2411e796be | [
"BSD-2-Clause"
] | 1 | 2021-02-24T03:11:13.000Z | 2021-02-24T03:11:13.000Z | mapping/sandbox/graphslam/view_merge.py | sameeptandon/sail-car-log | 0ee3d598bb09d389bcbd2ebf73cd4b2411e796be | [
"BSD-2-Clause"
] | null | null | null | mapping/sandbox/graphslam/view_merge.py | sameeptandon/sail-car-log | 0ee3d598bb09d389bcbd2ebf73cd4b2411e796be | [
"BSD-2-Clause"
] | 3 | 2015-03-18T14:36:04.000Z | 2018-07-04T02:57:24.000Z | import sys
import vtk
from blockworld import load_vtk_cloud
from pipeline_config import DATA_DIR
import h5py
# Tracks whether the clouds are currently shown with solid override colors.
recolored = False
def keyhandler(obj, event):
    """Toggle solid coloring of both point clouds when 't' is pressed."""
    global recolored
    if obj.GetKeySym() != 't':
        return
    if recolored:
        # Restore per-point scalar coloring.
        cloud_actor1.GetMapper().ScalarVisibilityOn()
        cloud_actor2.GetMapper().ScalarVisibilityOn()
    else:
        # Paint cloud 1 blue and cloud 2 green as solid colors.
        cloud_actor1.GetProperty().SetColor(0, 0, 1)
        cloud_actor1.GetMapper().ScalarVisibilityOff()
        cloud_actor2.GetProperty().SetColor(0, 1, 0)
        cloud_actor2.GetMapper().ScalarVisibilityOff()
    recolored = not recolored
if __name__ == '__main__':
    # Usage: view_merge.py cloud1.vtk cloud2.vtk [transform.h5]
    ren = vtk.vtkRenderer()
    cloud_actor1 = load_vtk_cloud(sys.argv[1])
    cloud_actor2 = load_vtk_cloud(sys.argv[2])
    transform_file = None
    if (len(sys.argv) > 3):
        # Optional HDF5 file holding a 4x4 'transform' matrix; only its
        # translation column is applied to the second cloud.
        transform_file = sys.argv[3]
        h5f = h5py.File(transform_file)
        T = h5f['transform'][...]
        print T
        transform = vtk.vtkTransform()
        transform.Translate(T[0, 3], T[1, 3], T[2, 3])
        cloud_actor2.SetUserTransform(transform)
    ren.AddActor(cloud_actor1)
    ren.AddActor(cloud_actor2)
    # Coordinate axes for orientation (labels hidden).
    axes = vtk.vtkAxesActor()
    axes.AxisLabelsOff()
    ren.AddActor(axes)
    ren.ResetCamera()
    win = vtk.vtkRenderWindow()
    win.AddRenderer(ren)
    win.SetSize(800, 800)
    iren = vtk.vtkRenderWindowInteractor()
    iren.SetRenderWindow(win)
    mouseInteractor = vtk.vtkInteractorStyleTrackballCamera()
    iren.SetInteractorStyle(mouseInteractor)
    iren.Initialize()
    # 't' toggles solid coloring via keyhandler (defined above).
    iren.AddObserver('KeyPressEvent', keyhandler)
    iren.Start()
ace20f90cdece66a796e66de29967523bb5bb3ca | 1,891 | py | Python | tensorflow2.0/save_and_load/save_load_model.py | YujieShui/up-up-tensorflow | 59f68ec9d5385012a5d60e74319aa00abf47646b | [
"MIT"
] | null | null | null | tensorflow2.0/save_and_load/save_load_model.py | YujieShui/up-up-tensorflow | 59f68ec9d5385012a5d60e74319aa00abf47646b | [
"MIT"
] | null | null | null | tensorflow2.0/save_and_load/save_load_model.py | YujieShui/up-up-tensorflow | 59f68ec9d5385012a5d60e74319aa00abf47646b | [
"MIT"
] | null | null | null | import tensorflow as tf
from tensorflow.keras import datasets, layers, optimizers, Sequential, metrics
def preprocess(x, y):
    """Normalize a single image/label pair (not a batch).

    The image is scaled to [0, 1] and flattened to a 784-vector; the label
    is one-hot encoded over the 10 MNIST classes.
    """
    image = tf.reshape(tf.cast(x, dtype=tf.float32) / 255., [28*28])
    label = tf.one_hot(tf.cast(y, dtype=tf.int32), depth=10)
    return image, label
batchsz = 128
(x, y), (x_val, y_val) = datasets.mnist.load_data()
print('datasets:', x.shape, y.shape, x.min(), x.max())
# Training pipeline: normalize, shuffle the full train set, batch.
db = tf.data.Dataset.from_tensor_slices((x,y))
db = db.map(preprocess).shuffle(60000).batch(batchsz)
ds_val = tf.data.Dataset.from_tensor_slices((x_val, y_val))
ds_val = ds_val.map(preprocess).batch(batchsz)
sample = next(iter(db))
print(sample[0].shape, sample[1].shape)
# Simple fully-connected classifier: 784 -> 256 -> 128 -> 64 -> 32 -> 10 logits.
network = Sequential([layers.Dense(256, activation='relu'),
                     layers.Dense(128, activation='relu'),
                     layers.Dense(64, activation='relu'),
                     layers.Dense(32, activation='relu'),
                     layers.Dense(10)])
network.build(input_shape=(None, 28*28))
network.summary()
# from_logits=True because the last Dense layer has no softmax.
network.compile(optimizer=optimizers.Adam(lr=0.01),
		loss=tf.losses.CategoricalCrossentropy(from_logits=True),
		metrics=['accuracy']
	)
network.fit(db, epochs=3, validation_data=ds_val, validation_freq=2)
network.evaluate(ds_val)
# Save the whole model (architecture + weights) and reload it to show that
# training state round-trips through the HDF5 file.
network.save('model.h5')
print('saved total model.')
del network
print('loaded model from file.')
network = tf.keras.models.load_model('model.h5', compile=False)
# compile=False above, so the loaded model must be re-compiled before evaluate.
network.compile(optimizer=optimizers.Adam(lr=0.01),
		loss=tf.losses.CategoricalCrossentropy(from_logits=True),
		metrics=['accuracy']
	)
# Re-preprocess the validation set eagerly (same transform as preprocess()).
x_val = tf.cast(x_val, dtype=tf.float32) / 255.
x_val = tf.reshape(x_val, [-1, 28*28])
y_val = tf.cast(y_val, dtype=tf.int32)
y_val = tf.one_hot(y_val, depth=10)
ds_val = tf.data.Dataset.from_tensor_slices((x_val, y_val)).batch(128)
network.evaluate(ds_val)
| 28.223881 | 81 | 0.675304 |
ace20f941bdb8c26ec83eee44036e251a472e70f | 8,960 | py | Python | aylien_news_api/models/histograms.py | AYLIEN/aylien_newsapi_python | ab4e667e718e8f919b894d4e4ec76e5d37b2fe74 | [
"Apache-2.0"
] | 13 | 2016-04-30T12:08:24.000Z | 2021-09-14T13:57:46.000Z | aylien_news_api/models/histograms.py | AYLIEN/aylien_newsapi_python | ab4e667e718e8f919b894d4e4ec76e5d37b2fe74 | [
"Apache-2.0"
] | 3 | 2016-09-02T08:22:28.000Z | 2018-07-03T10:59:31.000Z | aylien_news_api/models/histograms.py | AYLIEN/aylien_newsapi_python | ab4e667e718e8f919b894d4e4ec76e5d37b2fe74 | [
"Apache-2.0"
] | 5 | 2016-12-20T08:17:47.000Z | 2022-01-11T22:44:43.000Z | # coding: utf-8
"""
AYLIEN News API
The AYLIEN News API is the most powerful way of sourcing, searching and syndicating analyzed and enriched news content. It is accessed by sending HTTP requests to our server, which returns information to your client. # noqa: E501
The version of the OpenAPI document: 5.1.0
Contact: support@aylien.com
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from aylien_news_api.configuration import Configuration
class Histograms(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """
    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    openapi_types = {
        'field': 'str',
        'interval_end': 'int',
        'interval_start': 'int',
        'interval_width': 'int',
        'intervals': 'list[HistogramInterval]',
        'published_at_end': 'datetime',
        'published_at_start': 'datetime'
    }
    attribute_map = {
        'field': 'field',
        'interval_end': 'interval.end',
        'interval_start': 'interval.start',
        'interval_width': 'interval.width',
        'intervals': 'intervals',
        'published_at_end': 'published_at.end',
        'published_at_start': 'published_at.start'
    }
    def __init__(self, field=None, interval_end=None, interval_start=None, interval_width=None, intervals=None, published_at_end=None, published_at_start=None, local_vars_configuration=None):  # noqa: E501
        """Histograms - a model defined in OpenAPI"""  # noqa: E501
        # Fall back to the package-level configuration when none is supplied.
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration
        self._field = None
        self._interval_end = None
        self._interval_start = None
        self._interval_width = None
        self._intervals = None
        self._published_at_end = None
        self._published_at_start = None
        self.discriminator = None
        if field is not None:
            self.field = field
        if interval_end is not None:
            self.interval_end = interval_end
        if interval_start is not None:
            self.interval_start = interval_start
        if interval_width is not None:
            self.interval_width = interval_width
        if intervals is not None:
            self.intervals = intervals
        if published_at_end is not None:
            self.published_at_end = published_at_end
        if published_at_start is not None:
            self.published_at_start = published_at_start
    @property
    def field(self):
        """Gets the field of this Histograms.  # noqa: E501

        :return: The field of this Histograms.  # noqa: E501
        :rtype: str
        """
        return self._field
    @field.setter
    def field(self, field):
        """Sets the field of this Histograms.

        :param field: The field of this Histograms.  # noqa: E501
        :type field: str
        """
        self._field = field
    @property
    def interval_end(self):
        """Gets the interval_end of this Histograms.  # noqa: E501

        The end interval of the histogram  # noqa: E501

        :return: The interval_end of this Histograms.  # noqa: E501
        :rtype: int
        """
        return self._interval_end
    @interval_end.setter
    def interval_end(self, interval_end):
        """Sets the interval_end of this Histograms.

        The end interval of the histogram  # noqa: E501

        :param interval_end: The interval_end of this Histograms.  # noqa: E501
        :type interval_end: int
        """
        self._interval_end = interval_end
    @property
    def interval_start(self):
        """Gets the interval_start of this Histograms.  # noqa: E501

        The start interval of the histogram  # noqa: E501

        :return: The interval_start of this Histograms.  # noqa: E501
        :rtype: int
        """
        return self._interval_start
    @interval_start.setter
    def interval_start(self, interval_start):
        """Sets the interval_start of this Histograms.

        The start interval of the histogram  # noqa: E501

        :param interval_start: The interval_start of this Histograms.  # noqa: E501
        :type interval_start: int
        """
        self._interval_start = interval_start
    @property
    def interval_width(self):
        """Gets the interval_width of this Histograms.  # noqa: E501

        The width of the histogram  # noqa: E501

        :return: The interval_width of this Histograms.  # noqa: E501
        :rtype: int
        """
        return self._interval_width
    @interval_width.setter
    def interval_width(self, interval_width):
        """Sets the interval_width of this Histograms.

        The width of the histogram  # noqa: E501

        :param interval_width: The interval_width of this Histograms.  # noqa: E501
        :type interval_width: int
        """
        self._interval_width = interval_width
    @property
    def intervals(self):
        """Gets the intervals of this Histograms.  # noqa: E501

        The intervals of the histograms  # noqa: E501

        :return: The intervals of this Histograms.  # noqa: E501
        :rtype: list[HistogramInterval]
        """
        return self._intervals
    @intervals.setter
    def intervals(self, intervals):
        """Sets the intervals of this Histograms.

        The intervals of the histograms  # noqa: E501

        :param intervals: The intervals of this Histograms.  # noqa: E501
        :type intervals: list[HistogramInterval]
        """
        self._intervals = intervals
    @property
    def published_at_end(self):
        """Gets the published_at_end of this Histograms.  # noqa: E501

        The end of a period in which searched stories were published  # noqa: E501

        :return: The published_at_end of this Histograms.  # noqa: E501
        :rtype: datetime
        """
        return self._published_at_end
    @published_at_end.setter
    def published_at_end(self, published_at_end):
        """Sets the published_at_end of this Histograms.

        The end of a period in which searched stories were published  # noqa: E501

        :param published_at_end: The published_at_end of this Histograms.  # noqa: E501
        :type published_at_end: datetime
        """
        self._published_at_end = published_at_end
    @property
    def published_at_start(self):
        """Gets the published_at_start of this Histograms.  # noqa: E501

        The start of a period in which searched stories were published  # noqa: E501

        :return: The published_at_start of this Histograms.  # noqa: E501
        :rtype: datetime
        """
        return self._published_at_start
    @published_at_start.setter
    def published_at_start(self, published_at_start):
        """Sets the published_at_start of this Histograms.

        The start of a period in which searched stories were published  # noqa: E501

        :param published_at_start: The published_at_start of this Histograms.  # noqa: E501
        :type published_at_start: datetime
        """
        self._published_at_start = published_at_start
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        # Recursively serialize nested models and containers of models.
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, Histograms):
            return False
        return self.to_dict() == other.to_dict()
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, Histograms):
            return True
        return self.to_dict() != other.to_dict()
| 30.896552 | 235 | 0.620759 |
ace21088005dfb59d8a8d61eb89f2a26368cf6cd | 3,736 | py | Python | alerta/webhooks/telegram.py | mustafaugurhancar/alerta | 608b2db9117ecb8400c29f30bc549d9a99e9eee7 | [
"Apache-2.0"
] | 1 | 2018-03-30T12:38:47.000Z | 2018-03-30T12:38:47.000Z | alerta/webhooks/telegram.py | mustafaugurhancar/alerta | 608b2db9117ecb8400c29f30bc549d9a99e9eee7 | [
"Apache-2.0"
] | null | null | null | alerta/webhooks/telegram.py | mustafaugurhancar/alerta | 608b2db9117ecb8400c29f30bc549d9a99e9eee7 | [
"Apache-2.0"
] | null | null | null |
import logging
import os
from flask import current_app, request, jsonify
from flask_cors import cross_origin
from alerta.auth.utils import permission
from alerta.models.alert import Alert
from alerta.models.blackout import Blackout
from . import webhooks
LOG = logging.getLogger(__name__)
def send_message_reply(alert, action, user, data):
    """Edit the originating Telegram message to reflect the action taken.

    Best effort: any failure (missing telepot, bad config, Telegram API
    error) is logged and swallowed so webhook processing is not aborted.
    """
    try:
        import telepot
    except ImportError as e:
        LOG.warning("You have configured Telegram but 'telepot' client is not installed", exc_info=True)
        return
    try:
        # Config is read from env vars first, then the Flask app config.
        bot_id = os.environ.get('TELEGRAM_TOKEN') or current_app.config.get('TELEGRAM_TOKEN')
        dashboard_url = os.environ.get('DASHBOARD_URL') or current_app.config.get('DASHBOARD_URL')
        chat_id = os.environ.get('TELEGRAM_CHAT_ID') or current_app.config.get('TELEGRAM_CHAT_ID')
        bot = telepot.Bot(bot_id)
        # Identify the message to edit; keep everything after its first line
        # (the first line is replaced by the refreshed alert summary below).
        message_id = data['callback_query']['message']['message_id']
        message_log = "\n".join(data['callback_query']['message']['text'].split('\n')[1:])
        # Build the reply text and, for watch/unwatch, a refreshed keyboard
        # whose first button toggles to the opposite action.
        # NOTE: '{status}ing' yields 'watching'/'unwatching' for these actions.
        inline_keyboard, reply = [], "The status of alert {alert} is *{status}* now!"
        actions = ['watch', 'unwatch']
        if action in actions:
            reply = "User `{user}` is _{status}ing_ alert {alert}"
            next_action = actions[(actions.index(action) + 1) % len(actions)]
            inline_keyboard = [
                [{'text': next_action.capitalize(), 'callback_data': "/{} {}".format(next_action, alert.id)},
                 {'text': 'Ack', 'callback_data': "{} {}".format('/ack', alert.id)},
                 {'text': 'Close', 'callback_data': "{} {}".format('/close', alert.id)}, ]
            ]
        # Format the full replacement message (Markdown).
        # NOTE(review): the placeholder is spelled 'resouce'; it is internal
        # to this template and matches the kwarg below, so output is correct.
        alert_short_id = alert.get_id(short=True)
        alert_url = "{}/#/alert/{}".format(dashboard_url, alert.id)
        reply = reply.format(alert=alert_short_id, status=action, user=user)
        message = "{alert} *{level} - {event} on {resouce}*\n{log}\n{reply}".format(
            alert="[{}]({})".format(alert_short_id, alert_url), level=alert.severity.capitalize(),
            event=alert.event, resouce=alert.resource, log=message_log, reply=reply)
        # Edit the original message in place via the Telegram Bot API.
        bot.editMessageText(
            msg_identifier=(chat_id, message_id), text=message,
            parse_mode='Markdown', reply_markup={'inline_keyboard': inline_keyboard}
        )
    except Exception as e:
        LOG.warning("Error sending reply message", exc_info=True)
@webhooks.route('/webhooks/telegram', methods=['OPTIONS', 'POST'])
@cross_origin()
@permission('write:webhooks')
def telegram():
    """Process a Telegram bot webhook callback.

    Expects an inline-keyboard callback query whose data is
    '/<command> <argument>'. Status commands (/open, /ack, /close) and
    watch commands (/watch, /unwatch) take an alert id as argument;
    /blackout carries an 'environment|resource|event' triple instead.

    Returns a JSON status payload; 400 if the payload is not a callback
    query, 404 if the referenced alert does not exist.
    """
    data = request.json
    if 'callback_query' not in data:
        return jsonify(status="error", message="no callback_query in Telegram message"), 400
    author = data['callback_query']['from']
    user = "{} {}".format(author.get('first_name'), author.get('last_name'))
    command, argument = data['callback_query']['data'].split(' ', 1)
    action = command.lstrip('/')
    if action == 'blackout':
        # The /blackout argument is not an alert id, so it must be handled
        # before any alert lookup (previously Alert.find_by_id was called on
        # the triple and the Alert object was then .split(), which crashed).
        environment, resource, event = argument.split('|', 2)
        blackout = Blackout(environment, resource=resource, event=event)
        blackout.create()
        return jsonify(status="ok")
    alert = Alert.find_by_id(argument)
    if not alert:
        # Previously this error response was built but never returned.
        return jsonify(status="error", message="alert not found for Telegram message"), 404
    if action in ['open', 'ack', 'close']:
        alert.set_status(status=action, text='status change via Telegram')
    elif action in ['watch', 'unwatch']:
        alert.untag(tags=["{}:{}".format(action, user)])
    send_message_reply(alert, action, user, data)
    return jsonify(status="ok")
| 40.608696 | 109 | 0.628747 |
ace21170ae71b56cf8e831b3d83f80f28783c119 | 186 | py | Python | confobj/config_ini.py | datasets-org/config | f21f2eefcacc45095210bcb4ddd315bda43481d5 | [
"MIT"
] | null | null | null | confobj/config_ini.py | datasets-org/config | f21f2eefcacc45095210bcb4ddd315bda43481d5 | [
"MIT"
] | null | null | null | confobj/config_ini.py | datasets-org/config | f21f2eefcacc45095210bcb4ddd315bda43481d5 | [
"MIT"
] | null | null | null | from .config_base import ConfigBase
class ConfigIni(ConfigBase):
    """INI-file backed configuration source (not implemented yet)."""

    def __init__(self):
        # TODO: support init from INI files (problem: nested config values).
        # NotImplementedError is a subclass of Exception, so callers that
        # caught the old generic Exception keep working.
        raise NotImplementedError("Not implemented yet")
| 23.25 | 47 | 0.688172 |
ace212dd42b3c562db596778e99538c34b345b61 | 5,110 | py | Python | fingerprinting/analysis/wobj.py | rmorla/http2-sidechannel | 5a4738d31a026e9c8ae9d3b638315daff682a67e | [
"Unlicense"
] | null | null | null | fingerprinting/analysis/wobj.py | rmorla/http2-sidechannel | 5a4738d31a026e9c8ae9d3b638315daff682a67e | [
"Unlicense"
] | null | null | null | fingerprinting/analysis/wobj.py | rmorla/http2-sidechannel | 5a4738d31a026e9c8ae9d3b638315daff682a67e | [
"Unlicense"
] | null | null | null | # -*- coding: utf-8 -*-
from enum import Enum
def integer_compare(lhs, rhs):
    """Three-way compare: -1 if lhs < rhs, 0 if equal, 1 if lhs > rhs."""
    if lhs < rhs:
        return -1
    if lhs > rhs:
        return 1
    return 0
class WebObjectState(Enum):
    # Lifecycle of a web object as frames are observed. The transitions are
    # driven by WebObject.handle_headers/handle_data: WAITING (or
    # PAYLOAD_FINISHED, on stream reuse) -> REQUEST_HEADERS ->
    # RESPONSE_HEADERS -> PAYLOAD_START -> PAYLOAD_FINISHED.
    WAITING = 0
    REQUEST_HEADERS = 1
    RESPONSE_HEADERS = 2
    PAYLOAD_START = 3
    PAYLOAD_FINISHED = 4
class WebObject(object):
    """Tracks a single web object reconstructed from observed frames.

    Frame objects are expected to expose: ``id``, ``body`` (length),
    ``timestamp``, ``http_resource``, ``http_status``, ``end_headers`` and
    ``end_stream`` — TODO confirm against the capture parser.

    Fixes: ``compare_lhs_headers`` and ``compare`` previously referenced
    nonexistent attributes ``request_headers``/``response_headers``/
    ``payload``; the class defines ``request``/``response``/``body``, so
    those code paths always raised AttributeError.
    """

    def __init__(self, frame):
        self.length = 0                  # total payload bytes received
        self.status = -1                 # HTTP status from the response headers
        self.body = []                   # one entry per payload frame
        self.response = {}               # response headers entry (set later)
        self.finished = False            # True once end_stream seen on data
        self.transitions = []            # state-transition log with deltas
        self.name = frame.http_resource
        self.state = WebObjectState.WAITING
        self.request = self.generate_headers_entry(frame)

    @staticmethod
    def generate_headers_entry(frame):
        """Build the bookkeeping record stored for a headers frame."""
        return {
            "frame": frame.id,
            "length": frame.body,
            "timestamp": frame.timestamp
        }

    def generate_body_entry(self, frame):
        """Build the record for a payload frame, including the inter-frame delta."""
        body_dictionary = self.generate_headers_entry(frame)
        if len(self.body) == 0:
            body_dictionary["delta"] = 0.0
        else:
            body_dictionary["delta"] = frame.timestamp - self.body[-1]["timestamp"]
        return body_dictionary

    def transition(self, state, frame):
        """Move to *state* and log the transition with its timing delta."""
        transition_dictionary = {
            "state": state.name,
            "timestamp": frame.timestamp
        }
        if len(self.transitions) == 0:
            transition_dictionary["delta"] = 0.0
        else:
            transition_dictionary["delta"] = frame.timestamp - self.transitions[-1]["timestamp"]
        self.state = state
        self.transitions.append(transition_dictionary)

    def handle_headers(self, frame):
        """Advance the state machine for a HEADERS frame."""
        if self.state == WebObjectState.WAITING or self.state == WebObjectState.PAYLOAD_FINISHED:
            if frame.end_headers:
                self.transition(WebObjectState.REQUEST_HEADERS, frame)
        elif self.state == WebObjectState.REQUEST_HEADERS and frame.end_headers:
            self.status = frame.http_status
            self.response = self.generate_headers_entry(frame)
            # end_stream on the response headers means there is no payload.
            if frame.end_stream:
                self.transition(WebObjectState.WAITING, frame)
            else:
                self.transition(WebObjectState.RESPONSE_HEADERS, frame)

    def handle_data(self, frame):
        """Advance the state machine for a DATA (payload) frame."""
        if self.state == WebObjectState.RESPONSE_HEADERS:
            if frame.end_stream:
                # Single-frame payload: start and finish in one step.
                self.transition(WebObjectState.PAYLOAD_FINISHED, frame)
            else:
                self.transition(WebObjectState.PAYLOAD_START, frame)
            self._receive_payload(frame)
        elif self.state == WebObjectState.PAYLOAD_START:
            self._receive_payload(frame)
            if frame.end_stream:
                self.transition(WebObjectState.PAYLOAD_FINISHED, frame)

    @property
    def payload_start(self):
        """First payload frame entry."""
        return self.body[0]

    @property
    def payload_finish(self):
        """Last payload frame entry."""
        return self.body[-1]

    def serialize(self):
        """Return a plain-dict representation of this object."""
        return {
            "body": self.body,
            "name": self.name,
            "length": self.length,
            "status": self.status,
            "transitions": self.transitions,
            "request": self.request,
            "response": self.response
        }

    def _receive_payload(self, frame):
        """Accumulate a payload frame into length/body bookkeeping."""
        if frame.end_stream:
            self.finished = True
        self.length += frame.body
        self.body.append(self.generate_body_entry(frame))

    def compare_lhs_data_rhs_headers(self, other):
        """Order this object's payload span against a headers entry dict."""
        if self.payload_finish["timestamp"] < other["timestamp"]:
            return -1
        elif self.payload_start["timestamp"] > other["timestamp"]:
            return 1
        else:
            return 0

    @staticmethod
    def compare_lhs_headers(this, other):
        """Order a headers entry *this* against another WebObject's events."""
        comparison = [
            # Fixed: these previously read other.request_headers /
            # other.response_headers / other.payload, none of which exist.
            integer_compare(this["timestamp"], other.request["timestamp"]),
            integer_compare(this["timestamp"], other.response["timestamp"])
        ]
        if len(other.body) > 0:
            if this["timestamp"] < other.payload_start["timestamp"]:
                comparison.append(-1)
            elif this["timestamp"] > other.payload_finish["timestamp"]:
                comparison.append(1)
            else:
                comparison.append(0)
        return comparison

    def compare(self, other):
        """Return the pairwise timing relations between two finished objects."""
        if not self.finished or not other.finished:
            return []
        comparison = [
            self.compare_lhs_headers(self.request, other),
            self.compare_lhs_headers(self.response, other)
        ]
        if len(self.body) > 0:
            if self.payload_finish["timestamp"] < other.payload_start["timestamp"]:
                overlap_lhs_data_rhs_data = -1
            elif self.payload_start["timestamp"] > other.payload_finish["timestamp"]:
                overlap_lhs_data_rhs_data = 1
            else:
                overlap_lhs_data_rhs_data = 0
            comparison += [
                # Fixed: previously other.request_headers / other.response_headers.
                self.compare_lhs_data_rhs_headers(other.request),
                self.compare_lhs_data_rhs_headers(other.response),
                overlap_lhs_data_rhs_data
            ]
        return comparison
| 27.771739 | 97 | 0.592368 |
ace212e52356fcb6bf2ac5a6d50aa4f5417177b4 | 3,247 | py | Python | custom_components/hacs/repositories/appdaemon.py | LucaKaufmann/HomeAssistant-Config | 3be0ab0a91a2ff188abf1e0a9d0dd4dea7d30d45 | [
"MIT"
] | 31 | 2019-01-15T23:13:41.000Z | 2020-08-16T14:15:56.000Z | custom_components/hacs/repositories/appdaemon.py | LucaKaufmann/Home-AssistantConfig | 3be0ab0a91a2ff188abf1e0a9d0dd4dea7d30d45 | [
"MIT"
] | 3 | 2020-01-09T20:33:50.000Z | 2020-07-11T13:25:48.000Z | custom_components/hacs/repositories/appdaemon.py | LucaKaufmann/Home-AssistantConfig | 3be0ab0a91a2ff188abf1e0a9d0dd4dea7d30d45 | [
"MIT"
] | 3 | 2020-02-12T10:55:00.000Z | 2020-02-29T18:43:39.000Z | """Class for appdaemon apps in HACS."""
from aiogithubapi import AIOGitHubException
from .repository import HacsRepository, register_repository_class
from ..hacsbase.exceptions import HacsException
@register_repository_class
class HacsAppdaemon(HacsRepository):
    """Appdaemon apps in HACS."""
    category = "appdaemon"
    def __init__(self, full_name):
        """Initialize."""
        super().__init__()
        self.information.full_name = full_name
        self.information.category = self.category
        self.content.path.local = self.localpath
        self.content.path.remote = "apps"
    @property
    def localpath(self):
        """Return localpath."""
        return f"{self.system.config_path}/appdaemon/apps/{self.information.name}"
    async def validate_repository(self):
        """Validate that the repository has an 'apps/<name>/' layout."""
        await self.common_validate()
        # Custom step 1: Validate content.
        try:
            addir = await self.repository_object.get_contents("apps", self.ref)
        except AIOGitHubException:
            raise HacsException(
                f"Repostitory structure for {self.ref.replace('tags/','')} is not compliant"
            )
        if not isinstance(addir, list):
            self.validate.errors.append("Repostitory structure not compliant")
        # NOTE(review): even when the check above records an error, execution
        # continues and indexes addir[0] below — confirm whether an early
        # return/raise was intended here.
        self.content.path.remote = addir[0].path
        self.information.name = addir[0].name
        self.content.objects = await self.repository_object.get_contents(
            self.content.path.remote, self.ref
        )
        self.content.files = []
        for filename in self.content.objects:
            self.content.files.append(filename.name)
        # Handle potential errors
        if self.validate.errors:
            for error in self.validate.errors:
                if not self.system.status.startup:
                    self.logger.error(error)
        return self.validate.success
    async def registration(self):
        """Registration."""
        if not await self.validate_repository():
            return False
        # Run common registration steps.
        await self.common_registration()
        # Set local path
        self.content.path.local = self.localpath
    async def update_repository(self):
        """Refresh repository metadata and the list of app files."""
        # Skip entirely when the GitHub API rate limit is exhausted.
        if self.github.ratelimits.remaining == 0:
            return
        await self.common_update()
        # Get appdaemon objects.
        if self.repository_manifest:
            if self.repository_manifest.content_in_root:
                self.content.path.remote = ""
        if self.content.path.remote == "apps":
            # Re-resolve the app directory under apps/ (first entry wins).
            addir = await self.repository_object.get_contents(
                self.content.path.remote, self.ref
            )
            self.content.path.remote = addir[0].path
            self.information.name = addir[0].name
        self.content.objects = await self.repository_object.get_contents(
            self.content.path.remote, self.ref
        )
        self.content.files = []
        for filename in self.content.objects:
            self.content.files.append(filename.name)
        # Set local path
        self.content.path.local = self.localpath
| 33.822917 | 93 | 0.606714 |
ace214903ea917397e63135f1705c5c7510fed03 | 20,275 | py | Python | src/sardana/taurus/core/tango/sardana/motion.py | dschick/sardana | 597d3b26ed6659aac188791d84ad81397f0d0633 | [
"CC-BY-3.0"
] | null | null | null | src/sardana/taurus/core/tango/sardana/motion.py | dschick/sardana | 597d3b26ed6659aac188791d84ad81397f0d0633 | [
"CC-BY-3.0"
] | null | null | null | src/sardana/taurus/core/tango/sardana/motion.py | dschick/sardana | 597d3b26ed6659aac188791d84ad81397f0d0633 | [
"CC-BY-3.0"
] | null | null | null | #!/usr/bin/env python
##############################################################################
##
# This file is part of Sardana
##
# http://www.sardana-controls.org/
##
# Copyright 2011 CELLS / ALBA Synchrotron, Bellaterra, Spain
##
# Sardana is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
##
# Sardana is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
##
# You should have received a copy of the GNU Lesser General Public License
# along with Sardana. If not, see <http://www.gnu.org/licenses/>.
##
##############################################################################
"""The sardana motion submodule. It contains specific part of sardana motion"""
__all__ = ["Moveable", "MoveableSource", "Motion", "MotionGroup"]
__docformat__ = 'restructuredtext'
import time
from taurus.core.util.containers import CaselessDict
def _get_tango_devstate_match(states):
"""
Retrieve PyTango.DevState match
:param states:
:return:
"""
import PyTango
state = PyTango.DevState.ON
if PyTango.DevState.FAULT in states:
state = PyTango.DevState.FAULT
elif PyTango.DevState.ALARM in states:
state = PyTango.DevState.ALARM
elif PyTango.DevState.UNKNOWN in states:
state = PyTango.DevState.UNKNOWN
elif PyTango.DevState.MOVING in states:
state = PyTango.DevState.MOVING
return state
class Moveable:
    """ An item that can 'move'. In order to move it you need to provide a list
    of values (normally interpreted as motor positions).
    Therefore this Moveable can represent a single motor, a group of motors"""

    def __init__(self):
        pass

    def startMove(self, new_pos, timeout=None):
        """ startMove(sequence<float> new_pos, double timeout=None) -> sequence<id>

        Calling this method will trigger a movement off all components of
        the movement."""
        pass

    def waitMove(self, timeout=None, id=None):
        """ waitMove(float timeout=None, sequence<id> id=None) -> None

        Waits for the movement to end. If no movement is active the method
        returns immediately."""
        pass

    def move(self, new_pos, timeout=None):
        """ move(list<double> new_pos, timeout=None) -> None

        Triggers a movement and waits for it to end.
        """
        self.waitMove(id=self.startMove(new_pos, timeout=timeout))

    def iterMove(self, new_pos, timeout=None):
        """ generator for motor positions"""
        pass

    def getLastMotionTime(self):
        # Abstract: duration of the most recent motion.
        raise NotImplementedError

    def getTotalLastMotionTime(self):
        # Abstract: total wall-clock time of the last move() call.
        raise NotImplementedError

    def abort(self, wait_ready=True, timeout=None):
        pass

    def readPosition(self, force=False):
        pass

    def getMoveableSource(self):
        # MoveableSource that created this object (None in this base class).
        return None

    def getSize(self):
        # Number of individual items this Moveable controls.
        return 0

    def getIndex(self, name):
        # Position index of the item called *name* within this Moveable.
        pass
class MoveableSource:
    """ A container of Moveable items. """

    def __init__(self):
        pass

    def getMoveable(self, names):
        """ getMoveable(list<string> names) -> Moveable

        Returns a Moveable object that handles all the moveable items given in
        names.
        """
        pass
class BaseMotion(Moveable):
    """Common initialisation shared by Motion and MotionGroup.

    Elements may be given either as names (strings) or as Moveable objects;
    the type of the first element decides which init path is used.
    """

    def __init__(self, elements, moveable_srcs, allow_repeat=False,
                 allow_unknown=False, read_only=False):
        # read_only motions may read positions but refuse to start a move.
        self.read_only = read_only

        if len(elements) == 0:
            raise Exception("Motion with no elements")

        first_elem = elements[0]

        if isinstance(first_elem, str):
            self.init_by_names(elements, moveable_srcs, allow_repeat,
                               allow_unknown)
        else:
            self.init_by_movables(elements, moveable_srcs, allow_repeat,
                                  allow_unknown)

    def getMoveable(self, sources, name):
        # Return the first Moveable any source provides for *name*
        # (implicitly None when no source knows it).
        for source in sources:
            moveable = source.getMoveable(name)
            if moveable is not None:
                return moveable
class MotionGroup(BaseMotion):
    """ A virtual motion group object """

    def __init__(self, elements, moveable_srcs, allow_repeat=False,
                 allow_unknown=False, read_only=False):
        BaseMotion.__init__(self, elements, moveable_srcs,
                            allow_repeat=allow_repeat,
                            allow_unknown=allow_unknown, read_only=read_only)
        # Wall-clock duration of the most recent move() call.
        self.__total_motion_time = 0

    def init_by_movables(self, elements, moveable_srcs, allow_repeat, allow_unknown):
        self.moveable_list = elements

    def init_by_names(self, names, moveable_srcs, allow_repeat, allow_unknown):
        moveables = [self.getMoveable(moveable_srcs, name) for name in names]
        self.init_by_movables(moveables, moveable_srcs, allow_repeat,
                              allow_unknown)

    def getLastMotionTime(self):
        # The slowest member determines the group's motion time.
        times = [moveable.getLastMotionTime()
                 for moveable in self.moveable_list]
        return max(times)

    def getTotalLastMotionTime(self):
        return self.__total_motion_time

    def startMove(self, pos_list, timeout=None):
        if self.read_only:
            raise Exception("Trying to move read only motion")
        if len(pos_list) != len(self.moveable_list):
            raise Exception("Invalid number of position values")
        ids = []
        for i, moveable in enumerate(self.moveable_list):
            ids.append(moveable.startMove(pos_list[i], timeout=timeout))
        return ids

    def waitMove(self, timeout=None, id=None):
        if id is None:
            id = len(self.moveable_list) * [None]
        for i, moveable in enumerate(self.moveable_list):
            moveable.waitMove(timeout=timeout, id=id[i])

    def move(self, new_pos, timeout=None):
        # Moves members one after another (sequential, not simultaneous);
        # each member's move() is expected to return (state, positions).
        start_time = time.time()
        states, positions = [], []
        for moveable, pos in zip(self.moveable_list, new_pos):
            res = moveable.move(pos, timeout=timeout)
            states.append(res[0])
            positions.extend(res[1])
        state = _get_tango_devstate_match(states)
        self.__total_motion_time = time.time() - start_time
        return state, positions

    def iterMove(self, new_pos, timeout=None):
        """ generator for motor positions"""
        raise NotImplementedError

    def getStatus(self):
        # res = []
        # for moveable in self.moveable_list:
        #     status = moveable.status.split('\n')
        #     res.append(moveable.getName() + ":")
        #     for st in status:
        #         res.append("    " + st)
        # return "\n".join(res)
        return "\n".join([m.status() for m in self.moveable_list])

    def readState(self, force=False):
        # Single member: return its state directly; otherwise a list.
        if len(self.moveable_list) == 1:
            return self.moveable_list[0].getState()
        return [m.getState() for m in self.moveable_list]

    def readPosition(self, force=False):
        return [m.readPosition(force=force) for m in self.moveable_list]

    def abort(self, wait_ready=True, timeout=None):
        for moveable in self.moveable_list:
            try:
                moveable.abort(wait_ready=wait_ready, timeout=timeout)
            except:
                # Best effort: keep aborting remaining members even if one fails.
                pass

    def stop(self, wait_ready=True, timeout=None):
        for moveable in self.moveable_list:
            try:
                moveable.stop(wait_ready=wait_ready, timeout=timeout)
            except:
                # Best effort: keep stopping remaining members.
                pass

    def read(self):
        pass
class Motion(BaseMotion):
    """ A motion object """

    def __init__(self, elements, moveable_srcs, allow_repeat=False,
                 allow_unknown=False, read_only=False):
        BaseMotion.__init__(self, elements, moveable_srcs,
                            allow_repeat=allow_repeat,
                            allow_unknown=allow_unknown, read_only=read_only)
        # Wall-clock duration of the most recent move() call.
        self.__total_motion_time = 0

    def __str__(self):
        return self.__class__.__name__ + "(" + str(self.names) + ")"

    def init_by_movables(self, elements, moveable_srcs, allow_repeat, allow_unknown):
        # TODO: Optimize this. Dont call init_by_names. It its possible to do it
        # manually with some performance gain
        names = [elem.getName() for elem in elements]
        self.init_by_names(names, moveable_srcs, allow_repeat, allow_unknown)

    def init_by_names(self, names, moveable_srcs, allow_repeat, allow_unknown):
        ms_elem_names = self.getElemNamesByMoveableSource(
            names, moveable_srcs,
            allow_repeat=allow_repeat, allow_unknown=allow_unknown)

        # map<MoveableSource, Moveable>
        ms_moveables = {}
        for moveable_source, ms_names in list(ms_elem_names.items()):
            moveable = moveable_source.getMoveable(ms_names)
            ms_moveables[moveable_source] = moveable

        # list<Moveable>
        moveable_list = list(ms_moveables.values())

        # list<tuple(int moveable_index, int position_index)>
        pos_to_moveable = len(names) * [None, ]

        for i, name in enumerate(names):
            moveable_index, position_index = -1, -1
            for j, moveable in enumerate(moveable_list):
                position_index = moveable.getIndex(name)
                if position_index >= 0:
                    pos_to_moveable[i] = (j, position_index)
                    break

        # list<string> moveable item names
        self.names = names
        # list<Moveable>
        self.moveable_list = moveable_list
        # list<tuple(int moveable_index, int position_index)>
        # the list index itself is the position index for this motion
        # the tuple for a given index gives the corresponding moveable index and
        # the position index inside the moveable object
        self.pos_to_moveable = pos_to_moveable
        # map<MoveableSource, Moveable>
        self.ms_moveables = ms_moveables

        # a buffer for positions for performance reasons
        # list<list<double>> index of outer list is the moveable index. The
        # contents of the list are the ordered positions for that moveable
        pos_buff = []
        total_count = 0
        for m in moveable_list:
            count = m.getSize()
            total_count += count
            pos_buff.append(m.getSize() * [0.0, ])
        self.pos_buff = pos_buff

        assert(len(self.pos_to_moveable) == len(self.names))
        assert(total_count == len(self.names))

    def getElemNamesByMoveableSource(self, names, moveable_sources,
                                     allow_repeat,
                                     allow_unknown):
        """ getElemNamesByMoveableSource(list<str>names,
        list<MoveableSource> moveable_sources,
        bool allow_repeat, bool allow_unknown)

        Organizes the elements by moveable source. The result is a dictionary
        with key being the MoveableSource and data a list of the names that
        belong to the that motion source.
        """
        ms_elems = {}
        for name in names:
            moveable = None
            for moveable_source in moveable_sources:
                moveable = moveable_source.getMoveable([name])
                if not moveable is None:
                    if moveable_source not in ms_elems:
                        ms_elems[moveable_source] = []
                    moveable_source_moveables = ms_elems.get(moveable_source)
                    present = name in moveable_source_moveables
                    if not present or (present and allow_repeat):
                        moveable_source_moveables.append(name)
                        break
                    elif present and not allow_repeat:
                        raise Exception("Moveable item %s appears more "
                                        "than once" % name)
            if moveable is None and not allow_unknown:
                raise Exception("Moveable item %s not found" % name)
        return ms_elems

    def getLastMotionTime(self):
        # The slowest member determines the motion time.
        times = [moveable.getLastMotionTime()
                 for moveable in self.moveable_list]
        return max(times)

    def getTotalLastMotionTime(self):
        return self.__total_motion_time

    def startMove(self, pos_list, timeout=None):
        if self.read_only:
            raise Exception("Trying to move read only motion")
        if len(pos_list) != len(self.names):
            raise Exception("Invalid number of position values")

        # Scatter the flat position list into the per-moveable buffers via
        # the pos_to_moveable index map built in init_by_names().
        buff = self.pos_buff
        for i, pair in enumerate(self.pos_to_moveable):
            pos = pos_list[i]
            buff[pair[0]][pair[1]] = pos
        ids = []
        for i, moveable in enumerate(self.moveable_list):
            ids.append(moveable.startMove(buff[i], timeout=timeout))
        return ids

    def waitMove(self, timeout=None, id=None):
        if id is None:
            id = len(self.moveable_list) * [None]
        for i, moveable in enumerate(self.moveable_list):
            moveable.waitMove(timeout=timeout, id=id[i])

    def move(self, new_pos, timeout=None):
        start_time = time.time()
        if len(self.moveable_list) == 1:
            moveable = self.moveable_list[0]
            ret = moveable.move(new_pos, timeout=timeout)
        else:
            # NOTE(review): this branch slices new_pos in moveable order
            # instead of routing values through pos_to_moveable the way
            # startMove() does -- confirm both orderings always agree.
            start, ids = 0, []
            for moveable in self.moveable_list:
                end = start + moveable.getSize()
                pos = new_pos[start:end]
                id = moveable.startMove(pos, timeout=timeout)
                ids.append(id)
                start = end
            for moveable, id in zip(self.moveable_list, ids):
                moveable.waitMove(id=id, timeout=timeout)
            states, positions = self.readState(), self.readPosition()
            state = _get_tango_devstate_match(states)
            ret = state, positions
        self.__total_motion_time = time.time() - start_time
        return ret

    def iterMove(self, new_pos, timeout=None):
        """ generator for motor positions"""
        assert len(
            self.moveable_list) == 1, "for now we support only 'simple' motions!!!!"

        moveable = self.moveable_list[0]
        for p in moveable.iterMove(new_pos, timeout=timeout):
            yield p

    def getStatus(self):
        return "\n".join([m.status() for m in self.moveable_list])

    def readState(self, force=False):
        # Single member: return its state directly; otherwise a list.
        if len(self.moveable_list) == 1:
            return self.moveable_list[0].getState()
        return [m.getState() for m in self.moveable_list]

    def readPosition(self, force=False):
        # Gather per-moveable positions, then re-order them into this
        # motion's own position order using pos_to_moveable.
        moveable_pos_list = [m.readPosition(
            force=force) for m in self.moveable_list]
        pos = [moveable_pos_list[pair[0]][pair[1]]
               for pair in self.pos_to_moveable]
        return pos

    def abort(self, wait_ready=True, timeout=None):
        for moveable in self.moveable_list:
            moveable.abort(wait_ready=wait_ready, timeout=timeout)

    def stop(self, wait_ready=True, timeout=None):
        for moveable in self.moveable_list:
            moveable.stop(wait_ready=wait_ready, timeout=timeout)

    def read(self):
        pass
#-------------------------------------------------------------------------
# Small test framework
#-------------------------------------------------------------------------
class PoolMoveableTest(Moveable):
    """Trivial in-memory Moveable used by the test() self-test below.

    It simply records the last set of positions handed to startMove().
    """

    def __init__(self, name, elems, moveable_src, mov_items=None):
        self.name = name
        self.elem_names = elems
        self.moveable_src = moveable_src
        self.pos = len(elems) * [0.0, ]

    def startMove(self, new_pos, timeout=None):
        self.pos = new_pos
        # return the id as time_stamp
        return len(self.elem_names) * [time.time()]

    def waitMove(self, timeout=None, id=None):
        pass

    def abort(self, wait_ready=True, timeout=None):
        pass

    def readPosition(self, force=False):
        return self.pos

    def getMoveableSource(self):
        return self.moveable_src

    def getSize(self):
        return len(self.elem_names)

    def getIndex(self, name):
        """Return the position index of *name*, or -1 when unknown."""
        try:
            return self.elem_names.index(name)
        except ValueError:
            # list.index raises ValueError for a missing name; the original
            # bare ``except:`` also swallowed KeyboardInterrupt/SystemExit.
            return -1
class PoolMSTest(MoveableSource):
    """MoveableSource stub holding one PoolMoveableTest per element name."""

    def __init__(self, initial_elems):
        MoveableSource.__init__(self)
        self.elem_names = initial_elems
        # Counter used to generate unique names for created groups.
        self.moveable_inc = 0

        self.motors = CaselessDict()
        for elem_name in initial_elems:
            self.motors[elem_name] = PoolMoveableTest(
                elem_name, [elem_name], self)
        # Most recently created multi-element group (None until requested).
        self.motor_group = None

    def getMoveable(self, names):
        # Single name: hand out the individual motor object.
        if len(names) == 1:
            return self.motors.get(names[0])
        # Multiple names: build a group preserving this source's own element
        # order (not the requested order) and remember it in motor_group.
        l = [name for name in self.elem_names if name in names]
        self.motor_group = PoolMoveableTest(
            "moveable %d" % self.moveable_inc, l, self)
        self.moveable_inc += 1
        return self.motor_group
def test():
    """Self-test exercising Motion against several PoolMSTest sources."""
    ms1 = PoolMSTest(["m1", "m2", "m3", "m4"])
    ms2 = PoolMSTest(["m5", "m6", "m7", "m8"])
    ms3 = PoolMSTest(["m9", "m10", "m11", "m12"])

    # Single motor from a single source.
    motion1 = Motion(["m1"], [ms1, ms2, ms3])
    # some tests
    for p in [[5.0], [20.0], [0.0], [-14.5], [0.0]]:
        motion1.move(p)
        assert(ms1.motors["m1"].pos[0] == p[0])
        assert(motion1.readPosition() == p)

    # Several motors, all from the same source.
    motion2 = Motion(["m1", "m2", "m3"], [ms1, ms2, ms3])
    for p in [[5.0, 20.0, 0.0], [-14.5, 3.32, 232], [0.0, 0.0, 0.0]]:
        motion2.move(p)
        assert(ms1.motor_group.pos == p)
        assert(motion2.readPosition() == p)

    # Requested order differs from the source's own element order.
    motion3 = Motion(["m3", "m1", "m2"], [ms1, ms2, ms3])
    for p in [[5.0, 20.0, 0.0], [-14.5, 3.32, 232], [0.0, 0.0, 0.0]]:
        motion3.move(p)
        assert(ms1.motor_group.pos[0] == p[1])
        assert(ms1.motor_group.pos[1] == p[2])
        assert(ms1.motor_group.pos[2] == p[0])
        assert(motion3.readPosition() == p)

    # NOTE(review): the multi-source cases below go through Motion.move()'s
    # else-branch, which calls readState() and therefore getState() on each
    # moveable; PoolMoveableTest does not define getState() -- confirm these
    # cases actually run.
    motion4 = Motion(["m2", "m6"], [ms1, ms2, ms3])
    for p in [[5.0, 20.0], [-14.5, 3.32], [0.0, 0.0]]:
        motion4.move(p)
        assert(ms1.motors["m2"].pos[0] == p[0])
        assert(ms2.motors["m6"].pos[0] == p[1])
        assert(motion4.readPosition() == p)

    motion5 = Motion(["m3", "m1", "m7"], [ms1, ms2, ms3])
    for p in [[5.0, 20.0, 0.0], [-14.5, 3.32, 232], [0.0, 0.0, 0.0]]:
        motion5.move(p)
        assert(ms1.motor_group.pos[1] == p[0])
        assert(ms1.motor_group.pos[0] == p[1])
        assert(ms2.motors["m7"].pos[0] == p[2])
        assert(motion5.readPosition() == p)

    motion6 = Motion(["m4", "m9", "m10", "m2"], [ms1, ms2, ms3])
    for p in [[5.0, 20.0, 0.0, 21.0], [-14.5, 3.32, 232, 45.4], [0.0, 0.0, 0.0, 0.0]]:
        motion6.move(p)
        assert(ms1.motor_group.pos[1] == p[0])
        assert(ms1.motor_group.pos[0] == p[3])
        assert(ms3.motor_group.pos[0] == p[1])
        assert(ms3.motor_group.pos[1] == p[2])
        assert(motion6.readPosition() == p)

    motion7 = Motion(["m4", "m9", "m7", "m2"], [ms1, ms2, ms3])
    for p in [[5.0, 20.0, 0.0, 21.0], [-14.5, 3.32, 232, 45.4], [0.0, 0.0, 0.0, 0.0]]:
        motion7.move(p)
        assert(ms1.motor_group.pos[1] == p[0])
        assert(ms1.motor_group.pos[0] == p[3])
        assert(ms3.motors["m9"].pos[0] == p[1])
        assert(ms2.motors["m7"].pos[0] == p[2])
        assert(motion7.readPosition() == p)

    # Error conditions: the asserts only fire when an exception is raised.
    try:
        m = Motion(["m1", "m2"], [ms1, ms2, ms3], read_only=True)
        m.startMove([0.5, 20.4])
    except Exception as e:
        assert(str(e) == "Trying to move read only motion")

    try:
        m = Motion(["m1", "m1"], [ms1, ms2, ms3])
    except Exception as e:
        assert(str(e) == "Moveable item m1 appears more than once")

    try:
        m = Motion(["m1", "m999"], [ms1, ms2, ms3])
    except Exception as e:
        assert(str(e) == "Moveable item m999 not found")


if __name__ == "__main__":
    test()
| 34.777015 | 113 | 0.589741 |
ace2158e326afed972d73d7b32323b9399f5a33b | 332 | py | Python | profiles_api/urls.py | congdu/profiles-rest-api | 211cfb9c890dd8c45187d48480b74a5a1e0d59a8 | [
"MIT"
] | null | null | null | profiles_api/urls.py | congdu/profiles-rest-api | 211cfb9c890dd8c45187d48480b74a5a1e0d59a8 | [
"MIT"
] | null | null | null | profiles_api/urls.py | congdu/profiles-rest-api | 211cfb9c890dd8c45187d48480b74a5a1e0d59a8 | [
"MIT"
] | null | null | null | from django.urls import path, include
from rest_framework.routers import DefaultRouter
from profiles_api import views
# The router auto-generates list/detail URL patterns for the ViewSet.
router = DefaultRouter()
router.register('hello-viewset', views.HelloViewSet, basename='hello-viewset')

urlpatterns = [
    # Plain APIView exposed at a fixed path.
    path('hello-view/', views.HelloApiView.as_view()),
    # Mount all router-generated ViewSet routes at the app root.
    path('', include(router.urls))
]
| 27.666667 | 78 | 0.762048 |
ace2162e855f2908c5ab48d6a7edcf4f0297ce35 | 1,991 | py | Python | docs/devguide/source/conf.py | draghuram/kubedr | f8891e8ffe76a909b8d50be565064dc6ed1fbee1 | [
"Apache-2.0"
] | 65 | 2020-01-15T22:41:14.000Z | 2021-12-27T11:25:27.000Z | docs/devguide/source/conf.py | draghuram/kubedr | f8891e8ffe76a909b8d50be565064dc6ed1fbee1 | [
"Apache-2.0"
] | 5 | 2020-01-27T22:12:06.000Z | 2020-02-11T19:13:04.000Z | docs/devguide/source/conf.py | draghuram/kubedr | f8891e8ffe76a909b8d50be565064dc6ed1fbee1 | [
"Apache-2.0"
] | 7 | 2020-01-23T15:15:40.000Z | 2021-08-05T05:07:19.000Z | # Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html

# -- Path setup --------------------------------------------------------------

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))

# -- Project information -----------------------------------------------------

project = 'KubeDR'
copyright = '2020, Catalogic Software'
author = 'Catalogic Software'

# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx_rtd_theme',
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []

# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# html_theme = 'alabaster'

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Extra stylesheet(s) loaded on top of the theme defaults.
html_css_files = [
    'css/custom.css',
]

# Project logo displayed in the documentation sidebar.
html_logo = "_static/images/logo-2.0-vert-xsmall.png"
| 32.639344 | 79 | 0.665495 |
ace21645c742ed1d4eead658b162d52c586861dc | 5,678 | py | Python | docs/conf.py | jeremyarr/younit | 48c23fbb535f6f3cf6227abcad66621d78dca2e8 | [
"MIT"
] | null | null | null | docs/conf.py | jeremyarr/younit | 48c23fbb535f6f3cf6227abcad66621d78dca2e8 | [
"MIT"
] | null | null | null | docs/conf.py | jeremyarr/younit | 48c23fbb535f6f3cf6227abcad66621d78dca2e8 | [
"MIT"
] | 1 | 2019-07-18T13:16:51.000Z | 2019-07-18T13:16:51.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# younit documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 11 10:27:34 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys

# Read the package version from younit/__version__.py without importing the
# package itself (avoids import-time side effects during doc builds).
here = os.path.abspath(os.path.dirname(__file__))
about = {}
with open(os.path.join(here, "..", 'younit', '__version__.py'), 'r') as f:
    exec(f.read(), about)

sys.path.insert(0, os.path.abspath('..'))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
              'sphinx.ext.todo',
              'sphinxcontrib.asyncio',
              'sphinx.ext.intersphinx']

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'younit'
copyright = '2017, Jeremy Arr'
author = 'Jeremy Arr'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = about['__version__']
# The full version, including alpha/beta/rc tags.
release = about['__version__']

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'

# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
    'show_powered_by': False,
    'github_user': 'jeremyarr',
    'github_repo': 'younit',
    'github_banner': True,
    'show_related': False
}

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# -- Options for HTMLHelp output ------------------------------------------

# Output file base name for HTML help builder.
htmlhelp_basename = 'younitdoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',

    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'younit.tex', 'younit Documentation',
     'Jeremy Arr', 'manual'),
]

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'younit', 'younit Documentation',
     [author], 1)
]

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'younit', 'younit Documentation',
     author, 'younit', 'One line description of project.',
     'Miscellaneous'),
]

# This value selects what content will be inserted into the main body of an
# autoclass directive. The possible values are:
# "class": Only the class' docstring is inserted. This is the default.
#          You can still document __init__ as a separate method using
#          automethod or the members option to autoclass.
# "both":  Both the class' and the __init__ method's docstrings are
#          concatenated and inserted.
# "init":  Only the __init__ method's docstring is inserted.
autoclass_content = 'both'

intersphinx_mapping = {'python': ('https://docs.python.org/3', None)}
| 30.691892 | 104 | 0.684572 |
ace2172b7861b310fb7a96ae912aea0324f453fc | 2,777 | py | Python | literature/test_game.py | neelsomani/literature | 6ad93bb28df2723af8117ce3754a2ae14600042e | [
"MIT"
] | 13 | 2019-09-02T08:13:55.000Z | 2021-01-30T08:25:49.000Z | literature/test_game.py | neelsomani/literature | 6ad93bb28df2723af8117ce3754a2ae14600042e | [
"MIT"
] | null | null | null | literature/test_game.py | neelsomani/literature | 6ad93bb28df2723af8117ce3754a2ae14600042e | [
"MIT"
] | 1 | 2021-04-18T16:45:57.000Z | 2021-04-18T16:45:57.000Z | """ Basic tests for the `Literature` class. """
from typing import List
import pytest
from literature.actor import Actor
from literature.card import (
Card,
Suit,
HalfSuit,
Half
)
from literature.constants import SETS
from literature.literature import Literature, Team
MISSING_CARD = Card.Name(3, Suit.CLUBS)


def two_player_mock(_: int) -> List[List[Card.Name]]:
    """Deal every minor card except MISSING_CARD to player 0, every major
    card plus MISSING_CARD to player 1, and nothing to players 2 and 3.

    The argument (number of players) is ignored.
    """
    minor_hand = [Card.Name(rank, suit)
                  for rank in SETS[Half.MINOR] for suit in Suit]
    minor_hand.remove(MISSING_CARD)
    major_hand = [Card.Name(rank, suit)
                  for rank in SETS[Half.MAJOR] for suit in Suit]
    major_hand.append(MISSING_CARD)
    return [minor_hand, major_hand, [], []]
@pytest.fixture()
def game():
    """Fresh 4-player game; players 0 and 1 hold all of the cards."""
    # Give two players all of the cards; turn_picker returning 0 presumably
    # selects player 0 as the starting actor (tests assert turn == Actor(0)).
    return Literature(4, hands_fn=two_player_mock, turn_picker=lambda: 0)
def test_game_not_complete(game):
    """Accessing `winner` before the game completes raises ValueError."""
    # There is no winner before the game is complete
    with pytest.raises(ValueError):
        # The original wrapped this in an ``assert`` that could never run
        # (the property raises first); a bare access states the intent.
        _ = game.winner
def test_turn_change(game):
    """Committing a claim hands the turn to the claiming actor, and the
    same half-suit cannot be claimed twice."""
    assert not game.completed
    assert game.turn == Actor(0)
    # Player 1 holds the full major diamonds half-suit (see two_player_mock).
    claims_1 = game.players[1].evaluate_claims()
    c = claims_1.pop(HalfSuit(Half.MAJOR, Suit.DIAMONDS))
    game.commit_claim(Actor(1), c)
    assert game.turn == Actor(1)
    # Re-claiming an already-claimed half-suit is rejected.
    with pytest.raises(ValueError):
        game.commit_claim(Actor(1), c)
def test_end_game_condition(game):
    """Claiming all half-suits ends the game; late claims can still flip
    the result back to NEITHER."""
    # Hand the one missing club to player 0 so they hold the full minor half.
    game.commit_move(
        game.players[0].asks(game.players[1]).to_give(MISSING_CARD))
    for c in game.players[0].evaluate_claims().values():
        game.commit_claim(Actor(0), c)
    assert game.completed
    assert game.winner == Team.EVEN
    # Allow claims after the game is over
    for c in game.players[1].evaluate_claims().values():
        game.commit_claim(Actor(1), c)
    assert game.winner == Team.NEITHER
def test_wrong_claim_conditions(game):
    """Incorrect claims are either discarded or awarded to the other team."""
    game.commit_move(
        game.players[0].asks(game.players[1]).to_give(MISSING_CARD))
    # Discard if we have all of the cards (but attribute one to the wrong
    # player).
    claims_0 = game.players[0].evaluate_claims()
    wrong_player = claims_0.pop(HalfSuit(Half.MINOR, Suit.DIAMONDS))
    wrong_player[Card.Name(3, Suit.DIAMONDS)] = Actor(2)
    game.commit_claim(Actor(0), wrong_player)
    assert game.claims[HalfSuit(Half.MINOR, Suit.DIAMONDS)] == Team.DISCARD
    # Award to the other team if we do not have all of the cards.
    claims_1 = game.players[1].evaluate_claims()
    wrong_team = claims_1.pop(HalfSuit(Half.MAJOR, Suit.DIAMONDS))
    for c in wrong_team:
        wrong_team[c] = Actor(0)
    game.commit_claim(Actor(0), wrong_team)
    assert game.claims[HalfSuit(Half.MAJOR, Suit.DIAMONDS)] == Team.ODD
def test_switch_turn_if_no_cards(game):
    """Asking a player who holds no cards passes the turn along."""
    # Player 3 was dealt an empty hand by two_player_mock; after the failed
    # ask the turn ends up with player 1 -- presumably the next eligible
    # holder of cards.
    game.commit_move(
        game.players[0].asks(game.players[3]).to_give(MISSING_CARD))
    assert game.turn == game.players[1]
| 30.516484 | 75 | 0.685272 |
ace2175a35c7daaf663edf7f1a1ead587dcdc845 | 9,261 | py | Python | mechanize/_util.py | berni69/mechanize | 6dbb1c56b3664c8b2e2157165d2837e7e8d70eaa | [
"BSD-3-Clause"
] | null | null | null | mechanize/_util.py | berni69/mechanize | 6dbb1c56b3664c8b2e2157165d2837e7e8d70eaa | [
"BSD-3-Clause"
] | null | null | null | mechanize/_util.py | berni69/mechanize | 6dbb1c56b3664c8b2e2157165d2837e7e8d70eaa | [
"BSD-3-Clause"
] | null | null | null | """Utility functions and date/time routines.
Copyright 2002-2006 John J Lee <jjl@pobox.com>
This code is free software; you can redistribute it and/or modify it
under the terms of the BSD or ZPL 2.1 licenses (see the file
COPYING.txt included with the distribution).
"""
from __future__ import absolute_import
import re
import time
import warnings
from calendar import timegm
class ExperimentalWarning(UserWarning):
    """Warning category for experimental mechanize features."""
    pass
def experimental(message):
    """Emit an ExperimentalWarning for *message*."""
    # stacklevel=3 attributes the warning to the caller of the API that
    # invoked this helper, not to the helper itself.
    warnings.warn(message, ExperimentalWarning, stacklevel=3)
def hide_experimental_warnings():
    """Suppress ExperimentalWarning messages process-wide."""
    warnings.filterwarnings("ignore", category=ExperimentalWarning)
def reset_experimental_warnings():
    """Restore default handling of ExperimentalWarning messages."""
    warnings.filterwarnings("default", category=ExperimentalWarning)
def deprecation(message):
    """Emit a DeprecationWarning for *message*."""
    # stacklevel=3 attributes the warning to the caller of the deprecated API.
    warnings.warn(message, DeprecationWarning, stacklevel=3)
def hide_deprecations():
    """Suppress DeprecationWarning messages process-wide."""
    warnings.filterwarnings("ignore", category=DeprecationWarning)
def reset_deprecations():
    """Restore default handling of DeprecationWarning messages."""
    warnings.filterwarnings("default", category=DeprecationWarning)
def read_file(filename):
    """Return the entire contents of *filename* as text."""
    # The with-statement closes the handle even when read() raises,
    # replacing the original explicit try/finally.
    with open(filename) as fh:
        return fh.read()
def write_file(filename, data):
    """Write *data* to *filename*, truncating any existing content.

    The file is opened in binary mode, so on Python 3 *data* should be a
    bytes-like object.
    """
    # The with-statement closes the handle even when write() raises,
    # replacing the original explicit try/finally.
    with open(filename, "wb") as f:
        f.write(data)
def get1(sequence):
    """Return the single element of *sequence*.

    Asserts that the sequence contains exactly one item (AssertionError
    otherwise, when assertions are enabled).
    """
    assert len(sequence) == 1
    return sequence[0]
def isstringlike(x):
    """Return True if *x* behaves like a string (supports ``x + ""``).

    Duck-type test: accepts str and str subclasses, and anything else whose
    ``+`` accepts a string operand.
    """
    try:
        x + ""
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.  TypeError is the usual failure, but
        # a custom __add__ may raise anything, so Exception is kept.
        return False
    else:
        return True
# def caller():
# try:
## raise SyntaxError
# except:
## import sys
# return sys.exc_traceback.tb_frame.f_back.f_back.f_code.co_name
# Date/time conversion routines for formats used by the HTTP protocol.
# Earliest year representable by this module's time handling.
EPOCH = 1970


def my_timegm(tt):
    """Like calendar.timegm, but return None for out-of-range fields."""
    year, month, mday, hour, minute, sec = tt[:6]
    in_range = (
        year >= EPOCH
        and 1 <= month <= 12
        and 1 <= mday <= 31
        and 0 <= hour <= 24
        and 0 <= minute <= 59
        and 0 <= sec <= 61
    )
    return timegm(tt) if in_range else None
# Weekday and month abbreviations as used in HTTP date formats.
days = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
months = ["Jan", "Feb", "Mar", "Apr", "May", "Jun",
          "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
# Lower-cased month names, for case-insensitive month lookups.
months_lower = [name.lower() for name in months]
def time2isoz(t=None):
    """Return a string representing time in seconds since epoch, t.

    If the function is called without an argument, it will use the current
    time.

    The format of the returned string is like "YYYY-MM-DD hh:mm:ssZ",
    representing Universal Time (UTC, aka GMT).  An example of this format is:

    1994-11-24 08:49:37Z

    """
    if t is None:
        t = time.time()
    # gmtime() yields (year, month, mday, hour, min, sec, ...) -- format the
    # first six fields directly.
    return "%04d-%02d-%02d %02d:%02d:%02dZ" % time.gmtime(t)[:6]
def time2netscape(t=None):
    """Return a string representing time in seconds since epoch, t.

    If the function is called without an argument, it will use the current
    time.

    The format of the returned string is like this:

    Wed, DD-Mon-YYYY HH:MM:SS GMT

    """
    # NOTE(review): unlike the docstring example, the format string below
    # does not place a comma after the weekday name -- confirm which form
    # consumers expect.
    if t is None:
        t = time.time()
    year, mon, mday, hour, min, sec, wday = time.gmtime(t)[:7]
    return "%s %02d-%s-%04d %02d:%02d:%02d GMT" % (
        days[wday], mday, months[mon - 1], year, hour, min, sec)
# Timezone names treated as equivalent to UTC (offset zero).
UTC_ZONES = {"GMT": None, "UTC": None, "UT": None, "Z": None}

# Numeric timezone specifier, e.g. "-0800", "+01:00", "+1".
timezone_re = re.compile(r"^([-+])?(\d\d?):?(\d\d)?$")


def offset_from_tz_string(tz):
    """Return the offset of timezone string *tz* from UTC, in seconds.

    None is returned when *tz* is neither a known UTC alias nor a numeric
    offset matched by timezone_re.
    """
    if tz in UTC_ZONES:
        return 0
    match = timezone_re.search(tz)
    if match is None:
        return None
    seconds = 3600 * int(match.group(2))
    if match.group(3):
        seconds += 60 * int(match.group(3))
    if match.group(1) == '-':
        seconds = -seconds
    return seconds
def _str2time(day, mon, yr, hr, min, sec, tz):
    """Convert parsed date fields (strings or None) to seconds since epoch.

    Returns None when the month, timezone, or any field is unusable.
    Two-digit years are mapped into the century closest to today.
    """
    # translate month name to number
    # month numbers start with 1 (January)
    try:
        mon = months_lower.index(mon.lower()) + 1
    except ValueError:
        # maybe it's already a number
        try:
            imon = int(mon)
        except ValueError:
            return None
        if 1 <= imon <= 12:
            mon = imon
        else:
            return None

    # make sure clock elements are defined
    if hr is None:
        hr = 0
    if min is None:
        min = 0
    if sec is None:
        sec = 0

    yr = int(yr)
    day = int(day)
    hr = int(hr)
    min = int(min)
    sec = int(sec)

    if yr < 1000:
        # find "obvious" year: pick the century that puts yr within
        # 50 years of the current date.
        cur_yr = time.localtime(time.time())[0]
        m = cur_yr % 100
        tmp = yr
        yr = yr + cur_yr - m
        m = m - tmp
        if abs(m) > 50:
            if m > 0:
                yr = yr + 100
            else:
                yr = yr - 100

    # convert UTC time tuple to seconds since epoch (not timezone-adjusted)
    t = my_timegm((yr, mon, day, hr, min, sec, tz))

    if t is not None:
        # adjust time using timezone string, to get absolute time since epoch
        if tz is None:
            tz = "UTC"
        tz = tz.upper()
        offset = offset_from_tz_string(tz)
        if offset is None:
            return None
        t = t - offset

    return t
strict_re = re.compile(r"^[SMTWF][a-z][a-z], (\d\d) ([JFMASOND][a-z][a-z]) "
r"(\d\d\d\d) (\d\d):(\d\d):(\d\d) GMT$")
wkday_re = re.compile(
r"^(?:Sun|Mon|Tue|Wed|Thu|Fri|Sat)[a-z]*,?\s*", re.I)
loose_http_re = re.compile(
r"""^
(\d\d?) # day
(?:\s+|[-\/])
(\w+) # month
(?:\s+|[-\/])
(\d+) # year
(?:
(?:\s+|:) # separator before clock
(\d\d?):(\d\d) # hour:min
(?::(\d\d))? # optional seconds
)? # optional clock
\s*
([-+]?\d{2,4}|(?![APap][Mm]\b)[A-Za-z]+)? # timezone
\s*
(?:\(\w+\))? # ASCII representation of timezone in parens.
\s*$""", re.X)
def http2time(text):
    """Return the time in seconds since the epoch for an HTTP date string.

    Return value is an integer; None is returned if the format of ``text``
    is unrecognized, the time is outside the representable range, or the
    timezone string is not recognized.  If the string contains no timezone,
    UTC is assumed.

    The timezone may be numerical (like "-0800" or "+0100") or a string
    timezone (like "UTC", "GMT", "BST" or "EST"); currently only the
    timezone strings equivalent to UTC (zero offset) are known.

    Loosely parsed formats:

     Wed, 09 Feb 1994 22:23:32 GMT       -- HTTP format
     Tuesday, 08-Feb-94 14:15:29 GMT     -- old rfc850 HTTP format
     Tuesday, 08-Feb-1994 14:15:29 GMT   -- broken rfc850 HTTP format
     09 Feb 1994 22:23:32 GMT            -- HTTP format (no weekday)
     08-Feb-94 14:15:29 GMT              -- rfc850 format (no weekday)
     08-Feb-1994 14:15:29 GMT            -- broken rfc850 format (no weekday)

    Leading/trailing whitespace is ignored, the time may be absent, and a
    2-digit year is resolved to the century closest to the current date.
    """
    # Fast path: strictly RFC 1123 conforming string.
    match = strict_re.search(text)
    if match:
        groups = match.groups()
        month = months_lower.index(groups[1].lower()) + 1
        return my_timegm((int(groups[2]), month, int(groups[0]),
                          int(groups[3]), int(groups[4]), float(groups[5])))

    # Messy fallback: strip leading space and any weekday name, then
    # apply the loose regexp.
    cleaned = wkday_re.sub("", text.lstrip(), 1)
    match = loose_http_re.search(cleaned)
    if match is None:
        return None  # bad format
    day, mon, yr, hr, mins, sec, tz = match.groups()
    return _str2time(day, mon, yr, hr, mins, sec, tz)
# Fix: the pattern was a plain (non-raw) string full of regex escapes such
# as \d and \s, which are invalid escape sequences in Python 3 (currently a
# DeprecationWarning, a SyntaxError in the future).  Making it a raw string
# compiles to the exact same pattern.
iso_re = re.compile(
    r"""^
    (\d{4})              # year
    [-\/]?
    (\d\d?)              # numerical month
    [-\/]?
    (\d\d?)              # day
    (?:
        (?:\s+|[-:Tt])   # separator before clock
        (\d\d?):?(\d\d)  # hour:min
        (?::?(\d\d(?:\.\d*)?))? # optional seconds (and fractional)
    )?                   # optional clock
    \s*
    ([-+]?\d\d?:?(:?\d\d)?
     |Z|z)?              # timezone  (Z is "zero meridian", i.e. GMT)
    \s*$""", re.X)
def iso2time(text):
    """As for http2time, but parses the ISO 8601 formats:

    1994-02-03 14:15:29 -0100    -- ISO 8601 format
    1994-02-03 14:15:29          -- zone is optional
    1994-02-03                   -- only date
    1994-02-03T14:15:29          -- Use T as separator
    19940203T141529Z             -- ISO 8601 compact format
    19940203                     -- only date
    """
    # Strip leading whitespace, then apply the loose ISO regexp.
    match = iso_re.search(text.lstrip())
    if match is None:
        return None  # bad format
    # The last group is the minutes sub-part of the timezone; it is
    # captured separately by the pattern and deliberately discarded here
    # (the full timezone string is in ``tz``).
    yr, mon, day, hr, mins, sec, tz, _extra = match.groups()
    return _str2time(day, mon, yr, hr, mins, sec, tz)
| 26.688761 | 79 | 0.560415 |
ace217c74b37bc17dfcf0fac2739fcfdac6cd988 | 5,586 | py | Python | keras/distribution_utils.py | rohitsroch/recurrent_bert | 9b47acd081433280fd68849a07ca3ca9b924a1f6 | [
"Apache-2.0"
] | null | null | null | keras/distribution_utils.py | rohitsroch/recurrent_bert | 9b47acd081433280fd68849a07ca3ca9b924a1f6 | [
"Apache-2.0"
] | 1 | 2020-09-17T09:46:54.000Z | 2020-09-17T09:46:54.000Z | keras/distribution_utils.py | rohitsroch/recurrent_bert | 9b47acd081433280fd68849a07ca3ca9b924a1f6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Helper functions for running models in a distributed setting."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import os
import random
import string
import tensorflow as tf
def _collective_communication(all_reduce_alg):
  """Map an all-reduce algorithm name to a CollectiveCommunication value.

  Args:
    all_reduce_alg: 'ring', 'nccl', or None (auto-select).

  Returns:
    A tf.distribute.experimental.CollectiveCommunication enum member.

  Raises:
    ValueError: if `all_reduce_alg` not in [None, 'ring', 'nccl'].
  """
  choices = {
      None: tf.distribute.experimental.CollectiveCommunication.AUTO,
      "ring": tf.distribute.experimental.CollectiveCommunication.RING,
      "nccl": tf.distribute.experimental.CollectiveCommunication.NCCL,
  }
  if all_reduce_alg in choices:
    return choices[all_reduce_alg]
  raise ValueError(
      "When used with `multi_worker_mirrored`, valid values for "
      "all_reduce_alg are ['ring', 'nccl']. Supplied value: {}".format(
          all_reduce_alg))
def _mirrored_cross_device_ops(all_reduce_alg, num_packs):
  """Build the CrossDeviceOps for MirroredStrategy, if one was requested.

  Args:
    all_reduce_alg: 'nccl', 'hierarchical_copy', or None (let TF decide).
    num_packs: number of gradient packs for the cross device op.

  Returns:
    A tf.distribute.CrossDeviceOps instance, or None when `all_reduce_alg`
    is None.

  Raises:
    ValueError: if `all_reduce_alg` not in [None, 'nccl',
      'hierarchical_copy'].
  """
  if all_reduce_alg is None:
    return None
  op_classes = {
      "nccl": tf.distribute.NcclAllReduce,
      "hierarchical_copy": tf.distribute.HierarchicalCopyAllReduce,
  }
  if all_reduce_alg not in op_classes:
    raise ValueError(
        "When used with `mirrored`, valid values for all_reduce_alg are "
        "['nccl', 'hierarchical_copy']. Supplied value: {}".format(
            all_reduce_alg))
  return op_classes[all_reduce_alg](num_packs=num_packs)
def get_distribution_strategy(distribution_strategy="mirrored",
                              num_gpus=0,
                              num_workers=1,
                              all_reduce_alg=None,
                              num_packs=1):
  """Return a DistributionStrategy for running the model.

  Args:
    distribution_strategy: a string specifying which distribution strategy to
      use, case insensitive. Values actually handled by this implementation
      are 'one_device', 'mirrored', 'parameter_server' and
      'multi_worker_mirrored'. NOTE(review): the upstream TF helper also
      documents 'off' and 'tpu', but this copy raises ValueError for them --
      confirm that is intended.
    num_gpus: Number of GPUs to run this model.
    num_workers: Number of workers to run this model. NOTE(review): unused in
      this copy -- multi-worker configuration presumably comes from the
      cluster spec/environment; verify before relying on it.
    all_reduce_alg: Optional. Specifies which algorithm to use when performing
      all-reduce. For `MirroredStrategy`, valid values are "nccl" and
      "hierarchical_copy". For `MultiWorkerMirroredStrategy`, valid values are
      "ring" and "nccl". If None, DistributionStrategy will choose based on
      device topology.
    num_packs: Optional. Sets the `num_packs` in `tf.distribute.NcclAllReduce`
      or `tf.distribute.HierarchicalCopyAllReduce` for `MirroredStrategy`.

  Returns:
    tf.distribute.DistributionStrategy object.

  Raises:
    ValueError: if `num_gpus` is negative; if `distribution_strategy` is
      'one_device' and `num_gpus` is larger than 1; or if
      `distribution_strategy` is not one of the handled values above.
  """
  if num_gpus < 0:
    raise ValueError("`num_gpus` can not be negative.")

  distribution_strategy = distribution_strategy.lower()

  if distribution_strategy == "multi_worker_mirrored":
    return tf.distribute.experimental.MultiWorkerMirroredStrategy(
        communication=_collective_communication(all_reduce_alg))

  if distribution_strategy == "one_device":
    if num_gpus == 0:
      return tf.distribute.OneDeviceStrategy("device:CPU:0")
    if num_gpus > 1:
      raise ValueError("`OneDeviceStrategy` can not be used for more than "
                       "one device.")
    return tf.distribute.OneDeviceStrategy("device:GPU:0")

  if distribution_strategy == "mirrored":
    # With no GPUs, mirror onto the CPU so the strategy API still works.
    if num_gpus == 0:
      devices = ["device:CPU:0"]
    else:
      devices = ["device:GPU:%d" % i for i in range(num_gpus)]
    return tf.distribute.MirroredStrategy(
        devices=devices,
        cross_device_ops=_mirrored_cross_device_ops(all_reduce_alg, num_packs))

  if distribution_strategy == "parameter_server":
    return tf.distribute.experimental.ParameterServerStrategy()

  raise ValueError(
      "Unrecognized Distribution Strategy: %r" % distribution_strategy)
| 41.686567 | 81 | 0.715539 |
ace2180e513073d1cd6403b9e55031326c35ef2d | 2,157 | py | Python | src/opnsense/scripts/filter/list_tables.py | ass-a2s/opnsense-core | a0634d180325f6afe3be7f514b4470e47ff5eb75 | [
"BSD-2-Clause"
] | 2 | 2019-03-15T03:35:54.000Z | 2019-03-15T07:50:36.000Z | src/opnsense/scripts/filter/list_tables.py | ass-a2s/opnsense-core | a0634d180325f6afe3be7f514b4470e47ff5eb75 | [
"BSD-2-Clause"
] | null | null | null | src/opnsense/scripts/filter/list_tables.py | ass-a2s/opnsense-core | a0634d180325f6afe3be7f514b4470e47ff5eb75 | [
"BSD-2-Clause"
] | null | null | null | #!/usr/local/bin/python3.6
"""
Copyright (c) 2015-2019 Ad Schellevis <ad@opnsense.org>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------------------
returns a list of pf tables (optional as a json container)
"""
import tempfile
import subprocess
import os
import sys
import ujson
if __name__ == '__main__':
    # Collect the names of all pf tables by parsing `pfctl -sT` output.
    result = []
    with tempfile.NamedTemporaryFile() as output_stream:
        # Fix: the original passed stderr=open(os.devnull, 'wb'), which
        # leaked the file handle; subprocess.DEVNULL discards stderr
        # without opening anything ourselves.
        subprocess.call(['/sbin/pfctl', '-sT'], stdout=output_stream,
                        stderr=subprocess.DEVNULL)
        output_stream.seek(0)
        for line in output_stream.read().decode().strip().split('\n'):
            result.append(line.strip())
    # handle command line argument (type selection)
    if len(sys.argv) > 1 and sys.argv[1] == 'json':
        print(ujson.dumps(result))
    else:
        # output plain, one table name per line
        for table in result:
            print(table)
| 41.480769 | 100 | 0.686138 |
ace2182fb49bbd3b183a207e426dcee8870b6ec7 | 4,389 | py | Python | SOSR.py | Xylon-Sean/local_feature_loss | 69c20ad8ec4dc11a1f5070b4bc9f4067a8da40ef | [
"MIT"
] | null | null | null | SOSR.py | Xylon-Sean/local_feature_loss | 69c20ad8ec4dc11a1f5070b4bc9f4067a8da40ef | [
"MIT"
] | null | null | null | SOSR.py | Xylon-Sean/local_feature_loss | 69c20ad8ec4dc11a1f5070b4bc9f4067a8da40ef | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# @Time : 2019/11/25 18:22
# @Author : xylon
import torch
from exceptions import EmptyTensorError
def SOS_KNN_ORI_HOR(xa, xp, knn):
    """
    Second Order Similarity, KNN-restricted, Original form, Horizontal
    (row-wise, dim=1) top-k selection.

    For each anchor, the union of its ``knn`` nearest neighbours in the
    anchor set and in the positive set is selected, and the disagreement
    of the two pairwise-distance matrices on those pairs is measured.

    :param xa: anchor descriptors, (n, d) tensor; rows are presumably
               L2-normalized so the dot product is cosine similarity
               -- TODO confirm with callers
    :param xp: positive descriptors, (n, d) tensor, row-aligned with ``xa``
    :param knn: number of nearest neighbours to keep (e.g. 8); must be < n
    :return: scalar tensor, mean over rows of sqrt of masked squared diffs
    :raises EmptyTensorError: if n <= knn
    """
    if xa.shape[0] <= knn:
        raise EmptyTensorError
    assert xa.shape[0] == xp.shape[0]
    n = xa.shape[0]
    device = xa.device

    # (n, n) negative cosine similarity in [0, 2]; closer to 0 is more similar.
    d_xixj = -torch.mm(xa, xa.t()) + 1
    # Push the diagonal out of top-k range so a point never picks itself.
    d_xixj = d_xixj + torch.eye(n).to(device) * 10
    # Bug fix: the knn parameter was ignored (k was hard-coded to 8).
    knn_xixj = d_xixj <= d_xixj.topk(k=knn, dim=1, largest=False)[0][:, -1][:, None]

    d_xipxjp = -torch.mm(xp, xp.t()) + 1
    d_xipxjp = d_xipxjp + torch.eye(n).to(device) * 10
    knn_xipxjp = d_xipxjp <= d_xipxjp.topk(k=knn, dim=1, largest=False)[0][:, -1][:, None]

    # Union of the two neighbourhood masks.
    clabel = torch.max(knn_xixj, knn_xipxjp)
    d_sos = (d_xixj - d_xipxjp) ** 2
    d_sos = (d_sos * clabel.float()).sum(dim=1).sqrt().mean()
    return d_sos
def SOS_KNN_ORI_VER(xa, xp, knn):
    """
    Second Order Similarity, KNN-restricted, Original form, Vertical
    (column-wise, dim=0) top-k selection.

    :param xa: anchor descriptors, (n, d) tensor; rows are presumably
               L2-normalized -- TODO confirm with callers
    :param xp: positive descriptors, (n, d) tensor, row-aligned with ``xa``
    :param knn: number of nearest neighbours to keep (e.g. 8); must be < n
    :return: scalar tensor, mean over rows of sqrt of masked squared diffs
    :raises EmptyTensorError: if n <= knn
    """
    if xa.shape[0] <= knn:
        raise EmptyTensorError
    assert xa.shape[0] == xp.shape[0]
    n = xa.shape[0]
    device = xa.device

    # (n, n) negative cosine similarity in [0, 2]; closer to 0 is more similar.
    d_xixj = -torch.mm(xa, xa.t()) + 1
    # Push the diagonal out of top-k range so a point never picks itself.
    d_xixj = d_xixj + torch.eye(n).to(device) * 10
    # Bug fix: the knn parameter was ignored (k was hard-coded to 8).
    knn_xixj = d_xixj <= d_xixj.topk(k=knn, dim=0, largest=False)[0][-1][None, :]

    d_xipxjp = -torch.mm(xp, xp.t()) + 1
    d_xipxjp = d_xipxjp + torch.eye(n).to(device) * 10
    knn_xipxjp = d_xipxjp <= d_xipxjp.topk(k=knn, dim=0, largest=False)[0][-1][None, :]

    # Union of the two neighbourhood masks.  NOTE(review): the reduction
    # below still sums over dim=1 even though selection is per-column;
    # kept as-is -- confirm against the paper/usage if in doubt.
    clabel = torch.max(knn_xixj, knn_xipxjp)
    d_sos = (d_xixj - d_xipxjp) ** 2
    d_sos = (d_sos * clabel.float()).sum(dim=1).sqrt().mean()
    return d_sos
def SOS_KNN_MOD_HOR(xa, xp, knn):
    """
    Second Order Similarity, KNN-restricted, Modified form, Horizontal
    (row-wise, dim=1) top-k selection.

    Unlike the ORI variants, the modified form averages the squared
    differences over the selected pairs only and scales by 1/4 (the
    maximum possible squared difference, since distances lie in [0, 2]).

    :param xa: anchor descriptors, (n, d) tensor; rows are presumably
               L2-normalized -- TODO confirm with callers
    :param xp: positive descriptors, (n, d) tensor, row-aligned with ``xa``
    :param knn: number of nearest neighbours to keep (e.g. 8); must be < n
    :return: scalar tensor in [0, 1]
    :raises EmptyTensorError: if n <= knn
    """
    if xa.shape[0] <= knn:
        raise EmptyTensorError
    assert xa.shape[0] == xp.shape[0]
    n = xa.shape[0]
    device = xa.device

    # (n, n) negative cosine similarity in [0, 2]; closer to 0 is more similar.
    d_xixj = -torch.mm(xa, xa.t()) + 1
    # Push the diagonal out of top-k range so a point never picks itself.
    d_xixj = d_xixj + torch.eye(n).to(device) * 10
    # Bug fix: the knn parameter was ignored (k was hard-coded to 8).
    knn_xixj = d_xixj <= d_xixj.topk(k=knn, dim=1, largest=False)[0][:, -1][:, None]

    d_xipxjp = -torch.mm(xp, xp.t()) + 1
    d_xipxjp = d_xipxjp + torch.eye(n).to(device) * 10
    knn_xipxjp = d_xipxjp <= d_xipxjp.topk(k=knn, dim=1, largest=False)[0][:, -1][:, None]

    # Union of the two neighbourhood masks.
    clabel = torch.max(knn_xixj, knn_xipxjp)
    dv_xixj = d_xixj.masked_select(clabel)
    dv_xipxjp = d_xipxjp.masked_select(clabel)
    d_sos = ((dv_xixj - dv_xipxjp) ** 2).mean() / 4
    return d_sos
def SOS_KNN_MOD_VER(xa, xp, knn):
    """
    Second Order Similarity, KNN-restricted, Modified form, Vertical
    (column-wise, dim=0) top-k selection.

    Averages the squared differences over the selected pairs only and
    scales by 1/4 (max possible squared difference for distances in [0, 2]).

    :param xa: anchor descriptors, (n, d) tensor; rows are presumably
               L2-normalized -- TODO confirm with callers
    :param xp: positive descriptors, (n, d) tensor, row-aligned with ``xa``
    :param knn: number of nearest neighbours to keep (e.g. 8); must be < n
    :return: scalar tensor in [0, 1]
    :raises EmptyTensorError: if n <= knn
    """
    if xa.shape[0] <= knn:
        raise EmptyTensorError
    assert xa.shape[0] == xp.shape[0]
    n = xa.shape[0]
    device = xa.device

    # (n, n) negative cosine similarity in [0, 2]; closer to 0 is more similar.
    d_xixj = -torch.mm(xa, xa.t()) + 1
    # Push the diagonal out of top-k range so a point never picks itself.
    d_xixj = d_xixj + torch.eye(n).to(device) * 10
    # Bug fix: the knn parameter was ignored (k was hard-coded to 8).
    knn_xixj = d_xixj <= d_xixj.topk(k=knn, dim=0, largest=False)[0][-1][None, :]

    d_xipxjp = -torch.mm(xp, xp.t()) + 1
    d_xipxjp = d_xipxjp + torch.eye(n).to(device) * 10
    knn_xipxjp = d_xipxjp <= d_xipxjp.topk(k=knn, dim=0, largest=False)[0][-1][None, :]

    # Union of the two neighbourhood masks.
    clabel = torch.max(knn_xixj, knn_xipxjp)
    dv_xixj = d_xixj.masked_select(clabel)
    dv_xipxjp = d_xipxjp.masked_select(clabel)
    d_sos = ((dv_xixj - dv_xipxjp) ** 2).mean() / 4
    return d_sos
| 32.511111 | 88 | 0.599681 |
ace219d588877a4dbd42f4eba73c06b76953a2bf | 899 | py | Python | pub_site/src/pub_site/frontpage/frontpage_views.py | webee/pay | b48c6892686bf3f9014bb67ed119506e41050d45 | [
"W3C"
] | 1 | 2019-10-14T11:51:49.000Z | 2019-10-14T11:51:49.000Z | pub_site/src/pub_site/frontpage/frontpage_views.py | webee/pay | b48c6892686bf3f9014bb67ed119506e41050d45 | [
"W3C"
] | null | null | null | pub_site/src/pub_site/frontpage/frontpage_views.py | webee/pay | b48c6892686bf3f9014bb67ed119506e41050d45 | [
"W3C"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function, division
from . import frontpage_mod as mod
from flask import render_template, redirect, url_for
from flask.ext.login import current_user, login_required
from pytoolbox.util.log import get_logger
from . import dba
log = get_logger(__name__)
@mod.route('/')
def main():
    """Render the public front page."""
    return render_template('frontpage/main.html')
@mod.route('/application')
@login_required
def application():
    """Record a leader application for the logged-in user.

    Non-leader accounts get a refusal page instead of an application.
    """
    if not current_user.is_leader:
        return render_template('frontpage/user_is_not_allowed.html')
    dba.add_leader_application(current_user.user_id, current_user.user_name)
    return redirect(url_for('.application_success'))
@mod.route('/application_success')
@login_required
def application_success():
    """Confirmation page shown after a leader application is submitted."""
    return render_template('frontpage/application_success.html')
| 27.242424 | 68 | 0.770857 |
ace21b48de0ef975d77fd79f9ea1fb07dbaff7d1 | 1,447 | py | Python | aliyun-python-sdk-qualitycheck/aliyunsdkqualitycheck/request/v20190115/RetrySubmitAsrRequest.py | ankitdobhal/aliyun-openapi-python-sdk | 991b1c2d91adc468480defc23ba790d4369cce7b | [
"Apache-2.0"
] | 1 | 2020-12-05T03:03:46.000Z | 2020-12-05T03:03:46.000Z | aliyun-python-sdk-qualitycheck/aliyunsdkqualitycheck/request/v20190115/RetrySubmitAsrRequest.py | hetw/aliyun-openapi-python-sdk | 7443eacee9fbbaa93c7975c6dbec92d3c364c577 | [
"Apache-2.0"
] | null | null | null | aliyun-python-sdk-qualitycheck/aliyunsdkqualitycheck/request/v20190115/RetrySubmitAsrRequest.py | hetw/aliyun-openapi-python-sdk | 7443eacee9fbbaa93c7975c6dbec92d3c364c577 | [
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkqualitycheck.endpoint import endpoint_data
class RetrySubmitAsrRequest(RpcRequest):
    """Request object for the Qualitycheck `RetrySubmitAsr` RPC API (version 2019-01-15)."""

    def __init__(self):
        RpcRequest.__init__(self, 'Qualitycheck', '2019-01-15', 'RetrySubmitAsr','Qualitycheck')
        self.set_method('POST')
        # Propagate SDK endpoint resolution data when the installed core
        # version exposes these attributes.
        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_JsonStr(self):
        # JSON-encoded payload carried in the 'JsonStr' query parameter.
        return self.get_query_params().get('JsonStr')

    def set_JsonStr(self,JsonStr):
        self.add_query_param('JsonStr',JsonStr)
ace21f309cc17d8562322e642e0756ad55c33216 | 9,034 | py | Python | ProgressedHttp/test/http_test.py | hellflame/progressed-http | 079fe4ee00095f2e7407a45ded91a6a21671ffc6 | [
"MIT"
] | null | null | null | ProgressedHttp/test/http_test.py | hellflame/progressed-http | 079fe4ee00095f2e7407a45ded91a6a21671ffc6 | [
"MIT"
] | null | null | null | ProgressedHttp/test/http_test.py | hellflame/progressed-http | 079fe4ee00095f2e7407a45ded91a6a21671ffc6 | [
"MIT"
] | null | null | null | # coding=utf8
from __future__ import absolute_import, division, print_function
import os
import unittest
import hashlib
import tempfile
from ProgressedHttp.http import *
from ProgressedHttp.http import quote
class HTTPTest(unittest.TestCase):
    """Integration tests for ProgressedHttp.http (network access required).

    static.hellflame.net
        Large files under this domain are usually served with chunked
        transfer encoding; small files may be cached and served with
        regular (non-chunked) encoding.  The files are random binary blobs
        whose size and MD5 hash are known in advance.
    raw.githubusercontent.com
        Files under this domain are served without chunking.
        The file comes from
        https://raw.githubusercontent.com/hellflame/qiniu_manager/v1.4.6/qiniuManager/manager.py
    """
    @staticmethod
    def chunked_info(resp):
        # Human-readable (Chinese) encoding label used in assertion
        # messages: "chunked encoding" vs "regular encoding".
        return "分块编码" if resp.chunked else "常规编码"

    def test_url_parser(self):
        """URL parsing: scheme/host/port/path defaults and overrides."""
        parser = HTTPCons.url_parser
        self.assertDictEqual(parser("http://www.hellflame.net:233/root/hellflame"), {
            'scheme': 'http',
            'host': 'www.hellflame.net',
            'port': 233,
            'href': '/root/hellflame'
        })
        self.assertDictEqual(parser("https://ok/"), {
            'scheme': 'https',
            'host': 'ok',
            'port': 443,
            'href': '/'
        })
        self.assertDictEqual(parser("hellflame.net/"), {
            'scheme': 'http',
            'host': 'hellflame.net',
            'port': 80,
            'href': '/'
        })
        self.assertDictEqual(parser("hellflame.net"), {
            'scheme': 'http',
            'host': 'hellflame.net',
            'port': 80,
            'href': '/'
        })
        # An explicit port wins over the scheme's default, in both directions.
        self.assertDictEqual(parser("http://hell.net:443"), {
            'scheme': 'http',
            'host': 'hell.net',
            'port': 443,
            'href': '/'
        })
        self.assertDictEqual(parser("https://hell.net:80"), {
            'scheme': 'https',
            'host': 'hell.net',
            'port': 80,
            'href': '/'
        })
        self.assertDictEqual(parser("hell.net:443"), {
            'scheme': 'http',
            'host': 'hell.net',
            'port': 443,
            'href': '/'
        })

    def test_http_parser_quote(self):
        """Non-ASCII query values are percent-encoded into the request line."""
        parser = HTTPCons.http_parser
        host = 'www.hellflame.net'
        href = '/what'
        method = 'get'
        result = parser(host, href, method, None, {'name': '中文'})
        href += '?name={}'.format(quote('中文'))
        self.assertEqual(result['request'], "{method} {href} HTTP/1.1".format(method=method.upper(), href=href))

    def test_http_parser_simple_get(self):
        """A plain GET carries the default headers and an empty entity."""
        parser = HTTPCons.http_parser
        host = 'www.hellflame.net'
        href = '/'
        method = 'get'
        basic_header = {
            'Host': host,
            'User-Agent': HTTPCons.user_agent,
            'Connection': 'close'
        }
        result = parser(host, href, method, None, None)
        self.assertEqual("{method} {href} HTTP/1.1".format(method=method.upper(), href=href), result['request'])
        self.assertEqual(result['entity'], '')
        for k, v in basic_header.items():
            self.assertTrue("{}: {}".format(k, v) in result['headers'])

    def test_http_parser_simple_post(self):
        """A POST keeps the default headers; the body becomes the entity."""
        parser = HTTPCons.http_parser
        host = 'www.hellflame.net'
        href = '/post'
        method = 'post'
        basic_header = {
            'Host': host,
            'User-Agent': HTTPCons.user_agent,
            'Connection': 'close'
        }
        result = parser(host, href, method, None, None)
        self.assertEqual("{method} {href} HTTP/1.1".format(method=method.upper(), href=href), result['request'])
        self.assertEqual(result['entity'], '')
        for k, v in basic_header.items():
            self.assertTrue("{}: {}".format(k, v) in result['headers'])
        data = "this is post data part"
        result = parser(host, href, method, None, data)
        self.assertEqual("{method} {href} HTTP/1.1".format(method=method.upper(), href=href), result['request'])
        self.assertEqual(result['entity'], data)
        for k, v in basic_header.items():
            self.assertTrue("{}: {}".format(k, v) in result['headers'])

    def test_http_parser_with_header(self):
        """Caller-supplied headers are merged with the defaults."""
        parser = HTTPCons.http_parser
        host = 'www.hellflame.net'
        href = '/post'
        method = 'post'
        basic_header = {
            'Host': host,
            'User-Agent': HTTPCons.user_agent,
            'Connection': 'close',
            'Access-Allow-Origin': '*',
            'Name': 'Done'
        }
        result = parser(host, href, method, {'Access-Allow-Origin': '*', 'Name': 'Done'}, None)
        self.assertEqual("{method} {href} HTTP/1.1".format(method=method.upper(), href=href), result['request'])
        self.assertEqual(result['entity'], '')
        for k, v in basic_header.items():
            self.assertTrue("{}: {}".format(k, v) in result['headers'])
        data = "this is post data part"
        result = parser(host, href, method, {'Access-Allow-Origin': '*', 'Name': 'Done'}, data)
        self.assertEqual("{method} {href} HTTP/1.1".format(method=method.upper(), href=href), result['request'])
        self.assertEqual(result['entity'], data)
        for k, v in basic_header.items():
            self.assertTrue("{}: {}".format(k, v) in result['headers'])

    def test_response_in_memory(self):
        """A ~1MB download held in memory matches the known MD5."""
        req = HTTPCons()
        req.request("https://static.hellflame.net/resource/c8c12b1c34af9808c34fa60d862016b7")
        resp = SockFeed(req)
        resp.disable_progress = True
        resp.http_response()
        self.assertEqual(hashlib.md5(resp.data).hexdigest(),
                         '9a50ddbef4c82eb9003bd496a00e0989',
                         "请保持数据获取正确完整, " + self.chunked_info(resp))

    def test_response_downloading(self):
        """A ~1MB download written to disk is complete and closed."""
        file_path = os.path.join(tempfile.gettempdir(), '1m.data')
        req = HTTPCons()
        req.request("https://static.hellflame.net/resource/c8c12b1c34af9808c34fa60d862016b7")
        resp = SockFeed(req)
        resp.disable_progress = True
        resp.http_response(file_path, overwrite=True)
        with open(file_path, 'rb') as handle:
            # If the request above did not close the file, the full
            # content cannot be read back here.
            content = handle.read()
        os.remove(resp.file_handle.name)
        self.assertEqual(hashlib.md5(content).hexdigest(),
                         '9a50ddbef4c82eb9003bd496a00e0989',
                         "这里出错,多半是因为没有关闭文件, " + self.chunked_info(resp))

    def test_small_response_in_memory(self):
        """A small (~3KB, possibly cached/non-chunked) download matches its MD5."""
        req = HTTPCons()
        req.request("https://static.hellflame.net/resource/5573012afe7227ab4457331df42af57d")
        resp = SockFeed(req)
        resp.disable_progress = True
        resp.http_response()
        self.assertEqual(hashlib.md5(resp.data).hexdigest(),
                         '8688229badcaa3cb2730dab99a618be6',
                         "请保持数据获取正确完整, " + self.chunked_info(resp))

    def test_small_response_downloading(self):
        """A small download written to disk is complete and closed."""
        file_path = os.path.join(tempfile.gettempdir(), '3k.data')
        req = HTTPCons()
        req.request("https://static.hellflame.net/resource/5573012afe7227ab4457331df42af57d")
        resp = SockFeed(req)
        resp.disable_progress = True
        resp.http_response(file_path, overwrite=True)
        with open(file_path, 'rb') as handle:
            # If the request above did not close the file, the full
            # content cannot be read back here.
            content = handle.read()
        os.remove(resp.file_handle.name)
        self.assertEqual(hashlib.md5(content).hexdigest(),
                         '8688229badcaa3cb2730dab99a618be6',
                         "这里出错,多半是因为没有关闭文件, " + self.chunked_info(resp))

    def test_request_get(self):
        """The convenience get() wrapper returns the full body."""
        resp = get("https://static.hellflame.net/resource/5573012afe7227ab4457331df42af57d", disable_progress=True)
        self.assertEqual(hashlib.md5(resp.data).hexdigest(), '8688229badcaa3cb2730dab99a618be6')

    @unittest.skip("GFW's Fault")
    def test_non_chunked_in_memory(self):
        """A non-chunked download held in memory matches its MD5 (skipped: host unreachable)."""
        req = HTTPCons()
        req.request("https://raw.githubusercontent.com/hellflame/qiniu_manager/v1.4.6/qiniuManager/manager.py")
        resp = SockFeed(req)
        resp.disable_progress = True
        resp.http_response()
        self.assertEqual(hashlib.md5(resp.data).hexdigest(),
                         '276efce035d49f7f3ea168b720075523',
                         "请保持数据获取正确完整," + self.chunked_info(resp))

    @unittest.skip("GFW's Fault")
    def test_test_non_chunked_downloading(self):
        """A non-chunked download written to disk matches its MD5 (skipped: host unreachable)."""
        file_path = os.path.join(tempfile.gettempdir(), 'manager.py')
        req = HTTPCons()
        req.request("https://raw.githubusercontent.com/hellflame/qiniu_manager/v1.4.6/qiniuManager/manager.py")
        resp = SockFeed(req)
        resp.disable_progress = True
        resp.http_response(file_path, overwrite=True)
        with open(file_path, 'rb') as handle:
            content = handle.read()
        os.remove(resp.file_handle.name)
        self.assertEqual(hashlib.md5(content).hexdigest(),
                         '276efce035d49f7f3ea168b720075523',
                         "请保持数据获取正确完整," + self.chunked_info(resp))
if __name__ == '__main__':
    # Run the test suite directly with per-test verbose output.
    unittest.main(verbosity=2)
| 36.136 | 115 | 0.582798 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.