text
stringlengths 4
1.02M
| meta
dict |
|---|---|
from django.views.generic import TemplateView
class IndexView(TemplateView):
    """Serve the front-end single-page-app shell template."""
    # Django's TemplateView renders this template for every GET.
    template_name = 'ng/index.html'
|
{
"content_hash": "35926db33d5db63ea327fe1610bf6650",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 45,
"avg_line_length": 28.5,
"alnum_prop": 0.7807017543859649,
"repo_name": "XeryusTC/18xx-accountant",
"id": "b78eb588b521c049763b2d4b3e59639b510aa9c0",
"size": "138",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "accountant/ng/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "56853"
},
{
"name": "HTML",
"bytes": "38371"
},
{
"name": "JavaScript",
"bytes": "1864"
},
{
"name": "Python",
"bytes": "595122"
},
{
"name": "Shell",
"bytes": "2580"
},
{
"name": "TypeScript",
"bytes": "163804"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial schema: create the ``Switch`` model (one row per network switch)."""

    # First migration of the app -- nothing to depend on.
    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Switch',
            # NOTE: the b'' literals are Python 2-era artifacts; they are part of
            # the recorded migration state and must be kept verbatim.
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('serialno', models.CharField(default=b'', max_length=50, verbose_name=b'Serial No')),
                ('ip', models.GenericIPAddressField(verbose_name=b'IP')),
                ('name', models.CharField(default=b'', max_length=50, verbose_name=b'Name')),
                ('mac', models.CharField(default=b'', max_length=12, verbose_name=b'MAC')),
                ('model', models.CharField(default=b'', max_length=50, verbose_name=b'Model')),
                ('softwarever', models.CharField(default=b'', max_length=20, verbose_name=b'Software Version')),
                ('uptime', models.CharField(default=b'', max_length=50, verbose_name=b'Uptime')),
                ('stack', models.IntegerField(default=0, verbose_name=b'Stack')),
                ('purchaseyr', models.CharField(default=b'', max_length=4, verbose_name=b'Purchase Year')),
                ('purchaseorder', models.CharField(default=b'', max_length=50, verbose_name=b'Purchase Order')),
                ('uplink1', models.TextField(default=b'', verbose_name=b'Uplink 1')),
                ('uplink2', models.TextField(default=b'', verbose_name=b'Uplink 2')),
                ('uplink3', models.TextField(default=b'', verbose_name=b'Uplink 3')),
                ('uplink4', models.TextField(default=b'', verbose_name=b'Uplink 4')),
                ('notes', models.TextField(default=b'', verbose_name=b'Notes')),
                ('autoupdate', models.IntegerField(default=1, verbose_name=b'Autoupdate')),
                ('status', models.CharField(default=b'active', max_length=128, verbose_name=b'Status')),
                ('lastupdate', models.DateTimeField(auto_now=True, verbose_name=b'Last Update')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
|
{
"content_hash": "f0cff30264d5ed4df20d0b92bb2041d3",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 114,
"avg_line_length": 55.46153846153846,
"alnum_prop": 0.5839112343966713,
"repo_name": "lindseypack/NIM",
"id": "268ed7edd23bd0b37ecca1f5d19cf48c81f9857c",
"size": "2187",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "devices/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "55802"
},
{
"name": "HTML",
"bytes": "6204"
},
{
"name": "JavaScript",
"bytes": "114263"
},
{
"name": "Python",
"bytes": "63070"
}
],
"symlink_target": ""
}
|
"""Support for monitoring a Smappee appliance binary sensor."""
from __future__ import annotations
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .const import DOMAIN
# Prefixes used when composing entity display names below.
BINARY_SENSOR_PREFIX = "Appliance"
PRESENCE_PREFIX = "Presence"
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the Smappee binary sensor."""
    smappee_base = hass.data[DOMAIN][config_entry.entry_id]

    entities: list[BinarySensorEntity] = []
    for location in smappee_base.smappee.service_locations.values():
        # One sensor per NILM-detected appliance, skipping "Find me" entries.
        entities.extend(
            SmappeeAppliance(
                smappee_base=smappee_base,
                service_location=location,
                appliance_id=app_id,
                appliance_name=app.name,
                appliance_type=app.type,
            )
            for app_id, app in location.appliances.items()
            if app.source_type == "NILM" and app.type != "Find me"
        )
        if not smappee_base.smappee.local_polling:
            # presence value only available in cloud env
            entities.append(SmappeePresence(smappee_base, location))
    async_add_entities(entities, True)
class SmappeePresence(BinarySensorEntity):
    """Binary sensor reporting whether anyone is present at a service location."""

    def __init__(self, smappee_base, service_location):
        """Store the shared Smappee handle and the monitored location."""
        self._smappee_base = smappee_base
        self._service_location = service_location
        self._state = self._service_location.is_present

    @property
    def name(self):
        """Return the name of the binary sensor."""
        location_name = self._service_location.service_location_name
        return f"{location_name} - {PRESENCE_PREFIX}"

    @property
    def is_on(self):
        """Return True when presence is currently detected."""
        return self._state

    @property
    def device_class(self):
        """Return the class of this device, from component DEVICE_CLASSES."""
        return BinarySensorDeviceClass.PRESENCE

    @property
    def unique_id(self):
        """Return the unique ID for this binary sensor."""
        serial = self._service_location.device_serial_number
        location_id = self._service_location.service_location_id
        return f"{serial}-{location_id}-{BinarySensorDeviceClass.PRESENCE}"

    @property
    def device_info(self) -> DeviceInfo:
        """Return the device info for this binary sensor."""
        location = self._service_location
        return DeviceInfo(
            identifiers={(DOMAIN, location.device_serial_number)},
            manufacturer="Smappee",
            model=location.device_model,
            name=location.service_location_name,
            sw_version=location.firmware_version,
        )

    async def async_update(self) -> None:
        """Refresh the shared Smappee data and re-read the presence flag."""
        await self._smappee_base.async_update()
        self._state = self._service_location.is_present
class SmappeeAppliance(BinarySensorEntity):
    """Binary sensor for one NILM-detected appliance at a service location."""

    def __init__(
        self,
        smappee_base,
        service_location,
        appliance_id,
        appliance_name,
        appliance_type,
    ):
        """Initialize the Smappee sensor."""
        self._smappee_base = smappee_base
        self._service_location = service_location
        self._appliance_id = appliance_id
        self._appliance_name = appliance_name
        self._appliance_type = appliance_type
        self._state = False

    @property
    def name(self):
        """Return the name of the sensor."""
        # Fall back to the appliance type only when the name is the empty string.
        label = self._appliance_name if self._appliance_name != '' else self._appliance_type
        return (
            f"{self._service_location.service_location_name} - "
            f"{BINARY_SENSOR_PREFIX} - "
            f"{label}"
        )

    @property
    def is_on(self):
        """Return if the binary sensor is turned on."""
        return self._state

    @property
    def icon(self):
        """Icon to use in the frontend (None for unknown appliance types)."""
        return {
            "Car Charger": "mdi:car",
            "Coffeemaker": "mdi:coffee",
            "Clothes Dryer": "mdi:tumble-dryer",
            "Clothes Iron": "mdi:hanger",
            "Dishwasher": "mdi:dishwasher",
            "Lights": "mdi:lightbulb",
            "Fan": "mdi:fan",
            "Freezer": "mdi:fridge",
            "Microwave": "mdi:microwave",
            "Oven": "mdi:stove",
            "Refrigerator": "mdi:fridge",
            "Stove": "mdi:stove",
            "Washing Machine": "mdi:washing-machine",
            "Water Pump": "mdi:water-pump",
        }.get(self._appliance_type)

    @property
    def unique_id(self):
        """Return the unique ID for this binary sensor."""
        location = self._service_location
        return (
            f"{location.device_serial_number}-"
            f"{location.service_location_id}-"
            f"appliance-{self._appliance_id}"
        )

    @property
    def device_info(self) -> DeviceInfo:
        """Return the device info for this binary sensor."""
        location = self._service_location
        return DeviceInfo(
            identifiers={(DOMAIN, location.device_serial_number)},
            manufacturer="Smappee",
            model=location.device_model,
            name=location.service_location_name,
            sw_version=location.firmware_version,
        )

    async def async_update(self) -> None:
        """Refresh the shared Smappee data and re-read this appliance's state."""
        await self._smappee_base.async_update()
        appliance = self._service_location.appliances.get(self._appliance_id)
        self._state = bool(appliance.state)
|
{
"content_hash": "3bc921402e7f7a166bb3a13ca5d94563",
"timestamp": "",
"source": "github",
"line_count": 182,
"max_line_length": 93,
"avg_line_length": 34.16483516483517,
"alnum_prop": 0.6080733354776455,
"repo_name": "mezz64/home-assistant",
"id": "88d46e3689d22649bd0c0d5eb3d8ed7a236cdcac",
"size": "6218",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "homeassistant/components/smappee/binary_sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2963"
},
{
"name": "PLSQL",
"bytes": "840"
},
{
"name": "Python",
"bytes": "52481895"
},
{
"name": "Shell",
"bytes": "6252"
}
],
"symlink_target": ""
}
|
import sys
import sqlite3
import random
import xml.etree.ElementTree as etree
import urllib.parse
def get_files_info(sqlite_filename, number_of_files):
    """Return (SourceFile, ImageWidth, ImageHeight) rows for random images.

    Picks *number_of_files* random rowids from the ``images`` table of the
    read-only SQLite database at *sqlite_filename*.  Because randint may
    repeat a rowid and IN() de-duplicates, fewer rows than requested can be
    returned.
    """
    # mode=ro: the database is opened strictly read-only via the URI form.
    conn = sqlite3.connect(r'file:///' + sqlite_filename + '?mode=ro', uri=True)
    try:
        c = conn.cursor()
        c.execute('SELECT COUNT(*) FROM images')
        count, = c.fetchone()
        random_rowids = [random.randint(1, count) for _ in range(number_of_files)]
        joined_rowids = ','.join(str(rowid) for rowid in random_rowids)
        print("selected rows=", joined_rowids)
        # Parameterized query instead of string concatenation into SQL.
        placeholders = ','.join('?' * len(random_rowids))
        c.execute(
            'SELECT SourceFile, ImageWidth, ImageHeight FROM images '
            'WHERE rowid IN (%s)' % placeholders,
            random_rowids,
        )
        result = c.fetchall()
        c.close()
        return result
    finally:
        # sqlite3's "with conn" only manages transactions, not closing;
        # close explicitly so the handle is not leaked.
        conn.close()
# Skeleton RSS 2.0 document (with the Yahoo Media RSS extension namespace);
# generate_rss() parses this and appends <item> elements to the <channel>.
RSS_TEMPLATE = '''<?xml version="1.0" encoding="utf-8"?>
<rss version="2.0" xmlns:media="http://search.yahoo.com/mrss/">
<channel>
<title>Home photos</title>
<link>http://192.168.2.10/Photos</link>
<description>Home photos</description>
<pubDate>Sun, 20 Feb 2011 07:21:34 -0800</pubDate>
<lastBuildDate>Sun, 20 Feb 2011 07:21:34 -0800</lastBuildDate>
</channel>
</rss>
'''
MEDIA_NAMESPACE = 'http://search.yahoo.com/mrss/'
# Keep the conventional "media" prefix when serializing (instead of ns0).
etree.register_namespace('media', MEDIA_NAMESPACE)
def get_image_title(relative_url):
    """Return the last two path segments of *relative_url*.

    When the url contains fewer than two slashes it is returned unchanged.
    """
    parts = relative_url.rsplit('/', 2)
    if len(parts) == 3:
        # Drop everything up to and including the second-to-last slash.
        return '/'.join(parts[1:])
    return relative_url
def fill_image_element(elem, absolute_url, image_width, image_height, duration_in_seconds):
    """Set the Media RSS image attributes (url/type/size/duration) on *elem*."""
    attributes = {
        'url': absolute_url,
        'type': 'image/jpeg',
        'width': str(image_width),
        'height': str(image_height),
        'duration': str(duration_in_seconds),
    }
    for key, value in attributes.items():
        elem.set(key, value)
def generate_rss(files_info, target_rss_file_path, duration_in_seconds, root_uri, prefix_to_remove):
    """Write an RSS file listing *files_info* entries as Media RSS image items."""
    root = etree.fromstring(RSS_TEMPLATE)
    channel = root.find('channel')
    link = channel.find('link')
    if root_uri is None:
        root_uri = link.text  # fall back to the template's <link> value
    if not root_uri.endswith('/'):
        root_uri += '/'
    link.text = root_uri
    # Time to live (in minutes): one full slideshow cycle.
    ttl = (len(files_info) * duration_in_seconds) // 60
    etree.SubElement(channel, 'ttl').text = str(ttl)
    media = '{' + MEDIA_NAMESPACE + '}'
    for source_file, image_width, image_height in files_info:
        if source_file.startswith(prefix_to_remove):
            source_file = source_file[len(prefix_to_remove):]
        absolute_url = urllib.parse.urljoin(root_uri, urllib.parse.quote(source_file))
        item = etree.SubElement(channel, 'item')
        etree.SubElement(item, 'title').text = get_image_title(source_file)
        etree.SubElement(item, 'guid').text = absolute_url
        # Thumbnail and content carry the same image attributes.
        for tag in ('thumbnail', 'content'):
            child = etree.SubElement(item, media + tag)
            fill_image_element(child, absolute_url, image_width, image_height, duration_in_seconds)
    etree.ElementTree(root).write(target_rss_file_path)
def process(db_file_path, target_rss_file_path, number_of_items, duration_in_seconds, root_uri, prefix_to_remove):
    """Pick random images from the database and write them out as an RSS feed."""
    files_info = get_files_info(db_file_path, number_of_items)
    generate_rss(files_info, target_rss_file_path, duration_in_seconds, root_uri, prefix_to_remove)
def main():
    """CLI entry point: read six positional arguments and generate the feed.

    Argument order: db file, output RSS path, item count, per-item duration
    in seconds, prefix to strip from stored paths, root URI of the server.
    """
    argv = sys.argv
    db_file_path = argv[1]          # e.g. images.sqlite
    target_rss_file_path = argv[2]  # e.g. images_rss.xml
    number_of_items = int(argv[3])
    duration_in_seconds = int(argv[4])
    prefix_to_remove = argv[5]      # e.g. './'
    root_uri = argv[6]              # e.g. 'http://192.168.2.2/'
    process(db_file_path, target_rss_file_path, number_of_items,
            duration_in_seconds, root_uri, prefix_to_remove)


if __name__ == "__main__":
    main()
|
{
"content_hash": "dccd6fe5d9088fcbfc51a1390db60c4b",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 114,
"avg_line_length": 38.94444444444444,
"alnum_prop": 0.6412268188302425,
"repo_name": "agavrilov/HomeImageRSSLinux",
"id": "d2816ec010976aae4b26530ec894525bc733c8ff",
"size": "4229",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "generate-rss.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4229"
},
{
"name": "Shell",
"bytes": "2162"
}
],
"symlink_target": ""
}
|
import os
import re
from . import compat
class RmoqStorageBackend(object):
    """
    Abstract base class for rmoq storage backends.

    Concrete backends must implement get() and put(); the helpers here are
    shared by all of them.
    """

    def get(self, prefix, url):
        """
        Fetch a request response from storage. Must be overridden by subclasses.

        :param prefix: A prefix that separates containers of request responses in the storage.
        :param url: The url of the request.
        """
        raise NotImplementedError

    def put(self, prefix, url, content, content_type):
        """
        Write a request response into storage. Must be overridden by subclasses.

        :param prefix: A prefix that separates containers of request responses in the storage.
        :param url: The url of the request.
        :param content: The content of the request response.
        :param content_type: The content type header of the request response.
        """
        raise NotImplementedError

    @staticmethod
    def _parse(content):
        # The first line holds the content type, everything after it the body.
        first_line, _, body = content.partition('\n')
        return (compat.make_str(first_line), compat.make_str(body))

    @staticmethod
    def clean_url(url, replacement='_'):
        """
        Strip the protocol prefix and trailing slash from *url*, then replace
        special characters with *replacement*.

        :param url: The url of the request.
        :param replacement: A string that is used to replace special characters.
        """
        cleaned = re.sub(r'https?://', '', url)
        cleaned = re.sub(r'/$', '', cleaned)
        for character in ('/', '_', '?', '&', ':', ';', '%'):
            cleaned = cleaned.replace(character, replacement)
        return cleaned
class FileStorageBackend(RmoqStorageBackend):
    """
    Default rmoq backend: stores responses as text files on the local
    file system.
    """

    def get(self, prefix, url):
        """Return the stored response for *url*, or None when no file exists."""
        path = self.get_filename(prefix, url)
        if not os.path.exists(path):
            return None
        with open(path) as f:
            return compat.read_file(f)

    def put(self, prefix, url, content, content_type):
        """Write the content-type line followed by the body to disk."""
        path = self.get_filename(prefix, url)
        directory = os.path.dirname(path)
        if not os.path.exists(directory):
            os.makedirs(directory)
        with open(path, mode='w') as f:
            f.write(content_type)
            f.write('\n')
            f.write(compat.prepare_for_write(content))

    def get_filename(self, prefix, url):
        """
        Create a file path of the form: current-working-directory/prefix/cleaned-url.txt

        :param prefix: The prefix from the .get() and .put() methods.
        :param url: The url of the request.
        :return: The created path.
        """
        base = os.path.join(os.getcwd(), prefix, self.clean_url(url))
        return '{}.txt'.format(base)
class MemcachedStorageBackend(RmoqStorageBackend):
    """
    rmoq backend backed by memcached. Takes the same arguments as
    python-memcached: a list of servers and options as keyword arguments.
    """

    def __init__(self, servers, **options):
        # Imported lazily so rmoq works without python-memcached installed.
        import memcache
        self.client = memcache.Client(servers=servers, **options)

    def get(self, prefix, url):
        """Return the parsed (content_type, body) tuple stored for *url*."""
        key = self.create_key(prefix, url)
        return self._parse(self.client.get(key))

    def put(self, prefix, url, content, content_type):
        """Store the response under a cleaned key with a 24 hour expiry."""
        payload = '\n'.join([content_type, content])
        return self.client.add(self.create_key(prefix, url), payload, 60 * 60 * 24)

    def create_key(self, *parts):
        """Concatenate the cleaned parts into a single memcached key."""
        return ''.join(self.clean_url(part, '') for part in parts)
|
{
"content_hash": "637538487093369c8cba5fb7d67df7be",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 94,
"avg_line_length": 33.403669724770644,
"alnum_prop": 0.6212578961823675,
"repo_name": "relekang/rmoq",
"id": "a92394df5963af73602c047781f2f6c1fd280c58",
"size": "3665",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rmoq/backends.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "13907"
}
],
"symlink_target": ""
}
|
import tq
import redis
import argparse
#-------------------------------------------------------------------------------
def main():
    """Parse CLI options and run a tq worker against the given queues."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--host", default='localhost',
                        help="hostname of the redis instance")
    parser.add_argument("--port", type=int, default=6379,
                        help="port number of the redis instance")
    parser.add_argument("--db", type=int, default=0,
                        help="db number of the redis instance")
    parser.add_argument('queues', metavar='queue', nargs='+',
                        help='name of a queue to process')
    args = parser.parse_args()

    # Connect once, then let the worker loop over the requested queues.
    connection = redis.Redis(host=args.host, port=args.port, db=args.db)
    tq.Worker(connection).work(args.queues)


if __name__ == '__main__':
    main()
|
{
"content_hash": "046bcbe3ac8ed439aba7eb43b51d44b7",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 80,
"avg_line_length": 22.170731707317074,
"alnum_prop": 0.49064906490649063,
"repo_name": "johansten/tq",
"id": "8725dfff01af7b0608812e1678432fadcfcfbd1e",
"size": "928",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bin/tqworker.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "10980"
}
],
"symlink_target": ""
}
|
"Test multicall, coverage 33%."
from idlelib import multicall
import unittest
from test.support import requires
from tkinter import Tk, Text
class MultiCallTest(unittest.TestCase):
    # Smoke tests for multicall.MultiCallCreator wrapping a tkinter Text widget.

    @classmethod
    def setUpClass(cls):
        requires('gui')  # skips the whole class when no display is available
        cls.root = Tk()
        cls.root.withdraw()  # keep the test window hidden
        cls.mc = multicall.MultiCallCreator(Text)
    @classmethod
    def tearDownClass(cls):
        del cls.mc
        cls.root.update_idletasks()
##         for id in cls.root.tk.call('after', 'info'):
##             cls.root.after_cancel(id)  # Need for EditorWindow.
        cls.root.destroy()
        del cls.root
    def test_creator(self):
        # MultiCallCreator must cache per widget class and subclass it.
        mc = self.mc
        self.assertIs(multicall._multicall_dict[Text], mc)
        self.assertTrue(issubclass(mc, Text))
        mc2 = multicall.MultiCallCreator(Text)
        self.assertIs(mc, mc2)
    def test_init(self):
        # Instantiating the wrapped widget sets up the private binder list.
        mctext = self.mc(self.root)
        self.assertIsInstance(mctext._MultiCall__binders, list)
if __name__ == '__main__':
    unittest.main(verbosity=2)
|
{
"content_hash": "33ff06387bc4736142c497bf40ea2658",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 65,
"avg_line_length": 26.05,
"alnum_prop": 0.6285988483685221,
"repo_name": "huguesv/PTVS",
"id": "68156a743d7b9b63050082b6cde0655a20211672",
"size": "1042",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "Python/Product/Miniconda/Miniconda3-x64/Lib/idlelib/idle_test/test_multicall.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "109"
},
{
"name": "Batchfile",
"bytes": "10898"
},
{
"name": "C",
"bytes": "23236"
},
{
"name": "C#",
"bytes": "12464429"
},
{
"name": "C++",
"bytes": "211838"
},
{
"name": "CSS",
"bytes": "7025"
},
{
"name": "HTML",
"bytes": "34251"
},
{
"name": "JavaScript",
"bytes": "87257"
},
{
"name": "PowerShell",
"bytes": "25220"
},
{
"name": "Python",
"bytes": "913395"
},
{
"name": "Rich Text Format",
"bytes": "260880"
},
{
"name": "Smarty",
"bytes": "8156"
},
{
"name": "Tcl",
"bytes": "24968"
}
],
"symlink_target": ""
}
|
import json
from datetime import datetime as dt
from operator import itemgetter
import pendulum
from flask_appbuilder.fieldwidgets import (
BS3PasswordFieldWidget,
BS3TextAreaFieldWidget,
BS3TextFieldWidget,
Select2Widget,
)
from flask_appbuilder.forms import DynamicForm
from flask_babel import lazy_gettext
from flask_wtf import FlaskForm
from wtforms import widgets
from wtforms.fields import (
BooleanField,
Field,
IntegerField,
PasswordField,
SelectField,
StringField,
TextAreaField,
)
from wtforms.validators import DataRequired, NumberRange, Optional
from airflow.configuration import conf
from airflow.utils import timezone
from airflow.utils.types import DagRunType
from airflow.www.validators import ValidJson
from airflow.www.widgets import AirflowDateTimePickerWidget
class DateTimeWithTimezoneField(Field):
    """A text field which stores a `datetime.datetime` matching a format."""

    widget = widgets.TextInput()

    def __init__(self, label=None, validators=None, datetime_format='%Y-%m-%d %H:%M:%S%Z', **kwargs):
        super().__init__(label, validators, **kwargs)
        self.format = datetime_format
        self.data = None

    def _value(self):
        """Render the field: raw user input when present, else the formatted value."""
        if self.raw_data:
            return ' '.join(self.raw_data)
        return self.data.strftime(self.format) if self.data else ''

    def process_formdata(self, valuelist):
        """Parse the submitted form data into a timezone-aware datetime."""
        if not valuelist:
            return
        date_str = ' '.join(valuelist)
        try:
            # A 19-character value is 'YYYY-mm-dd HH:MM:SS' with no timezone;
            # localize it to the configured default timezone.
            if len(date_str) == 19:
                naive = dt.strptime(date_str, '%Y-%m-%d %H:%M:%S')
                self.data = self._get_default_timezone().convert(naive)
            else:
                self.data = pendulum.parse(date_str)
        except ValueError:
            self.data = None
            raise ValueError(self.gettext('Not a valid datetime value'))

    def _get_default_timezone(self):
        """Return the pendulum timezone configured as [core] default_timezone."""
        configured = conf.get("core", "default_timezone")
        if configured == "system":
            return pendulum.local_timezone()
        return pendulum.timezone(configured)
class DateTimeForm(FlaskForm):
    """Date filter form needed for task views"""

    # Rendered with the Airflow datetime picker widget.
    execution_date = DateTimeWithTimezoneField("Execution date", widget=AirflowDateTimePickerWidget())
class DateTimeWithNumRunsForm(FlaskForm):
    """
    Date time and number of runs form for tree view, task duration
    and landing times
    """

    # Pass the callable itself (not its result) so WTForms evaluates the
    # default anchor date per form instantiation; calling utcnow() here would
    # freeze the default to the webserver's import time.
    base_date = DateTimeWithTimezoneField(
        "Anchor date", widget=AirflowDateTimePickerWidget(), default=timezone.utcnow
    )
    num_runs = SelectField(
        "Number of runs",
        default=25,
        choices=(
            (5, "5"),
            (25, "25"),
            (50, "50"),
            (100, "100"),
            (365, "365"),
        ),
    )
class DateTimeWithNumRunsWithDagRunsForm(DateTimeWithNumRunsForm):
    """Date time and number of runs and dag runs form for graph and gantt view"""

    # NOTE(review): declared without choices here -- presumably populated by
    # the rendering view; confirm against the graph/gantt view code.
    execution_date = SelectField("DAG run")
class DagRunForm(DynamicForm):
    """Form for editing and adding DAG Run"""

    dag_id = StringField(lazy_gettext('Dag Id'), validators=[DataRequired()], widget=BS3TextFieldWidget())
    start_date = DateTimeWithTimezoneField(lazy_gettext('Start Date'), widget=AirflowDateTimePickerWidget())
    end_date = DateTimeWithTimezoneField(lazy_gettext('End Date'), widget=AirflowDateTimePickerWidget())
    run_id = StringField(lazy_gettext('Run Id'), validators=[DataRequired()], widget=BS3TextFieldWidget())
    state = SelectField(
        lazy_gettext('State'),
        choices=(
            ('success', 'success'),
            ('running', 'running'),
            ('failed', 'failed'),
        ),
        widget=Select2Widget(),
    )
    execution_date = DateTimeWithTimezoneField(
        lazy_gettext('Execution Date'), widget=AirflowDateTimePickerWidget()
    )
    external_trigger = BooleanField(lazy_gettext('External Trigger'))
    # Conf is entered as JSON text and validated before being decoded below.
    conf = TextAreaField(
        lazy_gettext('Conf'), validators=[ValidJson(), Optional()], widget=BS3TextAreaFieldWidget()
    )

    def populate_obj(self, item):
        """Populates the attributes of the passed obj with data from the form’s fields."""
        super().populate_obj(item)  # pylint: disable=no-member
        # run_type is derived from the run_id rather than edited directly.
        item.run_type = DagRunType.from_run_id(item.run_id)
        if item.conf:
            # ValidJson() above guarantees this parse succeeds.
            item.conf = json.loads(item.conf)
# (conn_type value, human-readable label) pairs offered in the connection
# form's "Conn Type" dropdown; ConnectionForm sorts them by label.
_connection_types = [
    ('docker', 'Docker Registry'),
    ('elasticsearch', 'Elasticsearch'),
    ('exasol', 'Exasol'),
    ('facebook_social', 'Facebook Social'),
    ('fs', 'File (path)'),
    ('ftp', 'FTP'),
    ('google_cloud_platform', 'Google Cloud'),
    ('hdfs', 'HDFS'),
    ('http', 'HTTP'),
    ('pig_cli', 'Pig Client Wrapper'),
    ('hive_cli', 'Hive Client Wrapper'),
    ('hive_metastore', 'Hive Metastore Thrift'),
    ('hiveserver2', 'Hive Server 2 Thrift'),
    ('jdbc', 'JDBC Connection'),
    ('odbc', 'ODBC Connection'),
    ('jenkins', 'Jenkins'),
    ('mysql', 'MySQL'),
    ('postgres', 'Postgres'),
    ('oracle', 'Oracle'),
    ('vertica', 'Vertica'),
    ('presto', 'Presto'),
    ('s3', 'S3'),
    ('samba', 'Samba'),
    ('sqlite', 'Sqlite'),
    ('ssh', 'SSH'),
    ('cloudant', 'IBM Cloudant'),
    ('mssql', 'Microsoft SQL Server'),
    ('mesos_framework-id', 'Mesos Framework ID'),
    ('jira', 'JIRA'),
    ('redis', 'Redis'),
    ('wasb', 'Azure Blob Storage'),
    ('databricks', 'Databricks'),
    ('aws', 'Amazon Web Services'),
    ('emr', 'Elastic MapReduce'),
    ('snowflake', 'Snowflake'),
    ('segment', 'Segment'),
    ('sqoop', 'Sqoop'),
    ('azure_batch', 'Azure Batch Service'),
    ('azure_data_lake', 'Azure Data Lake'),
    ('azure_container_instances', 'Azure Container Instances'),
    ('azure_cosmos', 'Azure CosmosDB'),
    ('azure_data_explorer', 'Azure Data Explorer'),
    ('cassandra', 'Cassandra'),
    ('qubole', 'Qubole'),
    ('mongo', 'MongoDB'),
    ('gcpcloudsql', 'Google Cloud SQL'),
    ('grpc', 'GRPC Connection'),
    ('yandexcloud', 'Yandex Cloud'),
    ('livy', 'Apache Livy'),
    ('tableau', 'Tableau'),
    ('kubernetes', 'Kubernetes Cluster Connection'),
    ('spark', 'Spark'),
    ('imap', 'IMAP'),
    ('vault', 'Hashicorp Vault'),
    ('azure', 'Azure'),
]
class ConnectionForm(DynamicForm):
    """Form for editing and adding Connection"""

    conn_id = StringField(lazy_gettext('Conn Id'), widget=BS3TextFieldWidget())
    conn_type = SelectField(
        lazy_gettext('Conn Type'),
        choices=sorted(_connection_types, key=itemgetter(1)),  # pylint: disable=protected-access
        widget=Select2Widget(),
    )
    description = StringField(lazy_gettext('Description'), widget=BS3TextAreaFieldWidget())
    host = StringField(lazy_gettext('Host'), widget=BS3TextFieldWidget())
    schema = StringField(lazy_gettext('Schema'), widget=BS3TextFieldWidget())
    login = StringField(lazy_gettext('Login'), widget=BS3TextFieldWidget())
    password = PasswordField(lazy_gettext('Password'), widget=BS3PasswordFieldWidget())
    port = IntegerField(lazy_gettext('Port'), validators=[Optional()], widget=BS3TextFieldWidget())
    extra = TextAreaField(lazy_gettext('Extra'), widget=BS3TextAreaFieldWidget())

    # Used to customized the form, the forms elements get rendered
    # and results are stored in the extra field as json. All of these
    # need to be prefixed with extra__ and then the conn_type ___ as in
    # extra__{conn_type}__name. You can also hide form elements and rename
    # others from the connection_form.js file

    # JDBC
    extra__jdbc__drv_path = StringField(lazy_gettext('Driver Path'), widget=BS3TextFieldWidget())
    extra__jdbc__drv_clsname = StringField(lazy_gettext('Driver Class'), widget=BS3TextFieldWidget())
    # Google Cloud
    extra__google_cloud_platform__project = StringField(
        lazy_gettext('Project Id'), widget=BS3TextFieldWidget()
    )
    extra__google_cloud_platform__key_path = StringField(
        lazy_gettext('Keyfile Path'), widget=BS3TextFieldWidget()
    )
    extra__google_cloud_platform__keyfile_dict = PasswordField(
        lazy_gettext('Keyfile JSON'), widget=BS3PasswordFieldWidget()
    )
    extra__google_cloud_platform__scope = StringField(
        lazy_gettext('Scopes (comma separated)'), widget=BS3TextFieldWidget()
    )
    extra__google_cloud_platform__num_retries = IntegerField(
        lazy_gettext('Number of Retries'),
        validators=[NumberRange(min=0)],
        widget=BS3TextFieldWidget(),
        default=5,
    )
    # gRPC
    extra__grpc__auth_type = StringField(lazy_gettext('Grpc Auth Type'), widget=BS3TextFieldWidget())
    extra__grpc__credential_pem_file = StringField(
        lazy_gettext('Credential Keyfile Path'), widget=BS3TextFieldWidget()
    )
    extra__grpc__scopes = StringField(lazy_gettext('Scopes (comma separated)'), widget=BS3TextFieldWidget())
    # Yandex Cloud
    extra__yandexcloud__service_account_json = PasswordField(
        lazy_gettext('Service account auth JSON'),
        widget=BS3PasswordFieldWidget(),
        description='Service account auth JSON. Looks like '
        '{"id", "...", "service_account_id": "...", "private_key": "..."}. '
        'Will be used instead of OAuth token and SA JSON file path field if specified.',
    )
    extra__yandexcloud__service_account_json_path = StringField(
        lazy_gettext('Service account auth JSON file path'),
        widget=BS3TextFieldWidget(),
        description='Service account auth JSON file path. File content looks like '
        '{"id", "...", "service_account_id": "...", "private_key": "..."}. '
        'Will be used instead of OAuth token if specified.',
    )
    extra__yandexcloud__oauth = PasswordField(
        lazy_gettext('OAuth Token'),
        widget=BS3PasswordFieldWidget(),
        description='User account OAuth token. Either this or service account JSON must be specified.',
    )
    extra__yandexcloud__folder_id = StringField(
        lazy_gettext('Default folder ID'),
        widget=BS3TextFieldWidget(),
        description='Optional. This folder will be used to create all new clusters and nodes by default',
    )
    extra__yandexcloud__public_ssh_key = StringField(
        lazy_gettext('Public SSH key'),
        widget=BS3TextFieldWidget(),
        description='Optional. This key will be placed to all created Compute nodes'
        'to let you have a root shell there',
    )
    # Kubernetes
    extra__kubernetes__in_cluster = BooleanField(lazy_gettext('In cluster configuration'))
    extra__kubernetes__kube_config_path = StringField(
        lazy_gettext('Kube config path'), widget=BS3TextFieldWidget()
    )
    extra__kubernetes__kube_config = StringField(
        lazy_gettext('Kube config (JSON format)'), widget=BS3TextFieldWidget()
    )
    extra__kubernetes__namespace = StringField(lazy_gettext('Namespace'), widget=BS3TextFieldWidget())
|
{
"content_hash": "8230e34293e18b3c5858e7640bc29862",
"timestamp": "",
"source": "github",
"line_count": 286,
"max_line_length": 108,
"avg_line_length": 38.83916083916084,
"alnum_prop": 0.647731364782139,
"repo_name": "mrkm4ntr/incubator-airflow",
"id": "24850100e534d9a94cd184277a25dea5ba0a0913",
"size": "11898",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "airflow/www/forms.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "22581"
},
{
"name": "Dockerfile",
"bytes": "31475"
},
{
"name": "HCL",
"bytes": "3786"
},
{
"name": "HTML",
"bytes": "221101"
},
{
"name": "JavaScript",
"bytes": "32643"
},
{
"name": "Jupyter Notebook",
"bytes": "2933"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "14407542"
},
{
"name": "Shell",
"bytes": "541811"
}
],
"symlink_target": ""
}
|
"""
Analyzer functions
"""
import sys
import os.path
from copy import deepcopy
# Module-level exploration results; rebound (not just mutated) in explore()
anames = list()  # action names copied from the model program
states = list()  # state's index here is its state number in lists, graph below
initial = 0  # always, keep as well as runstarts (below) for backward compat.
accepting = list()  # state numbers where mp.Accepting() was true
frontier = list()  # unexplored states in graph, may add more transitions
finished = list()  # terminal states that are accepting states
deadend = list()  # terminal states that are not accepting states
runstarts = list()  # initial states of test runs after the first, if any
graph = list()  # a set (no dups) but we want a sequence to see them in order
def explore(mp, maxTransitions):
    """Breadth-first exploration of model program *mp*'s state space.

    Populates the module-level states/graph/accepting/finished/deadend/
    runstarts collections, stopping after *maxTransitions* graph edges.
    Repeats for every run when mp is a TestSuite.
    """
    # some globals may not be needed; code only mutates collection *contents*,
    # as in finished, deadend
    global anames, states, graph, accepting, frontier
    anames = mp.anames
    explored = list()
    fsm = list()  # list of transitions with mp states not state numbers
    more_runs = True  # TestSuite might have multiple runs
    while more_runs:
        initialState = mp.Current()
        frontier.append(initialState)
        states.append(initialState)  # includes initial state even if no trans'ns
        iInitial = states.index(initialState)  # might already be there
        runstarts.append(iInitial)
        if mp.Accepting():  # initial state might be accepting even if no trans'ns
            accepting.append(iInitial)
        while frontier:
            if len(graph) == maxTransitions:
                break
            current = frontier[0]  # head, keep in mind current might lead nowhere
            frontier = frontier[1:]  # tail
            icurrent = states.index(current)  # might already be there
            #print 'current %s' % current # DEBUG
            #print ' frontier %s' % frontier # DEBUG
            explored.append(current)  # states we checked, some might lead nowhere
            mp.Restore(deepcopy(current))  # assign state in mp, need deepcopy here
            transitions = mp.EnabledTransitions(list())  # all actions, not cleanup
            if not transitions:  # terminal state, no enabled transitions
                if icurrent in accepting:
                    finished.append(icurrent)
                else:
                    deadend.append(icurrent)
            # print 'current %s, transitions %s' % (current, transitions) # DEBUG
            for (aname, args, result, next, next_properties) in transitions:
                # EnabledTransitions doesn't return transitions where not statefilter
                # if next_properties['statefilter']:
                if len(graph) < maxTransitions:
                    if next not in explored and next not in frontier:
                        # append for breadth-first, push on head for depth-first
                        frontier.append(next)  # frontier contents are already copies
                    transition = (current, (aname, args, result), next)
                    if transition not in fsm:
                        fsm.append(transition)
                        if current not in states:
                            states.append(current)
                        if next not in states:
                            states.append(next)  # next might never be in explored
                        # icurrent = states.index(current) # might already be there
                        inext = states.index(next)  # ditto
                        graph.append((icurrent, (aname,args,result), inext))  #tuple
                        if mp.Accepting() and icurrent not in accepting:
                            accepting.append(icurrent)
                        if next_properties['accepting'] and inext not in accepting:
                            accepting.append(inext)
                        # TK likewise unsafe states, dead states
                else:  # found transition that will not be included in graph
                    frontier.insert(0,current)  # not completely explored after all
                    # explored.remove(current) # not necessary
                    break
                # end if < ntransitions else ...
            # end for transitions
        # end while frontier
        # continue exploring test suite with multiple runs
        more_runs = False
        if mp.TestSuite:
            try:
                mp.Reset()
                more_runs = True
            except StopIteration:  # raised by TestSuite Reset after last run
                pass  # no more runs, we're done
    # end while more_runs
def actiondef(aname):
    """Return source text for a stub action: a no-op function bearing the name."""
    return 'def {0}(): pass'.format(aname)
def state(i, state):
    """Return one 'index : state,' dict-entry line for the generated module."""
    return '{0} : {1},'.format(i, state)
def initial_state():
    """Return the 'initial = N' line (the initial state index, all FSMs)."""
    return 'initial = ' + str(initial)
def runstarts_states():
    """Return the 'runstarts = [...]' line: initial states of later test runs."""
    return 'runstarts = ' + str(runstarts)
def accepting_states():
    """Return the 'accepting = [...]' line listing accepting state indices."""
    return 'accepting = ' + str(accepting)
def frontier_states():
    """Return the frontier line, mapping each unexplored state to its index."""
    return 'frontier = %s' % list(map(states.index, frontier))
def finished_states():
    """Return the 'finished = [...]' line: accepting terminal states."""
    return 'finished = ' + str(finished)
def deadend_states():
    """Return the 'deadend = [...]' line: non-accepting terminal states."""
    return 'deadend = ' + str(deadend)
def quote_string(x):
    """Render x for the generated module: tuples and non-strings verbatim,
    plain strings wrapped in single quotes.  (Same helper also appears in Dot.)
    """
    if isinstance(x, tuple):
        return str(x)
    if isinstance(x, str):
        return "'%s'" % x
    return "%s" % x
def transition(t):
    """Format one graph edge (current, (action, args, result), next) as source
    text: the action name is written bare, the result via quote_string."""
    # return '%s' % (t,) # simple but returns quotes around action name
    source, label, target = t
    aname, args, result = label
    return '(%s, (%s, %s, %s), %s)' % (source, aname, args,
                                       quote_string(result), target)
def save(name):
    """Write the analysis results to <name>.py as an importable FSM module.

    The generated module contains stub action functions, the states dict,
    the initial/accepting/frontier/finished/deadend/runstarts lines, and the
    graph tuple of transitions.  Reads the module-level globals populated by
    explore().

    Bug fix: the output file is now opened with a context manager, so the
    handle is closed (and buffers flushed) even if a write raises.
    """
    with open("%s.py" % name, 'w') as f:
        f.write('\n# %s' % os.path.basename(sys.argv[0])) # echo command line ...
        f.write(' %s\n' % ' '.join(['%s' % arg for arg in sys.argv[1:]])) # ...etc.
        f.write('# %s states, %s transitions, %s accepting states, %s finished and %s deadend states\n' % \
                (len(states),len(graph),len(accepting),len(finished),len(deadend)))
        f.write('\n# actions here are just labels, but must be symbols with __name__ attribute\n\n')
        f.writelines([ actiondef(aname)+'\n' for aname in anames ])
        f.write('\n# states, key of each state here is its number in graph etc. below\n\n')
        f.write('states = {\n')
        for i,s in enumerate(states):
            f.write(' %s\n' % state(i,s))
        f.write('}\n')
        f.write('\n# initial state, accepting states, frontier states, deadend states\n\n')
        f.write('%s\n' % initial_state())
        f.write('%s\n' % accepting_states())
        f.write('%s\n' % frontier_states())
        f.write('%s\n' % finished_states())
        f.write('%s\n' % deadend_states())
        f.write('%s\n' % runstarts_states())
        f.write('\n# finite state machine, list of tuples: (current, (action, args, result), next)\n\n')
        f.write('graph = (\n')
        f.writelines([ ' %s,\n' % transition(t) for t in graph ])
        f.write(')\n')
|
{
"content_hash": "836041c16d98ce4c625d33fbde07d608",
"timestamp": "",
"source": "github",
"line_count": 160,
"max_line_length": 103,
"avg_line_length": 41.50625,
"alnum_prop": 0.6063845806354464,
"repo_name": "nfredrik/pyModelStuff",
"id": "87865c8548a033bf4533b9a53faf98b512c32e9c",
"size": "6641",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pymodel/Analyzer.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Perl",
"bytes": "107"
},
{
"name": "Python",
"bytes": "64694"
},
{
"name": "Ruby",
"bytes": "128"
}
],
"symlink_target": ""
}
|
"""
Onshape REST API
The Onshape REST API consumed by all clients. # noqa: E501
The version of the OpenAPI document: 1.113
Contact: api-support@onshape.zendesk.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
import sys # noqa: F401
import six # noqa: F401
import nulltype # noqa: F401
from onshape_client.oas.model_utils import ( # noqa: F401
ModelComposed,
ModelNormal,
ModelSimple,
date,
datetime,
file_type,
int,
none_type,
str,
validate_get_composed_info,
)
class BTTableAssemblyCrossHighlightDataItem2659(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # No enum-constrained properties on this model.
    allowed_values = {}

    # No length/range/regex validations on this model.
    validations = {}

    # None: unknown keys are not accepted as additional properties.
    additional_properties_type = None

    @staticmethod
    def openapi_types():
        """
        This must be a class method so a model may have properties that are
        of type self, this ensures that we don't create a cyclic import

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            "bt_type": (str,),  # noqa: E501
            "occurrence_path_id": (str,),  # noqa: E501
        }

    @staticmethod
    def discriminator():
        # This model is not polymorphic, so there is no discriminator field.
        return None

    # python attribute name -> JSON key used in the API payload
    attribute_map = {
        "bt_type": "btType",  # noqa: E501
        "occurrence_path_id": "occurrencePathId",  # noqa: E501
    }

    @staticmethod
    def _composed_schemas():
        # Not composed of allOf/oneOf/anyOf schemas.
        return None

    # Internal bookkeeping attributes assigned in __init__ (never serialized).
    required_properties = set(
        [
            "_data_store",
            "_check_type",
            "_from_server",
            "_path_to_item",
            "_configuration",
        ]
    )

    def __init__(
        self,
        _check_type=True,
        _from_server=False,
        _path_to_item=(),
        _configuration=None,
        **kwargs
    ):  # noqa: E501
        """bt_table_assembly_cross_highlight_data_item2659.BTTableAssemblyCrossHighlightDataItem2659 - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _from_server (bool): True if the data is from the server
                                False if the data is from the client (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            bt_type (str): [optional] # noqa: E501
            occurrence_path_id (str): [optional] # noqa: E501
        """
        self._data_store = {}
        self._check_type = _check_type
        self._from_server = _from_server
        self._path_to_item = _path_to_item
        self._configuration = _configuration

        # Assign model properties supplied as keyword args; optionally drop
        # keys unknown to attribute_map when the configuration says to.
        for var_name, var_value in six.iteritems(kwargs):
            if (
                var_name not in self.attribute_map
                and self._configuration is not None
                and self._configuration.discard_unknown_keys
                and self.additional_properties_type is None
            ):
                # discard variable.
                continue
            setattr(self, var_name, var_value)
|
{
"content_hash": "a29c3d888d9947512fc8500a15af6750",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 129,
"avg_line_length": 33.458333333333336,
"alnum_prop": 0.5803237858032378,
"repo_name": "onshape-public/onshape-clients",
"id": "5bee1404de6eea35cae736a8a9b7f1aaadb03f2b",
"size": "4835",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/onshape_client/oas/models/bt_table_assembly_cross_highlight_data_item2659.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4873"
},
{
"name": "Go",
"bytes": "59674"
},
{
"name": "HTML",
"bytes": "3851790"
},
{
"name": "JavaScript",
"bytes": "2217"
},
{
"name": "Makefile",
"bytes": "559"
},
{
"name": "Python",
"bytes": "7560009"
},
{
"name": "Shell",
"bytes": "3475"
},
{
"name": "TypeScript",
"bytes": "1412661"
}
],
"symlink_target": ""
}
|
import pickle
import types
import sys
import StringIO
import warnings
from copygenerators import GeneratorSnapshot, copy_generator, Generatorcopy
# Emit every PendingDeprecationWarning (normally shown only once per location)
# so callers of the deprecated 0.4-era API are warned on each use.
warnings.simplefilter("always", PendingDeprecationWarning)

# Public API of this module.
__all__ = ["dump",
           "pickle_generator",
           "load",
           "unpickle_generator",
           "Pickler",
           "GeneratorPickler",
           "Unpickler",
           "GeneratorUnpickler",
           "dumps",
           "loads"]

# Re-exported for drop-in parity with the stdlib pickle module.
HIGHEST_PROTOCOL = pickle.HIGHEST_PROTOCOL
def dump(obj, file, protocol=0):
    """Pickle obj (generators included) to the given file-like object."""
    pickler = Pickler(file, protocol)
    pickler.dump(obj)
def dumps(obj, protocol=0):
    """Return the pickled representation of obj as a string."""
    buf = StringIO.StringIO()
    Pickler(buf, protocol).dump(obj)
    return buf.getvalue()
def pickle_generator(f_gen, filelike):
    """Deprecated 0.4-era alias for dump(); warns, then delegates."""
    # warn_explicit is pointed at the line of the call below.
    warn_line = sys._getframe(0).f_lineno + 1
    warnings.warn_explicit("pickle_generator not supported in generator_tools 0.5 - use dump instead!\n" , PendingDeprecationWarning, "picklegenerators.py", warn_line)
    dump(f_gen, filelike)
def load(file):
    """Unpickle (and resume) one object from the given file-like object."""
    unpickler = Unpickler(file)
    return unpickler.load()
def loads(str):
    """Unpickle one object from a pickle string."""
    buf = StringIO.StringIO(str)
    return Unpickler(buf).load()
def unpickle_generator(filelike):
    """Deprecated 0.4-era alias for load(); warns, then delegates."""
    # warn_explicit is pointed at the line of the call below.
    warn_line = sys._getframe(0).f_lineno + 1
    warnings.warn_explicit("unpickle_generator not supported in generator_tools 0.5 - use load instead!\n" , PendingDeprecationWarning, "picklegenerators.py", warn_line)
    return load(filelike)
class SnapshotEnvelope(object):
    """Wrapper used to defeat the base pickler's internal memoization.

    If one GeneratorSnapshot were passed to the pickler for every occurrence
    of the same generator G, the pickler would memoize it and load_build would
    fire only once on unpickling, so only the first occurrence would become a
    live generator copy.  Instead we memoize the snapshot ourselves and hand
    the pickler a *distinct* envelope per occurrence: each looks like a new
    object, load_build runs for each, and each can emit a generator copy.
    """

    def __init__(self, obj):
        if isinstance(obj, SnapshotEnvelope):
            # Unwrap nested envelopes so we always hold the raw snapshot.
            self.obj = obj.obj
            return
        # Mark the snapshot so Unpickler.load_build knows it arrived wrapped.
        obj._uses_envelope = True
        self.obj = obj
class Pickler(pickle.Pickler, object):
    """Pickler that can also serialize generator objects.

    Generators are snapshotted (one GeneratorSnapshot per generator) and each
    occurrence is wrapped in a fresh SnapshotEnvelope before being handed to
    the base pickler -- see SnapshotEnvelope for why.
    """

    def __init__(self, file, protocol=0):
        # Legacy 0.4 API accepted a file *name*; still honoured but deprecated.
        if isinstance(file, (str, unicode)):  # NOTE: Python 2 only (`unicode`)
            lineno = sys._getframe(0).f_lineno +1
            warnings.warn_explicit("string argument not supported in generator_tools 0.5 - use filelike instead!\n" , PendingDeprecationWarning, "picklegenerators.py", lineno)
            file = open(file, "wb")
        super(Pickler, self).__init__(file, protocol)
        # generator object -> its GeneratorSnapshot (one snapshot per generator)
        self._pickled_generators_memo = {}

    def save_generator(self, obj):
        # Reuse the snapshot for repeated occurrences of the same generator,
        # but wrap it in a fresh SnapshotEnvelope each time so the base
        # pickler's memoization does not collapse the occurrences.
        snapshot = self._pickled_generators_memo.get(obj)
        if snapshot is None:
            snapshot = GeneratorSnapshot(obj)
            self._pickled_generators_memo[obj] = snapshot
        super(Pickler, self).save(SnapshotEnvelope(snapshot))

    # Register the handler in the (Python 2) pure-python pickler's dispatch
    # table so generator objects and generator copies route to save_generator.
    pickle.Pickler.dispatch[types.GeneratorType] = save_generator
    pickle.Pickler.dispatch[Generatorcopy] = save_generator

    def pickle_generator(self, f_gen):
        # Deprecated 0.4-era entry point; kept for backward compatibility.
        lineno = sys._getframe(0).f_lineno +1
        warnings.warn_explicit("method pickle_generator deprecated in generator_tools 0.5 - use dump() instead!\n" , PendingDeprecationWarning, "picklegenerators.py", lineno)
        self.dump(f_gen)
class GeneratorPickler(Pickler):
    """Deprecated 0.4-era name for Pickler; warns on construction."""

    def __init__(self, filelike, protocol=None):
        # warn_explicit is pointed at the line of the call below.
        warn_line = sys._getframe(0).f_lineno + 1
        warnings.warn_explicit("class GeneratorPickler deprecated in generator_tools 0.5 - use Pickler() instead!\n" , PendingDeprecationWarning, "picklegenerators.py", warn_line)
        super(GeneratorPickler, self).__init__(filelike, protocol)
class Unpickler(pickle.Unpickler, object):
    """Unpickler that restores generators pickled by the Pickler above."""

    def __init__(self, file):
        # Legacy 0.4 API accepted a file *name*; still honoured but deprecated.
        if isinstance(file, (str, unicode)):  # NOTE: Python 2 only (`unicode`)
            lineno = sys._getframe(0).f_lineno +1
            warnings.warn_explicit("string argument not supported in generator_tools 0.5 - use filelike instead!\n" , PendingDeprecationWarning, "picklegenerators.py", lineno)
            file = open(file, "rb")
        super(Unpickler, self).__init__(file)
        # id(snapshot) -> generator copy, so every occurrence of the same
        # snapshot resolves to the same resumed generator.
        self._unpickled_generators_memo = {}

    def load_build(self):
        # Runs for every BUILD opcode; afterwards the freshly built object is
        # on top of self.stack and can be replaced in place.
        super(Unpickler, self).load_build()
        if type(self.stack[-1]) == SnapshotEnvelope:
            obj = self.stack[-1]
            gencopy = self._unpickled_generators_memo.get(id(obj.obj))
            if gencopy is None:
                gencopy = copy_generator(obj.obj, copy_filter = lambda loc: True)
                self._unpickled_generators_memo[id(obj.obj)] = gencopy
            self.stack[-1] = gencopy
        elif type(self.stack[-1]) == GeneratorSnapshot:
            obj = self.stack[-1]
            # Snapshot pickled without an envelope (e.g. nested inside another
            # object): turn it into a live generator copy directly.
            if not obj._uses_envelope:
                self.stack[-1] = copy_generator(obj, copy_filter = lambda loc: True)

    # Route the BUILD opcode through our hook (Python 2 pure-python pickler).
    pickle.Unpickler.dispatch[pickle.BUILD] = load_build

    def unpickle_generator(self):
        # Deprecated 0.4-era entry point; kept for backward compatibility.
        lineno = sys._getframe(0).f_lineno +1
        warnings.warn_explicit("method upickle_generator deprecated in generator_tools 0.5 - use load() instead!\n" , PendingDeprecationWarning, "picklegenerators.py", lineno)
        return self.load()
class GeneratorUnpickler(Unpickler):
    """Deprecated 0.4-era name for Unpickler; warns on construction.

    Bug fix: the old code forwarded `protocol` to Unpickler.__init__, which
    accepts only the file argument, so every instantiation raised TypeError.
    The `protocol` parameter is kept (and ignored) for backward compatibility.
    """

    def __init__(self, filelike, protocol = None):
        lineno = sys._getframe(0).f_lineno +1
        warnings.warn_explicit("class GeneratorUnpickler deprecated in generator_tools 0.5 - use Unpickler() instead!\n" , PendingDeprecationWarning, "picklegenerators.py", lineno)
        super(GeneratorUnpickler, self).__init__(filelike)
|
{
"content_hash": "6161ae2a55478d3d5397945ce1495545",
"timestamp": "",
"source": "github",
"line_count": 147,
"max_line_length": 180,
"avg_line_length": 40.95918367346939,
"alnum_prop": 0.6641753861484804,
"repo_name": "tonyroberts/generator_tools",
"id": "15019fd5f2c5275ae2700c089106ad8bb53e10ad",
"size": "6021",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "generator_tools/picklegenerators.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "87173"
},
{
"name": "Python",
"bytes": "79972"
}
],
"symlink_target": ""
}
|
from django import forms
from app.libs.utils import make_api_key
# api form base
class ApiForm(forms.Form):
    """Base form shared by the API endpoints: validates client, key, version."""

    client = forms.RegexField('^[A-Za-z0-9_]+$')
    key = forms.CharField()
    version = forms.RegexField('^1$')

    def clean_key(self):
        """Reject requests whose key does not match the one derived from client."""
        data = self.cleaned_data
        expected = make_api_key(data['client'])
        submitted = data['key']
        if submitted != expected:
            raise forms.ValidationError(u"invalid key")
        return submitted
class RecentAppForm(ApiForm):
    """Form for the recent-apps API: optional paging plus a required platform."""

    offset = forms.IntegerField(min_value=0, max_value=1000, initial=0,
                                required=False)
    count = forms.IntegerField(min_value=1, max_value=50, initial=10,
                               required=False)
    platform = forms.IntegerField(required=True)

    def clean_platform(self):
        """Platform id 0 is invalid; any other integer passes through."""
        value = self.cleaned_data['platform']
        if value == 0:
            raise forms.ValidationError(u"invalid platform")
        return value
class DetailAppForm(ApiForm):
    # No extra fields: detail lookups only need the base client/key/version.
    pass
# Validation rule for JSONP callback parameters
class callbackform(forms.Form):
    # Restrict callback names to short, safe identifiers.
    callback = forms.RegexField('^[a-zA-Z0-9_-]{1,20}$')
|
{
"content_hash": "21d0db79cfd7412c93aaf9631d401c3f",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 60,
"avg_line_length": 26.829787234042552,
"alnum_prop": 0.56304520222046,
"repo_name": "pistatium/houkago_app",
"id": "ca1383ad261ed499bd2a2ff0d145a9dc337aa78f",
"size": "1311",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server_appengine/app/forms/apiform.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13208"
},
{
"name": "HTML",
"bytes": "75736"
},
{
"name": "Java",
"bytes": "44743"
},
{
"name": "JavaScript",
"bytes": "14212"
},
{
"name": "Python",
"bytes": "84510"
},
{
"name": "Shell",
"bytes": "1081"
}
],
"symlink_target": ""
}
|
"""Stack implementation in Python."""
from src.linked_list import LinkedList
class Stack(object):
    """Implementation of Stack.

    public methods:

    push(value) - Adds a value to the stack.
    The parameter is the value to be added to the stack.

    pop() - Removes a value from the stack and returns that value.
    If the stack is empty, attempts to call pop should raise an exception.
    """

    def __init__(self, data=None):
        """Initialize the stack, optionally seeding it from *data*."""
        self._stack = LinkedList(data)

    def push(self, val):
        """Add val to the stack."""
        self._stack.push(val)

    def pop(self):
        """Remove the top item off the stack and return it.

        Bug fix: the popped value is now returned, as the class docstring
        promises; previously it was computed and silently discarded.
        """
        return self._stack.pop()
|
{
"content_hash": "d2343eb20b9dd887a8937feffb21a470",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 74,
"avg_line_length": 24.714285714285715,
"alnum_prop": 0.6184971098265896,
"repo_name": "clair3st/Data-Structures",
"id": "38ee943b3be650c4cd2f320ff5fab768503c598e",
"size": "692",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/stack.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "59644"
}
],
"symlink_target": ""
}
|
"""The command to restore a backup of a Cloud SDK installation."""
from googlecloudsdk.calliope import base
class Restore(base.SilentCommand):
  """Restore the Cloud SDK installation to its previous state.

  This is an undo operation, which restores the Cloud SDK installation on the
  local workstation to the state it was in just before the most recent
  `{parent_command} update` or `{parent_command} remove` command. Only the
  state before the most recent such state is remembered, so it is impossible
  to restore the state that existed before the two most recent `update`
  commands, for example. A `restore` command does not undo a previous `restore`
  command.
  """

  @staticmethod
  def Args(_):
    # The restore command takes no flags or positional arguments.
    pass

  def Run(self, unused_args):
    """Runs the restore command."""
    # Delegate to the component update manager, which swaps the backup back in.
    self.group.update_manager.Restore()
|
{
"content_hash": "a266c571567b28e0d75c206ef52bae81",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 79,
"avg_line_length": 34.375,
"alnum_prop": 0.7369696969696969,
"repo_name": "flgiordano/netcash",
"id": "b7b47f436e3fcf1ae24a2e747bdb89306b795dc7",
"size": "1421",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "+/google-cloud-sdk/lib/surface/components/restore.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "622"
},
{
"name": "HTML",
"bytes": "33831"
},
{
"name": "JavaScript",
"bytes": "13859"
},
{
"name": "Shell",
"bytes": "2716"
}
],
"symlink_target": ""
}
|
"""Definition of targets run distribution package tests."""
import os.path
import sys
sys.path.insert(0, os.path.abspath('..'))
import python_utils.jobset as jobset
def create_docker_jobspec(name, dockerfile_dir, shell_command, environ=None,
                          flake_retries=0, timeout_retries=0):
    """Creates jobspec for a task running under docker.

    Args:
      name: short name; becomes the job shortname 'distribtest.<name>'.
      dockerfile_dir: directory containing the Dockerfile to build and run.
      shell_command: command executed inside the container (RUN_COMMAND).
      environ: optional dict of extra environment variables passed via -e.
      flake_retries: retry count for flaky failures, forwarded to JobSpec.
      timeout_retries: retry count for timeouts, forwarded to JobSpec.

    Returns:
      A jobset.JobSpec that builds the docker image and runs shell_command.
    """
    # Bug fix: the old signature used the mutable default `environ={}`;
    # use a None sentinel and copy whatever the caller supplied instead.
    environ = dict(environ) if environ is not None else {}
    environ['RUN_COMMAND'] = shell_command
    environ['RELATIVE_COPY_PATH'] = 'test/distrib'
    docker_args = []
    for k, v in environ.items():
        docker_args += ['-e', '%s=%s' % (k, v)]
    docker_env = {'DOCKERFILE_DIR': dockerfile_dir,
                  'DOCKER_RUN_SCRIPT': 'tools/run_tests/dockerize/docker_run.sh'}
    jobspec = jobset.JobSpec(
        cmdline=['tools/run_tests/dockerize/build_and_run_docker.sh'] + docker_args,
        environ=docker_env,
        shortname='distribtest.%s' % (name),
        timeout_seconds=30*60,
        flake_retries=flake_retries,
        timeout_retries=timeout_retries)
    return jobspec
def create_jobspec(name, cmdline, environ=None, shell=False,
                   flake_retries=0, timeout_retries=0):
    """Creates jobspec for a task running directly on the host (no docker)."""
    return jobset.JobSpec(
        cmdline=cmdline,
        environ=environ,
        shortname='distribtest.%s' % (name),
        timeout_seconds=10*60,
        flake_retries=flake_retries,
        timeout_retries=timeout_retries,
        shell=shell)
class CSharpDistribTest(object):
    """Distribution test for the C# NuGet package."""

    def __init__(self, platform, arch, docker_suffix=None, use_dotnet_cli=False):
        self.platform = platform
        self.arch = arch
        self.docker_suffix = docker_suffix
        name_parts = ['csharp_nuget', platform, arch]
        self.labels = ['distribtest', 'csharp', platform, arch]
        self.script_suffix = ''
        if docker_suffix:
            name_parts.append(docker_suffix)
            self.labels.append(docker_suffix)
        if use_dotnet_cli:
            name_parts.append('dotnetcli')
            self.script_suffix = '_dotnetcli'
            self.labels.append('dotnetcli')
        else:
            self.labels.append('olddotnet')
        self.name = '_'.join(name_parts)

    def pre_build_jobspecs(self):
        # No prerequisite jobs.
        return []

    def build_jobspec(self):
        script = 'test/distrib/csharp/run_distrib_test%s.sh' % self.script_suffix
        if self.platform == 'linux':
            dockerfile = 'tools/dockerfile/distribtest/csharp_%s_%s' % (
                self.docker_suffix, self.arch)
            return create_docker_jobspec(self.name, dockerfile, script)
        if self.platform == 'macos':
            return create_jobspec(self.name, [script],
                                  environ={'EXTERNAL_GIT_ROOT': '../../..'})
        if self.platform == 'windows':
            if self.arch == 'x64':
                environ = {'MSBUILD_EXTRA_ARGS': '/p:Platform=x64',
                           'DISTRIBTEST_OUTPATH': 'DistribTest\\bin\\x64\\Debug'}
            else:
                environ = {'DISTRIBTEST_OUTPATH': 'DistribTest\\bin\\\Debug'}
            return create_jobspec(
                self.name,
                ['test\\distrib\\csharp\\run_distrib_test%s.bat' % self.script_suffix],
                environ=environ)
        raise Exception("Not supported yet.")

    def __str__(self):
        return self.name
class NodeDistribTest(object):
    """Distribution test for the Node npm package."""

    def __init__(self, platform, arch, docker_suffix, node_version):
        self.platform = platform
        self.arch = arch
        self.node_version = node_version
        self.name = 'node_npm_%s_%s_%s' % (platform, arch, node_version)
        self.labels = ['distribtest', 'node', platform, arch,
                       'node-%s' % node_version]
        if docker_suffix is not None:
            # NOTE: the docker_suffix attribute is deliberately set only when
            # a suffix is given (mirrors the original behavior).
            self.docker_suffix = docker_suffix
            self.name += '_%s' % docker_suffix
            self.labels.append(docker_suffix)

    def pre_build_jobspecs(self):
        # No prerequisite jobs.
        return []

    def build_jobspec(self):
        if self.platform == 'linux':
            # 32-bit runs go through the linux32 personality wrapper.
            prefix = 'linux32' if self.arch == 'x86' else ''
            dockerfile = 'tools/dockerfile/distribtest/node_%s_%s' % (
                self.docker_suffix, self.arch)
            command = '%s test/distrib/node/run_distrib_test.sh %s' % (
                prefix, self.node_version)
            return create_docker_jobspec(self.name, dockerfile, command)
        if self.platform == 'macos':
            return create_jobspec(self.name,
                                  ['test/distrib/node/run_distrib_test.sh',
                                   str(self.node_version)],
                                  environ={'EXTERNAL_GIT_ROOT': '../../..'})
        raise Exception("Not supported yet.")

    def __str__(self):
        return self.name
class PythonDistribTest(object):
    """Distribution test for the Python package."""

    def __init__(self, platform, arch, docker_suffix):
        self.platform = platform
        self.arch = arch
        self.docker_suffix = docker_suffix
        self.name = 'python_%s_%s_%s' % (platform, arch, docker_suffix)
        self.labels = ['distribtest', 'python', platform, arch, docker_suffix]

    def pre_build_jobspecs(self):
        # No prerequisite jobs.
        return []

    def build_jobspec(self):
        # Only dockerized linux runs are implemented.
        if self.platform != 'linux':
            raise Exception("Not supported yet.")
        dockerfile = 'tools/dockerfile/distribtest/python_%s_%s' % (
            self.docker_suffix, self.arch)
        return create_docker_jobspec(self.name, dockerfile,
                                     'test/distrib/python/run_distrib_test.sh')

    def __str__(self):
        return self.name
class RubyDistribTest(object):
    """Distribution test for the Ruby gem."""

    def __init__(self, platform, arch, docker_suffix):
        self.platform = platform
        self.arch = arch
        self.docker_suffix = docker_suffix
        self.name = 'ruby_%s_%s_%s' % (platform, arch, docker_suffix)
        self.labels = ['distribtest', 'ruby', platform, arch, docker_suffix]

    def pre_build_jobspecs(self):
        # No prerequisite jobs.
        return []

    def build_jobspec(self):
        # Only dockerized linux runs are implemented.
        if self.platform != 'linux':
            raise Exception("Not supported yet.")
        dockerfile = 'tools/dockerfile/distribtest/ruby_%s_%s' % (
            self.docker_suffix, self.arch)
        return create_docker_jobspec(self.name, dockerfile,
                                     'test/distrib/ruby/run_distrib_test.sh')

    def __str__(self):
        return self.name
class PHPDistribTest(object):
    """Distribution test for the PHP package."""

    def __init__(self, platform, arch, docker_suffix=None):
        self.platform = platform
        self.arch = arch
        self.docker_suffix = docker_suffix
        self.name = 'php_%s_%s_%s' % (platform, arch, docker_suffix)
        self.labels = ['distribtest', 'php', platform, arch, docker_suffix]

    def pre_build_jobspecs(self):
        # No prerequisite jobs.
        return []

    def build_jobspec(self):
        script = 'test/distrib/php/run_distrib_test.sh'
        if self.platform == 'linux':
            dockerfile = 'tools/dockerfile/distribtest/php_%s_%s' % (
                self.docker_suffix, self.arch)
            return create_docker_jobspec(self.name, dockerfile, script)
        if self.platform == 'macos':
            return create_jobspec(self.name, [script],
                                  environ={'EXTERNAL_GIT_ROOT': '../../..'})
        raise Exception("Not supported yet.")

    def __str__(self):
        return self.name
class CppDistribTest(object):
    """Distribution test: verifies C++ `make install` by building examples."""

    def __init__(self, platform, arch, docker_suffix=None):
        self.platform = platform
        self.arch = arch
        self.docker_suffix = docker_suffix
        self.name = 'cpp_%s_%s_%s' % (platform, arch, docker_suffix)
        self.labels = ['distribtest', 'cpp', platform, arch, docker_suffix]

    def pre_build_jobspecs(self):
        # No prerequisite jobs.
        return []

    def build_jobspec(self):
        # Only dockerized linux runs are implemented.
        if self.platform != 'linux':
            raise Exception("Not supported yet.")
        dockerfile = 'tools/dockerfile/distribtest/cpp_%s_%s' % (
            self.docker_suffix, self.arch)
        return create_docker_jobspec(self.name, dockerfile,
                                     'test/distrib/cpp/run_distrib_test.sh')

    def __str__(self):
        return self.name
def targets():
    """Gets list of supported targets"""
    # The linux Node targets form a full distro x version matrix.
    node_linux_targets = [
        NodeDistribTest('linux', 'x64', distro, node_version)
        for distro in ('wheezy', 'jessie', 'ubuntu1204', 'ubuntu1404',
                       'ubuntu1504', 'ubuntu1510', 'ubuntu1604')
        for node_version in ('0.12', '3', '4', '5')
    ]
    explicit_targets = [
        CppDistribTest('linux', 'x64', 'jessie'),
        CSharpDistribTest('linux', 'x64', 'wheezy'),
        CSharpDistribTest('linux', 'x64', 'jessie'),
        CSharpDistribTest('linux', 'x86', 'jessie'),
        CSharpDistribTest('linux', 'x64', 'centos7'),
        CSharpDistribTest('linux', 'x64', 'ubuntu1404'),
        CSharpDistribTest('linux', 'x64', 'ubuntu1504'),
        CSharpDistribTest('linux', 'x64', 'ubuntu1510'),
        CSharpDistribTest('linux', 'x64', 'ubuntu1604'),
        CSharpDistribTest('linux', 'x64', 'ubuntu1404', use_dotnet_cli=True),
        CSharpDistribTest('macos', 'x86'),
        CSharpDistribTest('windows', 'x86'),
        CSharpDistribTest('windows', 'x64'),
        PythonDistribTest('linux', 'x64', 'wheezy'),
        PythonDistribTest('linux', 'x64', 'jessie'),
        PythonDistribTest('linux', 'x86', 'jessie'),
        PythonDistribTest('linux', 'x64', 'centos6'),
        PythonDistribTest('linux', 'x64', 'centos7'),
        PythonDistribTest('linux', 'x64', 'fedora20'),
        PythonDistribTest('linux', 'x64', 'fedora21'),
        PythonDistribTest('linux', 'x64', 'fedora22'),
        PythonDistribTest('linux', 'x64', 'fedora23'),
        PythonDistribTest('linux', 'x64', 'opensuse'),
        PythonDistribTest('linux', 'x64', 'arch'),
        PythonDistribTest('linux', 'x64', 'ubuntu1204'),
        PythonDistribTest('linux', 'x64', 'ubuntu1404'),
        PythonDistribTest('linux', 'x64', 'ubuntu1504'),
        PythonDistribTest('linux', 'x64', 'ubuntu1510'),
        PythonDistribTest('linux', 'x64', 'ubuntu1604'),
        RubyDistribTest('linux', 'x64', 'wheezy'),
        RubyDistribTest('linux', 'x64', 'jessie'),
        RubyDistribTest('linux', 'x86', 'jessie'),
        RubyDistribTest('linux', 'x64', 'centos6'),
        RubyDistribTest('linux', 'x64', 'centos7'),
        RubyDistribTest('linux', 'x64', 'fedora20'),
        RubyDistribTest('linux', 'x64', 'fedora21'),
        RubyDistribTest('linux', 'x64', 'fedora22'),
        RubyDistribTest('linux', 'x64', 'fedora23'),
        RubyDistribTest('linux', 'x64', 'opensuse'),
        RubyDistribTest('linux', 'x64', 'ubuntu1204'),
        RubyDistribTest('linux', 'x64', 'ubuntu1404'),
        RubyDistribTest('linux', 'x64', 'ubuntu1504'),
        RubyDistribTest('linux', 'x64', 'ubuntu1510'),
        RubyDistribTest('linux', 'x64', 'ubuntu1604'),
        NodeDistribTest('macos', 'x64', None, '4'),
        NodeDistribTest('macos', 'x64', None, '5'),
        NodeDistribTest('linux', 'x86', 'jessie', '4'),
        PHPDistribTest('linux', 'x64', 'jessie'),
        PHPDistribTest('macos', 'x64'),
    ]
    return explicit_targets + node_linux_targets
|
{
"content_hash": "a409c29b7a0bd2534a4cf571fa6a12d8",
"timestamp": "",
"source": "github",
"line_count": 305,
"max_line_length": 86,
"avg_line_length": 36.73770491803279,
"alnum_prop": 0.5759036144578313,
"repo_name": "royalharsh/grpc",
"id": "90bbde83cf4f3ca80a39a56b86b3c35ef7e120f8",
"size": "12756",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "tools/run_tests/artifacts/distribtest_targets.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "23184"
},
{
"name": "C",
"bytes": "6754338"
},
{
"name": "C#",
"bytes": "1529121"
},
{
"name": "C++",
"bytes": "2034959"
},
{
"name": "CMake",
"bytes": "407656"
},
{
"name": "DTrace",
"bytes": "147"
},
{
"name": "JavaScript",
"bytes": "323192"
},
{
"name": "M4",
"bytes": "39634"
},
{
"name": "Makefile",
"bytes": "815100"
},
{
"name": "Objective-C",
"bytes": "333243"
},
{
"name": "PHP",
"bytes": "301697"
},
{
"name": "Protocol Buffer",
"bytes": "120452"
},
{
"name": "PureBasic",
"bytes": "147"
},
{
"name": "Python",
"bytes": "1431671"
},
{
"name": "Ruby",
"bytes": "677482"
},
{
"name": "Shell",
"bytes": "58479"
},
{
"name": "Swift",
"bytes": "5418"
}
],
"symlink_target": ""
}
|
"""Parse the input."""
import re
from tokenizer import Tokenizer
from matrix import Matrix
from utils import color_output
class Parser(object):  # noqa
    """Input parser: parse token arguments and dispatch matrix operations.

    Bug fixes in this revision:
      * `self.self.bounds` (AttributeError when a bounds/operator/bounds
        triple was parsed) corrected to `self.bounds`.
      * Removed a stray Python 2 debug statement `print op_res`, which both
        duplicated the color_output below it and made the module a syntax
        error under Python 3.
      * Regex literals made raw strings (same patterns, no escape warnings).
    """

    def __init__(self, vals):
        """Initialize Parser.

        :vals: lexed argument inputs.
        """
        self.vals = vals
        # Token-type tags produced by the Tokenizer.
        self.bounds = 'BOUNDS'
        self.operator = 'OPERATOR'
        self.variable = 'VARIABLE'
        self.assignment = 'ASSIGNMENT'
        self.ints = 'INTS'

    def retrieve_args(self, tokens):
        """Format the tokens and save them to file.

        Dispatches on the number of tokens:
          1 token  -- variable lookup, literal display, or inv(<var>)
          2 tokens -- transpose (<var> ')
          3 tokens -- assignment or a binary arithmetic operation
          >3       -- not implemented yet
        """
        m = Matrix()
        if len(tokens) == 1:
            # variable call or display array / matrix
            if tokens[0][0] == self.variable:
                # variable call
                value = m.find_variable(tokens[0][1])
                if value:
                    value_mx = m.to_matrix(value)
                    if value_mx is not None:
                        color_output(value_mx)
                    else:
                        color_output('Input error.')
                else:
                    color_output(
                        'Variable `{}` not defined.'.format(
                            str(tokens[0][1])))
            elif tokens[0][0] == self.bounds:
                # display array / list
                mx = m.to_matrix(tokens[0][1])
                if mx is not None:
                    color_output(mx)
                else:
                    color_output('Input error')
            elif tokens[0][0] == self.operator:
                o = tokens[0][1]
                # NOTE(review): only `inv(<var>)` is handled here; other
                # operators fall through silently -- confirm intended.
                if o.startswith('inv'):
                    # pull the single-letter variable name out of `inv(x)`
                    r = re.compile(r'\(\w\)')
                    p = r.search(o).group()
                    v = re.compile(r'\w')
                    var = v.search(p).group()
                    val = m.find_variable(var)
                    if val is not None:
                        var_x = m.to_matrix(val)
                        if var_x is not None:
                            res = m.arith(('inverse', var_x, None))
                            if res is not None:
                                color_output(res)
                            else:
                                color_output('Wrong matrix format.')
                        else:
                            color_output('Input error.')
                    else:
                        color_output(
                            'Variable `{}` not defined.'.format(
                                str(tokens[0][1])))
            else:
                color_output('Wrong input...')
        if len(tokens) == 2:
            if tokens[0][0] == self.variable:
                if tokens[1][0] != self.operator:
                    color_output('Wrong input...')
                else:
                    if tokens[1][1] == '\'':
                        # transpose: <var>'
                        tr_value = m.find_variable(tokens[0][1])
                        if tr_value:
                            tr_value_mx = m.to_matrix(tr_value)
                            if tr_value_mx is not None:
                                tr = m.transpose(tr_value_mx)
                                color_output(str(tr))
                            else:
                                color_output('Input error.')
                        else:
                            color_output(
                                'Variable `{}` not defined.'.format(
                                    str(tokens[0][1])))
                    else:
                        color_output('Wrong input...')
            else:
                color_output('Wrong input...')
        if len(tokens) == 3:
            if tokens[0][0] == self.variable:
                if tokens[1][0] == self.assignment:
                    if tokens[2][0] == self.bounds:
                        # x = [...]  literal assignment
                        input_var = tokens[0][1]
                        assign = tokens[2][1]
                        save_vars = (input_var, assign)
                        m.var_assigner(save_vars)
                    elif tokens[2][0] == self.variable:
                        # x = y  copy assignment
                        # NOTE(review): tokens[0] is looked up and its value
                        # stored under tokens[2]'s name -- verify this is the
                        # intended direction against var_assigner's contract.
                        assigner = tokens[0][1]
                        assignee = tokens[2][1]
                        assigner_val = m.find_variable(assigner)
                        if assigner_val:
                            m.var_assigner((assignee, assigner_val))
                        else:
                            color_output(
                                'Variable `{}` not defined.'.format(
                                    str(assigner)))
                elif tokens[1][0] == self.operator:
                    op = tokens[1][1]
                    op_var = m.find_variable(tokens[0][1])
                    if op_var is not None:
                        op_vars = m.to_matrix(op_var)
                        if tokens[2][0] == self.bounds:
                            # <var> <op> [...]
                            op_bounds = m.to_matrix(tokens[2][1])
                            if op_bounds is not None:
                                op_res = m.arith((op_vars, op_bounds, op))
                                if op_res is not None:
                                    color_output(str(op_res))
                                else:
                                    color_output(
                                        'Operation not valid.')
                            else:
                                color_output('Input error.')
                        elif tokens[2][0] == self.variable:
                            # <var> <op> <var>
                            val = m.find_variable(tokens[2][1])
                            if val is not None:
                                var_value = m.to_matrix(val)
                                op_results = m.arith(
                                    (op_vars, var_value, op))
                                if op_results is not None:
                                    color_output(str(op_results))
                                else:
                                    color_output(
                                        'Operation not valid.')
                            else:
                                color_output(
                                    'Variable `{}` not defined.'.format(
                                        tokens[2][1]))
                    else:
                        color_output(
                            'Variable `{}` not defined.'.format(
                                tokens[0][1]))
                else:
                    color_output('Wrong input...')
            elif tokens[0][0] == self.bounds:
                if tokens[1][0] == self.operator:
                    if tokens[2][0] == self.bounds:  # bug fix: was self.self.bounds
                        color_output('Perform operation on bounds.')
                    elif tokens[2][0] == self.variable:
                        color_output('Perform operation on variable.')
            else:
                color_output('Wrong input...')
        if len(tokens) > 3:
            color_output('Not implemented yet. WIP')

    def save_retrieve_args(self):
        """Save and retrieve arguments: lex self.vals, then dispatch."""
        the_tokens = Tokenizer(self.vals)
        tokens = the_tokens.lexer()
        self.retrieve_args(tokens)
|
{
"content_hash": "e2317a03100a1d0168f7cd36223fbac1",
"timestamp": "",
"source": "github",
"line_count": 172,
"max_line_length": 74,
"avg_line_length": 41.93023255813954,
"alnum_prop": 0.3718801996672213,
"repo_name": "Habu-Kagumba/mini_matlab",
"id": "02002197ba314c99bb05f9e295dfe2adba4fc4be",
"size": "7234",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mini_matlab/parser.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "17262"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
from mongoengine import StringField, Document
from cloudmesh_common.tables import array_dict_table_printer
import json
from cloudmesh.config.cm_config import get_mongo_db, get_mongo_dbname_from_collection, DBConnFactory
class ExperimentBase(Document):
    """Base MongoDB document for experiment records (kind 'experiment')."""
    cm_kind = StringField(default="experiment")  # document kind discriminator
    cm_label = StringField()  # experiment group label
    cm_userid = StringField()  # owning user id
    # Route documents to the database configured for the "experiment"
    # collection when one exists; otherwise use the default connection.
    dbname = get_mongo_dbname_from_collection("experiment")
    if dbname:
        meta = {'allow_inheritance': True, 'db_alias': dbname}
    else:
        meta = {'allow_inheritance': True}
class ExperimentVM(ExperimentBase):
    """Experiment record describing a single VM on a cloud."""
    cloud = StringField()  # cloud name the VM runs on (e.g. "india")
    vmid = StringField()  # cloud-assigned VM identifier
class ExperimentGroup(object):
    """Manages ExperimentVM records grouped by (userid, label) in MongoDB."""

    def __init__(self, userid, label):
        """Bind the group to a user/label and open the mongo connection.

        :param userid: owner of the experiment records
        :param label: default group label for add/get/delete
        """
        self.userid = userid
        self.label = label
        get_mongo_db("experiment", DBConnFactory.TYPE_MONGOENGINE)

    def add(self, vm):
        """Stamp *vm* with this group's label and userid, then persist it."""
        vm.cm_label = self.label
        vm.cm_userid = self.userid
        vm.save()

    def get(self, label=None):
        """Return the group's VMs as a list of dicts.

        :param label: group label; "all" returns every VM of the user,
            None falls back to this group's label.
        """
        if label is None:
            label = self.label
        # idea was to derive the field list from ExperimentVM._fields, but
        # that does not work, so the fields are hardcoded here.
        if label in ["all"]:
            vms = ExperimentVM.objects(cm_userid=self.userid).only(
                'cm_userid',
                'cm_label',
                'cloud',
                'vmid')
        else:
            vms = ExperimentVM.objects(
                cm_userid=self.userid,
                cm_label=label).only('cm_userid',
                                     'cm_label',
                                     'cloud',
                                     'vmid')
        return json.loads(vms.to_json())

    def delete(self, label=None):
        """Delete the user's VM records with the given label.

        Bug fix: the original ignored the *label* argument and always
        deleted ``self.label``'s records. The parameter is now honored;
        its new default (None -> self.label) keeps old callers working.
        """
        if label is None:
            label = self.label
        vms = ExperimentVM.objects(cm_userid=self.userid,
                                   cm_label=label)
        for vm in vms:
            vm.delete()

    def to_table(self, label):
        """Render the group's VMs as a printable table, or a message if empty."""
        data = self.get(label)
        if data == []:
            return "No data found"
        else:
            return array_dict_table_printer(data)
def main():
    """Smoke test: repopulate an experiment group with VM records and print them."""
    username = "fuwang"
    label = "exp-a"

    group = ExperimentGroup(username, label)
    group.delete(label)

    # Create nine VM records in the group.
    for index in range(1, 10):
        record = ExperimentVM(
            cm_label=label,
            cm_userid=username,
            cloud="india",
            vmid="myid-{0}".format(index),
        )
        group.add(record)

    # Dump every stored VM, then the group's table view.
    for vm in ExperimentVM.objects():
        print(vm.cm_label, vm.cm_userid, vm.vmid, vm.cloud)
    print(group.to_table(label))


if __name__ == "__main__":
    main()
|
{
"content_hash": "fbf7747fd03fa168a0a84e9ff0c9b066",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 100,
"avg_line_length": 27.92929292929293,
"alnum_prop": 0.5490054249547921,
"repo_name": "rajpushkar83/cloudmesh",
"id": "e23e007d55c16dbb8eff5c7e0ad292e30e299ea3",
"size": "2836",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cloudmesh/experiment/model_group.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ActionScript",
"bytes": "15982"
},
{
"name": "CSS",
"bytes": "390396"
},
{
"name": "HTML",
"bytes": "4158355"
},
{
"name": "Java",
"bytes": "369"
},
{
"name": "JavaScript",
"bytes": "2803977"
},
{
"name": "Makefile",
"bytes": "7572"
},
{
"name": "PHP",
"bytes": "183557"
},
{
"name": "Python",
"bytes": "1736957"
},
{
"name": "Ruby",
"bytes": "10670"
},
{
"name": "Shell",
"bytes": "32263"
}
],
"symlink_target": ""
}
|
"""Test the wallet accounts properly when there are cloned transactions with malleated scriptsigs."""
import io
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
)
from test_framework.messages import CTransaction, COIN
class TxnMallTest(BitcoinTestFramework):
    """Checks wallet accounting when a malleated clone of a transaction confirms
    instead of the original (nodes 0/1 vs 2/3 are split, then reconciled)."""

    def set_test_params(self):
        # Four nodes: 0/1 on one side of the network split, 2/3 on the other.
        self.num_nodes = 4
        self.supports_cli = False

    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()

    def add_options(self, parser):
        parser.add_argument("--mineblock", dest="mine_block", default=False, action="store_true",
                            help="Test double-spend of 1-confirmed transaction")
        parser.add_argument("--segwit", dest="segwit", default=False, action="store_true",
                            help="Test behaviour with SegWit txn (which should fail)")

    def setup_network(self):
        # Start with split network:
        super().setup_network()
        self.disconnect_nodes(1, 2)

    def run_test(self):
        # SegWit commits to the scriptSig malleation vector, so the clone
        # attempt is expected to be a no-op in that mode (checked below).
        if self.options.segwit:
            output_type = "p2sh-segwit"
        else:
            output_type = "legacy"
        # All nodes should start with 1,250 BTC:
        starting_balance = 1250
        for i in range(4):
            assert_equal(self.nodes[i].getbalance(), starting_balance)
            self.nodes[i].getnewaddress()  # bug workaround, coins generated assigned to first getnewaddress!
        self.nodes[0].settxfee(.001)
        # Self-sends on node 0 to set up specific UTXOs for the test.
        node0_address1 = self.nodes[0].getnewaddress(address_type=output_type)
        node0_txid1 = self.nodes[0].sendtoaddress(node0_address1, 1219)
        node0_tx1 = self.nodes[0].gettransaction(node0_txid1)
        node0_address2 = self.nodes[0].getnewaddress(address_type=output_type)
        node0_txid2 = self.nodes[0].sendtoaddress(node0_address2, 29)
        node0_tx2 = self.nodes[0].gettransaction(node0_txid2)
        # Only the fees of the self-sends leave node 0's balance.
        assert_equal(self.nodes[0].getbalance(),
                     starting_balance + node0_tx1["fee"] + node0_tx2["fee"])
        # Coins are sent to node1_address
        node1_address = self.nodes[1].getnewaddress()
        # Send tx1, and another transaction tx2 that won't be cloned
        txid1 = self.nodes[0].sendtoaddress(node1_address, 40)
        txid2 = self.nodes[0].sendtoaddress(node1_address, 20)
        # Construct a clone of tx1, to be malleated
        rawtx1 = self.nodes[0].getrawtransaction(txid1, 1)
        clone_inputs = [{"txid": rawtx1["vin"][0]["txid"], "vout": rawtx1["vin"][0]["vout"], "sequence": rawtx1["vin"][0]["sequence"]}]
        clone_outputs = {rawtx1["vout"][0]["scriptPubKey"]["addresses"][0]: rawtx1["vout"][0]["value"],
                         rawtx1["vout"][1]["scriptPubKey"]["addresses"][0]: rawtx1["vout"][1]["value"]}
        clone_locktime = rawtx1["locktime"]
        clone_raw = self.nodes[0].createrawtransaction(clone_inputs, clone_outputs, clone_locktime)
        # createrawtransaction randomizes the order of its outputs, so swap them if necessary.
        clone_tx = CTransaction()
        clone_tx.deserialize(io.BytesIO(bytes.fromhex(clone_raw)))
        if (rawtx1["vout"][0]["value"] == 40 and clone_tx.vout[0].nValue != 40*COIN or rawtx1["vout"][0]["value"] != 40 and clone_tx.vout[0].nValue == 40*COIN):
            (clone_tx.vout[0], clone_tx.vout[1]) = (clone_tx.vout[1], clone_tx.vout[0])
        # Use a different signature hash type to sign. This creates an equivalent but malleated clone.
        # Don't send the clone anywhere yet
        tx1_clone = self.nodes[0].signrawtransactionwithwallet(clone_tx.serialize().hex(), None, "ALL|ANYONECANPAY")
        assert_equal(tx1_clone["complete"], True)
        # Have node0 mine a block, if requested:
        if (self.options.mine_block):
            self.nodes[0].generate(1)
            self.sync_blocks(self.nodes[0:2])
        tx1 = self.nodes[0].gettransaction(txid1)
        tx2 = self.nodes[0].gettransaction(txid2)
        # Node0's balance should be starting balance, plus 50BTC for another
        # matured block, minus tx1 and tx2 amounts, and minus transaction fees:
        expected = starting_balance + node0_tx1["fee"] + node0_tx2["fee"]
        if self.options.mine_block:
            expected += 50
        expected += tx1["amount"] + tx1["fee"]
        expected += tx2["amount"] + tx2["fee"]
        assert_equal(self.nodes[0].getbalance(), expected)
        if self.options.mine_block:
            assert_equal(tx1["confirmations"], 1)
            assert_equal(tx2["confirmations"], 1)
        else:
            assert_equal(tx1["confirmations"], 0)
            assert_equal(tx2["confirmations"], 0)
        # Send clone and its parent to miner
        self.nodes[2].sendrawtransaction(node0_tx1["hex"])
        txid1_clone = self.nodes[2].sendrawtransaction(tx1_clone["hex"])
        if self.options.segwit:
            # SegWit prevents the malleation, so the "clone" is identical.
            assert_equal(txid1, txid1_clone)
            return
        # ... mine a block...
        self.nodes[2].generate(1)
        # Reconnect the split network, and sync chain:
        self.connect_nodes(1, 2)
        self.nodes[2].sendrawtransaction(node0_tx2["hex"])
        self.nodes[2].sendrawtransaction(tx2["hex"])
        self.nodes[2].generate(1)  # Mine another block to make sure we sync
        self.sync_blocks()
        # Re-fetch transaction info:
        tx1 = self.nodes[0].gettransaction(txid1)
        tx1_clone = self.nodes[0].gettransaction(txid1_clone)
        tx2 = self.nodes[0].gettransaction(txid2)
        # Verify expected confirmations
        # NOTE(review): tx1 lost to its confirmed clone, hence the negative
        # confirmation count; confirm -2 matches gettransaction's conflicted
        # semantics for this chain depth.
        assert_equal(tx1["confirmations"], -2)
        assert_equal(tx1_clone["confirmations"], 2)
        assert_equal(tx2["confirmations"], 1)
        # Check node0's total balance; should be same as before the clone, + 100 BTC for 2 matured,
        # less possible orphaned matured subsidy
        expected += 100
        if (self.options.mine_block):
            expected -= 50
        assert_equal(self.nodes[0].getbalance(), expected)
# Entry point: the framework's main() handles option parsing and node setup.
if __name__ == '__main__':
    TxnMallTest().main()
|
{
"content_hash": "5c0fb998f8bfa23490b48300035d42f4",
"timestamp": "",
"source": "github",
"line_count": 139,
"max_line_length": 160,
"avg_line_length": 43.58273381294964,
"alnum_prop": 0.6242984483327831,
"repo_name": "alecalve/bitcoin",
"id": "bdbbb3e530bb90f904dae358c872865bd7f5abf5",
"size": "6272",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "test/functional/wallet_txn_clone.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "28453"
},
{
"name": "C",
"bytes": "695632"
},
{
"name": "C++",
"bytes": "6008562"
},
{
"name": "HTML",
"bytes": "21860"
},
{
"name": "Java",
"bytes": "30290"
},
{
"name": "M4",
"bytes": "197255"
},
{
"name": "Makefile",
"bytes": "117105"
},
{
"name": "Objective-C",
"bytes": "123749"
},
{
"name": "Objective-C++",
"bytes": "6594"
},
{
"name": "Python",
"bytes": "1469100"
},
{
"name": "QMake",
"bytes": "756"
},
{
"name": "Shell",
"bytes": "88183"
}
],
"symlink_target": ""
}
|
from numba import jit
@jit
def pairwise_minmax(x, y):
    """
    Simple implementation of the minmax distance metric
    for two (non-sparse) vectors.

    Parameters
    ----------
    x : array [n_features]
    y : array [n_features]
        Input vectors between which the minmax distance is
        calculated. Does not:
            * support sparse input.
            * check whether len(x) == len(y).

    Returns
    ----------
    The minmax distance between x and y.
    """
    # Accumulate the element-wise minima and maxima in one pass.
    mins = 0.0
    maxs = 0.0
    for idx in range(x.shape[0]):
        xi = x[idx]
        yi = y[idx]
        if xi >= yi:
            maxs += xi
            mins += yi
        else:
            maxs += yi
            mins += xi
    # Guard against division by zero for all-zero inputs.
    if maxs <= 0.0:
        return 0.0
    return 1.0 - mins / maxs
|
{
"content_hash": "18b4bce1983adb550fc8b7998fa014ae",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 55,
"avg_line_length": 22.235294117647058,
"alnum_prop": 0.4947089947089947,
"repo_name": "mikekestemont/ruzicka",
"id": "1e33b27afcced3889663ca0ad2d06d1bd174ec3c",
"size": "803",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "code/ruzicka/distance_metrics.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "33992"
},
{
"name": "Python",
"bytes": "219727"
}
],
"symlink_target": ""
}
|
'''
Harvester for the DigitalCommons@University of Nebraska - Lincoln for the SHARE project
Example API call: http://digitalcommons.unl.edu/do/oai/?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class UnlHarvester(OAIHarvester):
    # Pure configuration subclass: all harvesting behavior comes from
    # OAIHarvester; these attributes point it at the UNL OAI-PMH endpoint.
    short_name = 'unl_digitalcommons'
    long_name = 'DigitalCommons@University of Nebraska - Lincoln'
    url = 'http://digitalcommons.unl.edu'
    base_url = 'http://digitalcommons.unl.edu/do/oai/'
    # Presumably extra OAI record fields kept during normalization —
    # confirm against the OAIHarvester base class.
    property_list = ['type', 'identifier', 'format', 'date', 'source', 'setSpec']
    timezone_granularity = True
|
{
"content_hash": "5b887d12c2739e036f624f23db52c750",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 94,
"avg_line_length": 34.55555555555556,
"alnum_prop": 0.7282958199356914,
"repo_name": "erinspace/scrapi",
"id": "b0e9993877b744826ae200939e0486f2e706912a",
"size": "622",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop",
"path": "scrapi/harvesters/unl_digitalcommons.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "964"
},
{
"name": "HTML",
"bytes": "2300"
},
{
"name": "Python",
"bytes": "457330"
}
],
"symlink_target": ""
}
|
import logging
import os
import sys
def _SetupPaths():
  """Extend sys.path so the parent directory and autotest libs import."""
  here = os.path.dirname(__file__)
  sys.path.append(os.path.join(here, os.pardir))
  sys.path.append('/usr/local')  # to import autotest libs


_SetupPaths()
from autotest.cros import constants
import pyauto
class ChromeosUtils(pyauto.PyUITest):
  """Utils for ChromeOS."""

  def LoginToDefaultAccount(self):
    """Login to ChromeOS using default testing account.

    Usage:
      python chromeos_utils.py \
          chromeos_utils.ChromeosUtils.LoginToDefaultAccount
    """
    # Fetch login state once. The original called GetLoginInfo() twice in
    # the logged-in branch -- a redundant RPC whose two results could in
    # principle disagree if the state changed between the calls.
    login_info = self.GetLoginInfo()
    if login_info['is_logged_in']:
      logging.info('Already logged in as %s.', login_info['email'])
      return
    # Default testing credentials: (username, password, ...).
    creds = constants.CREDENTIALS['$default']
    username = creds[0]
    passwd = creds[1]
    self.Login(username, passwd)
    assert self.GetLoginInfo()['is_logged_in']
    logging.info('Logged in as %s.', username)
# Entry point: pyauto.Main() dispatches to the requested test method.
if __name__ == '__main__':
  pyauto.Main()
|
{
"content_hash": "80793240011b731b38cd949276503f49",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 77,
"avg_line_length": 24.31578947368421,
"alnum_prop": 0.6731601731601732,
"repo_name": "meego-tablet-ux/meego-app-browser",
"id": "c079c8cc318fe5757e25a7e84b9a37cfe666d0d0",
"size": "1109",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "chrome/test/pyautolib/chromeos/chromeos_utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "5599"
},
{
"name": "AppleScript",
"bytes": "6772"
},
{
"name": "Assembly",
"bytes": "1871"
},
{
"name": "C",
"bytes": "1646303"
},
{
"name": "C++",
"bytes": "72324607"
},
{
"name": "CSS",
"bytes": "221604"
},
{
"name": "Diff",
"bytes": "11193"
},
{
"name": "Go",
"bytes": "3744"
},
{
"name": "HTML",
"bytes": "21930015"
},
{
"name": "Java",
"bytes": "11354"
},
{
"name": "JavaScript",
"bytes": "5339242"
},
{
"name": "Makefile",
"bytes": "2412"
},
{
"name": "Objective-C",
"bytes": "691329"
},
{
"name": "Objective-C++",
"bytes": "3786548"
},
{
"name": "PHP",
"bytes": "97796"
},
{
"name": "PLpgSQL",
"bytes": "70415"
},
{
"name": "Perl",
"bytes": "63704"
},
{
"name": "Protocol Buffer",
"bytes": "96399"
},
{
"name": "Python",
"bytes": "2296716"
},
{
"name": "QML",
"bytes": "452612"
},
{
"name": "QMake",
"bytes": "435"
},
{
"name": "Shell",
"bytes": "200146"
}
],
"symlink_target": ""
}
|
import argparse
import os
# [START healthcare_create_resource_attribute_definition]
def create_resource_attribute_definition(
    project_id: str,
    location: str,
    dataset_id: str,
    consent_store_id: str,
    resource_attribute_definition_id: str,
):
    """Creates a RESOURCE attribute definition. A RESOURCE attribute is an attribute whose value is
    determined by the properties of the data or action.

    See https://github.com/GoogleCloudPlatform/python-docs-samples/tree/main/healthcare/api-client/v1/consent
    before running the sample."""
    # Imports the Google API Discovery Service.
    from googleapiclient import discovery

    # Build an authorized Healthcare API client. Credentials come from the
    # GOOGLE_APPLICATION_CREDENTIALS environment variable.
    client = discovery.build("healthcare", "v1")

    # TODO(developer): Uncomment these lines and replace with your values.
    # project_id = 'my-project'  # replace with your GCP project ID
    # location = 'us-central1'  # replace with the parent dataset's location
    # dataset_id = 'my-dataset'  # replace with the FHIR store's parent dataset ID
    # consent_store_id = 'my-consent-store'  # replace with the consent store's ID
    # resource_attribute_definition_id = 'requester_identity'  # replace with the attribute definition ID
    parent = (
        f"projects/{project_id}/locations/{location}"
        f"/datasets/{dataset_id}/consentStores/{consent_store_id}"
    )

    # Attribute tracking whether stored data is identifiable.
    body = {
        "description": "whether the data is identifiable",
        "category": "RESOURCE",
        "allowed_values": ["identifiable", "de-identified"],
    }

    request = (
        client.projects()
        .locations()
        .datasets()
        .consentStores()
        .attributeDefinitions()
        .create(
            parent=parent,
            body=body,
            attributeDefinitionId=resource_attribute_definition_id,
        )
    )

    response = request.execute()
    print(f"Created RESOURCE attribute definition: {response}")
    return response
# [END healthcare_create_resource_attribute_definition]
# [START healthcare_create_request_attribute_definition]
def create_request_attribute_definition(
    project_id: str,
    location: str,
    dataset_id: str,
    consent_store_id: str,
    request_attribute_definition_id: str,
):
    """Creates a REQUEST attribute definition. A REQUEST attribute is an attribute whose value is determined
    by the requester's identity or purpose.

    See https://github.com/GoogleCloudPlatform/python-docs-samples/tree/main/healthcare/api-client/v1/consent
    before running the sample."""
    # Imports the Google API Discovery Service.
    from googleapiclient import discovery

    # Build an authorized Healthcare API client. Credentials come from the
    # GOOGLE_APPLICATION_CREDENTIALS environment variable.
    client = discovery.build("healthcare", "v1")

    # TODO(developer): Uncomment these lines and replace with your values.
    # project_id = 'my-project'  # replace with your GCP project ID
    # location = 'us-central1'  # replace with the parent dataset's location
    # dataset_id = 'my-dataset'  # replace with the FHIR store's parent dataset ID
    # consent_store_id = 'my-consent-store'  # replace with the consent store's ID
    # request_attribute_definition_id = 'requester_identity'  # replace with the request attribute definition ID
    parent = (
        f"projects/{project_id}/locations/{location}"
        f"/datasets/{dataset_id}/consentStores/{consent_store_id}"
    )

    # Attribute describing which requester groups are consented for access.
    body = {
        "description": "what groups are consented for access",
        "category": "REQUEST",
        "allowed_values": [
            "internal-researcher",
            "external-researcher",
            "clinical-admin",
        ],
    }

    request = (
        client.projects()
        .locations()
        .datasets()
        .consentStores()
        .attributeDefinitions()
        .create(
            parent=parent,
            body=body,
            attributeDefinitionId=request_attribute_definition_id,
        )
    )

    response = request.execute()
    print(f"Created REQUEST attribute definition: {response}")
    return response
# [END healthcare_create_request_attribute_definition]
# [START healthcare_get_attribute_definition]
def get_attribute_definition(
    project_id: str,
    location: str,
    dataset_id: str,
    consent_store_id: str,
    attribute_definition_id: str,
):
    """Gets the specified attribute definition.

    See https://github.com/GoogleCloudPlatform/python-docs-samples/tree/main/healthcare/api-client/v1/consent
    before running the sample."""
    # Imports the Google API Discovery Service.
    from googleapiclient import discovery

    # Build an authorized Healthcare API client. Credentials come from the
    # GOOGLE_APPLICATION_CREDENTIALS environment variable.
    client = discovery.build("healthcare", "v1")

    # TODO(developer): Uncomment these lines and replace with your values.
    # project_id = 'my-project'  # replace with your GCP project ID
    # location = 'us-central1'  # replace with the parent dataset's location
    # dataset_id = 'my-dataset'  # replace with the consent store's parent dataset ID
    # consent_store_id = 'my-consent-store'  # replace with the consent store's ID
    # attribute_definition_id = 'data_identifiable'  # replace with the attribute definition ID
    name = (
        f"projects/{project_id}/locations/{location}"
        f"/datasets/{dataset_id}/consentStores/{consent_store_id}"
        f"/attributeDefinitions/{attribute_definition_id}"
    )

    request = (
        client.projects()
        .locations()
        .datasets()
        .consentStores()
        .attributeDefinitions()
        .get(name=name)
    )

    response = request.execute()
    print(f"Got attribute definition: {attribute_definition_id}")
    return response
# [END healthcare_get_attribute_definition]
# [START healthcare_list_attribute_definitions]
def list_attribute_definitions(project_id: str, location: str, dataset_id: str, consent_store_id: str):
    """Lists the attribute definitions in the given consent store.

    See https://github.com/GoogleCloudPlatform/python-docs-samples/tree/main/healthcare/api-client/v1/consent
    before running the sample."""
    # Imports the Google API Discovery Service.
    from googleapiclient import discovery

    # Build an authorized Healthcare API client. Credentials come from the
    # GOOGLE_APPLICATION_CREDENTIALS environment variable.
    client = discovery.build("healthcare", "v1")

    # TODO(developer): Uncomment these lines and replace with your values.
    # project_id = 'my-project'  # replace with your GCP project ID
    # location = 'us-central1'  # replace with the parent dataset's location
    # dataset_id = 'my-dataset'  # replace with the consent store's parent dataset ID
    # consent_store_id = 'my-consent-store'  # replace with the consent store ID
    parent = (
        f"projects/{project_id}/locations/{location}"
        f"/datasets/{dataset_id}/consentStores/{consent_store_id}"
    )

    response = (
        client.projects()
        .locations()
        .datasets()
        .consentStores()
        .attributeDefinitions()
        .list(parent=parent)
        .execute()
    )
    # Missing key means the store has no attribute definitions.
    attribute_definitions = response.get("attributeDefinitions", [])

    for attribute_definition in attribute_definitions:
        print(attribute_definition)

    return attribute_definitions
# [END healthcare_list_attribute_definitions]
# [START healthcare_patch_attribute_definition]
def patch_attribute_definition(
    project_id: str,
    location: str,
    dataset_id: str,
    consent_store_id: str,
    attribute_definition_id: str,
    description: str,
):
    """Updates the attribute definition.

    See https://github.com/GoogleCloudPlatform/python-docs-samples/tree/main/healthcare/api-client/v1/consent
    before running the sample."""
    # Imports the Google API Discovery Service.
    from googleapiclient import discovery

    # Build an authorized Healthcare API client. Credentials come from the
    # GOOGLE_APPLICATION_CREDENTIALS environment variable.
    client = discovery.build("healthcare", "v1")

    # TODO(developer): Uncomment these lines and replace with your values.
    # project_id = 'my-project'  # replace with your GCP project ID
    # location = 'us-central1'  # replace with the parent dataset's location
    # dataset_id = 'my-dataset'  # replace with the consent store's parent dataset ID
    # consent_store_id = 'my-consent-store'  # replace with the consent store's ID
    # attribute_definition_id = 'requester_identity'  # replace with the attribute definition ID
    # description = 'whether the data is identifiable'  # replace with a description of the attribute
    name = (
        f"projects/{project_id}/locations/{location}"
        f"/datasets/{dataset_id}/consentStores/{consent_store_id}"
        f"/attributeDefinitions/{attribute_definition_id}"
    )

    # Only the description field is updated (see updateMask below).
    patch = {"description": description}

    request = (
        client.projects()
        .locations()
        .datasets()
        .consentStores()
        .attributeDefinitions()
        .patch(name=name, updateMask="description", body=patch)
    )

    response = request.execute()
    print(
        f"Patched attribute definition {attribute_definition_id} "
        f"with new description: {description}"
    )
    return response
# [END healthcare_patch_attribute_definition]
# [START healthcare_delete_attribute_definition]
def delete_attribute_definition(
    project_id: str,
    location: str,
    dataset_id: str,
    consent_store_id: str,
    attribute_definition_id: str,
):
    """Deletes the specified attribute definition.

    See https://github.com/GoogleCloudPlatform/python-docs-samples/tree/main/healthcare/api-client/v1/consent
    before running the sample."""
    # Imports the Google API Discovery Service.
    from googleapiclient import discovery

    # Build an authorized Healthcare API client. Credentials come from the
    # GOOGLE_APPLICATION_CREDENTIALS environment variable.
    client = discovery.build("healthcare", "v1")

    # TODO(developer): Uncomment these lines and replace with your values.
    # project_id = 'my-project'  # replace with your GCP project ID
    # location = 'us-central1'  # replace with the parent dataset's location
    # dataset_id = 'my-dataset'  # replace with the consent store's parent dataset ID
    # consent_store_id = 'my-consent-store'  # replace with the consent store's ID
    # attribute_definition_id = 'data_identifiable'  # replace with the attribute definition ID
    name = (
        f"projects/{project_id}/locations/{location}"
        f"/datasets/{dataset_id}/consentStores/{consent_store_id}"
        f"/attributeDefinitions/{attribute_definition_id}"
    )

    request = (
        client.projects()
        .locations()
        .datasets()
        .consentStores()
        .attributeDefinitions()
        .delete(name=name)
    )

    response = request.execute()
    print(f"Deleted attribute definition: {attribute_definition_id}")
    return response
# [END healthcare_delete_attribute_definition]
def parse_command_line_args():
    """Parses command line arguments."""
    arg_parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
    )

    # Shared options for all subcommands.
    arg_parser.add_argument(
        "--project_id",
        default=os.environ.get("GOOGLE_CLOUD_PROJECT"),
        help="GCP cloud project name",
    )
    arg_parser.add_argument("--location", default="us-central1", help="GCP location")
    arg_parser.add_argument("--dataset_id", default=None, help="ID of dataset")
    arg_parser.add_argument("--consent_store_id", default=None, help="ID of consent store")
    arg_parser.add_argument(
        "--resource_attribute_definition_id",
        default=None,
        help="ID of a RESOURCE attribute definition",
    )
    arg_parser.add_argument(
        "--request_attribute_definition_id",
        default=None,
        help="ID of a REQUEST attribute definition",
    )
    arg_parser.add_argument(
        "--attribute_definition_id", default=None, help="ID of an attribute definition"
    )
    arg_parser.add_argument(
        "--description", default=None, help="A description of an attribute"
    )

    # One subcommand per operation; help text reuses each handler's docstring.
    subcommands = arg_parser.add_subparsers(dest="command")
    subcommands.add_parser(
        "create-resource-attribute-definition",
        help=create_resource_attribute_definition.__doc__,
    )
    subcommands.add_parser(
        "create-request-attribute-definition",
        help=create_request_attribute_definition.__doc__,
    )
    subcommands.add_parser(
        "get-attribute-definition", help=get_attribute_definition.__doc__
    )
    subcommands.add_parser(
        "list-attribute-definitions", help=list_attribute_definitions.__doc__
    )
    subcommands.add_parser(
        "patch-attribute-definition", help=patch_attribute_definition.__doc__
    )
    subcommands.add_parser(
        "delete-attribute-definition", help=delete_attribute_definition.__doc__
    )

    return arg_parser.parse_args()
def run_command(args):
    """Calls the program using the specified command."""
    if args.project_id is None:
        print(
            "You must specify a project ID or set the "
            '"GOOGLE_CLOUD_PROJECT" environment variable.'
        )
        return

    # The first four positional arguments are identical for every handler.
    common = (args.project_id, args.location, args.dataset_id, args.consent_store_id)

    if args.command == "create-resource-attribute-definition":
        create_resource_attribute_definition(
            *common, args.resource_attribute_definition_id
        )
    elif args.command == "create-request-attribute-definition":
        create_request_attribute_definition(
            *common, args.request_attribute_definition_id
        )
    elif args.command == "get-attribute-definition":
        get_attribute_definition(*common, args.attribute_definition_id)
    elif args.command == "list-attribute-definitions":
        list_attribute_definitions(*common)
    elif args.command == "patch-attribute-definition":
        patch_attribute_definition(
            *common, args.attribute_definition_id, args.description
        )
    elif args.command == "delete-attribute-definition":
        delete_attribute_definition(*common, args.attribute_definition_id)
def main():
    """Parse command-line arguments and dispatch to the chosen command."""
    run_command(parse_command_line_args())


if __name__ == "__main__":
    main()
|
{
"content_hash": "4dc180306a71d93aa0b79e9ba5d69ce0",
"timestamp": "",
"source": "github",
"line_count": 481,
"max_line_length": 112,
"avg_line_length": 33.67359667359668,
"alnum_prop": 0.6614187812557881,
"repo_name": "GoogleCloudPlatform/python-docs-samples",
"id": "cb96acbc2dee28e7cbd10f29ac7a1ffa0f24150f",
"size": "16772",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "healthcare/api-client/v1/consent/attribute_definitions.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "8008"
},
{
"name": "Dockerfile",
"bytes": "62031"
},
{
"name": "HTML",
"bytes": "69878"
},
{
"name": "JavaScript",
"bytes": "26494"
},
{
"name": "Jinja",
"bytes": "1892"
},
{
"name": "Jupyter Notebook",
"bytes": "47951698"
},
{
"name": "Makefile",
"bytes": "932"
},
{
"name": "Procfile",
"bytes": "138"
},
{
"name": "PureBasic",
"bytes": "11115"
},
{
"name": "Python",
"bytes": "5323502"
},
{
"name": "Shell",
"bytes": "78261"
}
],
"symlink_target": ""
}
|
class CallbackModule(object):
    """
    this is an example ansible callback file that does nothing. You can drop
    other classes in the same directory to define your own handlers. Methods
    you do not use can be omitted.

    example uses include: logging, emailing, storing info, etc
    """
    # Catch-all hook invoked for every event; like all hooks in this
    # template it is a deliberate no-op.
    def on_any(self, *args, **kwargs):
        pass
    # --- runner hooks: fired per host as individual tasks execute ---
    def runner_on_failed(self, host, res, ignore_errors=False):
        pass
    def runner_on_ok(self, host, res):
        pass
    def runner_on_error(self, host, msg):
        pass
    def runner_on_skipped(self, host, item=None):
        pass
    def runner_on_unreachable(self, host, res):
        pass
    def runner_on_no_hosts(self):
        pass
    # --- async runner hooks: polling status of fire-and-forget tasks ---
    def runner_on_async_poll(self, host, res, jid, clock):
        pass
    def runner_on_async_ok(self, host, res, jid):
        pass
    def runner_on_async_failed(self, host, res, jid):
        pass
    # --- playbook lifecycle hooks ---
    def playbook_on_start(self):
        pass
    def playbook_on_notify(self, host, handler):
        pass
    def playbook_on_no_hosts_matched(self):
        pass
    def playbook_on_no_hosts_remaining(self):
        pass
    def playbook_on_task_start(self, name, is_conditional):
        pass
    def playbook_on_vars_prompt(self, varname, private=True, prompt=None, encrypt=None, confirm=False, salt_size=None, salt=None, default=None):
        pass
    def playbook_on_setup(self):
        pass
    def playbook_on_import_for_host(self, host, imported_file):
        pass
    def playbook_on_not_import_for_host(self, host, missing_file):
        pass
    def playbook_on_play_start(self, pattern):
        pass
    def playbook_on_stats(self, stats):
        pass
|
{
"content_hash": "a61a196f779581b9ef201c8b50451f04",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 144,
"avg_line_length": 23.04054054054054,
"alnum_prop": 0.6217008797653959,
"repo_name": "dlab-berkeley/collaboratool-archive",
"id": "54a2b254fc7050043a19ae4f059595f250e5f2a3",
"size": "2420",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "bsd2/vagrant-ansible/ansible/lib/ansible/callback_plugins/noop.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "18154"
},
{
"name": "JavaScript",
"bytes": "30509"
},
{
"name": "Perl",
"bytes": "23315"
},
{
"name": "Puppet",
"bytes": "2252"
},
{
"name": "Python",
"bytes": "684123"
},
{
"name": "Ruby",
"bytes": "11103"
},
{
"name": "Shell",
"bytes": "6980"
}
],
"symlink_target": ""
}
|
import argparse
import datetime
import json
import logging
from socket import gaierror
from socket import getfqdn
from socket import gethostbyname
from typing import List
from typing import NamedTuple
from typing import Optional
import a_sync
from dateutil import parser
from pytimeparse import timeparse
from requests import Request
from requests import Session
from requests.exceptions import HTTPError
from paasta_tools.mesos_tools import get_count_running_tasks_on_slave
from paasta_tools.mesos_tools import get_mesos_config_path
from paasta_tools.mesos_tools import get_mesos_leader
from paasta_tools.mesos_tools import get_mesos_master
from paasta_tools.mesos_tools import MESOS_MASTER_PORT
from paasta_tools.utils import SystemPaastaConfig
from paasta_tools.utils import time_cache
from paasta_tools.utils import to_bytes
log = logging.getLogger(__name__)
class Hostname(NamedTuple):
    """A hostname paired with its IP address.

    Produced by hostnames_to_components(); ``ip`` may be None when DNS
    resolution was skipped, despite the ``str`` annotation.
    """
    host: str
    ip: str
class Credentials(NamedTuple):
    """Mesos credentials as loaded from the slave secret file."""
    file: str
    principal: str
    secret: str
class Resource(NamedTuple):
    """A named scalar resource amount (e.g. cpus, mem) to (un)reserve."""
    name: str
    amount: int
MAINTENANCE_ROLE = "maintenance"
def base_api(mesos_config_path: Optional[str] = None):
    """Build a request function bound to the current Mesos leader.

    :param mesos_config_path: optional path to the mesos config file
    :returns: a callable ``(method, endpoint, **kwargs) -> requests.Response``
    :raises HTTPError: if the underlying request returns an error status
    """
    leader = get_mesos_leader(mesos_config_path)
    def execute_request(method, endpoint, timeout=(3, 2), **kwargs):
        url = "http://%s:%d%s" % (leader, MESOS_MASTER_PORT, endpoint)
        session = Session()
        # Every request authenticates with the slave credentials.
        session.auth = (get_principal(), get_secret())
        prepped = session.prepare_request(Request(method, url, **kwargs))
        try:
            response = session.send(prepped, timeout=timeout)
            response.raise_for_status()
        except HTTPError:
            raise HTTPError("Error executing API request calling %s." % url)
        return response
    return execute_request
def master_api(mesos_config_path: Optional[str] = None):
    """Build a request function for the /master API endpoints.

    :param mesos_config_path: optional path to the mesos config file
    :returns: a callable that issues requests against /master
    """
    def execute_master_api_request(method, endpoint, **kwargs):
        call = base_api(mesos_config_path=mesos_config_path)
        return call(method, "/master%s" % endpoint, **kwargs)
    return execute_master_api_request
def operator_api(mesos_config_path: Optional[str] = None):
    """Build a request function for the Mesos operator API (/api/v1).

    The returned callable always POSTs JSON; the mandatory ``data`` kwarg is
    serialized to a JSON request body.
    """
    def execute_operator_api_request(**kwargs):
        call = base_api(mesos_config_path=mesos_config_path)
        # The operator API only accepts JSON bodies.
        kwargs.setdefault("headers", {})["Content-Type"] = "application/json"
        body = kwargs.pop("data")
        return call("POST", "/api/v1", data=json.dumps(body), **kwargs)
    return execute_operator_api_request
def reserve_api():
    """Build a request function for the /reserve API endpoints.

    :returns: a callable that issues requests against /reserve
    """
    def execute_reserve_api_request(method, endpoint, **kwargs):
        call = master_api()
        return call(method, "/reserve%s" % endpoint, **kwargs)
    return execute_reserve_api_request
def unreserve_api():
    """Build a request function for the /unreserve API endpoints.

    :returns: a callable that issues requests against /unreserve
    """
    def execute_unreserve_api_request(method, endpoint, **kwargs):
        call = master_api()
        return call(method, "/unreserve%s" % endpoint, **kwargs)
    return execute_unreserve_api_request
def maintenance_api():
    """Build a request function for the /master/maintenance API endpoints.

    :returns: a callable that issues requests against /master/maintenance
    """
    def execute_schedule_api_request(method, endpoint, **kwargs):
        call = master_api()
        # Maintenance calls can be slow; use a longer read timeout.
        return call(method, "/maintenance%s" % endpoint, timeout=(3, 10), **kwargs)
    return execute_schedule_api_request
def get_schedule_client():
    """Build a request function for /master/maintenance/schedule endpoints.

    :returns: a callable that issues requests against /maintenance/schedule
    """
    def execute_schedule_api_request(method, endpoint, **kwargs):
        call = maintenance_api()
        return call(method, "/schedule%s" % endpoint, **kwargs)
    return execute_schedule_api_request
def get_maintenance_schedule():
    """POST a GET_MAINTENANCE_SCHEDULE call to the operator API.

    :returns: the GET_MAINTENANCE_SCHEDULE response
    """
    operator_request = operator_api()
    return operator_request(data={"type": "GET_MAINTENANCE_SCHEDULE"})
@time_cache(ttl=10)
def get_maintenance_status(mesos_config_path: Optional[str] = None):
    """POST a GET_MAINTENANCE_STATUS call to the operator API (cached 10s).

    :param mesos_config_path: optional path to the mesos config file
    :returns: the GET_MAINTENANCE_STATUS response
    """
    operator_request = operator_api(mesos_config_path=mesos_config_path)
    return operator_request(data={"type": "GET_MAINTENANCE_STATUS"})
def schedule():
    """Fetch the raw Mesos maintenance schedule (host/ip maintenance windows).

    :returns: GET_MAINTENANCE_SCHEDULE response text
    :raises HTTPError: if the schedule request fails
    """
    try:
        # Local name avoids shadowing this function with a same-named variable.
        response = get_maintenance_schedule()
    except HTTPError:
        raise HTTPError("Error getting maintenance schedule.")
    return response.text
def get_hosts_with_state(
    state, system_paasta_config: Optional[SystemPaastaConfig] = None
) -> List[str]:
    """Return all hosts listed in the given maintenance state.

    :param state: state of interest ('down_machines' or 'draining_machines')
    :param system_paasta_config: optional config used to locate the mesos config
    :returns: a list of hostnames in the specified state (empty if none)
    :raises HTTPError: if the maintenance status request fails
    """
    mesos_config_path = get_mesos_config_path(system_paasta_config)
    try:
        status = get_maintenance_status(mesos_config_path).json()
        status = status["get_maintenance_status"]["status"]
    except HTTPError:
        raise HTTPError("Error getting maintenance status.")
    # Fix: the old code indexed status[state][0] after only checking key
    # presence, so a present-but-empty machine list raised IndexError.
    machines = status.get(state, []) if status else []
    if not machines:
        return []
    # Older/newer Mesos versions differ on whether entries are nested under "id".
    if "id" in machines[0]:
        return [machine["id"]["hostname"] for machine in machines]
    return [machine["hostname"] for machine in machines]
def get_draining_hosts(system_paasta_config: Optional[SystemPaastaConfig] = None):
    """List hostnames currently marked as draining.

    :param system_paasta_config: optional config used to locate the mesos config
    :returns: a list of hostname strings
    """
    return get_hosts_with_state(
        state="draining_machines", system_paasta_config=system_paasta_config
    )
def get_down_hosts():
    """List hostnames currently marked as down for maintenance.

    :returns: a list of hostname strings
    """
    return get_hosts_with_state(state="down_machines")
def is_host_draining(hostname=None):
    """Check whether *hostname* is currently marked as draining.

    Fix: the old default ``hostname=getfqdn()`` was evaluated once at import
    time, performing a DNS lookup on import and caching a hostname that could
    go stale; the FQDN is now resolved at call time instead.

    :param hostname: hostname to check (defaults to this host's FQDN)
    :returns: True if the host is draining, False otherwise
    """
    if hostname is None:
        hostname = getfqdn()
    return hostname in get_draining_hosts()
def is_host_down(hostname=None):
    """Check whether *hostname* is currently marked as down.

    Fix: the old default ``hostname=getfqdn()`` was evaluated once at import
    time, performing a DNS lookup on import and caching a hostname that could
    go stale; the FQDN is now resolved at call time instead.

    :param hostname: hostname to check (defaults to this host's FQDN)
    :returns: True if the host is down, False otherwise
    """
    if hostname is None:
        hostname = getfqdn()
    return hostname in get_down_hosts()
def get_hosts_forgotten_draining(grace=0):
    """Find hosts still draining (not down) after their window has started.

    :param grace: nanoseconds a host may stay draining past its window start
        before it counts as forgotten
    :returns: a list of hostnames of hosts forgotten draining
    """
    draining_hosts = get_draining_hosts()
    log.debug("draining_hosts: %s", draining_hosts)
    past_start = get_hosts_past_maintenance_start(grace=grace)
    log.debug("hosts_past_maintenance_start: %s", past_start)
    forgotten_draining = list(set(draining_hosts) & set(past_start))
    log.debug("forgotten_draining: %s", forgotten_draining)
    return forgotten_draining
def are_hosts_forgotten_draining():
    """Return True if any host was left draining past its window start."""
    forgotten = get_hosts_forgotten_draining()
    return len(forgotten) > 0
def get_hosts_forgotten_down(grace=0):
    """Find hosts still down (not back up) after their window has ended.

    :param grace: nanoseconds a host may stay down past its window end before
        it counts as forgotten
    :returns: a list of hostnames of hosts forgotten down
    """
    down_hosts = get_down_hosts()
    log.debug("down_hosts: %s", down_hosts)
    past_end = get_hosts_past_maintenance_end(grace=grace)
    log.debug("hosts_past_maintenance_end: %s", past_end)
    forgotten_down = list(set(down_hosts) & set(past_end))
    log.debug("forgotten_down: %s", forgotten_down)
    return forgotten_down
def are_hosts_forgotten_down():
    """Return True if any host was left down past its window end."""
    forgotten = get_hosts_forgotten_down()
    return len(forgotten) > 0
def parse_timedelta(value):
    """Parse a pytimeparse time expression into nanoseconds.

    :param value: a string supported by :mod:`pytimeparse` (e.g. '2h')
    :returns: the delta in nanoseconds (int or float)
    :raises argparse.ArgumentTypeError: if the expression cannot be parsed
        (note: an expression that parses to exactly zero is also rejected)
    """
    try:
        seconds = timeparse.timeparse(value)
    except TypeError:
        seconds = None
    if not seconds:
        raise argparse.ArgumentTypeError(
            "'%s' is not a valid time expression" % value
        )
    return seconds_to_nanoseconds(seconds)
def parse_datetime(value):
    """Parse a datetime string into nanoseconds since the epoch.

    :param value: a string supported by :mod:`dateutil.parser`
    :returns: the datetime in nanoseconds (int or float)
    :raises argparse.ArgumentTypeError: if the string cannot be parsed
    """
    try:
        dt = parser.parse(value)
    except Exception:
        dt = None
    if not dt:
        raise argparse.ArgumentTypeError(
            "'%s' is not a valid datetime expression" % value
        )
    return datetime_to_nanoseconds(dt)
def datetime_seconds_from_now(seconds):
    """Return the datetime that is *seconds* seconds in the future.

    :param seconds: offset in seconds
    :returns: a datetime.datetime of now + the offset
    """
    offset = datetime.timedelta(seconds=seconds)
    return now() + offset
def now():
    """Return the current local time as a naive datetime.

    :returns: a datetime.datetime for the current moment
    """
    current_time = datetime.datetime.now()
    return current_time
def seconds_to_nanoseconds(seconds):
    """Convert a number of seconds to nanoseconds.

    :param seconds: a number of seconds (int or float)
    :returns: the same quantity expressed in nanoseconds
    """
    nanoseconds_per_second = 1000000000
    return seconds * nanoseconds_per_second
def datetime_to_nanoseconds(dt):
    """Convert the provided datetime object into nanoseconds since the epoch.

    Fix: the old implementation used ``dt.strftime("%s")``; ``%s`` is a
    glibc extension (fails on non-Linux platforms) and it ignores the tzinfo
    of aware datetimes. ``datetime.timestamp()`` matches its behavior for
    naive datetimes (interpreted as local time) and is portable and
    timezone-correct for aware ones.

    :returns: an integer (or float) representation of the datetime in nanoseconds
    """
    return seconds_to_nanoseconds(int(dt.timestamp()))
def build_maintenance_payload(hostnames, maint_type):
    """Build the operator-API payload to start/stop maintenance on hosts.

    :param hostnames: a list of hostnames
    :param maint_type: the maintenance call, e.g. 'start_maintenance'
    :returns: a dict payload for the operator API
    """
    machines = {"machines": get_machine_ids(hostnames)}
    return {"type": maint_type.upper(), maint_type.lower(): machines}
def hostnames_to_components(hostnames, resolve=False):
    """Convert 'host[|ip]' strings into Hostname namedtuples.

    :param hostnames: a list of 'host' or 'host|ip' strings
    :param resolve: look up the IP via DNS when it is not given inline
    :returns: a list of Hostname namedtuples (ip is None when not resolved);
        entries whose DNS lookup fails are skipped
    """
    components = []
    for entry in hostnames:
        if "|" in entry:
            # Inline "hostname|ipaddress" form skips the DNS query entirely.
            host, ip = entry.split("|")
        else:
            host = entry
            try:
                ip = gethostbyname(entry) if resolve else None
            except gaierror:
                log.error(f"Failed to resolve IP for {entry}, continuing regardless")
                continue
        components.append(Hostname(host=host, ip=ip))
    return components
def get_machine_ids(hostnames):
    """Convert hostnames into the machine_id dicts Mesos expects.

    :param hostnames: a list of hostnames
    :returns: a list of {"hostname": ..., "ip": ...} dicts
    """
    components = hostnames_to_components(hostnames, resolve=True)
    return [
        {"hostname": component.host, "ip": component.ip} for component in components
    ]
def build_reservation_payload(resources):
    """Create the JSON payload needed to dynamically (un)reserve resources.

    :param resources: list of Resource named tuples specifying the name and
        amount of the resource to (un)reserve
    :returns: a list of resource dicts that can be sent to Mesos
    """
    # Fix: hoist the principal lookup out of the loop. get_principal()
    # re-reads the credentials file on every call, so the old per-resource
    # call did len(resources) redundant file reads.
    principal = get_principal()
    return [
        {
            "name": resource.name,
            "type": "SCALAR",
            "scalar": {"value": resource.amount},
            "role": MAINTENANCE_ROLE,
            "reservation": {"principal": principal},
        }
        for resource in resources
    ]
def build_maintenance_schedule_payload(
    hostnames, start=None, duration=None, drain=True
):
    """Create the payload to (un)schedule maintenance on the specified hosts.

    :param hostnames: a list of hostnames
    :param start: maintenance start, nanoseconds since the epoch (drain only)
    :param duration: maintenance window length in nanoseconds (drain only)
    :param drain: True to schedule a new window for the hosts, False to
        remove them from the schedule
    :returns: a dict that can be POSTed to the Mesos operator API
    """
    schedule = get_maintenance_schedule().json()["get_maintenance_schedule"]["schedule"]
    machine_ids = get_machine_ids(hostnames)
    if drain:
        window = {
            "machine_ids": machine_ids,
            "unavailability": {
                "start": {"nanoseconds": int(start)},
                "duration": {"nanoseconds": int(duration)},
            },
        }
    windows = []
    if schedule:
        # Remove the target machines from any existing window (their window
        # is being replaced or dropped) and discard windows that become empty.
        # Fix: the old code called list.remove() on both the window list and
        # each machine_ids list *while iterating them*, which skips elements
        # and could leave stale machine entries behind.
        for existing_window in schedule["windows"]:
            remaining = [
                machine_id
                for machine_id in existing_window["machine_ids"]
                if machine_id not in machine_ids
            ]
            if remaining:
                existing_window["machine_ids"] = remaining
                windows.append(existing_window)
    if drain:
        windows.append(window)
    payload = {"windows": windows}
    return {
        "type": "UPDATE_MAINTENANCE_SCHEDULE",
        "update_maintenance_schedule": {"schedule": payload},
    }
def load_credentials(mesos_secrets="/nail/etc/mesos-slave-secret"):
    """Load the mesos-slave credentials used for all maintenance API requests.

    :param mesos_secrets: path to the file containing the mesos-slave credentials
    :returns: a Credentials namedtuple (file, principal, secret)
    :raises EnvironmentError: if the secrets file cannot be read
    :raises KeyError: if the file lacks the 'principal'/'secret' keys
    """
    try:
        with open(mesos_secrets) as data_file:
            credentials = json.load(data_file)
    except EnvironmentError:
        log.error(
            "maintenance calls must be run on a Mesos slave containing valid credentials (%s)"
            % mesos_secrets
        )
        raise
    try:
        return Credentials(
            file=mesos_secrets,
            principal=credentials["principal"],
            secret=credentials["secret"],
        )
    except KeyError:
        log.error(
            "%s does not contain Mesos slave credentials in the expected format. "
            "See http://mesos.apache.org/documentation/latest/authentication/ for details"
            % mesos_secrets
        )
        raise
def get_principal(mesos_secrets="/nail/etc/mesos-slave-secret"):
    """Return the principal/username from the mesos-slave credentials.

    :param mesos_secrets: path to the credentials file
    :returns: the principal string
    """
    credentials = load_credentials(mesos_secrets)
    return credentials.principal
def get_secret(mesos_secrets="/nail/etc/mesos-slave-secret"):
    """Return the secret/password from the mesos-slave credentials.

    :param mesos_secrets: path to the credentials file
    :returns: the secret string
    """
    credentials = load_credentials(mesos_secrets)
    return credentials.secret
def _make_request_payload(slave_id, reservation_payload):
    """Build the form payload for the legacy /reserve and /unreserve endpoints."""
    # to_bytes because py2 json's return type isn't well defined; once fully
    # on python 3 this can be a plain .encode(). '+' must be percent-escaped
    # for form encoding.
    encoded_resources = to_bytes(json.dumps(reservation_payload))
    return {
        "slaveId": slave_id.encode("UTF-8"),
        "resources": encoded_resources.replace(b"+", b"%20"),
    }
def _make_operator_reservation_request_payload(slave_id, payload, request_type):
return {
"type": request_type.upper(),
request_type.lower(): {"agent_id": {"value": slave_id}},
"resources": payload,
}
def reserve(slave_id, resources):
    """Dynamically reserve resources in mesos to prevent tasks from using them.

    :param slave_id: the id of the mesos slave
    :param resources: list of Resource named tuples specifying the name and
        amount of the resource to reserve
    :returns: text of the operator API response
    :raises HTTPError: if the reservation request fails
    """
    log.info(f"Dynamically reserving resources on {slave_id}: {resources}")
    payload = _make_operator_reservation_request_payload(
        slave_id=slave_id,
        payload=build_reservation_payload(resources),
        request_type="reserve_resources",
    )
    client_fn = operator_api()
    # Fix: replaced a stray debug `print(payload)` (which polluted the stdout
    # of every caller) with a debug log entry.
    log.debug("Reservation payload: %s", payload)
    try:
        reserve_output = client_fn(data=payload).text
    except HTTPError:
        raise HTTPError("Error adding dynamic reservation.")
    return reserve_output
def unreserve(slave_id, resources):
    """Dynamically unreserve resources in mesos so tasks can use them again.

    :param slave_id: the id of the mesos slave
    :param resources: list of Resource named tuples specifying the name and
        amount of the resource to unreserve
    :returns: text of the operator API response
    :raises HTTPError: if the unreservation request fails
    """
    log.info(f"Dynamically unreserving resources on {slave_id}: {resources}")
    request_payload = _make_operator_reservation_request_payload(
        slave_id=slave_id,
        payload=build_reservation_payload(resources),
        request_type="unreserve_resources",
    )
    submit = operator_api()
    try:
        response_text = submit(data=request_payload).text
    except HTTPError:
        raise HTTPError("Error adding dynamic unreservation.")
    return response_text
def components_to_hosts(components):
    """Extract the host names from a list of Hostname namedtuples.

    :param components: iterable of objects with a ``host`` attribute
    :returns: a list of host names, one per component
    """
    return [component.host for component in components]
def reserve_all_resources(hostnames):
    """Dynamically reserve all available resources on the specified hosts
    :param hostnames: list of hostnames to reserve resources on
    :raises HTTPError: if any per-slave reservation call fails
    """
    mesos_state = a_sync.block(get_mesos_master().state_summary)
    components = hostnames_to_components(hostnames)
    hosts = components_to_hosts(components)
    # Only act on agents the master actually knows about; unknown hostnames
    # are silently skipped.
    known_slaves = [
        slave for slave in mesos_state["slaves"] if slave["hostname"] in hosts
    ]
    for slave in known_slaves:
        hostname = slave["hostname"]
        log.info("Reserving all resources on %s" % hostname)
        slave_id = slave["id"]
        resources = []
        for resource in ["disk", "mem", "cpus", "gpus"]:
            # Free amount = total - used - anything already reserved under
            # any role on this agent.
            free_resource = (
                slave["resources"][resource] - slave["used_resources"][resource]
            )
            for role in slave["reserved_resources"]:
                free_resource -= slave["reserved_resources"][role][resource]
            resources.append(Resource(name=resource, amount=free_resource))
        try:
            reserve(slave_id=slave_id, resources=resources)
        except HTTPError:
            raise HTTPError(
                f"Failed reserving all of the resources on {hostname} ({slave_id}). Aborting."
            )
def unreserve_all_resources(hostnames):
    """Dynamically unreserve all available resources on the specified hosts
    :param hostnames: list of hostnames to unreserve resources on
    :raises HTTPError: if any per-slave unreservation call fails
    """
    mesos_state = a_sync.block(get_mesos_master().state_summary)
    components = hostnames_to_components(hostnames)
    hosts = components_to_hosts(components)
    # Only act on agents the master actually knows about.
    known_slaves = [
        slave for slave in mesos_state["slaves"] if slave["hostname"] in hosts
    ]
    for slave in known_slaves:
        hostname = slave["hostname"]
        log.info("Unreserving all resources on %s" % hostname)
        slave_id = slave["id"]
        resources = []
        # Only amounts reserved under our maintenance role are released;
        # reservations held by other roles are left untouched.
        if MAINTENANCE_ROLE in slave["reserved_resources"]:
            for resource in ["disk", "mem", "cpus", "gpus"]:
                reserved_resource = slave["reserved_resources"][MAINTENANCE_ROLE][
                    resource
                ]
                resources.append(Resource(name=resource, amount=reserved_resource))
        try:
            unreserve(slave_id=slave_id, resources=resources)
        except HTTPError:
            raise HTTPError(
                f"Failed unreserving all of the resources on {hostname} ({slave_id}). Aborting."
            )
def drain(hostnames, start, duration, reserve_resources=True):
    """Schedule a maintenance window for *hostnames* and mark them as draining.

    :param hostnames: a list of hostnames
    :param start: window start, nanoseconds since the epoch
    :param duration: window length in nanoseconds
    :param reserve_resources: also reserve the agents' free resources first
    :returns: text of the operator API response
    :raises HTTPError: if the drain request itself fails
    """
    log.info("Draining: %s" % hostnames)
    if reserve_resources:
        # Reservation failure is non-fatal: log it and drain anyway.
        try:
            reserve_all_resources(hostnames)
        except HTTPError as e:
            log.warning("Failed to reserve resources, will continue to drain: %s" % e)
    payload = build_maintenance_schedule_payload(hostnames, start, duration, drain=True)
    submit = operator_api()
    try:
        response_text = submit(data=payload).text
    except HTTPError:
        raise HTTPError("Error performing maintenance drain.")
    return response_text
def undrain(hostnames, unreserve_resources=True):
    """Unschedule the maintenance window for *hostnames* and unmark them as
    draining, making them ready for regular use again.

    :param hostnames: a list of hostnames
    :param unreserve_resources: also release the maintenance-role resource
        reservations on the agents before undraining
    :returns: text of the operator API response
    :raises HTTPError: if the undrain request itself fails
    """
    log.info("Undraining: %s" % hostnames)
    if unreserve_resources:
        # Unreservation failure is non-fatal: log it and undrain anyway.
        try:
            unreserve_all_resources(hostnames)
        except HTTPError as e:
            log.warning(
                "Failed to unreserve resources, will continue to undrain: %s" % e
            )
    payload = build_maintenance_schedule_payload(hostnames, drain=False)
    # Fix: removed the dead `client_fn = get_schedule_client()` assignment
    # that was immediately overwritten by operator_api(); it only built an
    # unused client.
    client_fn = operator_api()
    try:
        undrain_output = client_fn(data=payload).text
    except HTTPError:
        raise HTTPError("Error performing maintenance undrain.")
    return undrain_output
def down(hostnames):
    """Mark *hostnames* as down for maintenance, making them unavailable.

    :param hostnames: a list of hostnames
    :returns: text of the operator API response
    :raises HTTPError: if the request fails
    """
    log.info("Bringing down: %s" % hostnames)
    payload = build_maintenance_payload(hostnames, "start_maintenance")
    submit = operator_api()
    try:
        response_text = submit(data=payload).text
    except HTTPError:
        raise HTTPError("Error performing maintenance down.")
    return response_text
def up(hostnames):
    """Mark *hostnames* as no longer down for maintenance, making them available.

    :param hostnames: a list of hostnames
    :returns: text of the operator API response
    :raises HTTPError: if the request fails
    """
    log.info("Bringing up: %s" % hostnames)
    payload = build_maintenance_payload(hostnames, "stop_maintenance")
    submit = operator_api()
    try:
        response_text = submit(data=payload).text
    except HTTPError:
        raise HTTPError("Error performing maintenance up.")
    return response_text
def raw_status():
    """Fetch the Mesos maintenance status (hosts down or draining).

    :returns: the Response object containing the status
    :raises HTTPError: if the status request fails
    """
    try:
        response = get_maintenance_status()
    except HTTPError:
        raise HTTPError("Error performing maintenance status.")
    return response
def status():
    """Return the Mesos maintenance status (hosts down or draining) as text.

    :returns: text representation of the status
    """
    return raw_status().text
def friendly_status():
    """Render the Mesos maintenance status in a human-friendly way.

    :returns: one line per machine, e.g. 'host (ip): Draining'
    """
    maintenance_status = raw_status().json()["get_maintenance_status"]["status"]
    lines = []
    for machine in maintenance_status.get("draining_machines", []):
        lines.append(
            "{} ({}): Draining\n".format(
                machine["id"]["hostname"], machine["id"]["ip"]
            )
        )
    for machine in maintenance_status.get("down_machines", []):
        lines.append("{} ({}): Down\n".format(machine["hostname"], machine["ip"]))
    return "".join(lines)
def is_host_drained(hostname):
    """Check that a host is marked draining and is running zero tasks.

    :param hostname: hostname to check
    :returns: True if fully drained, False otherwise
    """
    if not is_host_draining(hostname=hostname):
        return False
    return get_count_running_tasks_on_slave(hostname) == 0
def is_host_past_maintenance_start(hostname):
    """Check whether *hostname* has reached the start of its maintenance window.

    :param hostname: hostname to check
    :returns: True or False
    """
    past_start = get_hosts_past_maintenance_start()
    return hostname in past_start
def is_host_past_maintenance_end(hostname):
    """Check whether *hostname* has reached the end of its maintenance window.

    :param hostname: hostname to check
    :returns: True or False
    """
    past_end = get_hosts_past_maintenance_end()
    return hostname in past_end
def get_hosts_past_maintenance_start(grace=0):
    """List hosts whose maintenance window has already started.

    :param grace: nanoseconds of slack before a host counts as past its start
    :returns: a list of hostnames
    """
    maintenance_schedule = get_maintenance_schedule().json()[
        "get_maintenance_schedule"
    ]["schedule"]
    cutoff = datetime_to_nanoseconds(now()) - grace
    started_hosts = []
    if "windows" in maintenance_schedule:
        for window in maintenance_schedule["windows"]:
            if window["unavailability"]["start"]["nanoseconds"] < cutoff:
                started_hosts.extend(
                    host["hostname"] for host in window["machine_ids"]
                )
    log.debug(f"Hosts past maintenance start: {started_hosts}")
    return started_hosts
def get_hosts_past_maintenance_end(grace=0):
    """List hosts whose maintenance window has already ended.

    :param grace: nanoseconds of slack before a host counts as past its end
    :returns: a list of hostnames
    """
    maintenance_schedule = get_maintenance_schedule().json()[
        "get_maintenance_schedule"
    ]["schedule"]
    cutoff = datetime_to_nanoseconds(now()) - grace
    ended_hosts = []
    if "windows" in maintenance_schedule:
        for window in maintenance_schedule["windows"]:
            # A window ends at start + duration (both in nanoseconds).
            window_end = (
                window["unavailability"]["start"]["nanoseconds"]
                + window["unavailability"]["duration"]["nanoseconds"]
            )
            if window_end < cutoff:
                ended_hosts.extend(
                    host["hostname"] for host in window["machine_ids"]
                )
    log.debug(f"Hosts past maintenance end: {ended_hosts}")
    return ended_hosts
|
{
"content_hash": "3a889a62109ae40d5800550de68f7e94",
"timestamp": "",
"source": "github",
"line_count": 834,
"max_line_length": 118,
"avg_line_length": 36.99760191846523,
"alnum_prop": 0.6770158154005704,
"repo_name": "Yelp/paasta",
"id": "4eb8645d0ece59d9b31d4168c5119146f8d7dbb2",
"size": "31456",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "paasta_tools/mesos_maintenance.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "19456"
},
{
"name": "Gherkin",
"bytes": "4399"
},
{
"name": "Makefile",
"bytes": "12710"
},
{
"name": "Python",
"bytes": "4745271"
},
{
"name": "Shell",
"bytes": "98025"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add a delivery ``status`` field ('P' Pending / 'S' Sent) to Message."""
    dependencies = [
        ('msgs', '0002_auto_20150102_1007'),
    ]
    operations = [
        migrations.AddField(
            model_name='message',
            name='status',
            # default='S' with preserve_default=False is Django's one-off
            # default: existing rows are filled with 'S', and the default is
            # dropped from the field definition afterwards.
            field=models.CharField(default='S', help_text='Current status of this message', max_length=1, verbose_name='Status', choices=[('P', 'Pending'), ('S', 'Sent')]),
            preserve_default=False,
        ),
    ]
|
{
"content_hash": "150d9bf92545822e8f38ac8b32a7626b",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 172,
"avg_line_length": 28,
"alnum_prop": 0.5958646616541353,
"repo_name": "rapidpro/chatpro",
"id": "0f202dea7f4c278aeb0e7337249b660a1add849b",
"size": "556",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chatpro/msgs/migrations/0003_message_status.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "2638"
},
{
"name": "CoffeeScript",
"bytes": "12697"
},
{
"name": "HTML",
"bytes": "22196"
},
{
"name": "Python",
"bytes": "142459"
}
],
"symlink_target": ""
}
|
import io

import markdown
def load_page(name):
    """Load a wiki page from pages/<name>.md and render it to HTML.

    Fixes: the file handle was never closed; a bare ``except:`` swallowed
    every error (including NameError/KeyboardInterrupt); and
    ``str.read().decode()`` was python-2-only — ``io.open`` with an encoding
    works on both 2 and 3.

    :param name: page name (used as the markdown file's basename)
    :returns: dict with the page name, raw markdown body, and UTF-8 HTML
    """
    try:
        with io.open("pages/" + name + ".md", encoding="utf-8") as page_file:
            body = page_file.read()
    except IOError:
        # Missing/unreadable page falls back to a placeholder body.
        body = u'Empty Page'
    body_html = markdown.markdown(body).encode('utf-8')
    return dict(name=name, body=body, body_html=body_html)
def load_words():
    """Load the list of page-name lines from the index file.

    Fixes: ``except Exception,e`` is python-2-only syntax (a SyntaxError on
    python 3) with an unused binding; the file handle was never closed; and
    the catch is narrowed from Exception to IOError.

    :returns: dict with 'words' mapped to the index lines, or the fallback
        string u"No Words" when the index cannot be read (note: callers see a
        string, not a list, in that case — preserved from the original)
    """
    try:
        with open("pages/Index.md") as index_file:
            words = index_file.readlines()
    except IOError:
        words = u"No Words"
    return dict(words=words)
def save_page(name, body):
    """Write *body* to pages/<name>.md and append the name to the index file.

    :param name: page name (markdown file basename, also added to the index)
    :param body: page content to write
    """
    page_path = "pages/" + name + ".md"
    with open(page_path, 'w') as page_file:
        page_file.write(body)
    with open("pages/Index.md", 'a') as index_file:
        index_file.write('\n' + name)
|
{
"content_hash": "86e60c01a8160c3c2d1fa1336c16ceeb",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 61,
"avg_line_length": 23.2,
"alnum_prop": 0.596551724137931,
"repo_name": "xsank/bottle",
"id": "855d11a2a262982441ea352f306a7a06119202d1",
"size": "580",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "brick/example/util.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "250"
},
{
"name": "Python",
"bytes": "52035"
}
],
"symlink_target": ""
}
|
"""
Onshape REST API
The Onshape REST API consumed by all clients. # noqa: E501
The version of the OpenAPI document: 1.113
Contact: api-support@onshape.zendesk.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
import sys # noqa: F401
import six # noqa: F401
import nulltype # noqa: F401
from onshape_client.oas.model_utils import ( # noqa: F401
ModelComposed,
ModelNormal,
ModelSimple,
date,
datetime,
file_type,
int,
none_type,
str,
validate_get_composed_info,
)
class BTFlatSheetMetalFilter3018AllOf(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """
    # Enum values permitted for the "allows" property.
    allowed_values = {
        ("allows",): {
            "MODEL_ONLY": "MODEL_ONLY",
            "FLATTENED_ONLY": "FLATTENED_ONLY",
            "MODEL_AND_FLATTENED": "MODEL_AND_FLATTENED",
            "UNKNOWN": "UNKNOWN",
        },
    }
    # No length/range/regex validations are defined for this model.
    validations = {}
    additional_properties_type = None
    @staticmethod
    def openapi_types():
        """
        This must be a class method so a model may have properties that are
        of type self, this ensures that we don't create a cyclic import
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            "allows": (str,),  # noqa: E501
            "bt_type": (str,),  # noqa: E501
        }
    @staticmethod
    def discriminator():
        # This model has no discriminator field.
        return None
    # Maps python attribute names to their JSON keys.
    attribute_map = {
        "allows": "allows",  # noqa: E501
        "bt_type": "btType",  # noqa: E501
    }
    @staticmethod
    def _composed_schemas():
        # Not a composed (allOf/oneOf/anyOf) wrapper itself.
        return None
    required_properties = set(
        [
            "_data_store",
            "_check_type",
            "_from_server",
            "_path_to_item",
            "_configuration",
        ]
    )
    def __init__(
        self,
        _check_type=True,
        _from_server=False,
        _path_to_item=(),
        _configuration=None,
        **kwargs
    ):  # noqa: E501
        """bt_flat_sheet_metal_filter3018_all_of.BTFlatSheetMetalFilter3018AllOf - a model defined in OpenAPI
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _from_server (bool): True if the data is from the server
                                False if the data is from the client (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            allows (str): [optional]  # noqa: E501
            bt_type (str): [optional]  # noqa: E501
        """
        self._data_store = {}
        self._check_type = _check_type
        self._from_server = _from_server
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        for var_name, var_value in six.iteritems(kwargs):
            if (
                var_name not in self.attribute_map
                and self._configuration is not None
                and self._configuration.discard_unknown_keys
                and self.additional_properties_type is None
            ):
                # discard variable.
                continue
            setattr(self, var_name, var_value)
|
{
"content_hash": "d7a28c8b44738b4d92eeffc1ab6c8545",
"timestamp": "",
"source": "github",
"line_count": 151,
"max_line_length": 109,
"avg_line_length": 32.854304635761586,
"alnum_prop": 0.5692400725660149,
"repo_name": "onshape-public/onshape-clients",
"id": "fd3a153b1993afe86d8cce7a21275bf076d3a428",
"size": "4978",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/onshape_client/oas/models/bt_flat_sheet_metal_filter3018_all_of.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4873"
},
{
"name": "Go",
"bytes": "59674"
},
{
"name": "HTML",
"bytes": "3851790"
},
{
"name": "JavaScript",
"bytes": "2217"
},
{
"name": "Makefile",
"bytes": "559"
},
{
"name": "Python",
"bytes": "7560009"
},
{
"name": "Shell",
"bytes": "3475"
},
{
"name": "TypeScript",
"bytes": "1412661"
}
],
"symlink_target": ""
}
|
import ast
from py4j.java_gateway import *
# Spark imports
from pyspark.conf import SparkConf
from pyspark.context import SparkContext
from pyspark.sql import *
from pyspark.sql.functions import UserDefinedFunction
from sparklingml.transformation_functions import *
# Hack to allow people to hook in more easily: if an optional
# `user_functions` module is importable, pull its names into this module
# and let it register extra functions via setup_user(). Missing module is
# fine and silently ignored.
# NOTE(review): the bare ImportError handler also swallows an ImportError
# raised *inside* setup_user() — confirm that is intentional.
try:
    from user_functions import *
    setup_user()
except ImportError:
    pass
# This class is used to allow the Scala process to call into Python
# It may not run in the same Python process as your regular Python
# shell if you are running PySpark normally.
class PythonRegistrationProvider(object):
    """
    Provide an entry point for Scala to call to register functions.

    Instances are handed to Py4J as a callback object; the JVM side invokes
    registerFunction() to look up a Python UDF by name and get back the
    corresponding Java UDF handle.
    """

    def __init__(self, gateway):
        # Py4J gateway used both for JVM access and for building the
        # SparkContext lazily on first registration.
        self.gateway = gateway
        self._sc = None
        self._session = None
        # Monotonic counter so repeated registrations of the same function
        # get distinct registration names.
        self._count = 0

    def registerFunction(self, ssc, jsession, function_name, params):
        """Look up `function_name` in functions_info and return its Java UDF.

        ssc: Java SparkContext handle; jsession: Java SparkSession handle
        (currently unused — TODO confirm it can be dropped on the Scala side).
        `params` is a Python-literal string of constructor arguments, parsed
        safely with ast.literal_eval. Returns the Java UDF object, or None
        when the function is unknown.
        """
        jvm = self.gateway.jvm
        # If we don't have a reference to a running SparkContext
        # Get the SparkContext from the provided SparkSession.
        if not self._sc:
            master = ssc.master()
            jsc = jvm.org.apache.spark.api.java.JavaSparkContext(ssc)
            jsparkConf = ssc.conf()
            sparkConf = SparkConf(_jconf=jsparkConf)
            self._sc = SparkContext(
                master=master,
                conf=sparkConf,
                gateway=self.gateway,
                jsc=jsc)
            self._session = SparkSession.builder.getOrCreate()
        if function_name in functions_info:
            function_info = functions_info[function_name]
            if params:
                # literal_eval (not eval) so only Python literals are accepted.
                evaledParams = ast.literal_eval(params)
            else:
                evaledParams = []
            func = function_info.func(*evaledParams)
            ret_type = function_info.returnType()
            self._count = self._count + 1
            registration_name = function_name + str(self._count)
            udf = UserDefinedFunction(func, ret_type, registration_name)
            # Used to allow non-default (e.g. Arrow) UDFS
            udf.evalType = function_info.evalType()
            judf = udf._judf
            return judf
        else:
            print("Could not find function")
            # We do this rather than raising an exception since Py4J debugging
            # is rough and we can check it.
            return None

    class Java:
        # Py4J metadata: the Java interface this object implements so the
        # JVM can call back into it.
        package = "com.sparklingpandas.sparklingml.util.python"
        className = "PythonRegisterationProvider"
        implements = [package + "." + className]
if __name__ == "__main__":
    def spark_jvm_imports(jvm):
        """Import the JVM classes PySpark expects to find on the gateway."""
        java_import(jvm, "org.apache.spark.SparkConf")
        java_import(jvm, "org.apache.spark.api.java.*")
        java_import(jvm, "org.apache.spark.api.python.*")
        java_import(jvm, "org.apache.spark.ml.python.*")
        java_import(jvm, "org.apache.spark.mllib.api.python.*")
        # TODO(davies): move into sql
        java_import(jvm, "org.apache.spark.sql.*")
        java_import(jvm, "org.apache.spark.sql.hive.*")
        java_import(jvm, "scala.Tuple2")

    import os
    # Only start the callback bridge when launched by the Scala side.
    if "SPARKLING_ML_SPECIFIC" in os.environ:
        # Py4J setup work so we can talk to the JVM that launched us.
        gateway_port = int(os.environ["PYSPARK_GATEWAY_PORT"])
        gateway = JavaGateway(
            GatewayClient(port=gateway_port),
            # TODO: handle dynamic port binding here correctly.
            callback_server_parameters=CallbackServerParameters(port=0),
            auto_convert=True)
        # retrieve the port on which the python callback server was bound to.
        python_port = gateway.get_callback_server().get_listening_port()
        # bind the callback server on the java side to the new python_port
        gateway.java_gateway_server.resetCallbackClient(
            gateway.java_gateway_server.getCallbackClient().getAddress(),
            python_port)
        # Create our registration provider interface for Py4J to call into
        provider = PythonRegistrationProvider(gateway)
        # Sparkling pandas specific imports
        jvm = gateway.jvm
        java_import(jvm, "com.sparklingpandas.sparklingml")
        java_import(jvm, "com.sparklingpandas.sparklingml.util.python")
        # We need to re-do the Spark gateway imports as well
        spark_jvm_imports(jvm)
        python_utils = jvm.com.sparklingpandas.sparklingml.util.python
        pythonRegistrationObj = python_utils.PythonRegistration
        # (Removed a leftover debug line that built and discarded an unused
        # JVM SparkConf, and an unreachable print after the loop below.)
        pythonRegistrationObj.register(provider)
        # Busy loop so we don't exit. This is also kind of a hack.
        import time
        while True:
            time.sleep(1)
|
{
"content_hash": "36b102376c55c1fdef3e601548141a03",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 78,
"avg_line_length": 39.891666666666666,
"alnum_prop": 0.6335909755588051,
"repo_name": "sparklingpandas/sparklingml",
"id": "db3211758f15ff4d545169f0d03098e3ab19eecf",
"size": "4787",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sparklingml/startup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "108272"
},
{
"name": "Scala",
"bytes": "102281"
},
{
"name": "Shell",
"bytes": "3314"
}
],
"symlink_target": ""
}
|
"""
Offer state listening automation rules.
For more details about this automation rule, please refer to the documentation
at https://home-assistant.io/docs/automation/trigger/#state-trigger
"""
import asyncio
import voluptuous as vol
from homeassistant.core import callback
import homeassistant.util.dt as dt_util
from homeassistant.const import MATCH_ALL, CONF_PLATFORM
from homeassistant.helpers.event import (
async_track_state_change, async_track_point_in_utc_time)
import homeassistant.helpers.config_validation as cv
# Configuration keys accepted by the `state` trigger platform.
CONF_ENTITY_ID = 'entity_id'
CONF_FROM = 'from'
CONF_TO = 'to'
CONF_FOR = 'for'

# Voluptuous schema for trigger config. `for` only makes sense together
# with `to`, hence the key_dependency wrapper.
TRIGGER_SCHEMA = vol.All(
    vol.Schema({
        vol.Required(CONF_PLATFORM): 'state',
        vol.Required(CONF_ENTITY_ID): cv.entity_ids,
        # These are str on purpose. Want to catch YAML conversions
        CONF_FROM: str,
        CONF_TO: str,
        CONF_FOR: vol.All(cv.time_period, cv.positive_timedelta),
    }),
    cv.key_dependency(CONF_FOR, CONF_TO),
)
@asyncio.coroutine
def async_trigger(hass, config, action):
    """Listen for state changes based on configuration.

    Returns a callable that detaches all listeners. When CONF_FOR is set,
    the action fires only after the target state has been held for the
    configured duration; an intermediate change to a different state
    cancels the pending fire.
    """
    entity_id = config.get(CONF_ENTITY_ID)
    from_state = config.get(CONF_FROM, MATCH_ALL)
    to_state = config.get(CONF_TO, MATCH_ALL)
    time_delta = config.get(CONF_FOR)
    # Unsub handles for the delayed-fire timer and its cancel watcher;
    # both live across calls via `nonlocal` in the closures below.
    async_remove_state_for_cancel = None
    async_remove_state_for_listener = None
    match_all = (from_state == MATCH_ALL and to_state == MATCH_ALL)

    @callback
    def clear_listener():
        """Clear all unsub listener."""
        nonlocal async_remove_state_for_cancel, async_remove_state_for_listener
        # pylint: disable=not-callable
        if async_remove_state_for_listener is not None:
            async_remove_state_for_listener()
            async_remove_state_for_listener = None
        if async_remove_state_for_cancel is not None:
            async_remove_state_for_cancel()
            async_remove_state_for_cancel = None

    @callback
    def state_automation_listener(entity, from_s, to_s):
        """Listen for state changes and calls action."""
        nonlocal async_remove_state_for_cancel, async_remove_state_for_listener

        def call_action():
            """Call action with right context."""
            hass.async_run_job(action, {
                'trigger': {
                    'platform': 'state',
                    'entity_id': entity,
                    'from_state': from_s,
                    'to_state': to_s,
                    'for': time_delta,
                }
            })

        # Ignore changes to state attributes if from/to is in use
        # (same last_changed means only attributes changed, not the state).
        if (not match_all and from_s is not None and to_s is not None and
                from_s.last_changed == to_s.last_changed):
            return
        # No CONF_FOR configured: fire immediately.
        if time_delta is None:
            call_action()
            return

        @callback
        def state_for_listener(now):
            """Fire on state changes after a delay and calls action."""
            nonlocal async_remove_state_for_listener
            # Null out our own handle first so clear_listener() does not
            # invoke this (already-fired) timer's unsub.
            async_remove_state_for_listener = None
            clear_listener()
            call_action()

        @callback
        def state_for_cancel_listener(entity, inner_from_s, inner_to_s):
            """Fire on changes and cancel for listener if changed."""
            if inner_to_s.state == to_s.state:
                return
            clear_listener()

        # cleanup previous listener
        clear_listener()
        # Arm the delayed fire and a watcher that cancels it if the entity
        # leaves the target state before the delay elapses.
        async_remove_state_for_listener = async_track_point_in_utc_time(
            hass, state_for_listener, dt_util.utcnow() + time_delta)
        async_remove_state_for_cancel = async_track_state_change(
            hass, entity, state_for_cancel_listener)

    unsub = async_track_state_change(
        hass, entity_id, state_automation_listener, from_state, to_state)

    @callback
    def async_remove():
        """Remove state listeners async."""
        unsub()
        clear_listener()

    return async_remove
|
{
"content_hash": "55bba4cf7d256a3568771cfe1abd8c18",
"timestamp": "",
"source": "github",
"line_count": 118,
"max_line_length": 79,
"avg_line_length": 33.54237288135593,
"alnum_prop": 0.6154623547246084,
"repo_name": "MungoRae/home-assistant",
"id": "8ad5c40bb80d27979443748615824c438b64e7ef",
"size": "3958",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "homeassistant/components/automation/state.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13788"
},
{
"name": "HTML",
"bytes": "1686638"
},
{
"name": "JavaScript",
"bytes": "15192"
},
{
"name": "Python",
"bytes": "7266062"
},
{
"name": "Ruby",
"bytes": "517"
},
{
"name": "Shell",
"bytes": "15114"
}
],
"symlink_target": ""
}
|
"""SQLAlchemy models for Social Auth"""
import base64
import six
import json
from sqlalchemy.exc import IntegrityError
from sqlalchemy.types import PickleType, Text
from sqlalchemy import Column, Integer, String
from sqlalchemy.schema import UniqueConstraint
from social.storage.base import UserMixin, AssociationMixin, NonceMixin, \
CodeMixin, BaseStorage
# JSON type field
class JSONType(PickleType):
    """PickleType variant that stores values as JSON in a Text column."""
    impl = Text

    def __init__(self, *args, **kwargs):
        # Always serialize with the json module, overriding any caller value.
        kwargs.update(pickler=json)
        super(JSONType, self).__init__(*args, **kwargs)
class SQLAlchemyMixin(object):
    """Shared persistence helpers for the SQLAlchemy storage models."""

    # Subclasses may flip this off to defer commits to an outer manager.
    COMMIT_SESSION = True

    @classmethod
    def _session(cls):
        """Return the active session; concrete storage must provide this."""
        raise NotImplementedError('Implement in subclass')

    @classmethod
    def _query(cls):
        """Return a query over this model."""
        return cls._session().query(cls)

    @classmethod
    def _new_instance(cls, model, *args, **kwargs):
        """Instantiate *model* and persist it immediately."""
        instance = model(*args, **kwargs)
        return cls._save_instance(instance)

    @classmethod
    def _save_instance(cls, instance):
        """Add *instance* to the session, committing when configured to."""
        cls._session().add(instance)
        if cls.COMMIT_SESSION:
            cls._session().commit()
        return instance

    def save(self):
        """Persist this instance."""
        self._save_instance(self)
class SQLAlchemyUserMixin(SQLAlchemyMixin, UserMixin):
    """Social Auth association model"""
    __tablename__ = 'social_auth_usersocialauth'
    __table_args__ = (UniqueConstraint('provider', 'uid'),)
    id = Column(Integer, primary_key=True)
    provider = Column(String(32))
    extra_data = Column(JSONType)
    # Placeholders; presumably concrete storage classes override these with
    # real columns/relationships (uid column, FK + relation to the user
    # model) — TODO confirm against the framework-specific subclasses.
    uid = None
    user_id = None
    user = None

    @classmethod
    def changed(cls, user):
        cls._save_instance(user)

    def set_extra_data(self, extra_data=None):
        # Persist only when the mixin reports that extra_data actually changed.
        if super(SQLAlchemyUserMixin, self).set_extra_data(extra_data):
            self._save_instance(self)

    @classmethod
    def allowed_to_disconnect(cls, user, backend_name, association_id=None):
        # A user may disconnect a backend if they keep a usable password or
        # at least one other social association.
        if association_id is not None:
            qs = cls._query().filter(cls.id != association_id)
        else:
            qs = cls._query().filter(cls.provider != backend_name)
        qs = qs.filter(cls.user == user)
        if hasattr(user, 'has_usable_password'):  # TODO
            valid_password = user.has_usable_password()
        else:
            valid_password = True
        return valid_password or qs.count() > 0

    @classmethod
    def disconnect(cls, entry):
        cls._session().delete(entry)
        try:
            cls._session().commit()
        except AssertionError:
            # NOTE(review): looks like a fallback for sessions managed by the
            # `transaction` package (which forbids direct commit) — confirm.
            import transaction
            transaction.commit()

    @classmethod
    def user_query(cls):
        return cls._session().query(cls.user_model())

    @classmethod
    def user_exists(cls, *args, **kwargs):
        """
        Return True/False if a User instance exists with the given arguments.
        Arguments are directly passed to filter() manager method.
        """
        return cls.user_query().filter_by(*args, **kwargs).count() > 0

    @classmethod
    def get_username(cls, user):
        return getattr(user, 'username', None)

    @classmethod
    def create_user(cls, *args, **kwargs):
        return cls._new_instance(cls.user_model(), *args, **kwargs)

    @classmethod
    def get_user(cls, pk):
        return cls.user_query().get(pk)

    @classmethod
    def get_users_by_email(cls, email):
        return cls.user_query().filter_by(email=email)

    @classmethod
    def get_social_auth(cls, provider, uid):
        # Some backends provide non-string uids; normalize before querying.
        if not isinstance(uid, six.string_types):
            uid = str(uid)
        try:
            return cls._query().filter_by(provider=provider,
                                          uid=uid)[0]
        except IndexError:
            return None

    @classmethod
    def get_social_auth_for_user(cls, user, provider=None, id=None):
        qs = cls._query().filter_by(user_id=user.id)
        if provider:
            qs = qs.filter_by(provider=provider)
        if id:
            qs = qs.filter_by(id=id)
        return qs

    @classmethod
    def create_social_auth(cls, user, uid, provider):
        if not isinstance(uid, six.string_types):
            uid = str(uid)
        return cls._new_instance(cls, user=user, uid=uid, provider=provider)
class SQLAlchemyNonceMixin(SQLAlchemyMixin, NonceMixin):
    """OpenID nonce storage, unique per (server_url, timestamp, salt)."""
    __tablename__ = 'social_auth_nonce'
    __table_args__ = (UniqueConstraint('server_url', 'timestamp', 'salt'),)
    id = Column(Integer, primary_key=True)
    server_url = Column(String(255))
    timestamp = Column(Integer)
    salt = Column(String(40))

    @classmethod
    def use(cls, server_url, timestamp, salt):
        """Return the matching nonce row, creating it when absent."""
        fields = {'server_url': server_url,
                  'timestamp': timestamp,
                  'salt': salt}
        try:
            return cls._query().filter_by(**fields)[0]
        except IndexError:
            return cls._new_instance(cls, **fields)
class SQLAlchemyAssociationMixin(SQLAlchemyMixin, AssociationMixin):
    """OpenID association storage, unique per (server_url, handle)."""
    __tablename__ = 'social_auth_association'
    __table_args__ = (UniqueConstraint('server_url', 'handle'),)
    id = Column(Integer, primary_key=True)
    server_url = Column(String(255))
    handle = Column(String(255))
    secret = Column(String(255))  # base64 encoded
    issued = Column(Integer)
    lifetime = Column(Integer)
    assoc_type = Column(String(64))

    @classmethod
    def store(cls, server_url, association):
        """Create or update the stored association for *server_url*."""
        # Don't use get_or_create because issued cannot be null
        try:
            assoc = cls._query().filter_by(server_url=server_url,
                                           handle=association.handle)[0]
        except IndexError:
            assoc = cls(server_url=server_url,
                        handle=association.handle)
        # base64.encodestring() was deprecated since Python 3.1 and removed
        # in 3.9; prefer encodebytes() and fall back only where it does not
        # exist (Python 2).
        encode = getattr(base64, 'encodebytes', None) or base64.encodestring
        assoc.secret = encode(association.secret)
        assoc.issued = association.issued
        assoc.lifetime = association.lifetime
        assoc.assoc_type = association.assoc_type
        cls._save_instance(assoc)

    @classmethod
    def get(cls, *args, **kwargs):
        """Return a query of associations matching the given filters."""
        return cls._query().filter_by(*args, **kwargs)

    @classmethod
    def remove(cls, ids_to_delete):
        """Bulk-delete associations by primary key."""
        # 'fetch' keeps any in-session objects consistent with the delete.
        cls._query().filter(cls.id.in_(ids_to_delete)).delete(
            synchronize_session='fetch'
        )
class SQLAlchemyCodeMixin(SQLAlchemyMixin, CodeMixin):
    """Email-validation code storage, unique per (code, email)."""
    __tablename__ = 'social_auth_code'
    __table_args__ = (UniqueConstraint('code', 'email'),)
    id = Column(Integer, primary_key=True)
    email = Column(String(200))
    code = Column(String(32), index=True)

    @classmethod
    def get_code(cls, code):
        """Return the row holding *code*, or None when not found."""
        matching = cls._query().filter(cls.code == code)
        return matching.first()
class BaseSQLAlchemyStorage(BaseStorage):
    """Bundle of the SQLAlchemy storage mixins used by the auth pipeline."""
    user = SQLAlchemyUserMixin
    nonce = SQLAlchemyNonceMixin
    association = SQLAlchemyAssociationMixin
    code = SQLAlchemyCodeMixin

    @classmethod
    def is_integrity_error(cls, exception):
        """Exact-type check against IntegrityError (subclasses excluded)."""
        return type(exception) is IntegrityError
|
{
"content_hash": "968b221d5974673aa7d7c353efd2c8b9",
"timestamp": "",
"source": "github",
"line_count": 221,
"max_line_length": 77,
"avg_line_length": 31.07239819004525,
"alnum_prop": 0.6193388670452891,
"repo_name": "garrett-schlesinger/python-social-auth",
"id": "f1934c43405bf8dca60ec80eca0381c6ce037afe",
"size": "6867",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "social/storage/sqlalchemy_orm.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "54"
},
{
"name": "Python",
"bytes": "594253"
},
{
"name": "Shell",
"bytes": "122"
}
],
"symlink_target": ""
}
|
from openstack.network.v2 import qos_policy as _qos_policy
from openstack.tests.functional import base
class TestQoSPolicy(base.BaseFunctionalTest):
    """Functional CRUD tests for Network v2 QoS policies.

    Runs against a live cloud through ``self.conn``; every test operates on
    a policy created in setUp() and removed in tearDown().
    """

    # Fixture state shared by the tests; QOS_POLICY_ID is set in setUp().
    QOS_POLICY_ID = None
    IS_SHARED = False
    IS_DEFAULT = False
    RULES = []
    QOS_POLICY_DESCRIPTION = "QoS policy description"

    def setUp(self):
        """Create a uniquely-named QoS policy for this test."""
        super(TestQoSPolicy, self).setUp()
        self.QOS_POLICY_NAME = self.getUniqueString()
        self.QOS_POLICY_NAME_UPDATED = self.getUniqueString()
        qos = self.conn.network.create_qos_policy(
            description=self.QOS_POLICY_DESCRIPTION,
            name=self.QOS_POLICY_NAME,
            shared=self.IS_SHARED,
            is_default=self.IS_DEFAULT,
        )
        assert isinstance(qos, _qos_policy.QoSPolicy)
        self.assertEqual(self.QOS_POLICY_NAME, qos.name)
        self.QOS_POLICY_ID = qos.id

    def tearDown(self):
        """Delete the policy; delete returns None on success."""
        sot = self.conn.network.delete_qos_policy(self.QOS_POLICY_ID)
        self.assertIsNone(sot)
        super(TestQoSPolicy, self).tearDown()

    def test_find(self):
        """Policy is findable by name."""
        sot = self.conn.network.find_qos_policy(self.QOS_POLICY_NAME)
        self.assertEqual(self.QOS_POLICY_ID, sot.id)

    def test_get(self):
        """Fetched policy carries the attributes set at creation."""
        sot = self.conn.network.get_qos_policy(self.QOS_POLICY_ID)
        self.assertEqual(self.QOS_POLICY_NAME, sot.name)
        self.assertEqual(self.IS_SHARED, sot.is_shared)
        self.assertEqual(self.RULES, sot.rules)
        self.assertEqual(self.QOS_POLICY_DESCRIPTION, sot.description)
        self.assertEqual(self.IS_DEFAULT, sot.is_default)

    def test_list(self):
        """Policy appears in the listing."""
        names = [o.name for o in self.conn.network.qos_policies()]
        self.assertIn(self.QOS_POLICY_NAME, names)

    def test_update(self):
        """Renaming the policy is reflected in the returned resource."""
        sot = self.conn.network.update_qos_policy(
            self.QOS_POLICY_ID,
            name=self.QOS_POLICY_NAME_UPDATED)
        self.assertEqual(self.QOS_POLICY_NAME_UPDATED, sot.name)

    def test_set_tags(self):
        """Tags can be set and cleared; each change is re-read to verify."""
        sot = self.conn.network.get_qos_policy(self.QOS_POLICY_ID)
        self.assertEqual([], sot.tags)
        self.conn.network.set_tags(sot, ['blue'])
        sot = self.conn.network.get_qos_policy(self.QOS_POLICY_ID)
        self.assertEqual(['blue'], sot.tags)
        self.conn.network.set_tags(sot, [])
        sot = self.conn.network.get_qos_policy(self.QOS_POLICY_ID)
        self.assertEqual([], sot.tags)
|
{
"content_hash": "ef0cf742e4fcdb2820b1d990e5614bff",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 70,
"avg_line_length": 36.984375,
"alnum_prop": 0.648077735530207,
"repo_name": "dtroyer/python-openstacksdk",
"id": "df081b8ed8d6aa159314e31e3ac5cff78a331239",
"size": "2914",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "openstack/tests/functional/network/v2/test_qos_policy.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "3803161"
},
{
"name": "Shell",
"bytes": "9027"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import datetime
import logging
import os
import unittest
import time
from airflow import models, settings, AirflowException
from airflow.exceptions import AirflowSkipException
from airflow.models import DAG, TaskInstance as TI
from airflow.models import State as ST
from airflow.models import DagModel, DagStat
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators.bash_operator import BashOperator
from airflow.operators.python_operator import PythonOperator
from airflow.operators.python_operator import ShortCircuitOperator
from airflow.ti_deps.deps.trigger_rule_dep import TriggerRuleDep
from airflow.utils.state import State
from mock import patch
from nose_parameterized import parameterized
# Fixed execution date shared by the tests below.
DEFAULT_DATE = datetime.datetime(2016, 1, 1)
# Directory holding DAG definition files used by DagBag tests.
TEST_DAGS_FOLDER = os.path.join(
    os.path.dirname(os.path.realpath(__file__)), 'dags')
class DagTest(unittest.TestCase):
    """Unit tests for the DAG model: params handling, context-manager
    behavior, topological sort, counting, and template rendering."""

    def test_parms_not_passed_is_empty_dict(self):
        """
        Test that when 'params' is _not_ passed to a new Dag, that the params
        attribute is set to an empty dictionary.
        """
        dag = models.DAG('test-dag')
        self.assertEqual(dict, type(dag.params))
        self.assertEqual(0, len(dag.params))

    def test_params_passed_and_params_in_default_args_no_override(self):
        """
        Test that when 'params' exists as a key passed to the default_args dict
        in addition to params being passed explicitly as an argument to the
        dag, that the 'params' key of the default_args dict is merged with the
        dict of the params argument.
        """
        params1 = {'parameter1': 1}
        params2 = {'parameter2': 2}
        dag = models.DAG('test-dag',
                         default_args={'params': params1},
                         params=params2)
        params_combined = params1.copy()
        params_combined.update(params2)
        self.assertEqual(params_combined, dag.params)

    def test_dag_as_context_manager(self):
        """
        Test DAG as a context manager.
        When used as a context manager, Operators are automatically added to
        the DAG (unless they specifiy a different DAG)
        """
        dag = DAG(
            'dag',
            start_date=DEFAULT_DATE,
            default_args={'owner': 'owner1'})
        dag2 = DAG(
            'dag2',
            start_date=DEFAULT_DATE,
            default_args={'owner': 'owner2'})
        with dag:
            op1 = DummyOperator(task_id='op1')
            # An explicit dag= argument wins over the context manager.
            op2 = DummyOperator(task_id='op2', dag=dag2)
        self.assertIs(op1.dag, dag)
        self.assertEqual(op1.owner, 'owner1')
        self.assertIs(op2.dag, dag2)
        self.assertEqual(op2.owner, 'owner2')
        with dag2:
            op3 = DummyOperator(task_id='op3')
        self.assertIs(op3.dag, dag2)
        self.assertEqual(op3.owner, 'owner2')
        # Nested context managers: the innermost active DAG wins.
        with dag:
            with dag2:
                op4 = DummyOperator(task_id='op4')
            op5 = DummyOperator(task_id='op5')
        self.assertIs(op4.dag, dag2)
        self.assertIs(op5.dag, dag)
        self.assertEqual(op4.owner, 'owner2')
        self.assertEqual(op5.owner, 'owner1')
        with DAG('creating_dag_in_cm', start_date=DEFAULT_DATE) as dag:
            DummyOperator(task_id='op6')
        self.assertEqual(dag.dag_id, 'creating_dag_in_cm')
        self.assertEqual(dag.tasks[0].task_id, 'op6')

    def test_dag_topological_sort(self):
        """Topological sort yields every valid dependency-respecting order;
        only the forced positions are asserted exactly."""
        dag = DAG(
            'dag',
            start_date=DEFAULT_DATE,
            default_args={'owner': 'owner1'})
        # A -> B
        # A -> C -> D
        # ordered: B, D, C, A or D, B, C, A or D, C, B, A
        with dag:
            op1 = DummyOperator(task_id='A')
            op2 = DummyOperator(task_id='B')
            op3 = DummyOperator(task_id='C')
            op4 = DummyOperator(task_id='D')
            op1.set_upstream([op2, op3])
            op3.set_upstream(op4)
        topological_list = dag.topological_sort()
        logging.info(topological_list)
        tasks = [op2, op3, op4]
        self.assertTrue(topological_list[0] in tasks)
        tasks.remove(topological_list[0])
        self.assertTrue(topological_list[1] in tasks)
        tasks.remove(topological_list[1])
        self.assertTrue(topological_list[2] in tasks)
        tasks.remove(topological_list[2])
        self.assertTrue(topological_list[3] == op1)
        dag = DAG(
            'dag',
            start_date=DEFAULT_DATE,
            default_args={'owner': 'owner1'})
        # C -> (A u B) -> D
        # C -> E
        # ordered: E | D, A | B, C
        with dag:
            op1 = DummyOperator(task_id='A')
            op2 = DummyOperator(task_id='B')
            op3 = DummyOperator(task_id='C')
            op4 = DummyOperator(task_id='D')
            op5 = DummyOperator(task_id='E')
            op1.set_downstream(op3)
            op2.set_downstream(op3)
            op1.set_upstream(op4)
            op2.set_upstream(op4)
            op5.set_downstream(op3)
        topological_list = dag.topological_sort()
        logging.info(topological_list)
        set1 = [op4, op5]
        self.assertTrue(topological_list[0] in set1)
        set1.remove(topological_list[0])
        set2 = [op1, op2]
        set2.extend(set1)
        self.assertTrue(topological_list[1] in set2)
        set2.remove(topological_list[1])
        self.assertTrue(topological_list[2] in set2)
        set2.remove(topological_list[2])
        self.assertTrue(topological_list[3] in set2)
        self.assertTrue(topological_list[4] == op3)
        # Empty DAG sorts to an empty tuple.
        dag = DAG(
            'dag',
            start_date=DEFAULT_DATE,
            default_args={'owner': 'owner1'})
        self.assertEquals(tuple(), dag.topological_sort())

    def test_get_num_task_instances(self):
        """get_num_task_instances filters correctly by task id and state."""
        test_dag_id = 'test_get_num_task_instances_dag'
        test_task_id = 'task_1'
        test_dag = DAG(dag_id=test_dag_id, start_date=DEFAULT_DATE)
        test_task = DummyOperator(task_id=test_task_id, dag=test_dag)
        # Four instances of the same task at distinct execution dates.
        ti1 = TI(task=test_task, execution_date=DEFAULT_DATE)
        ti1.state = None
        ti2 = TI(task=test_task, execution_date=DEFAULT_DATE + datetime.timedelta(days=1))
        ti2.state = State.RUNNING
        ti3 = TI(task=test_task, execution_date=DEFAULT_DATE + datetime.timedelta(days=2))
        ti3.state = State.QUEUED
        ti4 = TI(task=test_task, execution_date=DEFAULT_DATE + datetime.timedelta(days=3))
        ti4.state = State.RUNNING
        session = settings.Session()
        session.merge(ti1)
        session.merge(ti2)
        session.merge(ti3)
        session.merge(ti4)
        session.commit()
        self.assertEqual(0, DAG.get_num_task_instances(test_dag_id, ['fakename'],
            session=session))
        self.assertEqual(4, DAG.get_num_task_instances(test_dag_id, [test_task_id],
            session=session))
        self.assertEqual(4, DAG.get_num_task_instances(test_dag_id,
            ['fakename', test_task_id], session=session))
        self.assertEqual(1, DAG.get_num_task_instances(test_dag_id, [test_task_id],
            states=[None], session=session))
        self.assertEqual(2, DAG.get_num_task_instances(test_dag_id, [test_task_id],
            states=[State.RUNNING], session=session))
        self.assertEqual(3, DAG.get_num_task_instances(test_dag_id, [test_task_id],
            states=[None, State.RUNNING], session=session))
        self.assertEqual(4, DAG.get_num_task_instances(test_dag_id, [test_task_id],
            states=[None, State.QUEUED, State.RUNNING], session=session))
        session.close()

    def test_render_template_field(self):
        """Tests if render_template from a field works"""
        dag = DAG('test-dag',
                  start_date=DEFAULT_DATE)
        with dag:
            task = DummyOperator(task_id='op1')
        result = task.render_template('', '{{ foo }}', dict(foo='bar'))
        self.assertEqual(result, 'bar')

    def test_render_template_field_macro(self):
        """ Tests if render_template from a field works,
            if a custom filter was defined"""
        dag = DAG('test-dag',
                  start_date=DEFAULT_DATE,
                  user_defined_macros = dict(foo='bar'))
        with dag:
            task = DummyOperator(task_id='op1')
        result = task.render_template('', '{{ foo }}', dict())
        self.assertEqual(result, 'bar')

    def test_user_defined_filters(self):
        """A user-defined filter is registered in the DAG's Jinja env."""
        def jinja_udf(name):
            return 'Hello %s' %name
        dag = models.DAG('test-dag',
                         start_date=DEFAULT_DATE,
                         user_defined_filters=dict(hello=jinja_udf))
        jinja_env = dag.get_template_env()
        self.assertIn('hello', jinja_env.filters)
        self.assertEqual(jinja_env.filters['hello'], jinja_udf)

    def test_render_template_field_filter(self):
        """ Tests if render_template from a field works,
            if a custom filter was defined"""
        def jinja_udf(name):
            return 'Hello %s' %name
        dag = DAG('test-dag',
                  start_date=DEFAULT_DATE,
                  user_defined_filters = dict(hello=jinja_udf))
        with dag:
            task = DummyOperator(task_id='op1')
        result = task.render_template('', "{{ 'world' | hello}}", dict())
        self.assertEqual(result, 'Hello world')
class DagStatTest(unittest.TestCase):
    """Tests for the DagStat bookkeeping table."""

    def test_dagstats_crud(self):
        """create/set_dirty/update maintain one row per DAG state and
        correct per-state run counts."""
        DagStat.create(dag_id='test_dagstats_crud')
        session = settings.Session()
        qry = session.query(DagStat).filter(DagStat.dag_id == 'test_dagstats_crud')
        # create() seeds one row for every possible DAG state.
        self.assertEqual(len(qry.all()), len(State.dag_states))
        DagStat.set_dirty(dag_id='test_dagstats_crud')
        res = qry.all()
        for stat in res:
            self.assertTrue(stat.dirty)
        # create missing
        DagStat.set_dirty(dag_id='test_dagstats_crud_2')
        qry2 = session.query(DagStat).filter(DagStat.dag_id == 'test_dagstats_crud_2')
        self.assertEqual(len(qry2.all()), len(State.dag_states))
        dag = DAG(
            'test_dagstats_crud',
            start_date=DEFAULT_DATE,
            default_args={'owner': 'owner1'})
        with dag:
            op1 = DummyOperator(task_id='A')
        now = datetime.datetime.now()
        dr = dag.create_dagrun(
            run_id='manual__' + now.isoformat(),
            execution_date=now,
            start_date=now,
            state=State.FAILED,
            external_trigger=False,
        )
        # After one FAILED run, only the FAILED row should count 1.
        DagStat.update(dag_ids=['test_dagstats_crud'])
        res = qry.all()
        for stat in res:
            if stat.state == State.FAILED:
                self.assertEqual(stat.count, 1)
            else:
                self.assertEqual(stat.count, 0)
        # A full update() clears the dirty flags everywhere.
        DagStat.update()
        res = qry2.all()
        for stat in res:
            self.assertFalse(stat.dirty)
class DagRunTest(unittest.TestCase):
def create_dag_run(self, dag, state=State.RUNNING, task_states=None, execution_date=None):
now = datetime.datetime.now()
if execution_date is None:
execution_date = now
dag_run = dag.create_dagrun(
run_id='manual__' + now.isoformat(),
execution_date=execution_date,
start_date=now,
state=state,
external_trigger=False,
)
if task_states is not None:
session = settings.Session()
for task_id, state in task_states.items():
ti = dag_run.get_task_instance(task_id)
ti.set_state(state, session)
session.close()
return dag_run
def test_id_for_date(self):
run_id = models.DagRun.id_for_date(
datetime.datetime(2015, 1, 2, 3, 4, 5, 6, None))
self.assertEqual(
'scheduled__2015-01-02T03:04:05', run_id,
'Generated run_id did not match expectations: {0}'.format(run_id))
def test_dagrun_find(self):
session = settings.Session()
now = datetime.datetime.now()
dag_id1 = "test_dagrun_find_externally_triggered"
dag_run = models.DagRun(
dag_id=dag_id1,
run_id='manual__' + now.isoformat(),
execution_date=now,
start_date=now,
state=State.RUNNING,
external_trigger=True,
)
session.add(dag_run)
dag_id2 = "test_dagrun_find_not_externally_triggered"
dag_run = models.DagRun(
dag_id=dag_id2,
run_id='manual__' + now.isoformat(),
execution_date=now,
start_date=now,
state=State.RUNNING,
external_trigger=False,
)
session.add(dag_run)
session.commit()
self.assertEqual(1, len(models.DagRun.find(dag_id=dag_id1, external_trigger=True)))
self.assertEqual(0, len(models.DagRun.find(dag_id=dag_id1, external_trigger=False)))
self.assertEqual(0, len(models.DagRun.find(dag_id=dag_id2, external_trigger=True)))
self.assertEqual(1, len(models.DagRun.find(dag_id=dag_id2, external_trigger=False)))
def test_dagrun_success_when_all_skipped(self):
"""
Tests that a DAG run succeeds when all tasks are skipped
"""
dag = DAG(
dag_id='test_dagrun_success_when_all_skipped',
start_date=datetime.datetime(2017, 1, 1)
)
dag_task1 = ShortCircuitOperator(
task_id='test_short_circuit_false',
dag=dag,
python_callable=lambda: False)
dag_task2 = DummyOperator(
task_id='test_state_skipped1',
dag=dag)
dag_task3 = DummyOperator(
task_id='test_state_skipped2',
dag=dag)
dag_task1.set_downstream(dag_task2)
dag_task2.set_downstream(dag_task3)
initial_task_states = {
'test_short_circuit_false': State.SUCCESS,
'test_state_skipped1': State.SKIPPED,
'test_state_skipped2': State.SKIPPED,
}
dag_run = self.create_dag_run(dag=dag,
state=State.RUNNING,
task_states=initial_task_states)
updated_dag_state = dag_run.update_state()
self.assertEqual(State.SUCCESS, updated_dag_state)
def test_dagrun_success_conditions(self):
session = settings.Session()
dag = DAG(
'test_dagrun_success_conditions',
start_date=DEFAULT_DATE,
default_args={'owner': 'owner1'})
# A -> B
# A -> C -> D
# ordered: B, D, C, A or D, B, C, A or D, C, B, A
with dag:
op1 = DummyOperator(task_id='A')
op2 = DummyOperator(task_id='B')
op3 = DummyOperator(task_id='C')
op4 = DummyOperator(task_id='D')
op1.set_upstream([op2, op3])
op3.set_upstream(op4)
dag.clear()
now = datetime.datetime.now()
dr = dag.create_dagrun(run_id='test_dagrun_success_conditions',
state=State.RUNNING,
execution_date=now,
start_date=now)
# op1 = root
ti_op1 = dr.get_task_instance(task_id=op1.task_id)
ti_op1.set_state(state=State.SUCCESS, session=session)
ti_op2 = dr.get_task_instance(task_id=op2.task_id)
ti_op3 = dr.get_task_instance(task_id=op3.task_id)
ti_op4 = dr.get_task_instance(task_id=op4.task_id)
# root is successful, but unfinished tasks
state = dr.update_state()
self.assertEqual(State.RUNNING, state)
# one has failed, but root is successful
ti_op2.set_state(state=State.FAILED, session=session)
ti_op3.set_state(state=State.SUCCESS, session=session)
ti_op4.set_state(state=State.SUCCESS, session=session)
state = dr.update_state()
self.assertEqual(State.SUCCESS, state)
# upstream dependency failed, root has not run
ti_op1.set_state(State.NONE, session)
state = dr.update_state()
self.assertEqual(State.FAILED, state)
    def test_get_task_instance_on_empty_dagrun(self):
        """
        Make sure that a proper value is returned when a dagrun has no task instances
        """
        dag = DAG(
            dag_id='test_get_task_instance_on_empty_dagrun',
            start_date=datetime.datetime(2017, 1, 1)
        )
        # The operator only needs to exist on the DAG; its handle is unused.
        dag_task1 = ShortCircuitOperator(
            task_id='test_short_circuit_false',
            dag=dag,
            python_callable=lambda: False)

        session = settings.Session()

        now = datetime.datetime.now()

        # Don't use create_dagrun since it will create the task instances too which we
        # don't want
        dag_run = models.DagRun(
            dag_id=dag.dag_id,
            run_id='manual__' + now.isoformat(),
            execution_date=now,
            start_date=now,
            state=State.RUNNING,
            external_trigger=False,
        )
        session.add(dag_run)
        session.commit()

        # No TI rows exist for this run, so the lookup should return None
        # rather than raising.
        ti = dag_run.get_task_instance('test_short_circuit_false')
        self.assertEqual(None, ti)
def test_get_latest_runs(self):
session = settings.Session()
dag = DAG(
dag_id='test_latest_runs_1',
start_date=DEFAULT_DATE)
dag_1_run_1 = self.create_dag_run(dag,
execution_date=datetime.datetime(2015, 1, 1))
dag_1_run_2 = self.create_dag_run(dag,
execution_date=datetime.datetime(2015, 1, 2))
dagruns = models.DagRun.get_latest_runs(session)
session.close()
for dagrun in dagruns:
if dagrun.dag_id == 'test_latest_runs_1':
self.assertEqual(dagrun.execution_date, datetime.datetime(2015, 1, 2))
class DagBagTest(unittest.TestCase):
    """Tests for models.DagBag: DAG discovery, parsing, caching and fileloc."""

    def test_get_existing_dag(self):
        """
        test that we're able to parse some example DAGs and retrieve them
        """
        dagbag = models.DagBag(include_examples=True)

        some_expected_dag_ids = ["example_bash_operator",
                                 "example_branch_operator"]

        for dag_id in some_expected_dag_ids:
            dag = dagbag.get_dag(dag_id)

            self.assertIsNotNone(dag)
            self.assertEqual(dag_id, dag.dag_id)

        self.assertGreaterEqual(dagbag.size(), 7)

    def test_get_non_existing_dag(self):
        """
        test that retrieving a non existing dag id returns None without crashing
        """
        dagbag = models.DagBag(include_examples=True)

        non_existing_dag_id = "non_existing_dag_id"
        self.assertIsNone(dagbag.get_dag(non_existing_dag_id))

    def test_process_file_that_contains_multi_bytes_char(self):
        """
        test that we're able to parse file that contains multi-byte char
        """
        from tempfile import NamedTemporaryFile
        f = NamedTemporaryFile()
        f.write('\u3042'.encode('utf8'))  # write multi-byte char (hiragana)
        f.flush()

        dagbag = models.DagBag(include_examples=True)
        # A file with no DAG definitions yields an empty result, not an error.
        self.assertEqual([], dagbag.process_file(f.name))

    def test_zip(self):
        """
        test the loading of a DAG within a zip file that includes dependencies
        """
        dagbag = models.DagBag()
        dagbag.process_file(os.path.join(TEST_DAGS_FOLDER, "test_zip.zip"))
        self.assertTrue(dagbag.get_dag("test_zip_dag"))

    @patch.object(DagModel, 'get_current')
    def test_get_dag_without_refresh(self, mock_dagmodel):
        """
        Test that, once a DAG is loaded, it doesn't get refreshed again if it
        hasn't been expired.
        """
        dag_id = 'example_bash_operator'

        mock_dagmodel.return_value = DagModel()
        mock_dagmodel.return_value.last_expired = None
        mock_dagmodel.return_value.fileloc = 'foo'

        class TestDagBag(models.DagBag):
            # Counts how many times the example file is (re)parsed.
            process_file_calls = 0

            def process_file(self, filepath, only_if_updated=True, safe_mode=True):
                if 'example_bash_operator.py' == os.path.basename(filepath):
                    TestDagBag.process_file_calls += 1
                # NOTE(review): the super() result is discarded here; DagBag
                # registers parsed DAGs on the instance, so the return value is
                # presumably unused by this test -- confirm.
                super(TestDagBag, self).process_file(filepath, only_if_updated, safe_mode)

        dagbag = TestDagBag(include_examples=True)

        # Should not call process_file again, since it's already loaded during init.
        self.assertEqual(1, dagbag.process_file_calls)
        self.assertIsNotNone(dagbag.get_dag(dag_id))
        self.assertEqual(1, dagbag.process_file_calls)

    def test_get_dag_fileloc(self):
        """
        Test that fileloc is correctly set when we load example DAGs,
        specifically SubDAGs.
        """
        dagbag = models.DagBag(include_examples=True)

        expected = {
            'example_bash_operator': 'example_bash_operator.py',
            'example_subdag_operator': 'example_subdag_operator.py',
            'example_subdag_operator.section-1': 'subdags/subdag.py'
        }

        for dag_id, path in expected.items():
            dag = dagbag.get_dag(dag_id)
            self.assertTrue(
                dag.fileloc.endswith('airflow/example_dags/' + path))
class TaskInstanceTest(unittest.TestCase):
    """Tests around TaskInstance/Operator behavior: DAG assignment,
    composition operators, pooling, retries, xcom and hooks."""

    def test_set_dag(self):
        """
        Test assigning Operators to Dags, including deferred assignment
        """
        dag = DAG('dag', start_date=DEFAULT_DATE)
        dag2 = DAG('dag2', start_date=DEFAULT_DATE)
        op = DummyOperator(task_id='op_1', owner='test')

        # no dag assigned
        self.assertFalse(op.has_dag())
        self.assertRaises(AirflowException, getattr, op, 'dag')

        # no improper assignment
        with self.assertRaises(TypeError):
            op.dag = 1

        op.dag = dag

        # no reassignment
        with self.assertRaises(AirflowException):
            op.dag = dag2

        # but assigning the same dag is ok
        op.dag = dag

        self.assertIs(op.dag, dag)
        self.assertIn(op, dag.tasks)

    def test_infer_dag(self):
        """Wiring a dag-less operator to one with a DAG should propagate the
        DAG; wiring across two different DAGs must fail."""
        dag = DAG('dag', start_date=DEFAULT_DATE)
        dag2 = DAG('dag2', start_date=DEFAULT_DATE)

        op1 = DummyOperator(task_id='test_op_1', owner='test')
        op2 = DummyOperator(task_id='test_op_2', owner='test')
        op3 = DummyOperator(task_id='test_op_3', owner='test', dag=dag)
        op4 = DummyOperator(task_id='test_op_4', owner='test', dag=dag2)

        # double check dags
        self.assertEqual(
            [i.has_dag() for i in [op1, op2, op3, op4]],
            [False, False, True, True])

        # can't combine operators with no dags
        self.assertRaises(AirflowException, op1.set_downstream, op2)

        # op2 should infer dag from op1
        op1.dag = dag
        op1.set_downstream(op2)
        self.assertIs(op2.dag, dag)

        # can't assign across multiple DAGs
        self.assertRaises(AirflowException, op1.set_downstream, op4)
        self.assertRaises(AirflowException, op1.set_downstream, [op3, op4])

    def test_bitshift_compose_operators(self):
        """The >> and << operators wire dependencies and, when one operand is
        a DAG, assign the DAG to the operator."""
        dag = DAG('dag', start_date=DEFAULT_DATE)
        op1 = DummyOperator(task_id='test_op_1', owner='test')
        op2 = DummyOperator(task_id='test_op_2', owner='test')
        op3 = DummyOperator(task_id='test_op_3', owner='test')
        op4 = DummyOperator(task_id='test_op_4', owner='test')
        op5 = DummyOperator(task_id='test_op_5', owner='test')

        # can't compose operators without dags
        with self.assertRaises(AirflowException):
            op1 >> op2

        dag >> op1 >> op2 << op3

        # make sure dag assignment carries through
        # using __rrshift__
        self.assertIs(op1.dag, dag)
        self.assertIs(op2.dag, dag)
        self.assertIs(op3.dag, dag)

        # op2 should be downstream of both
        self.assertIn(op2, op1.downstream_list)
        self.assertIn(op2, op3.downstream_list)

        # test dag assignment with __rlshift__
        dag << op4
        self.assertIs(op4.dag, dag)

        # dag assignment with __rrshift__
        dag >> op5
        self.assertIs(op5.dag, dag)
    @patch.object(DAG, 'concurrency_reached')
    def test_requeue_over_concurrency(self, mock_concurrency_reached):
        """A TI blocked by the DAG concurrency limit stays in state NONE."""
        mock_concurrency_reached.return_value = True

        dag = DAG(dag_id='test_requeue_over_concurrency', start_date=DEFAULT_DATE,
                  max_active_runs=1, concurrency=2)
        task = DummyOperator(task_id='test_requeue_over_concurrency_op', dag=dag)

        ti = TI(task=task, execution_date=datetime.datetime.now())
        ti.run()
        self.assertEqual(ti.state, models.State.NONE)

    @patch.object(TI, 'pool_full')
    def test_run_pooling_task(self, mock_pool_full):
        """
        test that running task update task state as without running task.
        (no dependency check in ti_deps anymore, so also -> SUCCESS)
        """
        # Mock the pool out with a full pool because the pool doesn't actually exist
        mock_pool_full.return_value = True

        dag = models.DAG(dag_id='test_run_pooling_task')
        task = DummyOperator(task_id='test_run_pooling_task_op', dag=dag,
                             pool='test_run_pooling_task_pool', owner='airflow',
                             start_date=datetime.datetime(2016, 2, 1, 0, 0, 0))
        ti = TI(
            task=task, execution_date=datetime.datetime.now())
        ti.run()
        self.assertEqual(ti.state, models.State.SUCCESS)

    @patch.object(TI, 'pool_full')
    def test_run_pooling_task_with_mark_success(self, mock_pool_full):
        """
        test that running task with mark_success param update task state as SUCCESS
        without running task.
        """
        # Mock the pool out with a full pool because the pool doesn't actually exist
        mock_pool_full.return_value = True

        dag = models.DAG(dag_id='test_run_pooling_task_with_mark_success')
        task = DummyOperator(
            task_id='test_run_pooling_task_with_mark_success_op',
            dag=dag,
            pool='test_run_pooling_task_with_mark_success_pool',
            owner='airflow',
            start_date=datetime.datetime(2016, 2, 1, 0, 0, 0))
        ti = TI(
            task=task, execution_date=datetime.datetime.now())
        # mark_success skips actual execution but still records SUCCESS
        ti.run(mark_success=True)
        self.assertEqual(ti.state, models.State.SUCCESS)
def test_run_pooling_task_with_skip(self):
"""
test that running task which returns AirflowSkipOperator will end
up in a SKIPPED state.
"""
def raise_skip_exception():
raise AirflowSkipException
dag = models.DAG(dag_id='test_run_pooling_task_with_skip')
task = PythonOperator(
task_id='test_run_pooling_task_with_skip',
dag=dag,
python_callable=raise_skip_exception,
owner='airflow',
start_date=datetime.datetime(2016, 2, 1, 0, 0, 0))
ti = TI(
task=task, execution_date=datetime.datetime.now())
ti.run()
self.assertTrue(ti.state == models.State.SKIPPED)
    def test_retry_delay(self):
        """
        Test that retry delays are respected
        """
        dag = models.DAG(dag_id='test_retry_handling')
        task = BashOperator(
            task_id='test_retry_handling_op',
            bash_command='exit 1',
            retries=1,
            retry_delay=datetime.timedelta(seconds=3),
            dag=dag,
            owner='airflow',
            start_date=datetime.datetime(2016, 2, 1, 0, 0, 0))

        def run_with_error(ti):
            # the failing command surfaces as an AirflowException; swallow it
            # so the resulting TI state can be inspected
            try:
                ti.run()
            except AirflowException:
                pass

        ti = TI(
            task=task, execution_date=datetime.datetime.now())

        # first run -- up for retry
        run_with_error(ti)
        self.assertEqual(ti.state, State.UP_FOR_RETRY)
        self.assertEqual(ti.try_number, 1)

        # second run -- still up for retry because retry_delay hasn't expired
        run_with_error(ti)
        self.assertEqual(ti.state, State.UP_FOR_RETRY)

        # third run -- failed
        time.sleep(3)
        run_with_error(ti)
        self.assertEqual(ti.state, State.FAILED)

    @patch.object(TI, 'pool_full')
    def test_retry_handling(self, mock_pool_full):
        """
        Test that task retries are handled properly
        """
        # Mock the pool with a pool with slots open since the pool doesn't actually exist
        mock_pool_full.return_value = False

        dag = models.DAG(dag_id='test_retry_handling')
        task = BashOperator(
            task_id='test_retry_handling_op',
            bash_command='exit 1',
            retries=1,
            retry_delay=datetime.timedelta(seconds=0),
            dag=dag,
            owner='airflow',
            start_date=datetime.datetime(2016, 2, 1, 0, 0, 0))

        def run_with_error(ti):
            # swallow the expected failure so TI state can be asserted
            try:
                ti.run()
            except AirflowException:
                pass

        ti = TI(
            task=task, execution_date=datetime.datetime.now())

        # first run -- up for retry
        run_with_error(ti)
        self.assertEqual(ti.state, State.UP_FOR_RETRY)
        self.assertEqual(ti.try_number, 1)

        # second run -- fail
        run_with_error(ti)
        self.assertEqual(ti.state, State.FAILED)
        self.assertEqual(ti.try_number, 2)

        # Clear the TI state since you can't run a task with a FAILED state without
        # clearing it first
        ti.set_state(None, settings.Session())

        # third run -- up for retry
        run_with_error(ti)
        self.assertEqual(ti.state, State.UP_FOR_RETRY)
        self.assertEqual(ti.try_number, 3)

        # fourth run -- fail
        run_with_error(ti)
        self.assertEqual(ti.state, State.FAILED)
        self.assertEqual(ti.try_number, 4)
    def test_next_retry_datetime(self):
        """Exponential backoff: the delay grows with try_number and is capped
        at max_retry_delay. NOTE(review): the exact sub-range offsets asserted
        below appear deterministic per task/TI -- confirm the hashing scheme
        in TaskInstance.next_retry_datetime."""
        delay = datetime.timedelta(seconds=30)
        max_delay = datetime.timedelta(minutes=60)

        dag = models.DAG(dag_id='fail_dag')
        task = BashOperator(
            task_id='task_with_exp_backoff_and_max_delay',
            bash_command='exit 1',
            retries=3,
            retry_delay=delay,
            retry_exponential_backoff=True,
            max_retry_delay=max_delay,
            dag=dag,
            owner='airflow',
            start_date=datetime.datetime(2016, 2, 1, 0, 0, 0))
        ti = TI(
            task=task, execution_date=DEFAULT_DATE)
        ti.end_date = datetime.datetime.now()

        ti.try_number = 1
        dt = ti.next_retry_datetime()
        # between 30 * 2^0.5 and 30 * 2^1 (15 and 30)
        self.assertEqual(dt, ti.end_date + datetime.timedelta(seconds=20.0))

        ti.try_number = 4
        dt = ti.next_retry_datetime()
        # between 30 * 2^2 and 30 * 2^3 (120 and 240)
        self.assertEqual(dt, ti.end_date + datetime.timedelta(seconds=181.0))

        ti.try_number = 6
        dt = ti.next_retry_datetime()
        # between 30 * 2^4 and 30 * 2^5 (480 and 960)
        self.assertEqual(dt, ti.end_date + datetime.timedelta(seconds=825.0))

        # beyond this point the computed delay would exceed max_retry_delay,
        # so it is capped
        ti.try_number = 9
        dt = ti.next_retry_datetime()
        self.assertEqual(dt, ti.end_date + max_delay)

        ti.try_number = 50
        dt = ti.next_retry_datetime()
        self.assertEqual(dt, ti.end_date + max_delay)

    def test_depends_on_past(self):
        """With depends_on_past set, a run is blocked until the previous run
        succeeded unless ignore_first_depends_on_past is passed."""
        dagbag = models.DagBag()
        dag = dagbag.get_dag('test_depends_on_past')
        dag.clear()
        task = dag.tasks[0]
        run_date = task.start_date + datetime.timedelta(days=5)
        ti = TI(task, run_date)

        # depends_on_past prevents the run
        task.run(start_date=run_date, end_date=run_date)
        ti.refresh_from_db()
        self.assertIs(ti.state, None)

        # ignore first depends_on_past to allow the run
        task.run(
            start_date=run_date,
            end_date=run_date,
            ignore_first_depends_on_past=True)
        ti.refresh_from_db()
        self.assertEqual(ti.state, State.SUCCESS)
    # Parameterized tests to check for the correct firing
    # of the trigger_rule under various circumstances
    # Numeric fields are in order:
    #   successes, skipped, failed, upstream_failed, done
    @parameterized.expand([

        #
        # Tests for all_success
        #
        ['all_success', 5, 0, 0, 0, 0, True, None, True],
        ['all_success', 2, 0, 0, 0, 0, True, None, False],
        ['all_success', 2, 0, 1, 0, 0, True, ST.UPSTREAM_FAILED, False],
        ['all_success', 2, 1, 0, 0, 0, True, ST.SKIPPED, False],
        #
        # Tests for one_success
        #
        ['one_success', 5, 0, 0, 0, 5, True, None, True],
        ['one_success', 2, 0, 0, 0, 2, True, None, True],
        ['one_success', 2, 0, 1, 0, 3, True, None, True],
        ['one_success', 2, 1, 0, 0, 3, True, None, True],
        #
        # Tests for all_failed
        #
        ['all_failed', 5, 0, 0, 0, 5, True, ST.SKIPPED, False],
        ['all_failed', 0, 0, 5, 0, 5, True, None, True],
        ['all_failed', 2, 0, 0, 0, 2, True, ST.SKIPPED, False],
        ['all_failed', 2, 0, 1, 0, 3, True, ST.SKIPPED, False],
        ['all_failed', 2, 1, 0, 0, 3, True, ST.SKIPPED, False],
        #
        # Tests for one_failed
        #
        ['one_failed', 5, 0, 0, 0, 0, True, None, False],
        ['one_failed', 2, 0, 0, 0, 0, True, None, False],
        ['one_failed', 2, 0, 1, 0, 0, True, None, True],
        ['one_failed', 2, 1, 0, 0, 3, True, None, False],
        ['one_failed', 2, 3, 0, 0, 5, True, ST.SKIPPED, False],
        #
        # Tests for done
        #
        ['all_done', 5, 0, 0, 0, 5, True, None, True],
        ['all_done', 2, 0, 0, 0, 2, True, None, False],
        ['all_done', 2, 0, 1, 0, 3, True, None, False],
        ['all_done', 2, 1, 0, 0, 3, True, None, False]
    ])
    def test_check_task_dependencies(self, trigger_rule, successes, skipped,
                                     failed, upstream_failed, done,
                                     flag_upstream_failed,
                                     expect_state, expect_completed):
        """Drive TriggerRuleDep._evaluate_trigger_rule with synthetic upstream
        counts and assert both the dep outcome and the flagged TI state."""
        start_date = datetime.datetime(2016, 2, 1, 0, 0, 0)
        dag = models.DAG('test-dag', start_date=start_date)
        downstream = DummyOperator(task_id='downstream',
                                   dag=dag, owner='airflow',
                                   trigger_rule=trigger_rule)
        # five upstream tasks feed the task under test
        for i in range(5):
            task = DummyOperator(task_id='runme_{}'.format(i),
                                 dag=dag, owner='airflow')
            task.set_downstream(downstream)
        run_date = task.start_date + datetime.timedelta(days=5)

        ti = TI(downstream, run_date)
        dep_results = TriggerRuleDep()._evaluate_trigger_rule(
            ti=ti,
            successes=successes,
            skipped=skipped,
            failed=failed,
            upstream_failed=upstream_failed,
            done=done,
            flag_upstream_failed=flag_upstream_failed)
        completed = all([dep.passed for dep in dep_results])

        self.assertEqual(completed, expect_completed)
        self.assertEqual(ti.state, expect_state)
    def test_xcom_pull_after_success(self):
        """
        tests xcom set/clear relative to a task in a 'success' rerun scenario
        """
        key = 'xcom_key'
        value = 'xcom_value'

        dag = models.DAG(dag_id='test_xcom', schedule_interval='@monthly')
        task = DummyOperator(
            task_id='test_xcom',
            dag=dag,
            pool='test_xcom',
            owner='airflow',
            start_date=datetime.datetime(2016, 6, 2, 0, 0, 0))
        exec_date = datetime.datetime.now()
        ti = TI(
            task=task, execution_date=exec_date)
        ti.run(mark_success=True)
        ti.xcom_push(key=key, value=value)
        self.assertEqual(ti.xcom_pull(task_ids='test_xcom', key=key), value)
        ti.run()
        # The second run and assert is to handle AIRFLOW-131 (don't clear on
        # prior success)
        self.assertEqual(ti.xcom_pull(task_ids='test_xcom', key=key), value)

        # Test AIRFLOW-703: Xcom shouldn't be cleared if the task doesn't
        # execute, even if dependencies are ignored
        ti.run(ignore_all_deps=True, mark_success=True)
        self.assertEqual(ti.xcom_pull(task_ids='test_xcom', key=key), value)

        # Xcom IS finally cleared once task has executed
        ti.run(ignore_all_deps=True)
        self.assertEqual(ti.xcom_pull(task_ids='test_xcom', key=key), None)

    def test_xcom_pull_different_execution_date(self):
        """
        tests xcom fetch behavior with different execution dates, using
        both xcom_pull with "include_prior_dates" and without
        """
        key = 'xcom_key'
        value = 'xcom_value'

        dag = models.DAG(dag_id='test_xcom', schedule_interval='@monthly')
        task = DummyOperator(
            task_id='test_xcom',
            dag=dag,
            pool='test_xcom',
            owner='airflow',
            start_date=datetime.datetime(2016, 6, 2, 0, 0, 0))
        exec_date = datetime.datetime.now()
        ti = TI(
            task=task, execution_date=exec_date)
        ti.run(mark_success=True)
        ti.xcom_push(key=key, value=value)
        self.assertEqual(ti.xcom_pull(task_ids='test_xcom', key=key), value)
        ti.run()
        exec_date += datetime.timedelta(days=1)
        ti = TI(
            task=task, execution_date=exec_date)
        ti.run()
        # We have set a new execution date (and did not pass in
        # 'include_prior_dates', which means this task should now have a
        # cleared xcom value
        self.assertEqual(ti.xcom_pull(task_ids='test_xcom', key=key), None)
        # We *should* get a value using 'include_prior_dates'
        self.assertEqual(ti.xcom_pull(task_ids='test_xcom',
                                      key=key,
                                      include_prior_dates=True),
                         value)
    def test_post_execute_hook(self):
        """
        Test that post_execute hook is called with the Operator's result.
        The result ('error') will cause an error to be raised and trapped.
        """

        class TestError(Exception):
            pass

        class TestOperator(PythonOperator):
            def post_execute(self, context, result):
                # raising here proves the hook received the callable's result
                if result == 'error':
                    raise TestError('expected error.')

        dag = models.DAG(dag_id='test_post_execute_dag')
        task = TestOperator(
            task_id='test_operator',
            dag=dag,
            python_callable=lambda: 'error',
            owner='airflow',
            start_date=datetime.datetime(2017, 2, 1))
        ti = TI(task=task, execution_date=datetime.datetime.now())
        with self.assertRaises(TestError):
            ti.run()
|
{
"content_hash": "73d4c7ba150981c0e6fa33605aee39b5",
"timestamp": "",
"source": "github",
"line_count": 1095,
"max_line_length": 94,
"avg_line_length": 35.80639269406392,
"alnum_prop": 0.5754692919812283,
"repo_name": "aminghadersohi/airflow",
"id": "4c2a15f4fa0f77aa68465f3b37682516cb243562",
"size": "39775",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/models.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "57001"
},
{
"name": "HTML",
"bytes": "145755"
},
{
"name": "JavaScript",
"bytes": "1364212"
},
{
"name": "Mako",
"bytes": "1037"
},
{
"name": "Python",
"bytes": "2009286"
},
{
"name": "Shell",
"bytes": "20906"
}
],
"symlink_target": ""
}
|
"""HintApp URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from predictor import views
# URL routes: admin site, the query form at the root, and the prediction
# endpoint handled by the predictor app's views.
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    url(r'^$', views.handle_query, name='index'),
    url(r'^predict/$', views.result, name='pred'),
]
|
{
"content_hash": "41c4925ba467bcec90dc31573f3f975c",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 79,
"avg_line_length": 38.608695652173914,
"alnum_prop": 0.6959459459459459,
"repo_name": "abhi98khandelwal/HINT",
"id": "310d499ecb943505385d830b2b71143056d286c5",
"size": "888",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "HintApp/HintApp/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1411"
},
{
"name": "HTML",
"bytes": "5229"
},
{
"name": "Python",
"bytes": "14897"
}
],
"symlink_target": ""
}
|
import binascii
from malwareconfig import crypto
from malwareconfig.common import Decoder
from malwareconfig.common import string_printable
class BlueBanana(Decoder):
    decoder_name = "BlueBanana"
    decoder__version = 1
    decoder_author = "@kevthehermit"
    decoder_description = "Decoder for Blue Banana"

    def __init__(self):
        self.config = {}

    def get_config(self):
        """Extract and decrypt the embedded configuration.

        The config lives in ``config.txt`` inside the sample's zip and is
        AES-encrypted twice with two static keys, each layer hex-encoded.
        The parsed result is stored on ``self.config``.
        """
        outer_key = '15af8sd4s1c5s511'
        inner_key = '4e3f5a4c592b243f'

        encrypted = self.file_info.file_from_zip('config.txt')
        # First round strips the outer layer; its last 16 bytes are dropped
        # before the second round.
        intermediate = crypto.decrypt_aes(outer_key, binascii.unhexlify(encrypted))
        plaintext = crypto.decrypt_aes(inner_key, binascii.unhexlify(intermediate[:-16]))

        fields = plaintext.decode('utf-8').split("<separator>")
        parsed = {
            'Domain': fields[0],
            'Password': fields[1],
            'Port1': fields[2],
            'Port2': fields[3],
        }
        # Some samples carry two extra install-related fields.
        if len(fields) > 4:
            parsed['InstallName'] = fields[4]
            parsed['JarName'] = fields[5]

        # Set the config to the class for use
        self.config = parsed
{
"content_hash": "c0126bd488dd56508543ef08ffda2d79",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 86,
"avg_line_length": 29.738095238095237,
"alnum_prop": 0.5804643714971978,
"repo_name": "kevthehermit/RATDecoders",
"id": "74843d1f22015db82433b11e987faa9fd157a5c1",
"size": "1249",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "malwareconfig/decoders/bluebanana.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "136439"
},
{
"name": "YARA",
"bytes": "38984"
}
],
"symlink_target": ""
}
|
'''
@author: FangSun
'''
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_state as test_state
import functools
# Woodpecker runner configuration for this test case.
_config_ = {
    'timeout' : 1000,
    'noparallel' : True
}

test_stub = test_lib.lib_get_test_stub()
test_obj_dict = test_state.TestStateDict()

'''
def test()
This document sting is a dirty solution to find test case
'''
# The callable named ``test`` is the test entry point; it is produced by
# partially applying the shared VM-offering scenario from the stub.
test = functools.partial(test_stub.vm_offering_testcase,
                         tbj=test_obj_dict,
                         test_image_name="imageName_i_u15",
                         add_cpu=True,
                         add_memory=False,
                         need_online=True)
# Wrapped as deprecated -- presumably excluded from normal runs; confirm
# against test_lib.deprecated_case.
test = test_lib.deprecated_case(test)


def error_cleanup():
    # Release any resources tracked in test_obj_dict after a failed run.
    test_lib.lib_error_cleanup(test_obj_dict)
|
{
"content_hash": "a40940b3881946f1686fc35a3c144608",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 59,
"avg_line_length": 24.870967741935484,
"alnum_prop": 0.5914396887159533,
"repo_name": "zstackio/zstack-woodpecker",
"id": "adce1ef07d7010eb2ddf30097cd8e4bf2614aac3",
"size": "771",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "integrationtest/vm/vm_offering/legacy/test_hotplugin_cpu_u15.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2356"
},
{
"name": "Go",
"bytes": "49822"
},
{
"name": "Makefile",
"bytes": "687"
},
{
"name": "Puppet",
"bytes": "875"
},
{
"name": "Python",
"bytes": "13070596"
},
{
"name": "Shell",
"bytes": "177861"
}
],
"symlink_target": ""
}
|
from infcommon.factory import Factory
from infcommon.docker_compose.docker_compose import DockerComposeService
def docker_compose_service(base_dir=None, docker_compose_file_name=None):
    """Return the DockerComposeService registered in the Factory under the
    'docker_compose_service' key, building it lazily on first use."""
    def _build():
        return DockerComposeService(base_dir, docker_compose_file_name)
    return Factory.instance('docker_compose_service', _build)
|
{
"content_hash": "d0e4543893fef46cb512c9a347724b56",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 123,
"avg_line_length": 51.833333333333336,
"alnum_prop": 0.8102893890675241,
"repo_name": "aleasoluciones/infcommon",
"id": "16ce40272b45ec1dc6e62dbf84336360c1038014",
"size": "311",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "infcommon/docker_compose/factory.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "63321"
},
{
"name": "Shell",
"bytes": "1878"
}
],
"symlink_target": ""
}
|
import os
import argparse
import threading
from inputs import get_gamepad
import cereal.messaging as messaging
from common.realtime import Ratekeeper
from common.numpy_fast import interp, clip
from common.params import Params
from tools.lib.kbhit import KBHit
class Keyboard:
    """Maps WASD key presses onto the two control axes ('gb' and 'steer')."""

    def __init__(self):
        self.kb = KBHit()
        self.axis_increment = 0.05  # 5% of full actuation each key press
        self.axes_map = {'w': 'gb', 's': 'gb',
                         'a': 'steer', 'd': 'steer'}
        self.axes_values = {'gb': 0., 'steer': 0.}
        self.axes_order = ['gb', 'steer']
        self.cancel = False

    def update(self):
        """Consume one key press; return False when the key is unhandled."""
        pressed = self.kb.getch().lower()
        self.cancel = False
        if pressed == 'r':
            # reset every axis back to neutral
            self.axes_values = dict.fromkeys(self.axes_values, 0.)
        elif pressed == 'c':
            self.cancel = True
        elif pressed in self.axes_map:
            # 'w'/'a' nudge the axis up, 's'/'d' nudge it down
            delta = self.axis_increment
            if pressed not in ('w', 'a'):
                delta = -delta
            target = self.axes_map[pressed]
            self.axes_values[target] = clip(self.axes_values[target] + delta, -1, 1)
        else:
            return False
        return True
class Joystick:
    """Reads a physical joystick/gamepad through the `inputs` package and
    normalizes the two control axes into [-1, 1]."""

    def __init__(self, gamepad=False):
        # TODO: find a way to get this from API, perhaps "inputs" doesn't support it
        if gamepad:
            self.cancel_button = 'BTN_NORTH'  # (BTN_NORTH=X, ABS_RZ=Right Trigger)
            accel_axis = 'ABS_Y'
            steer_axis = 'ABS_RX'
        else:
            self.cancel_button = 'BTN_TRIGGER'
            accel_axis = 'ABS_Y'
            steer_axis = 'ABS_RZ'
        # Observed raw range per axis, refined on the fly in update() so the
        # normalization adapts to the device's actual output range.
        self.min_axis_value = {accel_axis: 0., steer_axis: 0.}
        self.max_axis_value = {accel_axis: 255., steer_axis: 255.}
        self.axes_values = {accel_axis: 0., steer_axis: 0.}
        self.axes_order = [accel_axis, steer_axis]
        self.cancel = False

    def update(self):
        """Block on the next gamepad event; return False if it's unhandled."""
        joystick_event = get_gamepad()[0]
        event = (joystick_event.code, joystick_event.state)
        if event[0] == self.cancel_button:
            if event[1] == 1:
                self.cancel = True
            elif event[1] == 0:  # state 0 is falling edge
                self.cancel = False
        elif event[0] in self.axes_values:
            # Track the extremes seen so far, then map the raw value into
            # [-1, 1]; the sign is flipped (presumably so pushing the stick
            # forward reads positive -- confirm against the consumer).
            self.max_axis_value[event[0]] = max(event[1], self.max_axis_value[event[0]])
            self.min_axis_value[event[0]] = min(event[1], self.min_axis_value[event[0]])
            norm = -interp(event[1], [self.min_axis_value[event[0]], self.max_axis_value[event[0]]], [-1., 1.])
            self.axes_values[event[0]] = norm if abs(norm) > 0.05 else 0.  # center can be noisy, deadzone of 5%
        else:
            return False
        return True
def send_thread(joystick):
    """Publish the joystick's current axes/buttons over cereal at 100Hz."""
    joystick_sock = messaging.pub_sock('testJoystick')
    rk = Ratekeeper(100, print_delay_threshold=None)
    while 1:
        dat = messaging.new_message('testJoystick')
        dat.testJoystick.axes = [joystick.axes_values[a] for a in joystick.axes_order]
        dat.testJoystick.buttons = [joystick.cancel]
        joystick_sock.send(dat.to_bytes())
        print('\n' + ', '.join(f'{name}: {round(v, 3)}' for name, v in joystick.axes_values.items()))
        if "WEB" in os.environ:
            import requests
            # Mirror the axes (in reversed order) to a remote /control endpoint.
            # NOTE(review): timeout=None means this call can block forever and
            # stall the publish loop -- confirm intended.
            requests.get("http://"+os.environ["WEB"]+":5000/control/%f/%f" % tuple([joystick.axes_values[a] for a in joystick.axes_order][::-1]), timeout=None)
        rk.keep_time()
def joystick_thread(joystick):
    """Start the background publisher and poll the input device forever."""
    # Presumably flags joystick mode so the rest of the stack accepts these
    # control messages -- confirm against JoystickDebugMode consumers.
    Params().put_bool('JoystickDebugMode', True)
    threading.Thread(target=send_thread, args=(joystick,), daemon=True).start()
    while True:
        joystick.update()
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Publishes events from your joystick to control your car.\n' +
                                                 'openpilot must be offroad before starting joysticked.',
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('--keyboard', action='store_true', help='Use your keyboard instead of a joystick')
    parser.add_argument('--gamepad', action='store_true', help='Use gamepad configuration instead of joystick')
    args = parser.parse_args()

    # Refuse to run onroad unless explicitly bridged (ZMQ) or web-controlled.
    if not Params().get_bool("IsOffroad") and "ZMQ" not in os.environ and "WEB" not in os.environ:
        print("The car must be off before running joystickd.")
        exit()

    print()
    if args.keyboard:
        print('Gas/brake control: `W` and `S` keys')
        print('Steering control: `A` and `D` keys')
        print('Buttons')
        print('- `R`: Resets axes')
        print('- `C`: Cancel cruise control')
    else:
        print('Using joystick, make sure to run cereal/messaging/bridge on your device if running over the network!')

    # Keyboard and Joystick expose the same axes_values/axes_order/cancel
    # interface consumed by send_thread.
    joystick = Keyboard() if args.keyboard else Joystick(args.gamepad)
    joystick_thread(joystick)
|
{
"content_hash": "9ee47bd493de4eb02da79ac52636c07b",
"timestamp": "",
"source": "github",
"line_count": 118,
"max_line_length": 153,
"avg_line_length": 38.279661016949156,
"alnum_prop": 0.6393624086783263,
"repo_name": "commaai/openpilot",
"id": "b31dab83fe1f4442ddbafcf3421840658343e587",
"size": "4539",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/joystick/joystickd.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "604924"
},
{
"name": "C++",
"bytes": "1125553"
},
{
"name": "Cython",
"bytes": "50503"
},
{
"name": "Dockerfile",
"bytes": "1239"
},
{
"name": "Emacs Lisp",
"bytes": "124"
},
{
"name": "HTML",
"bytes": "11493"
},
{
"name": "Kaitai Struct",
"bytes": "8093"
},
{
"name": "MATLAB",
"bytes": "35190"
},
{
"name": "Makefile",
"bytes": "14018"
},
{
"name": "Python",
"bytes": "2386885"
},
{
"name": "QML",
"bytes": "1132"
},
{
"name": "Shell",
"bytes": "32876"
}
],
"symlink_target": ""
}
|
"""
Gmail
Access Gmail mailboxes including sending user email.
OpenAPI spec version: v1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class UsersApi(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        # An explicitly supplied client wins; otherwise fall back to (and
        # lazily create) the client held by the shared Configuration.
        config = Configuration()
        if api_client:
            self.api_client = api_client
            return
        if not config.api_client:
            config.api_client = ApiClient()
        self.api_client = config.api_client
def gmail_users_drafts_create(self, user_id, **kwargs):
"""
Creates a new draft with the DRAFT label.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_drafts_create(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param Draft body:
:return: Draft
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_drafts_create_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_drafts_create_with_http_info(user_id, **kwargs)
return data
def gmail_users_drafts_create_with_http_info(self, user_id, **kwargs):
"""
Creates a new draft with the DRAFT label.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_drafts_create_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param Draft body:
:return: Draft
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_drafts_create" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_drafts_create`")
collection_formats = {}
resource_path = '/{userId}/drafts'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['message/rfc822'])
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Draft',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_drafts_delete(self, user_id, id, **kwargs):
"""
Immediately and permanently deletes the specified draft. Does not simply trash it.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_drafts_delete(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the draft to delete. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_drafts_delete_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_drafts_delete_with_http_info(user_id, id, **kwargs)
return data
def gmail_users_drafts_delete_with_http_info(self, user_id, id, **kwargs):
"""
Immediately and permanently deletes the specified draft. Does not simply trash it.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_drafts_delete_with_http_info(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the draft to delete. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_drafts_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_drafts_delete`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_drafts_delete`")
collection_formats = {}
resource_path = '/{userId}/drafts/{id}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_drafts_get(self, user_id, id, **kwargs):
"""
Gets the specified draft.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_drafts_get(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the draft to retrieve. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param str format: The format to return the draft in.
:return: Draft
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_drafts_get_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_drafts_get_with_http_info(user_id, id, **kwargs)
return data
def gmail_users_drafts_get_with_http_info(self, user_id, id, **kwargs):
"""
Gets the specified draft.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_drafts_get_with_http_info(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the draft to retrieve. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param str format: The format to return the draft in.
:return: Draft
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'format']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_drafts_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_drafts_get`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_drafts_get`")
collection_formats = {}
resource_path = '/{userId}/drafts/{id}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
if 'format' in params:
query_params['format'] = params['format']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Draft',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_drafts_list(self, user_id, **kwargs):
"""
Lists the drafts in the user's mailbox.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_drafts_list(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param bool include_spam_trash: Include drafts from SPAM and TRASH in the results.
:param int max_results: Maximum number of drafts to return.
:param str page_token: Page token to retrieve a specific page of results in the list.
:param str q: Only return draft messages matching the specified query. Supports the same query format as the Gmail search box. For example, \"from:someuser@example.com rfc822msgid: is:unread\".
:return: ListDraftsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_drafts_list_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_drafts_list_with_http_info(user_id, **kwargs)
return data
def gmail_users_drafts_list_with_http_info(self, user_id, **kwargs):
"""
Lists the drafts in the user's mailbox.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_drafts_list_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param bool include_spam_trash: Include drafts from SPAM and TRASH in the results.
:param int max_results: Maximum number of drafts to return.
:param str page_token: Page token to retrieve a specific page of results in the list.
:param str q: Only return draft messages matching the specified query. Supports the same query format as the Gmail search box. For example, \"from:someuser@example.com rfc822msgid: is:unread\".
:return: ListDraftsResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'include_spam_trash', 'max_results', 'page_token', 'q']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_drafts_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_drafts_list`")
collection_formats = {}
resource_path = '/{userId}/drafts'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
if 'include_spam_trash' in params:
query_params['includeSpamTrash'] = params['include_spam_trash']
if 'max_results' in params:
query_params['maxResults'] = params['max_results']
if 'page_token' in params:
query_params['pageToken'] = params['page_token']
if 'q' in params:
query_params['q'] = params['q']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ListDraftsResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_drafts_send(self, user_id, **kwargs):
"""
Sends the specified, existing draft to the recipients in the To, Cc, and Bcc headers.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_drafts_send(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param Draft body:
:return: Message
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_drafts_send_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_drafts_send_with_http_info(user_id, **kwargs)
return data
def gmail_users_drafts_send_with_http_info(self, user_id, **kwargs):
"""
Sends the specified, existing draft to the recipients in the To, Cc, and Bcc headers.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_drafts_send_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param Draft body:
:return: Message
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_drafts_send" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_drafts_send`")
collection_formats = {}
resource_path = '/{userId}/drafts/send'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['message/rfc822'])
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Message',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_drafts_update(self, user_id, id, **kwargs):
"""
Replaces a draft's content.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_drafts_update(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the draft to update. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param Draft body:
:return: Draft
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_drafts_update_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_drafts_update_with_http_info(user_id, id, **kwargs)
return data
def gmail_users_drafts_update_with_http_info(self, user_id, id, **kwargs):
"""
Replaces a draft's content.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_drafts_update_with_http_info(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the draft to update. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param Draft body:
:return: Draft
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_drafts_update" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_drafts_update`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_drafts_update`")
collection_formats = {}
resource_path = '/{userId}/drafts/{id}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['message/rfc822'])
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Draft',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_get_profile(self, user_id, **kwargs):
"""
Gets the current user's Gmail profile.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_get_profile(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: Profile
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_get_profile_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_get_profile_with_http_info(user_id, **kwargs)
return data
def gmail_users_get_profile_with_http_info(self, user_id, **kwargs):
"""
Gets the current user's Gmail profile.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_get_profile_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: Profile
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_get_profile" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_get_profile`")
collection_formats = {}
resource_path = '/{userId}/profile'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Profile',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_history_list(self, user_id, **kwargs):
"""
Lists the history of all changes to the given mailbox. History results are returned in chronological order (increasing historyId).
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_history_list(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param list[str] history_types: History types to be returned by the function
:param str label_id: Only return messages with a label matching the ID.
:param int max_results: The maximum number of history records to return.
:param str page_token: Page token to retrieve a specific page of results in the list.
:param str start_history_id: Required. Returns history records after the specified startHistoryId. The supplied startHistoryId should be obtained from the historyId of a message, thread, or previous list response. History IDs increase chronologically but are not contiguous with random gaps in between valid IDs. Supplying an invalid or out of date startHistoryId typically returns an HTTP 404 error code. A historyId is typically valid for at least a week, but in some rare circumstances may be valid for only a few hours. If you receive an HTTP 404 error response, your application should perform a full sync. If you receive no nextPageToken in the response, there are no updates to retrieve and you can store the returned historyId for a future request.
:return: ListHistoryResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_history_list_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_history_list_with_http_info(user_id, **kwargs)
return data
def gmail_users_history_list_with_http_info(self, user_id, **kwargs):
"""
Lists the history of all changes to the given mailbox. History results are returned in chronological order (increasing historyId).
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_history_list_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param list[str] history_types: History types to be returned by the function
:param str label_id: Only return messages with a label matching the ID.
:param int max_results: The maximum number of history records to return.
:param str page_token: Page token to retrieve a specific page of results in the list.
:param str start_history_id: Required. Returns history records after the specified startHistoryId. The supplied startHistoryId should be obtained from the historyId of a message, thread, or previous list response. History IDs increase chronologically but are not contiguous with random gaps in between valid IDs. Supplying an invalid or out of date startHistoryId typically returns an HTTP 404 error code. A historyId is typically valid for at least a week, but in some rare circumstances may be valid for only a few hours. If you receive an HTTP 404 error response, your application should perform a full sync. If you receive no nextPageToken in the response, there are no updates to retrieve and you can store the returned historyId for a future request.
:return: ListHistoryResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'history_types', 'label_id', 'max_results', 'page_token', 'start_history_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_history_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_history_list`")
collection_formats = {}
resource_path = '/{userId}/history'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
if 'history_types' in params:
query_params['historyTypes'] = params['history_types']
collection_formats['historyTypes'] = 'multi'
if 'label_id' in params:
query_params['labelId'] = params['label_id']
if 'max_results' in params:
query_params['maxResults'] = params['max_results']
if 'page_token' in params:
query_params['pageToken'] = params['page_token']
if 'start_history_id' in params:
query_params['startHistoryId'] = params['start_history_id']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ListHistoryResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_labels_create(self, user_id, **kwargs):
"""
Creates a new label.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_labels_create(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param Label body:
:return: Label
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_labels_create_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_labels_create_with_http_info(user_id, **kwargs)
return data
def gmail_users_labels_create_with_http_info(self, user_id, **kwargs):
"""
Creates a new label.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_labels_create_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param Label body:
:return: Label
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_labels_create" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_labels_create`")
collection_formats = {}
resource_path = '/{userId}/labels'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Label',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_labels_delete(self, user_id, id, **kwargs):
"""
Immediately and permanently deletes the specified label and removes it from any messages and threads that it is applied to.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_labels_delete(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the label to delete. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_labels_delete_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_labels_delete_with_http_info(user_id, id, **kwargs)
return data
def gmail_users_labels_delete_with_http_info(self, user_id, id, **kwargs):
"""
Immediately and permanently deletes the specified label and removes it from any messages and threads that it is applied to.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_labels_delete_with_http_info(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the label to delete. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_labels_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_labels_delete`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_labels_delete`")
collection_formats = {}
resource_path = '/{userId}/labels/{id}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_labels_get(self, user_id, id, **kwargs):
"""
Gets the specified label.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_labels_get(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the label to retrieve. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: Label
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_labels_get_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_labels_get_with_http_info(user_id, id, **kwargs)
return data
def gmail_users_labels_get_with_http_info(self, user_id, id, **kwargs):
"""
Gets the specified label.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_labels_get_with_http_info(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the label to retrieve. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: Label
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_labels_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_labels_get`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_labels_get`")
collection_formats = {}
resource_path = '/{userId}/labels/{id}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Label',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_labels_list(self, user_id, **kwargs):
"""
Lists all labels in the user's mailbox.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_labels_list(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: ListLabelsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_labels_list_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_labels_list_with_http_info(user_id, **kwargs)
return data
def gmail_users_labels_list_with_http_info(self, user_id, **kwargs):
    """
    Lists all labels in the user's mailbox.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.gmail_users_labels_list_with_http_info(user_id, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    :return: ListLabelsResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Complete set of keyword arguments this method accepts: the documented
    # API query parameters plus the framework-level pass-through options.
    all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() also captures `self` and `all_params`; only the keys
    # explicitly checked below are ever read, so the extras are harmless.
    params = locals()
    # Reject any keyword argument that is not a known parameter, then merge
    # the validated kwargs into the flat `params` mapping.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_labels_list" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'user_id' is set
    if ('user_id' not in params) or (params['user_id'] is None):
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_labels_list`")
    collection_formats = {}
    # REST path template; the {userId} placeholder is substituted by the API
    # client from `path_params`.
    resource_path = '/{userId}/labels'.replace('{format}', 'json')
    path_params = {}
    if 'user_id' in params:
        path_params['userId'] = params['user_id']
    # Only options the caller actually supplied are forwarded as query
    # parameters (note the snake_case -> camelCase renames).
    query_params = {}
    if 'alt' in params:
        query_params['alt'] = params['alt']
    if 'fields' in params:
        query_params['fields'] = params['fields']
    if 'key' in params:
        query_params['key'] = params['key']
    if 'oauth_token' in params:
        query_params['oauth_token'] = params['oauth_token']
    if 'pretty_print' in params:
        query_params['prettyPrint'] = params['pretty_print']
    if 'quota_user' in params:
        query_params['quotaUser'] = params['quota_user']
    if 'user_ip' in params:
        query_params['userIp'] = params['user_ip']
    header_params = {}
    form_params = []
    local_var_files = {}
    # GET request: no request body is sent.
    body_params = None
    # Authentication setting
    auth_settings = ['Oauth2']
    # Delegate the HTTP round trip; the response is deserialized into a
    # ListLabelsResponse (or the request thread is returned when async).
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ListLabelsResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def gmail_users_labels_patch(self, user_id, id, **kwargs):
    """
    Updates the specified label. This method supports patch semantics.
    Synchronous by default; supply a `callback` keyword argument to run the
    request asynchronously and receive the response through that callback.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_labels_patch(user_id, id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str id: The ID of the label to update. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    :param Label body:
    :return: Label
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this convenience wrapper want only the deserialized data,
    # never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns the request thread when a callback is
    # supplied and the response data otherwise, so both paths reduce to the
    # same single call.
    return self.gmail_users_labels_patch_with_http_info(user_id, id, **kwargs)
def gmail_users_labels_patch_with_http_info(self, user_id, id, **kwargs):
    """
    Updates the specified label. This method supports patch semantics.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.gmail_users_labels_patch_with_http_info(user_id, id, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str id: The ID of the label to update. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    :param Label body:
    :return: Label
        If the method is called asynchronously,
        returns the request thread.
    """
    # Complete set of keyword arguments this method accepts: the documented
    # API parameters plus the framework-level pass-through options.
    all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() also captures `self` and `all_params`; only the keys
    # explicitly checked below are ever read, so the extras are harmless.
    params = locals()
    # Reject any keyword argument that is not a known parameter, then merge
    # the validated kwargs into the flat `params` mapping.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_labels_patch" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'user_id' is set
    if ('user_id' not in params) or (params['user_id'] is None):
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_labels_patch`")
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `gmail_users_labels_patch`")
    collection_formats = {}
    # REST path template; {userId} and {id} are substituted by the API
    # client from `path_params`.
    resource_path = '/{userId}/labels/{id}'.replace('{format}', 'json')
    path_params = {}
    if 'user_id' in params:
        path_params['userId'] = params['user_id']
    if 'id' in params:
        path_params['id'] = params['id']
    # Only options the caller actually supplied are forwarded as query
    # parameters (note the snake_case -> camelCase renames).
    query_params = {}
    if 'alt' in params:
        query_params['alt'] = params['alt']
    if 'fields' in params:
        query_params['fields'] = params['fields']
    if 'key' in params:
        query_params['key'] = params['key']
    if 'oauth_token' in params:
        query_params['oauth_token'] = params['oauth_token']
    if 'pretty_print' in params:
        query_params['prettyPrint'] = params['pretty_print']
    if 'quota_user' in params:
        query_params['quotaUser'] = params['quota_user']
    if 'user_ip' in params:
        query_params['userIp'] = params['user_ip']
    header_params = {}
    form_params = []
    local_var_files = {}
    # The optional Label payload becomes the PATCH request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # Authentication setting
    auth_settings = ['Oauth2']
    # Delegate the HTTP round trip; the response is deserialized into a
    # Label (or the request thread is returned when async).
    return self.api_client.call_api(resource_path, 'PATCH',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Label',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def gmail_users_labels_update(self, user_id, id, **kwargs):
    """
    Updates the specified label.
    Synchronous by default; supply a `callback` keyword argument to run the
    request asynchronously and receive the response through that callback.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_labels_update(user_id, id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str id: The ID of the label to update. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    :param Label body:
    :return: Label
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this convenience wrapper want only the deserialized data,
    # never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns the request thread when a callback is
    # supplied and the response data otherwise, so both paths reduce to the
    # same single call.
    return self.gmail_users_labels_update_with_http_info(user_id, id, **kwargs)
def gmail_users_labels_update_with_http_info(self, user_id, id, **kwargs):
    """
    Updates the specified label.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.gmail_users_labels_update_with_http_info(user_id, id, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str id: The ID of the label to update. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    :param Label body:
    :return: Label
        If the method is called asynchronously,
        returns the request thread.
    """
    # Complete set of keyword arguments this method accepts: the documented
    # API parameters plus the framework-level pass-through options.
    all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() also captures `self` and `all_params`; only the keys
    # explicitly checked below are ever read, so the extras are harmless.
    params = locals()
    # Reject any keyword argument that is not a known parameter, then merge
    # the validated kwargs into the flat `params` mapping.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_labels_update" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'user_id' is set
    if ('user_id' not in params) or (params['user_id'] is None):
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_labels_update`")
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `gmail_users_labels_update`")
    collection_formats = {}
    # REST path template; {userId} and {id} are substituted by the API
    # client from `path_params`.
    resource_path = '/{userId}/labels/{id}'.replace('{format}', 'json')
    path_params = {}
    if 'user_id' in params:
        path_params['userId'] = params['user_id']
    if 'id' in params:
        path_params['id'] = params['id']
    # Only options the caller actually supplied are forwarded as query
    # parameters (note the snake_case -> camelCase renames).
    query_params = {}
    if 'alt' in params:
        query_params['alt'] = params['alt']
    if 'fields' in params:
        query_params['fields'] = params['fields']
    if 'key' in params:
        query_params['key'] = params['key']
    if 'oauth_token' in params:
        query_params['oauth_token'] = params['oauth_token']
    if 'pretty_print' in params:
        query_params['prettyPrint'] = params['pretty_print']
    if 'quota_user' in params:
        query_params['quotaUser'] = params['quota_user']
    if 'user_ip' in params:
        query_params['userIp'] = params['user_ip']
    header_params = {}
    form_params = []
    local_var_files = {}
    # The optional Label payload becomes the PUT request body (full update,
    # unlike the PATCH variant).
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # Authentication setting
    auth_settings = ['Oauth2']
    # Delegate the HTTP round trip; the response is deserialized into a
    # Label (or the request thread is returned when async).
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Label',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def gmail_users_messages_attachments_get(self, user_id, message_id, id, **kwargs):
    """
    Gets the specified message attachment.
    Synchronous by default; supply a `callback` keyword argument to run the
    request asynchronously and receive the response through that callback.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_messages_attachments_get(user_id, message_id, id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str message_id: The ID of the message containing the attachment. (required)
    :param str id: The ID of the attachment. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    :return: MessagePartBody
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this convenience wrapper want only the deserialized data,
    # never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns the request thread when a callback is
    # supplied and the response data otherwise, so both paths reduce to the
    # same single call.
    return self.gmail_users_messages_attachments_get_with_http_info(user_id, message_id, id, **kwargs)
def gmail_users_messages_attachments_get_with_http_info(self, user_id, message_id, id, **kwargs):
    """
    Gets the specified message attachment.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.gmail_users_messages_attachments_get_with_http_info(user_id, message_id, id, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str message_id: The ID of the message containing the attachment. (required)
    :param str id: The ID of the attachment. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    :return: MessagePartBody
        If the method is called asynchronously,
        returns the request thread.
    """
    # Complete set of keyword arguments this method accepts: the documented
    # API parameters plus the framework-level pass-through options.
    all_params = ['user_id', 'message_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() also captures `self` and `all_params`; only the keys
    # explicitly checked below are ever read, so the extras are harmless.
    params = locals()
    # Reject any keyword argument that is not a known parameter, then merge
    # the validated kwargs into the flat `params` mapping.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_messages_attachments_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'user_id' is set
    if ('user_id' not in params) or (params['user_id'] is None):
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_messages_attachments_get`")
    # verify the required parameter 'message_id' is set
    if ('message_id' not in params) or (params['message_id'] is None):
        raise ValueError("Missing the required parameter `message_id` when calling `gmail_users_messages_attachments_get`")
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `gmail_users_messages_attachments_get`")
    collection_formats = {}
    # REST path template; {userId}, {messageId} and {id} are substituted by
    # the API client from `path_params`.
    resource_path = '/{userId}/messages/{messageId}/attachments/{id}'.replace('{format}', 'json')
    path_params = {}
    if 'user_id' in params:
        path_params['userId'] = params['user_id']
    if 'message_id' in params:
        path_params['messageId'] = params['message_id']
    if 'id' in params:
        path_params['id'] = params['id']
    # Only options the caller actually supplied are forwarded as query
    # parameters (note the snake_case -> camelCase renames).
    query_params = {}
    if 'alt' in params:
        query_params['alt'] = params['alt']
    if 'fields' in params:
        query_params['fields'] = params['fields']
    if 'key' in params:
        query_params['key'] = params['key']
    if 'oauth_token' in params:
        query_params['oauth_token'] = params['oauth_token']
    if 'pretty_print' in params:
        query_params['prettyPrint'] = params['pretty_print']
    if 'quota_user' in params:
        query_params['quotaUser'] = params['quota_user']
    if 'user_ip' in params:
        query_params['userIp'] = params['user_ip']
    header_params = {}
    form_params = []
    local_var_files = {}
    # GET request: no request body is sent.
    body_params = None
    # Authentication setting
    auth_settings = ['Oauth2']
    # Delegate the HTTP round trip; the response is deserialized into a
    # MessagePartBody (or the request thread is returned when async).
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='MessagePartBody',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def gmail_users_messages_batch_delete(self, user_id, **kwargs):
    """
    Deletes many messages by message ID. Provides no guarantees that messages were not already deleted or even existed at all.
    Synchronous by default; supply a `callback` keyword argument to run the
    request asynchronously and receive the response through that callback.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_messages_batch_delete(user_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    :param BatchDeleteMessagesRequest body:
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this convenience wrapper want only the deserialized data,
    # never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns the request thread when a callback is
    # supplied and the response data otherwise, so both paths reduce to the
    # same single call.
    return self.gmail_users_messages_batch_delete_with_http_info(user_id, **kwargs)
def gmail_users_messages_batch_delete_with_http_info(self, user_id, **kwargs):
    """
    Deletes many messages by message ID. Provides no guarantees that messages were not already deleted or even existed at all.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.gmail_users_messages_batch_delete_with_http_info(user_id, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    :param BatchDeleteMessagesRequest body:
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Complete set of keyword arguments this method accepts: the documented
    # API parameters plus the framework-level pass-through options.
    all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() also captures `self` and `all_params`; only the keys
    # explicitly checked below are ever read, so the extras are harmless.
    params = locals()
    # Reject any keyword argument that is not a known parameter, then merge
    # the validated kwargs into the flat `params` mapping.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_messages_batch_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'user_id' is set
    if ('user_id' not in params) or (params['user_id'] is None):
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_messages_batch_delete`")
    collection_formats = {}
    # REST path template; {userId} is substituted by the API client from
    # `path_params`.
    resource_path = '/{userId}/messages/batchDelete'.replace('{format}', 'json')
    path_params = {}
    if 'user_id' in params:
        path_params['userId'] = params['user_id']
    # Only options the caller actually supplied are forwarded as query
    # parameters (note the snake_case -> camelCase renames).
    query_params = {}
    if 'alt' in params:
        query_params['alt'] = params['alt']
    if 'fields' in params:
        query_params['fields'] = params['fields']
    if 'key' in params:
        query_params['key'] = params['key']
    if 'oauth_token' in params:
        query_params['oauth_token'] = params['oauth_token']
    if 'pretty_print' in params:
        query_params['prettyPrint'] = params['pretty_print']
    if 'quota_user' in params:
        query_params['quotaUser'] = params['quota_user']
    if 'user_ip' in params:
        query_params['userIp'] = params['user_ip']
    header_params = {}
    form_params = []
    local_var_files = {}
    # The optional BatchDeleteMessagesRequest payload becomes the POST body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # Authentication setting
    auth_settings = ['Oauth2']
    # Delegate the HTTP round trip; response_type=None because the endpoint
    # returns no payload on success.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def gmail_users_messages_batch_modify(self, user_id, **kwargs):
    """
    Modifies the labels on the specified messages.
    Synchronous by default; supply a `callback` keyword argument to run the
    request asynchronously and receive the response through that callback.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_messages_batch_modify(user_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    :param BatchModifyMessagesRequest body:
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this convenience wrapper want only the deserialized data,
    # never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns the request thread when a callback is
    # supplied and the response data otherwise, so both paths reduce to the
    # same single call.
    return self.gmail_users_messages_batch_modify_with_http_info(user_id, **kwargs)
def gmail_users_messages_batch_modify_with_http_info(self, user_id, **kwargs):
    """
    Modifies the labels on the specified messages.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>> pprint(response)
    >>>
    >>> thread = api.gmail_users_messages_batch_modify_with_http_info(user_id, callback=callback_function)
    :param callback function: The callback function
    for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    :param BatchModifyMessagesRequest body:
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Complete set of keyword arguments this method accepts: the documented
    # API parameters plus the framework-level pass-through options.
    all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() also captures `self` and `all_params`; only the keys
    # explicitly checked below are ever read, so the extras are harmless.
    params = locals()
    # Reject any keyword argument that is not a known parameter, then merge
    # the validated kwargs into the flat `params` mapping.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_messages_batch_modify" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'user_id' is set
    if ('user_id' not in params) or (params['user_id'] is None):
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_messages_batch_modify`")
    collection_formats = {}
    # REST path template; {userId} is substituted by the API client from
    # `path_params`.
    resource_path = '/{userId}/messages/batchModify'.replace('{format}', 'json')
    path_params = {}
    if 'user_id' in params:
        path_params['userId'] = params['user_id']
    # Only options the caller actually supplied are forwarded as query
    # parameters (note the snake_case -> camelCase renames).
    query_params = {}
    if 'alt' in params:
        query_params['alt'] = params['alt']
    if 'fields' in params:
        query_params['fields'] = params['fields']
    if 'key' in params:
        query_params['key'] = params['key']
    if 'oauth_token' in params:
        query_params['oauth_token'] = params['oauth_token']
    if 'pretty_print' in params:
        query_params['prettyPrint'] = params['pretty_print']
    if 'quota_user' in params:
        query_params['quotaUser'] = params['quota_user']
    if 'user_ip' in params:
        query_params['userIp'] = params['user_ip']
    header_params = {}
    form_params = []
    local_var_files = {}
    # The optional BatchModifyMessagesRequest payload becomes the POST body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # Authentication setting
    auth_settings = ['Oauth2']
    # Delegate the HTTP round trip; response_type=None because the endpoint
    # returns no payload on success.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def gmail_users_messages_delete(self, user_id, id, **kwargs):
    """
    Immediately and permanently deletes the specified message. This operation cannot be undone. Prefer messages.trash instead.

    The request runs synchronously unless a `callback` keyword is supplied,
    in which case the request thread is returned and the callback is
    invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_messages_delete(user_id, id, callback=callback_function)

    :param callback function: The callback function for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str id: The ID of the message to delete. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Arbitrary per-user string for quota accounting (max 40 chars). Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates, for per-user limits.
    :return: None
             If the method is called asynchronously, returns the request thread.
    """
    # Convenience wrapper: callers want only the payload, not HTTP metadata.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.gmail_users_messages_delete_with_http_info(user_id, id, **kwargs)
    return self.gmail_users_messages_delete_with_http_info(user_id, id, **kwargs)
def gmail_users_messages_delete_with_http_info(self, user_id, id, **kwargs):
    """
    Immediately and permanently deletes the specified message. This operation cannot be undone. Prefer messages.trash instead.

    Synchronous by default; pass a `callback` function to perform the HTTP
    request asynchronously (the callback receives the response and the
    method returns the request thread).

    :param callback function: The callback function for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str id: The ID of the message to delete. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Arbitrary per-user string for quota accounting (max 40 chars). Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates, for per-user limits.
    :return: None
             If the method is called asynchronously, returns the request thread.
    :raises TypeError: if an unexpected keyword argument is passed.
    :raises ValueError: if a required parameter is missing or None.
    """
    all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token',
                  'pretty_print', 'quota_user', 'user_ip',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']

    params = locals()
    # Reject unknown keyword arguments up front so typos fail loudly, then
    # fold the accepted ones into the params dict.
    # dict.items() replaces the Py2-era six.iteritems; avoids shadowing the
    # builtin-like local name `key` used below as a query-parameter string.
    for name, value in params['kwargs'].items():
        if name not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_messages_delete" % name
            )
        params[name] = value
    del params['kwargs']
    # Required path parameters must be present and non-None.
    if params.get('user_id') is None:
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_messages_delete`")
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `gmail_users_messages_delete`")

    collection_formats = {}

    resource_path = '/{userId}/messages/{id}'.replace('{format}', 'json')
    path_params = {}
    if 'user_id' in params:
        path_params['userId'] = params['user_id']
    if 'id' in params:
        path_params['id'] = params['id']

    # Optional query parameters: python name -> wire name.
    _query_map = (('alt', 'alt'), ('fields', 'fields'), ('key', 'key'),
                  ('oauth_token', 'oauth_token'),
                  ('pretty_print', 'prettyPrint'),
                  ('quota_user', 'quotaUser'), ('user_ip', 'userIp'))
    query_params = {wire: params[py] for py, wire in _query_map if py in params}

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['Oauth2']

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def gmail_users_messages_get(self, user_id, id, **kwargs):
    """
    Gets the specified message.

    The request runs synchronously unless a `callback` keyword is supplied,
    in which case the request thread is returned and the callback is
    invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_messages_get(user_id, id, callback=callback_function)

    :param callback function: The callback function for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str id: The ID of the message to retrieve. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Arbitrary per-user string for quota accounting (max 40 chars). Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates, for per-user limits.
    :param str format: The format to return the message in.
    :param list[str] metadata_headers: When given and format is METADATA, only include headers specified.
    :return: Message
             If the method is called asynchronously, returns the request thread.
    """
    # Convenience wrapper: callers want only the payload, not HTTP metadata.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.gmail_users_messages_get_with_http_info(user_id, id, **kwargs)
    return self.gmail_users_messages_get_with_http_info(user_id, id, **kwargs)
def gmail_users_messages_get_with_http_info(self, user_id, id, **kwargs):
    """
    Gets the specified message.

    Synchronous by default; pass a `callback` function to perform the HTTP
    request asynchronously (the callback receives the response and the
    method returns the request thread).

    :param callback function: The callback function for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str id: The ID of the message to retrieve. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Arbitrary per-user string for quota accounting (max 40 chars). Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates, for per-user limits.
    :param str format: The format to return the message in.
    :param list[str] metadata_headers: When given and format is METADATA, only include headers specified.
    :return: Message
             If the method is called asynchronously, returns the request thread.
    :raises TypeError: if an unexpected keyword argument is passed.
    :raises ValueError: if a required parameter is missing or None.
    """
    all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token',
                  'pretty_print', 'quota_user', 'user_ip', 'format',
                  'metadata_headers',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']

    params = locals()
    # Reject unknown keyword arguments up front so typos fail loudly.
    # dict.items() replaces the Py2-era six.iteritems.
    for name, value in params['kwargs'].items():
        if name not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_messages_get" % name
            )
        params[name] = value
    del params['kwargs']
    # Required path parameters must be present and non-None.
    if params.get('user_id') is None:
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_messages_get`")
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `gmail_users_messages_get`")

    collection_formats = {}

    resource_path = '/{userId}/messages/{id}'.replace('{format}', 'json')
    path_params = {}
    if 'user_id' in params:
        path_params['userId'] = params['user_id']
    if 'id' in params:
        path_params['id'] = params['id']

    # Optional query parameters: python name -> wire name.
    _query_map = (('alt', 'alt'), ('fields', 'fields'), ('key', 'key'),
                  ('oauth_token', 'oauth_token'),
                  ('pretty_print', 'prettyPrint'),
                  ('quota_user', 'quotaUser'), ('user_ip', 'userIp'),
                  ('format', 'format'))
    query_params = {wire: params[py] for py, wire in _query_map if py in params}
    if 'metadata_headers' in params:
        # Repeated query parameter: serialized as multiple key=value pairs.
        query_params['metadataHeaders'] = params['metadata_headers']
        collection_formats['metadataHeaders'] = 'multi'

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['Oauth2']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Message',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def gmail_users_messages_import(self, user_id, **kwargs):
    """
    Imports a message into only this user's mailbox, with standard email delivery scanning and classification similar to receiving via SMTP. Does not send a message.

    The request runs synchronously unless a `callback` keyword is supplied,
    in which case the request thread is returned and the callback is
    invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_messages_import(user_id, callback=callback_function)

    :param callback function: The callback function for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Arbitrary per-user string for quota accounting (max 40 chars). Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates, for per-user limits.
    :param bool deleted: Mark the email as permanently deleted (not TRASH) and only visible in Google Vault to a Vault administrator. Only used for G Suite accounts.
    :param str internal_date_source: Source for Gmail's internal date of the message.
    :param bool never_mark_spam: Ignore the Gmail spam classifier decision and never mark this email as SPAM in the mailbox.
    :param bool process_for_calendar: Process calendar invites in the email and add any extracted meetings to the Google Calendar for this user.
    :param Message body:
    :return: Message
             If the method is called asynchronously, returns the request thread.
    """
    # Convenience wrapper: callers want only the payload, not HTTP metadata.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.gmail_users_messages_import_with_http_info(user_id, **kwargs)
    return self.gmail_users_messages_import_with_http_info(user_id, **kwargs)
def gmail_users_messages_import_with_http_info(self, user_id, **kwargs):
    """
    Imports a message into only this user's mailbox, with standard email delivery scanning and classification similar to receiving via SMTP. Does not send a message.

    Synchronous by default; pass a `callback` function to perform the HTTP
    request asynchronously (the callback receives the response and the
    method returns the request thread).

    :param callback function: The callback function for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Arbitrary per-user string for quota accounting (max 40 chars). Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates, for per-user limits.
    :param bool deleted: Mark the email as permanently deleted (not TRASH) and only visible in Google Vault to a Vault administrator. Only used for G Suite accounts.
    :param str internal_date_source: Source for Gmail's internal date of the message.
    :param bool never_mark_spam: Ignore the Gmail spam classifier decision and never mark this email as SPAM in the mailbox.
    :param bool process_for_calendar: Process calendar invites in the email and add any extracted meetings to the Google Calendar for this user.
    :param Message body:
    :return: Message
             If the method is called asynchronously, returns the request thread.
    :raises TypeError: if an unexpected keyword argument is passed.
    :raises ValueError: if `user_id` is missing or None.
    """
    all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token',
                  'pretty_print', 'quota_user', 'user_ip', 'deleted',
                  'internal_date_source', 'never_mark_spam',
                  'process_for_calendar', 'body',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']

    params = locals()
    # Reject unknown keyword arguments up front so typos fail loudly.
    # dict.items() replaces the Py2-era six.iteritems.
    for name, value in params['kwargs'].items():
        if name not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_messages_import" % name
            )
        params[name] = value
    del params['kwargs']
    # Required path parameter must be present and non-None.
    if params.get('user_id') is None:
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_messages_import`")

    collection_formats = {}

    resource_path = '/{userId}/messages/import'.replace('{format}', 'json')
    path_params = {}
    if 'user_id' in params:
        path_params['userId'] = params['user_id']

    # Optional query parameters: python name -> wire name.
    _query_map = (('alt', 'alt'), ('fields', 'fields'), ('key', 'key'),
                  ('oauth_token', 'oauth_token'),
                  ('pretty_print', 'prettyPrint'),
                  ('quota_user', 'quotaUser'), ('user_ip', 'userIp'),
                  ('deleted', 'deleted'),
                  ('internal_date_source', 'internalDateSource'),
                  ('never_mark_spam', 'neverMarkSpam'),
                  ('process_for_calendar', 'processForCalendar'))
    query_params = {wire: params[py] for py, wire in _query_map if py in params}

    header_params = {}
    form_params = []
    local_var_files = {}

    # Request body is the raw RFC 822 message.
    body_params = params.get('body')

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(['message/rfc822'])

    # Authentication setting
    auth_settings = ['Oauth2']

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Message',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def gmail_users_messages_insert(self, user_id, **kwargs):
    """
    Directly inserts a message into only this user's mailbox similar to IMAP APPEND, bypassing most scanning and classification. Does not send a message.

    The request runs synchronously unless a `callback` keyword is supplied,
    in which case the request thread is returned and the callback is
    invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_messages_insert(user_id, callback=callback_function)

    :param callback function: The callback function for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Arbitrary per-user string for quota accounting (max 40 chars). Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates, for per-user limits.
    :param bool deleted: Mark the email as permanently deleted (not TRASH) and only visible in Google Vault to a Vault administrator. Only used for G Suite accounts.
    :param str internal_date_source: Source for Gmail's internal date of the message.
    :param Message body:
    :return: Message
             If the method is called asynchronously, returns the request thread.
    """
    # Convenience wrapper: callers want only the payload, not HTTP metadata.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.gmail_users_messages_insert_with_http_info(user_id, **kwargs)
    return self.gmail_users_messages_insert_with_http_info(user_id, **kwargs)
def gmail_users_messages_insert_with_http_info(self, user_id, **kwargs):
    """
    Directly inserts a message into only this user's mailbox similar to IMAP APPEND, bypassing most scanning and classification. Does not send a message.

    Synchronous by default; pass a `callback` function to perform the HTTP
    request asynchronously (the callback receives the response and the
    method returns the request thread).

    :param callback function: The callback function for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Arbitrary per-user string for quota accounting (max 40 chars). Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates, for per-user limits.
    :param bool deleted: Mark the email as permanently deleted (not TRASH) and only visible in Google Vault to a Vault administrator. Only used for G Suite accounts.
    :param str internal_date_source: Source for Gmail's internal date of the message.
    :param Message body:
    :return: Message
             If the method is called asynchronously, returns the request thread.
    :raises TypeError: if an unexpected keyword argument is passed.
    :raises ValueError: if `user_id` is missing or None.
    """
    all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token',
                  'pretty_print', 'quota_user', 'user_ip', 'deleted',
                  'internal_date_source', 'body',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']

    params = locals()
    # Reject unknown keyword arguments up front so typos fail loudly.
    # dict.items() replaces the Py2-era six.iteritems.
    for name, value in params['kwargs'].items():
        if name not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_messages_insert" % name
            )
        params[name] = value
    del params['kwargs']
    # Required path parameter must be present and non-None.
    if params.get('user_id') is None:
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_messages_insert`")

    collection_formats = {}

    resource_path = '/{userId}/messages'.replace('{format}', 'json')
    path_params = {}
    if 'user_id' in params:
        path_params['userId'] = params['user_id']

    # Optional query parameters: python name -> wire name.
    _query_map = (('alt', 'alt'), ('fields', 'fields'), ('key', 'key'),
                  ('oauth_token', 'oauth_token'),
                  ('pretty_print', 'prettyPrint'),
                  ('quota_user', 'quotaUser'), ('user_ip', 'userIp'),
                  ('deleted', 'deleted'),
                  ('internal_date_source', 'internalDateSource'))
    query_params = {wire: params[py] for py, wire in _query_map if py in params}

    header_params = {}
    form_params = []
    local_var_files = {}

    # Request body is the raw RFC 822 message.
    body_params = params.get('body')

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(['message/rfc822'])

    # Authentication setting
    auth_settings = ['Oauth2']

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Message',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def gmail_users_messages_list(self, user_id, **kwargs):
    """
    Lists the messages in the user's mailbox.

    The request runs synchronously unless a `callback` keyword is supplied,
    in which case the request thread is returned and the callback is
    invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_messages_list(user_id, callback=callback_function)

    :param callback function: The callback function for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Arbitrary per-user string for quota accounting (max 40 chars). Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates, for per-user limits.
    :param bool include_spam_trash: Include messages from SPAM and TRASH in the results.
    :param list[str] label_ids: Only return messages with labels that match all of the specified label IDs.
    :param int max_results: Maximum number of messages to return.
    :param str page_token: Page token to retrieve a specific page of results in the list.
    :param str q: Only return messages matching the specified query. Supports the same query format as the Gmail search box. For example, \"from:someuser@example.com rfc822msgid: is:unread\". Parameter cannot be used when accessing the api using the gmail.metadata scope.
    :return: ListMessagesResponse
             If the method is called asynchronously, returns the request thread.
    """
    # Convenience wrapper: callers want only the payload, not HTTP metadata.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.gmail_users_messages_list_with_http_info(user_id, **kwargs)
    return self.gmail_users_messages_list_with_http_info(user_id, **kwargs)
def gmail_users_messages_list_with_http_info(self, user_id, **kwargs):
    """
    Lists the messages in the user's mailbox.

    Synchronous by default; pass a `callback` function to perform the HTTP
    request asynchronously (the callback receives the response and the
    method returns the request thread).

    :param callback function: The callback function for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Arbitrary per-user string for quota accounting (max 40 chars). Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates, for per-user limits.
    :param bool include_spam_trash: Include messages from SPAM and TRASH in the results.
    :param list[str] label_ids: Only return messages with labels that match all of the specified label IDs.
    :param int max_results: Maximum number of messages to return.
    :param str page_token: Page token to retrieve a specific page of results in the list.
    :param str q: Only return messages matching the specified query. Supports the same query format as the Gmail search box. For example, \"from:someuser@example.com rfc822msgid: is:unread\". Parameter cannot be used when accessing the api using the gmail.metadata scope.
    :return: ListMessagesResponse
             If the method is called asynchronously, returns the request thread.
    :raises TypeError: if an unexpected keyword argument is passed.
    :raises ValueError: if `user_id` is missing or None.
    """
    all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token',
                  'pretty_print', 'quota_user', 'user_ip',
                  'include_spam_trash', 'label_ids', 'max_results',
                  'page_token', 'q',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']

    params = locals()
    # Reject unknown keyword arguments up front so typos fail loudly.
    # dict.items() replaces the Py2-era six.iteritems.
    for name, value in params['kwargs'].items():
        if name not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_messages_list" % name
            )
        params[name] = value
    del params['kwargs']
    # Required path parameter must be present and non-None.
    if params.get('user_id') is None:
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_messages_list`")

    collection_formats = {}

    resource_path = '/{userId}/messages'.replace('{format}', 'json')
    path_params = {}
    if 'user_id' in params:
        path_params['userId'] = params['user_id']

    # Optional query parameters: python name -> wire name.
    _query_map = (('alt', 'alt'), ('fields', 'fields'), ('key', 'key'),
                  ('oauth_token', 'oauth_token'),
                  ('pretty_print', 'prettyPrint'),
                  ('quota_user', 'quotaUser'), ('user_ip', 'userIp'),
                  ('include_spam_trash', 'includeSpamTrash'),
                  ('max_results', 'maxResults'),
                  ('page_token', 'pageToken'), ('q', 'q'))
    query_params = {wire: params[py] for py, wire in _query_map if py in params}
    if 'label_ids' in params:
        # Repeated query parameter: serialized as multiple key=value pairs.
        query_params['labelIds'] = params['label_ids']
        collection_formats['labelIds'] = 'multi'

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['Oauth2']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ListMessagesResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def gmail_users_messages_modify(self, user_id, id, **kwargs):
"""
Modifies the labels on the specified message.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_messages_modify(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the message to modify. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param ModifyMessageRequest body:
:return: Message
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_messages_modify_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_messages_modify_with_http_info(user_id, id, **kwargs)
return data
    def gmail_users_messages_modify_with_http_info(self, user_id, id, **kwargs):
        """
        Modifies the labels on the specified message.

        Synchronous by default; pass a `callback` function to make the HTTP
        request asynchronously (the callback is invoked with the response).

        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.gmail_users_messages_modify_with_http_info(user_id, id, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param str user_id: The user's email address, or the special value me for the authenticated user. (required)
        :param str id: The ID of the message to modify. (required)
        :param str alt: Data format for the response.
        :param str fields: Selector specifying which fields to include in a partial response.
        :param str key: API key; required unless an OAuth 2.0 token is provided.
        :param str oauth_token: OAuth 2.0 token for the current user.
        :param bool pretty_print: Returns response with indentations and line breaks.
        :param str quota_user: Arbitrary per-user string (max 40 chars) for quota purposes; overrides user_ip.
        :param str user_ip: IP address of the originating site, used for per-user limits.
        :param ModifyMessageRequest body:
        :return: Message
                 If the method is called asynchronously, returns the request thread.
        """
        # Every name accepted either positionally or through **kwargs; anything
        # else is rejected with a TypeError below.
        all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # NOTE: locals() is snapshotted here so the named arguments and the
        # validated kwargs can be handled uniformly through the `params` dict;
        # do not introduce locals above this line.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method gmail_users_messages_modify" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'user_id' is set
        if ('user_id' not in params) or (params['user_id'] is None):
            raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_messages_modify`")
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `gmail_users_messages_modify`")
        collection_formats = {}
        resource_path = '/{userId}/messages/{id}/modify'.replace('{format}', 'json')
        # Map snake_case Python parameter names onto the camelCase wire names.
        path_params = {}
        if 'user_id' in params:
            path_params['userId'] = params['user_id']
        if 'id' in params:
            path_params['id'] = params['id']
        query_params = {}
        if 'alt' in params:
            query_params['alt'] = params['alt']
        if 'fields' in params:
            query_params['fields'] = params['fields']
        if 'key' in params:
            query_params['key'] = params['key']
        if 'oauth_token' in params:
            query_params['oauth_token'] = params['oauth_token']
        if 'pretty_print' in params:
            query_params['prettyPrint'] = params['pretty_print']
        if 'quota_user' in params:
            query_params['quotaUser'] = params['quota_user']
        if 'user_ip' in params:
            query_params['userIp'] = params['user_ip']
        header_params = {}
        form_params = []
        local_var_files = {}
        # The ModifyMessageRequest payload is optional; sent as the JSON body
        # only when provided.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # Authentication setting
        auth_settings = ['Oauth2']
        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='Message',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def gmail_users_messages_send(self, user_id, **kwargs):
"""
Sends the specified message to the recipients in the To, Cc, and Bcc headers.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_messages_send(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param Message body:
:return: Message
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_messages_send_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_messages_send_with_http_info(user_id, **kwargs)
return data
    def gmail_users_messages_send_with_http_info(self, user_id, **kwargs):
        """
        Sends the specified message to the recipients in the To, Cc, and Bcc headers.

        Synchronous by default; pass a `callback` function to make the HTTP
        request asynchronously (the callback is invoked with the response).

        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.gmail_users_messages_send_with_http_info(user_id, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param str user_id: The user's email address, or the special value me for the authenticated user. (required)
        :param str alt: Data format for the response.
        :param str fields: Selector specifying which fields to include in a partial response.
        :param str key: API key; required unless an OAuth 2.0 token is provided.
        :param str oauth_token: OAuth 2.0 token for the current user.
        :param bool pretty_print: Returns response with indentations and line breaks.
        :param str quota_user: Arbitrary per-user string (max 40 chars) for quota purposes; overrides user_ip.
        :param str user_ip: IP address of the originating site, used for per-user limits.
        :param Message body:
        :return: Message
                 If the method is called asynchronously, returns the request thread.
        """
        # Every name accepted either positionally or through **kwargs; anything
        # else is rejected with a TypeError below.
        all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # NOTE: locals() is snapshotted here so the named arguments and the
        # validated kwargs can be handled uniformly through the `params` dict;
        # do not introduce locals above this line.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method gmail_users_messages_send" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'user_id' is set
        if ('user_id' not in params) or (params['user_id'] is None):
            raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_messages_send`")
        collection_formats = {}
        resource_path = '/{userId}/messages/send'.replace('{format}', 'json')
        # Map snake_case Python parameter names onto the camelCase wire names.
        path_params = {}
        if 'user_id' in params:
            path_params['userId'] = params['user_id']
        query_params = {}
        if 'alt' in params:
            query_params['alt'] = params['alt']
        if 'fields' in params:
            query_params['fields'] = params['fields']
        if 'key' in params:
            query_params['key'] = params['key']
        if 'oauth_token' in params:
            query_params['oauth_token'] = params['oauth_token']
        if 'pretty_print' in params:
            query_params['prettyPrint'] = params['pretty_print']
        if 'quota_user' in params:
            query_params['quotaUser'] = params['quota_user']
        if 'user_ip' in params:
            query_params['userIp'] = params['user_ip']
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Content-Type`
        # (this endpoint uploads the message body using the message/rfc822
        # media type rather than JSON)
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['message/rfc822'])
        # Authentication setting
        auth_settings = ['Oauth2']
        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='Message',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def gmail_users_messages_trash(self, user_id, id, **kwargs):
"""
Moves the specified message to the trash.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_messages_trash(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the message to Trash. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: Message
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_messages_trash_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_messages_trash_with_http_info(user_id, id, **kwargs)
return data
    def gmail_users_messages_trash_with_http_info(self, user_id, id, **kwargs):
        """
        Moves the specified message to the trash.

        Synchronous by default; pass a `callback` function to make the HTTP
        request asynchronously (the callback is invoked with the response).

        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.gmail_users_messages_trash_with_http_info(user_id, id, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param str user_id: The user's email address, or the special value me for the authenticated user. (required)
        :param str id: The ID of the message to Trash. (required)
        :param str alt: Data format for the response.
        :param str fields: Selector specifying which fields to include in a partial response.
        :param str key: API key; required unless an OAuth 2.0 token is provided.
        :param str oauth_token: OAuth 2.0 token for the current user.
        :param bool pretty_print: Returns response with indentations and line breaks.
        :param str quota_user: Arbitrary per-user string (max 40 chars) for quota purposes; overrides user_ip.
        :param str user_ip: IP address of the originating site, used for per-user limits.
        :return: Message
                 If the method is called asynchronously, returns the request thread.
        """
        # Every name accepted either positionally or through **kwargs; anything
        # else is rejected with a TypeError below.
        all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # NOTE: locals() is snapshotted here so the named arguments and the
        # validated kwargs can be handled uniformly through the `params` dict;
        # do not introduce locals above this line.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method gmail_users_messages_trash" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'user_id' is set
        if ('user_id' not in params) or (params['user_id'] is None):
            raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_messages_trash`")
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `gmail_users_messages_trash`")
        collection_formats = {}
        resource_path = '/{userId}/messages/{id}/trash'.replace('{format}', 'json')
        # Map snake_case Python parameter names onto the camelCase wire names.
        path_params = {}
        if 'user_id' in params:
            path_params['userId'] = params['user_id']
        if 'id' in params:
            path_params['id'] = params['id']
        query_params = {}
        if 'alt' in params:
            query_params['alt'] = params['alt']
        if 'fields' in params:
            query_params['fields'] = params['fields']
        if 'key' in params:
            query_params['key'] = params['key']
        if 'oauth_token' in params:
            query_params['oauth_token'] = params['oauth_token']
        if 'pretty_print' in params:
            query_params['prettyPrint'] = params['pretty_print']
        if 'quota_user' in params:
            query_params['quotaUser'] = params['quota_user']
        if 'user_ip' in params:
            query_params['userIp'] = params['user_ip']
        header_params = {}
        form_params = []
        local_var_files = {}
        # This endpoint takes no request payload.
        body_params = None
        # Authentication setting
        auth_settings = ['Oauth2']
        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='Message',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def gmail_users_messages_untrash(self, user_id, id, **kwargs):
"""
Removes the specified message from the trash.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_messages_untrash(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the message to remove from Trash. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: Message
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_messages_untrash_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_messages_untrash_with_http_info(user_id, id, **kwargs)
return data
    def gmail_users_messages_untrash_with_http_info(self, user_id, id, **kwargs):
        """
        Removes the specified message from the trash.

        Synchronous by default; pass a `callback` function to make the HTTP
        request asynchronously (the callback is invoked with the response).

        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.gmail_users_messages_untrash_with_http_info(user_id, id, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param str user_id: The user's email address, or the special value me for the authenticated user. (required)
        :param str id: The ID of the message to remove from Trash. (required)
        :param str alt: Data format for the response.
        :param str fields: Selector specifying which fields to include in a partial response.
        :param str key: API key; required unless an OAuth 2.0 token is provided.
        :param str oauth_token: OAuth 2.0 token for the current user.
        :param bool pretty_print: Returns response with indentations and line breaks.
        :param str quota_user: Arbitrary per-user string (max 40 chars) for quota purposes; overrides user_ip.
        :param str user_ip: IP address of the originating site, used for per-user limits.
        :return: Message
                 If the method is called asynchronously, returns the request thread.
        """
        # Every name accepted either positionally or through **kwargs; anything
        # else is rejected with a TypeError below.
        all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # NOTE: locals() is snapshotted here so the named arguments and the
        # validated kwargs can be handled uniformly through the `params` dict;
        # do not introduce locals above this line.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method gmail_users_messages_untrash" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'user_id' is set
        if ('user_id' not in params) or (params['user_id'] is None):
            raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_messages_untrash`")
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `gmail_users_messages_untrash`")
        collection_formats = {}
        resource_path = '/{userId}/messages/{id}/untrash'.replace('{format}', 'json')
        # Map snake_case Python parameter names onto the camelCase wire names.
        path_params = {}
        if 'user_id' in params:
            path_params['userId'] = params['user_id']
        if 'id' in params:
            path_params['id'] = params['id']
        query_params = {}
        if 'alt' in params:
            query_params['alt'] = params['alt']
        if 'fields' in params:
            query_params['fields'] = params['fields']
        if 'key' in params:
            query_params['key'] = params['key']
        if 'oauth_token' in params:
            query_params['oauth_token'] = params['oauth_token']
        if 'pretty_print' in params:
            query_params['prettyPrint'] = params['pretty_print']
        if 'quota_user' in params:
            query_params['quotaUser'] = params['quota_user']
        if 'user_ip' in params:
            query_params['userIp'] = params['user_ip']
        header_params = {}
        form_params = []
        local_var_files = {}
        # This endpoint takes no request payload.
        body_params = None
        # Authentication setting
        auth_settings = ['Oauth2']
        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='Message',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def gmail_users_settings_filters_create(self, user_id, **kwargs):
"""
Creates a filter.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_filters_create(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param Filter body:
:return: Filter
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_filters_create_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_settings_filters_create_with_http_info(user_id, **kwargs)
return data
    def gmail_users_settings_filters_create_with_http_info(self, user_id, **kwargs):
        """
        Creates a filter.

        Synchronous by default; pass a `callback` function to make the HTTP
        request asynchronously (the callback is invoked with the response).

        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.gmail_users_settings_filters_create_with_http_info(user_id, callback=callback_function)

        :param callback function: The callback function for asynchronous request. (optional)
        :param str user_id: User's email address, or the special value "me" for the authenticated user. (required)
        :param str alt: Data format for the response.
        :param str fields: Selector specifying which fields to include in a partial response.
        :param str key: API key; required unless an OAuth 2.0 token is provided.
        :param str oauth_token: OAuth 2.0 token for the current user.
        :param bool pretty_print: Returns response with indentations and line breaks.
        :param str quota_user: Arbitrary per-user string (max 40 chars) for quota purposes; overrides user_ip.
        :param str user_ip: IP address of the originating site, used for per-user limits.
        :param Filter body:
        :return: Filter
                 If the method is called asynchronously, returns the request thread.
        """
        # Every name accepted either positionally or through **kwargs; anything
        # else is rejected with a TypeError below.
        all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # NOTE: locals() is snapshotted here so the named arguments and the
        # validated kwargs can be handled uniformly through the `params` dict;
        # do not introduce locals above this line.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method gmail_users_settings_filters_create" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'user_id' is set
        if ('user_id' not in params) or (params['user_id'] is None):
            raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_filters_create`")
        collection_formats = {}
        resource_path = '/{userId}/settings/filters'.replace('{format}', 'json')
        # Map snake_case Python parameter names onto the camelCase wire names.
        path_params = {}
        if 'user_id' in params:
            path_params['userId'] = params['user_id']
        query_params = {}
        if 'alt' in params:
            query_params['alt'] = params['alt']
        if 'fields' in params:
            query_params['fields'] = params['fields']
        if 'key' in params:
            query_params['key'] = params['key']
        if 'oauth_token' in params:
            query_params['oauth_token'] = params['oauth_token']
        if 'pretty_print' in params:
            query_params['prettyPrint'] = params['pretty_print']
        if 'quota_user' in params:
            query_params['quotaUser'] = params['quota_user']
        if 'user_ip' in params:
            query_params['userIp'] = params['user_ip']
        header_params = {}
        form_params = []
        local_var_files = {}
        # The Filter payload is optional; sent as the JSON body only when
        # provided.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # Authentication setting
        auth_settings = ['Oauth2']
        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='Filter',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def gmail_users_settings_filters_delete(self, user_id, id, **kwargs):
"""
Deletes a filter.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_filters_delete(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str id: The ID of the filter to be deleted. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_filters_delete_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_settings_filters_delete_with_http_info(user_id, id, **kwargs)
return data
def gmail_users_settings_filters_delete_with_http_info(self, user_id, id, **kwargs):
"""
Deletes a filter.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_filters_delete_with_http_info(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str id: The ID of the filter to be deleted. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_filters_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_filters_delete`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_settings_filters_delete`")
collection_formats = {}
resource_path = '/{userId}/settings/filters/{id}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_filters_get(self, user_id, id, **kwargs):
"""
Gets a filter.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_filters_get(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str id: The ID of the filter to be fetched. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: Filter
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_filters_get_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_settings_filters_get_with_http_info(user_id, id, **kwargs)
return data
def gmail_users_settings_filters_get_with_http_info(self, user_id, id, **kwargs):
"""
Gets a filter.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_filters_get_with_http_info(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str id: The ID of the filter to be fetched. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: Filter
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_filters_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_filters_get`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_settings_filters_get`")
collection_formats = {}
resource_path = '/{userId}/settings/filters/{id}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Filter',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_filters_list(self, user_id, **kwargs):
"""
Lists the message filters of a Gmail user.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_filters_list(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: ListFiltersResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_filters_list_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_settings_filters_list_with_http_info(user_id, **kwargs)
return data
def gmail_users_settings_filters_list_with_http_info(self, user_id, **kwargs):
"""
Lists the message filters of a Gmail user.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_filters_list_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: ListFiltersResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_filters_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_filters_list`")
collection_formats = {}
resource_path = '/{userId}/settings/filters'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ListFiltersResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_forwarding_addresses_create(self, user_id, **kwargs):
"""
Creates a forwarding address. If ownership verification is required, a message will be sent to the recipient and the resource's verification status will be set to pending; otherwise, the resource will be created with verification status set to accepted. This method is only available to service account clients that have been delegated domain-wide authority.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_forwarding_addresses_create(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param ForwardingAddress body:
:return: ForwardingAddress
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_forwarding_addresses_create_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_settings_forwarding_addresses_create_with_http_info(user_id, **kwargs)
return data
def gmail_users_settings_forwarding_addresses_create_with_http_info(self, user_id, **kwargs):
"""
Creates a forwarding address. If ownership verification is required, a message will be sent to the recipient and the resource's verification status will be set to pending; otherwise, the resource will be created with verification status set to accepted. This method is only available to service account clients that have been delegated domain-wide authority.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_forwarding_addresses_create_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param ForwardingAddress body:
:return: ForwardingAddress
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_forwarding_addresses_create" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_forwarding_addresses_create`")
collection_formats = {}
resource_path = '/{userId}/settings/forwardingAddresses'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ForwardingAddress',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_forwarding_addresses_delete(self, user_id, forwarding_email, **kwargs):
"""
Deletes the specified forwarding address and revokes any verification that may have been required. This method is only available to service account clients that have been delegated domain-wide authority.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_forwarding_addresses_delete(user_id, forwarding_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str forwarding_email: The forwarding address to be deleted. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_forwarding_addresses_delete_with_http_info(user_id, forwarding_email, **kwargs)
else:
(data) = self.gmail_users_settings_forwarding_addresses_delete_with_http_info(user_id, forwarding_email, **kwargs)
return data
def gmail_users_settings_forwarding_addresses_delete_with_http_info(self, user_id, forwarding_email, **kwargs):
"""
Deletes the specified forwarding address and revokes any verification that may have been required. This method is only available to service account clients that have been delegated domain-wide authority.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_forwarding_addresses_delete_with_http_info(user_id, forwarding_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str forwarding_email: The forwarding address to be deleted. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'forwarding_email', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_forwarding_addresses_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_forwarding_addresses_delete`")
# verify the required parameter 'forwarding_email' is set
if ('forwarding_email' not in params) or (params['forwarding_email'] is None):
raise ValueError("Missing the required parameter `forwarding_email` when calling `gmail_users_settings_forwarding_addresses_delete`")
collection_formats = {}
resource_path = '/{userId}/settings/forwardingAddresses/{forwardingEmail}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'forwarding_email' in params:
path_params['forwardingEmail'] = params['forwarding_email']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_forwarding_addresses_get(self, user_id, forwarding_email, **kwargs):
"""
Gets the specified forwarding address.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_forwarding_addresses_get(user_id, forwarding_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str forwarding_email: The forwarding address to be retrieved. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: ForwardingAddress
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_forwarding_addresses_get_with_http_info(user_id, forwarding_email, **kwargs)
else:
(data) = self.gmail_users_settings_forwarding_addresses_get_with_http_info(user_id, forwarding_email, **kwargs)
return data
def gmail_users_settings_forwarding_addresses_get_with_http_info(self, user_id, forwarding_email, **kwargs):
"""
Gets the specified forwarding address.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_forwarding_addresses_get_with_http_info(user_id, forwarding_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str forwarding_email: The forwarding address to be retrieved. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: ForwardingAddress
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'forwarding_email', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_forwarding_addresses_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_forwarding_addresses_get`")
# verify the required parameter 'forwarding_email' is set
if ('forwarding_email' not in params) or (params['forwarding_email'] is None):
raise ValueError("Missing the required parameter `forwarding_email` when calling `gmail_users_settings_forwarding_addresses_get`")
collection_formats = {}
resource_path = '/{userId}/settings/forwardingAddresses/{forwardingEmail}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'forwarding_email' in params:
path_params['forwardingEmail'] = params['forwarding_email']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ForwardingAddress',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_forwarding_addresses_list(self, user_id, **kwargs):
"""
Lists the forwarding addresses for the specified account.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_forwarding_addresses_list(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: ListForwardingAddressesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_forwarding_addresses_list_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_settings_forwarding_addresses_list_with_http_info(user_id, **kwargs)
return data
def gmail_users_settings_forwarding_addresses_list_with_http_info(self, user_id, **kwargs):
"""
Lists the forwarding addresses for the specified account.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_forwarding_addresses_list_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: ListForwardingAddressesResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_forwarding_addresses_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_forwarding_addresses_list`")
collection_formats = {}
resource_path = '/{userId}/settings/forwardingAddresses'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ListForwardingAddressesResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_get_auto_forwarding(self, user_id, **kwargs):
"""
Gets the auto-forwarding setting for the specified account.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_get_auto_forwarding(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: AutoForwarding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_get_auto_forwarding_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_settings_get_auto_forwarding_with_http_info(user_id, **kwargs)
return data
def gmail_users_settings_get_auto_forwarding_with_http_info(self, user_id, **kwargs):
"""
Gets the auto-forwarding setting for the specified account.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_get_auto_forwarding_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: AutoForwarding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_get_auto_forwarding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_get_auto_forwarding`")
collection_formats = {}
resource_path = '/{userId}/settings/autoForwarding'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AutoForwarding',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_get_imap(self, user_id, **kwargs):
"""
Gets IMAP settings.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_get_imap(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: ImapSettings
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_get_imap_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_settings_get_imap_with_http_info(user_id, **kwargs)
return data
def gmail_users_settings_get_imap_with_http_info(self, user_id, **kwargs):
"""
Gets IMAP settings.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_get_imap_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: ImapSettings
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_get_imap" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_get_imap`")
collection_formats = {}
resource_path = '/{userId}/settings/imap'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ImapSettings',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_get_pop(self, user_id, **kwargs):
"""
Gets POP settings.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_get_pop(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: PopSettings
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_get_pop_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_settings_get_pop_with_http_info(user_id, **kwargs)
return data
def gmail_users_settings_get_pop_with_http_info(self, user_id, **kwargs):
"""
Gets POP settings.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_get_pop_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: PopSettings
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_get_pop" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_get_pop`")
collection_formats = {}
resource_path = '/{userId}/settings/pop'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PopSettings',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_get_vacation(self, user_id, **kwargs):
"""
Gets vacation responder settings.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_get_vacation(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: VacationSettings
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_get_vacation_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_settings_get_vacation_with_http_info(user_id, **kwargs)
return data
def gmail_users_settings_get_vacation_with_http_info(self, user_id, **kwargs):
"""
Gets vacation responder settings.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_get_vacation_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: VacationSettings
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_get_vacation" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_get_vacation`")
collection_formats = {}
resource_path = '/{userId}/settings/vacation'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VacationSettings',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_send_as_create(self, user_id, **kwargs):
"""
Creates a custom \"from\" send-as alias. If an SMTP MSA is specified, Gmail will attempt to connect to the SMTP service to validate the configuration before creating the alias. If ownership verification is required for the alias, a message will be sent to the email address and the resource's verification status will be set to pending; otherwise, the resource will be created with verification status set to accepted. If a signature is provided, Gmail will sanitize the HTML before saving it with the alias. This method is only available to service account clients that have been delegated domain-wide authority.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_create(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param SendAs body:
:return: SendAs
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_send_as_create_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_settings_send_as_create_with_http_info(user_id, **kwargs)
return data
def gmail_users_settings_send_as_create_with_http_info(self, user_id, **kwargs):
"""
Creates a custom \"from\" send-as alias. If an SMTP MSA is specified, Gmail will attempt to connect to the SMTP service to validate the configuration before creating the alias. If ownership verification is required for the alias, a message will be sent to the email address and the resource's verification status will be set to pending; otherwise, the resource will be created with verification status set to accepted. If a signature is provided, Gmail will sanitize the HTML before saving it with the alias. This method is only available to service account clients that have been delegated domain-wide authority.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_create_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param SendAs body:
:return: SendAs
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_send_as_create" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_send_as_create`")
collection_formats = {}
resource_path = '/{userId}/settings/sendAs'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SendAs',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_send_as_delete(self, user_id, send_as_email, **kwargs):
"""
Deletes the specified send-as alias. Revokes any verification that may have been required for using it. This method is only available to service account clients that have been delegated domain-wide authority.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_delete(user_id, send_as_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str send_as_email: The send-as alias to be deleted. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_send_as_delete_with_http_info(user_id, send_as_email, **kwargs)
else:
(data) = self.gmail_users_settings_send_as_delete_with_http_info(user_id, send_as_email, **kwargs)
return data
def gmail_users_settings_send_as_delete_with_http_info(self, user_id, send_as_email, **kwargs):
"""
Deletes the specified send-as alias. Revokes any verification that may have been required for using it. This method is only available to service account clients that have been delegated domain-wide authority.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_delete_with_http_info(user_id, send_as_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str send_as_email: The send-as alias to be deleted. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'send_as_email', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_send_as_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_send_as_delete`")
# verify the required parameter 'send_as_email' is set
if ('send_as_email' not in params) or (params['send_as_email'] is None):
raise ValueError("Missing the required parameter `send_as_email` when calling `gmail_users_settings_send_as_delete`")
collection_formats = {}
resource_path = '/{userId}/settings/sendAs/{sendAsEmail}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'send_as_email' in params:
path_params['sendAsEmail'] = params['send_as_email']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_send_as_get(self, user_id, send_as_email, **kwargs):
"""
Gets the specified send-as alias. Fails with an HTTP 404 error if the specified address is not a member of the collection.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_get(user_id, send_as_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str send_as_email: The send-as alias to be retrieved. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: SendAs
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_send_as_get_with_http_info(user_id, send_as_email, **kwargs)
else:
(data) = self.gmail_users_settings_send_as_get_with_http_info(user_id, send_as_email, **kwargs)
return data
def gmail_users_settings_send_as_get_with_http_info(self, user_id, send_as_email, **kwargs):
"""
Gets the specified send-as alias. Fails with an HTTP 404 error if the specified address is not a member of the collection.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_get_with_http_info(user_id, send_as_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str send_as_email: The send-as alias to be retrieved. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: SendAs
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'send_as_email', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_send_as_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_send_as_get`")
# verify the required parameter 'send_as_email' is set
if ('send_as_email' not in params) or (params['send_as_email'] is None):
raise ValueError("Missing the required parameter `send_as_email` when calling `gmail_users_settings_send_as_get`")
collection_formats = {}
resource_path = '/{userId}/settings/sendAs/{sendAsEmail}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'send_as_email' in params:
path_params['sendAsEmail'] = params['send_as_email']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SendAs',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_send_as_list(self, user_id, **kwargs):
"""
Lists the send-as aliases for the specified account. The result includes the primary send-as address associated with the account as well as any custom \"from\" aliases.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_list(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: ListSendAsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_send_as_list_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_settings_send_as_list_with_http_info(user_id, **kwargs)
return data
def gmail_users_settings_send_as_list_with_http_info(self, user_id, **kwargs):
"""
Lists the send-as aliases for the specified account. The result includes the primary send-as address associated with the account as well as any custom \"from\" aliases.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_list_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: ListSendAsResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_send_as_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_send_as_list`")
collection_formats = {}
resource_path = '/{userId}/settings/sendAs'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ListSendAsResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_send_as_patch(self, user_id, send_as_email, **kwargs):
"""
Updates a send-as alias. If a signature is provided, Gmail will sanitize the HTML before saving it with the alias. Addresses other than the primary address for the account can only be updated by service account clients that have been delegated domain-wide authority. This method supports patch semantics.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_patch(user_id, send_as_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str send_as_email: The send-as alias to be updated. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param SendAs body:
:return: SendAs
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_send_as_patch_with_http_info(user_id, send_as_email, **kwargs)
else:
(data) = self.gmail_users_settings_send_as_patch_with_http_info(user_id, send_as_email, **kwargs)
return data
def gmail_users_settings_send_as_patch_with_http_info(self, user_id, send_as_email, **kwargs):
"""
Updates a send-as alias. If a signature is provided, Gmail will sanitize the HTML before saving it with the alias. Addresses other than the primary address for the account can only be updated by service account clients that have been delegated domain-wide authority. This method supports patch semantics.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_patch_with_http_info(user_id, send_as_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str send_as_email: The send-as alias to be updated. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param SendAs body:
:return: SendAs
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'send_as_email', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_send_as_patch" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_send_as_patch`")
# verify the required parameter 'send_as_email' is set
if ('send_as_email' not in params) or (params['send_as_email'] is None):
raise ValueError("Missing the required parameter `send_as_email` when calling `gmail_users_settings_send_as_patch`")
collection_formats = {}
resource_path = '/{userId}/settings/sendAs/{sendAsEmail}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'send_as_email' in params:
path_params['sendAsEmail'] = params['send_as_email']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SendAs',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_send_as_smime_info_delete(self, user_id, send_as_email, id, **kwargs):
"""
Deletes the specified S/MIME config for the specified send-as alias.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_smime_info_delete(user_id, send_as_email, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str send_as_email: The email address that appears in the \"From:\" header for mail sent using this alias. (required)
:param str id: The immutable ID for the SmimeInfo. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_send_as_smime_info_delete_with_http_info(user_id, send_as_email, id, **kwargs)
else:
(data) = self.gmail_users_settings_send_as_smime_info_delete_with_http_info(user_id, send_as_email, id, **kwargs)
return data
def gmail_users_settings_send_as_smime_info_delete_with_http_info(self, user_id, send_as_email, id, **kwargs):
"""
Deletes the specified S/MIME config for the specified send-as alias.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_smime_info_delete_with_http_info(user_id, send_as_email, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str send_as_email: The email address that appears in the \"From:\" header for mail sent using this alias. (required)
:param str id: The immutable ID for the SmimeInfo. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'send_as_email', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_send_as_smime_info_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_send_as_smime_info_delete`")
# verify the required parameter 'send_as_email' is set
if ('send_as_email' not in params) or (params['send_as_email'] is None):
raise ValueError("Missing the required parameter `send_as_email` when calling `gmail_users_settings_send_as_smime_info_delete`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_settings_send_as_smime_info_delete`")
collection_formats = {}
resource_path = '/{userId}/settings/sendAs/{sendAsEmail}/smimeInfo/{id}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'send_as_email' in params:
path_params['sendAsEmail'] = params['send_as_email']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_send_as_smime_info_get(self, user_id, send_as_email, id, **kwargs):
"""
Gets the specified S/MIME config for the specified send-as alias.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_smime_info_get(user_id, send_as_email, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str send_as_email: The email address that appears in the \"From:\" header for mail sent using this alias. (required)
:param str id: The immutable ID for the SmimeInfo. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: SmimeInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_send_as_smime_info_get_with_http_info(user_id, send_as_email, id, **kwargs)
else:
(data) = self.gmail_users_settings_send_as_smime_info_get_with_http_info(user_id, send_as_email, id, **kwargs)
return data
def gmail_users_settings_send_as_smime_info_get_with_http_info(self, user_id, send_as_email, id, **kwargs):
        """
        Gets the specified S/MIME config for the specified send-as alias.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.gmail_users_settings_send_as_smime_info_get_with_http_info(user_id, send_as_email, id, callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
        :param str send_as_email: The email address that appears in the \"From:\" header for mail sent using this alias. (required)
        :param str id: The immutable ID for the SmimeInfo. (required)
        :param str alt: Data format for the response.
        :param str fields: Selector specifying which fields to include in a partial response.
        :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
        :param str oauth_token: OAuth 2.0 token for the current user.
        :param bool pretty_print: Returns response with indentations and line breaks.
        :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
        :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
        :return: SmimeInfo
            If the method is called asynchronously,
            returns the request thread.
        """
        # Keyword arguments accepted by this endpoint, plus the internal
        # client controls shared by every generated method.
        all_params = ['user_id', 'send_as_email', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot the named arguments via locals(); unknown kwargs are
        # rejected with TypeError, known ones are merged into the snapshot.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method gmail_users_settings_send_as_smime_info_get" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'user_id' is set
        if ('user_id' not in params) or (params['user_id'] is None):
            raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_send_as_smime_info_get`")
        # verify the required parameter 'send_as_email' is set
        if ('send_as_email' not in params) or (params['send_as_email'] is None):
            raise ValueError("Missing the required parameter `send_as_email` when calling `gmail_users_settings_send_as_smime_info_get`")
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `gmail_users_settings_send_as_smime_info_get`")
        collection_formats = {}
        # URL template; {userId}, {sendAsEmail} and {id} are filled from
        # path_params below.
        resource_path = '/{userId}/settings/sendAs/{sendAsEmail}/smimeInfo/{id}'.replace('{format}', 'json')
        path_params = {}
        if 'user_id' in params:
            path_params['userId'] = params['user_id']
        if 'send_as_email' in params:
            path_params['sendAsEmail'] = params['send_as_email']
        if 'id' in params:
            path_params['id'] = params['id']
        # Optional query-string parameters (camelCased for the wire format).
        query_params = {}
        if 'alt' in params:
            query_params['alt'] = params['alt']
        if 'fields' in params:
            query_params['fields'] = params['fields']
        if 'key' in params:
            query_params['key'] = params['key']
        if 'oauth_token' in params:
            query_params['oauth_token'] = params['oauth_token']
        if 'pretty_print' in params:
            query_params['prettyPrint'] = params['pretty_print']
        if 'quota_user' in params:
            query_params['quotaUser'] = params['quota_user']
        if 'user_ip' in params:
            query_params['userIp'] = params['user_ip']
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # Authentication setting
        auth_settings = ['Oauth2']
        # Issue the GET via the shared ApiClient; response_type names the
        # model used for the response body.
        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='SmimeInfo',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def gmail_users_settings_send_as_smime_info_insert(self, user_id, send_as_email, **kwargs):
        """
        Insert (upload) the given S/MIME config for the specified send-as alias. Note that pkcs12 format is required for the key.
        Synchronous by default; supply a `callback` keyword to run the
        request on a worker thread and receive the response through it.
        :param callback function: Invoked with the response when the request
            is made asynchronously. (optional)
        :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
        :param str send_as_email: The email address that appears in the \"From:\" header for mail sent using this alias. (required)
        :param str alt: Data format for the response.
        :param str fields: Selector specifying which fields to include in a partial response.
        :param str key: API key. Required unless you provide an OAuth 2.0 token.
        :param str oauth_token: OAuth 2.0 token for the current user.
        :param bool pretty_print: Returns response with indentations and line breaks.
        :param str quota_user: Arbitrary per-user string for quota purposes; overrides userIp.
        :param str user_ip: IP address of the originating site, for per-user limits.
        :param SmimeInfo body:
        :return: SmimeInfo, or the request thread when called asynchronously.
        """
        # The public wrapper always asks the low-level worker for just the
        # deserialized payload (not the full HTTP response tuple).
        kwargs['_return_http_data_only'] = True
        worker = self.gmail_users_settings_send_as_smime_info_insert_with_http_info
        if kwargs.get('callback'):
            # Async mode: the worker hands back the request thread.
            return worker(user_id, send_as_email, **kwargs)
        # Sync mode: block until the worker returns the response data.
        return worker(user_id, send_as_email, **kwargs)
def gmail_users_settings_send_as_smime_info_insert_with_http_info(self, user_id, send_as_email, **kwargs):
        """
        Insert (upload) the given S/MIME config for the specified send-as alias. Note that pkcs12 format is required for the key.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.gmail_users_settings_send_as_smime_info_insert_with_http_info(user_id, send_as_email, callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
        :param str send_as_email: The email address that appears in the \"From:\" header for mail sent using this alias. (required)
        :param str alt: Data format for the response.
        :param str fields: Selector specifying which fields to include in a partial response.
        :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
        :param str oauth_token: OAuth 2.0 token for the current user.
        :param bool pretty_print: Returns response with indentations and line breaks.
        :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
        :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
        :param SmimeInfo body:
        :return: SmimeInfo
            If the method is called asynchronously,
            returns the request thread.
        """
        # Keyword arguments accepted by this endpoint, plus the internal
        # client controls shared by every generated method.
        all_params = ['user_id', 'send_as_email', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot the named arguments via locals(); unknown kwargs are
        # rejected with TypeError, known ones are merged into the snapshot.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method gmail_users_settings_send_as_smime_info_insert" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'user_id' is set
        if ('user_id' not in params) or (params['user_id'] is None):
            raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_send_as_smime_info_insert`")
        # verify the required parameter 'send_as_email' is set
        if ('send_as_email' not in params) or (params['send_as_email'] is None):
            raise ValueError("Missing the required parameter `send_as_email` when calling `gmail_users_settings_send_as_smime_info_insert`")
        collection_formats = {}
        # URL template; {userId} and {sendAsEmail} are filled from
        # path_params below.
        resource_path = '/{userId}/settings/sendAs/{sendAsEmail}/smimeInfo'.replace('{format}', 'json')
        path_params = {}
        if 'user_id' in params:
            path_params['userId'] = params['user_id']
        if 'send_as_email' in params:
            path_params['sendAsEmail'] = params['send_as_email']
        # Optional query-string parameters (camelCased for the wire format).
        query_params = {}
        if 'alt' in params:
            query_params['alt'] = params['alt']
        if 'fields' in params:
            query_params['fields'] = params['fields']
        if 'key' in params:
            query_params['key'] = params['key']
        if 'oauth_token' in params:
            query_params['oauth_token'] = params['oauth_token']
        if 'pretty_print' in params:
            query_params['prettyPrint'] = params['pretty_print']
        if 'quota_user' in params:
            query_params['quotaUser'] = params['quota_user']
        if 'user_ip' in params:
            query_params['userIp'] = params['user_ip']
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # The optional SmimeInfo model becomes the request body.
        if 'body' in params:
            body_params = params['body']
        # Authentication setting
        auth_settings = ['Oauth2']
        # Issue the POST via the shared ApiClient; response_type names the
        # model used for the response body.
        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='SmimeInfo',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def gmail_users_settings_send_as_smime_info_list(self, user_id, send_as_email, **kwargs):
        """
        Lists S/MIME configs for the specified send-as alias.
        Synchronous by default; supply a `callback` keyword to run the
        request on a worker thread and receive the response through it.
        :param callback function: Invoked with the response when the request
            is made asynchronously. (optional)
        :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
        :param str send_as_email: The email address that appears in the \"From:\" header for mail sent using this alias. (required)
        :param str alt: Data format for the response.
        :param str fields: Selector specifying which fields to include in a partial response.
        :param str key: API key. Required unless you provide an OAuth 2.0 token.
        :param str oauth_token: OAuth 2.0 token for the current user.
        :param bool pretty_print: Returns response with indentations and line breaks.
        :param str quota_user: Arbitrary per-user string for quota purposes; overrides userIp.
        :param str user_ip: IP address of the originating site, for per-user limits.
        :return: ListSmimeInfoResponse, or the request thread when called
            asynchronously.
        """
        # The public wrapper always asks the low-level worker for just the
        # deserialized payload (not the full HTTP response tuple).
        kwargs['_return_http_data_only'] = True
        worker = self.gmail_users_settings_send_as_smime_info_list_with_http_info
        if kwargs.get('callback'):
            # Async mode: the worker hands back the request thread.
            return worker(user_id, send_as_email, **kwargs)
        # Sync mode: block until the worker returns the response data.
        return worker(user_id, send_as_email, **kwargs)
def gmail_users_settings_send_as_smime_info_list_with_http_info(self, user_id, send_as_email, **kwargs):
        """
        Lists S/MIME configs for the specified send-as alias.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.gmail_users_settings_send_as_smime_info_list_with_http_info(user_id, send_as_email, callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
        :param str send_as_email: The email address that appears in the \"From:\" header for mail sent using this alias. (required)
        :param str alt: Data format for the response.
        :param str fields: Selector specifying which fields to include in a partial response.
        :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
        :param str oauth_token: OAuth 2.0 token for the current user.
        :param bool pretty_print: Returns response with indentations and line breaks.
        :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
        :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
        :return: ListSmimeInfoResponse
            If the method is called asynchronously,
            returns the request thread.
        """
        # Keyword arguments accepted by this endpoint, plus the internal
        # client controls shared by every generated method.
        all_params = ['user_id', 'send_as_email', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot the named arguments via locals(); unknown kwargs are
        # rejected with TypeError, known ones are merged into the snapshot.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method gmail_users_settings_send_as_smime_info_list" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'user_id' is set
        if ('user_id' not in params) or (params['user_id'] is None):
            raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_send_as_smime_info_list`")
        # verify the required parameter 'send_as_email' is set
        if ('send_as_email' not in params) or (params['send_as_email'] is None):
            raise ValueError("Missing the required parameter `send_as_email` when calling `gmail_users_settings_send_as_smime_info_list`")
        collection_formats = {}
        # URL template; {userId} and {sendAsEmail} are filled from
        # path_params below.
        resource_path = '/{userId}/settings/sendAs/{sendAsEmail}/smimeInfo'.replace('{format}', 'json')
        path_params = {}
        if 'user_id' in params:
            path_params['userId'] = params['user_id']
        if 'send_as_email' in params:
            path_params['sendAsEmail'] = params['send_as_email']
        # Optional query-string parameters (camelCased for the wire format).
        query_params = {}
        if 'alt' in params:
            query_params['alt'] = params['alt']
        if 'fields' in params:
            query_params['fields'] = params['fields']
        if 'key' in params:
            query_params['key'] = params['key']
        if 'oauth_token' in params:
            query_params['oauth_token'] = params['oauth_token']
        if 'pretty_print' in params:
            query_params['prettyPrint'] = params['pretty_print']
        if 'quota_user' in params:
            query_params['quotaUser'] = params['quota_user']
        if 'user_ip' in params:
            query_params['userIp'] = params['user_ip']
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # Authentication setting
        auth_settings = ['Oauth2']
        # Issue the GET via the shared ApiClient; response_type names the
        # model used for the response body.
        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ListSmimeInfoResponse',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def gmail_users_settings_send_as_smime_info_set_default(self, user_id, send_as_email, id, **kwargs):
        """
        Sets the default S/MIME config for the specified send-as alias.
        Synchronous by default; supply a `callback` keyword to run the
        request on a worker thread and receive the response through it.
        :param callback function: Invoked with the response when the request
            is made asynchronously. (optional)
        :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
        :param str send_as_email: The email address that appears in the \"From:\" header for mail sent using this alias. (required)
        :param str id: The immutable ID for the SmimeInfo. (required)
        :param str alt: Data format for the response.
        :param str fields: Selector specifying which fields to include in a partial response.
        :param str key: API key. Required unless you provide an OAuth 2.0 token.
        :param str oauth_token: OAuth 2.0 token for the current user.
        :param bool pretty_print: Returns response with indentations and line breaks.
        :param str quota_user: Arbitrary per-user string for quota purposes; overrides userIp.
        :param str user_ip: IP address of the originating site, for per-user limits.
        :return: None, or the request thread when called asynchronously.
        """
        # The public wrapper always asks the low-level worker for just the
        # deserialized payload (not the full HTTP response tuple).
        kwargs['_return_http_data_only'] = True
        worker = self.gmail_users_settings_send_as_smime_info_set_default_with_http_info
        if kwargs.get('callback'):
            # Async mode: the worker hands back the request thread.
            return worker(user_id, send_as_email, id, **kwargs)
        # Sync mode: block until the worker returns the response data.
        return worker(user_id, send_as_email, id, **kwargs)
def gmail_users_settings_send_as_smime_info_set_default_with_http_info(self, user_id, send_as_email, id, **kwargs):
        """
        Sets the default S/MIME config for the specified send-as alias.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.gmail_users_settings_send_as_smime_info_set_default_with_http_info(user_id, send_as_email, id, callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
        :param str send_as_email: The email address that appears in the \"From:\" header for mail sent using this alias. (required)
        :param str id: The immutable ID for the SmimeInfo. (required)
        :param str alt: Data format for the response.
        :param str fields: Selector specifying which fields to include in a partial response.
        :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
        :param str oauth_token: OAuth 2.0 token for the current user.
        :param bool pretty_print: Returns response with indentations and line breaks.
        :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
        :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
        :return: None
            If the method is called asynchronously,
            returns the request thread.
        """
        # Keyword arguments accepted by this endpoint, plus the internal
        # client controls shared by every generated method.
        all_params = ['user_id', 'send_as_email', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot the named arguments via locals(); unknown kwargs are
        # rejected with TypeError, known ones are merged into the snapshot.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method gmail_users_settings_send_as_smime_info_set_default" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'user_id' is set
        if ('user_id' not in params) or (params['user_id'] is None):
            raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_send_as_smime_info_set_default`")
        # verify the required parameter 'send_as_email' is set
        if ('send_as_email' not in params) or (params['send_as_email'] is None):
            raise ValueError("Missing the required parameter `send_as_email` when calling `gmail_users_settings_send_as_smime_info_set_default`")
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `gmail_users_settings_send_as_smime_info_set_default`")
        collection_formats = {}
        # URL template; {userId}, {sendAsEmail} and {id} are filled from
        # path_params below.
        resource_path = '/{userId}/settings/sendAs/{sendAsEmail}/smimeInfo/{id}/setDefault'.replace('{format}', 'json')
        path_params = {}
        if 'user_id' in params:
            path_params['userId'] = params['user_id']
        if 'send_as_email' in params:
            path_params['sendAsEmail'] = params['send_as_email']
        if 'id' in params:
            path_params['id'] = params['id']
        # Optional query-string parameters (camelCased for the wire format).
        query_params = {}
        if 'alt' in params:
            query_params['alt'] = params['alt']
        if 'fields' in params:
            query_params['fields'] = params['fields']
        if 'key' in params:
            query_params['key'] = params['key']
        if 'oauth_token' in params:
            query_params['oauth_token'] = params['oauth_token']
        if 'pretty_print' in params:
            query_params['prettyPrint'] = params['pretty_print']
        if 'quota_user' in params:
            query_params['quotaUser'] = params['quota_user']
        if 'user_ip' in params:
            query_params['userIp'] = params['user_ip']
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # Authentication setting
        auth_settings = ['Oauth2']
        # Issue the POST via the shared ApiClient; this endpoint returns no
        # body, hence response_type=None.
        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type=None,
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def gmail_users_settings_send_as_update(self, user_id, send_as_email, **kwargs):
        """
        Updates a send-as alias. If a signature is provided, Gmail will sanitize the HTML before saving it with the alias. Addresses other than the primary address for the account can only be updated by service account clients that have been delegated domain-wide authority.
        Synchronous by default; supply a `callback` keyword to run the
        request on a worker thread and receive the response through it.
        :param callback function: Invoked with the response when the request
            is made asynchronously. (optional)
        :param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
        :param str send_as_email: The send-as alias to be updated. (required)
        :param str alt: Data format for the response.
        :param str fields: Selector specifying which fields to include in a partial response.
        :param str key: API key. Required unless you provide an OAuth 2.0 token.
        :param str oauth_token: OAuth 2.0 token for the current user.
        :param bool pretty_print: Returns response with indentations and line breaks.
        :param str quota_user: Arbitrary per-user string for quota purposes; overrides userIp.
        :param str user_ip: IP address of the originating site, for per-user limits.
        :param SendAs body:
        :return: SendAs, or the request thread when called asynchronously.
        """
        # The public wrapper always asks the low-level worker for just the
        # deserialized payload (not the full HTTP response tuple).
        kwargs['_return_http_data_only'] = True
        worker = self.gmail_users_settings_send_as_update_with_http_info
        if kwargs.get('callback'):
            # Async mode: the worker hands back the request thread.
            return worker(user_id, send_as_email, **kwargs)
        # Sync mode: block until the worker returns the response data.
        return worker(user_id, send_as_email, **kwargs)
def gmail_users_settings_send_as_update_with_http_info(self, user_id, send_as_email, **kwargs):
        """
        Updates a send-as alias. If a signature is provided, Gmail will sanitize the HTML before saving it with the alias. Addresses other than the primary address for the account can only be updated by service account clients that have been delegated domain-wide authority.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.gmail_users_settings_send_as_update_with_http_info(user_id, send_as_email, callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
        :param str send_as_email: The send-as alias to be updated. (required)
        :param str alt: Data format for the response.
        :param str fields: Selector specifying which fields to include in a partial response.
        :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
        :param str oauth_token: OAuth 2.0 token for the current user.
        :param bool pretty_print: Returns response with indentations and line breaks.
        :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
        :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
        :param SendAs body:
        :return: SendAs
            If the method is called asynchronously,
            returns the request thread.
        """
        # Keyword arguments accepted by this endpoint, plus the internal
        # client controls shared by every generated method.
        all_params = ['user_id', 'send_as_email', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot the named arguments via locals(); unknown kwargs are
        # rejected with TypeError, known ones are merged into the snapshot.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method gmail_users_settings_send_as_update" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'user_id' is set
        if ('user_id' not in params) or (params['user_id'] is None):
            raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_send_as_update`")
        # verify the required parameter 'send_as_email' is set
        if ('send_as_email' not in params) or (params['send_as_email'] is None):
            raise ValueError("Missing the required parameter `send_as_email` when calling `gmail_users_settings_send_as_update`")
        collection_formats = {}
        # URL template; {userId} and {sendAsEmail} are filled from
        # path_params below.
        resource_path = '/{userId}/settings/sendAs/{sendAsEmail}'.replace('{format}', 'json')
        path_params = {}
        if 'user_id' in params:
            path_params['userId'] = params['user_id']
        if 'send_as_email' in params:
            path_params['sendAsEmail'] = params['send_as_email']
        # Optional query-string parameters (camelCased for the wire format).
        query_params = {}
        if 'alt' in params:
            query_params['alt'] = params['alt']
        if 'fields' in params:
            query_params['fields'] = params['fields']
        if 'key' in params:
            query_params['key'] = params['key']
        if 'oauth_token' in params:
            query_params['oauth_token'] = params['oauth_token']
        if 'pretty_print' in params:
            query_params['prettyPrint'] = params['pretty_print']
        if 'quota_user' in params:
            query_params['quotaUser'] = params['quota_user']
        if 'user_ip' in params:
            query_params['userIp'] = params['user_ip']
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # The optional SendAs model becomes the request body.
        if 'body' in params:
            body_params = params['body']
        # Authentication setting
        auth_settings = ['Oauth2']
        # Issue the PUT via the shared ApiClient; response_type names the
        # model used for the response body.
        return self.api_client.call_api(resource_path, 'PUT',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='SendAs',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def gmail_users_settings_send_as_verify(self, user_id, send_as_email, **kwargs):
        """
        Sends a verification email to the specified send-as alias address. The verification status must be pending. This method is only available to service account clients that have been delegated domain-wide authority.
        Synchronous by default; supply a `callback` keyword to run the
        request on a worker thread and receive the response through it.
        :param callback function: Invoked with the response when the request
            is made asynchronously. (optional)
        :param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
        :param str send_as_email: The send-as alias to be verified. (required)
        :param str alt: Data format for the response.
        :param str fields: Selector specifying which fields to include in a partial response.
        :param str key: API key. Required unless you provide an OAuth 2.0 token.
        :param str oauth_token: OAuth 2.0 token for the current user.
        :param bool pretty_print: Returns response with indentations and line breaks.
        :param str quota_user: Arbitrary per-user string for quota purposes; overrides userIp.
        :param str user_ip: IP address of the originating site, for per-user limits.
        :return: None, or the request thread when called asynchronously.
        """
        # The public wrapper always asks the low-level worker for just the
        # deserialized payload (not the full HTTP response tuple).
        kwargs['_return_http_data_only'] = True
        worker = self.gmail_users_settings_send_as_verify_with_http_info
        if kwargs.get('callback'):
            # Async mode: the worker hands back the request thread.
            return worker(user_id, send_as_email, **kwargs)
        # Sync mode: block until the worker returns the response data.
        return worker(user_id, send_as_email, **kwargs)
def gmail_users_settings_send_as_verify_with_http_info(self, user_id, send_as_email, **kwargs):
    """
    Sends a verification email to the specified send-as alias address. The verification status must be pending. This method is only available to service account clients that have been delegated domain-wide authority.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_settings_send_as_verify_with_http_info(user_id, send_as_email, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
    :param str send_as_email: The send-as alias to be verified. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of endpoint parameters plus the client-internal control
    # flags understood by api_client.call_api.
    all_params = ['user_id', 'send_as_email', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() captures the explicit args (user_id, send_as_email)
    # and kwargs here; validated kwargs are then folded into the same dict.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_settings_send_as_verify" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'user_id' is set
    if ('user_id' not in params) or (params['user_id'] is None):
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_send_as_verify`")
    # verify the required parameter 'send_as_email' is set
    if ('send_as_email' not in params) or (params['send_as_email'] is None):
        raise ValueError("Missing the required parameter `send_as_email` when calling `gmail_users_settings_send_as_verify`")
    collection_formats = {}
    # URL template; placeholders are substituted by the API client.
    resource_path = '/{userId}/settings/sendAs/{sendAsEmail}/verify'.replace('{format}', 'json')
    path_params = {}
    if 'user_id' in params:
        path_params['userId'] = params['user_id']
    if 'send_as_email' in params:
        path_params['sendAsEmail'] = params['send_as_email']
    # Optional query parameters are forwarded only when provided.
    query_params = {}
    if 'alt' in params:
        query_params['alt'] = params['alt']
    if 'fields' in params:
        query_params['fields'] = params['fields']
    if 'key' in params:
        query_params['key'] = params['key']
    if 'oauth_token' in params:
        query_params['oauth_token'] = params['oauth_token']
    if 'pretty_print' in params:
        query_params['prettyPrint'] = params['pretty_print']
    if 'quota_user' in params:
        query_params['quotaUser'] = params['quota_user']
    if 'user_ip' in params:
        query_params['userIp'] = params['user_ip']
    header_params = {}
    form_params = []
    local_var_files = {}
    # No request body for this endpoint.
    body_params = None
    # Authentication setting
    auth_settings = ['Oauth2']
    # response_type=None: the endpoint returns no payload on success.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def gmail_users_settings_update_auto_forwarding(self, user_id, **kwargs):
    """
    Updates the auto-forwarding setting for the specified account. A verified forwarding address must be specified when auto-forwarding is enabled. This method is only available to service account clients that have been delegated domain-wide authority.
    Synchronous by default; to run asynchronously, supply a `callback`
    keyword argument, and the request thread is returned instead of data.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_settings_update_auto_forwarding(user_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    :param AutoForwarding body:
    :return: AutoForwarding
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the HTTP-info variant for the response data only; when a
    # callback is present that variant returns the request thread
    # instead, so a single delegated call covers both paths.
    kwargs['_return_http_data_only'] = True
    return self.gmail_users_settings_update_auto_forwarding_with_http_info(user_id, **kwargs)
def gmail_users_settings_update_auto_forwarding_with_http_info(self, user_id, **kwargs):
    """
    Updates the auto-forwarding setting for the specified account. A verified forwarding address must be specified when auto-forwarding is enabled. This method is only available to service account clients that have been delegated domain-wide authority.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_settings_update_auto_forwarding_with_http_info(user_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    :param AutoForwarding body:
    :return: AutoForwarding
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of endpoint parameters plus the client-internal control
    # flags understood by api_client.call_api.
    all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() captures user_id and kwargs here; validated kwargs
    # are then folded into the same dict.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_settings_update_auto_forwarding" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'user_id' is set
    if ('user_id' not in params) or (params['user_id'] is None):
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_update_auto_forwarding`")
    collection_formats = {}
    # URL template; the userId placeholder is substituted by the API client.
    resource_path = '/{userId}/settings/autoForwarding'.replace('{format}', 'json')
    path_params = {}
    if 'user_id' in params:
        path_params['userId'] = params['user_id']
    # Optional query parameters are forwarded only when provided.
    query_params = {}
    if 'alt' in params:
        query_params['alt'] = params['alt']
    if 'fields' in params:
        query_params['fields'] = params['fields']
    if 'key' in params:
        query_params['key'] = params['key']
    if 'oauth_token' in params:
        query_params['oauth_token'] = params['oauth_token']
    if 'pretty_print' in params:
        query_params['prettyPrint'] = params['pretty_print']
    if 'quota_user' in params:
        query_params['quotaUser'] = params['quota_user']
    if 'user_ip' in params:
        query_params['userIp'] = params['user_ip']
    header_params = {}
    form_params = []
    local_var_files = {}
    # The AutoForwarding payload, when supplied, becomes the request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # Authentication setting
    auth_settings = ['Oauth2']
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='AutoForwarding',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def gmail_users_settings_update_imap(self, user_id, **kwargs):
    """
    Updates IMAP settings.
    Synchronous by default; to run asynchronously, supply a `callback`
    keyword argument, and the request thread is returned instead of data.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_settings_update_imap(user_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    :param ImapSettings body:
    :return: ImapSettings
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the HTTP-info variant for the response data only; when a
    # callback is present that variant returns the request thread
    # instead, so a single delegated call covers both paths.
    kwargs['_return_http_data_only'] = True
    return self.gmail_users_settings_update_imap_with_http_info(user_id, **kwargs)
def gmail_users_settings_update_imap_with_http_info(self, user_id, **kwargs):
    """
    Updates IMAP settings.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_settings_update_imap_with_http_info(user_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    :param ImapSettings body:
    :return: ImapSettings
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of endpoint parameters plus the client-internal control
    # flags understood by api_client.call_api.
    all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() captures user_id and kwargs here; validated kwargs
    # are then folded into the same dict.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_settings_update_imap" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'user_id' is set
    if ('user_id' not in params) or (params['user_id'] is None):
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_update_imap`")
    collection_formats = {}
    # URL template; the userId placeholder is substituted by the API client.
    resource_path = '/{userId}/settings/imap'.replace('{format}', 'json')
    path_params = {}
    if 'user_id' in params:
        path_params['userId'] = params['user_id']
    # Optional query parameters are forwarded only when provided.
    query_params = {}
    if 'alt' in params:
        query_params['alt'] = params['alt']
    if 'fields' in params:
        query_params['fields'] = params['fields']
    if 'key' in params:
        query_params['key'] = params['key']
    if 'oauth_token' in params:
        query_params['oauth_token'] = params['oauth_token']
    if 'pretty_print' in params:
        query_params['prettyPrint'] = params['pretty_print']
    if 'quota_user' in params:
        query_params['quotaUser'] = params['quota_user']
    if 'user_ip' in params:
        query_params['userIp'] = params['user_ip']
    header_params = {}
    form_params = []
    local_var_files = {}
    # The ImapSettings payload, when supplied, becomes the request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # Authentication setting
    auth_settings = ['Oauth2']
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ImapSettings',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def gmail_users_settings_update_pop(self, user_id, **kwargs):
    """
    Updates POP settings.
    Synchronous by default; to run asynchronously, supply a `callback`
    keyword argument, and the request thread is returned instead of data.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_settings_update_pop(user_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    :param PopSettings body:
    :return: PopSettings
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the HTTP-info variant for the response data only; when a
    # callback is present that variant returns the request thread
    # instead, so a single delegated call covers both paths.
    kwargs['_return_http_data_only'] = True
    return self.gmail_users_settings_update_pop_with_http_info(user_id, **kwargs)
def gmail_users_settings_update_pop_with_http_info(self, user_id, **kwargs):
    """
    Updates POP settings.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_settings_update_pop_with_http_info(user_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    :param PopSettings body:
    :return: PopSettings
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of endpoint parameters plus the client-internal control
    # flags understood by api_client.call_api.
    all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() captures user_id and kwargs here; validated kwargs
    # are then folded into the same dict.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_settings_update_pop" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'user_id' is set
    if ('user_id' not in params) or (params['user_id'] is None):
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_update_pop`")
    collection_formats = {}
    # URL template; the userId placeholder is substituted by the API client.
    resource_path = '/{userId}/settings/pop'.replace('{format}', 'json')
    path_params = {}
    if 'user_id' in params:
        path_params['userId'] = params['user_id']
    # Optional query parameters are forwarded only when provided.
    query_params = {}
    if 'alt' in params:
        query_params['alt'] = params['alt']
    if 'fields' in params:
        query_params['fields'] = params['fields']
    if 'key' in params:
        query_params['key'] = params['key']
    if 'oauth_token' in params:
        query_params['oauth_token'] = params['oauth_token']
    if 'pretty_print' in params:
        query_params['prettyPrint'] = params['pretty_print']
    if 'quota_user' in params:
        query_params['quotaUser'] = params['quota_user']
    if 'user_ip' in params:
        query_params['userIp'] = params['user_ip']
    header_params = {}
    form_params = []
    local_var_files = {}
    # The PopSettings payload, when supplied, becomes the request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # Authentication setting
    auth_settings = ['Oauth2']
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='PopSettings',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def gmail_users_settings_update_vacation(self, user_id, **kwargs):
    """
    Updates vacation responder settings.
    Synchronous by default; to run asynchronously, supply a `callback`
    keyword argument, and the request thread is returned instead of data.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_settings_update_vacation(user_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    :param VacationSettings body:
    :return: VacationSettings
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the HTTP-info variant for the response data only; when a
    # callback is present that variant returns the request thread
    # instead, so a single delegated call covers both paths.
    kwargs['_return_http_data_only'] = True
    return self.gmail_users_settings_update_vacation_with_http_info(user_id, **kwargs)
def gmail_users_settings_update_vacation_with_http_info(self, user_id, **kwargs):
    """
    Updates vacation responder settings.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_settings_update_vacation_with_http_info(user_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    :param VacationSettings body:
    :return: VacationSettings
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of endpoint parameters plus the client-internal control
    # flags understood by api_client.call_api.
    all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() captures user_id and kwargs here; validated kwargs
    # are then folded into the same dict.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_settings_update_vacation" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'user_id' is set
    if ('user_id' not in params) or (params['user_id'] is None):
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_update_vacation`")
    collection_formats = {}
    # URL template; the userId placeholder is substituted by the API client.
    resource_path = '/{userId}/settings/vacation'.replace('{format}', 'json')
    path_params = {}
    if 'user_id' in params:
        path_params['userId'] = params['user_id']
    # Optional query parameters are forwarded only when provided.
    query_params = {}
    if 'alt' in params:
        query_params['alt'] = params['alt']
    if 'fields' in params:
        query_params['fields'] = params['fields']
    if 'key' in params:
        query_params['key'] = params['key']
    if 'oauth_token' in params:
        query_params['oauth_token'] = params['oauth_token']
    if 'pretty_print' in params:
        query_params['prettyPrint'] = params['pretty_print']
    if 'quota_user' in params:
        query_params['quotaUser'] = params['quota_user']
    if 'user_ip' in params:
        query_params['userIp'] = params['user_ip']
    header_params = {}
    form_params = []
    local_var_files = {}
    # The VacationSettings payload, when supplied, becomes the request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # Authentication setting
    auth_settings = ['Oauth2']
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='VacationSettings',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def gmail_users_stop(self, user_id, **kwargs):
    """
    Stop receiving push notifications for the given user mailbox.
    Synchronous by default; to run asynchronously, supply a `callback`
    keyword argument, and the request thread is returned instead of data.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_stop(user_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Ask the HTTP-info variant for the response data only; when a
    # callback is present that variant returns the request thread
    # instead, so a single delegated call covers both paths.
    kwargs['_return_http_data_only'] = True
    return self.gmail_users_stop_with_http_info(user_id, **kwargs)
def gmail_users_stop_with_http_info(self, user_id, **kwargs):
    """
    Stop receiving push notifications for the given user mailbox.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_stop_with_http_info(user_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of endpoint parameters plus the client-internal control
    # flags understood by api_client.call_api.
    all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() captures user_id and kwargs here; validated kwargs
    # are then folded into the same dict.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_stop" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'user_id' is set
    if ('user_id' not in params) or (params['user_id'] is None):
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_stop`")
    collection_formats = {}
    # URL template; the userId placeholder is substituted by the API client.
    resource_path = '/{userId}/stop'.replace('{format}', 'json')
    path_params = {}
    if 'user_id' in params:
        path_params['userId'] = params['user_id']
    # Optional query parameters are forwarded only when provided.
    query_params = {}
    if 'alt' in params:
        query_params['alt'] = params['alt']
    if 'fields' in params:
        query_params['fields'] = params['fields']
    if 'key' in params:
        query_params['key'] = params['key']
    if 'oauth_token' in params:
        query_params['oauth_token'] = params['oauth_token']
    if 'pretty_print' in params:
        query_params['prettyPrint'] = params['pretty_print']
    if 'quota_user' in params:
        query_params['quotaUser'] = params['quota_user']
    if 'user_ip' in params:
        query_params['userIp'] = params['user_ip']
    header_params = {}
    form_params = []
    local_var_files = {}
    # No request body for this endpoint.
    body_params = None
    # Authentication setting
    auth_settings = ['Oauth2']
    # response_type=None: the endpoint returns no payload on success.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def gmail_users_threads_delete(self, user_id, id, **kwargs):
    """
    Immediately and permanently deletes the specified thread. This operation cannot be undone. Prefer threads.trash instead.

    Synchronous by default; supply a `callback` keyword argument to run the
    request asynchronously and have the callback invoked with the response.

    :param callback function: Callback for asynchronous requests. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str id: ID of the Thread to delete. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key identifying your project; required unless an OAuth 2.0 token is provided.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Arbitrary per-user string (max 40 characters) for quota accounting; overrides userIp.
    :param str user_ip: Originating IP address, for per-user limit enforcement.
    :return: None
             If the method is called asynchronously, returns the request thread.
    """
    # With `_return_http_data_only` set, the HTTP-info variant yields only
    # the payload; with a callback it yields the request thread instead, so
    # its result can be returned unchanged in both the sync and async case.
    kwargs['_return_http_data_only'] = True
    return self.gmail_users_threads_delete_with_http_info(user_id, id, **kwargs)
def gmail_users_threads_delete_with_http_info(self, user_id, id, **kwargs):
    """
    Immediately and permanently deletes the specified thread. This operation cannot be undone. Prefer threads.trash instead.

    Synchronous by default; supply a `callback` keyword argument to run the
    request asynchronously and have the callback invoked with the response.

    :param callback function: Callback for asynchronous requests. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str id: ID of the Thread to delete. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key identifying your project; required unless an OAuth 2.0 token is provided.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Arbitrary per-user string (max 40 characters) for quota accounting; overrides userIp.
    :param str user_ip: Originating IP address, for per-user limit enforcement.
    :return: None
             If the method is called asynchronously, returns the request thread.
    """
    all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token',
                  'pretty_print', 'quota_user', 'user_ip']
    all_params.extend(['callback', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Merge the positional arguments with the validated keyword arguments.
    params = {'user_id': user_id, 'id': id}
    for arg_name, arg_val in kwargs.items():
        if arg_name not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_threads_delete" % arg_name
            )
        params[arg_name] = arg_val

    # Both path parameters are mandatory and must not be None.
    if params.get('user_id') is None:
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_threads_delete`")
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `gmail_users_threads_delete`")

    collection_formats = {}
    resource_path = '/{userId}/threads/{id}'.replace('{format}', 'json')

    path_params = {'userId': params['user_id'], 'id': params['id']}

    # Copy any optional query parameters across, translating each Python
    # name to its wire (camelCase) name.
    query_params = {}
    for py_name, wire_name in (('alt', 'alt'), ('fields', 'fields'),
                               ('key', 'key'), ('oauth_token', 'oauth_token'),
                               ('pretty_print', 'prettyPrint'),
                               ('quota_user', 'quotaUser'),
                               ('user_ip', 'userIp')):
        if py_name in params:
            query_params[wire_name] = params[py_name]

    return self.api_client.call_api(
        resource_path, 'DELETE',
        path_params,
        query_params,
        {},                      # no header parameters
        body=None,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=['Oauth2'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def gmail_users_threads_get(self, user_id, id, **kwargs):
    """
    Gets the specified thread.

    Synchronous by default; supply a `callback` keyword argument to run the
    request asynchronously and have the callback invoked with the response.

    :param callback function: Callback for asynchronous requests. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str id: The ID of the thread to retrieve. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key identifying your project; required unless an OAuth 2.0 token is provided.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Arbitrary per-user string (max 40 characters) for quota accounting; overrides userIp.
    :param str user_ip: Originating IP address, for per-user limit enforcement.
    :param str format: The format to return the messages in.
    :param list[str] metadata_headers: When given and format is METADATA, only include headers specified.
    :return: Thread
             If the method is called asynchronously, returns the request thread.
    """
    # With `_return_http_data_only` set, the HTTP-info variant yields only
    # the payload; with a callback it yields the request thread instead, so
    # its result can be returned unchanged in both the sync and async case.
    kwargs['_return_http_data_only'] = True
    return self.gmail_users_threads_get_with_http_info(user_id, id, **kwargs)
def gmail_users_threads_get_with_http_info(self, user_id, id, **kwargs):
    """
    Gets the specified thread.

    Synchronous by default; supply a `callback` keyword argument to run the
    request asynchronously and have the callback invoked with the response.

    :param callback function: Callback for asynchronous requests. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str id: The ID of the thread to retrieve. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key identifying your project; required unless an OAuth 2.0 token is provided.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Arbitrary per-user string (max 40 characters) for quota accounting; overrides userIp.
    :param str user_ip: Originating IP address, for per-user limit enforcement.
    :param str format: The format to return the messages in.
    :param list[str] metadata_headers: When given and format is METADATA, only include headers specified.
    :return: Thread
             If the method is called asynchronously, returns the request thread.
    """
    all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token',
                  'pretty_print', 'quota_user', 'user_ip', 'format',
                  'metadata_headers']
    all_params.extend(['callback', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Merge the positional arguments with the validated keyword arguments.
    params = {'user_id': user_id, 'id': id}
    for arg_name, arg_val in kwargs.items():
        if arg_name not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_threads_get" % arg_name
            )
        params[arg_name] = arg_val

    # Both path parameters are mandatory and must not be None.
    if params.get('user_id') is None:
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_threads_get`")
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `gmail_users_threads_get`")

    collection_formats = {}
    resource_path = '/{userId}/threads/{id}'.replace('{format}', 'json')

    path_params = {'userId': params['user_id'], 'id': params['id']}

    # Copy any optional query parameters across, translating each Python
    # name to its wire (camelCase) name.
    query_params = {}
    for py_name, wire_name in (('alt', 'alt'), ('fields', 'fields'),
                               ('key', 'key'), ('oauth_token', 'oauth_token'),
                               ('pretty_print', 'prettyPrint'),
                               ('quota_user', 'quotaUser'),
                               ('user_ip', 'userIp'),
                               ('format', 'format')):
        if py_name in params:
            query_params[wire_name] = params[py_name]
    if 'metadata_headers' in params:
        query_params['metadataHeaders'] = params['metadata_headers']
        # Repeated query parameter: serialized as multiple key=value pairs.
        collection_formats['metadataHeaders'] = 'multi'

    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        {},                      # no header parameters
        body=None,
        post_params=[],
        files={},
        response_type='Thread',
        auth_settings=['Oauth2'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def gmail_users_threads_list(self, user_id, **kwargs):
    """
    Lists the threads in the user's mailbox.

    Synchronous by default; supply a `callback` keyword argument to run the
    request asynchronously and have the callback invoked with the response.

    :param callback function: Callback for asynchronous requests. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key identifying your project; required unless an OAuth 2.0 token is provided.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Arbitrary per-user string (max 40 characters) for quota accounting; overrides userIp.
    :param str user_ip: Originating IP address, for per-user limit enforcement.
    :param bool include_spam_trash: Include threads from SPAM and TRASH in the results.
    :param list[str] label_ids: Only return threads with labels that match all of the specified label IDs.
    :param int max_results: Maximum number of threads to return.
    :param str page_token: Page token to retrieve a specific page of results in the list.
    :param str q: Only return threads matching the specified query. Supports the same query format as the Gmail search box. Parameter cannot be used when accessing the api using the gmail.metadata scope.
    :return: ListThreadsResponse
             If the method is called asynchronously, returns the request thread.
    """
    # With `_return_http_data_only` set, the HTTP-info variant yields only
    # the payload; with a callback it yields the request thread instead, so
    # its result can be returned unchanged in both the sync and async case.
    kwargs['_return_http_data_only'] = True
    return self.gmail_users_threads_list_with_http_info(user_id, **kwargs)
def gmail_users_threads_list_with_http_info(self, user_id, **kwargs):
    """
    Lists the threads in the user's mailbox.

    Synchronous by default; supply a `callback` keyword argument to run the
    request asynchronously and have the callback invoked with the response.

    :param callback function: Callback for asynchronous requests. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key identifying your project; required unless an OAuth 2.0 token is provided.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Arbitrary per-user string (max 40 characters) for quota accounting; overrides userIp.
    :param str user_ip: Originating IP address, for per-user limit enforcement.
    :param bool include_spam_trash: Include threads from SPAM and TRASH in the results.
    :param list[str] label_ids: Only return threads with labels that match all of the specified label IDs.
    :param int max_results: Maximum number of threads to return.
    :param str page_token: Page token to retrieve a specific page of results in the list.
    :param str q: Only return threads matching the specified query. Supports the same query format as the Gmail search box. Parameter cannot be used when accessing the api using the gmail.metadata scope.
    :return: ListThreadsResponse
             If the method is called asynchronously, returns the request thread.
    """
    all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token',
                  'pretty_print', 'quota_user', 'user_ip',
                  'include_spam_trash', 'label_ids', 'max_results',
                  'page_token', 'q']
    all_params.extend(['callback', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Merge the positional argument with the validated keyword arguments.
    params = {'user_id': user_id}
    for arg_name, arg_val in kwargs.items():
        if arg_name not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_threads_list" % arg_name
            )
        params[arg_name] = arg_val

    # The path parameter is mandatory and must not be None.
    if params.get('user_id') is None:
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_threads_list`")

    collection_formats = {}
    resource_path = '/{userId}/threads'.replace('{format}', 'json')

    path_params = {'userId': params['user_id']}

    # Copy any optional query parameters across, translating each Python
    # name to its wire (camelCase) name.
    query_params = {}
    for py_name, wire_name in (('alt', 'alt'), ('fields', 'fields'),
                               ('key', 'key'), ('oauth_token', 'oauth_token'),
                               ('pretty_print', 'prettyPrint'),
                               ('quota_user', 'quotaUser'),
                               ('user_ip', 'userIp'),
                               ('include_spam_trash', 'includeSpamTrash')):
        if py_name in params:
            query_params[wire_name] = params[py_name]
    if 'label_ids' in params:
        query_params['labelIds'] = params['label_ids']
        # Repeated query parameter: serialized as multiple key=value pairs.
        collection_formats['labelIds'] = 'multi'
    for py_name, wire_name in (('max_results', 'maxResults'),
                               ('page_token', 'pageToken'),
                               ('q', 'q')):
        if py_name in params:
            query_params[wire_name] = params[py_name]

    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        query_params,
        {},                      # no header parameters
        body=None,
        post_params=[],
        files={},
        response_type='ListThreadsResponse',
        auth_settings=['Oauth2'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def gmail_users_threads_modify(self, user_id, id, **kwargs):
    """
    Modifies the labels applied to the thread. This applies to all messages in the thread.

    Synchronous by default; supply a `callback` keyword argument to run the
    request asynchronously and have the callback invoked with the response.

    :param callback function: Callback for asynchronous requests. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str id: The ID of the thread to modify. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key identifying your project; required unless an OAuth 2.0 token is provided.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Arbitrary per-user string (max 40 characters) for quota accounting; overrides userIp.
    :param str user_ip: Originating IP address, for per-user limit enforcement.
    :param ModifyThreadRequest body:
    :return: Thread
             If the method is called asynchronously, returns the request thread.
    """
    # With `_return_http_data_only` set, the HTTP-info variant yields only
    # the payload; with a callback it yields the request thread instead, so
    # its result can be returned unchanged in both the sync and async case.
    kwargs['_return_http_data_only'] = True
    return self.gmail_users_threads_modify_with_http_info(user_id, id, **kwargs)
def gmail_users_threads_modify_with_http_info(self, user_id, id, **kwargs):
    """
    Modifies the labels applied to the thread. This applies to all messages in the thread.

    Synchronous by default; supply a `callback` keyword argument to run the
    request asynchronously and have the callback invoked with the response.

    :param callback function: Callback for asynchronous requests. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str id: The ID of the thread to modify. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key identifying your project; required unless an OAuth 2.0 token is provided.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Arbitrary per-user string (max 40 characters) for quota accounting; overrides userIp.
    :param str user_ip: Originating IP address, for per-user limit enforcement.
    :param ModifyThreadRequest body:
    :return: Thread
             If the method is called asynchronously, returns the request thread.
    """
    all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token',
                  'pretty_print', 'quota_user', 'user_ip', 'body']
    all_params.extend(['callback', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Merge the positional arguments with the validated keyword arguments.
    params = {'user_id': user_id, 'id': id}
    for arg_name, arg_val in kwargs.items():
        if arg_name not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_threads_modify" % arg_name
            )
        params[arg_name] = arg_val

    # Both path parameters are mandatory and must not be None.
    if params.get('user_id') is None:
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_threads_modify`")
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `gmail_users_threads_modify`")

    collection_formats = {}
    resource_path = '/{userId}/threads/{id}/modify'.replace('{format}', 'json')

    path_params = {'userId': params['user_id'], 'id': params['id']}

    # Copy any optional query parameters across, translating each Python
    # name to its wire (camelCase) name.
    query_params = {}
    for py_name, wire_name in (('alt', 'alt'), ('fields', 'fields'),
                               ('key', 'key'), ('oauth_token', 'oauth_token'),
                               ('pretty_print', 'prettyPrint'),
                               ('quota_user', 'quotaUser'),
                               ('user_ip', 'userIp')):
        if py_name in params:
            query_params[wire_name] = params[py_name]

    # The optional request body carries the ModifyThreadRequest payload.
    body_params = params.get('body')

    return self.api_client.call_api(
        resource_path, 'POST',
        path_params,
        query_params,
        {},                      # no header parameters
        body=body_params,
        post_params=[],
        files={},
        response_type='Thread',
        auth_settings=['Oauth2'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def gmail_users_threads_trash(self, user_id, id, **kwargs):
    """
    Moves the specified thread to the trash.

    Synchronous by default; supply a `callback` keyword argument to run the
    request asynchronously and have the callback invoked with the response.

    :param callback function: Callback for asynchronous requests. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str id: The ID of the thread to Trash. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key identifying your project; required unless an OAuth 2.0 token is provided.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Arbitrary per-user string (max 40 characters) for quota accounting; overrides userIp.
    :param str user_ip: Originating IP address, for per-user limit enforcement.
    :return: Thread
             If the method is called asynchronously, returns the request thread.
    """
    # With `_return_http_data_only` set, the HTTP-info variant yields only
    # the payload; with a callback it yields the request thread instead, so
    # its result can be returned unchanged in both the sync and async case.
    kwargs['_return_http_data_only'] = True
    return self.gmail_users_threads_trash_with_http_info(user_id, id, **kwargs)
def gmail_users_threads_trash_with_http_info(self, user_id, id, **kwargs):
    """
    Moves the specified thread to the trash.

    Synchronous by default; supply a `callback` keyword argument to run the
    request asynchronously and have the callback invoked with the response.

    :param callback function: Callback for asynchronous requests. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str id: The ID of the thread to Trash. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key identifying your project; required unless an OAuth 2.0 token is provided.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Arbitrary per-user string (max 40 characters) for quota accounting; overrides userIp.
    :param str user_ip: Originating IP address, for per-user limit enforcement.
    :return: Thread
             If the method is called asynchronously, returns the request thread.
    """
    all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token',
                  'pretty_print', 'quota_user', 'user_ip']
    all_params.extend(['callback', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Merge the positional arguments with the validated keyword arguments.
    params = {'user_id': user_id, 'id': id}
    for arg_name, arg_val in kwargs.items():
        if arg_name not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_threads_trash" % arg_name
            )
        params[arg_name] = arg_val

    # Both path parameters are mandatory and must not be None.
    if params.get('user_id') is None:
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_threads_trash`")
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `gmail_users_threads_trash`")

    collection_formats = {}
    resource_path = '/{userId}/threads/{id}/trash'.replace('{format}', 'json')

    path_params = {'userId': params['user_id'], 'id': params['id']}

    # Copy any optional query parameters across, translating each Python
    # name to its wire (camelCase) name.
    query_params = {}
    for py_name, wire_name in (('alt', 'alt'), ('fields', 'fields'),
                               ('key', 'key'), ('oauth_token', 'oauth_token'),
                               ('pretty_print', 'prettyPrint'),
                               ('quota_user', 'quotaUser'),
                               ('user_ip', 'userIp')):
        if py_name in params:
            query_params[wire_name] = params[py_name]

    return self.api_client.call_api(
        resource_path, 'POST',
        path_params,
        query_params,
        {},                      # no header parameters
        body=None,
        post_params=[],
        files={},
        response_type='Thread',
        auth_settings=['Oauth2'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def gmail_users_threads_untrash(self, user_id, id, **kwargs):
    """
    Removes the specified thread from the trash.

    Synchronous by default; supply a `callback` keyword argument to run the
    request asynchronously and have the callback invoked with the response.

    :param callback function: Callback for asynchronous requests. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str id: The ID of the thread to remove from Trash. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key identifying your project; required unless an OAuth 2.0 token is provided.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Arbitrary per-user string (max 40 characters) for quota accounting; overrides userIp.
    :param str user_ip: Originating IP address, for per-user limit enforcement.
    :return: Thread
             If the method is called asynchronously, returns the request thread.
    """
    # With `_return_http_data_only` set, the HTTP-info variant yields only
    # the payload; with a callback it yields the request thread instead, so
    # its result can be returned unchanged in both the sync and async case.
    kwargs['_return_http_data_only'] = True
    return self.gmail_users_threads_untrash_with_http_info(user_id, id, **kwargs)
def gmail_users_threads_untrash_with_http_info(self, user_id, id, **kwargs):
    """
    Removes the specified thread from the trash.

    Synchronous by default; pass a ``callback`` keyword argument to run the
    request asynchronously and have the callback invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_threads_untrash_with_http_info(user_id, id, callback=callback_function)

    :param callback function: The callback function for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str id: The ID of the thread to remove from Trash. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Arbitrary per-user string for quota purposes; overrides userIp.
    :param str user_ip: IP address of the originating site, for per-user limits.
    :return: Thread
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token',
                  'pretty_print', 'quota_user', 'user_ip']
    all_params.extend(['callback', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Snapshot the named arguments, then fold validated kwargs into it.
    params = locals()
    for arg_name, arg_value in iteritems(params['kwargs']):
        if arg_name not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_threads_untrash" % arg_name
            )
        params[arg_name] = arg_value
    del params['kwargs']

    # Required parameters must be present and non-None.
    if params.get('user_id') is None:
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_threads_untrash`")
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `gmail_users_threads_untrash`")

    collection_formats = {}

    resource_path = '/{userId}/threads/{id}/untrash'.replace('{format}', 'json')
    # Both path parameters are positional arguments, so they are always set.
    path_params = {'userId': params['user_id'], 'id': params['id']}

    # Map optional python-style argument names onto their wire names.
    query_params = {}
    for local_name, wire_name in [('alt', 'alt'), ('fields', 'fields'),
                                  ('key', 'key'),
                                  ('oauth_token', 'oauth_token'),
                                  ('pretty_print', 'prettyPrint'),
                                  ('quota_user', 'quotaUser'),
                                  ('user_ip', 'userIp')]:
        if local_name in params:
            query_params[wire_name] = params[local_name]

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['Oauth2']

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Thread',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def gmail_users_watch(self, user_id, **kwargs):
    """
    Set up or update a push notification watch on the given user mailbox.

    Synchronous by default; pass a ``callback`` keyword argument to run the
    request asynchronously and have the callback invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_watch(user_id, callback=callback_function)

    :param callback function: The callback function for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Arbitrary per-user string for quota purposes; overrides userIp.
    :param str user_ip: IP address of the originating site, for per-user limits.
    :param WatchRequest body:
    :return: WatchResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always wants only the deserialized data.
    kwargs['_return_http_data_only'] = True
    # Synchronous and asynchronous modes both return whatever the helper
    # produces, so a single pass-through call covers both branches.
    return self.gmail_users_watch_with_http_info(user_id, **kwargs)
def gmail_users_watch_with_http_info(self, user_id, **kwargs):
    """
    Set up or update a push notification watch on the given user mailbox.

    Synchronous by default; pass a ``callback`` keyword argument to run the
    request asynchronously and have the callback invoked with the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_watch_with_http_info(user_id, callback=callback_function)

    :param callback function: The callback function for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Arbitrary per-user string for quota purposes; overrides userIp.
    :param str user_ip: IP address of the originating site, for per-user limits.
    :param WatchRequest body:
    :return: WatchResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token',
                  'pretty_print', 'quota_user', 'user_ip', 'body']
    all_params.extend(['callback', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Snapshot the named arguments, then fold validated kwargs into it.
    params = locals()
    for arg_name, arg_value in iteritems(params['kwargs']):
        if arg_name not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_watch" % arg_name
            )
        params[arg_name] = arg_value
    del params['kwargs']

    # The only required parameter must be present and non-None.
    if params.get('user_id') is None:
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_watch`")

    collection_formats = {}

    resource_path = '/{userId}/watch'.replace('{format}', 'json')
    # user_id is a positional argument, so it is always set.
    path_params = {'userId': params['user_id']}

    # Map optional python-style argument names onto their wire names.
    query_params = {}
    for local_name, wire_name in [('alt', 'alt'), ('fields', 'fields'),
                                  ('key', 'key'),
                                  ('oauth_token', 'oauth_token'),
                                  ('pretty_print', 'prettyPrint'),
                                  ('quota_user', 'quotaUser'),
                                  ('user_ip', 'userIp')]:
        if local_name in params:
            query_params[wire_name] = params[local_name]

    header_params = {}
    form_params = []
    local_var_files = {}
    # Optional request body (a WatchRequest) when supplied by the caller.
    body_params = params.get('body')

    # Authentication setting
    auth_settings = ['Oauth2']

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='WatchResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
|
{
"content_hash": "46439c59873fdc160a6c3fb74ce0b805",
"timestamp": "",
"source": "github",
"line_count": 8286,
"max_line_length": 764,
"avg_line_length": 55.44593289886556,
"alnum_prop": 0.6004070305272895,
"repo_name": "CanopyIQ/gmail_client",
"id": "26349e35abe51441d3b94caf031521f650a14542",
"size": "459442",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gmail_client/apis/users_api.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "757232"
},
{
"name": "Shell",
"bytes": "1656"
}
],
"symlink_target": ""
}
|
"""
Sensors on Zigbee Home Automation networks.
For more details on this platform, please refer to the documentation
at https://home-assistant.io/components/sensor.zha/
"""
import logging
from homeassistant.core import callback
from homeassistant.components.sensor import DOMAIN
from homeassistant.const import (
TEMP_CELSIUS, POWER_WATT, ATTR_UNIT_OF_MEASUREMENT
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .core.const import (
DATA_ZHA, DATA_ZHA_DISPATCHERS, ZHA_DISCOVERY_NEW, HUMIDITY, TEMPERATURE,
ILLUMINANCE, PRESSURE, METERING, ELECTRICAL_MEASUREMENT,
GENERIC, SENSOR_TYPE, ATTRIBUTE_CHANNEL, ELECTRICAL_MEASUREMENT_CHANNEL,
SIGNAL_ATTR_UPDATED, SIGNAL_STATE_ATTR)
from .entity import ZhaEntity
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['zha']
# Formatter functions
def pass_through_formatter(value):
    """Identity formatter: hand the reported value back unchanged."""
    return value
def temperature_formatter(value):
    """Scale a raw temperature reading by 1/100 and round to one decimal."""
    return None if value is None else round(value / 100, 1)
def humidity_formatter(value):
    """Scale a raw humidity reading by 1/100 and round to one decimal."""
    return None if value is None else round(float(value) / 100, 1)
def active_power_formatter(value):
    """Scale a raw active-power reading by 1/10 and round to one decimal."""
    return None if value is None else round(float(value) / 10, 1)
def pressure_formatter(value):
    """Round a raw pressure reading to the nearest whole number."""
    return None if value is None else round(float(value))
# Maps sensor type -> formatter applied to raw channel values before they
# become the entity state; types without an entry fall back to
# pass_through_formatter (see Sensor.__init__).
FORMATTER_FUNC_REGISTRY = {
    HUMIDITY: humidity_formatter,
    TEMPERATURE: temperature_formatter,
    PRESSURE: pressure_formatter,
    ELECTRICAL_MEASUREMENT: active_power_formatter,
    GENERIC: pass_through_formatter,
}

# Maps sensor type -> unit of measurement exposed by the entity.
UNIT_REGISTRY = {
    HUMIDITY: '%',
    TEMPERATURE: TEMP_CELSIUS,
    PRESSURE: 'hPa',
    ILLUMINANCE: 'lx',
    METERING: POWER_WATT,
    ELECTRICAL_MEASUREMENT: POWER_WATT,
    GENERIC: None
}

# Maps sensor type -> name of the cluster channel the entity binds to;
# all other types use the generic attribute channel (see Sensor.__init__).
CHANNEL_REGISTRY = {
    ELECTRICAL_MEASUREMENT: ELECTRICAL_MEASUREMENT_CHANNEL,
}

# Sensor types that set self._should_poll = True in Sensor.__init__.
POLLING_REGISTRY = {
    ELECTRICAL_MEASUREMENT: True
}

# Sensor types that set self._force_update = True in Sensor.__init__
# (presumably consumed by the ZhaEntity base class — confirm there).
FORCE_UPDATE_REGISTRY = {
    ELECTRICAL_MEASUREMENT: True
}
async def async_setup_platform(hass, config, async_add_entities,
                               discovery_info=None):
    """Old way of setting up Zigbee Home Automation sensors.

    ZHA sensors are set up exclusively through config entries
    (see async_setup_entry); this legacy hook is intentionally a no-op.
    """
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up the Zigbee Home Automation sensor from config entry."""

    async def discover_sensor(discovery_info):
        # Sensors announced after startup are added as soon as discovered.
        await _async_setup_entities(hass, config_entry, async_add_entities,
                                    [discovery_info])

    # Register for future discoveries and remember the unsubscribe handle
    # so it can be cleaned up when the config entry is unloaded.
    unsub = async_dispatcher_connect(
        hass, ZHA_DISCOVERY_NEW.format(DOMAIN), discover_sensor)
    hass.data[DATA_ZHA][DATA_ZHA_DISPATCHERS].append(unsub)

    # Create entities for sensors discovered before this platform loaded,
    # then drop the staging data so they are not added twice.
    pending = hass.data.get(DATA_ZHA, {}).get(DOMAIN)
    if pending is not None:
        await _async_setup_entities(hass, config_entry, async_add_entities,
                                    pending.values())
        del hass.data[DATA_ZHA][DOMAIN]
async def _async_setup_entities(hass, config_entry, async_add_entities,
                                discovery_infos):
    """Set up the ZHA sensors for the given discovery payloads."""
    entities = [await make_sensor(info) for info in discovery_infos]
    async_add_entities(entities, update_before_add=True)
async def make_sensor(discovery_info):
    """Create ZHA sensors factory.

    Every discovery payload currently maps onto the generic Sensor class;
    the payload's keys become the constructor's keyword arguments.
    """
    return Sensor(**discovery_info)
class Sensor(ZhaEntity):
    """Base ZHA sensor.

    All type-specific behavior (unit, value formatting, polling,
    force-update and channel selection) is looked up from the module-level
    registries keyed by the SENSOR_TYPE discovery kwarg.
    """

    _domain = DOMAIN

    def __init__(self, unique_id, zha_device, channels, **kwargs):
        """Init this sensor."""
        super().__init__(unique_id, zha_device, channels, **kwargs)
        # Unknown/absent sensor types fall back to GENERIC behavior.
        self._sensor_type = kwargs.get(SENSOR_TYPE, GENERIC)
        self._unit = UNIT_REGISTRY.get(self._sensor_type)
        self._formatter_function = FORMATTER_FUNC_REGISTRY.get(
            self._sensor_type,
            pass_through_formatter
        )
        self._force_update = FORCE_UPDATE_REGISTRY.get(
            self._sensor_type,
            False
        )
        self._should_poll = POLLING_REGISTRY.get(
            self._sensor_type,
            False
        )
        # Bind to the type-specific channel when one is registered,
        # otherwise to the generic attribute channel.
        self._channel = self.cluster_channels.get(
            CHANNEL_REGISTRY.get(self._sensor_type, ATTRIBUTE_CHANNEL)
        )

    async def async_added_to_hass(self):
        """Run when about to be added to hass."""
        await super().async_added_to_hass()
        # Refresh state on attribute reports from the bound channel.
        await self.async_accept_signal(
            self._channel, SIGNAL_ATTR_UPDATED, self.async_set_state)
        # Mirror secondary attribute updates into the entity's attributes.
        await self.async_accept_signal(
            self._channel, SIGNAL_STATE_ATTR,
            self.async_update_state_attribute)

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this entity."""
        return self._unit

    @property
    def state(self) -> str:
        """Return the state of the entity."""
        if self._state is None:
            return None
        # Floats are trimmed to two decimals for display.
        if isinstance(self._state, float):
            return str(round(self._state, 2))
        return self._state

    def async_set_state(self, state):
        """Handle state update from channel."""
        # this is necessary because HA saves the unit based on what shows in
        # the UI and not based on what the sensor has configured so we need
        # to flip it back after state restoration
        self._unit = UNIT_REGISTRY.get(self._sensor_type)
        self._state = self._formatter_function(state)
        self.async_schedule_update_ha_state()

    @callback
    def async_restore_last_state(self, last_state):
        """Restore previous state.

        The restored unit is the one HA last displayed; async_set_state
        replaces it with the configured unit on the next live update.
        """
        self._state = last_state.state
        self._unit = last_state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
|
{
"content_hash": "e9226df640044bb2592d1b3a39e66ef5",
"timestamp": "",
"source": "github",
"line_count": 193,
"max_line_length": 77,
"avg_line_length": 30.321243523316063,
"alnum_prop": 0.6481544771018455,
"repo_name": "jamespcole/home-assistant",
"id": "56ce97c87a066b85081650a4cf3291d6f46e4b32",
"size": "5852",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "homeassistant/components/zha/sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1175"
},
{
"name": "Dockerfile",
"bytes": "1081"
},
{
"name": "HCL",
"bytes": "826"
},
{
"name": "Python",
"bytes": "14822074"
},
{
"name": "Ruby",
"bytes": "745"
},
{
"name": "Shell",
"bytes": "17609"
}
],
"symlink_target": ""
}
|
# Example: offload a "double every element" kernel to a MIC coprocessor.
# NOTE: Python 2 syntax (print statements) — run with a Python 2 interpreter.
import pymic as mic
import numpy as np

# load the library with the kernel function (on the target)
device = mic.devices[0]
library = device.load_library(("libdouble_it.so",))
stream = device.get_default_stream()

# Host array 1..32; bind() associates it with a device-side buffer.
na = np.arange(1, 33)
a = stream.bind(na)
print "input:"
print "--------------------------------------"
print na
print
# Run the kernel on the device, then wait for it to finish.
stream.invoke(library.doubleit_kernel, a, a.size)
stream.sync()
print "output:"
print "--------------------------------------"
# Copy the device buffer back to the host and wait for the transfer.
a.update_host()
stream.sync()
print a
|
{
"content_hash": "d49cd8f88ad44c5e4b08bd9b14b760b4",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 59,
"avg_line_length": 20.08,
"alnum_prop": 0.6055776892430279,
"repo_name": "01org/pyMIC",
"id": "4b375435697fae85d39963c8f1de2c60caf35c69",
"size": "2066",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "examples/double_it/double_it.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "14096"
},
{
"name": "C",
"bytes": "55001"
},
{
"name": "C++",
"bytes": "306562"
},
{
"name": "Gnuplot",
"bytes": "1313"
},
{
"name": "Makefile",
"bytes": "33094"
},
{
"name": "Python",
"bytes": "260335"
},
{
"name": "Shell",
"bytes": "7019"
}
],
"symlink_target": ""
}
|
import pygtk
pygtk.require('2.0')
import gtk
from utils.Form import FormBuilder
from datetime import datetime
from dateutil.relativedelta import relativedelta
class FiltroFecha:
    """Modal date-range picker window built from a Glade layout.

    On accept, calls the supplied handler with the selected start and end
    dates; on cancel, simply closes the window.
    """

    def __init__(self, on_aceptar_handler):
        # Callback invoked as handler(fecha_inicio, fecha_fin) on accept.
        self.on_aceptar_handler = on_aceptar_handler
        builder = gtk.Builder()
        builder.add_from_file('filtro_fecha_frame.glade')
        builder.connect_signals(self)
        self.form_builder = FormBuilder(builder, 'Venta')
        # Default range: the last 15 days up to today.
        start_date = datetime.today() - relativedelta(days=15)
        end_date = datetime.today()
        # month - 1: the date widgets appear to be gtk.Calendar, whose
        # months are 0-based (January == 0) — confirm in the .glade file.
        self.form_builder.load_widget_value('fecha_inicio',
            [start_date.year, start_date.month - 1, start_date.day])
        self.form_builder.load_widget_value('fecha_fin',
            [end_date.year, end_date.month - 1, end_date.day])
        self.window = builder.get_object('fecha_filtro_window')
        self.window.show()

    def on_aceptar_btn_clicked(self, widget):
        """Close the window and report the chosen range to the handler."""
        self.window.destroy()
        self.on_aceptar_handler(self.form_builder.get_widget_value('fecha_inicio'),
            self.form_builder.get_widget_value('fecha_fin'))

    def on_cancelar_btn_clicked(self, widget):
        """Close the window without invoking the handler."""
        self.window.destroy()
|
{
"content_hash": "194ea8ba11f33a8c96a68a6883a67831",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 83,
"avg_line_length": 36.78787878787879,
"alnum_prop": 0.658154859967051,
"repo_name": "hey-mx/ventas_pygtk",
"id": "819fd9f36676549ef56eca4e33e3f2b5df26af6c",
"size": "1214",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "utils/FiltroFecha.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "55092"
}
],
"symlink_target": ""
}
|
from django.shortcuts import get_object_or_404, render_to_response
from django.template.context import RequestContext
from dnd.menu import MenuItem
from dnd.menu import menu_item, submenu_item
from dnd.dnd_paginator import DndPaginator
from dnd.filters import ( RaceFilter, RaceTypeFilter )
from dnd.models import (Rulebook, Race, RaceType )
from dnd.views import is_3e_edition, permanent_redirect_view
@menu_item(MenuItem.BESTIARY)
@submenu_item(MenuItem.Bestiary.RACES)
def race_index(request):
    """Filterable, paginated index of all races."""
    race_filter = RaceFilter(request.GET, queryset=Race.objects.select_related(
        'rulebook', 'rulebook__dnd_edition', 'school').distinct())
    paginator = DndPaginator(race_filter.qs, request)
    context = {
        'request': request,
        'race_list': paginator.items(),
        'paginator': paginator,
        'filter': race_filter,
        # Tells the template whether the filter form was actually submitted.
        'form_submitted': 1 if '_filter' in request.GET else 0,
    }
    return render_to_response('dnd/races/race_index.html', context,
                              context_instance=RequestContext(request), )
@menu_item(MenuItem.BESTIARY)
@submenu_item(MenuItem.Bestiary.RACES)
def race_list_by_rulebook(request):
    """Paginated list of rulebooks, used to browse races per book."""
    all_rulebooks = Rulebook.objects.select_related('dnd_edition').all()
    paginator = DndPaginator(all_rulebooks, request)
    context = {
        'request': request,
        'rulebook_list': paginator.items(),
        'paginator': paginator,
    }
    return render_to_response('dnd/races/race_list_by_rulebook.html', context,
                              context_instance=RequestContext(request), )
@menu_item(MenuItem.BESTIARY)
@submenu_item(MenuItem.Bestiary.RACES)
def races_in_rulebook(request, rulebook_slug, rulebook_id):
    """List all races published in a single rulebook."""
    rulebook = get_object_or_404(Rulebook, pk=rulebook_id)
    # The numeric id is authoritative; a stale or wrong slug triggers a
    # permanent redirect to the canonical URL instead of serving duplicates.
    if not rulebook.slug == rulebook_slug:
        return permanent_redirect_view(request, 'races_in_rulebook',
            kwargs={
                'rulebook_slug': rulebook.slug,
                'rulebook_id': rulebook_id, })
    race_list = rulebook.race_set.select_related(
        'rulebook', 'rulebook__dnd_edition', 'school').all()
    paginator = DndPaginator(race_list, request)
    return render_to_response('dnd/races/races_in_rulebook.html',
        {
            'rulebook': rulebook,
            'race_list': paginator.items(),
            'paginator': paginator,
            'request': request,
            # Older (3e) content gets an accuracy warning banner.
            'display_3e_warning': is_3e_edition(rulebook.dnd_edition),
        }, context_instance=RequestContext(request), )
@menu_item(MenuItem.BESTIARY)
@submenu_item(MenuItem.Bestiary.RACES)
def race_detail(request, rulebook_slug, rulebook_id, race_slug, race_id):
    """Detail page for one race, including speeds, favored classes,
    languages, and the same race in other rulebooks."""
    race = get_object_or_404(
        Race.objects.select_related('rulebook', 'rulebook__dnd_edition', 'size', 'automatic_languages',
                                    'bonus_languages', 'race_type'),
        pk=race_id)
    assert isinstance(race, Race)
    # Canonicalize the URL: ids are authoritative, so any mismatch in slug
    # or rulebook id gets a permanent redirect.  NOTE: uses the Python 2
    # `unicode` builtin — this module targets Python 2.
    if (race.slug != race_slug or
            unicode(race.rulebook.id) != rulebook_id or
            race.rulebook.slug != rulebook_slug):
        return permanent_redirect_view(request, 'race_detail',
            kwargs={
                'rulebook_slug': race.rulebook.slug,
                'rulebook_id': race.rulebook.id,
                'race_slug': race.slug,
                'race_id': race.id, })
    race_speeds = race.racespeed_set.select_related('type', ).all()
    favored_classes = race.favored_classes.select_related('character_class', ).all()
    # Same-named races from other rulebooks, for cross-referencing.
    related_races = Race.objects.filter(slug=race.slug).exclude(rulebook__id=race.rulebook.id).select_related(
        'rulebook', 'rulebook__dnd_edition').all()
    return render_to_response('dnd/races/race_detail.html',
        {
            'race': race,
            'rulebook': race.rulebook,
            'request': request,
            'race_speeds': race_speeds,
            'favored_classes': favored_classes,
            'automatic_languages': race.automatic_languages.all(),
            'bonus_languages': race.bonus_languages.all(),
            'related_races': related_races,
            'i_like_it_url': request.build_absolute_uri(),
            'inaccurate_url': request.build_absolute_uri(),
            # Older (3e) content gets an accuracy warning banner.
            'display_3e_warning': is_3e_edition(race.rulebook.dnd_edition),
        }, context_instance=RequestContext(request), )
@menu_item(MenuItem.BESTIARY)
@submenu_item(MenuItem.Bestiary.RACE_TYPES)
def race_type_index(request):
    """Filterable, paginated index of race types."""
    type_filter = RaceTypeFilter(request.GET,
                                 queryset=RaceType.objects.distinct())
    paginator = DndPaginator(type_filter.qs, request)
    context = {
        'request': request,
        'race_type_list': paginator.items(),
        'paginator': paginator,
        'filter': type_filter,
        'BaseSaveType': RaceType.BaseSaveType,  # enums
        'BaseAttackType': RaceType.BaseAttackType,  # enums
        # Tells the template whether the filter form was actually submitted.
        'form_submitted': 1 if '_filter' in request.GET else 0,
    }
    return render_to_response('dnd/races/race_type_index.html', context,
                              context_instance=RequestContext(request), )
@menu_item(MenuItem.BESTIARY)
@submenu_item(MenuItem.Bestiary.RACE_TYPES)
def race_type_detail(request, race_type_slug):
    """Detail page for one race type plus every race of that type."""
    race_type = get_object_or_404(RaceType.objects, slug=race_type_slug)
    assert isinstance(race_type, RaceType)
    race_list = race_type.race_set.all()
    paginator = DndPaginator(race_list, request)
    context = {
        'race_type': race_type,
        'paginator': paginator,
        'race_list': race_list,
        'BaseSaveType': RaceType.BaseSaveType,  # enums
        'BaseAttackType': RaceType.BaseAttackType,  # enums
        'i_like_it_url': request.build_absolute_uri(),
        'inaccurate_url': request.build_absolute_uri(),
    }
    return render_to_response('dnd/races/race_type_detail.html', context,
                              context_instance=RequestContext(request), )
|
{
"content_hash": "ade056f9a456275f32f6203401058d57",
"timestamp": "",
"source": "github",
"line_count": 156,
"max_line_length": 110,
"avg_line_length": 45.782051282051285,
"alnum_prop": 0.5183422010641277,
"repo_name": "dndtools/dndtools",
"id": "8b3cb255635c0f3a54d7259a2da1b7836a4f5701",
"size": "7168",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "dndtools/dnd/races/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "53355"
},
{
"name": "HTML",
"bytes": "196628"
},
{
"name": "JavaScript",
"bytes": "1941"
},
{
"name": "Python",
"bytes": "388605"
}
],
"symlink_target": ""
}
|
import os
import re
from testrunner.local import testsuite
from testrunner.objects import testcase
from testrunner.outproc import webkit
# Matches "// Files: a.js b.js" directives; may occur on several lines.
FILES_PATTERN = re.compile(r"//\s+Files:(.*)")
# Marks tests that need their own file name exported as TEST_FILE_NAME.
SELF_SCRIPT_PATTERN = re.compile(r"//\s+Env: TEST_FILE_NAME")
class TestLoader(testsuite.JSTestLoader):
    """Loads webkit .js tests, skipping the shared helper directory."""

    @property
    def excluded_dirs(self):
        # "resources" holds the standalone pre/post harness files, not tests.
        return {"resources"}
class TestSuite(testsuite.TestSuite):
    """Webkit test suite: wires the loader and test case classes together."""

    def _test_loader_class(self):
        return TestLoader

    def _test_class(self):
        return TestCase
class TestCase(testcase.D8TestCase):
    """A single webkit test; files and flags are parsed from its JS source."""

    def __init__(self, *args, **kwargs):
        super(TestCase, self).__init__(*args, **kwargs)

        source = self.get_source()
        self._source_files = self._parse_source_files(source)
        self._source_flags = self._parse_source_flags(source)

    def _parse_source_files(self, source):
        """Build the ordered list of files/args passed to d8 for this test.

        Layout: [optional -e TEST_FILE_NAME env], extra "// Files:" files,
        standalone-pre.js, the test itself, standalone-post.js.
        """
        files_list = []  # List of file names to append to command arguments.
        # Accept several lines of 'Files:'.  (Was a `while True`/`break`
        # loop with a stray trailing semicolon.)
        files_match = FILES_PATTERN.search(source)
        while files_match:
            files_list += files_match.group(1).strip().split()
            files_match = FILES_PATTERN.search(source, files_match.end())
        files = [os.path.normpath(os.path.join(self.suite.root, '..', '..', f))
                 for f in files_list]

        testfilename = os.path.join(self.suite.root, self.path + self._get_suffix())
        if SELF_SCRIPT_PATTERN.search(source):
            # Expose the test's own path to the script under test.
            env = ["-e", "TEST_FILE_NAME=\"%s\"" % testfilename.replace("\\", "\\\\")]
            files = env + files
        files.append(os.path.join(self.suite.root, "resources/standalone-pre.js"))
        files.append(testfilename)
        files.append(os.path.join(self.suite.root, "resources/standalone-post.js"))
        return files

    def _get_files_params(self):
        files = self._source_files
        if self._test_config.isolates:
            # Isolate runs get the whole file list twice, split by --isolate.
            files = files + ['--isolate'] + files
        return files

    def _get_source_flags(self):
        return self._source_flags

    def _get_source_path(self):
        return os.path.join(self.suite.root, self.path + self._get_suffix())

    @property
    def output_proc(self):
        # Output is compared against the checked-in "-expected.txt" file.
        return webkit.OutProc(
            self.expected_outcomes,
            os.path.join(self.suite.root, self.path) + '-expected.txt')
def GetSuite(*args, **kwargs):
    """Entry point used by the test runner to instantiate this suite."""
    return TestSuite(*args, **kwargs)
|
{
"content_hash": "287d2b8ff8500f446b74fa382b669c05",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 80,
"avg_line_length": 30.413333333333334,
"alnum_prop": 0.6611135466900482,
"repo_name": "arangodb/arangodb",
"id": "500f44656b9e365d0b17223727ba8d1222d62da8",
"size": "3853",
"binary": false,
"copies": "11",
"ref": "refs/heads/devel",
"path": "3rdParty/V8/v7.9.317/test/webkit/testcfg.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "61827"
},
{
"name": "C",
"bytes": "311036"
},
{
"name": "C++",
"bytes": "35149373"
},
{
"name": "CMake",
"bytes": "387268"
},
{
"name": "CSS",
"bytes": "210549"
},
{
"name": "EJS",
"bytes": "232160"
},
{
"name": "HTML",
"bytes": "23114"
},
{
"name": "JavaScript",
"bytes": "33841256"
},
{
"name": "LLVM",
"bytes": "15003"
},
{
"name": "NASL",
"bytes": "381737"
},
{
"name": "NSIS",
"bytes": "47138"
},
{
"name": "Pascal",
"bytes": "75391"
},
{
"name": "Perl",
"bytes": "9811"
},
{
"name": "PowerShell",
"bytes": "6806"
},
{
"name": "Python",
"bytes": "190515"
},
{
"name": "SCSS",
"bytes": "255542"
},
{
"name": "Shell",
"bytes": "133576"
},
{
"name": "TypeScript",
"bytes": "179074"
},
{
"name": "Yacc",
"bytes": "79620"
}
],
"symlink_target": ""
}
|
from configparser import ConfigParser, ExtendedInterpolation
import os
class ConfigEvnParser:
    """Read configuration from INI-style config files and the environment.

    The default config path is './config/config.properties'; it can be
    overridden via the CONFIG_FILE_PATH environment variable.  Files are
    parsed with configparser, so each file needs a '[DEFAULT]' section
    header.  Environment variables both override config keys (by
    upper-cased name) and are available for ${VAR} interpolation.
    """

    DEFAULT_SECTION_NAME = 'DEFAULT'
    CONFIG_FILE_PATH_NAME = 'CONFIG_FILE_PATH'

    def __init__(self, config_file_path='./config/config.properties'):
        self.config = {}
        # Parser seeded with every environment variable so config values can
        # interpolate them with ${VAR} syntax.
        interp_parser = ConfigParser(os.environ, allow_no_value=True,
                                     interpolation=ExtendedInterpolation())
        interp_parser.read(config_file_path)
        override_path = os.environ.get(self.CONFIG_FILE_PATH_NAME,
                                       config_file_path)
        interp_parser.read(override_path)

        # A raw parser tracks only the keys the config files declare, so
        # environment-only variables never leak into self.config.
        raw_parser = ConfigParser(allow_no_value=True)
        raw_parser.read(config_file_path)
        raw_parser.read(override_path)

        for key in raw_parser[self.DEFAULT_SECTION_NAME]:
            # An upper-cased environment variable overrides the file value.
            env_value = os.environ.get(key.upper(), None)
            if env_value is not None:
                interp_parser.set(self.DEFAULT_SECTION_NAME, key, env_value)
            self.set(key, interp_parser.get(self.DEFAULT_SECTION_NAME, key))

    def set(self, key, value):
        """Store *value* (stringified) under the upper-cased *key*."""
        self.config[key.upper()] = str(value)

    def get(self, key):
        """Return the raw string value for *key* (case-insensitive)."""
        return self.config[key.upper()]

    def get_bool(self, key):
        """Return the value of *key* interpreted as a boolean."""
        return self._str2bool(self.config[key.upper()])

    def get_int(self, key):
        """Return the value of *key* as an int."""
        return int(self.config[key.upper()])

    def _str2bool(self, v):
        # Accepted truthy spellings; anything else is False.
        return v.lower() in ("yes", "true", "t", "1")
|
{
"content_hash": "e772442a09d46d123293e1a0c3dde358",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 138,
"avg_line_length": 42.5,
"alnum_prop": 0.683710407239819,
"repo_name": "liufuyang/lifeinweeks",
"id": "8699921ce558cdbb111f2fbe442840f87d065c08",
"size": "2210",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "backend/utils/config/config_env_parser.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "398"
},
{
"name": "JavaScript",
"bytes": "26474"
},
{
"name": "Nginx",
"bytes": "1785"
},
{
"name": "Python",
"bytes": "9361"
},
{
"name": "Shell",
"bytes": "1972"
},
{
"name": "Vue",
"bytes": "20081"
}
],
"symlink_target": ""
}
|
"""Sample of a reticle top level Component."""
import gdsfactory as gf
from gdsfactory.types import Component
def mzi_te_pads1(**kwargs) -> Component:
    """MZI with heater phase shifter, single-fiber IO, top electrical pads."""
    component = gf.c.mzi_phase_shifter_top_heater_metal(delta_length=40)
    component = gf.routing.add_fiber_single(component)
    component = component.rotate(-90)
    component = gf.routing.add_electrical_pads_top(component)
    gf.add_labels.add_labels_to_ports_electrical(
        component=component, prefix=f"elec-{component.name}-")
    return component
def mzi_te_pads2(**kwargs) -> Component:
    """MZI with heater phase shifter, single-fiber IO, top DC pads."""
    component = gf.c.mzi_phase_shifter_top_heater_metal(delta_length=40)
    component = gf.routing.add_fiber_single(component)
    component = component.rotate(-90)
    component = gf.routing.add_electrical_pads_top_dc(component)
    gf.add_labels.add_labels_to_ports_electrical(
        component=component, prefix=f"elec-{component.name}-")
    return component
def mzi_te_pads3(**kwargs) -> Component:
    """MZI with heater phase shifter, single-fiber IO, shortest-path pads."""
    component = gf.c.mzi_phase_shifter_top_heater_metal(delta_length=40)
    component = gf.routing.add_fiber_single(component)
    component = component.rotate(-90)
    component = gf.routing.add_electrical_pads_shortest(component)
    gf.add_labels.add_labels_to_ports_vertical_dc(
        component=component, prefix=f"elec-{component.name}-")
    return component
if __name__ == "__main__":
    # c = mzi_te_pads3()
    # c.show(show_ports=True)

    # Same reticle as mzi_te_pads1, but with a TM grating coupler and extra
    # optical labels; built inline for interactive inspection.
    gc = gf.c.grating_coupler_elliptical_tm()
    c = gf.c.mzi_phase_shifter_top_heater_metal(delta_length=40)
    c = gf.routing.add_fiber_single(
        c, get_input_label_text_function=None, grating_coupler=gc
    )
    c = c.rotate(-90)
    c = gf.routing.add_electrical_pads_top(c)
    gf.add_labels.add_labels_to_ports_electrical(component=c, prefix=f"elec-{c.name}-")
    # Label both the loopback and the vertical TM optical ports.
    gf.add_labels.add_labels_to_ports(
        component=c, port_type="loopback", prefix=f"opttm1500-{c.name}-"
    )
    gf.add_labels.add_labels_to_ports(
        component=c, port_type="vertical_tm", prefix=f"opttm1500-{c.name}-"
    )
    c.show(show_ports=True)
|
{
"content_hash": "c1343aa045e49decd8d085531433aff9",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 88,
"avg_line_length": 34.53846153846154,
"alnum_prop": 0.6581291759465479,
"repo_name": "gdsfactory/gdsfactory",
"id": "b94ceddb09bfdb3ccddd196b036f6eeb3d836530",
"size": "1796",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gdsfactory/samples/23_reticle.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "605"
},
{
"name": "Dockerfile",
"bytes": "31"
},
{
"name": "Makefile",
"bytes": "4572"
},
{
"name": "Python",
"bytes": "2471982"
},
{
"name": "Shell",
"bytes": "671"
},
{
"name": "XS",
"bytes": "10045"
}
],
"symlink_target": ""
}
|
"""Test Alexa capabilities."""
from unittest.mock import patch
import pytest
from homeassistant.components.alexa import smart_home
from homeassistant.components.climate import const as climate
from homeassistant.components.lock import STATE_JAMMED, STATE_LOCKING, STATE_UNLOCKING
from homeassistant.components.media_player.const import (
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_STOP,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
)
from homeassistant.const import (
ATTR_UNIT_OF_MEASUREMENT,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_CUSTOM_BYPASS,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_DISARMED,
STATE_LOCKED,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
STATE_UNLOCKED,
TEMP_CELSIUS,
)
from . import (
assert_request_calls_service,
assert_request_fails,
get_default_config,
get_new_request,
reported_properties,
)
from tests.common import async_mock_service
@pytest.mark.parametrize("adjust", ["-5", "5", "-80"])
async def test_api_adjust_brightness(hass, adjust):
    """AdjustBrightness must translate the delta into brightness_step_pct."""
    directive = get_new_request(
        "Alexa.BrightnessController", "AdjustBrightness", "light#test"
    )
    directive["directive"]["payload"]["brightnessDelta"] = adjust
    # A light in a known state so the controller has something to adjust.
    hass.states.async_set(
        "light.test", "off", {"friendly_name": "Test light", "brightness": "77"}
    )
    turn_on_calls = async_mock_service(hass, "light", "turn_on")
    response = await smart_home.async_handle_message(
        hass, get_default_config(), directive
    )
    await hass.async_block_till_done()
    assert "event" in response
    assert response["event"]["header"]["name"] == "Response"
    assert len(turn_on_calls) == 1
    call = turn_on_calls[0]
    assert call.data["entity_id"] == "light.test"
    assert call.data["brightness_step_pct"] == int(adjust)
async def test_api_set_color_rgb(hass):
    """SetColor with an HSB payload must be converted to an RGB service call."""
    directive = get_new_request("Alexa.ColorController", "SetColor", "light#test")
    directive["directive"]["payload"]["color"] = {
        "hue": "120",
        "saturation": "0.612",
        "brightness": "0.342",
    }
    hass.states.async_set(
        "light.test", "off", {"friendly_name": "Test light", "supported_features": 16}
    )
    turn_on_calls = async_mock_service(hass, "light", "turn_on")
    response = await smart_home.async_handle_message(
        hass, get_default_config(), directive
    )
    await hass.async_block_till_done()
    assert "event" in response
    assert response["event"]["header"]["name"] == "Response"
    assert len(turn_on_calls) == 1
    call = turn_on_calls[0]
    assert call.data["entity_id"] == "light.test"
    # HSB (120°, 0.612, 0.342) converts to this RGB triple.
    assert call.data["rgb_color"] == (33, 87, 33)
async def test_api_set_color_temperature(hass):
    """SetColorTemperature must forward the Kelvin value to light.turn_on."""
    directive = get_new_request(
        "Alexa.ColorTemperatureController", "SetColorTemperature", "light#test"
    )
    directive["directive"]["payload"]["colorTemperatureInKelvin"] = "7500"
    hass.states.async_set("light.test", "off", {"friendly_name": "Test light"})
    turn_on_calls = async_mock_service(hass, "light", "turn_on")
    response = await smart_home.async_handle_message(
        hass, get_default_config(), directive
    )
    await hass.async_block_till_done()
    assert "event" in response
    assert response["event"]["header"]["name"] == "Response"
    assert len(turn_on_calls) == 1
    call = turn_on_calls[0]
    assert call.data["entity_id"] == "light.test"
    assert call.data["kelvin"] == 7500
@pytest.mark.parametrize("result,initial", [(383, "333"), (500, "500")])
async def test_api_decrease_color_temp(hass, result, initial):
    """DecreaseColorTemperature steps mireds up, clamped at max_mireds."""
    directive = get_new_request(
        "Alexa.ColorTemperatureController", "DecreaseColorTemperature", "light#test"
    )
    hass.states.async_set(
        "light.test",
        "off",
        {"friendly_name": "Test light", "color_temp": initial, "max_mireds": 500},
    )
    turn_on_calls = async_mock_service(hass, "light", "turn_on")
    response = await smart_home.async_handle_message(
        hass, get_default_config(), directive
    )
    await hass.async_block_till_done()
    assert "event" in response
    assert response["event"]["header"]["name"] == "Response"
    assert len(turn_on_calls) == 1
    call = turn_on_calls[0]
    assert call.data["entity_id"] == "light.test"
    assert call.data["color_temp"] == result
@pytest.mark.parametrize("result,initial", [(283, "333"), (142, "142")])
async def test_api_increase_color_temp(hass, result, initial):
    """IncreaseColorTemperature steps mireds down, clamped at min_mireds."""
    directive = get_new_request(
        "Alexa.ColorTemperatureController", "IncreaseColorTemperature", "light#test"
    )
    hass.states.async_set(
        "light.test",
        "off",
        {"friendly_name": "Test light", "color_temp": initial, "min_mireds": 142},
    )
    turn_on_calls = async_mock_service(hass, "light", "turn_on")
    response = await smart_home.async_handle_message(
        hass, get_default_config(), directive
    )
    await hass.async_block_till_done()
    assert "event" in response
    assert response["event"]["header"]["name"] == "Response"
    assert len(turn_on_calls) == 1
    call = turn_on_calls[0]
    assert call.data["entity_id"] == "light.test"
    assert call.data["color_temp"] == result
@pytest.mark.parametrize(
    "domain,payload,source_list,idx",
    [
        ("media_player", "GAME CONSOLE", ["tv", "game console", 10000], 1),
        ("media_player", "SATELLITE TV", ["satellite-tv", "game console"], 0),
        ("media_player", "SATELLITE TV", ["satellite_tv", "game console"], 0),
        ("media_player", "BAD DEVICE", ["satellite_tv", "game console"], None),
    ],
)
async def test_api_select_input(hass, domain, payload, source_list, idx):
    """SelectInput must fuzzy-match the requested input against source_list."""
    hass.states.async_set(
        "media_player.test",
        "off",
        {
            "friendly_name": "Test media player",
            "source": "unknown",
            "source_list": source_list,
        },
    )
    if idx is None:
        # No source matches the payload: the directive must fail.
        await assert_request_fails(
            "Alexa.InputController",
            "SelectInput",
            "media_player#test",
            "media_player.select_source",
            hass,
            payload={"input": payload},
        )
    else:
        call, _ = await assert_request_calls_service(
            "Alexa.InputController",
            "SelectInput",
            "media_player#test",
            "media_player.select_source",
            hass,
            payload={"input": payload},
        )
        assert call.data["source"] == source_list[idx]
async def test_report_lock_state(hass):
    """LockController must map each lock state onto an Alexa lockState."""
    expectations = [
        ("lock.locked", STATE_LOCKED, "LOCKED"),
        # An in-progress transition still reports the settled state it
        # started from (unlocking is still LOCKED, locking still UNLOCKED).
        ("lock.unlocking", STATE_UNLOCKING, "LOCKED"),
        ("lock.unlocked", STATE_UNLOCKED, "UNLOCKED"),
        ("lock.locking", STATE_LOCKING, "UNLOCKED"),
        ("lock.unknown", STATE_UNKNOWN, "JAMMED"),
        ("lock.jammed", STATE_JAMMED, "JAMMED"),
    ]
    for entity_id, state, _ in expectations:
        hass.states.async_set(entity_id, state, {})
    for entity_id, _, lock_state in expectations:
        properties = await reported_properties(hass, entity_id)
        properties.assert_equal("Alexa.LockController", "lockState", lock_state)
@pytest.mark.parametrize(
    "supported_color_modes", [["brightness"], ["hs"], ["color_temp"]]
)
async def test_report_dimmable_light_state(hass, supported_color_modes):
    """BrightnessController must report brightness as a 0-100 percentage."""
    hass.states.async_set(
        "light.test_on",
        "on",
        {
            "friendly_name": "Test light On",
            "brightness": 128,  # 128/255 reports as 50%
            "supported_color_modes": supported_color_modes,
        },
    )
    hass.states.async_set(
        "light.test_off",
        "off",
        {
            "friendly_name": "Test light Off",
            "supported_color_modes": supported_color_modes,
        },
    )
    for entity_id, brightness in (("light.test_on", 50), ("light.test_off", 0)):
        properties = await reported_properties(hass, entity_id)
        properties.assert_equal(
            "Alexa.BrightnessController", "brightness", brightness
        )
@pytest.mark.parametrize("supported_color_modes", [["hs"], ["rgb"], ["xy"]])
async def test_report_colored_light_state(hass, supported_color_modes):
    """ColorController must report hue/saturation/brightness, zeros when off."""
    hass.states.async_set(
        "light.test_on",
        "on",
        {
            "friendly_name": "Test light On",
            "hs_color": (180, 75),
            "brightness": 128,
            "supported_color_modes": supported_color_modes,
        },
    )
    hass.states.async_set(
        "light.test_off",
        "off",
        {
            "friendly_name": "Test light Off",
            "supported_color_modes": supported_color_modes,
        },
    )
    expected_colors = {
        # hs saturation is a percentage; brightness is on a 0-255 scale.
        "light.test_on": {"hue": 180, "saturation": 0.75, "brightness": 128 / 255.0},
        "light.test_off": {"hue": 0, "saturation": 0, "brightness": 0},
    }
    for entity_id, color in expected_colors.items():
        properties = await reported_properties(hass, entity_id)
        properties.assert_equal("Alexa.ColorController", "color", color)
async def test_report_colored_temp_light_state(hass):
    """ColorTemperatureController reports Kelvin for on lights, nothing when off."""
    for entity_id, state, attrs in (
        ("light.test_on", "on", {"friendly_name": "Test light On", "color_temp": 240}),
        ("light.test_off", "off", {"friendly_name": "Test light Off"}),
    ):
        hass.states.async_set(
            entity_id, state, dict(attrs, supported_color_modes=["color_temp"])
        )
    # 240 mireds reports as 4166 K.
    properties = await reported_properties(hass, "light.test_on")
    properties.assert_equal(
        "Alexa.ColorTemperatureController", "colorTemperatureInKelvin", 4166
    )
    # An off light exposes no color temperature at all.
    properties = await reported_properties(hass, "light.test_off")
    properties.assert_not_has_property(
        "Alexa.ColorTemperatureController", "colorTemperatureInKelvin"
    )
async def test_report_fan_speed_state(hass):
    """RangeController must report fan percentage; speedless fans map to 100/0."""
    speed_fans = (
        ("fan.off", "off", "Off fan", 0),
        ("fan.low_speed", "on", "Low speed fan", 33),
        ("fan.medium_speed", "on", "Medium speed fan", 66),
        ("fan.high_speed", "on", "High speed fan", 100),
    )
    for entity_id, state, name, percentage in speed_fans:
        hass.states.async_set(
            entity_id,
            state,
            {
                "friendly_name": name,
                "supported_features": 1,
                "percentage": percentage,
            },
        )
    # Fans without speed support expose no percentage attribute at all.
    hass.states.async_set(
        "fan.speed_less_on",
        "on",
        {"friendly_name": "Speedless fan on", "supported_features": 0},
    )
    hass.states.async_set(
        "fan.speed_less_off",
        "off",
        {"friendly_name": "Speedless fan off", "supported_features": 0},
    )
    expectations = (
        ("fan.off", 0),
        ("fan.low_speed", 33),
        ("fan.medium_speed", 66),
        ("fan.high_speed", 100),
        # Without a percentage, on/off alone drives the reported range value.
        ("fan.speed_less_on", 100),
        ("fan.speed_less_off", 0),
    )
    for entity_id, range_value in expectations:
        properties = await reported_properties(hass, entity_id)
        properties.assert_equal("Alexa.RangeController", "rangeValue", range_value)
async def test_report_fan_preset_mode(hass):
    """Test ModeController reports fan preset_mode correctly.

    Cycles a single fan entity through each preset of a multi-preset fan and
    checks the reported mode, then checks a fan that declares only a single
    preset mode.
    """
    # Multi-preset fan: the reported mode must track the preset_mode attribute.
    for mode in ("eco", "smart", "whoosh"):
        hass.states.async_set(
            "fan.preset_mode",
            mode,
            {
                "friendly_name": f"{mode} enabled fan",
                "supported_features": 8,
                "preset_mode": mode,
                "preset_modes": ["eco", "smart", "whoosh"],
            },
        )
        properties = await reported_properties(hass, "fan.preset_mode")
        properties.assert_equal(
            "Alexa.ModeController", "mode", f"preset_mode.{mode}"
        )
    # Single-preset fan.
    hass.states.async_set(
        "fan.preset_mode",
        "whoosh",
        {
            "friendly_name": "one preset mode fan",
            "supported_features": 8,
            "preset_mode": "auto",
            "preset_modes": ["auto"],
        },
    )
    properties = await reported_properties(hass, "fan.preset_mode")
    # Fix: the original fetched these properties but never asserted on them,
    # leaving the single-preset case unverified.  The mode should mirror the
    # preset_mode attribute as in the multi-preset cases above.
    # NOTE(review): confirm single-preset fans expose a ModeController at all.
    properties.assert_equal("Alexa.ModeController", "mode", "preset_mode.auto")
async def test_report_fan_oscillating(hass):
    """ToggleController must mirror the fan's oscillating attribute."""
    hass.states.async_set(
        "fan.oscillating_off",
        "off",
        {"friendly_name": "fan oscillating off", "supported_features": 2},
    )
    hass.states.async_set(
        "fan.oscillating_on",
        "on",
        {
            "friendly_name": "Fan oscillating on",
            "oscillating": True,
            "supported_features": 2,
        },
    )
    for entity_id, toggle_state in (
        ("fan.oscillating_off", "OFF"),
        ("fan.oscillating_on", "ON"),
    ):
        properties = await reported_properties(hass, entity_id)
        properties.assert_equal(
            "Alexa.ToggleController", "toggleState", toggle_state
        )
async def test_report_fan_direction(hass):
    """ModeController must report direction.forward/reverse, nothing when unset."""
    hass.states.async_set(
        "fan.off", "off", {"friendly_name": "Off fan", "supported_features": 4}
    )
    for entity_id, name, direction in (
        ("fan.reverse", "Fan Reverse", "reverse"),
        ("fan.forward", "Fan Forward", "forward"),
    ):
        hass.states.async_set(
            entity_id,
            "on",
            {
                "friendly_name": name,
                "direction": direction,
                "supported_features": 4,
            },
        )
    # No direction attribute set: no mode must be reported.
    properties = await reported_properties(hass, "fan.off")
    properties.assert_not_has_property("Alexa.ModeController", "mode")
    for entity_id, mode in (
        ("fan.reverse", "direction.reverse"),
        ("fan.forward", "direction.forward"),
    ):
        properties = await reported_properties(hass, entity_id)
        properties.assert_equal("Alexa.ModeController", "mode", mode)
async def test_report_cover_range_value(hass):
    """RangeController must report the cover's current_position directly."""
    covers = (
        ("cover.fully_open", "open", "Fully open cover", 100),
        ("cover.half_open", "open", "Half open cover", 50),
        ("cover.closed", "closed", "Closed cover", 0),
    )
    for entity_id, state, name, position in covers:
        hass.states.async_set(
            entity_id,
            state,
            {
                "friendly_name": name,
                "current_position": position,
                "supported_features": 15,
            },
        )
    for entity_id, _, _, position in covers:
        properties = await reported_properties(hass, entity_id)
        properties.assert_equal("Alexa.RangeController", "rangeValue", position)
async def test_report_climate_state(hass):
    """ThermostatController must map HA HVAC modes onto Alexa thermostat modes."""

    def _climate_attrs(name):
        # All entities in this test share the same feature set and reading.
        return {
            "friendly_name": name,
            "supported_features": 91,
            climate.ATTR_CURRENT_TEMPERATURE: 34,
            ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS,
        }

    temperature_34 = {"value": 34.0, "scale": "CELSIUS"}
    # Both auto-style HA modes are reported as AUTO.
    for hvac_mode in (climate.HVAC_MODE_AUTO, climate.HVAC_MODE_HEAT_COOL):
        hass.states.async_set(
            "climate.downstairs", hvac_mode, _climate_attrs("Climate Downstairs")
        )
        properties = await reported_properties(hass, "climate.downstairs")
        properties.assert_equal(
            "Alexa.ThermostatController", "thermostatMode", "AUTO"
        )
        properties.assert_equal(
            "Alexa.TemperatureSensor", "temperature", temperature_34
        )
    # Off and fan-only are both reported as OFF.
    for hvac_mode in (climate.HVAC_MODE_OFF, climate.HVAC_MODE_FAN_ONLY):
        hass.states.async_set(
            "climate.downstairs", hvac_mode, _climate_attrs("Climate Downstairs")
        )
        properties = await reported_properties(hass, "climate.downstairs")
        properties.assert_equal(
            "Alexa.ThermostatController", "thermostatMode", "OFF"
        )
        properties.assert_equal(
            "Alexa.TemperatureSensor", "temperature", temperature_34
        )
    # Dry is reported as CUSTOM.
    hass.states.async_set(
        "climate.downstairs", "dry", _climate_attrs("Climate Downstairs")
    )
    properties = await reported_properties(hass, "climate.downstairs")
    properties.assert_equal("Alexa.ThermostatController", "thermostatMode", "CUSTOM")
    properties.assert_equal("Alexa.TemperatureSensor", "temperature", temperature_34)
    # Heat and cool map one-to-one.
    for entity_id, state, name, alexa_mode in (
        ("climate.heat", "heat", "Climate Heat", "HEAT"),
        ("climate.cool", "cool", "Climate Cool", "COOL"),
    ):
        hass.states.async_set(entity_id, state, _climate_attrs(name))
        properties = await reported_properties(hass, entity_id)
        properties.assert_equal(
            "Alexa.ThermostatController", "thermostatMode", alexa_mode
        )
        properties.assert_equal(
            "Alexa.TemperatureSensor", "temperature", temperature_34
        )
    # An unavailable entity reports no thermostat mode at all.
    hass.states.async_set(
        "climate.unavailable",
        "unavailable",
        {"friendly_name": "Climate Unavailable", "supported_features": 91},
    )
    properties = await reported_properties(hass, "climate.unavailable")
    properties.assert_not_has_property("Alexa.ThermostatController", "thermostatMode")
    # An unknown state yields an INTERNAL_ERROR response.
    hass.states.async_set(
        "climate.unsupported", "blablabla", _climate_attrs("Climate Unsupported")
    )
    msg = await reported_properties(hass, "climate.unsupported", True)
    assert msg["event"]["header"]["name"] == "ErrorResponse"
    assert msg["event"]["payload"]["type"] == "INTERNAL_ERROR"
async def test_temperature_sensor_sensor(hass):
    """TemperatureSensor must skip unparsable sensor states, report numbers."""
    entity_id = "sensor.temp_living_room"
    celsius_attrs = {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS}
    for invalid_state in (STATE_UNKNOWN, STATE_UNAVAILABLE, "not-number"):
        hass.states.async_set(entity_id, invalid_state, celsius_attrs)
        properties = await reported_properties(hass, entity_id)
        properties.assert_not_has_property("Alexa.TemperatureSensor", "temperature")
    hass.states.async_set(entity_id, "34", celsius_attrs)
    properties = await reported_properties(hass, entity_id)
    properties.assert_equal(
        "Alexa.TemperatureSensor", "temperature", {"value": 34.0, "scale": "CELSIUS"}
    )
async def test_temperature_sensor_climate(hass):
    """TemperatureSensor must skip unparsable climate readings, report numbers."""
    entity_id = "climate.downstairs"
    for invalid_reading in (STATE_UNKNOWN, STATE_UNAVAILABLE, "not-number"):
        hass.states.async_set(
            entity_id,
            climate.HVAC_MODE_HEAT,
            {climate.ATTR_CURRENT_TEMPERATURE: invalid_reading},
        )
        properties = await reported_properties(hass, entity_id)
        properties.assert_not_has_property("Alexa.TemperatureSensor", "temperature")
    hass.states.async_set(
        entity_id,
        climate.HVAC_MODE_HEAT,
        {climate.ATTR_CURRENT_TEMPERATURE: 34},
    )
    properties = await reported_properties(hass, entity_id)
    properties.assert_equal(
        "Alexa.TemperatureSensor", "temperature", {"value": 34.0, "scale": "CELSIUS"}
    )
async def test_report_alarm_control_panel_state(hass):
    """SecurityPanelController must map alarm states onto Alexa arm states."""
    expectations = (
        ("alarm_control_panel.armed_away", STATE_ALARM_ARMED_AWAY, "ARMED_AWAY"),
        # Both home and custom-bypass arming report as ARMED_STAY.
        (
            "alarm_control_panel.armed_custom_bypass",
            STATE_ALARM_ARMED_CUSTOM_BYPASS,
            "ARMED_STAY",
        ),
        ("alarm_control_panel.armed_home", STATE_ALARM_ARMED_HOME, "ARMED_STAY"),
        ("alarm_control_panel.armed_night", STATE_ALARM_ARMED_NIGHT, "ARMED_NIGHT"),
        ("alarm_control_panel.disarmed", STATE_ALARM_DISARMED, "DISARMED"),
    )
    for entity_id, state, _ in expectations:
        hass.states.async_set(entity_id, state, {})
    for entity_id, _, arm_state in expectations:
        properties = await reported_properties(hass, entity_id)
        properties.assert_equal(
            "Alexa.SecurityPanelController", "armState", arm_state
        )
async def test_report_playback_state(hass):
    """PlaybackStateReporter must report STOPPED for an off media player."""
    attrs = {
        "friendly_name": "Test media player",
        "supported_features": SUPPORT_PAUSE | SUPPORT_PLAY | SUPPORT_STOP,
        "volume_level": 0.75,
    }
    hass.states.async_set("media_player.test", "off", attrs)
    properties = await reported_properties(hass, "media_player.test")
    properties.assert_equal(
        "Alexa.PlaybackStateReporter", "playbackState", {"state": "STOPPED"}
    )
async def test_report_speaker_volume(hass):
    """Speaker must skip a missing volume and report whole percentages otherwise."""

    def _set_volume(volume_level):
        hass.states.async_set(
            "media_player.test_speaker",
            "on",
            {
                "friendly_name": "Test media player speaker",
                "supported_features": SUPPORT_VOLUME_MUTE | SUPPORT_VOLUME_SET,
                "volume_level": volume_level,
                "device_class": "speaker",
            },
        )

    # No volume_level: no volume property may be reported.
    _set_volume(None)
    properties = await reported_properties(hass, "media_player.test_speaker")
    properties.assert_not_has_property("Alexa.Speaker", "volume")
    # Every integer percentage 0-100 must round-trip through volume_level.
    for percentage in range(101):
        _set_volume(percentage / 100)
        properties = await reported_properties(hass, "media_player.test_speaker")
        properties.assert_equal("Alexa.Speaker", "volume", percentage)
async def test_report_image_processing(hass):
    """EventDetectionSensor must report DETECTED only when faces are found."""
    # No faces: presence must be NOT_DETECTED.
    hass.states.async_set(
        "image_processing.test_face",
        0,
        {
            "friendly_name": "Test face",
            "device_class": "face",
            "faces": [],
            "total_faces": 0,
        },
    )
    properties = await reported_properties(hass, "image_processing#test_face")
    properties.assert_equal(
        "Alexa.EventDetectionSensor",
        "humanPresenceDetectionState",
        {"value": "NOT_DETECTED"},
    )
    # Faces present (entries may omit confidence/age/gender): DETECTED.
    detected_faces = [
        {"confidence": 98.34, "name": "Hans", "age": 16.0, "gender": "male"},
        {"name": "Helena", "age": 28.0, "gender": "female"},
        {"confidence": 62.53, "name": "Luna"},
    ]
    hass.states.async_set(
        "image_processing.test_classifier",
        3,
        {
            "friendly_name": "Test classifier",
            "device_class": "face",
            "faces": detected_faces,
            "total_faces": 3,
        },
    )
    properties = await reported_properties(hass, "image_processing#test_classifier")
    properties.assert_equal(
        "Alexa.EventDetectionSensor",
        "humanPresenceDetectionState",
        {"value": "DETECTED"},
    )
async def test_get_property_blowup(hass, caplog):
    """An exception inside a property getter must be logged, not propagated."""
    hass.states.async_set(
        "climate.downstairs",
        climate.HVAC_MODE_AUTO,
        {
            "friendly_name": "Climate Downstairs",
            "supported_features": 91,
            climate.ATTR_CURRENT_TEMPERATURE: 34,
            ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS,
        },
    )
    # Make float() blow up inside the capabilities module: reporting must
    # survive, drop the failing property, and log the exception text.
    blowup = patch(
        "homeassistant.components.alexa.capabilities.float",
        side_effect=Exception("Boom Fail"),
    )
    with blowup:
        properties = await reported_properties(hass, "climate.downstairs")
        properties.assert_not_has_property(
            "Alexa.ThermostatController", "temperature"
        )
    assert "Boom Fail" in caplog.text
|
{
"content_hash": "af6f69e503416e7b3452ad52f7d7f4da",
"timestamp": "",
"source": "github",
"line_count": 868,
"max_line_length": 87,
"avg_line_length": 33.29032258064516,
"alnum_prop": 0.6083194905869325,
"repo_name": "GenericStudent/home-assistant",
"id": "d24849e100601c5c4d88cb60b24bdc77eefc3c78",
"size": "28896",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "tests/components/alexa/test_capabilities.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "3070"
},
{
"name": "Python",
"bytes": "44491729"
},
{
"name": "Shell",
"bytes": "5092"
}
],
"symlink_target": ""
}
|
"""Tests for ops which manipulate lists of tensors."""
# pylint: disable=g-bad-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy as np # pylint: disable=unused-import
from tensorflow.python.client import session
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gen_list_ops
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import list_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.platform import test
@test_util.run_all_in_graph_and_eager_modes
class ListOpsTest(test_util.TensorFlowTestCase, parameterized.TestCase):
def _testPushPop(self, max_num_elements):
l = list_ops.empty_tensor_list(
element_dtype=dtypes.float32,
element_shape=[],
max_num_elements=max_num_elements)
l = list_ops.tensor_list_push_back(l, constant_op.constant(1.0))
l, e = list_ops.tensor_list_pop_back(l, element_dtype=dtypes.float32)
self.assertAllEqual(self.evaluate(e), 1.0)
  @parameterized.named_parameters(("NoMaxNumElements", None),
                                  ("WithMaxNumElements", 2))
  def testPushPop(self, max_num_elements):
    """Push/pop round-trip, with and without a max_num_elements bound."""
    self._testPushPop(max_num_elements)
  @parameterized.named_parameters(("NoMaxNumElements", None),
                                  ("WithMaxNumElements", 2))
  @test_util.run_gpu_only
  def testPushPopGPU(self, max_num_elements):
    """Same push/pop round-trip, with the ops placed on a GPU device."""
    with context.device("gpu:0"):
      self._testPushPop(max_num_elements)
  @test_util.run_deprecated_v1
  def testPushInFullListFails(self):
    """Pushing past max_num_elements must raise InvalidArgumentError."""
    l = list_ops.empty_tensor_list(
        element_dtype=dtypes.float32, element_shape=[], max_num_elements=1)
    l = list_ops.tensor_list_push_back(l, constant_op.constant(1.0))
    # assertRaisesRegexp (not ...Regex) is kept for Python 2 compatibility,
    # consistent with the rest of this file.
    with self.assertRaisesRegexp(errors.InvalidArgumentError,
                                 "Tried to push item into a full list"):
      l = list_ops.tensor_list_push_back(l, 2.)
      self.evaluate(l)
  @parameterized.named_parameters(("NoMaxNumElements", None),
                                  ("WithMaxNumElements", 2))
  @test_util.run_deprecated_v1
  def testPopFromEmptyTensorListFails(self, max_num_elements):
    """Popping an empty list must raise InvalidArgumentError."""
    l = list_ops.empty_tensor_list(
        element_dtype=dtypes.float32,
        element_shape=[],
        max_num_elements=max_num_elements)
    with self.assertRaisesRegexp(errors.InvalidArgumentError,
                                 "Trying to pop from an empty list"):
      l = list_ops.tensor_list_pop_back(l, element_dtype=dtypes.float32)
      self.evaluate(l)
def testPopUninitializedTensorUseListElementShape(self):
l = list_ops.tensor_list_reserve(
element_dtype=dtypes.float32, element_shape=[2, 3], num_elements=3)
_, e = list_ops.tensor_list_pop_back(l, element_dtype=dtypes.float32)
self.assertAllEqual(e, np.zeros((2, 3)))
def testPopUninitializedTensorUseSpecifiedElementShape(self):
l = list_ops.tensor_list_reserve(
element_dtype=dtypes.float32, element_shape=[None, 3], num_elements=3)
_, e = gen_list_ops.tensor_list_pop_back(
l, element_dtype=dtypes.float32, element_shape=[4, 3])
self.assertAllEqual(e, np.zeros((4, 3)))
  def testPopUninitializedTensorWithInvalidElementShapeFails(self):
    """Reading uninitialized slots needs a fully defined, compatible shape."""
    l = list_ops.tensor_list_reserve(
        element_dtype=dtypes.float32, element_shape=None, num_elements=3)
    # With no shape on the list and none supplied, the zeros cannot be built.
    with self.assertRaisesRegexp(
        errors.InvalidArgumentError,
        "Trying to read an uninitialized tensor but "
        "element_shape is not fully defined"):
      _, e = list_ops.tensor_list_pop_back(l, element_dtype=dtypes.float32)
      self.evaluate(e)
    l = list_ops.tensor_list_reserve(
        element_dtype=dtypes.float32, element_shape=[None, 2], num_elements=3)
    # A supplied element_shape must be compatible with the list's shape.
    with self.assertRaisesRegexp(
        errors.InvalidArgumentError,
        r"Incompatible shapes during merge: \[1,3\] vs. \[\?,2\]"):
      _, e = gen_list_ops.tensor_list_pop_back(
          l, element_dtype=dtypes.float32, element_shape=[1, 3])
      self.evaluate(e)
  def testPushGetGrad(self):
    """Gradient of tensor_list_get_item flows only to the element that was read."""
    with backprop.GradientTape() as tape:
      l = list_ops.empty_tensor_list(
          element_dtype=dtypes.float32, element_shape=None)
      c0 = constant_op.constant(5.0)
      c1 = constant_op.constant([10.0, 20.0])
      tape.watch(c0)
      tape.watch(c1)
      l = list_ops.tensor_list_push_back(l, c0)
      l = list_ops.tensor_list_push_back(l, c1)
      t1 = list_ops.tensor_list_get_item(l, 1, element_dtype=dtypes.float32)
      self.assertAllEqual(self.evaluate(t1), [10.0, 20.0])
    # t1 == c1 so the gradient should be [0., [1., 1.]]
    # This tests that the gradient of push_back correctly converts DT_INVALID
    # tensors to zeros. The list returned by the gradient of GetItem will
    # only have the tensor at index 1 set and others set to DT_INVALID.
    dt0, dt1 = tape.gradient(t1, [c0, c1])
    self.assertAllEqual(self.evaluate(dt1), [1.0, 1.0])
    self.assertEqual(self.evaluate(dt0), 0.0)
def _testStack(self, max_num_elements):
l = list_ops.empty_tensor_list(
element_dtype=dtypes.float32,
element_shape=[],
max_num_elements=max_num_elements)
l = list_ops.tensor_list_push_back(l, constant_op.constant(1.0))
l = list_ops.tensor_list_push_back(l, constant_op.constant(2.0))
t = list_ops.tensor_list_stack(l, element_dtype=dtypes.float32)
if not context.executing_eagerly():
self.assertAllEqual(t.shape.as_list(), [None])
self.assertAllEqual(self.evaluate(t), [1.0, 2.0])
  @parameterized.named_parameters(("NoMaxNumElements", None),
                                  ("WithMaxNumElements", 2))
  def testStack(self, max_num_elements):
    """Stack two pushed scalars, with and without a max_num_elements bound."""
    self._testStack(max_num_elements)
  @parameterized.named_parameters(("NoMaxNumElements", None),
                                  ("WithMaxNumElements", 2))
  @test_util.run_gpu_only
  def testStackGPU(self, max_num_elements):
    """Same stack test, with the ops placed on a GPU device."""
    with context.device("gpu:0"):
      self._testStack(max_num_elements)
  @parameterized.named_parameters(("NoMaxNumElements", None),
                                  ("WithMaxNumElements", 3))
  @test_util.run_deprecated_v1
  def testStackWithUnknownElementShape(self, max_num_elements):
    """Stack infers the element shape; mismatched ranks fail at merge time."""
    l = list_ops.empty_tensor_list(
        element_dtype=dtypes.float32,
        element_shape=None,
        max_num_elements=max_num_elements)
    l = list_ops.tensor_list_push_back(l, constant_op.constant(1.0))
    l = list_ops.tensor_list_push_back(l, constant_op.constant(2.0))
    t = list_ops.tensor_list_stack(l, element_dtype=dtypes.float32)
    self.assertAllEqual(self.evaluate(t), [1.0, 2.0])
    # Should raise an error when the element tensors do not all have the same
    # shape.
    with self.assertRaisesRegexp(errors.InvalidArgumentError,
                                 "Incompatible ranks during merge: 0 vs. 1"):
      l = list_ops.tensor_list_push_back(l, constant_op.constant([3.0, 4.0]))
      t = list_ops.tensor_list_stack(l, element_dtype=dtypes.float32)
      self.evaluate(t)
  @parameterized.named_parameters(("NoMaxNumElements", None),
                                  ("WithMaxNumElements", 3))
  @test_util.run_deprecated_v1
  def testStackWithPartiallyDefinedElementShape(self, max_num_elements):
    """A [None] element shape stacks fine until element lengths disagree."""
    l = list_ops.empty_tensor_list(
        element_dtype=dtypes.float32,
        element_shape=[None],
        max_num_elements=max_num_elements)
    l = list_ops.tensor_list_push_back(l, constant_op.constant([1.0]))
    l = list_ops.tensor_list_push_back(l, constant_op.constant([2.0]))
    t = list_ops.tensor_list_stack(l, element_dtype=dtypes.float32)
    self.assertAllEqual(self.evaluate(t), [[1.0], [2.0]])
    # Should raise an error when the element tensors do not all have the same
    # shape.
    with self.assertRaisesRegexp(
        errors.InvalidArgumentError,
        r"Incompatible shapes during merge: \[1\] vs. \[2\]"):
      l = list_ops.tensor_list_push_back(l, constant_op.constant([2.0, 3.0]))
      t = list_ops.tensor_list_stack(l, element_dtype=dtypes.float32)
      self.evaluate(t)
@parameterized.named_parameters(("NoMaxNumElements", None),
("WithMaxNumElements", 2))
@test_util.run_deprecated_v1
def testStackEmptyList(self, max_num_elements):
# Should be able to stack empty lists with fully defined element_shape.
l = list_ops.empty_tensor_list(
element_dtype=dtypes.float32,
element_shape=[1, 2],
max_num_elements=max_num_elements)
t = list_ops.tensor_list_stack(l, element_dtype=dtypes.float32)
self.assertAllEqual(self.evaluate(t).shape, (0, 1, 2))
# Should not be able to stack empty lists with partially defined
# element_shape.
with self.assertRaisesRegexp(errors.InvalidArgumentError,
"non-fully-defined"):
l = list_ops.empty_tensor_list(
element_dtype=dtypes.float32,
element_shape=[None, 2],
max_num_elements=max_num_elements)
t = list_ops.tensor_list_stack(l, element_dtype=dtypes.float32)
self.evaluate(t)
# Should not be able to stack empty lists with undefined element_shape.
with self.assertRaisesRegexp(errors.InvalidArgumentError,
"non-fully-defined"):
l = list_ops.empty_tensor_list(
element_dtype=dtypes.float32,
element_shape=None,
max_num_elements=max_num_elements)
t = list_ops.tensor_list_stack(l, element_dtype=dtypes.float32)
self.evaluate(t)
def _testStackWithUninitializedTensors(self):
l = list_ops.tensor_list_reserve(
element_dtype=dtypes.float32, element_shape=[], num_elements=3)
t = list_ops.tensor_list_stack(l, element_dtype=dtypes.float32)
self.assertAllEqual(t, [0., 0., 0.])
  def testStackWithUninitializedTensors(self):
    # CPU variant of the shared uninitialized-tensor stacking test.
    self._testStackWithUninitializedTensors()
  @test_util.run_gpu_only
  def testStackWithUninitializedTensorsGpu(self):
    # GPU variant of the shared uninitialized-tensor stacking test.
    with context.device("gpu:0"):
      self._testStackWithUninitializedTensors()
def _testStackWithUninitializedTensorsInferShape(self):
l = list_ops.tensor_list_reserve(
element_dtype=dtypes.float32, element_shape=None, num_elements=3)
l = list_ops.tensor_list_set_item(l, 1, [1., 2.])
t = list_ops.tensor_list_stack(l, element_dtype=dtypes.float32)
self.assertAllEqual(t, [[0., 0.], [1., 2.], [0., 0.]])
  def testStackWithUninitializedTensorsInferShape(self):
    # CPU variant of the shared infer-shape stacking test.
    self._testStackWithUninitializedTensorsInferShape()
  @test_util.run_gpu_only
  def testStackWithUninitializedTensorsInferShapeGpu(self):
    # GPU variant of the shared infer-shape stacking test.
    with context.device("gpu:0"):
      self._testStackWithUninitializedTensorsInferShape()
def testStackReservedListWithNoElementsAndPartialElementShapeFails(self):
l = list_ops.tensor_list_reserve(
element_dtype=dtypes.float32, element_shape=None, num_elements=3)
with self.assertRaisesRegexp(errors.InvalidArgumentError,
"Tried to stack list which only contains "
"uninitialized tensors and has a "
"non-fully-defined element_shape: <unknown>"):
t = list_ops.tensor_list_stack(l, element_dtype=dtypes.float32)
self.evaluate(t)
def testStackUsingSpecifiedElementShape(self):
l = list_ops.tensor_list_reserve(
element_dtype=dtypes.float32, element_shape=None, num_elements=3)
t = gen_list_ops.tensor_list_stack(
l, element_dtype=dtypes.float32, element_shape=[])
if context.executing_eagerly():
self.assertEqual(t.shape.as_list(), [3])
else:
self.assertEqual(t.shape.as_list(), [None])
self.assertAllEqual(self.evaluate(t), np.zeros((3,)))
  @parameterized.named_parameters(("NoMaxNumElements", None),
                                  ("WithMaxNumElements", 2))
  def testGatherGrad(self, max_num_elements):
    """Gradient of gather flows back to the tensor pushed into the list."""
    with backprop.GradientTape() as tape:
      l = list_ops.empty_tensor_list(
          element_dtype=dtypes.float32,
          element_shape=[],
          max_num_elements=max_num_elements)
      c0 = constant_op.constant(1.0)
      tape.watch(c0)
      l = list_ops.tensor_list_push_back(l, c0)
      l = list_ops.tensor_list_push_back(l, constant_op.constant(2.0))
      t = list_ops.tensor_list_gather(l, [1, 0], element_dtype=dtypes.float32)
      self.assertAllEqual(self.evaluate(t), [2.0, 1.0])
      s = (t[0] + t[1]) * (t[0] + t[1])
    dt = tape.gradient(s, c0)
    # s = (c0 + 2)^2, so ds/dc0 = 2 * (1 + 2) = 6.
    self.assertAllEqual(self.evaluate(dt), 6.0)
@parameterized.named_parameters(("NoMaxNumElements", None),
("WithMaxNumElements", 3))
@test_util.run_deprecated_v1
def testGatherWithUnknownElementShape(self, max_num_elements):
l = list_ops.empty_tensor_list(
element_dtype=dtypes.float32,
element_shape=None,
max_num_elements=max_num_elements)
l = list_ops.tensor_list_push_back(l, constant_op.constant(1.0))
l = list_ops.tensor_list_push_back(l, constant_op.constant(2.0))
l = list_ops.tensor_list_push_back(l, constant_op.constant([3.0, 4.0]))
t = list_ops.tensor_list_gather(l, [1, 0], element_dtype=dtypes.float32)
self.assertAllEqual(self.evaluate(t), [2.0, 1.0])
t = list_ops.tensor_list_gather(l, [2], element_dtype=dtypes.float32)
self.assertAllEqual(self.evaluate(t), [[3.0, 4.0]])
# Should raise an error when the requested tensors do not all have the same
# shape.
with self.assertRaisesRegexp(errors.InvalidArgumentError,
"Incompatible ranks during merge: 0 vs. 1"):
t = list_ops.tensor_list_gather(l, [0, 2], element_dtype=dtypes.float32)
self.evaluate(t)
@parameterized.named_parameters(("NoMaxNumElements", None),
("WithMaxNumElements", 3))
@test_util.run_deprecated_v1
def testGatherWithPartiallyDefinedElementShape(self, max_num_elements):
l = list_ops.empty_tensor_list(
element_dtype=dtypes.float32,
element_shape=[None],
max_num_elements=max_num_elements)
l = list_ops.tensor_list_push_back(l, constant_op.constant([1.0]))
l = list_ops.tensor_list_push_back(l, constant_op.constant([2.0, 3.0]))
l = list_ops.tensor_list_push_back(l, constant_op.constant([4.0, 5.0]))
t = list_ops.tensor_list_gather(l, [0], element_dtype=dtypes.float32)
self.assertAllEqual(self.evaluate(t), [[1.0]])
t = list_ops.tensor_list_gather(l, [1, 2], element_dtype=dtypes.float32)
self.assertAllEqual(self.evaluate(t), [[2.0, 3.0], [4.0, 5.0]])
# Should raise an error when the requested tensors do not all have the same
# shape.
with self.assertRaisesRegexp(
errors.InvalidArgumentError,
r"Incompatible shapes during merge: \[1\] vs. \[2\]"):
t = list_ops.tensor_list_gather(l, [0, 2], element_dtype=dtypes.float32)
self.evaluate(t)
@parameterized.named_parameters(("NoMaxNumElements", None),
("WithMaxNumElements", 3))
@test_util.run_deprecated_v1
def testGatherEmptyList(self, max_num_elements):
# Should be able to gather from empty lists with fully defined
# element_shape.
l = list_ops.empty_tensor_list(
element_dtype=dtypes.float32,
element_shape=[1, 2],
max_num_elements=max_num_elements)
t = list_ops.tensor_list_gather(l, [], element_dtype=dtypes.float32)
self.assertAllEqual((0, 1, 2), self.evaluate(t).shape)
# Should not be able to gather from empty lists with partially defined
# element_shape.
with self.assertRaisesRegexp(errors.InvalidArgumentError,
"non-fully-defined"):
l = list_ops.empty_tensor_list(
element_dtype=dtypes.float32,
element_shape=[None, 2],
max_num_elements=max_num_elements)
t = list_ops.tensor_list_gather(l, [], element_dtype=dtypes.float32)
self.evaluate(t)
# Should not be able to gather from empty lists with undefined
# element_shape.
with self.assertRaisesRegexp(errors.InvalidArgumentError,
"non-fully-defined"):
l = list_ops.empty_tensor_list(
element_dtype=dtypes.float32,
element_shape=None,
max_num_elements=max_num_elements)
t = list_ops.tensor_list_gather(l, [], element_dtype=dtypes.float32)
self.evaluate(t)
  def testGatherGradWithNonContiguousIndices(self):
    """Gradient of a partial gather only involves the element read."""
    with backprop.GradientTape(persistent=True) as tape:
      t = constant_op.constant([1.0, 2.0, 3.0])
      l = list_ops.tensor_list_from_tensor(t, element_shape=[])
      c = constant_op.constant(5.0)
      tape.watch(c)
      l = list_ops.tensor_list_set_item(l, 1, c)
      t = list_ops.tensor_list_gather(l, [1], element_dtype=dtypes.float32)
      self.assertAllEqual(self.evaluate(t), [5.0])
      s = t[0] * t[0]
    dt = tape.gradient(s, c)
    # s = c^2, so ds/dc = 2 * 5 = 10.
    self.assertAllEqual(self.evaluate(dt), 10.0)
    dl = tape.gradient(t, l)
    # The gradient list has the same length as the forward list.
    dl_length = list_ops.tensor_list_length(dl)
    self.assertAllEqual(self.evaluate(dl_length), 3)
def _testGatherWithUninitializedTensors(self):
l = list_ops.tensor_list_reserve(
element_dtype=dtypes.float32, element_shape=[], num_elements=3)
t = list_ops.tensor_list_gather(l, [0, 2], element_dtype=dtypes.float32)
self.assertAllEqual(self.evaluate(t), [0., 0.])
  def testGatherWithUninitializedTensors(self):
    # CPU variant of the shared uninitialized-tensor gather test.
    self._testGatherWithUninitializedTensors()
  @test_util.run_gpu_only
  def testGatherWithUninitializedTensorsGpu(self):
    # GPU variant of the shared uninitialized-tensor gather test.
    with context.device("gpu:0"):
      self._testGatherWithUninitializedTensors()
def _testGatherWithUninitializedTensorsInferShape(self):
l = list_ops.tensor_list_reserve(
element_dtype=dtypes.float32, element_shape=None, num_elements=3)
l = list_ops.tensor_list_set_item(l, 1, [1., 2.])
t = list_ops.tensor_list_gather(l, [1, 2], element_dtype=dtypes.float32)
self.assertAllEqual(self.evaluate(t), [[1., 2.], [0., 0.]])
  def testGatherWithUninitializedTensorsInferShape(self):
    # CPU variant of the shared infer-shape gather test.
    self._testGatherWithUninitializedTensorsInferShape()
  @test_util.run_gpu_only
  def testGatherWithUninitializedTensorsInferShapeGpu(self):
    # GPU variant of the shared infer-shape gather test.
    with context.device("gpu:0"):
      self._testGatherWithUninitializedTensorsInferShape()
def testGatherReservedListWithNoElementsAndPartialElementShapeFails(self):
l = list_ops.tensor_list_reserve(
element_dtype=dtypes.float32, element_shape=None, num_elements=3)
with self.assertRaisesRegexp(
errors.InvalidArgumentError,
"Tried to gather uninitialized tensors from a"
" list with non-fully-defined element_shape"):
t = list_ops.tensor_list_gather(l, [0], element_dtype=dtypes.float32)
self.evaluate(t)
def testGatherUsingSpecifiedElementShape(self):
l = list_ops.tensor_list_reserve(
element_dtype=dtypes.float32, element_shape=None, num_elements=3)
t = gen_list_ops.tensor_list_gather(
l, [0, 1, 2], element_dtype=dtypes.float32, element_shape=[])
self.assertEqual(t.shape.as_list(), [3])
self.assertAllEqual(self.evaluate(t), np.zeros((3,)))
def testScatterOutputListSize(self):
c0 = constant_op.constant([1.0, 2.0])
l = list_ops.tensor_list_scatter(c0, [1, 3], [])
# TensorListScatter should return a list with size largest index + 1.
self.assertAllEqual(list_ops.tensor_list_length(l), 4)
def testScatterOutputListSizeWithNumElementsSpecified(self):
c0 = constant_op.constant([1.0, 2.0])
l = gen_list_ops.tensor_list_scatter_v2(
c0, [1, 3], list_ops._build_element_shape([]), num_elements=5)
# TensorListScatter should return a list with size num_elements.
self.assertAllEqual(list_ops.tensor_list_length(l), 5)
def testScatterFailsWhenIndexLargerThanNumElements(self):
c0 = constant_op.constant([1.0, 2.0])
with self.assertRaisesRegexp(
errors.InvalidArgumentError,
"TensorListScatter: Trying to scatter at index 3 in list with size 3"):
l = gen_list_ops.tensor_list_scatter_v2(
c0, [1, 3], list_ops._build_element_shape([]), num_elements=3)
self.evaluate(l)
def testScatterFailsWithInvalidNumElements(self):
c0 = constant_op.constant([1.0, 2.0])
with self.assertRaisesRegexp(
errors.InvalidArgumentError,
"TensorListScatter expects num_elements >= -1, found: -2"):
l = gen_list_ops.tensor_list_scatter_v2(
c0, [1, 3], list_ops._build_element_shape([]), num_elements=-2)
self.evaluate(l)
def testScatterWithInvalidRowsInInputTensorFails(self):
c0 = constant_op.constant([1.0, 2.0])
with self.assertRaisesRegexp(
errors.InvalidArgumentError,
"Invalid number of rows in input tensor. Expected: 3 Actual: 2"):
l = list_ops.tensor_list_scatter(c0, [1, 0, 2], [])
self.evaluate(l)
def testScatterWithNegativeIndicesFails(self):
c0 = constant_op.constant([1.0, 2.0])
with self.assertRaisesRegexp(
errors.InvalidArgumentError,
"Indices in TensorListScatter must all be non-negative."):
l = list_ops.tensor_list_scatter(c0, [-1, -2], element_shape=[])
self.evaluate(l)
def testScatterIntoExistingList(self):
l = list_ops.tensor_list_reserve(
element_dtype=dtypes.float32, element_shape=[], num_elements=3)
l = list_ops.tensor_list_scatter(tensor=[1.], indices=[0], element_shape=[])
l = list_ops.tensor_list_scatter(
tensor=[2., 3.], indices=[1, 2], element_shape=[], input_handle=l)
self.assertAllEqual(
list_ops.tensor_list_stack(l, element_dtype=dtypes.float32),
[1., 2., 3.])
  def testScatterGrad(self):
    """Gradients flow through scatter back to the source tensor."""
    with backprop.GradientTape() as tape:
      c0 = constant_op.constant([1.0, 2.0])
      tape.watch(c0)
      l = list_ops.tensor_list_scatter(c0, [1, 0], element_shape=[])
      t0 = list_ops.tensor_list_get_item(l, 0, element_dtype=dtypes.float32)
      t1 = list_ops.tensor_list_get_item(l, 1, element_dtype=dtypes.float32)
      self.assertAllEqual(self.evaluate(t0), 2.0)
      self.assertAllEqual(self.evaluate(t1), 1.0)
      loss = t0 * t0 + t1 * t1
    dt = tape.gradient(loss, c0)
    # loss = c0[0]^2 + c0[1]^2, so dloss/dc0 = 2 * c0 = [2., 4.].
    self.assertAllEqual(self.evaluate(dt), [2., 4.])
  def testScatterWithPartialReadGrad(self):
    """Only the element actually read contributes to the gradient."""
    with backprop.GradientTape() as tape:
      c0 = constant_op.constant([1.0, 2.0])
      tape.watch(c0)
      l = list_ops.tensor_list_scatter(c0, [1, 0], element_shape=[])
      t0 = list_ops.tensor_list_get_item(l, 0, element_dtype=dtypes.float32)
      self.assertAllEqual(self.evaluate(t0), 2.0)
      loss = t0 * t0
    dt = tape.gradient(loss, c0)
    # Only c0[1] (scattered to index 0) was read: grad = [0., 2 * 2.].
    self.assertAllEqual(self.evaluate(dt), [0., 4.])
def testTensorListFromTensor(self):
t = constant_op.constant([1.0, 2.0])
l = list_ops.tensor_list_from_tensor(t, element_shape=[])
e = list_ops.tensor_list_get_item(l, 0, element_dtype=dtypes.float32)
self.assertAllEqual(e, 1.0)
l, e = list_ops.tensor_list_pop_back(l, element_dtype=dtypes.float32)
self.assertAllEqual(e, 2.0)
l, e = list_ops.tensor_list_pop_back(l, element_dtype=dtypes.float32)
self.assertAllEqual(e, 1.0)
self.assertAllEqual(list_ops.tensor_list_length(l), 0)
  @test_util.run_gpu_only
  def testFromTensorGPU(self):
    # Re-runs the from_tensor test with ops placed on the first GPU.
    with context.device("gpu:0"):
      self.testTensorListFromTensor()
def testGetSet(self):
t = constant_op.constant([1.0, 2.0])
l = list_ops.tensor_list_from_tensor(t, element_shape=[])
e0 = list_ops.tensor_list_get_item(l, 0, element_dtype=dtypes.float32)
self.assertAllEqual(self.evaluate(e0), 1.0)
l = list_ops.tensor_list_set_item(l, 0, 3.0)
t = list_ops.tensor_list_stack(l, element_dtype=dtypes.float32)
self.assertAllEqual(self.evaluate(t), [3.0, 2.0])
  @test_util.run_gpu_only
  def testGetSetGPU(self):
    # Re-runs the get/set test with ops placed on the first GPU.
    with context.device("gpu:0"):
      self.testGetSet()
def testGetSetReserved(self):
l = list_ops.tensor_list_reserve(
element_dtype=dtypes.float32, element_shape=[], num_elements=2)
e0 = list_ops.tensor_list_get_item(l, 0, element_dtype=dtypes.float32)
self.assertAllEqual(e0, 0.0)
l = list_ops.tensor_list_set_item(l, 0, 3.0)
t = list_ops.tensor_list_stack(l, element_dtype=dtypes.float32)
self.assertAllEqual(t, [3.0, 0.0])
  @test_util.run_gpu_only
  def testGetSetReservedGPU(self):
    # Re-runs the reserved get/set test with ops placed on the first GPU.
    with context.device("gpu:0"):
      self.testGetSetReserved()
  def testSetGetGrad(self):
    """Gradient flows through a set_item/get_item round trip."""
    with backprop.GradientTape() as tape:
      t = constant_op.constant(5.)
      tape.watch(t)
      l = list_ops.tensor_list_reserve(
          element_dtype=dtypes.float32, element_shape=[], num_elements=3)
      l = list_ops.tensor_list_set_item(l, 1, 2. * t)
      e = list_ops.tensor_list_get_item(l, 1, element_dtype=dtypes.float32)
      self.assertAllEqual(self.evaluate(e), 10.0)
    # e = 2 * t, so de/dt = 2.
    self.assertAllEqual(self.evaluate(tape.gradient(e, t)), 2.0)
def testGetUninitializedTensorUseListElementShape(self):
l = list_ops.tensor_list_reserve(
element_dtype=dtypes.float32, element_shape=[], num_elements=3)
l = list_ops.tensor_list_set_item(l, 0, 5.)
e1 = list_ops.tensor_list_get_item(l, 1, element_dtype=dtypes.float32)
e2 = list_ops.tensor_list_get_item(l, 2, element_dtype=dtypes.float32)
self.assertEqual(self.evaluate(e1), 0.)
self.assertEqual(self.evaluate(e2), 0.)
def testGetUninitializedTensorUseSpecifiedElementShape(self):
l = list_ops.tensor_list_reserve(
element_dtype=dtypes.float32, element_shape=None, num_elements=3)
e0 = gen_list_ops.tensor_list_get_item(
l, 0, element_shape=[], element_dtype=dtypes.float32)
e1 = gen_list_ops.tensor_list_get_item(
l, 1, element_shape=[2, 3], element_dtype=dtypes.float32)
self.assertEqual(e0.shape.as_list(), [])
self.assertEqual(e1.shape.as_list(), [2, 3])
self.assertEqual(self.evaluate(e0), 0.)
self.assertAllEqual(self.evaluate(e1), np.zeros((2, 3)))
l = list_ops.tensor_list_reserve(
element_dtype=dtypes.float32, element_shape=[None, 3], num_elements=3)
e1 = gen_list_ops.tensor_list_get_item(
l, 1, element_shape=[2, 3], element_dtype=dtypes.float32)
self.assertAllEqual(self.evaluate(e1), np.zeros((2, 3)))
def testGetUninitializedTensorWithInvalidElementShapeFails(self):
l = list_ops.tensor_list_reserve(
element_dtype=dtypes.float32, element_shape=None, num_elements=3)
with self.assertRaisesRegexp(
errors.InvalidArgumentError,
"Trying to read an uninitialized tensor but "
"element_shape is not fully defined"):
e0 = list_ops.tensor_list_get_item(l, 0, element_dtype=dtypes.float32)
self.evaluate(e0)
l = list_ops.tensor_list_reserve(
element_dtype=dtypes.float32, element_shape=[None, 2], num_elements=3)
# In eager mode the shape mismatch is caught in the TensorListGetItem
# kernel which raises an InvalidArgumentError.
# In graph mode the shape mismatch is caught in the C++ shape inference
# which raises a ValueError.
if context.executing_eagerly():
error_type = errors.InvalidArgumentError
else:
error_type = ValueError
with self.assertRaisesRegexp(error_type, r"shapes"):
e0 = gen_list_ops.tensor_list_get_item(
l, 0, element_dtype=dtypes.float32, element_shape=[1, 3])
self.evaluate(e0)
  @test_util.run_deprecated_v1
  @test_util.enable_control_flow_v2
  def testSkipEagerSetItemIndexOutOfBounds(self):
    """resize_if_index_out_of_bounds grows the list; gradients still flow."""
    l = list_ops.empty_tensor_list(
        element_dtype=dtypes.float32, element_shape=[])
    e0 = constant_op.constant(5.)
    l = list_ops.tensor_list_set_item(
        l, 0, 2. * e0, resize_if_index_out_of_bounds=True)
    l = list_ops.tensor_list_set_item(
        l, 1, 1., resize_if_index_out_of_bounds=True)
    t = list_ops.tensor_list_stack(l, element_dtype=dtypes.float32)
    grad = gradients_impl.gradients(t, e0)[0]
    # Item 0 is 2 * e0, so dt/de0 = 2.
    self.assertAllEqual(self.evaluate(grad), 2.)
@test_util.run_deprecated_v1
def testSetOnEmptyListWithMaxNumElementsFails(self):
l = list_ops.empty_tensor_list(
element_dtype=dtypes.float32, element_shape=[], max_num_elements=3)
with self.assertRaisesRegexp(
errors.InvalidArgumentError,
"Trying to modify element 0 in a list with 0 elements."):
l = list_ops.tensor_list_set_item(l, 0, 1.)
self.evaluate(l)
def testUnknownShape(self):
l = list_ops.empty_tensor_list(
element_dtype=dtypes.float32, element_shape=None)
l = list_ops.tensor_list_push_back(l, constant_op.constant(1.0))
l = list_ops.tensor_list_push_back(l, constant_op.constant([1.0, 2.0]))
l, e = list_ops.tensor_list_pop_back(l, element_dtype=dtypes.float32)
self.assertAllEqual(self.evaluate(e), [1.0, 2.0])
l, e = list_ops.tensor_list_pop_back(l, element_dtype=dtypes.float32)
self.assertAllEqual(self.evaluate(e), 1.0)
  @test_util.run_gpu_only
  def testCPUGPUCopy(self):
    """Copying a list to and from the GPU preserves its elements."""
    t = constant_op.constant([1.0, 2.0])
    l = list_ops.tensor_list_from_tensor(t, element_shape=[])
    with context.device("gpu:0"):
      # identity on a different device forces a cross-device copy.
      l_gpu = array_ops.identity(l)
      self.assertAllEqual(
          self.evaluate(
              list_ops.tensor_list_pop_back(
                  l_gpu, element_dtype=dtypes.float32)[1]), 2.0)
    l_cpu = array_ops.identity(l_gpu)
    self.assertAllEqual(
        self.evaluate(
            list_ops.tensor_list_pop_back(
                l_cpu, element_dtype=dtypes.float32)[1]), 2.0)
  @test_util.run_gpu_only
  def testCPUGPUCopyNested(self):
    """Cross-device copies also work for lists nested inside lists."""
    t = constant_op.constant([1.0, 2.0])
    child_l = list_ops.tensor_list_from_tensor(t, element_shape=[])
    l = list_ops.empty_tensor_list(
        element_shape=constant_op.constant([], dtype=dtypes.int32),
        element_dtype=dtypes.variant)
    l = list_ops.tensor_list_push_back(l, child_l)
    with context.device("gpu:0"):
      # identity on a different device forces a cross-device copy.
      l_gpu = array_ops.identity(l)
      _, child_l_gpu = list_ops.tensor_list_pop_back(
          l_gpu, element_dtype=dtypes.variant)
      self.assertAllEqual(
          self.evaluate(
              list_ops.tensor_list_pop_back(
                  child_l_gpu, element_dtype=dtypes.float32)[1]), 2.0)
    l_cpu = array_ops.identity(l_gpu)
    _, child_l_cpu = list_ops.tensor_list_pop_back(
        l_cpu, element_dtype=dtypes.variant)
    self.assertAllEqual(
        self.evaluate(
            list_ops.tensor_list_pop_back(
                child_l_cpu, element_dtype=dtypes.float32)[1]), 2.0)
  def testGraphStack(self):
    """push_back + stack works when built inside a graph session."""
    with self.cached_session():
      tl = list_ops.empty_tensor_list(
          element_shape=constant_op.constant([1], dtype=dtypes.int32),
          element_dtype=dtypes.int32)
      tl = list_ops.tensor_list_push_back(tl, [1])
      self.assertAllEqual(
          self.evaluate(
              list_ops.tensor_list_stack(tl, element_dtype=dtypes.int32)),
          [[1]])
  def testSkipEagerStackInLoop(self):
    """Pushing inside a while_loop accumulates one element per iteration."""
    with self.cached_session():
      t1 = list_ops.empty_tensor_list(
          element_shape=constant_op.constant([], dtype=dtypes.int32),
          element_dtype=dtypes.int32)
      i = constant_op.constant(0, dtype=dtypes.int32)

      def body(i, t1):
        # Appends the current loop counter to the list.
        t1 = list_ops.tensor_list_push_back(t1, i)
        i += 1
        return i, t1

      i, t1 = control_flow_ops.while_loop(lambda i, t1: math_ops.less(i, 4),
                                          body, [i, t1])
      s1 = list_ops.tensor_list_stack(t1, element_dtype=dtypes.int32)
      self.assertAllEqual(self.evaluate(s1), [0, 1, 2, 3])
  def testSkipEagerStackSwitchDtype(self):
    """A list can be replaced via cond with one of a different dtype."""
    with self.cached_session():
      list_ = list_ops.empty_tensor_list(
          element_shape=constant_op.constant([], dtype=dtypes.int32),
          element_dtype=dtypes.int32)
      m = constant_op.constant([1, 2, 3], dtype=dtypes.float32)

      def body(list_, m):
        # On the first call (length 0), swap in a float32 list matching m.
        list_ = control_flow_ops.cond(
            math_ops.equal(list_ops.tensor_list_length(list_), 0),
            lambda: list_ops.empty_tensor_list(m.shape, m.dtype), lambda: list_)
        list_ = list_ops.tensor_list_push_back(list_, m)
        return list_, m

      for _ in range(2):
        list_, m = body(list_, m)

      s1 = list_ops.tensor_list_stack(list_, element_dtype=dtypes.float32)
      np_s1 = np.array([[1, 2, 3], [1, 2, 3]], dtype=np.float32)
      self.assertAllEqual(self.evaluate(s1), np_s1)
  def testSkipEagerStackInLoopSwitchDtype(self):
    """Dtype switch via cond also works inside a while_loop."""
    with self.cached_session():
      t1 = list_ops.empty_tensor_list(
          element_shape=constant_op.constant([], dtype=dtypes.int32),
          element_dtype=dtypes.int32)
      i = constant_op.constant(0, dtype=dtypes.float32)
      m = constant_op.constant([1, 2, 3], dtype=dtypes.float32)

      def body(i, m, t1):
        # On the first iteration (length 0), swap in a float32 list.
        t1 = control_flow_ops.cond(
            math_ops.equal(list_ops.tensor_list_length(t1), 0),
            lambda: list_ops.empty_tensor_list(m.shape, m.dtype), lambda: t1)
        t1 = list_ops.tensor_list_push_back(t1, m * i)
        i += 1.0
        return i, m, t1

      i, m, t1 = control_flow_ops.while_loop(
          lambda i, m, t1: math_ops.less(i, 4), body, [i, m, t1])
      s1 = list_ops.tensor_list_stack(t1, element_dtype=dtypes.float32)
      np_s1 = np.vstack([np.arange(1, 4) * i for i in range(4)])
      self.assertAllEqual(self.evaluate(s1), np_s1)
  def testSerialize(self):
    """A list round-trips through serialization across job boundaries."""
    worker = test_util.create_local_cluster(num_workers=1, num_ps=1)[0][0]
    with ops.Graph().as_default(), session.Session(target=worker.target):
      with ops.device("/job:worker"):
        t = constant_op.constant([[1.0], [2.0]])
        l = list_ops.tensor_list_from_tensor(t, element_shape=[1])
      with ops.device("/job:ps"):
        # Crossing the job boundary serializes and deserializes the list.
        l_ps = array_ops.identity(l)
        l_ps, e = list_ops.tensor_list_pop_back(
            l_ps, element_dtype=dtypes.float32)
      with ops.device("/job:worker"):
        worker_e = array_ops.identity(e)
      self.assertAllEqual(self.evaluate(worker_e), [2.0])
  def testSerializeListWithInvalidTensors(self):
    """Uninitialized entries survive serialization and can be set later."""
    worker = test_util.create_local_cluster(num_workers=1, num_ps=1)[0][0]
    with ops.Graph().as_default(), session.Session(target=worker.target):
      with ops.device("/job:worker"):
        l = list_ops.tensor_list_reserve(
            element_dtype=dtypes.float32, element_shape=[], num_elements=2)
        l = list_ops.tensor_list_set_item(l, 0, 1.)
      with ops.device("/job:ps"):
        # Crossing the job boundary serializes and deserializes the list.
        l_ps = array_ops.identity(l)
        l_ps = list_ops.tensor_list_set_item(l_ps, 1, 2.)
        t = list_ops.tensor_list_stack(l_ps, element_dtype=dtypes.float32)
      with ops.device("/job:worker"):
        worker_t = array_ops.identity(t)
      self.assertAllEqual(self.evaluate(worker_t), [1.0, 2.0])
  def testSerializeListWithUnknownRank(self):
    """A list with unknown-rank element_shape serializes its shape as -1."""
    worker = test_util.create_local_cluster(num_workers=1, num_ps=1)[0][0]
    with ops.Graph().as_default(), session.Session(target=worker.target):
      with ops.device("/job:worker"):
        t = constant_op.constant([[1.0], [2.0]])
        l = list_ops.tensor_list_from_tensor(t, element_shape=None)
      with ops.device("/job:ps"):
        # Crossing the job boundary serializes and deserializes the list.
        l_ps = array_ops.identity(l)
        element_shape = list_ops.tensor_list_element_shape(
            l_ps, shape_type=dtypes.int32)
      with ops.device("/job:worker"):
        element_shape = array_ops.identity(element_shape)
      self.assertEqual(self.evaluate(element_shape), -1)
def testSerializeListWithMaxNumElements(self):
worker = test_util.create_local_cluster(num_workers=1, num_ps=1)[0][0]
with ops.Graph().as_default(), session.Session(target=worker.target):
with ops.device("/job:worker"):
l = list_ops.empty_tensor_list(
element_shape=None,
element_dtype=dtypes.float32,
max_num_elements=2)
l = list_ops.tensor_list_push_back(l, 1.)
with ops.device("/job:ps"):
l_ps = array_ops.identity(l)
l_ps = list_ops.tensor_list_push_back(l_ps, 2.)
with self.assertRaisesRegexp(errors.InvalidArgumentError,
"Tried to push item into a full list"):
with ops.device("/job:worker"):
l_worker = array_ops.identity(l_ps)
l_worker = list_ops.tensor_list_push_back(l_worker, 3.0)
self.evaluate(l_worker)
  def testPushPopGradients(self):
    """Gradient flows through a push_back/pop_back round trip."""
    with backprop.GradientTape() as tape:
      l = list_ops.empty_tensor_list(
          element_dtype=dtypes.float32, element_shape=[])
      c = constant_op.constant(1.0)
      tape.watch(c)
      l = list_ops.tensor_list_push_back(l, c)
      l, e = list_ops.tensor_list_pop_back(l, element_dtype=dtypes.float32)
      e = 2 * e
    # e = 2 * c, so de/dc = 2.
    self.assertAllEqual(self.evaluate(tape.gradient(e, [c])[0]), 2.0)
  def testStackFromTensorGradients(self):
    """Gradient flows through from_tensor followed by stack."""
    with backprop.GradientTape() as tape:
      c = constant_op.constant([1.0, 2.0])
      tape.watch(c)
      l = list_ops.tensor_list_from_tensor(c, element_shape=[])
      c2 = list_ops.tensor_list_stack(
          l, element_dtype=dtypes.float32, num_elements=2)
      result = c2 * 2.0
    grad = tape.gradient(result, [c])[0]
    # result = 2 * c, so dresult/dc = 2 elementwise.
    self.assertAllEqual(self.evaluate(grad), [2.0, 2.0])
  def testGetSetGradients(self):
    """Overwritten elements route gradients to the new value's source."""
    with backprop.GradientTape() as tape:
      c = constant_op.constant([1.0, 2.0])
      tape.watch(c)
      l = list_ops.tensor_list_from_tensor(c, element_shape=[])
      c2 = constant_op.constant(3.0)
      tape.watch(c2)
      l = list_ops.tensor_list_set_item(l, 0, c2)
      e = list_ops.tensor_list_get_item(l, 0, element_dtype=dtypes.float32)
      ee = list_ops.tensor_list_get_item(l, 1, element_dtype=dtypes.float32)
      y = e * e + ee * ee
    grad_c, grad_c2 = tape.gradient(y, [c, c2])
    # c[0] was overwritten by c2, so it gets no gradient; c[1] gets 2 * 2.
    self.assertAllEqual(self.evaluate(grad_c), [0.0, 4.0])
    # e = c2 = 3, so dy/dc2 = 2 * 3 = 6.
    self.assertAllEqual(self.evaluate(grad_c2), 6.0)
@test_util.run_deprecated_v1
def testSetOutOfBounds(self):
c = constant_op.constant([1.0, 2.0])
l = list_ops.tensor_list_from_tensor(c, element_shape=[])
with self.assertRaises(errors.InvalidArgumentError):
self.evaluate(list_ops.tensor_list_set_item(l, 20, 3.0))
@test_util.run_deprecated_v1
def testSkipEagerSetItemWithMismatchedShapeFails(self):
with self.cached_session() as sess:
ph = array_ops.placeholder(dtypes.float32)
c = constant_op.constant([1.0, 2.0])
l = list_ops.tensor_list_from_tensor(c, element_shape=[])
# Set a placeholder with unknown shape to satisfy the shape inference
# at graph building time.
l = list_ops.tensor_list_set_item(l, 0, ph)
l_0 = list_ops.tensor_list_get_item(l, 0, element_dtype=dtypes.float32)
with self.assertRaisesRegexp(errors.InvalidArgumentError,
"incompatible shape"):
sess.run(l_0, {ph: [3.0]})
  def testResourceVariableScatterGather(self):
    """TensorLists can be stored in, read from, and scattered into a variable."""
    c = constant_op.constant([1.0, 2.0], dtype=dtypes.float32)
    l = list_ops.tensor_list_from_tensor(c, element_shape=[])
    v = vs.get_variable("var", initializer=[l] * 10, use_resource=True)
    v_r_0_stacked = list_ops.tensor_list_stack(v[0], dtypes.float32)
    self.evaluate(v.initializer)
    self.assertAllEqual([1.0, 2.0], self.evaluate(v_r_0_stacked))
    v_r_sparse_stacked = list_ops.tensor_list_stack(
        v.sparse_read(0), dtypes.float32)
    self.assertAllEqual([1.0, 2.0], self.evaluate(v_r_sparse_stacked))
    l_new_0 = list_ops.tensor_list_from_tensor([3.0, 4.0], element_shape=[])
    l_new_1 = list_ops.tensor_list_from_tensor([5.0, 6.0], element_shape=[])
    # Overwrite rows 3 and 5 of the variable with the new lists.
    updated_v = state_ops.scatter_update(v, [3, 5], [l_new_0, l_new_1])
    updated_v_elems = array_ops.unstack(updated_v)
    updated_v_stacked = [
        list_ops.tensor_list_stack(el, dtypes.float32) for el in updated_v_elems
    ]
    expected = ([[1.0, 2.0]] * 3 + [[3.0, 4.0], [1.0, 2.0], [5.0, 6.0]] +
                [[1.0, 2.0]] * 4)
    self.assertAllEqual(self.evaluate(updated_v_stacked), expected)
@test_util.run_deprecated_v1
def testConcat(self):
c = constant_op.constant([1.0, 2.0], dtype=dtypes.float32)
l0 = list_ops.tensor_list_from_tensor(c, element_shape=[])
l1 = list_ops.tensor_list_from_tensor([-1.0], element_shape=[])
l_batch_0 = array_ops.stack([l0, l1])
l_batch_1 = array_ops.stack([l1, l0])
l_concat_01 = list_ops.tensor_list_concat_lists(
l_batch_0, l_batch_1, element_dtype=dtypes.float32)
l_concat_10 = list_ops.tensor_list_concat_lists(
l_batch_1, l_batch_0, element_dtype=dtypes.float32)
l_concat_00 = list_ops.tensor_list_concat_lists(
l_batch_0, l_batch_0, element_dtype=dtypes.float32)
l_concat_11 = list_ops.tensor_list_concat_lists(
l_batch_1, l_batch_1, element_dtype=dtypes.float32)
expected_00 = [[1.0, 2.0, 1.0, 2.0], [-1.0, -1.0]]
expected_01 = [[1.0, 2.0, -1.0], [-1.0, 1.0, 2.0]]
expected_10 = [[-1.0, 1.0, 2.0], [1.0, 2.0, -1.0]]
expected_11 = [[-1.0, -1.0], [1.0, 2.0, 1.0, 2.0]]
for i, (concat, expected) in enumerate(zip(
[l_concat_00, l_concat_01, l_concat_10, l_concat_11],
[expected_00, expected_01, expected_10, expected_11])):
splitted = array_ops.unstack(concat)
splitted_stacked_ret = self.evaluate(
(list_ops.tensor_list_stack(splitted[0], dtypes.float32),
list_ops.tensor_list_stack(splitted[1], dtypes.float32)))
print("Test concat %d: %s, %s, %s, %s"
% (i, expected[0], splitted_stacked_ret[0],
expected[1], splitted_stacked_ret[1]))
self.assertAllClose(expected[0], splitted_stacked_ret[0])
self.assertAllClose(expected[1], splitted_stacked_ret[1])
# Concatenating mismatched shapes fails.
with self.assertRaises((errors.InvalidArgumentError, ValueError)):
self.evaluate(
list_ops.tensor_list_concat_lists(
l_batch_0,
list_ops.empty_tensor_list([], dtypes.float32),
element_dtype=dtypes.float32))
if context.executing_eagerly():
expected_error = (
errors.InvalidArgumentError,
"element shapes are not identical at index 0")
else:
expected_error = (ValueError, "Shapes must be equal rank")
with self.assertRaisesRegexp(*expected_error):
l_batch_of_vec_tls = array_ops.stack(
[list_ops.tensor_list_from_tensor([[1.0]], element_shape=[1])] * 2)
self.evaluate(
list_ops.tensor_list_concat_lists(l_batch_0, l_batch_of_vec_tls,
element_dtype=dtypes.float32))
if context.executing_eagerly():
expected_error = (errors.InvalidArgumentError,
r"input_b\[0\].dtype != element_dtype.")
else:
expected_error = (ValueError, "input_b.type != element_dtype")
with self.assertRaisesRegexp(*expected_error):
l_batch_of_int_tls = array_ops.stack(
[list_ops.tensor_list_from_tensor([1], element_shape=[])] * 2)
self.evaluate(
list_ops.tensor_list_concat_lists(l_batch_0, l_batch_of_int_tls,
element_dtype=dtypes.float32))
@test_util.run_deprecated_v1
def testPushBackBatch(self):
c = constant_op.constant([1.0, 2.0], dtype=dtypes.float32)
l0 = list_ops.tensor_list_from_tensor(c, element_shape=[])
l1 = list_ops.tensor_list_from_tensor([-1.0], element_shape=[])
l_batch = array_ops.stack([l0, l1])
l_push = list_ops.tensor_list_push_back_batch(l_batch, [3.0, 4.0])
l_unstack = array_ops.unstack(l_push)
l0_ret = list_ops.tensor_list_stack(l_unstack[0], dtypes.float32)
l1_ret = list_ops.tensor_list_stack(l_unstack[1], dtypes.float32)
self.assertAllClose([1.0, 2.0, 3.0], self.evaluate(l0_ret))
self.assertAllClose([-1.0, 4.0], self.evaluate(l1_ret))
with ops.control_dependencies([l_push]):
l_unstack_orig = array_ops.unstack(l_batch)
l0_orig_ret = list_ops.tensor_list_stack(l_unstack_orig[0],
dtypes.float32)
l1_orig_ret = list_ops.tensor_list_stack(l_unstack_orig[1],
dtypes.float32)
# Check that without aliasing, push_back_batch still works; and
# that it doesn't modify the input.
l0_r_v, l1_r_v, l0_orig_v, l1_orig_v = self.evaluate(
(l0_ret, l1_ret, l0_orig_ret, l1_orig_ret))
self.assertAllClose([1.0, 2.0, 3.0], l0_r_v)
self.assertAllClose([-1.0, 4.0], l1_r_v)
self.assertAllClose([1.0, 2.0], l0_orig_v)
self.assertAllClose([-1.0], l1_orig_v)
# Pushing back mismatched shapes fails.
with self.assertRaises((errors.InvalidArgumentError, ValueError)):
self.evaluate(list_ops.tensor_list_push_back_batch(l_batch, []))
with self.assertRaisesRegexp(errors.InvalidArgumentError,
"incompatible shape to a list at index 0"):
self.evaluate(
list_ops.tensor_list_push_back_batch(l_batch, [[3.0], [4.0]]))
if context.executing_eagerly():
expected_error = (errors.InvalidArgumentError, "Invalid data type")
else:
expected_error = (ValueError, "wrong element dtype")
with self.assertRaisesRegexp(*expected_error):
self.evaluate(list_ops.tensor_list_push_back_batch(l_batch, [3, 4]))
  def testZerosLike(self):
    """zeros_like on a TensorList yields same-length lists of zeros for every
    supported dtype (empty lists stay empty)."""
    for dtype in (dtypes.uint8, dtypes.uint16, dtypes.int8, dtypes.int16,
                  dtypes.int32, dtypes.int64, dtypes.float16, dtypes.float32,
                  dtypes.float64, dtypes.complex64, dtypes.complex128,
                  dtypes.bool):
      l_empty = list_ops.empty_tensor_list(
          element_dtype=dtype, element_shape=[])
      l_empty_zeros = array_ops.zeros_like(l_empty)
      t_empty_zeros = list_ops.tensor_list_stack(
          l_empty_zeros, element_dtype=dtype)
      l_full = list_ops.tensor_list_push_back(l_empty,
                                              math_ops.cast(0, dtype=dtype))
      l_full = list_ops.tensor_list_push_back(l_full,
                                              math_ops.cast(1, dtype=dtype))
      l_full_zeros = array_ops.zeros_like(l_full)
      t_full_zeros = list_ops.tensor_list_stack(
          l_full_zeros, element_dtype=dtype)
      self.assertAllEqual(self.evaluate(t_empty_zeros), [])
      self.assertAllEqual(
          self.evaluate(t_full_zeros), np.zeros(
              (2,), dtype=dtype.as_numpy_dtype))

  def testZerosLikeNested(self):
    """zeros_like recurses into variant-dtype lists of lists, zeroing each
    inner list while preserving its length."""
    for dtype in (dtypes.uint8, dtypes.uint16, dtypes.int8, dtypes.int16,
                  dtypes.int32, dtypes.int64, dtypes.float16, dtypes.float32,
                  dtypes.float64, dtypes.complex64, dtypes.complex128,
                  dtypes.bool):
      l = list_ops.empty_tensor_list(
          element_dtype=dtypes.variant, element_shape=[])
      sub_l = list_ops.empty_tensor_list(element_dtype=dtype, element_shape=[])
      l = list_ops.tensor_list_push_back(l, sub_l)
      sub_l = list_ops.tensor_list_push_back(sub_l, math_ops.cast(
          1, dtype=dtype))
      l = list_ops.tensor_list_push_back(l, sub_l)
      sub_l = list_ops.tensor_list_push_back(sub_l, math_ops.cast(
          2, dtype=dtype))
      l = list_ops.tensor_list_push_back(l, sub_l)
      # l : [[],
      #      [1],
      #      [1, 2]]
      #
      # l_zeros : [[],
      #            [0],
      #            [0, 0]]
      l_zeros = array_ops.zeros_like(l)
      outputs = []
      for _ in range(3):
        l_zeros, out = list_ops.tensor_list_pop_back(
            l_zeros, element_dtype=dtypes.variant)
        outputs.append(list_ops.tensor_list_stack(out, element_dtype=dtype))
      # Note: `outputs` contains popped values so the order is reversed.
      self.assertAllEqual(self.evaluate(outputs[2]), [])
      self.assertAllEqual(
          self.evaluate(outputs[1]), np.zeros((1,), dtype=dtype.as_numpy_dtype))
      self.assertAllEqual(
          self.evaluate(outputs[0]), np.zeros((2,), dtype=dtype.as_numpy_dtype))
  def testElementShape(self):
    """tensor_list_element_shape reports -1 when the element shape is
    unknown."""
    l = list_ops.empty_tensor_list(
        element_dtype=dtypes.float32, element_shape=None)
    shape = list_ops.tensor_list_element_shape(l, shape_type=dtypes.int32)
    self.assertEqual(self.evaluate(shape), -1)

  def testZerosLikeUninitialized(self):
    """zeros_like zeroes initialized slots; gathering those slots
    succeeds even while other slots remain uninitialized."""
    l0 = list_ops.tensor_list_reserve([], 3, element_dtype=dtypes.float32)
    l1 = list_ops.tensor_list_set_item(l0, 0, 1.)  # [1., _, _]
    zeros_1 = array_ops.zeros_like(l1)  # [0., _, _]
    l2 = list_ops.tensor_list_set_item(l1, 2, 2.)  # [1., _, 2.]
    zeros_2 = array_ops.zeros_like(l2)  # [0., _, 0.]
    # Gather indices with zeros in `zeros_1`.
    res_1 = list_ops.tensor_list_gather(
        zeros_1, [0], element_dtype=dtypes.float32)
    # Gather indices with zeros in `zeros_2`.
    res_2 = list_ops.tensor_list_gather(
        zeros_2, [0, 2], element_dtype=dtypes.float32)
    self.assertAllEqual(self.evaluate(res_1), [0.])
    self.assertAllEqual(self.evaluate(res_2), [0., 0.])
  @test_util.run_deprecated_v1
  def testSkipEagerTensorListGetItemGradAggregation(self):
    """Gradients from two reads of the same list item accumulate (sum)."""
    l = list_ops.tensor_list_reserve(
        element_shape=[], num_elements=1, element_dtype=dtypes.float32)
    x = constant_op.constant(1.0)
    l = list_ops.tensor_list_set_item(l, 0, x)
    l_read1 = list_ops.tensor_list_get_item(l, 0, element_dtype=dtypes.float32)
    l_read2 = list_ops.tensor_list_get_item(l, 0, element_dtype=dtypes.float32)
    grad = gradients_impl.gradients([l_read1, l_read2], [x])
    with self.cached_session() as sess:
      # Two unit gradients flow back into the single source tensor.
      self.assertSequenceEqual(self.evaluate(grad), [2.])

  @test_util.run_deprecated_v1
  def testSkipEagerBuildElementShape(self):
    """_build_element_shape handles None/unknown, scalar, Tensor and mixed
    (None / int / Tensor) shape inputs."""
    fn = list_ops._build_element_shape
    # Unknown shape -> -1.
    self.assertEqual(fn(None), -1)
    self.assertEqual(fn(tensor_shape.unknown_shape()), -1)
    # Scalar shape -> [] with type int32.
    self.assertEqual(fn([]).dtype, dtypes.int32)
    self.assertEqual(fn(tensor_shape.scalar()).dtype, dtypes.int32)
    self.assertAllEqual(self.evaluate(fn([])), np.array([], np.int32))
    self.assertAllEqual(
        self.evaluate(fn(tensor_shape.scalar())), np.array([], np.int32))
    # Tensor -> Tensor
    shape = constant_op.constant(1)
    self.assertIs(fn(shape), shape)
    # Shape with unknown dims -> shape list with -1's.
    shape = [None, 5]
    self.assertAllEqual(fn(shape), [-1, 5])
    self.assertAllEqual(fn(tensor_shape.TensorShape(shape)), [-1, 5])
    # Shape with unknown dims and tensor dims -> shape list with -1's and tensor
    # dims.
    t = array_ops.placeholder(dtypes.int32)
    shape = [None, 5, t]
    result = fn(shape)
    self.assertAllEqual(result[:2], [-1, 5])
    self.assertIs(result[2], t)
  def testAddN(self):
    """add_n sums tensor lists elementwise."""
    l1 = list_ops.tensor_list_from_tensor([1.0, 2.0], element_shape=[])
    l2 = list_ops.tensor_list_from_tensor([3.0, 4.0], element_shape=[])
    l3 = list_ops.tensor_list_from_tensor([5.0, 6.0], element_shape=[])
    result = math_ops.add_n((l1, l2, l3))
    result_t = list_ops.tensor_list_stack(result, element_dtype=dtypes.float32)
    self.assertAllEqual(self.evaluate(result_t), [9., 12.])

  def testAddNNestedList(self):
    """add_n recurses into variant-dtype lists of lists."""
    l1 = list_ops.tensor_list_from_tensor([1.0, 2.0], element_shape=[])
    l2 = list_ops.tensor_list_from_tensor([3.0, 4.0], element_shape=[])
    l3 = list_ops.tensor_list_from_tensor([5.0, 6.0], element_shape=[])
    l4 = list_ops.tensor_list_from_tensor([7.0, 8.0], element_shape=[])
    a = list_ops.empty_tensor_list(
        element_dtype=dtypes.variant, element_shape=[])
    a = list_ops.tensor_list_push_back(a, l1)
    a = list_ops.tensor_list_push_back(a, l2)
    b = list_ops.empty_tensor_list(
        element_dtype=dtypes.variant, element_shape=[])
    b = list_ops.tensor_list_push_back(b, l3)
    b = list_ops.tensor_list_push_back(b, l4)
    result = math_ops.add_n((a, b))
    result_0 = list_ops.tensor_list_stack(
        list_ops.tensor_list_get_item(result, 0, element_dtype=dtypes.variant),
        element_dtype=dtypes.float32)
    result_1 = list_ops.tensor_list_stack(
        list_ops.tensor_list_get_item(result, 1, element_dtype=dtypes.variant),
        element_dtype=dtypes.float32)
    self.assertAllEqual(self.evaluate(result_0), [6., 8.])
    self.assertAllEqual(self.evaluate(result_1), [10., 12.])

  def testAddTensorListsFailsIfLeadingDimsMismatch(self):
    """add_n rejects lists of different lengths."""
    l1 = list_ops.tensor_list_reserve(
        element_shape=[], element_dtype=dtypes.float32, num_elements=2)
    l2 = list_ops.tensor_list_reserve(
        element_shape=[], element_dtype=dtypes.float32, num_elements=3)
    with self.assertRaisesRegexp(
        errors.InvalidArgumentError,
        "Trying to add two lists of tensors with different lengths"):
      l = math_ops.add_n([l1, l2])
      self.evaluate(list_ops.tensor_list_stack(l, element_dtype=dtypes.float32))

  @test_util.run_v1_only("Uses placeholders")
  def testSkipEagerAddTensorListsFailsIfElementShapesMismatch(self):
    """add_n rejects lists with incompatible element shapes (checked at run
    time via placeholder-fed shapes)."""
    with self.cached_session() as sess:
      # Use placeholders instead of constant values for shapes to prevent TF's
      # shape inference from catching this early.
      l1_element_shape = array_ops.placeholder(dtype=dtypes.int32)
      l2_element_shape = array_ops.placeholder(dtype=dtypes.int32)
      l1 = list_ops.tensor_list_reserve(
          element_shape=l1_element_shape,
          element_dtype=dtypes.float32,
          num_elements=3)
      l2 = list_ops.tensor_list_reserve(
          element_shape=l2_element_shape,
          element_dtype=dtypes.float32,
          num_elements=3)
      l = math_ops.add_n([l1, l2])
      with self.assertRaisesRegexp(
          errors.InvalidArgumentError,
          "Trying to add two lists of tensors with incompatible element shapes"
      ):
        sess.run(
            list_ops.tensor_list_stack(l, element_dtype=dtypes.float32), {
                l1_element_shape: [],
                l2_element_shape: [2]
            })
  @test_util.run_deprecated_v1
  def testSkipEagerConcatShapeInference(self):
    """Static shape of tensor_list_concat: leading dim is always unknown."""

    def BuildTensor(element_shape):
      # Helper: concat an empty list declared with `element_shape`.
      l = list_ops.empty_tensor_list(
          element_dtype=dtypes.float32, element_shape=element_shape)
      return list_ops.tensor_list_concat(l, element_dtype=dtypes.float32)

    self.assertIsNone(BuildTensor(None).shape.rank)
    self.assertAllEqual(BuildTensor([None, 2, 3]).shape.as_list(), [None, 2, 3])
    self.assertAllEqual(
        BuildTensor([None, 2, None]).shape.as_list(), [None, 2, None])
    self.assertAllEqual(BuildTensor([1, 2, 3]).shape.as_list(), [None, 2, 3])

  def testConcatWithFullyDefinedElementShape(self):
    """Concat stacks elements along the first dimension."""
    l = list_ops.empty_tensor_list(
        element_dtype=dtypes.float32, element_shape=[2, 2])
    l = list_ops.tensor_list_push_back(l, [[0., 1.], [2., 3.]])
    l = list_ops.tensor_list_push_back(l, [[4., 5.], [6., 7.]])
    t = list_ops.tensor_list_concat(l, element_dtype=dtypes.float32)
    self.assertAllEqual(
        self.evaluate(t), [[0., 1.], [2., 3.], [4., 5.], [6., 7.]])

  def testConcatWithNonFullyDefinedElementShape(self):
    """Concat accepts elements whose leading dims differ."""
    l = list_ops.empty_tensor_list(
        element_dtype=dtypes.float32, element_shape=[None, 2])
    l = list_ops.tensor_list_push_back(l, [[0., 1.]])
    l = list_ops.tensor_list_push_back(l, [[2., 3.], [4., 5.]])
    t = list_ops.tensor_list_concat(l, element_dtype=dtypes.float32)
    self.assertAllEqual(self.evaluate(t), [[0., 1.], [2., 3.], [4., 5.]])

  def testConcatWithMismatchingTensorShapesFails(self):
    """Concat rejects elements whose non-leading dims disagree."""
    l = list_ops.empty_tensor_list(
        element_dtype=dtypes.float32, element_shape=None)
    l = list_ops.tensor_list_push_back(l, [[0., 1.]])
    l = list_ops.tensor_list_push_back(l, [[2.], [4.]])
    with self.assertRaisesRegexp(
        errors.InvalidArgumentError, r"Incompatible shapes during merge: "
        r"\[2\] vs. \[1\]"):
      t = list_ops.tensor_list_concat(l, element_dtype=dtypes.float32)
      self.evaluate(t)

  def testConcatEmptyListWithFullyDefinedElementShape(self):
    """Concat of an empty list yields an empty tensor with the right trailing
    dims."""
    l = list_ops.empty_tensor_list(
        element_dtype=dtypes.float32, element_shape=[5, 2])
    t = list_ops.tensor_list_concat(l, element_dtype=dtypes.float32)
    self.assertAllEqual(self.evaluate(t).shape, (0, 2))
    l = list_ops.empty_tensor_list(
        element_dtype=dtypes.float32, element_shape=[None, 2])
    t = list_ops.tensor_list_concat(l, element_dtype=dtypes.float32)
    self.assertAllEqual(self.evaluate(t).shape, (0, 2))

  def testConcatEmptyListWithUnknownElementShapeFails(self):
    """Concat of an empty list needs all but the first dim defined."""
    l = list_ops.empty_tensor_list(
        element_dtype=dtypes.float32, element_shape=None)
    with self.assertRaisesRegexp(
        errors.InvalidArgumentError,
        "All except the first dimension must be fully"
        " defined when concating an empty tensor list"):
      t = list_ops.tensor_list_concat(l, element_dtype=dtypes.float32)
      self.evaluate(t)

  def testConcatEmptyListWithPartiallyDefinedElementShapeFails(self):
    """A trailing None dim is also rejected for an empty list."""
    l = list_ops.empty_tensor_list(
        element_dtype=dtypes.float32, element_shape=[2, None])
    with self.assertRaisesRegexp(
        errors.InvalidArgumentError,
        "All except the first dimension must be fully"
        " defined when concating an empty tensor list"):
      t = list_ops.tensor_list_concat(l, element_dtype=dtypes.float32)
      self.evaluate(t)

  def testConcatListWithScalarElementShapeFails(self):
    """Concat requires at least rank-1 elements (declared shape)."""
    l = list_ops.empty_tensor_list(
        element_dtype=dtypes.float32, element_shape=tensor_shape.scalar())
    with self.assertRaisesRegexp(
        errors.InvalidArgumentError,
        "Concat requires elements to be at least vectors, "
        "found scalars instead"):
      t = list_ops.tensor_list_concat(l, element_dtype=dtypes.float32)
      self.evaluate(t)

  def testConcatListWithScalarElementsFails(self):
    """Concat requires at least rank-1 elements (actual values)."""
    l = list_ops.empty_tensor_list(
        element_dtype=dtypes.float32, element_shape=None)
    l1 = list_ops.tensor_list_push_back(l, 1.)
    with self.assertRaisesRegexp(
        errors.InvalidArgumentError, "Concat saw a scalar shape at index 0"
        " but requires at least vectors"):
      t = list_ops.tensor_list_concat(l1, element_dtype=dtypes.float32)
      self.evaluate(t)
    l1 = list_ops.tensor_list_push_back(l, [1.])
    l1 = list_ops.tensor_list_push_back(l1, 2.)
    with self.assertRaisesRegexp(
        errors.InvalidArgumentError, "Concat saw a scalar shape at index 1"
        " but requires at least vectors"):
      t = list_ops.tensor_list_concat(l1, element_dtype=dtypes.float32)
      self.evaluate(t)
  def testConcatWithUninitializedTensorsUseListElementShape(self):
    """Uninitialized slots are concatenated as zeros using the list's
    declared element shape."""
    l = list_ops.tensor_list_reserve(
        element_dtype=dtypes.float32, element_shape=[2, 3], num_elements=3)
    t = list_ops.tensor_list_concat(l, element_dtype=dtypes.float32)
    self.assertAllEqual(np.zeros((6, 3)), t)

  def testConcatWithUninitializedTensorsUseProvidedElementShape(self):
    """The element_shape argument to concat fills in for an unknown list
    element shape."""
    l = list_ops.tensor_list_reserve(
        element_dtype=dtypes.float32, element_shape=None, num_elements=3)
    t = list_ops.tensor_list_concat(
        l, element_dtype=dtypes.float32, element_shape=(2, 3))
    self.assertAllEqual(np.zeros((6, 3)), t)

  def testConcatWithUninitializedTensorsUseProvidedElementShapeAndLengths(self):
    """leading_dims supplies per-element leading sizes for uninitialized
    slots (TensorListConcatV2)."""
    l = list_ops.tensor_list_reserve(
        element_dtype=dtypes.float32, element_shape=None, num_elements=3)
    t, _ = gen_list_ops.tensor_list_concat_v2(
        l,
        element_dtype=dtypes.float32,
        element_shape=list_ops._build_element_shape((None, 3)),
        leading_dims=[2, 3, 5])
    self.assertAllEqual(np.zeros((10, 3)), t)
    l = list_ops.tensor_list_set_item(l, 1, [[2., 3.], [4., 5.], [6., 7.]])
    t, _ = gen_list_ops.tensor_list_concat_v2(
        l,
        element_dtype=dtypes.float32,
        element_shape=list_ops._build_element_shape((None, 2)),
        leading_dims=[2, 3, 4])
    self.assertAllEqual([[0., 0.], [0., 0.], [2., 3.], [4., 5.], [6., 7.],
                         [0., 0.], [0., 0.], [0., 0.], [0., 0.]], t)

  def testConcatWithUninitializedTensorsInferShapeFromElements(self):
    """An initialized element's shape is reused for uninitialized slots."""
    l = list_ops.tensor_list_reserve(
        element_dtype=dtypes.float32, element_shape=None, num_elements=3)
    l = list_ops.tensor_list_set_item(l, 1, [[2., 3.], [4., 5.], [6., 7.]])
    t = list_ops.tensor_list_concat(l, element_dtype=dtypes.float32)
    self.assertAllEqual([[0., 0.], [0., 0.], [0., 0.], [2., 3.], [4., 5.],
                         [6., 7.], [0., 0.], [0., 0.], [0., 0.]], t)

  def testConcatWithUninitializedTensorsFailsIfNoElementShape(self):
    """All-uninitialized concat fails without a usable element shape."""
    l = list_ops.tensor_list_reserve(
        element_dtype=dtypes.float32, element_shape=None, num_elements=3)
    with self.assertRaisesRegexp(
        errors.InvalidArgumentError,
        r"Trying to concat list with only uninitialized tensors "
        r"but element_shape_except_first_dim_ is not fully defined"):
      t = list_ops.tensor_list_concat(l, element_dtype=dtypes.float32)
      self.evaluate(t)

  def testConcatWithUninitializedTensorsFailsIfNoInputLengths(self):
    """A None leading dim needs leading_dims when slots are uninitialized."""
    l = list_ops.tensor_list_reserve(
        element_dtype=dtypes.float32, element_shape=[None, 3], num_elements=3)
    with self.assertRaisesRegexp(
        errors.InvalidArgumentError,
        r"List contains uninitialized tensor at index 0"
        r" but leading_dims has only 0 elements."):
      t = list_ops.tensor_list_concat(l, element_dtype=dtypes.float32)
      self.evaluate(t)
  def testEvenSplit(self):
    """tensor_list_split partitions a tensor into equal-length elements."""

    def RunTest(input_tensor, lengths, expected_stacked_output):
      # Helper: split then restack and compare.
      l = list_ops.tensor_list_split(
          input_tensor, element_shape=None, lengths=lengths)
      self.assertAllEqual(
          list_ops.tensor_list_stack(l, element_dtype=dtypes.float32),
          expected_stacked_output)

    RunTest([1., 2., 3.], [1, 1, 1], [[1.], [2.], [3.]])
    RunTest([1., 2., 3., 4.], [2, 2], [[1., 2.], [3., 4.]])
    RunTest([[1., 2.], [3., 4.]], [1, 1], [[[1., 2.]], [[3., 4.]]])

  def testUnevenSplit(self):
    """tensor_list_split supports unequal lengths per element."""
    l = list_ops.tensor_list_split([1., 2., 3., 4., 5],
                                   element_shape=None,
                                   lengths=[3, 2])
    self.assertAllEqual(list_ops.tensor_list_length(l), 2)
    self.assertAllEqual(
        list_ops.tensor_list_get_item(l, 0, element_dtype=dtypes.float32),
        [1., 2., 3.])
    self.assertAllEqual(
        list_ops.tensor_list_get_item(l, 1, element_dtype=dtypes.float32),
        [4., 5.])

  @test_util.run_deprecated_v1
  def testSkipEagerSplitWithInvalidTensorShapeFails(self):
    """Split rejects scalar (rank-0) input tensors at run time."""
    with self.cached_session():
      tensor = array_ops.placeholder(dtype=dtypes.float32)
      l = list_ops.tensor_list_split(tensor, element_shape=None, lengths=[1])
      with self.assertRaisesRegexp(
          errors.InvalidArgumentError,
          r"Tensor must be at least a vector, but saw shape: \[\]"):
        l.eval({tensor: 1})

  @test_util.run_deprecated_v1
  def testSkipEagerSplitWithInvalidLengthsShapeFails(self):
    """Split rejects a non-vector `lengths` argument at run time."""
    with self.cached_session():
      lengths = array_ops.placeholder(dtype=dtypes.int64)
      l = list_ops.tensor_list_split([1., 2.],
                                     element_shape=None,
                                     lengths=lengths)
      with self.assertRaisesRegexp(
          errors.InvalidArgumentError,
          r"Expected lengths to be a vector, received shape: \[\]"):
        l.eval({lengths: 1})

  def testSplitWithInvalidLengthsFails(self):
    """Split rejects negative, oversized, or under-consuming lengths."""
    with self.assertRaisesRegexp(errors.InvalidArgumentError,
                                 r"Invalid value in lengths: -1"):
      l = list_ops.tensor_list_split([1., 2.],
                                     element_shape=None,
                                     lengths=[1, -1])
      self.evaluate(l)
    with self.assertRaisesRegexp(
        errors.InvalidArgumentError,
        r"Attempting to slice \[0, 3\] from tensor with length 2"):
      l = list_ops.tensor_list_split([1., 2.], element_shape=None, lengths=[3])
      self.evaluate(l)
    with self.assertRaisesRegexp(
        errors.InvalidArgumentError,
        r"Unused values in tensor. Length of tensor: 2 Values used: 1"):
      l = list_ops.tensor_list_split([1., 2.], element_shape=None, lengths=[1])
      self.evaluate(l)
  @test_util.run_deprecated_v1
  def testSkipEagerSplitWithScalarElementShapeFails(self):
    """Graph mode: split requires element_shape of rank >= 1 (caught both by
    shape inference and at run time via a placeholder)."""
    with self.assertRaisesRegexp(ValueError,
                                 r"Shapes must be equal rank, but are 1 and 0"):
      l = list_ops.tensor_list_split([1., 2.], element_shape=[], lengths=[1, 1])
    with self.cached_session():
      with self.assertRaisesRegexp(
          errors.InvalidArgumentError,
          r"TensorListSplit requires element_shape to be at least of rank 1, "
          r"but saw: \[\]"):
        element_shape = array_ops.placeholder(dtype=dtypes.int32)
        l = list_ops.tensor_list_split([1., 2.],
                                       element_shape=element_shape,
                                       lengths=[1, 1])
        l.eval({element_shape: []})

  def testEagerOnlySplitWithScalarElementShapeFails(self):
    """Eager mode: same rank >= 1 requirement, raised immediately."""
    if context.executing_eagerly():
      with self.assertRaisesRegexp(
          errors.InvalidArgumentError,
          r"TensorListSplit requires element_shape to be at least of rank 1, "
          r"but saw: \[\]"):
        list_ops.tensor_list_split([1., 2.], element_shape=[], lengths=[1, 1])

  @test_util.run_deprecated_v1
  def testSkipEagerSplitWithIncompatibleTensorShapeAndElementShapeFails(self):
    """Graph mode: split rejects a tensor incompatible with element_shape."""
    with self.assertRaisesRegexp(ValueError,
                                 r"Shapes must be equal rank, but are 2 and 1"):
      l = list_ops.tensor_list_split([[1.], [2.]],
                                     element_shape=[1],
                                     lengths=[1, 1])
    with self.cached_session():
      with self.assertRaisesRegexp(
          errors.InvalidArgumentError,
          r"tensor shape \[2,1\] is not compatible with element_shape \[1\]"):
        element_shape = array_ops.placeholder(dtype=dtypes.int32)
        l = list_ops.tensor_list_split([[1.], [2.]],
                                       element_shape=element_shape,
                                       lengths=[1, 1])
        l.eval({element_shape: [1]})

  def testEagerOnlySplitWithIncompatibleTensorShapeAndElementShapeFails(self):
    """Eager mode: same incompatibility, raised immediately."""
    if context.executing_eagerly():
      with self.assertRaisesRegexp(
          errors.InvalidArgumentError,
          r"tensor shape \[2,1\] is not compatible with element_shape \[1\]"):
        list_ops.tensor_list_split([[1.], [2.]],
                                   element_shape=[1],
                                   lengths=[1, 1])
  def testResizeGrow(self):
    """Resizing up preserves existing elements and extends the length."""
    l = list_ops.tensor_list_from_tensor([1., 2.], element_shape=[])
    l = list_ops.tensor_list_resize(l, 4)
    self.assertEqual(self.evaluate(list_ops.tensor_list_length(l)), 4)
    self.assertEqual(
        self.evaluate(
            list_ops.tensor_list_get_item(l, 0, element_dtype=dtypes.float32)),
        1.)
    self.assertEqual(
        self.evaluate(
            list_ops.tensor_list_get_item(l, 1, element_dtype=dtypes.float32)),
        2.)

  def testResizeShrink(self):
    """Resizing down truncates trailing elements."""
    l = list_ops.tensor_list_from_tensor([1., 2., 3.], element_shape=[])
    l = list_ops.tensor_list_resize(l, 2)
    self.assertEqual(self.evaluate(list_ops.tensor_list_length(l)), 2)
    self.assertAllEqual(
        self.evaluate(
            list_ops.tensor_list_stack(l, element_dtype=dtypes.float32)),
        [1., 2.])

  def testResizeWithInvalidSizeFails(self):
    """Negative sizes are rejected."""
    with self.assertRaisesRegexp(
        errors.InvalidArgumentError,
        "TensorListSlice expects size to be non-negative"):
      l = list_ops.tensor_list_from_tensor([1., 2., 3.], element_shape=[])
      l = list_ops.tensor_list_resize(l, -1)
      self.evaluate(l)

  @test_util.run_deprecated_v1
  @test_util.enable_control_flow_v2
  def testSkipEagerResizeGrad(self):
    """Gradients flow through an implicit resize triggered by set_item with
    resize_if_index_out_of_bounds=True."""
    t = constant_op.constant([1., 2., 3.])
    l = list_ops.tensor_list_from_tensor(t, element_shape=[])
    l = list_ops.tensor_list_set_item(
        l, 3, 4., resize_if_index_out_of_bounds=True)
    t1 = list_ops.tensor_list_stack(l, element_dtype=dtypes.float32)
    grad = gradients_impl.gradients(t1, t)[0]
    self.assertAllEqual(self.evaluate(grad), [1., 1., 1.])
  def testHandleDataAcrossFunctionCall(self):
    """Element-shape handle data survives a tf.function boundary, so items
    read outside the function keep their static shape."""

    @def_function.function
    def func():
      t = constant_op.constant([1., 2., 3.])
      l = list_ops.tensor_list_from_tensor(t, element_shape=[])
      return l

    tensor_list = func()
    element = list_ops.tensor_list_get_item(
        tensor_list, 0, element_dtype=dtypes.float32)
    self.assertAllEqual(element.shape.as_list(), [])
# Run the test suite when this file is executed directly.
if __name__ == "__main__":
  test.main()
|
{
"content_hash": "c71169aa7ea7ee5a801420e44fa7adf2",
"timestamp": "",
"source": "github",
"line_count": 1562,
"max_line_length": 80,
"avg_line_length": 44.2887323943662,
"alnum_prop": 0.643793636797294,
"repo_name": "ghchinoy/tensorflow",
"id": "3c35b9767e9a1434bd7a4c5c06d10a63845b8b00",
"size": "69868",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tensorflow/python/kernel_tests/list_ops_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "3568"
},
{
"name": "Batchfile",
"bytes": "15317"
},
{
"name": "C",
"bytes": "699905"
},
{
"name": "C#",
"bytes": "8446"
},
{
"name": "C++",
"bytes": "67022491"
},
{
"name": "CMake",
"bytes": "206499"
},
{
"name": "Dockerfile",
"bytes": "73602"
},
{
"name": "Go",
"bytes": "1585039"
},
{
"name": "HTML",
"bytes": "4680118"
},
{
"name": "Java",
"bytes": "836400"
},
{
"name": "Jupyter Notebook",
"bytes": "1665583"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "98194"
},
{
"name": "Objective-C",
"bytes": "94022"
},
{
"name": "Objective-C++",
"bytes": "175222"
},
{
"name": "PHP",
"bytes": "17600"
},
{
"name": "Pascal",
"bytes": "3239"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "48407007"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Ruby",
"bytes": "4733"
},
{
"name": "Shell",
"bytes": "476920"
},
{
"name": "Smarty",
"bytes": "27495"
},
{
"name": "Swift",
"bytes": "56155"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from setuptools import setup

# All package metadata is delegated to pbr (which reads setup.cfg), so the
# setup() call itself stays minimal.
setup(
    setup_requires=['pbr'],
    pbr=True,
)
|
{
"content_hash": "f422de37e6c3f1cfc4b5a7ee231f9711",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 38,
"avg_line_length": 17.142857142857142,
"alnum_prop": 0.6916666666666667,
"repo_name": "mfalesni/mgmtsystem",
"id": "a88539e8933eabed0de16c1c3cb8a1717e34679e",
"size": "120",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "467439"
}
],
"symlink_target": ""
}
|
"""Futures for long-running operations returned from Google Cloud APIs.
These futures can be used to synchronously wait for the result of a
long-running operation using :meth:`Operation.result`:
.. code-block:: python
operation = my_api_client.long_running_method()
result = operation.result()
Or asynchronously using callbacks and :meth:`Operation.add_done_callback`:
.. code-block:: python
operation = my_api_client.long_running_method()
def my_callback(future):
result = future.result()
operation.add_done_callback(my_callback)
"""
import functools
import threading
from google.api_core import exceptions
from google.api_core import protobuf_helpers
from google.api_core.future import polling
from google.longrunning import operations_pb2
from google.protobuf import json_format
from google.rpc import code_pb2
class Operation(polling.PollingFuture):
    """A Future for interacting with a Google API Long-Running Operation.

    Args:
        operation (google.longrunning.operations_pb2.Operation): The
            initial operation.
        refresh (Callable[[], ~.api_core.operation.Operation]): A callable that
            returns the latest state of the operation.
        cancel (Callable[[], None]): A callable that tries to cancel
            the operation.
        result_type (func:`type`): The protobuf type for the operation's
            result.
        metadata_type (func:`type`): The protobuf type for the operation's
            metadata.
        retry (google.api_core.retry.Retry): The retry configuration used
            when polling. This can be used to control how often :meth:`done`
            is polled. Regardless of the retry's ``deadline``, it will be
            overridden by the ``timeout`` argument to :meth:`result`.
    """

    def __init__(
        self,
        operation,
        refresh,
        cancel,
        result_type,
        metadata_type=None,
        retry=polling.DEFAULT_RETRY,
    ):
        super(Operation, self).__init__(retry=retry)
        self._operation = operation
        self._refresh = refresh
        self._cancel = cancel
        self._result_type = result_type
        self._metadata_type = metadata_type
        # Serializes completion handling between the polling thread and the
        # main thread (see _set_result_from_operation).
        self._completion_lock = threading.Lock()
        # Invoke this in case the operation came back already complete.
        self._set_result_from_operation()

    @property
    def operation(self):
        """google.longrunning.Operation: The current long-running operation."""
        return self._operation

    @property
    def metadata(self):
        """google.protobuf.Message: the current operation metadata."""
        if not self._operation.HasField("metadata"):
            return None

        return protobuf_helpers.from_any_pb(
            self._metadata_type, self._operation.metadata
        )

    @classmethod
    def deserialize(cls, payload):
        """Deserialize a ``google.longrunning.Operation`` protocol buffer.

        Args:
            payload (bytes): A serialized operation protocol buffer.

        Returns:
            ~.operations_pb2.Operation: An Operation protobuf object.
        """
        # Note: first parameter renamed from ``self`` to ``cls`` to follow
        # the classmethod convention; callers are unaffected.
        return operations_pb2.Operation.FromString(payload)

    def _set_result_from_operation(self):
        """Set the result or exception from the operation if it is complete."""
        # This must be done in a lock to prevent the polling thread
        # and main thread from both executing the completion logic
        # at the same time.
        with self._completion_lock:
            # If the operation isn't complete or if the result has already been
            # set, do not call set_result/set_exception again.
            # Note: self._result_set is set to True in set_result and
            # set_exception, in case those methods are invoked directly.
            if not self._operation.done or self._result_set:
                return

            if self._operation.HasField("response"):
                response = protobuf_helpers.from_any_pb(
                    self._result_type, self._operation.response
                )
                self.set_result(response)
            elif self._operation.HasField("error"):
                exception = exceptions.from_grpc_status(
                    status_code=self._operation.error.code,
                    message=self._operation.error.message,
                    errors=(self._operation.error,),
                    response=self._operation,
                )
                self.set_exception(exception)
            else:
                # A "done" operation must carry either a response or an error;
                # anything else indicates a server-side protocol violation.
                exception = exceptions.GoogleAPICallError(
                    "Unexpected state: Long-running operation had neither "
                    "response nor error set."
                )
                self.set_exception(exception)

    def _refresh_and_update(self, retry=polling.DEFAULT_RETRY):
        """Refresh the operation and update the result if needed.

        Args:
            retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
        """
        # If the currently cached operation is done, no need to make another
        # RPC as it will not change once done.
        if not self._operation.done:
            self._operation = self._refresh(retry=retry)
            self._set_result_from_operation()

    def done(self, retry=polling.DEFAULT_RETRY):
        """Checks to see if the operation is complete.

        Args:
            retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.

        Returns:
            bool: True if the operation is complete, False otherwise.
        """
        self._refresh_and_update(retry)
        return self._operation.done

    def cancel(self):
        """Attempt to cancel the operation.

        Returns:
            bool: True if the cancel RPC was made, False if the operation is
                already complete.
        """
        if self.done():
            return False

        self._cancel()
        return True

    def cancelled(self):
        """True if the operation was cancelled."""
        self._refresh_and_update()
        return (
            self._operation.HasField("error")
            and self._operation.error.code == code_pb2.CANCELLED
        )
def _refresh_http(api_request, operation_name, retry=None):
    """Refresh an operation using a JSON/HTTP client.

    Args:
        api_request (Callable): A callable used to make an API request. This
            should generally be
            :meth:`google.cloud._http.Connection.api_request`.
        operation_name (str): The name of the operation.
        retry (google.api_core.retry.Retry): (Optional) retry policy

    Returns:
        google.longrunning.operations_pb2.Operation: The operation.
    """
    # Optionally wrap the transport callable with the retry policy, then
    # GET the operation resource and parse the JSON body into a proto.
    request = retry(api_request) if retry is not None else api_request
    payload = request(method="GET", path="operations/{}".format(operation_name))
    return json_format.ParseDict(payload, operations_pb2.Operation())
def _cancel_http(api_request, operation_name):
"""Cancel an operation using a JSON/HTTP client.
Args:
api_request (Callable): A callable used to make an API request. This
should generally be
:meth:`google.cloud._http.Connection.api_request`.
operation_name (str): The name of the operation.
"""
path = "operations/{}:cancel".format(operation_name)
api_request(method="POST", path=path)
def from_http_json(operation, api_request, result_type, **kwargs):
    """Create an operation future using a HTTP/JSON client.

    This interacts with the long-running operations `service`_ (specific
    to a given API) via `HTTP/JSON`_.

    .. _HTTP/JSON: https://cloud.google.com/speech/reference/rest/\
            v1beta1/operations#Operation

    Args:
        operation (dict): Operation as a dictionary.
        api_request (Callable): A callable used to make an API request. This
            should generally be
            :meth:`google.cloud._http.Connection.api_request`.
        result_type (:func:`type`): The protobuf result type.
        kwargs: Keyword args passed into the :class:`Operation` constructor.

    Returns:
        ~.api_core.operation.Operation: The operation future to track the given
            operation.
    """
    operation_proto = json_format.ParseDict(operation, operations_pb2.Operation())
    # Bind the transport callable and operation name now, so the returned
    # future can poll/cancel later without any extra arguments.
    refresh = functools.partial(_refresh_http, api_request, operation_proto.name)
    cancel = functools.partial(_cancel_http, api_request, operation_proto.name)
    return Operation(operation_proto, refresh, cancel, result_type, **kwargs)
def _refresh_grpc(operations_stub, operation_name, retry=None, metadata=None):
    """Refresh an operation using a gRPC client.

    Args:
        operations_stub (google.longrunning.operations_pb2.OperationsStub):
            The gRPC operations stub.
        operation_name (str): The name of the operation.
        retry (google.api_core.retry.Retry): (Optional) retry policy
        metadata (Optional[List[Tuple[str, str]]]): Additional metadata to
            pass to the rpc.

    Returns:
        google.longrunning.operations_pb2.Operation: The operation.
    """
    # ``from_grpc`` binds ``metadata=grpc_metadata`` into a partial over this
    # function, so the keyword must be accepted here (previously it was not,
    # which made every refresh raise TypeError when metadata was bound).
    request_pb = operations_pb2.GetOperationRequest(name=operation_name)
    rpc = operations_stub.GetOperation
    if retry is not None:
        rpc = retry(rpc)
    if metadata is not None:
        # gRPC stub calls accept per-call metadata as a keyword argument.
        return rpc(request_pb, metadata=metadata)
    return rpc(request_pb)
def _cancel_grpc(operations_stub, operation_name, metadata=None):
    """Cancel an operation using a gRPC client.

    Args:
        operations_stub (google.longrunning.operations_pb2.OperationsStub):
            The gRPC operations stub.
        operation_name (str): The name of the operation.
        metadata (Optional[List[Tuple[str, str]]]): Additional metadata to
            pass to the rpc.
    """
    # ``from_grpc`` binds ``metadata=grpc_metadata`` into a partial over this
    # function, so the keyword must be accepted here (previously it was not,
    # which made every cancel raise TypeError when metadata was bound).
    request_pb = operations_pb2.CancelOperationRequest(name=operation_name)
    if metadata is not None:
        operations_stub.CancelOperation(request_pb, metadata=metadata)
    else:
        operations_stub.CancelOperation(request_pb)
def from_grpc(operation, operations_stub, result_type, grpc_metadata=None, **kwargs):
    """Create an operation future using a gRPC client.

    This interacts with the long-running operations `service`_ (specific
    to a given API) via gRPC.

    .. _service: https://github.com/googleapis/googleapis/blob/\
                 050400df0fdb16f63b63e9dee53819044bffc857/\
                 google/longrunning/operations.proto#L38

    Args:
        operation (google.longrunning.operations_pb2.Operation): The operation.
        operations_stub (google.longrunning.operations_pb2.OperationsStub):
            The operations stub.
        result_type (:func:`type`): The protobuf result type.
        grpc_metadata (Optional[List[Tuple[str, str]]]): Additional metadata to pass
            to the rpc.
        kwargs: Keyword args passed into the :class:`Operation` constructor.

    Returns:
        ~.api_core.operation.Operation: The operation future to track the given
            operation.
    """
    # NOTE(review): these partials bind ``metadata=grpc_metadata`` into
    # _refresh_grpc/_cancel_grpc — verify those helpers accept a ``metadata``
    # keyword; otherwise every refresh/cancel call raises TypeError.
    refresh = functools.partial(
        _refresh_grpc, operations_stub, operation.name, metadata=grpc_metadata
    )
    cancel = functools.partial(
        _cancel_grpc, operations_stub, operation.name, metadata=grpc_metadata
    )
    return Operation(operation, refresh, cancel, result_type, **kwargs)
def from_gapic(operation, operations_client, result_type, grpc_metadata=None, **kwargs):
    """Create an operation future from a gapic client.

    This interacts with the long-running operations `service`_ (specific
    to a given API) via a gapic client.

    .. _service: https://github.com/googleapis/googleapis/blob/\
                 050400df0fdb16f63b63e9dee53819044bffc857/\
                 google/longrunning/operations.proto#L38

    Args:
        operation (google.longrunning.operations_pb2.Operation): The operation.
        operations_client (google.api_core.operations_v1.OperationsClient):
            The operations client.
        result_type (:func:`type`): The protobuf result type.
        grpc_metadata (Optional[List[Tuple[str, str]]]): Additional metadata
            to pass to the rpc.
        kwargs: Keyword args passed into the :class:`Operation` constructor.

    Returns:
        ~.api_core.operation.Operation: The operation future to track the
        given operation.
    """
    op_name = operation.name
    # The gapic client already exposes poll/cancel methods; just bind the
    # operation name and metadata onto them.
    do_refresh = functools.partial(
        operations_client.get_operation, op_name, metadata=grpc_metadata)
    do_cancel = functools.partial(
        operations_client.cancel_operation, op_name, metadata=grpc_metadata)
    return Operation(operation, do_refresh, do_cancel, result_type, **kwargs)
|
{
"content_hash": "0615b5af46077d1eb94fc39ad6c1e4d7",
"timestamp": "",
"source": "github",
"line_count": 337,
"max_line_length": 88,
"avg_line_length": 36.52818991097923,
"alnum_prop": 0.6456539398862713,
"repo_name": "martbhell/wasthereannhlgamelastnight",
"id": "b17f753b41315d31e2b0db35ef3613eaa7aca70b",
"size": "12885",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/lib/google/api_core/operation.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "730"
},
{
"name": "HTML",
"bytes": "8959"
},
{
"name": "JavaScript",
"bytes": "3318"
},
{
"name": "Python",
"bytes": "5989638"
}
],
"symlink_target": ""
}
|
from collections import Counter
from model import Model
import random
P = 0.2
SEED = 144144
random.seed(SEED)
class RandomP(Model):
    """
    Class: RandomP
    --------------
    Each already-colored neighbor independently "infects" this node with
    probability P.  Exactly one infection makes the node adopt that
    neighbor's color; zero or multiple infections leave it unchanged.
    """

    def update(self, node_color, node):
        """Return (changed, new_color) for *node* given the current coloring."""
        original = node_color[node]
        # Colors of the neighbors that already have a (truthy) color.
        candidate_colors = [node_color[n] for n in self.adj_list[node]
                            if node_color[n]]

        # One independent P-trial per colored neighbor, in neighbor order.
        infections = 0
        last_color = None
        for color in candidate_colors:
            if random.random() <= P:
                infections += 1
                last_color = color

        if infections == 0:
            # Nobody infected the node: no change, no color.
            return (False, None)
        if infections > 1:
            # Competing infections cancel out; the node keeps its color.
            return (False, original)
        # A single infection: the node changes only if the color differs.
        return (last_color != original, last_color)
|
{
"content_hash": "75f8136473590cfd6a0d4b134f0d5368",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 80,
"avg_line_length": 31.743589743589745,
"alnum_prop": 0.6680129240710824,
"repo_name": "visemet/pandemaniac-modelsim",
"id": "f2f1851038e4104351e043ee3cd79d5a944c5da8",
"size": "1238",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "models/random_p.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "26896"
}
],
"symlink_target": ""
}
|
import os, shutil
import sys
import glob
import csv
from collections import OrderedDict
def extractText(fileCsvPath):
    """Parse a LIEPA transcript csv into an ordered utterance map.

    Each row is expected as ``[_, content, fileName, ...]``.  The utterance
    key is ``<speakerNum>_<recording-name>`` derived from the wav file name,
    whose last ``_``-separated chunk is the speaker token (e.g. ``d123``).

    Args:
        fileCsvPath: Path of the csv file to read (utf-8).

    Returns:
        collections.OrderedDict: key -> [key, fileName, stripped content,
        speakerNum], sorted by key.
    """
    results = {}
    # ``with`` guarantees the file handle is closed; the original left the
    # handle open until garbage collection.
    with open(fileCsvPath, encoding='utf-8') as csv_file:
        for row in csv.reader(csv_file):
            fileName = row[2]
            # Base name without directory and without the (dotted) extension.
            baseName = os.path.basename(fileName).split(os.extsep, 1)[0]
            speaker = baseName.split("_")[-1]
            # NOTE(review): str.strip("d") removes *all* leading and trailing
            # 'd' characters, not only a prefix -- assumed safe for tokens
            # like "d123"; confirm against the corpus naming scheme.
            speakerNum = str(speaker.strip("d")).zfill(3)
            # NOTE(review): strip("_" + speaker) strips a character *set*, so
            # it can also eat trailing characters that happen to be in that
            # set -- confirm this matches the intended key format.
            key = speakerNum + "_" + baseName.strip("_" + speaker)
            content = row[1]
            results[key] = [key, fileName, content.strip(), speakerNum]
    return OrderedDict(sorted(results.items(), key=lambda t: t[0]))
def formatWavScpFileRecord(record):
    """Kaldi ``wav.scp`` line: ``<utterance-id> <wav-path>``."""
    return " ".join((record[0], record[1]))
def formatTextFileRecord(record):
    """Kaldi ``text`` line: ``<utterance-id> <transcript>``."""
    return " ".join((record[0], record[2]))
# utils/utt2spk_to_spk2utt.pl data/train/utt2spk > data/train/spk2utt
def formatUtt2spkFileRecord(record):
    """Kaldi ``utt2spk`` line: ``<utterance-id> <speaker-id>``."""
    return " ".join((record[0], record[3]))
def extractUtterances(record):
    """Pick the (already stripped) utterance text out of a record."""
    utterance = record[2]
    return utterance
# Unique utterance texts accumulated from both splits; written to
# corpus.txt at the end for language-model training.
corpus = set()
# --- Kaldi "test" data dir: write text, wav.scp and utt2spk in one pass ---
with open('./target/data/test/text', 'w') as text_file, open('./target/data/test/wav.scp', 'w') as wav_scp_file, open('./target/data/test/utt2spk', 'w') as utt2spk_file:
    exractedTextMap = extractText('./target/liepa_test.csv')
    wavScpArr = list(map(formatWavScpFileRecord,exractedTextMap.values()))
    textArr = list(map(formatTextFileRecord,exractedTextMap.values()))
    utt2spkArr = list(map(formatUtt2spkFileRecord,exractedTextMap.values()))
    aUtteranceSet = set(map(extractUtterances,exractedTextMap.values()))
    corpus.update(aUtteranceSet)
    text_file.write("\n".join(textArr))
    wav_scp_file.write("\n".join(wavScpArr))
    utt2spk_file.write("\n".join(utt2spkArr))
# --- Kaldi "train" data dir: same layout as the test split above ---
with open('./target/data/train/text', 'w') as text_file, open('./target/data/train/wav.scp', 'w') as wav_scp_file, open('./target/data/train/utt2spk', 'w') as utt2spk_file:
    exractedTextMap = extractText('./target/liepa_train.csv')
    wavScpArr = list(map(formatWavScpFileRecord,exractedTextMap.values()))
    textArr = list(map(formatTextFileRecord,exractedTextMap.values()))
    utt2spkArr = list(map(formatUtt2spkFileRecord,exractedTextMap.values()))
    aUtteranceSet = set(map(extractUtterances,exractedTextMap.values()))
    corpus.update(aUtteranceSet)
    text_file.write("\n".join(textArr))
    wav_scp_file.write("\n".join(wavScpArr))
    utt2spk_file.write("\n".join(utt2spkArr))
# One unique utterance per line, sorted for reproducible output.
with open('./target/data/local/corpus.txt', 'w') as corpus_file:
    corpus_file.write("\n".join(sorted(corpus)))
# cp sphinx_files/etc/liepa.dic ./target/data/local/dict/lexicon.txt
# inserto to ./target/data/local/dict/lexicon.bak "!SIL sil\n<UNK> spn\n"
# Copy the pronunciation lexicon, then prepend the silence/unknown entries
# in-place (r+ with seek(0) rewrites the file from the start).
shutil.copyfile("../target/liepa.dic", "./target/data/local/dict/lexicon.txt")
with open("./target/data/local/dict/lexicon.txt", 'r+') as f:
    content = f.read()
    f.seek(0, 0)
    f.write('!SIL sil\n<UNK> spn\n' + content)
# Phone inventory plus the silence-phone definition files Kaldi expects.
shutil.copyfile("../target/liepa.phone", "./target/data/local/dict/nonsilence_phones.txt")
with open("./target/data/local/dict/optional_silence.txt",'w') as f:
    f.write("sil\n")
with open("./target/data/local/dict/silence_phones.txt",'w') as f:
    f.write("sil\nspn\n")
#utils/fix_data_dir.sh data/train/
#utils/fix_data_dir.sh data/test/
|
{
"content_hash": "a4f0a0139984f2cf52b0ae935a7b5e0d",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 172,
"avg_line_length": 38.433734939759034,
"alnum_prop": 0.6905956112852665,
"repo_name": "mondhs/kaldi-liepa-train",
"id": "a9185661cc37978ec6f8ac7c83586b9424170c8d",
"size": "3215",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "opt/kaldi-liepa-train/tool_data_prep/02_data_prep.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "5949"
},
{
"name": "Shell",
"bytes": "8944"
}
],
"symlink_target": ""
}
|
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1_event import V1Event
class TestV1Event(unittest.TestCase):
    """Unit test stubs for the generated V1Event model."""

    def setUp(self):
        # No shared fixtures are required for these stubs.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testV1Event(self):
        """Smoke-test that a V1Event instance can be constructed."""
        event = kubernetes.client.models.v1_event.V1Event()
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "972db7692318963d986e5617e1b25660",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 105,
"avg_line_length": 19.4,
"alnum_prop": 0.6675257731958762,
"repo_name": "sebgoa/client-python",
"id": "b7f41ea0b77fba2b119bc0387355b79f3191e304",
"size": "793",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "kubernetes/test/test_v1_event.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "5855378"
},
{
"name": "Shell",
"bytes": "16387"
}
],
"symlink_target": ""
}
|
import RPi.GPIO as GPIO
from timeit import default_timer as timer
import ctypes
import logging
from signal import pause
logger = logging.getLogger(__name__)
c_uint8 = ctypes.c_uint8
class Flags_bits(ctypes.LittleEndianStructure):
    """Bit-field view of one byte: bit 0 is encoder input A, bit 1 input B."""
    _fields_ = [
        ("A", c_uint8, 1), # asByte & 1
        ("B", c_uint8, 1), # asByte & 2
    ]
class Flags(ctypes.Union):
    """Union overlaying Flags_bits with the raw byte (``asByte``).

    The anonymous bit-field lets callers set ``.A``/``.B`` individually while
    ``.asByte`` exposes the combined value for state-table lookups.
    """
    _anonymous_ = ("bit",)
    _fields_ = [
        ("bit", Flags_bits),
        ("asByte", c_uint8)
    ]
class RotaryEncoder:
    """Decode a quadrature rotary encoder attached to two GPIO pins.

    Every edge on either pin feeds the two input levels into a state machine
    (``tblEncoder``); when the machine reaches the increment/decrement
    terminal state the matching callback is invoked with a step count that
    grows with rotation speed (see ``_StepSize``).
    """

    # select Enocder state bits
    KeyIncr = 0b00000010
    KeyDecr = 0b00000001

    # State-transition table: indexed by the current state byte (whose low
    # two bits were just overwritten with the new A/B input levels), yields
    # the next state byte.
    tblEncoder = [
        0b00000011, 0b00000111, 0b00010011, 0b00000011,
        0b00001011, 0b00000111, 0b00000011, 0b00000011,
        0b00001011, 0b00000111, 0b00001111, 0b00000011,
        0b00001011, 0b00000011, 0b00001111, 0b00000001,
        0b00010111, 0b00000011, 0b00010011, 0b00000011,
        0b00010111, 0b00011011, 0b00010011, 0b00000011,
        0b00010111, 0b00011011, 0b00000011, 0b00000010]

    def __init__(self, pinA, pinB, functionCallIncr=None, functionCallDecr=None, timeBase=0.1,
                 name='RotaryEncoder'):
        """Configure both pins as pulled-up inputs and start edge detection.

        Args:
            pinA: GPIO pin of encoder channel A.
            pinB: GPIO pin of encoder channel B.
            functionCallIncr: callable(steps) fired on an increment event.
            functionCallDecr: callable(steps) fired on a decrement event.
            timeBase: time constant (seconds) scaling steps by turn speed.
            name: label used in log messages and in ``repr``.
        """
        logger.debug('Initialize {name} RotaryEncoder({arg_Apin}, {arg_Bpin})'.format(
            arg_Apin=pinA,
            arg_Bpin=pinB,
            name=name if name is not None else ''
        ))
        self.name = name
        # persist values
        self.pinA = pinA
        self.pinB = pinB
        self.functionCallbackIncr = functionCallIncr
        self.functionCallbackDecr = functionCallDecr
        self.timeBase = timeBase
        self.encoderState = Flags()  # stores the encoder state machine state
        self.startTime = timer()
        # setup pins
        GPIO.setup(self.pinA, GPIO.IN, pull_up_down=GPIO.PUD_UP)
        GPIO.setup(self.pinB, GPIO.IN, pull_up_down=GPIO.PUD_UP)
        self._is_active = False
        self.start()

    def __repr__(self):
        """Debug representation showing pins, time base and active flag."""
        # NOTE(review): the trailing literal '%s' and the 'timBase' spelling
        # look like leftovers -- the rendered repr contains a raw '%s'.
        # Left unchanged here; fix in a code change if desired.
        repr_str = '<{class_name}{object_name} on pin_a {pin_a},' + \
                   ' pin_b {pin_b},timBase {time_base} is_active={is_active}%s>'
        return repr_str.format(
            class_name=self.__class__.__name__,
            object_name=':{}'.format(self.name) if self.name is not None else '',
            pin_a=self.pinA,
            pin_b=self.pinB,
            time_base=self.timeBase,
            is_active=self.is_active)

    def start(self):
        """Register edge-detection callbacks on both encoder pins."""
        logger.debug('Start Event Detection on {} and {}'.format(self.pinA, self.pinB))
        self._is_active = True
        GPIO.add_event_detect(self.pinA, GPIO.BOTH, callback=self._Callback)
        GPIO.add_event_detect(self.pinB, GPIO.BOTH, callback=self._Callback)

    def stop(self):
        """Remove the edge-detection callbacks and mark the encoder inactive."""
        logger.debug('Stop Event Detection on {} and {}'.format(self.pinA, self.pinB))
        GPIO.remove_event_detect(self.pinA)
        GPIO.remove_event_detect(self.pinB)
        self._is_active = False

    def __del__(self):
        # Best-effort cleanup: detach the GPIO callbacks on garbage collection.
        if self.is_active:
            self.stop()

    @property
    def is_active(self):
        # True while GPIO event detection is registered.
        return self._is_active

    def _StepSize(self):
        """Return a step count that grows with rotation speed.

        Computes ``int(timeBase / elapsed) + 1`` where *elapsed* is the time
        since the previous event, so slow turning yields 1 and fast turning
        yields larger steps.
        """
        end = timer()
        duration = end - self.startTime
        self.startTime = end
        # NOTE(review): a zero ``duration`` would raise ZeroDivisionError;
        # presumably impossible with the high-resolution timer -- confirm.
        return int(self.timeBase / duration) + 1

    def _Callback(self, pin):
        """GPIO edge handler: advance the state machine, fire a callback.

        ``pin`` identifies which channel triggered, but both inputs are
        re-read so the state update is the same either way.
        """
        logger.debug('EventDetection Called')
        # construct new state machine input from encoder state and old state
        statusA = GPIO.input(self.pinA)
        statusB = GPIO.input(self.pinB)
        self.encoderState.A = statusA
        self.encoderState.B = statusB
        logger.debug('new encoderState: "{}" -> {}, {},{}'.format(
            self.encoderState.asByte,
            self.tblEncoder[self.encoderState.asByte], statusA, statusB
        ))
        # Look up the next state from the transition table.
        current_state = self.encoderState.asByte
        self.encoderState.asByte = self.tblEncoder[current_state]
        if self.KeyIncr == self.encoderState.asByte:
            steps = self._StepSize()
            logger.info('{name}: Calling functionIncr {steps}'.format(
                name=self.name, steps=steps))
            self.functionCallbackIncr(steps)
        elif self.KeyDecr == self.encoderState.asByte:
            steps = self._StepSize()
            logger.info('{name}: Calling functionDecr {steps}'.format(
                name=self.name, steps=steps))
            self.functionCallbackDecr(steps)
        else:
            # Intermediate state: no complete detent step yet.
            logger.debug('Ignoring encoderState: "{}"'.format(self.encoderState.asByte))
if __name__ == "__main__":
    # Manual test harness: build an encoder on two user-supplied pins and
    # print the increment/decrement callbacks as the knob is turned.
    logging.basicConfig(level='INFO')
    GPIO.setmode(GPIO.BCM)
    pin1 = int(input('please enter first pin'))
    pin2 = int(input('please enter second pin'))
    func1 = lambda *args: print('Function Incr executed with {}'.format(args))
    func2 = lambda *args: print('Function Decr executed with {}'.format(args))
    rotarty_encoder = RotaryEncoder(pin1, pin2, func1, func2)
    print('running')
    # Block forever; all work happens in the GPIO callback thread.
    pause()
|
{
"content_hash": "728974af48c61560c1407f3a74164a7c",
"timestamp": "",
"source": "github",
"line_count": 141,
"max_line_length": 94,
"avg_line_length": 34.47517730496454,
"alnum_prop": 0.607899609133923,
"repo_name": "MiczFlor/RPi-Jukebox-RFID",
"id": "1528b625fdd9241ecbc4eedd94f77bb071b1768a",
"size": "5201",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "components/gpio_control/GPIODevices/rotary_encoder.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "5479"
},
{
"name": "CSS",
"bytes": "30300"
},
{
"name": "Hack",
"bytes": "11062"
},
{
"name": "JavaScript",
"bytes": "21303"
},
{
"name": "PHP",
"bytes": "390288"
},
{
"name": "Python",
"bytes": "213658"
},
{
"name": "Shell",
"bytes": "355597"
}
],
"symlink_target": ""
}
|
import os
def GetListOfFiles(dirpath):
    """Recursively collect the full paths of all files under *dirpath*.

    Args:
        dirpath: Root directory to scan.

    Returns:
        list: Full path of every non-directory entry in the tree.
        Symlinked directories are descended into (``followlinks=True``),
        matching the original ``os.listdir``/``os.path.isdir`` recursion.
    """
    # os.walk performs the recursion in a single pass and avoids the
    # quadratic ``all_files = all_files + ...`` list concatenation of the
    # original implementation.
    return [
        os.path.join(root, name)
        for root, _dirs, files in os.walk(dirpath, followlinks=True)
        for name in files
    ]
|
{
"content_hash": "6286d5c2f772de45fd6faeb7915e122f",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 61,
"avg_line_length": 25.285714285714285,
"alnum_prop": 0.6073446327683616,
"repo_name": "msbeta/apollo",
"id": "1c6ad5585c52e6fa27aeed57e79a341943fe9b92",
"size": "1115",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modules/tools/prediction/learning_algorithms/utilities/IO_utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "1117"
},
{
"name": "C",
"bytes": "23596"
},
{
"name": "C++",
"bytes": "15304648"
},
{
"name": "CMake",
"bytes": "3601"
},
{
"name": "CSS",
"bytes": "39401"
},
{
"name": "Cuda",
"bytes": "91842"
},
{
"name": "Dockerfile",
"bytes": "2032"
},
{
"name": "GLSL",
"bytes": "7033"
},
{
"name": "HTML",
"bytes": "21068"
},
{
"name": "JavaScript",
"bytes": "331412"
},
{
"name": "Python",
"bytes": "1644200"
},
{
"name": "Shell",
"bytes": "273395"
},
{
"name": "Smarty",
"bytes": "33099"
}
],
"symlink_target": ""
}
|
'''OpenGL extension VERSION.GL_3_0
This module customises the behaviour of the
OpenGL.raw.GL.VERSION.GL_3_0 to provide a more
Python-friendly API
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/VERSION/GL_3_0.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GL import _types, _glgets
from OpenGL.raw.GL.VERSION.GL_3_0 import *
from OpenGL.raw.GL.VERSION.GL_3_0 import _EXTENSION_NAME
def glInitGl30VERSION():
    """Return a boolean indicating whether the GL 3.0 "extension" is available."""
    from OpenGL import extensions
    is_available = extensions.hasGLExtension(_EXTENSION_NAME)
    return is_available
glGetBooleani_v=wrapper.wrapper(glGetBooleani_v).setOutput(
'data',size=_glgets._glget_size_mapping,pnameArg='target',orPassIn=True
)
glGetIntegeri_v=wrapper.wrapper(glGetIntegeri_v).setOutput(
'data',size=_glgets._glget_size_mapping,pnameArg='target',orPassIn=True
)
# INPUT glTransformFeedbackVaryings.varyings size not checked against count
glTransformFeedbackVaryings=wrapper.wrapper(glTransformFeedbackVaryings).setInputArraySize(
'varyings', None
)
glGetTransformFeedbackVarying=wrapper.wrapper(glGetTransformFeedbackVarying).setOutput(
'length',size=(1,),orPassIn=True
).setOutput(
'type',size=(1,),orPassIn=True
).setOutput(
'name',size=lambda x:(x,),pnameArg='bufSize',orPassIn=True
).setOutput(
'size',size=(1,),orPassIn=True
)
# INPUT glVertexAttribIPointer.pointer size not checked against 'size,type,stride'
glVertexAttribIPointer=wrapper.wrapper(glVertexAttribIPointer).setInputArraySize(
'pointer', None
)
glGetVertexAttribIiv=wrapper.wrapper(glGetVertexAttribIiv).setOutput(
'params',size=(1,),orPassIn=True
)
glGetVertexAttribIuiv=wrapper.wrapper(glGetVertexAttribIuiv).setOutput(
'params',size=(1,),orPassIn=True
)
glVertexAttribI1iv=wrapper.wrapper(glVertexAttribI1iv).setInputArraySize(
'v', 1
)
glVertexAttribI2iv=wrapper.wrapper(glVertexAttribI2iv).setInputArraySize(
'v', 2
)
glVertexAttribI3iv=wrapper.wrapper(glVertexAttribI3iv).setInputArraySize(
'v', 3
)
glVertexAttribI4iv=wrapper.wrapper(glVertexAttribI4iv).setInputArraySize(
'v', 4
)
glVertexAttribI1uiv=wrapper.wrapper(glVertexAttribI1uiv).setInputArraySize(
'v', 1
)
glVertexAttribI2uiv=wrapper.wrapper(glVertexAttribI2uiv).setInputArraySize(
'v', 2
)
glVertexAttribI3uiv=wrapper.wrapper(glVertexAttribI3uiv).setInputArraySize(
'v', 3
)
glVertexAttribI4uiv=wrapper.wrapper(glVertexAttribI4uiv).setInputArraySize(
'v', 4
)
glVertexAttribI4bv=wrapper.wrapper(glVertexAttribI4bv).setInputArraySize(
'v', 4
)
glVertexAttribI4sv=wrapper.wrapper(glVertexAttribI4sv).setInputArraySize(
'v', 4
)
glVertexAttribI4ubv=wrapper.wrapper(glVertexAttribI4ubv).setInputArraySize(
'v', 4
)
glVertexAttribI4usv=wrapper.wrapper(glVertexAttribI4usv).setInputArraySize(
'v', 4
)
# OUTPUT glGetUniformuiv.params COMPSIZE(program,location)
# INPUT glBindFragDataLocation.name size not checked against 'name'
glBindFragDataLocation=wrapper.wrapper(glBindFragDataLocation).setInputArraySize(
'name', None
)
# INPUT glGetFragDataLocation.name size not checked against 'name'
glGetFragDataLocation=wrapper.wrapper(glGetFragDataLocation).setInputArraySize(
'name', None
)
# INPUT glUniform1uiv.value size not checked against count
glUniform1uiv=wrapper.wrapper(glUniform1uiv).setInputArraySize(
'value', None
)
# INPUT glUniform2uiv.value size not checked against None
glUniform2uiv=wrapper.wrapper(glUniform2uiv).setInputArraySize(
'value', None
)
# INPUT glUniform3uiv.value size not checked against None
glUniform3uiv=wrapper.wrapper(glUniform3uiv).setInputArraySize(
'value', None
)
# INPUT glUniform4uiv.value size not checked against None
glUniform4uiv=wrapper.wrapper(glUniform4uiv).setInputArraySize(
'value', None
)
# INPUT glTexParameterIiv.params size not checked against 'pname'
glTexParameterIiv=wrapper.wrapper(glTexParameterIiv).setInputArraySize(
'params', None
)
# INPUT glTexParameterIuiv.params size not checked against 'pname'
glTexParameterIuiv=wrapper.wrapper(glTexParameterIuiv).setInputArraySize(
'params', None
)
glGetTexParameterIiv=wrapper.wrapper(glGetTexParameterIiv).setOutput(
'params',size=_glgets._glget_size_mapping,pnameArg='pname',orPassIn=True
)
glGetTexParameterIuiv=wrapper.wrapper(glGetTexParameterIuiv).setOutput(
'params',size=_glgets._glget_size_mapping,pnameArg='pname',orPassIn=True
)
# INPUT glClearBufferiv.value size not checked against 'buffer'
glClearBufferiv=wrapper.wrapper(glClearBufferiv).setInputArraySize(
'value', None
)
# INPUT glClearBufferuiv.value size not checked against 'buffer'
glClearBufferuiv=wrapper.wrapper(glClearBufferuiv).setInputArraySize(
'value', None
)
# INPUT glClearBufferfv.value size not checked against 'buffer'
glClearBufferfv=wrapper.wrapper(glClearBufferfv).setInputArraySize(
'value', None
)
# INPUT glDeleteRenderbuffers.renderbuffers size not checked against n
glDeleteRenderbuffers=wrapper.wrapper(glDeleteRenderbuffers).setInputArraySize(
'renderbuffers', None
)
glGenRenderbuffers=wrapper.wrapper(glGenRenderbuffers).setOutput(
'renderbuffers',size=lambda x:(x,),pnameArg='n',orPassIn=True
)
glGetRenderbufferParameteriv=wrapper.wrapper(glGetRenderbufferParameteriv).setOutput(
'params',size=_glgets._glget_size_mapping,pnameArg='pname',orPassIn=True
)
# INPUT glDeleteFramebuffers.framebuffers size not checked against n
glDeleteFramebuffers=wrapper.wrapper(glDeleteFramebuffers).setInputArraySize(
'framebuffers', None
)
glGenFramebuffers=wrapper.wrapper(glGenFramebuffers).setOutput(
'framebuffers',size=lambda x:(x,),pnameArg='n',orPassIn=True
)
glGetFramebufferAttachmentParameteriv=wrapper.wrapper(glGetFramebufferAttachmentParameteriv).setOutput(
'params',size=_glgets._glget_size_mapping,pnameArg='pname',orPassIn=True
)
# INPUT glDeleteVertexArrays.arrays size not checked against n
glDeleteVertexArrays=wrapper.wrapper(glDeleteVertexArrays).setInputArraySize(
'arrays', None
)
glGenVertexArrays=wrapper.wrapper(glGenVertexArrays).setOutput(
'arrays',size=lambda x:(x,),pnameArg='n',orPassIn=True
)
### END AUTOGENERATED SECTION
# glGetStringi returns a C string pointer; expose it to Python callers.
from ctypes import c_char_p
glGetStringi.restype = c_char_p
# Pull in the ARB extensions whose functionality is part of OpenGL 3.0 so
# their entry points are importable from this module as well.
from OpenGL.GL.ARB.vertex_array_object import *
from OpenGL.GL.ARB.texture_buffer_object import *
from OpenGL.GL.ARB.framebuffer_object import *
from OpenGL.GL.ARB.map_buffer_range import *
# Re-wrap glGenVertexArrays with the output-array convenience behaviour --
# presumably because the star-imports above rebound the name and dropped
# the wrapper created in the autogenerated section; confirm if changing.
glGenVertexArrays = wrapper.wrapper(glGenVertexArrays).setOutput(
    'arrays', lambda n: (n,), 'n', arrayType = arrays.GLuintArray,
    orPassIn=True,
)
|
{
"content_hash": "7ce294de751042646bad520965e4f3a8",
"timestamp": "",
"source": "github",
"line_count": 177,
"max_line_length": 103,
"avg_line_length": 37.644067796610166,
"alnum_prop": 0.7978388113462405,
"repo_name": "alexus37/AugmentedRealityChess",
"id": "ab0b713b70f8c153f3eb9f0633ea744e7e8b7916",
"size": "6663",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "pythonAnimations/pyOpenGLChess/engineDirectory/oglc-env/lib/python2.7/site-packages/OpenGL/GL/VERSION/GL_3_0.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "158062"
},
{
"name": "C++",
"bytes": "267993"
},
{
"name": "CMake",
"bytes": "11319"
},
{
"name": "Fortran",
"bytes": "3707"
},
{
"name": "Makefile",
"bytes": "14618"
},
{
"name": "Python",
"bytes": "12813086"
},
{
"name": "Roff",
"bytes": "3310"
},
{
"name": "Shell",
"bytes": "3855"
}
],
"symlink_target": ""
}
|
from oslo_config import cfg
import six
from sahara import context
from sahara import exceptions as ex
from sahara.i18n import _
from sahara.utils.openstack import base as b
from sahara.utils.openstack import cinder as cinder_client
from sahara.utils.openstack import neutron as neutron_client
from sahara.utils.openstack import nova as nova_client
CONF = cfg.CONF
UNLIMITED = 'unlimited'
def _is_unlimited(limit):
return limit == -1
def _get_zero_limits():
return {
'ram': 0,
'cpu': 0,
'instances': 0,
'floatingips': 0,
'security_groups': 0,
'security_group_rules': 0,
'ports': 0,
'volumes': 0,
'volume_gbs': 0
}
def check_cluster(cluster):
    """Raise QuotaException if quotas cannot accommodate *cluster*."""
    _check_limits(_get_req_cluster_limits(cluster))
def check_scaling(cluster, to_be_enlarged, additional):
    """Raise QuotaException if quotas cannot absorb the requested scaling."""
    required = _get_req_scaling_limits(cluster, to_be_enlarged, additional)
    _check_limits(required)
def _check_limits(req_limits):
    """Compare required against available quotas; raise on any shortfall.

    Raises:
        ex.QuotaException: naming the resource, the requested amount and
        the amount actually available.
    """
    quota_display_names = {
        'ram': _("RAM"),
        'cpu': _("VCPU"),
        'instances': _("instance"),
        'floatingips': _("floating ip"),
        'security_groups': _("security group"),
        'security_group_rules': _("security group rule"),
        'ports': _("port"),
        'volumes': _("volume"),
        'volume_gbs': _("volume storage")
    }
    avail_limits = _get_avail_limits()
    for quota, display_name in quota_display_names.items():
        available = avail_limits[quota]
        if available == UNLIMITED:
            # Unbounded quota can never be exceeded.
            continue
        if available < req_limits[quota]:
            raise ex.QuotaException(display_name, req_limits[quota],
                                    available)
def _get_req_cluster_limits(cluster):
    """Total resources needed to stand up every node group of *cluster*."""
    required = _get_zero_limits()
    for node_group in cluster.node_groups:
        _update_limits_for_ng(required, node_group, node_group.count)
    return required
def _get_req_scaling_limits(cluster, to_be_enlarged, additional):
    """Resource delta needed to scale *cluster* to the requested counts.

    ``to_be_enlarged`` and ``additional`` both map node-group id to the new
    instance count; groups absent from both maps are left out of the delta.
    """
    target_counts = to_be_enlarged.copy()
    target_counts.update(additional)
    required = _get_zero_limits()
    for node_group in cluster.node_groups:
        new_count = target_counts.get(node_group.id)
        if new_count:
            _update_limits_for_ng(required, node_group,
                                  new_count - node_group.count)
    return required
def _update_limits_for_ng(limits, ng, count):
    """Add to *limits* the resources consumed by *count* instances of *ng*.

    *count* may be negative when scaling down; per-group boolean resources
    (the auto security group and its rules) are adjusted only by the sign
    of *count*.
    """
    def sign(value):
        return -1 if value < 0 else 1

    nova = nova_client.client()
    limits['instances'] += count
    flavor = b.execute_with_retries(nova.flavors.get, ng.flavor_id)
    limits['ram'] += flavor.ram * count
    limits['cpu'] += flavor.vcpus * count
    # tmckay-fp this is fine, it will be zero without it
    if ng.floating_ip_pool:
        limits['floatingips'] += count
    if ng.volumes_per_node:
        volume_count = ng.volumes_per_node * count
        limits['volumes'] += volume_count
        limits['volume_gbs'] += volume_count * ng.volumes_size
    if ng.auto_security_group:
        limits['security_groups'] += sign(count)
        # NOTE: +3 - all traffic for private network
        rule_count = len(ng.open_ports) + 3
        if CONF.use_neutron:
            limits['security_group_rules'] += rule_count * sign(count)
        else:
            # nova-network quotas count rules per group, so track the
            # largest single group instead of a running sum.
            limits['security_group_rules'] = max(
                limits['security_group_rules'], rule_count)
    if CONF.use_neutron:
        limits['ports'] += count
def _get_avail_limits():
    """Currently available (total minus used) quotas across all services."""
    available = _get_zero_limits()
    available.update(_get_nova_limits())
    available.update(_get_neutron_limits())
    # Cinder is optional; only query it when the service is deployed.
    if cinder_client.check_cinder_exists():
        available.update(_get_cinder_limits())
    return available
def _sub_limit(total, used):
    """Return the remaining quota, or ``'unlimited'`` when *total* is -1.

    -1 is the OpenStack convention for an unbounded quota (module constant
    ``UNLIMITED``); the check is inlined here to keep this self-contained.
    """
    if total == -1:
        return 'unlimited'
    return total - used
def _get_nova_limits():
    """Available (total minus used) compute quotas from Nova.

    Always reports ram/cpu/instances; floating-ip and security-group
    figures are taken from Nova only when Neutron is not in use (with
    Neutron they come from _get_neutron_limits instead).
    """
    limits = {}
    nova = nova_client.client()
    # 'absolute' holds the flat max*/total*Used counters.
    lim = b.execute_with_retries(nova.limits.get).to_dict()['absolute']
    limits['ram'] = _sub_limit(lim['maxTotalRAMSize'], lim['totalRAMUsed'])
    limits['cpu'] = _sub_limit(lim['maxTotalCores'], lim['totalCoresUsed'])
    limits['instances'] = _sub_limit(lim['maxTotalInstances'],
                                     lim['totalInstancesUsed'])
    if CONF.use_neutron:
        return limits
    # tmckay-fp here we would just get the limits all the time
    limits['floatingips'] = _sub_limit(lim['maxTotalFloatingIps'],
                                       lim['totalFloatingIpsUsed'])
    limits['security_groups'] = _sub_limit(lim['maxSecurityGroups'],
                                           lim['totalSecurityGroupsUsed'])
    # Nova does not report used security-group rules, so none are deducted.
    limits['security_group_rules'] = _sub_limit(lim['maxSecurityGroupRules'],
                                                0)
    return limits
def _get_neutron_limits():
    """Available (total minus used) network quotas from Neutron.

    Returns an empty dict when Neutron is not in use; usage is measured by
    listing the tenant's existing resources, since Neutron quotas report
    totals only.
    """
    limits = {}
    if not CONF.use_neutron:
        return limits
    neutron = neutron_client.client()
    tenant_id = context.ctx().tenant_id
    total_lim = b.execute_with_retries(neutron.show_quota, tenant_id)['quota']
    # tmckay-fp here we would just get the limits all the time
    usage_fip = b.execute_with_retries(
        neutron.list_floatingips, tenant_id=tenant_id)['floatingips']
    limits['floatingips'] = _sub_limit(total_lim['floatingip'],
                                       len(usage_fip))
    usage_sg = b.execute_with_retries(
        neutron.list_security_groups, tenant_id=tenant_id).get(
        'security_groups', [])
    limits['security_groups'] = _sub_limit(total_lim['security_group'],
                                           len(usage_sg))
    usage_sg_rules = b.execute_with_retries(
        neutron.list_security_group_rules, tenant_id=tenant_id).get(
        'security_group_rules', [])
    limits['security_group_rules'] = _sub_limit(
        total_lim['security_group_rule'], len(usage_sg_rules))
    usage_ports = b.execute_with_retries(
        neutron.list_ports, tenant_id=tenant_id)['ports']
    limits['ports'] = _sub_limit(total_lim['port'], len(usage_ports))
    return limits
def _get_cinder_limits():
    """Available (total minus used) volume quotas from Cinder."""
    cinder = cinder_client.client()
    # Flatten the list of limit objects into a name -> value mapping.
    absolute = {}
    for limit in b.execute_with_retries(cinder.limits.get).absolute:
        absolute[limit.name] = limit.value
    return {
        'volumes': _sub_limit(absolute['maxTotalVolumes'],
                              absolute['totalVolumesUsed']),
        'volume_gbs': _sub_limit(absolute['maxTotalVolumeGigabytes'],
                                 absolute['totalGigabytesUsed']),
    }
|
{
"content_hash": "1b046d2c863d93cbb07e479b09e38ab3",
"timestamp": "",
"source": "github",
"line_count": 193,
"max_line_length": 78,
"avg_line_length": 33.0880829015544,
"alnum_prop": 0.5974005637331663,
"repo_name": "tellesnobrega/sahara",
"id": "563cfe45f806c77cff0aa0b755e4c1f4a8e505ca",
"size": "6966",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "sahara/service/quotas.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "952"
},
{
"name": "Python",
"bytes": "3354711"
},
{
"name": "Shell",
"bytes": "56856"
}
],
"symlink_target": ""
}
|
# Package version and author metadata.
__version__ = '1.3.7'
__author__ = 'Adrian Sampson <adrian@radbox.org>'
import beets.library
from beets.util import confit
# Convenience alias so callers can use ``beets.Library`` directly.
Library = beets.library.Library
# Global configuration object; loaded lazily on first access.
config = confit.LazyConfig('beets', __name__)
|
{
"content_hash": "c71f9cd0b9bf58fb01f39eef5df3fc6c",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 49,
"avg_line_length": 22.666666666666668,
"alnum_prop": 0.7058823529411765,
"repo_name": "accesso/beets",
"id": "d4f4c5db83e4a3e63fc810e02500acedb6a6d8e0",
"size": "851",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "beets/__init__.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
}
|
from __future__ import division
import math
import numpy as np
np.set_printoptions(precision=3, suppress=True)
import rospy
import roslib
roslib.load_manifest('vehicle_core')
import tf.transformations as tft
from vehicle_core.path import trajectory_tools as tt
from auv_msgs.msg import NavSts
from diagnostic_msgs.msg import KeyValue
from geometry_msgs.msg import PoseStamped, WrenchStamped
from visualization_msgs.msg import Marker, MarkerArray
from vehicle_interface.msg import PathRequest, PathStatus, PilotRequest, PilotStatus, Vector6Stamped, Vector6
# topics
TOPIC_NAV = 'nav/nav_sts'
TOPIC_PILOT_POS = 'pilot/position_req'
TOPIC_PILOT_STS = 'pilot/status'
TOPIC_FORCES = 'pilot/forces'
TOPIC_WRENCH = 'pilot/wrench'
TOPIC_PATH_REQ = 'path/request'
TOPIC_PATH_STS = 'path/status'
TOPIC_POSE2D = '/move_base_simple/goal'
TOPIC_MARKER = 'hmi/marker'
# modes
MODE_POSITION = 'position'
MODE_PATH = 'path'
# config
DEFAULT_SPACING = 4.0 # meters
DEFAULT_PROXIMITY = 8.0 # meters
DEFAULT_REFRESH = 0.5 # seconds
TEXT_STATUS = """
Pilot: {} ({})
Path: {} ({})
"""
class RVizInterface(object):
def __init__(self, name, **kwargs):
self.name = name
# internal state
self.mode = MODE_PATH
self.pos = np.zeros(6)
self.vel = np.zeros(6)
self.status_pilot = None
self.status_path = None
# ros interface
self.sub_poses = rospy.Subscriber(TOPIC_POSE2D, PoseStamped, self.handle_poses, queue_size=1)
self.pub_marker = rospy.Publisher(TOPIC_MARKER, Marker, queue_size=1)
self.sub_nav = rospy.Subscriber(TOPIC_NAV, NavSts, self.handle_nav, queue_size=10)
self.sub_pilot = rospy.Subscriber(TOPIC_PILOT_STS, PilotStatus, self.handle_pilot_status, queue_size=1)
self.sub_path = rospy.Subscriber(TOPIC_PATH_STS, PathStatus, self.handle_path_status, queue_size=1)
#self.pub_wrench = rospy.Publisher(TOPIC_WRENCH, WrenchStamped, queue_size=1)
#self.sub_forces = rospy.Subscriber(TOPIC_FORCES, Vector6Stamped, self.handle_forces, queue_size=1, tcp_nodelay=True)
self.pub_pilot = rospy.Publisher(TOPIC_PILOT_POS, PilotRequest, queue_size=1)
self.pub_path = rospy.Publisher(TOPIC_PATH_REQ, PathRequest, queue_size=1)
# timers
self.t_hmi = rospy.Timer(rospy.Duration(DEFAULT_REFRESH), self.send_text_marker)
# user log
rospy.loginfo('%s: started in %s mode ...', self.name, self.mode)
def handle_nav(self, data):
# parse navigation data
self.pos = np.array([
data.position.north,
data.position.east,
data.position.depth,
data.orientation.roll,
data.orientation.pitch,
data.orientation.yaw
])
self.vel = np.array([
data.body_velocity.x,
data.body_velocity.y,
data.body_velocity.z,
data.orientation_rate.roll,
data.orientation_rate.pitch,
data.orientation_rate.yaw
])
def handle_pilot_status(self, data):
self.status_pilot = data
def handle_path_status(self, data):
self.status_path = data
# def handle_forces(self, data):
# # send wrench for rviz visualizer
# ws = WrenchStamped()
# ws.header.stamp = rospy.Time.now()
# ws.header.frame_id = 'base_link'
# ws.wrench.force.x = data.values[0]
# ws.wrench.force.y = data.values[1]
# ws.wrench.force.z = data.values[2]
# ws.wrench.torque.x = data.values[3]
# ws.wrench.torque.y = data.values[4]
# ws.wrench.torque.z = data.values[5]
#
# self.pub_wrench.publish(ws)
def handle_poses(self, data):
# parse data
pose = np.zeros(6)
# transform quaternions to euler angles
quat = np.zeros(4)
quat[0] = data.pose.orientation.x
quat[1] = data.pose.orientation.y
quat[2] = data.pose.orientation.z
quat[3] = data.pose.orientation.w
orientation = tft.euler_from_quaternion(quat)
# conversion from XYZ to NED
pose[0] = data.pose.position.x
pose[1] = -data.pose.position.y
pose[2] = -data.pose.position.z
pose[3] = orientation[0]
pose[4] = -orientation[1]
pose[5] = -orientation[2]
if self.mode == MODE_POSITION:
self.send_position_req(pose)
else:
self.send_path_req(pose)
def send_position_req(self, position):
# user log
rospy.loginfo('%s: sending position request: %s', self.name, position)
# send pilot request
msg = PilotRequest()
msg.header.stamp = rospy.Time.now()
msg.position = position
self.pub_pilot.publish(msg)
    def send_path_req(self, goal):
        """Generate a waypoint path from the current pose to ``goal`` and
        publish it as a PathRequest.

        Short legs get a straight interpolated line; longer legs get a
        cubic bezier so the vehicle turns smoothly onto and off the leg.
        """
        # path info
        distance = tt.distance_between(self.pos, goal)
        if distance <= DEFAULT_PROXIMITY:
            # generate linear path (close goal: straight leg, facing the goal)
            mode = 'lines'
            wps = tt.interpolate_leg(self.pos, goal, face_goal=True, spacing=DEFAULT_SPACING, dimensions=2)
        else:
            # generate smooth path
            mode = 'fast'
            # bezier control points as (distance, heading) from the start and
            # goal yaws -- presumably 6.0 m control-arm length; TODO confirm
            # against the tt.format_bezier_input contract
            p1 = (6.0, self.pos[5])
            p2 = (6.0, goal[5])
            # at least 100 interpolation steps regardless of leg length
            steps = max(math.floor(distance / DEFAULT_SPACING), 100)
            points = tt.format_bezier_input(self.pos, p1, p2, goal, degrees=False)
            wps = tt.interpolate_bezier_cubic(points, steps=steps)
        # user log
        rospy.loginfo('%s: sending %s path request: %s', self.name, mode, goal)
        # send new path
        msg = PathRequest()
        msg.header.stamp = rospy.Time.now()
        msg.command = 'path'
        msg.points = [Vector6(wp) for wp in wps]
        msg.options = [
            KeyValue('mode', mode),
            KeyValue('target_speed', '1.00'),
            #KeyValue('look_ahead', '5.0'),
        ]
        self.pub_path.publish(msg)
def send_text_marker(self, event=None):
if self.status_pilot is None or self.status_path is None:
return
mm = Marker()
mm.header.stamp = rospy.Time.now()
mm.header.frame_id = 'map'
mm.ns = 'hmi'
mm.id = 0
mm.action = Marker.ADD
mm.lifetime = rospy.Duration(1, 0)
mm.type = Marker.TEXT_VIEW_FACING
mm.scale.z = 0.20
mm.color.r = 1.0
mm.color.g = 1.0
mm.color.b = 1.0
mm.color.a = 1.0
mm.pose.position.x = self.pos[0]
mm.pose.position.y = -(self.pos[1])
mm.pose.position.z = -(self.pos[2] + 1.0)
mm.text = TEXT_STATUS.format(
self.status_pilot.status, self.status_pilot.mode,
self.status_path.path_status, self.status_path.navigation_status
)
self.pub_marker.publish(mm)
if __name__ == '__main__':
    rospy.init_node('rviz_interface')
    rospy.loginfo('%s: node init ... ', rospy.get_name())
    # all work happens in subscriber/timer callbacks; spin until shutdown
    node = RVizInterface(rospy.get_name())
    rospy.spin()
|
{
"content_hash": "eebf73e559ce3ed97311a3cb06ad23c8",
"timestamp": "",
"source": "github",
"line_count": 234,
"max_line_length": 125,
"avg_line_length": 30.517094017094017,
"alnum_prop": 0.600476123792186,
"repo_name": "decabyte/vehicle_core",
"id": "d3d0deb1af9f4f065d1aefd51cc26b0891dd4f82",
"size": "8961",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/node_rviz_interface.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "CMake",
"bytes": "1203"
},
{
"name": "Makefile",
"bytes": "43"
},
{
"name": "Python",
"bytes": "327502"
},
{
"name": "Shell",
"bytes": "38601"
}
],
"symlink_target": ""
}
|
from app.bot import BaseConverter
import apiai
import json
class ApiAiConverter(BaseConverter):
    """Resolve a user query to a bot action via the api.ai NLU service."""

    def __init__(self, client_access_token):
        self.ai = apiai.ApiAI(client_access_token)
        self.request = self.ai.text_request()
        # BUG FIX: the api.ai TextRequest attribute is ``lang`` -- the previous
        # ``land`` was a typo that left the request language at the SDK default
        # instead of forcing English.
        self.request.lang = 'en'

    @staticmethod
    def getAction(response):
        """Return ``response['result']['action']`` if present, else ``None``."""
        result = response.get('result', {})
        return result.get('action')

    def convert(self, request):
        """Send ``request['query']`` to api.ai and return (action, raw response).

        :param request: dict with ``session_id`` and ``query`` keys.
        :returns: tuple of the extracted action (or ``None``) and the parsed
                  JSON response dict.
        """
        self.request.session_id = request['session_id']
        self.request.query = request['query']
        response = json.loads(self.request.getresponse().read())
        return ApiAiConverter.getAction(response), response
|
{
"content_hash": "680dc2f0578223a16314338d47ca7116",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 59,
"avg_line_length": 26.64516129032258,
"alnum_prop": 0.6089588377723971,
"repo_name": "VikasSherawat/hotdesk",
"id": "885813a4da964ea87a4ecc25c6052c193029682a",
"size": "826",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "app/botimpl/converters.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "297"
},
{
"name": "HTML",
"bytes": "1491"
},
{
"name": "Python",
"bytes": "25567"
}
],
"symlink_target": ""
}
|
from django import template
from django.core.exceptions import ObjectDoesNotExist
from django.utils.html import escape
from django.utils.safestring import mark_safe
from django.template.defaultfilters import urlize, linebreaks
from django.contrib.markup.templatetags.markup import markdown, restructuredtext
from pubman import settings
from pubman.models import MediaObject, SLIDESHOW
register = template.Library()
@register.simple_tag
def format_content(value, format):
    """Render ``value`` according to its markup ``format`` code.

    'M' = Markdown (with the configured extensions), 'R' = reStructuredText,
    'H' = trusted raw HTML; anything else is escaped plain text with
    urlized links and paragraph breaks.
    """
    if format == 'M':  # markdown
        return markdown(value, settings.MARKDOWN_EXTENSIONS)
    if format == 'R':
        return restructuredtext(value)
    if format == 'H':
        return mark_safe(value)
    return linebreaks(urlize(escape(value)))
format_content.needs_autoescape = True
@register.simple_tag
def display_media(mediaobject,
                  width=settings.DEFAULT_IMAGE_WIDTH,
                  height=settings.DEFAULT_IMAGE_HEIGHT,
                  html_attributes_in_img_tag="",
                  link=1,
                  with_caption=0,
                  language=settings.ALL_LANGUAGES[settings.DEFAULT_LANGUAGE_INDEX][0],
                  image_to_display=SLIDESHOW):
    """Render a ``MediaObject`` (or its primary-key id) as HTML.

    ``mediaobject`` may be a ``MediaObject`` instance or anything coercible
    to an integer primary key. Wrong types and unknown ids fail silently by
    returning an empty string, per template-tag convention.
    """
    if mediaobject is None:  # identity test; was the non-idiomatic ``== None``
        return ""
    if not isinstance(mediaobject, MediaObject):
        try:
            # FIX: also catch TypeError -- previously a non-int-coercible type
            # (e.g. a list) raised instead of failing silently as documented.
            mediaobject = MediaObject.objects.get(id=int(mediaobject))
        except (TypeError, ValueError, ObjectDoesNotExist):
            return ""  # Fail silently if the object is wrong type or missing
    # Using *args instead of named arguments throws a Template Exception. Not sure why.
    return mediaobject.display_media(width,
                                     height,
                                     html_attributes_in_img_tag,
                                     link,
                                     with_caption,
                                     language,
                                     image_to_display)
display_media.needs_autoescape = True
|
{
"content_hash": "28d5e8f20e1ad3e942c5842a263f58e6",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 87,
"avg_line_length": 33.875,
"alnum_prop": 0.5880996309963099,
"repo_name": "nathangeffen/django-pubman",
"id": "030277547ff882dc5e468ec2f0da092bc7e11bc8",
"size": "2168",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pubman/templatetags/pubmantags.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "104810"
},
{
"name": "PHP",
"bytes": "286684"
},
{
"name": "Python",
"bytes": "209989"
}
],
"symlink_target": ""
}
|
"""GAX/GAPIC module for managing Speech API requests."""
from google.cloud.gapic.speech.v1beta1.speech_client import SpeechClient
from google.cloud.grpc.speech.v1beta1.cloud_speech_pb2 import RecognitionAudio
from google.cloud.grpc.speech.v1beta1.cloud_speech_pb2 import RecognitionConfig
from google.cloud.grpc.speech.v1beta1.cloud_speech_pb2 import SpeechContext
from google.cloud.grpc.speech.v1beta1.cloud_speech_pb2 import (
StreamingRecognitionConfig)
from google.cloud.grpc.speech.v1beta1.cloud_speech_pb2 import (
StreamingRecognizeRequest)
from google.longrunning import operations_grpc
from google.cloud._helpers import make_secure_channel
from google.cloud._helpers import make_secure_stub
from google.cloud._http import DEFAULT_USER_AGENT
from google.cloud.speech.alternative import Alternative
from google.cloud.speech.operation import Operation
OPERATIONS_API_HOST = 'speech.googleapis.com'
class GAPICSpeechAPI(object):
    """Manage calls through GAPIC wrappers to the Speech API."""

    def __init__(self, client=None):
        # NOTE(review): the ``client=None`` default is misleading -- the next
        # line dereferences ``client._connection`` and raises AttributeError
        # when no client is given; confirm callers always pass one.
        self._client = client
        credentials = self._client._connection.credentials
        channel = make_secure_channel(
            credentials, DEFAULT_USER_AGENT,
            SpeechClient.SERVICE_ADDRESS)
        self._gapic_api = SpeechClient(channel=channel)
        # separate stub for polling long-running (async) operations
        self._operations_stub = make_secure_stub(
            credentials,
            DEFAULT_USER_AGENT,
            operations_grpc.OperationsStub,
            OPERATIONS_API_HOST)

    @staticmethod
    def _request_pbs(sample, language_code, max_alternatives,
                     profanity_filter, speech_context):
        """Build the ``(RecognitionConfig, RecognitionAudio)`` request pair.

        Shared by :meth:`sync_recognize` and :meth:`async_recognize`, which
        previously duplicated this construction verbatim.

        :type sample: :class:`~google.cloud.speech.sample.Sample`
        :param sample: Instance of ``Sample`` containing audio information.

        :rtype: tuple
        :returns: ``(RecognitionConfig, RecognitionAudio)`` protobufs.
        """
        config = RecognitionConfig(
            encoding=sample.encoding, sample_rate=sample.sample_rate,
            language_code=language_code, max_alternatives=max_alternatives,
            profanity_filter=profanity_filter,
            speech_context=SpeechContext(phrases=speech_context))
        audio = RecognitionAudio(content=sample.content,
                                 uri=sample.source_uri)
        return config, audio

    def async_recognize(self, sample, language_code=None,
                        max_alternatives=None, profanity_filter=None,
                        speech_context=None):
        """Asynchronous Recognize request to Google Speech API.

        .. _async_recognize: https://cloud.google.com/speech/reference/\
                             rest/v1beta1/speech/asyncrecognize

        See `async_recognize`_.

        :type sample: :class:`~google.cloud.speech.sample.Sample`
        :param sample: Instance of ``Sample`` containing audio information.

        :type language_code: str
        :param language_code: (Optional) The language of the supplied audio as
                              BCP-47 language tag. Example: ``'en-GB'``.
                              If omitted, defaults to ``'en-US'``.

        :type max_alternatives: int
        :param max_alternatives: (Optional) Maximum number of recognition
                                 hypotheses to be returned. The server may
                                 return fewer than maxAlternatives.
                                 Valid values are 0-30. A value of 0 or 1
                                 will return a maximum of 1. Defaults to 1

        :type profanity_filter: bool
        :param profanity_filter: If True, the server will attempt to filter
                                 out profanities, replacing all but the
                                 initial character in each filtered word with
                                 asterisks, e.g. ``'f***'``. If False or
                                 omitted, profanities won't be filtered out.

        :type speech_context: list
        :param speech_context: A list of strings (max 50) containing words and
                               phrases "hints" so that the speech recognition
                               is more likely to recognize them. This can be
                               used to improve the accuracy for specific words
                               and phrases. This can also be used to add new
                               words to the vocabulary of the recognizer.

        :rtype: :class:`~google.cloud.speech.operation.Operation`
        :returns: Instance of ``Operation`` to poll for results.
        """
        config, audio = self._request_pbs(
            sample, language_code, max_alternatives, profanity_filter,
            speech_context)
        api = self._gapic_api
        response = api.async_recognize(config=config, audio=audio)
        return Operation.from_pb(response, self)

    def streaming_recognize(self, sample, language_code=None,
                            max_alternatives=None, profanity_filter=None,
                            speech_context=None, single_utterance=False,
                            interim_results=False):
        """Streaming speech recognition.

        .. note::
            Streaming recognition requests are limited to 1 minute of audio.
            See: https://cloud.google.com/speech/limits#content

        Yields :class:`~streaming_response.StreamingSpeechResponse` containing
        results and metadata from the streaming request.

        :type sample: :class:`~google.cloud.speech.sample.Sample`
        :param sample: Instance of ``Sample`` containing audio information.

        :type language_code: str
        :param language_code: (Optional) The language of the supplied audio as
                              BCP-47 language tag. Example: ``'en-GB'``.
                              If omitted, defaults to ``'en-US'``.

        :type max_alternatives: int
        :param max_alternatives: (Optional) Maximum number of recognition
                                 hypotheses to be returned. The server may
                                 return fewer than maxAlternatives.
                                 Valid values are 0-30. A value of 0 or 1
                                 will return a maximum of 1. Defaults to 1

        :type profanity_filter: bool
        :param profanity_filter: If True, the server will attempt to filter
                                 out profanities, replacing all but the
                                 initial character in each filtered word with
                                 asterisks, e.g. ``'f***'``. If False or
                                 omitted, profanities won't be filtered out.

        :type speech_context: list
        :param speech_context: A list of strings (max 50) containing words and
                               phrases "hints" so that the speech recognition
                               is more likely to recognize them. This can be
                               used to improve the accuracy for specific words
                               and phrases. This can also be used to add new
                               words to the vocabulary of the recognizer.

        :type single_utterance: bool
        :param single_utterance: (Optional) If false or omitted, the recognizer
                                 will perform continuous recognition until the
                                 client closes the output stream (gRPC API) or
                                 the maximum time limit has been reached.
                                 If true, the recognizer detects a single
                                 spoken utterance, returns an END_OF_UTTERANCE
                                 event and ceases recognition.

        :type interim_results: bool
        :param interim_results: (Optional) If true, interim results (tentative
                                hypotheses) may be returned as they become
                                available (these interim results are indicated
                                with the is_final=false flag). If false or
                                omitted, only is_final=true result(s) are
                                returned.

        :raises: :class:`ValueError` if sample.content is not a file-like
                 object. :class:`ValueError` if stream has closed.

        :rtype: :class:`~google.cloud.grpc.speech.v1beta1\
                        .cloud_speech_pb2.StreamingRecognizeResponse`
        :returns: ``StreamingRecognizeResponse`` instances.
        """
        # streaming needs a readable, still-open stream to chunk from
        if getattr(sample.content, 'closed', None) is None:
            raise ValueError('Please use file-like object for data stream.')
        if sample.content.closed:
            raise ValueError('Stream is closed.')

        requests = _stream_requests(sample, language_code=language_code,
                                    max_alternatives=max_alternatives,
                                    profanity_filter=profanity_filter,
                                    speech_context=speech_context,
                                    single_utterance=single_utterance,
                                    interim_results=interim_results)
        api = self._gapic_api
        responses = api.streaming_recognize(requests)
        return responses

    def sync_recognize(self, sample, language_code=None, max_alternatives=None,
                       profanity_filter=None, speech_context=None):
        """Synchronous Speech Recognition.

        .. _sync_recognize: https://cloud.google.com/speech/reference/\
                            rest/v1beta1/speech/syncrecognize

        See `sync_recognize`_.

        :type sample: :class:`~google.cloud.speech.sample.Sample`
        :param sample: Instance of ``Sample`` containing audio information.

        :type language_code: str
        :param language_code: (Optional) The language of the supplied audio as
                              BCP-47 language tag. Example: ``'en-GB'``.
                              If omitted, defaults to ``'en-US'``.

        :type max_alternatives: int
        :param max_alternatives: (Optional) Maximum number of recognition
                                 hypotheses to be returned. The server may
                                 return fewer than maxAlternatives.
                                 Valid values are 0-30. A value of 0 or 1
                                 will return a maximum of 1. Defaults to 1

        :type profanity_filter: bool
        :param profanity_filter: If True, the server will attempt to filter
                                 out profanities, replacing all but the
                                 initial character in each filtered word with
                                 asterisks, e.g. ``'f***'``. If False or
                                 omitted, profanities won't be filtered out.

        :type speech_context: list
        :param speech_context: A list of strings (max 50) containing words and
                               phrases "hints" so that the speech recognition
                               is more likely to recognize them. This can be
                               used to improve the accuracy for specific words
                               and phrases. This can also be used to add new
                               words to the vocabulary of the recognizer.

        :rtype: list
        :returns: A list of dictionaries. One dict for each alternative. Each
                  dictionary typically contains two keys (though not
                  all will be present in all cases)

                  * ``transcript``: The detected text from the audio recording.
                  * ``confidence``: The confidence in language detection, float
                    between 0 and 1.

        :raises: ValueError if more than one result is returned or no results.
        """
        config, audio = self._request_pbs(
            sample, language_code, max_alternatives, profanity_filter,
            speech_context)
        api = self._gapic_api
        api_response = api.sync_recognize(config=config, audio=audio)

        # exactly one result is expected for a synchronous request
        if len(api_response.results) != 1:
            raise ValueError('More than one result or none returned from API.')

        result = api_response.results.pop()
        return [Alternative.from_pb(alternative)
                for alternative in result.alternatives]
def _stream_requests(sample, language_code=None, max_alternatives=None,
                     profanity_filter=None, speech_context=None,
                     single_utterance=None, interim_results=None):
    """Yield the gRPC requests for one streaming-recognize session.

    The first request carries only the ``StreamingRecognitionConfig`` built
    from *sample* and the recognition options -- it MUST precede any audio.
    Every subsequent request carries one chunk of raw audio read from
    ``sample.content`` in ``sample.chunk_size`` pieces.

    :type sample: :class:`~google.cloud.speech.sample.Sample`
    :param sample: Instance of ``Sample`` containing audio information.
    :param language_code: (Optional) BCP-47 language tag, e.g. ``'en-GB'``.
    :param max_alternatives: (Optional) Max recognition hypotheses (0-30).
    :param profanity_filter: (Optional) If True, mask profanities in results.
    :param speech_context: (Optional) Up to 50 phrase "hints" for recognition.
    :param single_utterance: (Optional) If True, stop after one utterance.
    :param interim_results: (Optional) If True, also yield tentative results.
    """
    # config-only first request; audio chunks follow
    yield _make_streaming_request(
        sample, language_code=language_code,
        max_alternatives=max_alternatives,
        profanity_filter=profanity_filter,
        speech_context=SpeechContext(phrases=speech_context),
        single_utterance=single_utterance,
        interim_results=interim_results)

    while True:
        chunk = sample.content.read(sample.chunk_size)
        if not chunk:
            return
        yield StreamingRecognizeRequest(audio_content=chunk)
def _make_streaming_request(sample, language_code,
                            max_alternatives, profanity_filter,
                            speech_context, single_utterance,
                            interim_results):
    """Build the config-only first request of a streaming session.

    Wraps a ``RecognitionConfig`` (audio format and recognition options from
    *sample* and the arguments) in a ``StreamingRecognitionConfig`` carrying
    the streaming flags, and returns it inside a ``StreamingRecognizeRequest``
    with no audio payload.

    :type sample: :class:`~google.cloud.speech.sample.Sample`
    :param sample: Instance of ``Sample`` containing audio information.
    :param language_code: BCP-47 language tag, e.g. ``'en-GB'``.
    :param max_alternatives: Max recognition hypotheses (0-30).
    :param profanity_filter: If True, mask profanities in results.
    :param speech_context: ``SpeechContext`` with phrase "hints".
    :param single_utterance: If True, stop after one spoken utterance.
    :param interim_results: If True, also return tentative hypotheses.

    :rtype:
        :class:`~grpc.speech.v1beta1.cloud_speech_pb2.StreamingRecognizeRequest`
    :returns: Instance of ``StreamingRecognizeRequest``.
    """
    recognition_config = RecognitionConfig(
        encoding=sample.encoding, sample_rate=sample.sample_rate,
        language_code=language_code, max_alternatives=max_alternatives,
        profanity_filter=profanity_filter, speech_context=speech_context)
    return StreamingRecognizeRequest(
        streaming_config=StreamingRecognitionConfig(
            config=recognition_config,
            single_utterance=single_utterance,
            interim_results=interim_results))
|
{
"content_hash": "b2eafebbcfe013a56991f87cc4c12399",
"timestamp": "",
"source": "github",
"line_count": 410,
"max_line_length": 80,
"avg_line_length": 50.69512195121951,
"alnum_prop": 0.5645898484484003,
"repo_name": "Fkawala/gcloud-python",
"id": "05636a22806cd1d2e800bed580ca172fd91eab1e",
"size": "21361",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "speech/google/cloud/speech/_gax.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "PowerShell",
"bytes": "7195"
},
{
"name": "Protocol Buffer",
"bytes": "89702"
},
{
"name": "Python",
"bytes": "3403274"
},
{
"name": "Shell",
"bytes": "7548"
}
],
"symlink_target": ""
}
|
"""core functionalities of feets"""
__all__ = [
"FeatureNotFound",
"DataRequiredError",
"FeatureSpaceError",
"FeatureSpace",
"FeatureSet",
]
# =============================================================================
# IMPORTS
# =============================================================================
import copy
import itertools as it
from collections import Counter
from collections.abc import Mapping
import attr
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from . import extractors
from .extractors.core import (
DATAS,
DATA_ALIGNED_ERROR,
DATA_ALIGNED_ERROR2,
DATA_ALIGNED_MAGNITUDE,
DATA_ALIGNED_MAGNITUDE2,
DATA_ALIGNED_TIME,
DATA_ERROR,
DATA_MAGNITUDE,
DATA_MAGNITUDE2,
DATA_TIME,
)
# =============================================================================
# EXCEPTIONS
# =============================================================================
class FeatureNotFound(ValueError):
    """Raised when an unavailable feature is requested.

    A feature can be unavailable because:

    - It does not exist in any of the registered extractors.
    - It cannot be computed from the available time-series data.

    """
class DataRequiredError(ValueError):
    """Raised when the feature-space requires a time-series vector
    (e.g. ``time`` or ``magnitude``) that was not provided."""
class FeatureSpaceError(ValueError):
    """Raised when a ``FeatureSpace`` can't be configured with the given
    parameters (e.g. no feature extractor survives the filters)."""
# =============================================================================
# RESULTSET
# =============================================================================
class _Map(Mapping):
    """Internal immutable mapping that preserves insertion order.

    Keys and values are stored as tuples so the container cannot be mutated;
    an auxiliary index dict gives O(1) key lookups instead of the previous
    O(n) ``tuple.index`` scan on every ``__getitem__``.
    """

    def __init__(self, d):
        self._keys = tuple(d.keys())
        self._values = tuple(d.values())
        # key -> position, built once so lookups are constant time
        self._index = {k: i for i, k in enumerate(self._keys)}

    def __getitem__(self, k):
        """x.__getitem__(y) <==> x[y]"""
        # raises KeyError(k) for missing keys, as before
        return self._values[self._index[k]]

    def __iter__(self):
        """x.__iter__() <==> iter(x)"""
        return iter(self._keys)

    def __len__(self):
        """x.__len__() <==> len(x)"""
        return len(self._keys)
@attr.s(frozen=True, auto_attribs=True, repr=False)
class FeatureSet:
    """Immutable container of extracted features.

    A ``FeatureSet`` can convert the features into dicts, numpy arrays and
    a ``pandas.DataFrame``, and provides plotting helpers through the
    extractors' matplotlib-based ``plot`` hooks.
    """
    # feature names, in the order produced by the FeatureSpace
    features_names: tuple = attr.ib(converter=tuple)
    # feature name -> computed value (immutable mapping)
    values: dict = attr.ib(converter=_Map)
    # feature name -> extractor instance that produced the value
    extractors: dict = attr.ib(converter=_Map)
    # time-series vectors the features were computed from
    timeserie: dict = attr.ib(converter=_Map)
    def __attrs_post_init__(self):
        # every feature must appear in all three containers (count == 3);
        # anything counted fewer times signals inconsistent construction
        cnt = Counter(
            it.chain(self.features_names, self.values, self.extractors)
        )
        diff = set(k for k, v in cnt.items() if v < 3)
        if diff:
            joined_diff = ", ".join(diff)
            raise FeatureNotFound(
                f"The features '{joined_diff}' must be in 'features_names' "
                "'values' and 'extractors'"
            )
    def __iter__(self):
        """x.__iter__() <==> iter(x)

        Iterates the ``(names, values)`` pair from :meth:`as_arrays`, so
        ``names, values = feature_set`` unpacking works.
        """
        return iter(self.as_arrays())
    def __getitem__(self, k):
        """x.__getitem__(y) <==> x[y]"""
        # deep copy so callers cannot mutate the stored value
        return copy.deepcopy(self.values[k])
    def __repr__(self):
        """x.__repr__() <==> repr(x)"""
        feats = ", ".join(self.features_names)
        # only list the time-series vectors that were actually provided
        ts = ", ".join(d for d in DATAS if self.timeserie.get(d) is not None)
        return f"FeatureSet(features=<{feats}>, timeserie=<{ts}>)"
    def extractor_of(self, feature):
        """Return a copy of the extractor instance used to create *feature*."""
        return copy.deepcopy(self.extractors[feature])
    def plot(self, feature, ax=None, **plot_kws):
        """If available, draw the plot-representation of the feature.

        Parameters
        ----------
        feature : str
            The feature to plot.
        ax : matplotlib axes object, default None.
        `**plot_kws` : keywords
            Options to pass to extractor and matplotlib plotting method.

        Returns
        -------
        axes : matplotlib.axes.Axes or np.ndarray of them

        Raises
        ------
        NotImplementedError
            If the feature's extractor defines no plot hook.
        """
        ax = plt.gca() if ax is None else ax
        all_features = self.as_dict()
        extractor = self.extractor_of(feature)
        value = self[feature]
        try:
            ax = extractor.plot(
                feature=feature,
                value=value,
                ax=ax,
                plot_kws=plot_kws,
                features=all_features,
                **self.timeserie,
            )
        except NotImplementedError:
            # drop the axes we just created/acquired before propagating
            ax.remove()
            raise
        return ax
    def as_arrays(self):
        """Convert the feature values into two 1D numpy arrays.

        The first one contains all the names of the features (with suffixes
        if the value was flattened), and the second one the scalar values.
        Internally this method uses each extractor's ``flatten()`` method.
        """
        all_features, flatten_features = self.as_dict(), {}
        for fname, fvalue in self.values.items():
            extractor = self.extractors[fname]
            # multi-dimensional values become several "name_suffix" scalars
            flatten_value = extractor.flatten(
                feature=fname,
                value=fvalue,
                features=all_features,
                **self.timeserie,
            )
            flatten_features.update(flatten_value)
        features = np.empty(len(flatten_features), dtype=object)
        values = np.empty(len(flatten_features))
        for idx, fv in enumerate(flatten_features.items()):
            features[idx], values[idx] = fv
        return features, values
    def as_dict(self):
        """Return the features as a plain (mutable) ``dict`` copy."""
        return dict(self.values)
    def as_dataframe(self):
        """Convert the entire feature set into a single-row ``pandas.DataFrame``.

        Multi-dimensional features are first *flattened* with the
        extractors' ``flatten()`` method (via :meth:`as_arrays`).
        """
        features, values = self.as_arrays()
        return pd.DataFrame([values], columns=features)
# =============================================================================
# FEATURE EXTRACTORS
# =============================================================================
class FeatureSpace:
"""Wrapper class, to allow user select the
features based on the available time series vectors (magnitude, time,
error, second magnitude, etc.) or specify a list of features.
The finally selected features for the execution plan are are those that
satisfy all the filters.
Parameters
----------
data : array-like, optional, default ``None``
available time series vectors, which will
output all the features that need this data to be calculated.
only : array-like, optional, default ``None``
List of features, which will output
all the features in the list.
exclude : array-like, optional, default ``None``
List of features, which will not output
kwargs
Extra configuration for the feature extractors.
format is ``Feature_name={param1: value, param2: value, ...}``
Examples
--------
**List of features as an input:**
.. code-block:: pycon
>>> fs = feets.FeatureSpace(only=['Std'])
>>> features = fs.extract(*lc)
>>> features.as_dict()
{"Std": .42}
**Available data as an input:**
.. code-block:: pycon
>>> fs = feets.FeatureSpace(data=['magnitude','time'])
>>> features = fs.extract(**lc)
>>> features.as_dict()
{...}
**List of features and available data as an input:**
.. code-block:: pycon
>>> fs = feets.FeatureSpace(
... only=['Mean','Beyond1Std', 'CAR_sigma','Color'],
... data=['magnitude', 'error'])
>>> features = fs.extract(**lc)
>>> features.as_dict()
{"Beyond1Std": ..., "Mean": ...}
**Excluding list as an input**
.. code-block:: pycon
>>> fs = feets.FeatureSpace(
... only=['Mean','Beyond1Std','CAR_sigma','Color'],
... data=['magnitude', 'error'],
... exclude=["Beyond1Std"])
>>> features = fs.extract(**lc)
>>> features.as_dict()
{"Mean": 23}
"""
    def __init__(self, data=None, only=None, exclude=None, **kwargs):
        """Select and instantiate the extractors satisfying all filters.

        Raises ``FeatureNotFound`` for unknown feature/parameter names and
        ``FeatureSpaceError`` when no extractor survives the filters.
        """
        # retrieve all the extractors
        exts = extractors.registered_extractors()
        # store all the parameters for the extractors
        self._kwargs = kwargs
        # get all posible features by data
        if data:
            fbdata = []
            for fname, f in exts.items():
                # keep features whose required vectors are all in `data`
                if not f.get_required_data().difference(data):
                    fbdata.append(fname)
        else:
            # no data filter: every registered feature is a candidate
            fbdata = exts.keys()
        # NOTE: an empty `data` list is treated like None (all DATAS)
        self._data = frozenset(data or extractors.DATAS)
        self._features_by_data = frozenset(fbdata)
        # validate the list of features or select all of them
        if only:
            for f in only:
                if f not in exts:
                    raise FeatureNotFound(f)
        self._only = frozenset(only or exts.keys())
        # select the features to exclude or not exclude anything
        if exclude:
            for f in exclude:
                if f not in exts:
                    raise FeatureNotFound(f)
        self._exclude = frozenset(exclude or ())
        # the candidate to be the features to be extracted:
        # (available by data) AND (requested) MINUS (excluded)
        candidates = self._features_by_data.intersection(
            self._only
        ).difference(self._exclude)
        # remove by dependencies: a feature is only kept if every feature
        # it depends on also survived the filters
        if only or exclude:
            final = set()
            for f in candidates:
                fcls = exts[f]
                dependencies = fcls.get_dependencies()
                if dependencies.issubset(candidates):
                    final.add(f)
        else:
            final = candidates
        # the final features
        self._features = frozenset(final)
        # create a ndarray for all the results
        self._features_as_array = np.array(sorted(self._features))
        # initialize the extractors and determine the required data only
        features_extractors, features_extractors_names = set(), set()
        required_data = set()
        for fcls in set(exts.values()):
            if fcls.get_features().intersection(self._features):
                # per-extractor kwargs are keyed by the extractor class name
                params = self._kwargs.get(fcls.__name__, {})
                fext = fcls(**params)
                features_extractors.add(fext)
                features_extractors_names.add(fext.name)
                required_data.update(fext.get_required_data())
        if not features_extractors:
            raise FeatureSpaceError("No feature extractor was selected")
        self._features_extractors = frozenset(features_extractors)
        self._features_extractors_names = frozenset(features_extractors_names)
        self._required_data = frozenset(required_data)
        # excecution order by dependencies
        self._execution_plan = extractors.sort_by_dependencies(
            features_extractors
        )
        # any kwargs key that matched no selected extractor is an error
        not_found = set(self._kwargs).difference(
            self._features_extractors_names
        )
        if not_found:
            joined_not_found = ", ".join(not_found)
            raise FeatureNotFound(
                "This space not found feature(s) extractor(s) "
                f"{joined_not_found} to assign the given parameter(s)"
            )
def __repr__(self):
"""x.__repr__() <==> repr(x)"""
return str(self)
def __str__(self):
"""x.__str__() <==> str(x)"""
if not hasattr(self, "__str"):
extractors = [str(extractor) for extractor in self._execution_plan]
space = ", ".join(extractors)
self.__str = "<FeatureSpace: {}>".format(space)
return self.__str
def preprocess_timeserie(self, d):
"""Validate if the required values of the time-serie exist with
non ``None`` values in the dict ``d``. Finally returns a
new dictionary whose non-null values have been converted to
``np.ndarray``
"""
array_data = {}
for k, v in d.items():
if k in self._required_data and v is None:
raise DataRequiredError(k)
array_data[k] = v if v is None else np.asarray(v)
return array_data
    def extract(
        self,
        time=None,
        magnitude=None,
        error=None,
        magnitude2=None,
        aligned_time=None,
        aligned_magnitude=None,
        aligned_magnitude2=None,
        aligned_error=None,
        aligned_error2=None,
    ):
        """Extract the features from a given time-series.
        This method must be provided with the required timeseries data
        specified in the attribute ``required_data_``.
        Parameters
        ----------
        time : iterable, optional
        magnitude : iterable, optional
        error : iterable, optional
        magnitude2 : iterable, optional
        aligned_time : iterable, optional
        aligned_magnitude : iterable, optional
        aligned_magnitude2 : iterable, optional
        aligned_error : iterable, optional
        aligned_error2 : iterable, optional
        Returns
        -------
        feets.core.FeatureSet
            Container of a calculated features.
        """
        # Validate required data and convert the provided vectors to ndarrays.
        timeserie = self.preprocess_timeserie(
            {
                DATA_TIME: time,
                DATA_MAGNITUDE: magnitude,
                DATA_ERROR: error,
                DATA_MAGNITUDE2: magnitude2,
                DATA_ALIGNED_TIME: aligned_time,
                DATA_ALIGNED_MAGNITUDE: aligned_magnitude,
                DATA_ALIGNED_MAGNITUDE2: aligned_magnitude2,
                DATA_ALIGNED_ERROR: aligned_error,
                DATA_ALIGNED_ERROR2: aligned_error2,
            }
        )
        # Run the extractors in dependency order; each extractor can see the
        # features produced by the ones executed before it.
        features, extractors = {}, {}
        for fextractor in self._execution_plan:
            result = fextractor.extract(features=features, **timeserie)
            for fname, fvalue in result.items():
                features[fname] = fvalue
                # Deep copy so the stored extractor state reflects this run.
                extractors[fname] = copy.deepcopy(fextractor)
        # remove all the not needed features and extractors
        # (dependencies may have produced features the user did not ask for)
        flt_features, flt_extractors = {}, {}
        for fname in self._features_as_array:
            flt_features[fname] = features[fname]
            flt_extractors[fname] = extractors[fname]
        rs = FeatureSet(
            features_names=self._features_as_array,
            values=flt_features,
            extractors=flt_extractors,
            timeserie=timeserie,
        )
        return rs
    @property
    def extractors_conf(self):
        # Copy of the per-extractor kwargs; deep-copied so callers cannot
        # mutate the space's configuration.
        return copy.deepcopy(self._kwargs)
    @property
    def data(self):
        # Data kinds given at construction time — TODO confirm against __init__.
        return self._data
    @property
    def only(self):
        # ``only`` filter given at construction time — TODO confirm.
        return self._only
    @property
    def exclude(self):
        # ``exclude`` filter given at construction time — TODO confirm.
        return self._exclude
    @property
    def features_by_data_(self):
        return self._features_by_data
    @property
    def features_(self):
        # Frozenset of the selected feature names.
        return self._features
    @property
    def features_extractors_(self):
        # Frozenset of the instantiated extractor objects.
        return self._features_extractors
    @property
    def features_as_array_(self):
        # Sorted ndarray of the selected feature names.
        return self._features_as_array
    @property
    def excecution_plan_(self):
        # Extractors sorted by dependencies (name keeps the historic typo).
        return self._execution_plan
    @property
    def required_data_(self):
        # Frozenset of the time-series vectors the extractors need.
        return self._required_data
|
{
"content_hash": "b56333ca39535e9d591d4e5937e53783",
"timestamp": "",
"source": "github",
"line_count": 524,
"max_line_length": 79,
"avg_line_length": 29.654580152671755,
"alnum_prop": 0.5483621854688204,
"repo_name": "carpyncho/feets",
"id": "e2255c094c35367cd7149427b427f1b24869bbb4",
"size": "16867",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "feets/core.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "108441"
},
{
"name": "Python",
"bytes": "336059"
},
{
"name": "Shell",
"bytes": "2414"
},
{
"name": "TeX",
"bytes": "59406"
}
],
"symlink_target": ""
}
|
"""Tests for carfac.python.sai."""
import os
import unittest
import numpy as np
import sai as pysai
# Location of the golden fixture files, relative to this test's directory.
_TEST_DATA_DIR = "../test_data"
def LoadMatrix(filename, rows, columns):
  """Reads a matrix with shape (rows, columns) matrix from a text file."""
  path = os.path.join(_TEST_DATA_DIR, filename)
  loaded = np.loadtxt(path)
  assert loaded.shape == (rows, columns)
  return loaded
def WriteMatrix(filename, matrix):
  """Writes matrix to a text file in the test data directory."""
  destination = os.path.join(_TEST_DATA_DIR, filename)
  np.savetxt(destination, matrix, fmt="%0.12f")
def CreatePulseTrain(num_channels, num_samples, period, leading_zeros=0):
  """Builds a (num_channels, num_samples) array of periodic unit impulses.

  Channel i starts at phase (i + leading_zeros) % period so each channel
  is offset from its neighbours.
  """
  segment = np.zeros((num_channels, num_samples))
  for channel in range(num_channels):
    # Begin each channel at a different phase.
    start = (channel + leading_zeros) % period
    segment[channel, start::period] = 1
  return segment
def CreateSAIParams(sai_width, num_triggers_per_frame=2, **kwargs):
  """Fills an SAIParams object using reasonable defaults for some fields."""
  # Half of the SAI should come from the future.
  half_width = sai_width // 2
  return pysai.SAIParams(
      sai_width=sai_width,
      future_lags=half_width,
      num_triggers_per_frame=num_triggers_per_frame,
      **kwargs)
def HasPeakAt(frame, index):
  """Returns True if frame has a local maximum at index.

  Boundary samples only need to exceed their single neighbor. A
  single-sample frame is trivially treated as a peak (the original code
  raised IndexError in that case because the index == 0 branch read
  frame[1]).
  """
  if len(frame) == 1:
    return True
  if index == 0:
    return frame[index] > frame[index + 1]
  if index == len(frame) - 1:
    return frame[index] > frame[index - 1]
  return frame[index] > frame[index + 1] and frame[index] > frame[index - 1]
class PeriodicInputTest(unittest.TestCase):
  """Checks that SAI output peaks line up with the input pulse period."""

  def _RunMultiChannelPulseTrainTest(self, period, num_channels):
    """Runs one segment of a pulse train through the SAI and checks peaks."""
    input_segment_width = 38
    segment = CreatePulseTrain(num_channels, input_segment_width, period)
    sai_params = CreateSAIParams(num_channels=num_channels,
                                 input_segment_width=input_segment_width,
                                 trigger_window_width=input_segment_width,
                                 sai_width=15)
    sai_params.future_lags = 0  # Only compute past lags.
    sai = pysai.SAI(sai_params)
    sai_frame = sai.RunSegment(segment)
    # The output should have peaks at the same positions, regardless of
    # input phase.
    for i in range(num_channels):
      sai_channel = sai_frame[i, :]
      # Walk backwards from the zero-lag end in steps of the pulse period.
      for j in range(len(sai_channel) - 1, 0, -period):
        print(i, j, sai_channel, HasPeakAt(sai_channel, j))
        self.assertTrue(HasPeakAt(sai_channel, j))
    print("Input\n{}\nOutput\n{}".format(segment, sai_frame))

  def testMultiChannelPulseTrain(self):
    """Exercises a grid of pulse periods and channel counts."""
    for period in [25, 10, 5, 2]:
      for num_channels in [1, 2, 15]:
        print("Testing period={}, num_channels={}".format(period, num_channels))
        self._RunMultiChannelPulseTrainTest(period, num_channels)
class SAITest(unittest.TestCase):
  """Unit tests for SAI parameter validation and output correctness."""

  def testInputSegmentWidthIsLargerThanBuffer(self):
    # Redesigning with a buffer smaller than the input segment must fail.
    params = CreateSAIParams(num_channels=2, sai_width=10,
                             input_segment_width=200,
                             trigger_window_width=200)
    sai = pysai.SAI(params)
    params.trigger_window_width = params.input_segment_width // 10
    self.assertGreater(params.input_segment_width,
                       params.num_triggers_per_frame *
                       params.trigger_window_width)
    self.assertRaises(AssertionError, sai.Redesign, params)

  def testInputWidthDoesntMatchInputSegmentWidth(self):
    # Feeding a segment whose width differs from the configured
    # input_segment_width must be rejected.
    num_channels = 2
    input_segment_width = 10
    segment = CreatePulseTrain(num_channels, input_segment_width, period=4)
    sai_width = 20
    expected_input_segment_width = input_segment_width - 1
    sai_params = CreateSAIParams(
        num_channels=num_channels,
        sai_width=sai_width,
        input_segment_width=expected_input_segment_width,
        trigger_window_width=sai_width + 1)
    self.assertNotEqual(sai_params.input_segment_width, input_segment_width)
    sai = pysai.SAI(sai_params)
    self.assertRaises(AssertionError, sai.RunSegment, segment)

  def testInputSegmentWidthSmallerThanTriggerWindow(self):
    """Tests small hop between segments."""
    num_channels = 1
    total_input_samples = 20
    period = 5
    full_input = CreatePulseTrain(num_channels, total_input_samples, period)
    num_frames = 4
    input_segment_width = total_input_samples // num_frames
    sai_params = CreateSAIParams(num_channels=num_channels,
                                 input_segment_width=input_segment_width,
                                 trigger_window_width=total_input_samples,
                                 sai_width=15)
    self.assertLess(sai_params.input_segment_width,
                    sai_params.trigger_window_width)
    sai_params.future_lags = 0  # Only compute past lags.
    self.assertGreaterEqual(period, input_segment_width)
    sai = pysai.SAI(sai_params)
    for i in range(num_frames):
      segment = (
          full_input[:, i * input_segment_width:(i+1) * input_segment_width])
      sai_frame = sai.RunSegment(segment)
      print("Frame {}\nInput\n{}\nOutput\n{}".format(i, segment, sai_frame))
      self.assertNotEqual(np.abs(segment).sum(), 0)
      # Since the input segment is never all zero, there should always
      # be a peak at zero lag.
      sai_channel = sai_frame[0, :]
      self.assertTrue(HasPeakAt(sai_channel, len(sai_channel) - 1))
      if i == 0:
        # Since the pulse train period is larger than the input segment
        # size, the first input segment will only see a single impulse,
        # most of the SAI will be zero.
        np.testing.assert_allclose(sai_channel[:len(sai_channel) - 1],
                                   np.zeros(len(sai_channel) - 1), 1e-9)
      if i == num_frames - 1:
        # By the last frame, the SAI's internal buffer will have
        # accumulated the full input signal, so the resulting image
        # should contain kPeriod peaks.
        for j in range(len(sai_channel) - 1, 0, -period):
          self.assertTrue(HasPeakAt(sai_channel, j))

  def testMatchesMatlabOnBinauralData(self):
    # Golden-file comparison against the Matlab CARFAC implementation.
    test_name = "binaural_test"
    input_segment_width = 882
    num_channels = 71
    # The Matlab CARFAC output is transposed compared to the C++.
    input_segment = LoadMatrix(test_name + "-matlab-nap1.txt",
                               input_segment_width,
                               num_channels).transpose()
    sai_params = CreateSAIParams(num_channels=num_channels,
                                 input_segment_width=input_segment_width,
                                 trigger_window_width=input_segment_width,
                                 sai_width=500)
    sai = pysai.SAI(sai_params)
    sai_frame = sai.RunSegment(input_segment)
    expected_sai_frame = LoadMatrix(test_name + "-matlab-sai1.txt",
                                    num_channels, sai_params.sai_width)
    np.testing.assert_allclose(expected_sai_frame, sai_frame, rtol=1e-5)
    WriteMatrix(test_name + "-py-sai1.txt", sai_frame)
# Allow running this module directly: ``python sai_test.py``.
if __name__ == "__main__":
  unittest.main()
|
{
"content_hash": "6003dacc6672ba1c4d9c9dfa4eace23a",
"timestamp": "",
"source": "github",
"line_count": 181,
"max_line_length": 80,
"avg_line_length": 38.51933701657459,
"alnum_prop": 0.6303786574870912,
"repo_name": "google/carfac",
"id": "7e62c41c45ca16f92faee1f0678bd12f8443722b",
"size": "7735",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/sai_test.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "7134"
},
{
"name": "C++",
"bytes": "176288"
},
{
"name": "HTML",
"bytes": "14208"
},
{
"name": "Jupyter Notebook",
"bytes": "2818350"
},
{
"name": "MATLAB",
"bytes": "137907"
},
{
"name": "Python",
"bytes": "211445"
}
],
"symlink_target": ""
}
|
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
# Auto-generated benchmark case: daily series of 1024 points with a constant
# trend, weekly (7) cycle, Difference transform, no noise (sigma=0.0),
# 20 exogenous series and AR order 0.
art.process_dataset(N = 1024 , FREQ = 'D', seed = 0, trendtype = "ConstantTrend", cycle_length = 7, transform = "Difference", sigma = 0.0, exog_count = 20, ar_order = 0);
|
{
"content_hash": "87dea85b011649ad7827fc515fd22c64",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 170,
"avg_line_length": 38.57142857142857,
"alnum_prop": 0.7111111111111111,
"repo_name": "antoinecarme/pyaf",
"id": "1f28d498d26b5e5a20fa219a58cd99a2dc098885",
"size": "270",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/artificial/transf_Difference/trend_ConstantTrend/cycle_7/ar_/test_artificial_1024_Difference_ConstantTrend_7__20.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "6773299"
},
{
"name": "Procfile",
"bytes": "24"
},
{
"name": "Python",
"bytes": "54209093"
},
{
"name": "R",
"bytes": "807"
},
{
"name": "Shell",
"bytes": "3619"
}
],
"symlink_target": ""
}
|
""" A collection of utility methods
:Authors: Sana dev team
:Version: 1.1
"""
import os, sys, traceback
import time
import logging
import cjson
from django.conf import settings
# Settings keys toggling request logging and recording its start time.
LOGGING_ENABLED = 'LOGGING_ENABLE'
LOGGING_START = 'LOGGING_START_TIME'
def trace(f):
    """Decorator to add ENTER/EXIT trace log lines around a method.

    Uses ``f.__name__`` instead of the Python 2-only ``f.func_name``
    attribute, and ``functools.wraps`` so the wrapper keeps the wrapped
    function's name/docstring (the original copied only ``func_name``).
    """
    import functools

    @functools.wraps(f)
    def new_f(*args, **kwargs):
        # 'mac'/'type' are expected by the project's log formatter.
        extra = {'mac': '', 'type': ''}
        logging.info("TRACE %s ENTER" % f.__name__, extra=extra)
        result = f(*args, **kwargs)
        logging.info("TRACE %s EXIT" % f.__name__, extra=extra)
        return result
    return new_f
def log_traceback(logging):
    """Prints the traceback for the most recently caught exception to the log
    and returns a nicely formatted message.

    ``logging`` is the logger/module supplied by the caller. The original
    implementation passed each entry of ``traceback.extract_tb`` back into
    ``traceback.format_tb``, which expects a traceback object and raises
    AttributeError on stack entries; we log the pre-formatted frame strings
    instead.
    """
    et, val, tb = sys.exc_info()
    trace = traceback.format_tb(tb)
    for frame_text in trace:
        logging.error(frame_text)
    return "Exception : %s %s %s" % (et, val, trace[0])
def flush(flushable):
    """ Removes data stored for a model instance cached in this servers data
        stores

        flushable => a instance of a class which provides a flush method

        The toggle is read from settings as FLUSH_<CLASSNAME>; a missing
        setting is now treated as False instead of raising AttributeError.
    """
    flush_setting = 'FLUSH_'+flushable.__class__.__name__.upper()
    if getattr(settings, flush_setting, False):
        flushable.flush()
def mark(module, line, *args):
    """ in code tracing util for debugging: prints a location marker """
    message = 'Mark %s.%s: %s' % (module, line, args)
    print(message)
|
{
"content_hash": "ffbc5814005c9a93106ead237d725428",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 78,
"avg_line_length": 24.483870967741936,
"alnum_prop": 0.6284584980237155,
"repo_name": "rryan/sana.mds",
"id": "0c22cf23c0c0468ce79717df2071e184aaf07b75",
"size": "1518",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/mds/api/v1/util.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "10323"
},
{
"name": "Python",
"bytes": "312922"
}
],
"symlink_target": ""
}
|
import re
import os
import json
from langconv import *
# Input: unicode text; output: unicode text.
def big2simple(line):
    """Converts Traditional Chinese text to Simplified Chinese."""
    return Converter('zh-hans').convert(line)
# Mapping remote URL -> local image path; populated in __main__ below.
urm_name_map = None
def load_url(path):
    """Builds a dict mapping remote image URLs to local crawl-image paths.

    Reads tab-separated "url<TAB>name" lines from ``path``. URLs are
    normalized to Simplified Chinese; names are reduced to their basename
    under the crawl_images upload directory.
    """
    ret = {}
    dir = "./Uploads/Article/crawl_images"
    for line in open(path):
        line = line.strip()
        if not line:
            continue
        url,name = line.split("\t")
        url = big2simple(url.decode('utf-8'))  # Python 2: bytes -> unicode
        name = os.path.basename(name)
        path = "%s/%s" % (dir,name)  # NOTE(review): rebinds the ``path`` parameter
        ret[url] = path
    return ret
def replace_img_url(content):
    """Rewrites every known remote image URL in content to its local path."""
    result = content
    for remote_url in urm_name_map:
        result = result.replace(remote_url, urm_name_map[remote_url])
    return result
def save_new_json(str):
    """Appends one JSON line (plus newline) to fix_img.txt.

    Uses a ``with`` block so the handle is closed even if a write fails;
    the original open/write/close sequence leaked the handle on error.
    """
    with open("fix_img.txt", 'a') as out_file:
        out_file.write(str)
        out_file.write("\n")
def read_data(line):
    """Parses one JSON record, rewrites its image URLs to local paths and
    appends the fixed record to fix_img.txt. (Python 2 print syntax.)
    """
    ob = json.loads(line)
    #id,keyword,title,content,category
    print ob['id'],ob['category']
    content = ob['content']
    ob['content'] = replace_img_url(content)
    save_new_json(json.dumps(ob))
if __name__=="__main__":
urm_name_map = load_url("img_record.txt")
file_path = "ret_summary.txt"
for line in open(file_path):
line = line.strip()
if not line and len(line)==0:
continue
read_data(line)
|
{
"content_hash": "e4a818e7e2f1785135a7fc811eef6251",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 50,
"avg_line_length": 21.771929824561404,
"alnum_prop": 0.6655922643029815,
"repo_name": "willdonetang/taisi360",
"id": "bc8493a0095192c0e06d61f80ee1e97090cf2848",
"size": "1304",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/parse/fix_img_url.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "550"
},
{
"name": "CSS",
"bytes": "239830"
},
{
"name": "HTML",
"bytes": "346214"
},
{
"name": "JavaScript",
"bytes": "374095"
},
{
"name": "PHP",
"bytes": "1767497"
},
{
"name": "Smarty",
"bytes": "12169"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
from future.utils import tobytes
import utils
import proto
def print_verbose(security_ctx, data):
    """Prints data wrapped in '++++' markers when the security context has
    verbose output enabled."""
    if security_ctx.verbose:
        print("++++ " + data + " ++++")
def custom_config_request(security_ctx, info, version):
    """Builds and encrypts a CustomConfigRequest protobuf payload.

    Returns the encrypted command as a latin-1 decoded string, ready to be
    sent to the device.
    """
    # Form protobuf request packet from custom-config data
    cmd = proto.custom_config_pb2.CustomConfigRequest()
    cmd.info = tobytes(info)
    cmd.version = version
    enc_cmd = security_ctx.encrypt_data(cmd.SerializeToString()).decode('latin-1')
    print_verbose(security_ctx, "Client -> Device (CustomConfig cmd) " + utils.str_to_hexstr(enc_cmd))
    return enc_cmd
def custom_config_response(security_ctx, response_data):
    """Decrypts and parses a CustomConfigResponse; returns its status code."""
    # Interpret protobuf response packet
    decrypt = security_ctx.decrypt_data(tobytes(response_data))
    cmd_resp = proto.custom_config_pb2.CustomConfigResponse()
    cmd_resp.ParseFromString(decrypt)
    print_verbose(security_ctx, "CustomConfig status " + str(cmd_resp.status))
    return cmd_resp.status
|
{
"content_hash": "e5d5c0eb941f745c54d88c8178cf54c2",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 102,
"avg_line_length": 38.88461538461539,
"alnum_prop": 0.7171117705242335,
"repo_name": "www220/esp-idf",
"id": "d5d291326e0dee45cbb29e4d0af3913644c0a136",
"size": "1708",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tools/esp_prov/prov/custom_prov.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "142861"
},
{
"name": "C",
"bytes": "24407396"
},
{
"name": "C++",
"bytes": "1427370"
},
{
"name": "CMake",
"bytes": "139521"
},
{
"name": "Inno Setup",
"bytes": "10241"
},
{
"name": "Lex",
"bytes": "7270"
},
{
"name": "Makefile",
"bytes": "132288"
},
{
"name": "Objective-C",
"bytes": "44648"
},
{
"name": "Perl",
"bytes": "15204"
},
{
"name": "Python",
"bytes": "748019"
},
{
"name": "Shell",
"bytes": "67825"
},
{
"name": "Yacc",
"bytes": "15875"
}
],
"symlink_target": ""
}
|
import ctypes
# Python 2 solution: first stdin line is the number of cases.
testCount = int(raw_input())
for testId in range(testCount):
    value = int(raw_input())
    # Flip all bits, then mask back into the unsigned 32-bit range.
    flipped = ~value & 0xFFFFFFFF
    print flipped
|
{
"content_hash": "2e9a029be4f63a154e714b8d7433f401",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 33,
"avg_line_length": 22.428571428571427,
"alnum_prop": 0.6815286624203821,
"repo_name": "lilsweetcaligula/Online-Judges",
"id": "a8adccd758871d695716eca3d968df98b8389093",
"size": "172",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hackerrank/algorithms/bit_manipulation/easy/flipping_bits/py/solution.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "47855"
},
{
"name": "C++",
"bytes": "27383"
},
{
"name": "Python",
"bytes": "152704"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration recreating the Contact model; field
    # definitions should not be edited by hand.
    initial = True
    dependencies = [
        ('contact', '0003_delete_contact'),
    ]
    operations = [
        migrations.CreateModel(
            name='Contact',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('email', models.EmailField(max_length=254)),
                ('subject', models.CharField(max_length=500)),
                ('message', models.TextField()),
            ],
        ),
    ]
|
{
"content_hash": "178cc74a16ff6d9a722a349345fc1088",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 114,
"avg_line_length": 26.541666666666668,
"alnum_prop": 0.5572998430141287,
"repo_name": "Busaka/esl",
"id": "fdb41388dbd51873ad76a66ac8e4194e4ce9adde",
"size": "707",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/contact/migrations/0004_contact.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "84526"
},
{
"name": "HTML",
"bytes": "112006"
},
{
"name": "JavaScript",
"bytes": "140631"
},
{
"name": "Python",
"bytes": "58304"
}
],
"symlink_target": ""
}
|
import unittest
from cssqc.parser import CSSQC
from cssqc.qualityWarning import QualityWarning
class Test_bangFormat(unittest.TestCase):
    """Checks the CSSQC 'bangFormat' rule for every supported setting.

    The four parse* helpers were copy-pasted variants differing only in the
    option value; they now share one private helper. Their public names and
    signatures are unchanged.
    """

    def _parse(self, style, data):
        # Parse ``data`` with the bangFormat option set to ``style``.
        c = CSSQC({"bangFormat": style})
        c.parse(data)
        return c

    def parseBefore(self, data):
        return self._parse('before', data)

    def parseAfter(self, data):
        return self._parse('after', data)

    def parseBoth(self, data):
        return self._parse('both', data)

    def parseNone(self, data):
        return self._parse('none', data)

    def test_bang_format(self):
        # Sample covers all four spacing variants around '!important'.
        sample = '''div {
    width: 100% !important;
}
    .class1 {
        height: 100% ! important;
    }
    .class2 {
        width: 50%!important;
    }
    .class3 {
        margin: 0! important;
    }'''
        c_before = self.parseBefore(sample)
        c_after = self.parseAfter(sample)
        c_both = self.parseBoth(sample)
        c_none = self.parseNone(sample)
        self.assertEqual(c_before.warnings, [
            QualityWarning('bangFormat', 5),
            QualityWarning('bangFormat', 8),
            QualityWarning('bangFormat', 11)
        ])
        self.assertEqual(c_after.warnings, [
            QualityWarning('bangFormat', 2),
            QualityWarning('bangFormat', 5),
            QualityWarning('bangFormat', 8),
        ])
        self.assertEqual(c_both.warnings, [
            QualityWarning('bangFormat', 2),
            QualityWarning('bangFormat', 8),
            QualityWarning('bangFormat', 11),
        ])
        self.assertEqual(c_none.warnings, [
            QualityWarning('bangFormat', 2),
            QualityWarning('bangFormat', 5),
            QualityWarning('bangFormat', 11),
        ])
|
{
"content_hash": "a064b404036da57923604e49ee556282",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 47,
"avg_line_length": 28.193548387096776,
"alnum_prop": 0.5646453089244852,
"repo_name": "matematik7/CSSQC",
"id": "fc953e7bf272305550cf8dd5ebf16a6657d3d705",
"size": "2133",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_bangFormat.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "111603"
},
{
"name": "Python",
"bytes": "199164"
}
],
"symlink_target": ""
}
|
import time
import hashlib
import hmac
try:
import simplejson as json
except ImportError:
import json # type: ignore # noqa: F401
from libcloud.utils.py3 import httplib
from libcloud.utils.py3 import urlencode
from libcloud.common.base import ConnectionUserAndKey, JsonResponse
from libcloud.common.types import InvalidCredsError, LibcloudError
from libcloud.storage.base import Container, StorageDriver
class NimbusResponse(JsonResponse):
    """Response wrapper for Nimbus.io API calls."""

    # Statuses the driver is prepared to parse (including expected errors).
    valid_response_codes = [
        httplib.OK,
        httplib.NOT_FOUND,
        httplib.CONFLICT,
        httplib.BAD_REQUEST,
    ]

    def success(self):
        """Returns True when the status is one of the accepted codes."""
        return self.status in self.valid_response_codes

    def parse_error(self):
        """Maps HTTP error statuses to libcloud exceptions."""
        if self.status == httplib.UNAUTHORIZED:
            raise InvalidCredsError(self.body)
        message = "Unknown error. Status code: %d" % (self.status)
        raise LibcloudError(message, driver=self.connection.driver)
class NimbusConnection(ConnectionUserAndKey):
    """Authenticated connection to the Nimbus.io API."""

    host = "nimbus.io"
    responseCls = NimbusResponse

    def __init__(self, *args, **kwargs):
        # Pop the account id so the base class does not see an unknown kwarg.
        self.id = kwargs.pop("id")
        super(NimbusConnection, self).__init__(*args, **kwargs)

    def pre_connect_hook(self, params, headers):
        """Adds the timestamp and HMAC authorization headers to a request."""
        timestamp = str(int(time.time()))
        signature = self._calculate_signature(
            user_id=self.user_id,
            method=self.method,
            params=params,
            path=self.action,
            timestamp=timestamp,
            key=self.key,
        )
        headers["X-NIMBUS-IO-Timestamp"] = timestamp
        headers["Authorization"] = "NIMBUS.IO %s:%s" % (self.id, signature)
        return params, headers

    def _calculate_signature(self, user_id, method, params, path, timestamp, key):
        """Returns the hex HMAC-SHA256 signature for one request.

        The key and the string to sign are encoded to UTF-8 bytes because
        ``hmac.new`` requires bytes on Python 3 — the original passed str
        objects and raised TypeError there (on Python 2 the encoded bytes
        behave the same as the original str for ASCII input).
        """
        if params:
            uri_path = path + "?" + urlencode(params)
        else:
            uri_path = path
        string_to_sign = "\n".join([user_id, method, str(timestamp), uri_path])
        hmac_value = hmac.new(
            key.encode("utf-8"),
            string_to_sign.encode("utf-8"),
            hashlib.sha256,
        )
        return hmac_value.hexdigest()
class NimbusStorageDriver(StorageDriver):
    # Storage driver for the Nimbus.io service.
    name = "Nimbus.io"
    website = "https://nimbus.io/"
    connectionCls = NimbusConnection
    def __init__(self, *args, **kwargs):
        # The Nimbus account user id is required and forwarded to the
        # connection via _ex_connection_class_kwargs below.
        self.user_id = kwargs["user_id"]
        super(NimbusStorageDriver, self).__init__(*args, **kwargs)
    def iterate_containers(self):
        """Yields all collections of the account as Container objects."""
        response = self.connection.request("/customers/%s/collections" % (self.user_id))
        return self._to_containers(response.object)
    def create_container(self, container_name):
        """Creates a new collection and returns it as a Container."""
        params = {"action": "create", "name": container_name}
        response = self.connection.request(
            "/customers/%s/collections" % (self.user_id), params=params, method="POST"
        )
        return self._to_container(response.object)
    def _to_containers(self, data):
        # Lazily convert each API collection entry to a Container.
        for item in data:
            yield self._to_container(item)
    def _to_container(self, data):
        # API entries are sequences; index 0 is the name, index 2 the
        # creation date — TODO confirm against the Nimbus.io API docs.
        name = data[0]
        extra = {"date_created": data[2]}
        return Container(name=name, extra=extra, driver=self)
    def _ex_connection_class_kwargs(self):
        # Extra kwargs passed through to NimbusConnection.__init__.
        result = {"id": self.user_id}
        return result
|
{
"content_hash": "8a94ddd4a7ac13593f2535289fc4f95d",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 88,
"avg_line_length": 31.192307692307693,
"alnum_prop": 0.6279284833538841,
"repo_name": "mistio/libcloud",
"id": "825574ddad0565bb5527463e1d754723a806bee2",
"size": "4026",
"binary": false,
"copies": "1",
"ref": "refs/heads/trunk",
"path": "libcloud/storage/drivers/nimbus.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1819"
},
{
"name": "HTML",
"bytes": "2545"
},
{
"name": "PowerShell",
"bytes": "410"
},
{
"name": "Python",
"bytes": "9067225"
},
{
"name": "Shell",
"bytes": "12994"
}
],
"symlink_target": ""
}
|
# Per-module flags consumed by the test package; presumably marks which test
# groups are expected to fail — TODO confirm against the code importing this.
on_failure = (False, True, False, False, False)
|
{
"content_hash": "1946c37e4fe2bb025f05df85120d210f",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 47,
"avg_line_length": 47,
"alnum_prop": 0.7021276595744681,
"repo_name": "StanczakDominik/PythonPIC",
"id": "68dfeea2a76e41f77de05c6dfd636719845cd105",
"size": "135",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pythonpic/tests/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "64"
},
{
"name": "Python",
"bytes": "226256"
}
],
"symlink_target": ""
}
|
import re
import cffi
import subprocess
import logging
# Module-level logger; DEBUG so the search-progress messages are visible.
l = logging.getLogger('cffier')
l.setLevel(logging.DEBUG)
def find_good_bsearch(known_good, questionable):
    """Binary-searches for a prefix of ``questionable`` declarations that
    cffi can parse together with ``known_good``.

    Returns the accepted (possibly repeatedly halved) list of lines.
    """
    ffi = cffi.FFI()
    l.debug("bsearch - trying %d good and %d questionable", len(known_good), len(questionable))
    try:
        ffi.cdef('\n'.join(known_good + questionable))
    except (cffi.CDefError, AssertionError):
        # Floor division: the original ``len(questionable)/2`` was Python 2
        # only — on Python 3 '/' yields a float, which is not a valid slice
        # index. '//' behaves identically on both versions.
        return find_good_bsearch(known_good, questionable[:len(questionable) // 2])
    return questionable
def find_good_scan(known_good, questionable):
    """Linearly scans for the longest prefix of ``questionable`` that cffi
    accepts together with ``known_good``, using the line number embedded in
    cffi's error messages to cut the candidate list.
    """
    ffi = cffi.FFI()
    l.debug("scan - trying %d good and %d questionable", len(known_good), len(questionable))
    try:
        ffi.cdef('\n'.join(known_good + questionable))
        return questionable
    except AssertionError:
        return [ ]
    except cffi.CDefError as e:
        # Extract the failing line number from the message; its format
        # varies between cffi error flavours.
        if str(e).count(':') >= 2:
            fail = int(str(e).split('\n')[1].split(':')[1])
        elif 'unrecognized construct' in str(e):
            fail = int(str(e).split()[1][:-1])
        elif 'end of input' in str(e):
            # Truncated declaration at the end: drop the last line and retry.
            return find_good_scan(known_good, questionable[:-1])
        else:
            raise Exception("Unknown error")
    except cffi.FFIError as e:
        if str(e).count(':') >= 2:
            fail = int(str(e).split('\n')[0].split(':')[1])
        else:
            raise Exception("Unknown error")
    # Retry with the candidate list cut just before the failing line
    # (line numbers are relative to the combined good+questionable text).
    return find_good_scan(known_good, questionable[:fail-2-len(known_good)])
def doit(vex_path):
    """Preprocesses pyvex_c/pyvex.h with cpp, filters the output down to
    declarations cffi can parse, and writes them to pyvex/vex_ffi.py.

    ``vex_path`` is added as an include directory for the preprocessor.
    """
    header,_ = subprocess.Popen(['cpp', '-I'+vex_path, 'pyvex_c/pyvex.h'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT).communicate()
    linesep = '\r\n' if '\r\n' in header else '\n'
    # Drop preprocessor remnants, blank lines and jmp_buf lines cffi rejects.
    ffi_text = linesep.join(line for line in header.split(linesep) if '#' not in line and line.strip() != '' and 'jmp_buf' not in line)
    # Raw strings for the regex patterns (identical patterns, no escape risk).
    ffi_text = re.sub(r'\{\s*\} NoOp;', '{ int DONOTUSE; } NoOp;', ffi_text)
    ffi_text = re.sub(r'__attribute__\s*\(.*\)', '', ffi_text)
    ffi_text = ffi_text.replace('__const', 'const')
    ffi_lines = ffi_text.split(linesep)
    good = find_good_scan([], ffi_lines)
    remaining = ffi_lines[len(good)+1:]
    while len(remaining) > 1:
        l.debug("%d uncertain lines remaining", len(remaining))
        new_good = find_good_scan(good, remaining[1:])
        good += new_good
        remaining = remaining[len(new_good)+1:]
    good += [ 'extern VexControl vex_control;' ]
    # Close the output deterministically; the original open(...).write(...)
    # relied on refcounting to flush and close the handle.
    with open('pyvex/vex_ffi.py', 'w') as out:
        out.write('ffi_str = """' + '\n'.join(good) + '"""')
# Usage: python make_ffi.py <path-to-vex-headers>
if __name__ == '__main__':
    import sys
    doit(sys.argv[1])
|
{
"content_hash": "f6e7334537ade29f02f9fe09fc43e39b",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 138,
"avg_line_length": 35.925925925925924,
"alnum_prop": 0.5910652920962199,
"repo_name": "chubbymaggie/pyvex",
"id": "ad69221d26edf2adfe160335394c1430110a8f5e",
"size": "2910",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "make_ffi.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "18141"
},
{
"name": "Makefile",
"bytes": "457"
},
{
"name": "Python",
"bytes": "64167"
}
],
"symlink_target": ""
}
|
"""Unit tests for core.domain.takeout_service."""
from __future__ import annotations
import datetime
import json
import logging
from core import feconf
from core import utils
from core.constants import constants
from core.domain import exp_domain
from core.domain import exp_services
from core.domain import feedback_services
from core.domain import rights_domain
from core.domain import stats_domain
from core.domain import takeout_domain
from core.domain import takeout_service
from core.domain import topic_domain
from core.platform import models
from core.tests import test_utils
from typing import Any, Dict, Final, List, Optional, Union
# MYPY guard: these imports exist only so the type checker can resolve the
# names bound by Registry.import_models() below; they never run at runtime.
MYPY = False
if MYPY: # pragma: no cover
    from mypy_imports import app_feedback_report_models
    from mypy_imports import auth_models
    from mypy_imports import base_models
    from mypy_imports import blog_models
    from mypy_imports import collection_models
    from mypy_imports import config_models
    from mypy_imports import exp_models as exploration_models
    from mypy_imports import feedback_models
    from mypy_imports import improvements_models
    from mypy_imports import learner_group_models
    from mypy_imports import question_models
    from mypy_imports import skill_models
    from mypy_imports import story_models
    from mypy_imports import subtopic_models
    from mypy_imports import suggestion_models
    from mypy_imports import topic_models
    from mypy_imports import user_models
# Bind the storage model modules; the tuple order must match the Names list.
(
    app_feedback_report_models,
    auth_models,
    base_models,
    blog_models,
    collection_models,
    config_models,
    exploration_models,
    feedback_models,
    improvements_models,
    learner_group_models,
    question_models,
    skill_models,
    story_models,
    subtopic_models,
    suggestion_models,
    topic_models,
    user_models
) = models.Registry.import_models([
    models.Names.APP_FEEDBACK_REPORT,
    models.Names.AUTH,
    models.Names.BASE_MODEL,
    models.Names.BLOG,
    models.Names.COLLECTION,
    models.Names.CONFIG,
    models.Names.EXPLORATION,
    models.Names.FEEDBACK,
    models.Names.IMPROVEMENTS,
    models.Names.LEARNER_GROUP,
    models.Names.QUESTION,
    models.Names.SKILL,
    models.Names.STORY,
    models.Names.SUBTOPIC,
    models.Names.SUGGESTION,
    models.Names.TOPIC,
    models.Names.USER
])
class TakeoutServiceProfileUserUnitTests(test_utils.GenericTestBase):
    """Tests for the takeout service for profile user.

    Takeout is not yet implemented for profile users, so both tests
    below verify that exporting a profile user's data raises
    NotImplementedError, regardless of how much data the profile has.
    """

    # IDs/roles for one full user and one associated profile user
    # (profile users carry the mobile-learner role).
    USER_ID_1: Final = 'user_1'
    PROFILE_ID_1: Final = 'profile_1'
    USER_1_ROLE: Final = feconf.ROLE_ID_CURRICULUM_ADMIN
    PROFILE_1_ROLE: Final = feconf.ROLE_ID_MOBILE_LEARNER
    USER_1_EMAIL: Final = 'user1@example.com'
    GENERIC_USERNAME: Final = 'user'
    GENERIC_DATE: Final = datetime.datetime(2019, 5, 20)
    GENERIC_EPOCH: Final = utils.get_time_in_millisecs(GENERIC_DATE)
    GENERIC_IMAGE_URL: Final = 'www.example.com/example.png'
    GENERIC_USER_BIO: Final = 'I am a user of Oppia!'
    GENERIC_SUBJECT_INTERESTS: Final = ['Math', 'Science']
    GENERIC_LANGUAGE_CODES: Final = ['en', 'es']
    GENERIC_DISPLAY_ALIAS: Final = 'display_alias'
    GENERIC_DISPLAY_ALIAS_2: Final = 'display_alias2'
    # Fixture ids for the learner-progress models created in
    # set_up_non_trivial(); the *_2 variants are assigned to the full
    # user so the profile's data is distinguishable from its parent's.
    EXPLORATION_IDS: Final = ['exp_1']
    EXPLORATION_IDS_2: Final = ['exp_2']
    COLLECTION_IDS: Final = ['23', '42', '4']
    COLLECTION_IDS_2: Final = ['32', '44', '6']
    STORY_IDS: Final = ['12', '22', '32']
    STORY_IDS_2: Final = ['42', '52', '62']
    TOPIC_IDS: Final = ['11', '21', '31']
    TOPIC_IDS_2: Final = ['41', '51', '61']
    SKILL_ID_1: Final = 'skill_id_1'
    SKILL_ID_2: Final = 'skill_id_2'
    SKILL_ID_3: Final = 'skill_id_3'
    DEGREE_OF_MASTERY: Final = 0.5
    DEGREE_OF_MASTERY_2: Final = 0.6
    EXP_VERSION: Final = 1
    STATE_NAME: Final = 'state_name'
    STORY_ID_1: Final = 'story_id_1'
    COMPLETED_NODE_IDS_1: Final = ['node_id_1', 'node_id_2']

    def set_up_non_trivial(self) -> None:
        """Set up all models for use in testing.
        1) Simulates skill mastery of user_1 and profile_1.
        2) Simulates completion of some activities of user_1 and profile_1.
        3) Simulates incomplete status of some activities.
        4) Creates user LearnerGoalsModel.
        5) Populates ExpUserLastPlaythroughModel of user.
        6) Creates user LearnerPlaylistModel.
        7) Simulates collection progress of user.
        8) Simulates story progress of user.
        9) Creates new collection rights.
        10) Simulates a general suggestion.
        11) Creates new exploration rights.
        12) Populates user settings.
        """
        # Setup for UserSkillMasteryModel: one mastery entry for the full
        # user and one for the profile user.
        user_models.UserSkillMasteryModel(
            id=user_models.UserSkillMasteryModel.construct_model_id(
                self.USER_ID_1, self.SKILL_ID_3),
            user_id=self.USER_ID_1,
            skill_id=self.SKILL_ID_3,
            degree_of_mastery=self.DEGREE_OF_MASTERY_2).put()
        user_models.UserSkillMasteryModel(
            id=user_models.UserSkillMasteryModel.construct_model_id(
                self.PROFILE_ID_1, self.SKILL_ID_1),
            user_id=self.PROFILE_ID_1,
            skill_id=self.SKILL_ID_1,
            degree_of_mastery=self.DEGREE_OF_MASTERY).put()
        # Setup for CompletedActivitiesModel.
        user_models.CompletedActivitiesModel(
            id=self.USER_ID_1,
            exploration_ids=self.EXPLORATION_IDS_2,
            collection_ids=self.COLLECTION_IDS_2,
            story_ids=self.STORY_IDS_2,
            learnt_topic_ids=self.TOPIC_IDS_2).put()
        user_models.CompletedActivitiesModel(
            id=self.PROFILE_ID_1,
            exploration_ids=self.EXPLORATION_IDS,
            collection_ids=self.COLLECTION_IDS,
            story_ids=self.STORY_IDS,
            learnt_topic_ids=self.TOPIC_IDS).put()
        # Setup for IncompleteActivitiesModel (profile user only).
        user_models.IncompleteActivitiesModel(
            id=self.PROFILE_ID_1,
            exploration_ids=self.EXPLORATION_IDS,
            collection_ids=self.COLLECTION_IDS,
            story_ids=self.STORY_IDS_2,
            partially_learnt_topic_ids=self.TOPIC_IDS).put()
        # Setup for ExpUserLastPlaythroughModel. Its id is the standard
        # '<user_id>.<exploration_id>' composite.
        user_models.ExpUserLastPlaythroughModel(
            id='%s.%s' % (self.PROFILE_ID_1, self.EXPLORATION_IDS[0]),
            user_id=self.PROFILE_ID_1, exploration_id=self.EXPLORATION_IDS[0],
            last_played_exp_version=self.EXP_VERSION,
            last_played_state_name=self.STATE_NAME).put()
        # Setup for LearnerGoalsModel.
        user_models.LearnerGoalsModel(
            id=self.PROFILE_ID_1,
            topic_ids_to_learn=self.TOPIC_IDS).put()
        # Setup for LearnerPlaylistModel.
        user_models.LearnerPlaylistModel(
            id=self.PROFILE_ID_1,
            exploration_ids=self.EXPLORATION_IDS,
            collection_ids=self.COLLECTION_IDS).put()
        # Setup for CollectionProgressModel.
        user_models.CollectionProgressModel(
            id='%s.%s' % (self.PROFILE_ID_1, self.COLLECTION_IDS[0]),
            user_id=self.PROFILE_ID_1,
            collection_id=self.COLLECTION_IDS[0],
            completed_explorations=self.EXPLORATION_IDS).put()
        # Setup for StoryProgressModel.
        user_models.StoryProgressModel(
            id='%s.%s' % (self.PROFILE_ID_1, self.STORY_ID_1),
            user_id=self.PROFILE_ID_1,
            story_id=self.STORY_ID_1,
            completed_node_ids=self.COMPLETED_NODE_IDS_1).put()
        # Setup for UserSettingsModel: one fully-populated record for the
        # full user, and one for the profile user (which has no username,
        # tutorial timestamps or creator-related fields).
        user_models.UserSettingsModel(
            id=self.USER_ID_1,
            email=self.USER_1_EMAIL,
            roles=[self.USER_1_ROLE],
            username=self.GENERIC_USERNAME,
            normalized_username=self.GENERIC_USERNAME,
            last_agreed_to_terms=self.GENERIC_DATE,
            last_started_state_editor_tutorial=self.GENERIC_DATE,
            last_started_state_translation_tutorial=self.GENERIC_DATE,
            last_logged_in=self.GENERIC_DATE,
            last_created_an_exploration=self.GENERIC_DATE,
            last_edited_an_exploration=self.GENERIC_DATE,
            profile_picture_data_url=self.GENERIC_IMAGE_URL,
            default_dashboard='learner', creator_dashboard_display_pref='card',
            user_bio=self.GENERIC_USER_BIO,
            subject_interests=self.GENERIC_SUBJECT_INTERESTS,
            first_contribution_msec=1,
            preferred_language_codes=self.GENERIC_LANGUAGE_CODES,
            preferred_site_language_code=self.GENERIC_LANGUAGE_CODES[0],
            preferred_audio_language_code=self.GENERIC_LANGUAGE_CODES[0],
            display_alias=self.GENERIC_DISPLAY_ALIAS
        ).put()
        user_models.UserSettingsModel(
            id=self.PROFILE_ID_1,
            email=self.USER_1_EMAIL,
            roles=[self.PROFILE_1_ROLE],
            username=None,
            normalized_username=None,
            last_agreed_to_terms=self.GENERIC_DATE,
            last_started_state_editor_tutorial=None,
            last_started_state_translation_tutorial=None,
            last_logged_in=self.GENERIC_DATE,
            last_created_an_exploration=None,
            last_edited_an_exploration=None,
            profile_picture_data_url=None,
            default_dashboard='learner', creator_dashboard_display_pref='card',
            user_bio=self.GENERIC_USER_BIO,
            subject_interests=self.GENERIC_SUBJECT_INTERESTS,
            first_contribution_msec=None,
            preferred_language_codes=self.GENERIC_LANGUAGE_CODES,
            preferred_site_language_code=self.GENERIC_LANGUAGE_CODES[0],
            preferred_audio_language_code=self.GENERIC_LANGUAGE_CODES[0],
            display_alias=self.GENERIC_DISPLAY_ALIAS_2
        ).put()

    def set_up_trivial(self) -> None:
        """Setup for trivial test of export_data functionality."""
        # Only the bare-minimum settings models: the export should fail
        # for a profile user even when no other data exists.
        user_models.UserSettingsModel(
            id=self.USER_ID_1,
            email=self.USER_1_EMAIL,
            roles=[self.USER_1_ROLE]
        ).put()
        user_models.UserSettingsModel(
            id=self.PROFILE_ID_1,
            email=self.USER_1_EMAIL,
            roles=[self.PROFILE_1_ROLE]
        ).put()

    def test_export_data_for_profile_user_trivial_raises_error(self) -> None:
        """Trivial test of export_data functionality."""
        self.set_up_trivial()
        error_msg = 'Takeout for profile users is not yet supported.'
        with self.assertRaisesRegex(NotImplementedError, error_msg):
            takeout_service.export_data_for_user(self.PROFILE_ID_1)

    def test_export_data_for_profile_user_nontrivial_raises_error(self) -> None:
        """Nontrivial test of export_data functionality."""
        self.set_up_non_trivial()
        error_msg = 'Takeout for profile users is not yet supported.'
        with self.assertRaisesRegex(NotImplementedError, error_msg):
            takeout_service.export_data_for_user(self.PROFILE_ID_1)
class TakeoutServiceFullUserUnitTests(test_utils.GenericTestBase):
    """Tests for the takeout service for full user."""

    # IDs for the full user under test and an associated profile user.
    USER_ID_1: Final = 'user_1'
    PROFILE_ID_1: Final = 'profile_1'
    THREAD_ID_1: Final = 'thread_id_1'
    THREAD_ID_2: Final = 'thread_id_2'
    BLOG_POST_ID_1: Final = 'blog_post_id_1'
    BLOG_POST_ID_2: Final = 'blog_post_id_2'
    TOPIC_ID_1: Final = 'topic_id_1'
    TOPIC_ID_2: Final = 'topic_id_2'
    USER_1_ROLE: Final = feconf.ROLE_ID_CURRICULUM_ADMIN
    PROFILE_1_ROLE: Final = feconf.ROLE_ID_MOBILE_LEARNER
    USER_1_EMAIL: Final = 'user1@example.com'
    GENERIC_USERNAME: Final = 'user'
    GENERIC_PIN: Final = '12345'
    GENERIC_DATE: Final = datetime.datetime(2019, 5, 20)
    GENERIC_EPOCH: Final = utils.get_time_in_millisecs(GENERIC_DATE)
    GENERIC_IMAGE_URL: Final = 'www.example.com/example.png'
    GENERIC_USER_BIO: Final = 'I am a user of Oppia!'
    GENERIC_SUBJECT_INTERESTS: Final = ['Math', 'Science']
    GENERIC_LANGUAGE_CODES: Final = ['en', 'es']
    GENERIC_DISPLAY_ALIAS: Final = 'display_alias'
    GENERIC_DISPLAY_ALIAS_2: Final = 'display_alias2'
    # Fixture values for UserStatsModel.
    USER_1_IMPACT_SCORE: Final = 0.87
    USER_1_TOTAL_PLAYS: Final = 33
    USER_1_AVERAGE_RATINGS: Final = 4.37
    USER_1_NUM_RATINGS: Final = 22
    # Weekly creator stats keyed by ISO date strings.
    USER_1_WEEKLY_CREATOR_STATS_LIST: Final = [
        {
            ('2019-05-21'): {
                'average_ratings': 4.00,
                'total_plays': 5
            }
        },
        {
            ('2019-05-28'): {
                'average_ratings': 4.95,
                'total_plays': 10
            }
        }
    ]
    # Activity/progress fixture ids; the *_2 variants belong to the
    # profile user so the export can be checked for correct separation.
    EXPLORATION_IDS: Final = ['exp_1']
    EXPLORATION_IDS_2: Final = ['exp_2']
    STORY_IDS: Final = ['12', '22', '32']
    STORY_IDS_2: Final = ['42', '52', '62']
    TOPIC_IDS_2: Final = ['41', '51', '61']
    CREATOR_IDS: Final = ['4', '8', '16']
    CREATOR_USERNAMES: Final = ['username4', 'username8', 'username16']
    COLLECTION_IDS: Final = ['23', '42', '4']
    COLLECTION_IDS_2: Final = ['32', '44', '6']
    TOPIC_IDS: Final = ['12', '13', '14']
    GENERAL_FEEDBACK_THREAD_IDS: Final = ['42', '4', '8']
    MESSAGE_IDS_READ_BY_USER: Final = [0, 1]
    SKILL_ID_1: Final = 'skill_id_1'
    SKILL_ID_2: Final = 'skill_id_2'
    SKILL_ID_3: Final = 'skill_id_3'
    DEGREE_OF_MASTERY: Final = 0.5
    DEGREE_OF_MASTERY_2: Final = 0.6
    EXP_VERSION: Final = 1
    STATE_NAME: Final = 'state_name'
    STORY_ID_1: Final = 'story_id_1'
    STORY_ID_2: Final = 'story_id_2'
    COMPLETED_NODE_IDS_1: Final = ['node_id_1', 'node_id_2']
    COMPLETED_NODE_IDS_2: Final = ['node_id_3', 'node_id_4']
    LEARNER_GROUP_ID: Final = 'learner_group_1'
    # Feedback-thread fixture values.
    THREAD_ENTITY_TYPE: Final = feconf.ENTITY_TYPE_EXPLORATION
    THREAD_ENTITY_ID: Final = 'exp_id_2'
    THREAD_STATUS: Final = 'open'
    THREAD_SUBJECT: Final = 'dummy subject'
    THREAD_HAS_SUGGESTION: Final = True
    THREAD_SUMMARY: Final = 'This is a great summary.'
    THREAD_MESSAGE_COUNT: Final = 0
    MESSAGE_TEXT: Final = 'Export test text.'
    # NOTE(review): attribute name misspells 'RECEIVED'; kept as-is since
    # other parts of the class (outside this view) may reference it.
    MESSAGE_RECEIEVED_VIA_EMAIL: Final = False
    CHANGE_CMD: Dict[str, str] = {}
    SCORE_CATEGORY_1: Final = 'category_1'
    SCORE_CATEGORY_2: Final = 'category_2'
    SCORE_CATEGORY: str = (
        suggestion_models.SCORE_TYPE_TRANSLATION +
        suggestion_models.SCORE_CATEGORY_DELIMITER + 'English'
    )
    # Snapshot-metadata/commit fixture values.
    GENERIC_MODEL_ID: Final = 'model-id-1'
    COMMIT_TYPE: Final = 'create'
    COMMIT_MESSAGE: Final = 'This is a commit.'
    COMMIT_CMDS: Final = [
        {'cmd': 'some_command'},
        {'cmd2': 'another_command'}
    ]
    # App-feedback-report fixture values.
    PLATFORM_ANDROID: Final = 'android'
    # Timestamp in sec since epoch for Mar 7 2021 21:17:16 UTC.
    REPORT_SUBMITTED_TIMESTAMP: Final = (
        datetime.datetime.fromtimestamp(1615151836)
    )
    # Timestamp in sec since epoch for Mar 19 2021 17:10:36 UTC.
    TICKET_CREATION_TIMESTAMP: Final = (
        datetime.datetime.fromtimestamp(1616173836)
    )
    TICKET_ID: Final = '%s.%s.%s' % (
        'random_hash', TICKET_CREATION_TIMESTAMP.second, '16CharString1234')
    REPORT_TYPE_SUGGESTION: Final = 'suggestion'
    CATEGORY_OTHER: Final = 'other'
    PLATFORM_VERSION: Final = '0.1-alpha-abcdef1234'
    DEVICE_COUNTRY_LOCALE_CODE_INDIA: Final = 'in'
    ANDROID_DEVICE_MODEL: Final = 'Pixel 4a'
    ANDROID_SDK_VERSION: Final = 28
    ENTRY_POINT_NAVIGATION_DRAWER: Final = 'navigation_drawer'
    TEXT_LANGUAGE_CODE_ENGLISH: Final = 'en'
    AUDIO_LANGUAGE_CODE_ENGLISH: Final = 'en'
    ANDROID_REPORT_INFO: Dict[
        str, Union[str, List[str], int, bool, Dict[str, str]]
    ] = {
        'user_feedback_other_text_input': 'add an admin',
        'event_logs': ['event1', 'event2'],
        'logcat_logs': ['logcat1', 'logcat2'],
        'package_version_code': 1,
        'language_locale_code': 'en',
        'entry_point_info': {
            'entry_point_name': 'crash',
        },
        'text_size': 'MEDIUM_TEXT_SIZE',
        'only_allows_wifi_download_and_update': True,
        'automatically_update_topics': False,
        'is_curriculum_admin': False
    }
    ANDROID_REPORT_INFO_SCHEMA_VERSION: Final = 1
    # Contributor-dashboard (translation/question) stats fixture values.
    SUGGESTION_LANGUAGE_CODE: Final = 'en'
    SUBMITTED_TRANSLATIONS_COUNT: Final = 2
    SUBMITTED_TRANSLATION_WORD_COUNT: Final = 100
    ACCEPTED_TRANSLATIONS_COUNT: Final = 1
    ACCEPTED_TRANSLATIONS_WITHOUT_REVIEWER_EDITS_COUNT: Final = 0
    ACCEPTED_TRANSLATION_WORD_COUNT: Final = 50
    REJECTED_TRANSLATIONS_COUNT: Final = 0
    REJECTED_TRANSLATION_WORD_COUNT: Final = 0
    REVIEWED_TRANSLATIONS_COUNT: Final = 0
    REVIEWED_TRANSLATION_WORD_COUNT: Final = 0
    ACCEPTED_TRANSLATIONS_WITH_REVIEWER_EDITS_COUNT: Final = 0
    SUBMITTED_QUESTION_COUNT: Final = 20
    ACCEPTED_QUESTIONS_COUNT: Final = 2
    ACCEPTED_QUESTIONS_WITHOUT_REVIEWER_EDITS_COUNT: Final = 0
    REVIEWED_QUESTIONS_COUNT: Final = 2
    ACCEPTED_QUESTIONS_WITH_REVIEWER_EDITS_COUNT: Final = 0
    # Timestamp dates in sec since epoch for Mar 19 2021 UTC.
    CONTRIBUTION_DATES: Final = [
        datetime.date.fromtimestamp(1616173836),
        datetime.date.fromtimestamp(1616173837)
    ]
    FIRST_CONTRIBUTION_DATE: Final = datetime.datetime(2021, 5, 20)
    LAST_CONTRIBUTION_DATE: Final = datetime.datetime(2022, 5, 20)
    def set_up_non_trivial(self) -> None:
        """Set up all models for use in testing.
        1) Simulates the creation of a user, user_1, and their stats model.
        2) Simulates skill mastery of user_1 with two skills.
        3) Simulates subscriptions to threads, activities, and collections.
        4) Simulates creation and edit of an exploration by user_1.
        5) Creates an ExplorationUserDataModel.
        6) Simulates completion of some activities.
        7) Simulates incomplete status of some activities.
        8) Creates user LearnerGoalsModel.
        9) Populates ExpUserLastPlaythroughModel of user.
        10) Creates user LearnerPlaylistModel.
        11) Simulates collection progress of user.
        12) Simulates story progress of user.
        13) Creates new collection rights.
        14) Simulates a general suggestion.
        15) Creates new exploration rights.
        16) Populates user settings.
        17) Creates two reply-to ids for feedback.
        18) Creates a task closed by the user.
        19) Simulates user_1 scrubbing a report.
        20) Creates new BlogPostModel and BlogPostRightsModel.
        21) Creates a TranslationContributionStatsModel.
        22) Creates new LearnerGroupModel and LearnerGroupsUserModel.
        """
        # Setup for UserStatsModel.
        user_models.UserStatsModel(
            id=self.USER_ID_1,
            impact_score=self.USER_1_IMPACT_SCORE,
            total_plays=self.USER_1_TOTAL_PLAYS,
            average_ratings=self.USER_1_AVERAGE_RATINGS,
            num_ratings=self.USER_1_NUM_RATINGS,
            weekly_creator_stats_list=self.USER_1_WEEKLY_CREATOR_STATS_LIST
        ).put()
        # Setup for UserSkillMasteryModel: two skills for the full user,
        # one for the profile user.
        user_models.UserSkillMasteryModel(
            id=user_models.UserSkillMasteryModel.construct_model_id(
                self.USER_ID_1, self.SKILL_ID_1),
            user_id=self.USER_ID_1,
            skill_id=self.SKILL_ID_1,
            degree_of_mastery=self.DEGREE_OF_MASTERY).put()
        user_models.UserSkillMasteryModel(
            id=user_models.UserSkillMasteryModel.construct_model_id(
                self.USER_ID_1, self.SKILL_ID_2),
            user_id=self.USER_ID_1,
            skill_id=self.SKILL_ID_2,
            degree_of_mastery=self.DEGREE_OF_MASTERY).put()
        user_models.UserSkillMasteryModel(
            id=user_models.UserSkillMasteryModel.construct_model_id(
                self.PROFILE_ID_1, self.SKILL_ID_3),
            user_id=self.PROFILE_ID_1,
            skill_id=self.SKILL_ID_3,
            degree_of_mastery=self.DEGREE_OF_MASTERY_2).put()
        # Setup for UserSubscriptionsModel. Settings models for the
        # subscribed-to creators must exist first so usernames resolve.
        for creator_id in self.CREATOR_IDS:
            user_models.UserSettingsModel(
                id=creator_id,
                username='username' + creator_id,
                email=creator_id + '@example.com'
            ).put()
        user_models.UserSubscriptionsModel(
            id=self.USER_ID_1, creator_ids=self.CREATOR_IDS,
            collection_ids=self.COLLECTION_IDS,
            exploration_ids=self.EXPLORATION_IDS,
            general_feedback_thread_ids=self.GENERAL_FEEDBACK_THREAD_IDS,
            last_checked=self.GENERIC_DATE).put()
        # Setup for UserContributionsModel: creating and then editing an
        # exploration registers user_1 as a contributor.
        self.save_new_valid_exploration(
            self.EXPLORATION_IDS[0], self.USER_ID_1, end_state_name='End')
        exp_services.update_exploration(
            self.USER_ID_1, self.EXPLORATION_IDS[0],
            [exp_domain.ExplorationChange({
                'cmd': 'edit_exploration_property',
                'property_name': 'objective',
                'new_value': 'the objective'
            })], 'Test edit')
        # Setup for ExplorationUserDataModel.
        user_models.ExplorationUserDataModel(
            id='%s.%s' % (self.USER_ID_1, self.EXPLORATION_IDS[0]),
            user_id=self.USER_ID_1,
            exploration_id=self.EXPLORATION_IDS[0], rating=2,
            rated_on=self.GENERIC_DATE,
            draft_change_list={'new_content': {}},
            draft_change_list_last_updated=self.GENERIC_DATE,
            draft_change_list_exp_version=3,
            draft_change_list_id=1).put()
        # Setup for CompletedActivitiesModel.
        user_models.CompletedActivitiesModel(
            id=self.USER_ID_1,
            exploration_ids=self.EXPLORATION_IDS,
            collection_ids=self.COLLECTION_IDS,
            story_ids=self.STORY_IDS,
            learnt_topic_ids=self.TOPIC_IDS).put()
        user_models.CompletedActivitiesModel(
            id=self.PROFILE_ID_1,
            exploration_ids=self.EXPLORATION_IDS_2,
            collection_ids=self.COLLECTION_IDS_2,
            story_ids=self.STORY_IDS_2,
            learnt_topic_ids=self.TOPIC_IDS_2).put()
        # Setup for IncompleteActivitiesModel.
        user_models.IncompleteActivitiesModel(
            id=self.USER_ID_1,
            exploration_ids=self.EXPLORATION_IDS,
            collection_ids=self.COLLECTION_IDS,
            story_ids=self.STORY_IDS,
            partially_learnt_topic_ids=self.TOPIC_IDS).put()
        # Setup for ExpUserLastPlaythroughModel.
        user_models.ExpUserLastPlaythroughModel(
            id='%s.%s' % (self.USER_ID_1, self.EXPLORATION_IDS[0]),
            user_id=self.USER_ID_1, exploration_id=self.EXPLORATION_IDS[0],
            last_played_exp_version=self.EXP_VERSION,
            last_played_state_name=self.STATE_NAME).put()
        # Setup for LearnerGoalsModel.
        user_models.LearnerGoalsModel(
            id=self.USER_ID_1,
            topic_ids_to_learn=self.TOPIC_IDS).put()
        user_models.LearnerGoalsModel(
            id=self.PROFILE_ID_1,
            topic_ids_to_learn=self.TOPIC_IDS_2).put()
        # Setup for LearnerPlaylistModel.
        user_models.LearnerPlaylistModel(
            id=self.USER_ID_1,
            exploration_ids=self.EXPLORATION_IDS,
            collection_ids=self.COLLECTION_IDS).put()
        user_models.LearnerPlaylistModel(
            id=self.PROFILE_ID_1,
            exploration_ids=self.EXPLORATION_IDS_2,
            collection_ids=self.COLLECTION_IDS_2).put()
        # Setup for CollectionProgressModel.
        user_models.CollectionProgressModel(
            id='%s.%s' % (self.USER_ID_1, self.COLLECTION_IDS[0]),
            user_id=self.USER_ID_1,
            collection_id=self.COLLECTION_IDS[0],
            completed_explorations=self.EXPLORATION_IDS).put()
        user_models.CollectionProgressModel(
            id='%s.%s' % (self.PROFILE_ID_1, self.COLLECTION_IDS_2[0]),
            user_id=self.PROFILE_ID_1,
            collection_id=self.COLLECTION_IDS_2[0],
            completed_explorations=self.EXPLORATION_IDS_2).put()
        # Setup for StoryProgressModel.
        user_models.StoryProgressModel(
            id='%s.%s' % (self.USER_ID_1, self.STORY_ID_1),
            user_id=self.USER_ID_1,
            story_id=self.STORY_ID_1,
            completed_node_ids=self.COMPLETED_NODE_IDS_1).put()
        user_models.StoryProgressModel(
            id='%s.%s' % (self.PROFILE_ID_1, self.STORY_ID_2),
            user_id=self.PROFILE_ID_1,
            story_id=self.STORY_ID_2,
            completed_node_ids=self.COMPLETED_NODE_IDS_2).put()
        # Setup for CollectionRightsModel.
        collection_models.CollectionRightsModel(
            id=self.COLLECTION_IDS[0],
            owner_ids=[self.USER_ID_1],
            editor_ids=[self.USER_ID_1],
            voice_artist_ids=[self.USER_ID_1],
            viewer_ids=[self.USER_ID_1],
            community_owned=False,
            status=constants.ACTIVITY_STATUS_PUBLIC,
            viewable_if_private=False,
            first_published_msec=0.0
        ).save(
            'cid', 'Created new collection right',
            [{'cmd': rights_domain.CMD_CREATE_NEW}])
        # Setup for GeneralSuggestionModel.
        suggestion_models.GeneralSuggestionModel.create(
            feconf.SUGGESTION_TYPE_EDIT_STATE_CONTENT,
            feconf.ENTITY_TYPE_EXPLORATION,
            self.EXPLORATION_IDS[0], 1,
            suggestion_models.STATUS_IN_REVIEW, self.USER_ID_1,
            'reviewer_1', self.CHANGE_CMD, self.SCORE_CATEGORY,
            'exploration.exp1.thread_1', None)
        # Setup for TopicRightsModel: user_1 manages both topics.
        topic_models.TopicRightsModel(
            id=self.TOPIC_ID_1,
            manager_ids=[self.USER_ID_1],
            topic_is_published=True
        ).commit(
            'committer_id',
            'New topic rights',
            [{'cmd': topic_domain.CMD_CREATE_NEW}])
        topic_models.TopicRightsModel(
            id=self.TOPIC_ID_2,
            manager_ids=[self.USER_ID_1],
            topic_is_published=True
        ).commit(
            'committer_id',
            'New topic rights',
            [{'cmd': topic_domain.CMD_CREATE_NEW}])
        # Setup for ExplorationRightsModel.
        exploration_models.ExplorationRightsModel(
            id=self.EXPLORATION_IDS[0],
            owner_ids=[self.USER_ID_1],
            editor_ids=[self.USER_ID_1],
            voice_artist_ids=[self.USER_ID_1],
            viewer_ids=[self.USER_ID_1],
            community_owned=False,
            status=constants.ACTIVITY_STATUS_PUBLIC,
            viewable_if_private=False,
            first_published_msec=0.0
        ).save(
            'cid', 'Created new exploration right',
            [{'cmd': rights_domain.CMD_CREATE_NEW}])
        # Setup for UserSettingsModel: fully-populated record for the
        # full user (including a PIN) and a sparse one for the profile.
        user_models.UserSettingsModel(
            id=self.USER_ID_1,
            email=self.USER_1_EMAIL,
            roles=[self.USER_1_ROLE],
            username=self.GENERIC_USERNAME,
            normalized_username=self.GENERIC_USERNAME,
            last_agreed_to_terms=self.GENERIC_DATE,
            last_started_state_editor_tutorial=self.GENERIC_DATE,
            last_started_state_translation_tutorial=self.GENERIC_DATE,
            last_logged_in=self.GENERIC_DATE,
            last_created_an_exploration=self.GENERIC_DATE,
            last_edited_an_exploration=self.GENERIC_DATE,
            profile_picture_data_url=self.GENERIC_IMAGE_URL,
            default_dashboard='learner', creator_dashboard_display_pref='card',
            user_bio=self.GENERIC_USER_BIO,
            subject_interests=self.GENERIC_SUBJECT_INTERESTS,
            first_contribution_msec=1,
            preferred_language_codes=self.GENERIC_LANGUAGE_CODES,
            preferred_site_language_code=self.GENERIC_LANGUAGE_CODES[0],
            preferred_audio_language_code=self.GENERIC_LANGUAGE_CODES[0],
            display_alias=self.GENERIC_DISPLAY_ALIAS,
            pin=self.GENERIC_PIN
        ).put()
        user_models.UserSettingsModel(
            id=self.PROFILE_ID_1,
            email=self.USER_1_EMAIL,
            roles=[self.PROFILE_1_ROLE],
            username=None,
            normalized_username=None,
            last_agreed_to_terms=self.GENERIC_DATE,
            last_started_state_editor_tutorial=None,
            last_started_state_translation_tutorial=None,
            last_logged_in=self.GENERIC_DATE,
            last_created_an_exploration=None,
            last_edited_an_exploration=None,
            profile_picture_data_url=None,
            default_dashboard='learner', creator_dashboard_display_pref='card',
            user_bio=self.GENERIC_USER_BIO,
            subject_interests=self.GENERIC_SUBJECT_INTERESTS,
            first_contribution_msec=None,
            preferred_language_codes=self.GENERIC_LANGUAGE_CODES,
            preferred_site_language_code=self.GENERIC_LANGUAGE_CODES[0],
            preferred_audio_language_code=self.GENERIC_LANGUAGE_CODES[0],
            display_alias=self.GENERIC_DISPLAY_ALIAS_2
        ).put()
        # Setup for contributor-dashboard stats models (translation and
        # question contribution/review stats for user_1).
        suggestion_models.TranslationContributionStatsModel.create(
            language_code=self.SUGGESTION_LANGUAGE_CODE,
            contributor_user_id=self.USER_ID_1,
            topic_id=self.TOPIC_ID_1,
            submitted_translations_count=self.SUBMITTED_TRANSLATIONS_COUNT,
            submitted_translation_word_count=(
                self.SUBMITTED_TRANSLATION_WORD_COUNT),
            accepted_translations_count=self.ACCEPTED_TRANSLATIONS_COUNT,
            accepted_translations_without_reviewer_edits_count=(
                self.ACCEPTED_TRANSLATIONS_WITHOUT_REVIEWER_EDITS_COUNT),
            accepted_translation_word_count=(
                self.ACCEPTED_TRANSLATION_WORD_COUNT),
            rejected_translations_count=self.REJECTED_TRANSLATIONS_COUNT,
            rejected_translation_word_count=(
                self.REJECTED_TRANSLATION_WORD_COUNT),
            contribution_dates=self.CONTRIBUTION_DATES
        )
        suggestion_models.TranslationReviewStatsModel.create(
            language_code=self.SUGGESTION_LANGUAGE_CODE,
            reviewer_user_id=self.USER_ID_1,
            topic_id=self.TOPIC_ID_1,
            reviewed_translations_count=self.REVIEWED_TRANSLATIONS_COUNT,
            reviewed_translation_word_count=(
                self.REVIEWED_TRANSLATION_WORD_COUNT),
            accepted_translations_count=self.ACCEPTED_TRANSLATIONS_COUNT,
            accepted_translations_with_reviewer_edits_count=(
                self.ACCEPTED_TRANSLATIONS_WITH_REVIEWER_EDITS_COUNT),
            accepted_translation_word_count=(
                self.ACCEPTED_TRANSLATION_WORD_COUNT),
            first_contribution_date=self.FIRST_CONTRIBUTION_DATE,
            last_contribution_date=self.LAST_CONTRIBUTION_DATE
        )
        suggestion_models.QuestionContributionStatsModel.create(
            contributor_user_id=self.USER_ID_1,
            topic_id=self.TOPIC_ID_1,
            submitted_questions_count=self.SUBMITTED_QUESTION_COUNT,
            accepted_questions_count=self.ACCEPTED_QUESTIONS_COUNT,
            accepted_questions_without_reviewer_edits_count=(
                self.ACCEPTED_QUESTIONS_WITHOUT_REVIEWER_EDITS_COUNT),
            first_contribution_date=self.FIRST_CONTRIBUTION_DATE,
            last_contribution_date=self.LAST_CONTRIBUTION_DATE
        )
        suggestion_models.QuestionReviewStatsModel.create(
            reviewer_user_id=self.USER_ID_1,
            topic_id=self.TOPIC_ID_1,
            reviewed_questions_count=self.REVIEWED_QUESTIONS_COUNT,
            accepted_questions_count=self.ACCEPTED_QUESTIONS_COUNT,
            accepted_questions_with_reviewer_edits_count=(
                self.ACCEPTED_QUESTIONS_WITH_REVIEWER_EDITS_COUNT),
            first_contribution_date=self.FIRST_CONTRIBUTION_DATE,
            last_contribution_date=self.LAST_CONTRIBUTION_DATE
        )
        # Setup for UserContributionRightsModel and per-category
        # contribution proficiency scores.
        user_models.UserContributionRightsModel(
            id=self.USER_ID_1,
            can_review_translation_for_language_codes=['hi', 'en'],
            can_review_voiceover_for_language_codes=['hi'],
            can_review_questions=True).put()
        user_models.UserContributionProficiencyModel(
            id='%s.%s' % (self.SCORE_CATEGORY_1, self.USER_ID_1),
            user_id=self.USER_ID_1,
            score_category=self.SCORE_CATEGORY_1,
            score=1.5,
            onboarding_email_sent=False
        ).put()
        user_models.UserContributionProficiencyModel(
            id='%s.%s' % (self.SCORE_CATEGORY_2, self.USER_ID_1),
            user_id=self.USER_ID_1,
            score_category=self.SCORE_CATEGORY_2,
            score=2,
            onboarding_email_sent=False
        ).put()
        # Setup for the various snapshot-metadata models, all committed
        # by user_1 so they appear in the export.
        collection_models.CollectionRightsSnapshotMetadataModel(
            id=self.GENERIC_MODEL_ID, committer_id=self.USER_ID_1,
            commit_type=self.COMMIT_TYPE, commit_message=self.COMMIT_MESSAGE,
            commit_cmds=self.COMMIT_CMDS
        ).put()
        collection_models.CollectionSnapshotMetadataModel(
            id=self.GENERIC_MODEL_ID, committer_id=self.USER_ID_1,
            commit_type=self.COMMIT_TYPE, commit_message=self.COMMIT_MESSAGE,
            commit_cmds=self.COMMIT_CMDS
        ).put()
        skill_models.SkillSnapshotMetadataModel(
            id=self.GENERIC_MODEL_ID, committer_id=self.USER_ID_1,
            commit_type=self.COMMIT_TYPE, commit_message=self.COMMIT_MESSAGE,
            commit_cmds=self.COMMIT_CMDS
        ).put()
        subtopic_models.SubtopicPageSnapshotMetadataModel(
            id=self.GENERIC_MODEL_ID, committer_id=self.USER_ID_1,
            commit_type=self.COMMIT_TYPE, commit_message=self.COMMIT_MESSAGE,
            commit_cmds=self.COMMIT_CMDS
        ).put()
        topic_models.TopicRightsSnapshotMetadataModel(
            id=self.GENERIC_MODEL_ID, committer_id=self.USER_ID_1,
            commit_type=self.COMMIT_TYPE, commit_message=self.COMMIT_MESSAGE,
            commit_cmds=self.COMMIT_CMDS
        ).put()
        topic_models.TopicSnapshotMetadataModel(
            id=self.GENERIC_MODEL_ID, committer_id=self.USER_ID_1,
            commit_type=self.COMMIT_TYPE, commit_message=self.COMMIT_MESSAGE,
            commit_cmds=self.COMMIT_CMDS
        ).put()
        story_models.StorySnapshotMetadataModel(
            id=self.GENERIC_MODEL_ID, committer_id=self.USER_ID_1,
            commit_type=self.COMMIT_TYPE, commit_message=self.COMMIT_MESSAGE,
            commit_cmds=self.COMMIT_CMDS
        ).put()
        question_models.QuestionSnapshotMetadataModel(
            id=self.GENERIC_MODEL_ID, committer_id=self.USER_ID_1,
            commit_type=self.COMMIT_TYPE, commit_message=self.COMMIT_MESSAGE,
            commit_cmds=self.COMMIT_CMDS
        ).put()
        config_models.ConfigPropertySnapshotMetadataModel(
            id=self.GENERIC_MODEL_ID, committer_id=self.USER_ID_1,
            commit_type=self.COMMIT_TYPE, commit_message=self.COMMIT_MESSAGE,
            commit_cmds=self.COMMIT_CMDS
        ).put()
        exploration_models.ExplorationRightsSnapshotMetadataModel(
            id=self.GENERIC_MODEL_ID, committer_id=self.USER_ID_1,
            commit_type=self.COMMIT_TYPE, commit_message=self.COMMIT_MESSAGE,
            commit_cmds=self.COMMIT_CMDS
        ).put()
        # Setup for an improvements task resolved by user_1.
        improvements_models.TaskEntryModel(
            id=self.GENERIC_MODEL_ID,
            composite_entity_id=self.GENERIC_MODEL_ID,
            entity_type=constants.TASK_ENTITY_TYPE_EXPLORATION,
            entity_id=self.GENERIC_MODEL_ID,
            entity_version=1,
            task_type=constants.TASK_TYPE_HIGH_BOUNCE_RATE,
            target_type=constants.TASK_TARGET_TYPE_STATE,
            target_id=self.GENERIC_MODEL_ID,
            status=constants.TASK_STATUS_OPEN,
            resolver_id=self.USER_ID_1
        ).put()
        config_models.PlatformParameterSnapshotMetadataModel(
            id=self.GENERIC_MODEL_ID, committer_id=self.USER_ID_1,
            commit_type=self.COMMIT_TYPE, commit_message=self.COMMIT_MESSAGE,
            commit_cmds=self.COMMIT_CMDS
        ).put()
        user_models.UserEmailPreferencesModel(
            id=self.USER_ID_1,
            site_updates=False,
            editor_role_notifications=False,
            feedback_message_notifications=False,
            subscription_notifications=False
        ).put()
        # Links profile_1 to user_1 as its parent account.
        auth_models.UserAuthDetailsModel(
            id=self.USER_ID_1,
            parent_user_id=self.PROFILE_ID_1
        ).put()
        # Setup for AppFeedbackReportModel scrubbed by user: the report is
        # first stored unscrubbed, then re-fetched and marked as scrubbed
        # by user_1 (mirroring the production scrubbing flow).
        report_id = '%s.%s.%s' % (
            self.PLATFORM_ANDROID, self.REPORT_SUBMITTED_TIMESTAMP.second,
            'randomInteger123')
        app_feedback_report_models.AppFeedbackReportModel(
            id=report_id,
            platform=self.PLATFORM_ANDROID,
            scrubbed_by=None,
            ticket_id='%s.%s.%s' % (
                'random_hash', self.TICKET_CREATION_TIMESTAMP.second,
                '16CharString1234'),
            submitted_on=self.REPORT_SUBMITTED_TIMESTAMP,
            local_timezone_offset_hrs=0,
            report_type=self.REPORT_TYPE_SUGGESTION,
            category=self.CATEGORY_OTHER,
            platform_version=self.PLATFORM_VERSION,
            android_device_country_locale_code=(
                self.DEVICE_COUNTRY_LOCALE_CODE_INDIA),
            android_device_model=self.ANDROID_DEVICE_MODEL,
            android_sdk_version=self.ANDROID_SDK_VERSION,
            entry_point=self.ENTRY_POINT_NAVIGATION_DRAWER,
            text_language_code=self.TEXT_LANGUAGE_CODE_ENGLISH,
            audio_language_code=self.AUDIO_LANGUAGE_CODE_ENGLISH,
            android_report_info=self.ANDROID_REPORT_INFO,
            android_report_info_schema_version=(
                self.ANDROID_REPORT_INFO_SCHEMA_VERSION)
        ).put()
        report_entity = (
            app_feedback_report_models.AppFeedbackReportModel.get_by_id(
                report_id))
        report_entity.scrubbed_by = self.USER_ID_1
        report_entity.update_timestamps()
        report_entity.put()
        # Setup for the BlogPostModel plus rights models for one published
        # and one unpublished post, both editable by user_1.
        blog_post_model = blog_models.BlogPostModel(
            id=self.BLOG_POST_ID_1,
            author_id=self.USER_ID_1,
            content='content sample',
            title='sample title',
            published_on=datetime.datetime.utcnow(),
            url_fragment='sample-url-fragment',
            tags=['tag', 'one'],
            thumbnail_filename='thumbnail'
        )
        blog_post_model.update_timestamps()
        blog_post_model.put()
        blog_post_rights_for_post_1 = blog_models.BlogPostRightsModel(
            id=self.BLOG_POST_ID_1,
            editor_ids=[self.USER_ID_1],
            blog_post_is_published=True,
        )
        blog_post_rights_for_post_1.update_timestamps()
        blog_post_rights_for_post_1.put()
        blog_post_rights_for_post_2 = blog_models.BlogPostRightsModel(
            id=self.BLOG_POST_ID_2,
            editor_ids=[self.USER_ID_1],
            blog_post_is_published=False,
        )
        blog_post_rights_for_post_2.update_timestamps()
        blog_post_rights_for_post_2.put()
        blog_models.BlogAuthorDetailsModel.create(
            author_id=self.USER_ID_1,
            displayed_author_name='general name',
            author_bio='general blog author'
        )
        # Setup for LearnerGroupModel facilitated by user_1.
        # NOTE(review): story_ids reuses skill-style ids ('skill_id_1',
        # 'skill_id_2'); looks like fixture copy-paste -- confirm intended.
        learner_group_model = learner_group_models.LearnerGroupModel(
            id=self.LEARNER_GROUP_ID,
            title='sample title',
            description='sample description',
            facilitator_user_ids=[self.USER_ID_1],
            learner_user_ids=['user_id_2'],
            invited_learner_user_ids=['user_id_3'],
            subtopic_page_ids=['subtopic_id_1', 'subtopic_id_2'],
            story_ids=['skill_id_1', 'skill_id_2']
        )
        learner_group_model.update_timestamps()
        learner_group_model.put()
        # Setup for LearnerGroupsUserModel for user_1.
        learner_grp_user_model = user_models.LearnerGroupsUserModel(
            id=self.USER_ID_1,
            invited_to_learner_groups_ids=['group_id_1'],
            learner_groups_user_details=[
                {
                    'group_id': 'group_id_2',
                    'progress_sharing_is_turned_on': False
                }
            ],
            learner_groups_user_details_schema_version=1
        )
        learner_grp_user_model.update_timestamps()
        learner_grp_user_model.put()
def set_up_trivial(self) -> None:
"""Setup for trivial test of export_data functionality."""
user_models.UserSettingsModel(
id=self.USER_ID_1,
email=self.USER_1_EMAIL,
roles=[self.USER_1_ROLE]
).put()
user_models.UserSettingsModel(
id=self.PROFILE_ID_1,
email=self.USER_1_EMAIL,
roles=[self.PROFILE_1_ROLE]
).put()
user_models.UserSubscriptionsModel(id=self.USER_ID_1).put()
def test_export_nonexistent_full_user_raises_error(self) -> None:
"""Setup for nonexistent user test of export_data functionality."""
with self.assertRaisesRegex(
user_models.UserSettingsModel.EntityNotFoundError,
'Entity for class UserSettingsModel with id fake_user_id '
'not found'):
takeout_service.export_data_for_user('fake_user_id')
def test_export_data_for_full_user_trivial_is_correct(self) -> None:
"""Trivial test of export_data functionality."""
self.set_up_trivial()
self.maxDiff = 0
# Generate expected output.
app_feedback_report: Dict[str, Dict[str, Union[str, int]]] = {}
collection_progress_data: Dict[str, List[str]] = {}
collection_rights_data: Dict[str, List[str]] = {
'editable_collection_ids': [],
'owned_collection_ids': [],
'viewable_collection_ids': [],
'voiced_collection_ids': []
}
completed_activities_data: Dict[str, List[str]] = {}
contribution_data: Dict[str, List[str]] = {}
exploration_rights_data: Dict[str, List[str]] = {
'editable_exploration_ids': [],
'owned_exploration_ids': [],
'viewable_exploration_ids': [],
'voiced_exploration_ids': []
}
exploration_data: Dict[
str,
Dict[str, Union[str, int, Dict[str, str]]]
] = {}
general_feedback_message_data: Dict[
str, Dict[str, Union[int, str]]
] = {}
general_feedback_thread_data: Dict[str, Dict[str, Union[int, str]]] = {}
general_feedback_thread_user_data: Dict[str, Dict[str, List[int]]] = {}
general_suggestion_data: Dict[str, Dict[str, Union[int, str]]] = {}
last_playthrough_data: Dict[str, Dict[str, Union[int, str]]] = {}
learner_goals_data: Dict[str, List[str]] = {}
learner_playlist_data: Dict[str, List[str]] = {}
incomplete_activities_data: Dict[str, List[str]] = {}
user_settings_data: Dict[str, Union[List[str], Optional[str], int]] = {
'email': 'user1@example.com',
'roles': [feconf.ROLE_ID_CURRICULUM_ADMIN],
'banned': False,
'username': None,
'normalized_username': None,
'last_agreed_to_terms_msec': None,
'last_started_state_editor_tutorial_msec': None,
'last_started_state_translation_tutorial_msec': None,
'last_logged_in_msec': None,
'last_edited_an_exploration_msec': None,
'last_created_an_exploration_msec': None,
'profile_picture_filename': None,
'default_dashboard': 'learner',
'creator_dashboard_display_pref': 'card',
'user_bio': None,
'subject_interests': [],
'first_contribution_msec': None,
'preferred_language_codes': [],
'preferred_site_language_code': None,
'preferred_audio_language_code': None,
'preferred_translation_language_code': None,
'display_alias': None,
'has_viewed_lesson_info_modal_once': False,
}
skill_data: Dict[str, str] = {}
stats_data: Dict[str, stats_domain.AggregatedStatsDict] = {}
story_progress_data: Dict[str, List[str]] = {}
subscriptions_data: Dict[str, Optional[List[str]]] = {
'exploration_ids': [],
'collection_ids': [],
'creator_usernames': [],
'general_feedback_thread_ids': [],
'last_checked_msec': None
}
task_entry_data: Dict[str, List[str]] = {
'task_ids_resolved_by_user': [],
'issue_descriptions': [],
'resolution_msecs': [],
'statuses': []
}
topic_rights_data: Dict[str, List[str]] = {
'managed_topic_ids': []
}
expected_contrib_proficiency_data: Dict[
str, Dict[str, Union[int, bool]]
] = {}
expected_contribution_rights_data: Dict[
str, Union[bool, List[str]]
] = {}
expected_collection_rights_sm: Dict[str, Dict[str, Dict[str, str]]] = {}
expected_collection_sm: Dict[str, Dict[str, Dict[str, str]]] = {}
expected_skill_sm: Dict[str, Dict[str, Dict[str, str]]] = {}
expected_subtopic_page_sm: Dict[str, Dict[str, Dict[str, str]]] = {}
expected_topic_rights_sm: Dict[str, Dict[str, Dict[str, str]]] = {}
expected_topic_sm: Dict[str, Dict[str, Dict[str, str]]] = {}
expected_translation_contribution_stats: Dict[
str, Dict[str, Dict[str, str]]
] = {}
expected_translation_review_stats: Dict[
str, Dict[str, Dict[str, str]]
] = {}
expected_question_contribution_stats: Dict[
str, Dict[str, Dict[str, str]]
] = {}
expected_question_review_stats: Dict[
str, Dict[str, Dict[str, str]]
] = {}
expected_story_sm: Dict[str, Dict[str, Dict[str, str]]] = {}
expected_question_sm: Dict[str, Dict[str, Dict[str, str]]] = {}
expected_config_property_sm: Dict[str, Dict[str, Dict[str, str]]] = {}
expected_exploration_rights_sm: Dict[
str, Dict[str, Dict[str, str]]
] = {}
expected_exploration_sm: Dict[str, Dict[str, Dict[str, str]]] = {}
expected_platform_parameter_sm: Dict[
str, Dict[str, Dict[str, str]]
] = {}
expected_user_auth_details: Dict[str, str] = {}
expected_user_email_preferences: Dict[str, str] = {}
expected_blog_post_data: Dict[str, Union[str, float, List[str]]] = {}
expected_blog_post_rights: Dict[str, List[str]] = {
'editable_blog_post_ids': []
}
expected_blog_author_details: Dict[str, Dict[str, str]] = {}
expected_learner_group_model_data: Dict[str, str] = {}
expected_learner_grp_user_model_data: Dict[str, str] = {}
# Here we use type Any because this dictionary contains other
# different types of dictionaries whose values can vary from int
# to complex Union types. So, to make this Dict generalized for
# every other Dict. We used Any here.
expected_user_data: Dict[str, Dict[str, Any]] = {
'app_feedback_report': app_feedback_report,
'blog_post': expected_blog_post_data,
'blog_post_rights': expected_blog_post_rights,
'blog_author_details': expected_blog_author_details,
'user_stats': stats_data,
'user_settings': user_settings_data,
'user_subscriptions': subscriptions_data,
'user_skill_mastery': skill_data,
'user_contributions': contribution_data,
'exploration_user_data': exploration_data,
'completed_activities': completed_activities_data,
'incomplete_activities': incomplete_activities_data,
'exp_user_last_playthrough': last_playthrough_data,
'learner_goals': learner_goals_data,
'learner_playlist': learner_playlist_data,
'learner_group': expected_learner_group_model_data,
'learner_groups_user': expected_learner_grp_user_model_data,
'task_entry': task_entry_data,
'topic_rights': topic_rights_data,
'collection_progress': collection_progress_data,
'story_progress': story_progress_data,
'general_feedback_thread': general_feedback_thread_data,
'general_feedback_thread_user':
general_feedback_thread_user_data,
'general_feedback_message': general_feedback_message_data,
'collection_rights': collection_rights_data,
'general_suggestion': general_suggestion_data,
'exploration_rights': exploration_rights_data,
'user_contribution_proficiency': expected_contrib_proficiency_data,
'user_contribution_rights': expected_contribution_rights_data,
'collection_rights_snapshot_metadata':
expected_collection_rights_sm,
'collection_snapshot_metadata':
expected_collection_sm,
'skill_snapshot_metadata':
expected_skill_sm,
'subtopic_page_snapshot_metadata':
expected_subtopic_page_sm,
'topic_rights_snapshot_metadata':
expected_topic_rights_sm,
'topic_snapshot_metadata': expected_topic_sm,
'translation_contribution_stats':
expected_translation_contribution_stats,
'translation_review_stats':
expected_translation_review_stats,
'question_contribution_stats':
expected_question_contribution_stats,
'question_review_stats':
expected_question_review_stats,
'story_snapshot_metadata': expected_story_sm,
'question_snapshot_metadata': expected_question_sm,
'config_property_snapshot_metadata':
expected_config_property_sm,
'exploration_rights_snapshot_metadata':
expected_exploration_rights_sm,
'exploration_snapshot_metadata': expected_exploration_sm,
'platform_parameter_snapshot_metadata':
expected_platform_parameter_sm,
'user_auth_details': expected_user_auth_details,
'user_email_preferences': expected_user_email_preferences
}
# Perform export and compare.
user_takeout_object = takeout_service.export_data_for_user(
self.USER_ID_1)
observed_data = user_takeout_object.user_data
observed_images = user_takeout_object.user_images
self.assertEqual(expected_user_data, observed_data)
observed_json = json.dumps(observed_data)
expected_json = json.dumps(expected_user_data)
self.assertEqual(json.loads(expected_json), json.loads(observed_json))
expected_images: List[takeout_domain.TakeoutImage] = []
self.assertEqual(expected_images, observed_images)
def test_export_data_for_full_user_when_user_id_is_leaked_fails(
self
) -> None:
user_models.UserSettingsModel(
id=self.USER_ID_1,
email=self.USER_1_EMAIL,
roles=[self.USER_1_ROLE],
user_bio='I want to leak uid_abcdefghijabcdefghijabcdefghijab'
).put()
with self.capture_logging(min_level=logging.ERROR) as log_messages:
takeout_service.export_data_for_user(self.USER_ID_1)
self.assertEqual(
[
'[TAKEOUT] User ID (uid_abcdefghijabcdefghijabcdefghijab) '
'found in the JSON generated for UserSettingsModel and '
'user with ID user_1'
],
log_messages
)
def test_exports_have_single_takeout_dict_key(self) -> None:
"""Test to ensure that all export policies that specify a key for the
Takeout dict are also models that specify this policy are type
MULTIPLE_INSTANCES_PER_USER.
"""
self.set_up_non_trivial()
# We set up the feedback_thread_model here so that we can easily
# access it when computing the expected data later.
feedback_thread_model = feedback_models.GeneralFeedbackThreadModel(
entity_type=self.THREAD_ENTITY_TYPE,
entity_id=self.THREAD_ENTITY_ID,
original_author_id=self.USER_ID_1,
status=self.THREAD_STATUS,
subject=self.THREAD_SUBJECT,
has_suggestion=self.THREAD_HAS_SUGGESTION,
summary=self.THREAD_SUMMARY,
message_count=self.THREAD_MESSAGE_COUNT
)
feedback_thread_model.put()
thread_id = feedback_services.create_thread(
self.THREAD_ENTITY_TYPE,
self.THREAD_ENTITY_ID,
self.USER_ID_1,
self.THREAD_SUBJECT,
self.MESSAGE_TEXT
)
feedback_services.create_message(
thread_id,
self.USER_ID_1,
self.THREAD_STATUS,
self.THREAD_SUBJECT,
self.MESSAGE_TEXT
)
# Retrieve all models for export.
all_models = [
clazz
for clazz in test_utils.get_storage_model_classes()
if (not clazz.__name__ in
test_utils.BASE_MODEL_CLASSES_WITHOUT_DATA_POLICIES)
]
for model in all_models:
export_method = model.get_model_association_to_user()
export_policy = model.get_export_policy()
num_takeout_keys = 0
for field_export_policy in export_policy.values():
if (field_export_policy ==
base_models
.EXPORT_POLICY
.EXPORTED_AS_KEY_FOR_TAKEOUT_DICT):
num_takeout_keys += 1
if (export_method ==
base_models.MODEL_ASSOCIATION_TO_USER
.MULTIPLE_INSTANCES_PER_USER):
# If the id is used as a Takeout key, then we should not
# have any fields exported as the key for the Takeout.
self.assertEqual(
num_takeout_keys,
0 if model.ID_IS_USED_AS_TAKEOUT_KEY else 1)
else:
self.assertEqual(num_takeout_keys, 0)
    def test_exports_follow_export_policies(self) -> None:
        """Test to ensure that all fields that should be exported
        per the export policy are exported, and exported in the proper format.
        """
        self.set_up_non_trivial()
        # We set up the feedback_thread_model here so that we can easily
        # access it when computing the expected data later.
        feedback_thread_model = feedback_models.GeneralFeedbackThreadModel(
            entity_type=self.THREAD_ENTITY_TYPE,
            entity_id=self.THREAD_ENTITY_ID,
            original_author_id=self.USER_ID_1,
            status=self.THREAD_STATUS,
            subject=self.THREAD_SUBJECT,
            has_suggestion=self.THREAD_HAS_SUGGESTION,
            summary=self.THREAD_SUMMARY,
            message_count=self.THREAD_MESSAGE_COUNT
        )
        feedback_thread_model.put()
        thread_id = feedback_services.create_thread(
            self.THREAD_ENTITY_TYPE,
            self.THREAD_ENTITY_ID,
            self.USER_ID_1,
            self.THREAD_SUBJECT,
            self.MESSAGE_TEXT
        )
        feedback_services.create_message(
            thread_id,
            self.USER_ID_1,
            self.THREAD_STATUS,
            self.THREAD_SUBJECT,
            self.MESSAGE_TEXT
        )
        # Retrieve all models for export.
        all_models = [
            clazz
            for clazz in test_utils.get_storage_model_classes()
            if (not clazz.__name__ in
                test_utils.BASE_MODEL_CLASSES_WITHOUT_DATA_POLICIES)
        ]
        # Iterate over models and test export policies.
        for model in all_models:
            export_method = model.get_model_association_to_user()
            export_policy = model.get_export_policy()
            renamed_export_keys = model.get_field_names_for_takeout()
            # Collect the (possibly renamed) field names the policy says
            # must appear in the export, and note any field designated as
            # the key for the Takeout dict.
            exported_field_names = []
            field_used_as_key_for_takeout_dict = None
            for field_name in model._properties: # pylint: disable=protected-access
                if (export_policy[field_name] ==
                        base_models.EXPORT_POLICY.EXPORTED):
                    if field_name in renamed_export_keys:
                        exported_field_names.append(
                            renamed_export_keys[field_name]
                        )
                    else:
                        exported_field_names.append(field_name)
                elif (export_policy[field_name] ==
                      base_models
                      .EXPORT_POLICY.EXPORTED_AS_KEY_FOR_TAKEOUT_DICT):
                    field_used_as_key_for_takeout_dict = field_name
            # Models not tied to a user must export nothing.
            if (export_method ==
                    base_models
                    .MODEL_ASSOCIATION_TO_USER.NOT_CORRESPONDING_TO_USER):
                self.assertEqual(len(exported_field_names), 0)
            # One-instance-per-user models export a flat dict whose keys
            # must match the policy's exported field names exactly.
            elif (export_method ==
                  base_models.MODEL_ASSOCIATION_TO_USER.ONE_INSTANCE_PER_USER):
                exported_data = model.export_data(self.USER_ID_1)
                self.assertEqual(
                    sorted([str(key) for key in exported_data.keys()]),
                    sorted(exported_field_names)
                )
            # Shared models map policy field names to Takeout keys via a
            # field-name mapping, which must cover both sides completely.
            elif (export_method ==
                  base_models
                  .MODEL_ASSOCIATION_TO_USER
                  .ONE_INSTANCE_SHARED_ACROSS_USERS):
                # Here we use MyPy ignore because model is of
                # BaseModel type and BaseModel does not contain
                # get_field_name_mapping_to_takeout_keys attribute,
                # so because of this MyPy throws an error. Thus to
                # avoid the error, we used ignore here.
                self.assertIsNotNone(
                    model.get_field_name_mapping_to_takeout_keys)  # type: ignore[attr-defined]
                exported_data = model.export_data(self.USER_ID_1)
                # Here we use MyPy ignore because model is of
                # BaseModel type and BaseModel does not contain
                # get_field_name_mapping_to_takeout_keys(), so
                # because of this MyPy throws an error. Thus to
                # avoid the error, we used ignore here.
                field_mapping = model.get_field_name_mapping_to_takeout_keys() # type: ignore[attr-defined]
                self.assertEqual(
                    sorted(exported_field_names),
                    sorted(field_mapping.keys())
                )
                self.assertEqual(
                    sorted(exported_data.keys()),
                    sorted(field_mapping.values())
                )
            # Multiple-instance models export a dict of per-instance
            # dicts; each inner dict's keys must match the policy.
            elif (export_method ==
                  base_models
                  .MODEL_ASSOCIATION_TO_USER.MULTIPLE_INSTANCES_PER_USER):
                exported_data = model.export_data(self.USER_ID_1)
                for model_id in exported_data.keys():
                    # If we are using a field as a Takeout key.
                    if field_used_as_key_for_takeout_dict:
                        # Ensure that we export the field.
                        self.assertEqual(
                            model_id,
                            getattr(
                                model,
                                field_used_as_key_for_takeout_dict)
                        )
                    self.assertEqual(
                        sorted([
                            str(key)
                            for key in exported_data[model_id].keys()]),
                        sorted(exported_field_names)
                    )
    def test_export_data_for_full_user_nontrivial_is_correct(self) -> None:
        """Nontrivial test of export_data functionality."""
        self.set_up_non_trivial()
        # We set up the feedback_thread_model here so that we can easily
        # access it when computing the expected data later.
        feedback_thread_model = feedback_models.GeneralFeedbackThreadModel(
            entity_type=self.THREAD_ENTITY_TYPE,
            entity_id=self.THREAD_ENTITY_ID,
            original_author_id=self.USER_ID_1,
            status=self.THREAD_STATUS,
            subject=self.THREAD_SUBJECT,
            has_suggestion=self.THREAD_HAS_SUGGESTION,
            summary=self.THREAD_SUMMARY,
            message_count=self.THREAD_MESSAGE_COUNT
        )
        feedback_thread_model.update_timestamps()
        feedback_thread_model.put()
        # Blog post model kept as a local so its published_on timestamp
        # can be referenced in the expected data below.
        blog_post_model = blog_models.BlogPostModel(
            id=self.BLOG_POST_ID_1,
            author_id=self.USER_ID_1,
            content='content sample',
            title='sample title',
            published_on=datetime.datetime.utcnow(),
            url_fragment='sample-url-fragment',
            tags=['tag', 'one'],
            thumbnail_filename='thumbnail'
        )
        blog_post_model.update_timestamps()
        blog_post_model.put()
        # Expected per-Takeout-key data, mirroring the fixtures created in
        # set_up_non_trivial() and the models created above.
        expected_stats_data = {
            'impact_score': self.USER_1_IMPACT_SCORE,
            'total_plays': self.USER_1_TOTAL_PLAYS,
            'average_ratings': self.USER_1_AVERAGE_RATINGS,
            'num_ratings': self.USER_1_NUM_RATINGS,
            'weekly_creator_stats_list': self.USER_1_WEEKLY_CREATOR_STATS_LIST
        }
        expected_user_skill_data = {
            self.SKILL_ID_1: self.DEGREE_OF_MASTERY,
            self.SKILL_ID_2: self.DEGREE_OF_MASTERY
        }
        expected_contribution_data = {
            'created_exploration_ids': [self.EXPLORATION_IDS[0]],
            'edited_exploration_ids': [self.EXPLORATION_IDS[0]]
        }
        expected_exploration_data = {
            self.EXPLORATION_IDS[0]: {
                'rating': 2,
                'rated_on_msec': self.GENERIC_EPOCH,
                'draft_change_list': {'new_content': {}},
                'draft_change_list_last_updated_msec': self.GENERIC_EPOCH,
                'draft_change_list_exp_version': 3,
                'draft_change_list_id': 1,
                'mute_suggestion_notifications': (
                    feconf.DEFAULT_SUGGESTION_NOTIFICATIONS_MUTED_PREFERENCE),
                'mute_feedback_notifications': (
                    feconf.DEFAULT_SUGGESTION_NOTIFICATIONS_MUTED_PREFERENCE)
            }
        }
        expected_completed_activities_data = {
            'completed_exploration_ids': self.EXPLORATION_IDS,
            'completed_collection_ids': self.COLLECTION_IDS,
            'completed_story_ids': self.STORY_IDS,
            'learnt_topic_ids': self.TOPIC_IDS
        }
        expected_incomplete_activities_data = {
            'incomplete_exploration_ids': self.EXPLORATION_IDS,
            'incomplete_collection_ids': self.COLLECTION_IDS,
            'incomplete_story_ids': self.STORY_IDS,
            'partially_learnt_topic_ids': self.TOPIC_IDS
        }
        expected_last_playthrough_data = {
            self.EXPLORATION_IDS[0]: {
                'exp_version': self.EXP_VERSION,
                'state_name': self.STATE_NAME
            }
        }
        expected_learner_goals_data = {
            'topic_ids_to_learn': self.TOPIC_IDS
        }
        expected_learner_playlist_data = {
            'playlist_exploration_ids': self.EXPLORATION_IDS,
            'playlist_collection_ids': self.COLLECTION_IDS
        }
        expected_collection_progress_data = {
            self.COLLECTION_IDS[0]: self.EXPLORATION_IDS
        }
        expected_story_progress_data = {
            self.STORY_ID_1: self.COMPLETED_NODE_IDS_1
        }
        # A second thread (plus a message) is created through the services
        # layer so the export contains both the raw model above and a
        # service-created thread.
        thread_id = feedback_services.create_thread(
            self.THREAD_ENTITY_TYPE,
            self.THREAD_ENTITY_ID,
            self.USER_ID_1,
            self.THREAD_SUBJECT,
            self.MESSAGE_TEXT
        )
        feedback_services.create_message(
            thread_id,
            self.USER_ID_1,
            self.THREAD_STATUS,
            self.THREAD_SUBJECT,
            self.MESSAGE_TEXT
        )
        expected_general_feedback_thread_data = {
            feedback_thread_model.id: {
                'entity_type': self.THREAD_ENTITY_TYPE,
                'entity_id': self.THREAD_ENTITY_ID,
                'status': self.THREAD_STATUS,
                'subject': self.THREAD_SUBJECT,
                'has_suggestion': self.THREAD_HAS_SUGGESTION,
                'summary': self.THREAD_SUMMARY,
                'message_count': self.THREAD_MESSAGE_COUNT,
                'last_updated_msec': utils.get_time_in_millisecs(
                    feedback_thread_model.last_updated)
            },
            thread_id: {
                'entity_type': self.THREAD_ENTITY_TYPE,
                'entity_id': self.THREAD_ENTITY_ID,
                'status': self.THREAD_STATUS,
                'subject': self.THREAD_SUBJECT,
                'has_suggestion': False,
                'summary': None,
                'message_count': 2,
                'last_updated_msec': utils.get_time_in_millisecs(
                    feedback_models.
                    GeneralFeedbackThreadModel.
                    get_by_id(thread_id).last_updated)
            }
        }
        expected_general_feedback_thread_user_data = {
            thread_id: {
                'message_ids_read_by_user': self.MESSAGE_IDS_READ_BY_USER
            }
        }
        expected_general_feedback_message_data = {
            thread_id + '.0': {
                'thread_id': thread_id,
                'message_id': 0,
                'updated_status': self.THREAD_STATUS,
                'updated_subject': self.THREAD_SUBJECT,
                'text': self.MESSAGE_TEXT,
                'received_via_email': self.MESSAGE_RECEIEVED_VIA_EMAIL
            },
            thread_id + '.1': {
                'thread_id': thread_id,
                'message_id': 1,
                'updated_status': self.THREAD_STATUS,
                'updated_subject': self.THREAD_SUBJECT,
                'text': self.MESSAGE_TEXT,
                'received_via_email': self.MESSAGE_RECEIEVED_VIA_EMAIL
            }
        }
        expected_collection_rights_data = {
            'owned_collection_ids': (
                [self.COLLECTION_IDS[0]]),
            'editable_collection_ids': (
                [self.COLLECTION_IDS[0]]),
            'voiced_collection_ids': (
                [self.COLLECTION_IDS[0]]),
            'viewable_collection_ids': [self.COLLECTION_IDS[0]]
        }
        expected_general_suggestion_data = {
            'exploration.exp1.thread_1': {
                'suggestion_type': (
                    feconf.SUGGESTION_TYPE_EDIT_STATE_CONTENT),
                'target_type': feconf.ENTITY_TYPE_EXPLORATION,
                'target_id': self.EXPLORATION_IDS[0],
                'target_version_at_submission': 1,
                'status': suggestion_models.STATUS_IN_REVIEW,
                'change_cmd': self.CHANGE_CMD
            }
        }
        expected_exploration_rights_data = {
            'owned_exploration_ids': (
                [self.EXPLORATION_IDS[0]]),
            'editable_exploration_ids': (
                [self.EXPLORATION_IDS[0]]),
            'voiced_exploration_ids': (
                [self.EXPLORATION_IDS[0]]),
            'viewable_exploration_ids': [self.EXPLORATION_IDS[0]]
        }
        expected_user_settings_data = {
            'email': self.USER_1_EMAIL,
            'roles': [feconf.ROLE_ID_CURRICULUM_ADMIN],
            'username': self.GENERIC_USERNAME,
            'normalized_username': self.GENERIC_USERNAME,
            'last_agreed_to_terms_msec': self.GENERIC_EPOCH,
            'last_started_state_editor_tutorial_msec': self.GENERIC_EPOCH,
            'last_started_state_translation_tutorial_msec': self.GENERIC_EPOCH,
            'last_logged_in_msec': self.GENERIC_EPOCH,
            'last_edited_an_exploration_msec': self.GENERIC_EPOCH,
            'last_created_an_exploration_msec': self.GENERIC_EPOCH,
            'profile_picture_filename': 'user_settings_profile_picture.png',
            'default_dashboard': 'learner',
            'creator_dashboard_display_pref': 'card',
            'user_bio': self.GENERIC_USER_BIO,
            'subject_interests': self.GENERIC_SUBJECT_INTERESTS,
            'first_contribution_msec': 1,
            'preferred_language_codes': self.GENERIC_LANGUAGE_CODES,
            'preferred_site_language_code': self.GENERIC_LANGUAGE_CODES[0],
            'preferred_audio_language_code': self.GENERIC_LANGUAGE_CODES[0],
            'display_alias': self.GENERIC_DISPLAY_ALIAS,
        }
        expected_subscriptions_data = {
            'creator_usernames': self.CREATOR_USERNAMES,
            'collection_ids': self.COLLECTION_IDS,
            'exploration_ids': self.EXPLORATION_IDS,
            'general_feedback_thread_ids': self.GENERAL_FEEDBACK_THREAD_IDS +
            [thread_id],
            'last_checked_msec': self.GENERIC_EPOCH
        }
        expected_task_entry_data = {
            'task_ids_resolved_by_user': [self.GENERIC_MODEL_ID]
        }
        expected_topic_data = {
            'managed_topic_ids': [self.TOPIC_ID_1, self.TOPIC_ID_2]
        }
        expected_contribution_rights_data = {
            'can_review_translation_for_language_codes': ['hi', 'en'],
            'can_review_voiceover_for_language_codes': ['hi'],
            'can_review_questions': True
        }
        expected_contrib_proficiency_data = {
            self.SCORE_CATEGORY_1: {
                'onboarding_email_sent': False,
                'score': 1.5
            },
            self.SCORE_CATEGORY_2: {
                'onboarding_email_sent': False,
                'score': 2
            }
        }
        # Snapshot metadata entries: every snapshot type below carries the
        # same generic commit type/message fixture.
        expected_collection_rights_sm = {
            self.GENERIC_MODEL_ID: {
                'commit_type': self.COMMIT_TYPE,
                'commit_message': self.COMMIT_MESSAGE,
            }
        }
        expected_collection_sm = {
            self.GENERIC_MODEL_ID: {
                'commit_type': self.COMMIT_TYPE,
                'commit_message': self.COMMIT_MESSAGE,
            }
        }
        expected_skill_sm = {
            self.GENERIC_MODEL_ID: {
                'commit_type': self.COMMIT_TYPE,
                'commit_message': self.COMMIT_MESSAGE,
            }
        }
        expected_subtopic_page_sm = {
            self.GENERIC_MODEL_ID: {
                'commit_type': self.COMMIT_TYPE,
                'commit_message': self.COMMIT_MESSAGE,
            }
        }
        expected_topic_rights_sm = {
            self.GENERIC_MODEL_ID: {
                'commit_type': self.COMMIT_TYPE,
                'commit_message': self.COMMIT_MESSAGE,
            }
        }
        expected_topic_sm = {
            self.GENERIC_MODEL_ID: {
                'commit_type': self.COMMIT_TYPE,
                'commit_message': self.COMMIT_MESSAGE,
            }
        }
        expected_story_sm = {
            self.GENERIC_MODEL_ID: {
                'commit_type': self.COMMIT_TYPE,
                'commit_message': self.COMMIT_MESSAGE,
            }
        }
        expected_question_sm = {
            self.GENERIC_MODEL_ID: {
                'commit_type': self.COMMIT_TYPE,
                'commit_message': self.COMMIT_MESSAGE,
            }
        }
        expected_config_property_sm = {
            self.GENERIC_MODEL_ID: {
                'commit_type': self.COMMIT_TYPE,
                'commit_message': self.COMMIT_MESSAGE,
            }
        }
        expected_exploration_rights_sm = {
            self.GENERIC_MODEL_ID: {
                'commit_type': self.COMMIT_TYPE,
                'commit_message': self.COMMIT_MESSAGE,
            }
        }
        expected_exploration_sm = {
            'exp_1-1': {
                'commit_type': 'create',
                'commit_message':
                    'New exploration created with title \'A title\'.'
            },
            'exp_1-2': {
                'commit_type': 'edit',
                'commit_message': 'Test edit'
            }
        }
        expected_platform_parameter_sm = {
            self.GENERIC_MODEL_ID: {
                'commit_type': self.COMMIT_TYPE,
                'commit_message': self.COMMIT_MESSAGE,
            }
        }
        expected_user_email_preferences: Dict[str, str] = {}
        expected_user_auth_details: Dict[str, str] = {}
        expected_app_feedback_report = {
            '%s.%s.%s' % (
                self.PLATFORM_ANDROID, self.REPORT_SUBMITTED_TIMESTAMP.second,
                'randomInteger123'): {
                    'scrubbed_by': self.USER_ID_1,
                    'ticket_id': self.TICKET_ID,
                    'submitted_on': self.REPORT_SUBMITTED_TIMESTAMP.isoformat(),
                    'local_timezone_offset_hrs': 0,
                    'report_type': self.REPORT_TYPE_SUGGESTION,
                    'category': self.CATEGORY_OTHER,
                    'platform_version': self.PLATFORM_VERSION}}
        expected_blog_post_data = {
            'content': 'content sample',
            'title': 'sample title',
            'published_on': utils.get_time_in_millisecs(
                blog_post_model.published_on),
            'url_fragment': 'sample-url-fragment',
            'tags': ['tag', 'one'],
            'thumbnail_filename': 'thumbnail'
        }
        expected_blog_post_rights = {
            'editable_blog_post_ids': [
                self.BLOG_POST_ID_1,
                self.BLOG_POST_ID_2
            ],
        }
        expected_blog_author_details = {
            'author_name': 'test name',
            'author_bio': ''
        }
        expected_learner_group_data = {
            'title': 'sample title',
            'description': 'sample description',
            'role_in_group': 'facilitator',
            'subtopic_page_ids': ['subtopic_id_1', 'subtopic_id_2'],
            'story_ids': ['skill_id_1', 'skill_id_2']
        }
        expected_learner_groups_user_data = {
            'invited_to_learner_groups_ids': ['group_id_1'],
            'learner_groups_user_details': [
                {
                    'group_id': 'group_id_2',
                    'progress_sharing_is_turned_on': False
                }
            ]
        }
        expected_translation_contribution_stats_data = {
            '%s.%s.%s' % (
                self.SUGGESTION_LANGUAGE_CODE, self.USER_ID_1,
                self.TOPIC_ID_1): {
                    'language_code': self.SUGGESTION_LANGUAGE_CODE,
                    'topic_id': self.TOPIC_ID_1,
                    'submitted_translations_count': (
                        self.SUBMITTED_TRANSLATIONS_COUNT),
                    'submitted_translation_word_count': (
                        self.SUBMITTED_TRANSLATION_WORD_COUNT),
                    'accepted_translations_count': (
                        self.ACCEPTED_TRANSLATIONS_COUNT),
                    'accepted_translations_without_reviewer_edits_count': (
                        self
                        .ACCEPTED_TRANSLATIONS_WITHOUT_REVIEWER_EDITS_COUNT),
                    'accepted_translation_word_count': (
                        self.ACCEPTED_TRANSLATION_WORD_COUNT),
                    'rejected_translations_count': (
                        self.REJECTED_TRANSLATIONS_COUNT),
                    'rejected_translation_word_count': (
                        self.REJECTED_TRANSLATION_WORD_COUNT),
                    'contribution_dates': [
                        date.isoformat() for date in self.CONTRIBUTION_DATES]
                }
        }
        expected_translation_review_stats_data = {
            '%s.%s.%s' % (
                self.SUGGESTION_LANGUAGE_CODE, self.USER_ID_1,
                self.TOPIC_ID_1): {
                    'language_code': self.SUGGESTION_LANGUAGE_CODE,
                    'topic_id': self.TOPIC_ID_1,
                    'reviewed_translations_count': (
                        self.REVIEWED_TRANSLATIONS_COUNT),
                    'reviewed_translation_word_count': (
                        self.REVIEWED_TRANSLATION_WORD_COUNT),
                    'accepted_translations_count': (
                        self.ACCEPTED_TRANSLATIONS_COUNT),
                    'accepted_translations_with_reviewer_edits_count': (
                        self
                        .ACCEPTED_TRANSLATIONS_WITH_REVIEWER_EDITS_COUNT),
                    'accepted_translation_word_count': (
                        self.ACCEPTED_TRANSLATION_WORD_COUNT),
                    'first_contribution_date': (
                        self.FIRST_CONTRIBUTION_DATE.isoformat()),
                    'last_contribution_date': (
                        self.LAST_CONTRIBUTION_DATE.isoformat())
                }
        }
        expected_question_contribution_stats_data = {
            '%s.%s' % (
                self.USER_ID_1, self.TOPIC_ID_1): {
                    'topic_id': self.TOPIC_ID_1,
                    'submitted_questions_count': (
                        self.SUBMITTED_QUESTION_COUNT),
                    'accepted_questions_count': (
                        self.ACCEPTED_QUESTIONS_COUNT),
                    'accepted_questions_without_reviewer_edits_count': (
                        self
                        .ACCEPTED_QUESTIONS_WITHOUT_REVIEWER_EDITS_COUNT),
                    'first_contribution_date': (
                        self.FIRST_CONTRIBUTION_DATE.isoformat()),
                    'last_contribution_date': (
                        self.LAST_CONTRIBUTION_DATE.isoformat())
                }
        }
        expected_question_review_stats_data = {
            '%s.%s' % (
                self.USER_ID_1, self.TOPIC_ID_1): {
                    'topic_id': self.TOPIC_ID_1,
                    'reviewed_questions_count': (
                        self.REVIEWED_QUESTIONS_COUNT),
                    'accepted_questions_count': (
                        self.ACCEPTED_QUESTIONS_COUNT),
                    'accepted_questions_with_reviewer_edits_count': (
                        self
                        .ACCEPTED_QUESTIONS_WITH_REVIEWER_EDITS_COUNT),
                    'first_contribution_date': (
                        self.FIRST_CONTRIBUTION_DATE.isoformat()),
                    'last_contribution_date': (
                        self.LAST_CONTRIBUTION_DATE.isoformat())
                }
        }
        # Assemble the full expected Takeout payload keyed by export name.
        expected_user_data = {
            'user_stats': expected_stats_data,
            'user_settings': expected_user_settings_data,
            'user_subscriptions': expected_subscriptions_data,
            'user_skill_mastery': expected_user_skill_data,
            'user_contributions': expected_contribution_data,
            'exploration_user_data': expected_exploration_data,
            'completed_activities': expected_completed_activities_data,
            'incomplete_activities': expected_incomplete_activities_data,
            'exp_user_last_playthrough': expected_last_playthrough_data,
            'learner_goals': expected_learner_goals_data,
            'learner_playlist': expected_learner_playlist_data,
            'learner_group': expected_learner_group_data,
            'learner_groups_user': expected_learner_groups_user_data,
            'task_entry': expected_task_entry_data,
            'topic_rights': expected_topic_data,
            'collection_progress': expected_collection_progress_data,
            'story_progress': expected_story_progress_data,
            'general_feedback_thread':
                expected_general_feedback_thread_data,
            'general_feedback_thread_user':
                expected_general_feedback_thread_user_data,
            'general_feedback_message':
                expected_general_feedback_message_data,
            'collection_rights':
                expected_collection_rights_data,
            'general_suggestion': expected_general_suggestion_data,
            'exploration_rights': expected_exploration_rights_data,
            'user_contribution_proficiency': expected_contrib_proficiency_data,
            'user_contribution_rights': expected_contribution_rights_data,
            'collection_rights_snapshot_metadata':
                expected_collection_rights_sm,
            'collection_snapshot_metadata':
                expected_collection_sm,
            'skill_snapshot_metadata':
                expected_skill_sm,
            'subtopic_page_snapshot_metadata':
                expected_subtopic_page_sm,
            'topic_rights_snapshot_metadata':
                expected_topic_rights_sm,
            'topic_snapshot_metadata': expected_topic_sm,
            'translation_contribution_stats':
                expected_translation_contribution_stats_data,
            'translation_review_stats':
                expected_translation_review_stats_data,
            'question_contribution_stats':
                expected_question_contribution_stats_data,
            'question_review_stats':
                expected_question_review_stats_data,
            'story_snapshot_metadata': expected_story_sm,
            'question_snapshot_metadata': expected_question_sm,
            'config_property_snapshot_metadata':
                expected_config_property_sm,
            'exploration_rights_snapshot_metadata':
                expected_exploration_rights_sm,
            'exploration_snapshot_metadata': expected_exploration_sm,
            'platform_parameter_snapshot_metadata':
                expected_platform_parameter_sm,
            'user_email_preferences': expected_user_email_preferences,
            'user_auth_details': expected_user_auth_details,
            'app_feedback_report': expected_app_feedback_report,
            'blog_post': expected_blog_post_data,
            'blog_post_rights': expected_blog_post_rights,
            'blog_author_details': expected_blog_author_details
        }
        user_takeout_object = takeout_service.export_data_for_user(
            self.USER_ID_1)
        observed_data = user_takeout_object.user_data
        observed_images = user_takeout_object.user_images
        # NOTE(review): assertItemsEqual on two dicts compares only their
        # keys (as unordered sequences), not their values — confirm
        # whether a value-level comparison (assertEqual) is intended here.
        self.assertItemsEqual(observed_data, expected_user_data)
        # Round-trip through JSON to also verify JSON-serializability of
        # the exported payload.
        observed_json = json.dumps(observed_data)
        expected_json = json.dumps(expected_user_data)
        self.assertItemsEqual(
            json.loads(observed_json), json.loads(expected_json))
        # The profile picture set up for the user should be exported as a
        # Takeout image; compare data and export path field by field.
        expected_images = [
            takeout_domain.TakeoutImage(
                self.GENERIC_IMAGE_URL, 'user_settings_profile_picture.png')
        ]
        self.assertEqual(len(expected_images), len(observed_images))
        for i, _ in enumerate(expected_images):
            self.assertEqual(
                expected_images[i].b64_image_data,
                observed_images[i].b64_image_data
            )
            self.assertEqual(
                expected_images[i].image_export_path,
                observed_images[i].image_export_path
            )
def test_export_for_full_user_does_not_export_profile_data(self) -> None:
"""Test that exporting data for a full user does not export
data for any profile user, atleast for the models that were
populated for the profile user.
"""
self.set_up_non_trivial()
profile_user_settings_data = {
'email': self.USER_1_EMAIL,
'roles': [self.PROFILE_1_ROLE],
'username': None,
'normalized_username': None,
'last_agreed_to_terms_msec': self.GENERIC_DATE,
'last_started_state_editor_tutorial_msec': None,
'last_started_state_translation_tutorial': None,
'last_logged_in_msec': self.GENERIC_DATE,
'last_created_an_exploration': None,
'last_edited_an_exploration': None,
'profile_picture_data_url': None,
'default_dashboard': 'learner',
'creator_dashboard_display_pref': 'card',
'user_bio': self.GENERIC_USER_BIO,
'subject_interests': self.GENERIC_SUBJECT_INTERESTS,
'first_contribution_msec': None,
'preferred_language_codes': self.GENERIC_LANGUAGE_CODES,
'preferred_site_language_code': self.GENERIC_LANGUAGE_CODES[0],
'preferred_audio_language_code': self.GENERIC_LANGUAGE_CODES[0],
'display_alias': self.GENERIC_DISPLAY_ALIAS_2
}
user_skill_data = {
self.SKILL_ID_3: self.DEGREE_OF_MASTERY_2
}
completed_activities_data = {
'completed_exploration_ids': self.EXPLORATION_IDS_2,
'completed_collection_ids': self.COLLECTION_IDS_2,
'completed_story_ids': self.STORY_IDS,
'learnt_topic_ids': self.TOPIC_IDS
}
incomplete_activities_data: Dict[str, List[str]] = {}
last_playthrough_data: Dict[str, Dict[str, Union[str, int]]] = {}
learner_goals_data: Dict[str, List[str]] = {}
learner_playlist_data = {
'playlist_exploration_ids': self.EXPLORATION_IDS_2,
'playlist_collection_ids': self.COLLECTION_IDS_2
}
collection_progress_data = {
self.COLLECTION_IDS_2[0]: self.EXPLORATION_IDS_2
}
story_progress_data = {
self.STORY_ID_2: self.COMPLETED_NODE_IDS_2
}
profile_user_data = {
'user_settings': profile_user_settings_data,
'user_skill_mastery': user_skill_data,
'completed_activities': completed_activities_data,
'incomplete_activities': incomplete_activities_data,
'exp_user_last_playthrough': last_playthrough_data,
'learner_goals': learner_goals_data,
'learner_playlist': learner_playlist_data,
'collection_progress': collection_progress_data,
'story_progress': story_progress_data,
}
user_takeout_object = takeout_service.export_data_for_user(
self.USER_ID_1)
observed_data = user_takeout_object.user_data
for key, value in profile_user_data.items():
self.assertNotEqual(value, observed_data[key])
|
{
"content_hash": "eb9c4dea8771c1a6505f700b625dcad4",
"timestamp": "",
"source": "github",
"line_count": 2002,
"max_line_length": 108,
"avg_line_length": 43.223276723276726,
"alnum_prop": 0.5799174881259173,
"repo_name": "oppia/oppia",
"id": "47074b7993499b9665a5a0c64e29c07040a1565e",
"size": "87138",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "core/domain/takeout_service_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "476480"
},
{
"name": "HTML",
"bytes": "2092923"
},
{
"name": "JavaScript",
"bytes": "1247116"
},
{
"name": "PEG.js",
"bytes": "71377"
},
{
"name": "Python",
"bytes": "17628953"
},
{
"name": "Shell",
"bytes": "2240"
},
{
"name": "TypeScript",
"bytes": "15541372"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from blinker import Namespace
# Single blinker namespace holding all RH (request handler) lifecycle signals.
_signals = Namespace()
# Fired after RH._process_args; handlers receive the RH instance and the
# _process_args return value.
process_args = _signals.signal('process-args', """
Executed right after `_process_args` of an `RH` instance has been called.
The *sender* is the RH class, the current instance is passed in *rh*.
The return value of `_process_args` (usually ``None``) is available in
*result*.
""")
# Fired after RH._check_access succeeds (not fired when it raises).
check_access = _signals.signal('check-access', """
Executed right after `_check_access` of an `RH` instance has been called
unless the access check raised an exception. The *sender* is the RH class,
the current instance is passed in *rh*.
""")
# Fired before RH._process; a handler return value short-circuits _process.
before_process = _signals.signal('before-process', """
Executed right before `_process` of an `RH` instance is called.
The *sender* is the RH class, the current instance is passed in *rh*.
If a signal handler returns a value, the original `_process` method
will not be executed. If multiple signal handlers return a value, an
exception is raised.
""")
# Fired after RH._process; a handler return value replaces the result.
process = _signals.signal('process', """
Executed right after `_process` of an `RH` instance has been called.
The *sender* is the RH class, the current instance is passed in *rh*.
The return value of `_process` is available in *result* and if a signal
handler returns a value, it will replace the original return value.
If multiple signals handlers return a value, an exception is raised.
""")
|
{
"content_hash": "c292aca58d83d69a410b764523ab136d",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 75,
"avg_line_length": 38.166666666666664,
"alnum_prop": 0.7343522561863173,
"repo_name": "OmeGak/indico",
"id": "ca945ba38511b1325b1c5dddc8aaf558db7cd36d",
"size": "1588",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "indico/core/signals/rh.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "547418"
},
{
"name": "HTML",
"bytes": "1366687"
},
{
"name": "JavaScript",
"bytes": "1678182"
},
{
"name": "Mako",
"bytes": "1340"
},
{
"name": "Python",
"bytes": "4488419"
},
{
"name": "Shell",
"bytes": "2724"
},
{
"name": "TeX",
"bytes": "23051"
},
{
"name": "XSLT",
"bytes": "1504"
}
],
"symlink_target": ""
}
|
"""Illustrates a mixin which provides a generic association
via individually generated association tables, one for each parent class.
The associated objects themselves are persisted in a single table
shared among all parents.
This configuration has the advantage that all Address
rows are in one table, so that the definition of "Address"
can be maintained in one place. The association table
contains the foreign key to Address so that Address
has no dependency on the system.
"""
from sqlalchemy import Column
from sqlalchemy import create_engine
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy.ext.declarative import as_declarative
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import relationship
from sqlalchemy.orm import Session
@as_declarative()
class Base:
    """Base class which provides automated table name
    and surrogate primary key column.
    """
    @declared_attr
    def __tablename__(cls):
        # Derive the table name from the class name, e.g. Customer -> "customer".
        return cls.__name__.lower()
    # Surrogate integer primary key shared by every mapped subclass.
    id = Column(Integer, primary_key=True)
class Address(Base):
    """The Address class.
    This represents all address records in a
    single table.
    """
    street = Column(String)
    city = Column(String)
    zip = Column(String)
    def __repr__(self):
        # Use the concrete class name so subclass reprs stay accurate.
        return "%s(street=%r, city=%r, zip=%r)" % (
            self.__class__.__name__,
            self.street,
            self.city,
            self.zip,
        )
class HasAddresses:
    """Mixin that grants each parent class an ``addresses`` relationship.

    A dedicated ``<tablename>_addresses`` association table is generated
    per concrete parent, all pointing at the shared ``address`` table.
    """
    @declared_attr
    def addresses(cls):
        parent_table = cls.__tablename__
        # Association table: composite PK of (address_id, <parent>_id).
        assoc = Table(
            "%s_addresses" % parent_table,
            cls.metadata,
            Column("address_id", ForeignKey("address.id"), primary_key=True),
            Column(
                "%s_id" % parent_table,
                ForeignKey("%s.id" % parent_table),
                primary_key=True,
            ),
        )
        return relationship(Address, secondary=assoc)
class Customer(HasAddresses, Base):
    # Parent type; links addresses via a generated "customer_addresses" table.
    name = Column(String)
class Supplier(HasAddresses, Base):
    # Parent type; links addresses via a generated "supplier_addresses" table.
    company_name = Column(String)
# In-memory SQLite database; echo=True logs every emitted SQL statement.
engine = create_engine("sqlite://", echo=True)
Base.metadata.create_all(engine)
session = Session(engine)
# Addresses are attached through each parent's own association table while
# the Address rows themselves all live in the single shared "address" table.
session.add_all(
    [
        Customer(
            name="customer 1",
            addresses=[
                Address(
                    street="123 anywhere street", city="New York", zip="10110"
                ),
                Address(
                    street="40 main street", city="San Francisco", zip="95732"
                ),
            ],
        ),
        Supplier(
            company_name="Ace Hammers",
            addresses=[
                Address(street="2569 west elm", city="Detroit", zip="56785")
            ],
        ),
    ]
)
session.commit()
# Addresses are reachable only through a parent's relationship collection.
for customer in session.query(Customer):
    for address in customer.addresses:
        print(address)
# no parent here
|
{
"content_hash": "dd4ce312779c91a9f3e2619c7b014a85",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 78,
"avg_line_length": 25.15573770491803,
"alnum_prop": 0.6177908113391984,
"repo_name": "sqlalchemy/sqlalchemy",
"id": "2e412869f08224d9d96396db703a87f83dcd48c0",
"size": "3069",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "examples/generic_associations/table_per_association.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Cython",
"bytes": "21698"
},
{
"name": "Python",
"bytes": "16838583"
}
],
"symlink_target": ""
}
|
from django.db import connection
from api.base.exceptions import (Conflict, EndpointNotImplementedError,
InvalidModelValueError,
RelationshipPostMakesNoChanges)
from api.base.serializers import (DateByVersion, HideIfRegistration, IDField,
JSONAPIListField,
JSONAPIRelationshipSerializer,
JSONAPISerializer, LinksField,
NodeFileHyperLinkField, RelationshipField,
ShowIfVersion, TargetTypeField, TypeField,
WaterbutlerLink, relationship_diff, BaseAPISerializer)
from api.base.settings import ADDONS_FOLDER_CONFIGURABLE
from api.base.utils import (absolute_reverse, get_object_or_error,
get_user_auth, is_truthy)
from django.apps import apps
from django.conf import settings
from framework.auth.core import Auth
from framework.exceptions import PermissionsError
from modularodm.exceptions import ValidationError
from osf.models import Tag
from rest_framework import serializers as ser
from rest_framework import exceptions
from addons.base.exceptions import InvalidAuthError, InvalidFolderError
from website.exceptions import NodeStateError
from osf.models import (Comment, DraftRegistration, Institution,
MetaSchema, AbstractNode as Node, PrivateLink)
from osf.models.external import ExternalAccount
from osf.models.licenses import NodeLicense
from osf.models.preprint_service import PreprintService
from website.project import new_private_link
from website.project.metadata.schemas import LATEST_SCHEMA_VERSION
from website.project.metadata.utils import is_prereg_admin_not_project_admin
from website.project.model import NodeUpdateError
from website.util import permissions as osf_permissions
class NodeTagField(ser.Field):
    """Serializer field rendering a Tag model instance as its bare name."""
    def to_representation(self, obj):
        # A missing tag serializes to None rather than raising.
        return None if obj is None else obj.name
    def to_internal_value(self, data):
        # Tags arrive as plain strings; pass them through unchanged.
        return data
class NodeLicenseSerializer(BaseAPISerializer):
    """Embedded license attributes on a node: copyright holders and year."""
    copyright_holders = ser.ListField(allow_empty=True)
    year = ser.CharField(allow_blank=True)
    class Meta:
        type_ = 'node_licenses'
class NodeLicenseRelationshipField(RelationshipField):
    """Relationship field that deserializes a license id to a NodeLicense."""
    def to_internal_value(self, license_id):
        # Guard clause: an unknown license id is a 404, not a validation error.
        node_license = NodeLicense.load(license_id)
        if not node_license:
            raise exceptions.NotFound('Unable to find specified license.')
        return {'license_type': node_license}
class NodeCitationSerializer(JSONAPISerializer):
    """Read-only serializer exposing a node's citation record."""
    id = IDField(read_only=True)
    title = ser.CharField(allow_blank=True, read_only=True)
    author = ser.ListField(read_only=True)
    publisher = ser.CharField(allow_blank=True, read_only=True)
    type = ser.CharField(allow_blank=True, read_only=True)
    doi = ser.CharField(allow_blank=True, read_only=True)
    links = LinksField({'self': 'get_absolute_url'})
    def get_absolute_url(self, obj):
        # The citation payload is dict-like; its canonical URL is under 'URL'.
        return obj['URL']
    class Meta:
        type_ = 'node-citation'
class NodeCitationStyleSerializer(JSONAPISerializer):
    """Read-only serializer for a citation rendered in a specific style."""
    id = ser.CharField(read_only=True)
    citation = ser.CharField(allow_blank=True, read_only=True)
    def get_absolute_url(self, obj):
        # The payload is dict-like; its canonical URL is under 'URL'.
        return obj['URL']
    class Meta:
        type_ = 'styled-citations'
def get_license_details(node, validated_data):
    """Build the license payload for ``node``, overlaying incoming edits.

    Starts from the node's current license (``license`` on preprints,
    ``node_license`` otherwise) and replaces the year, copyright holders
    and license type with any values present in ``validated_data``.
    Returns a dict with ``id``, ``year`` and ``copyrightHolders`` keys.
    """
    current = (
        node.license if isinstance(node, PreprintService) else node.node_license)
    if current:
        license_id = current.node_license.license_id
        year = current.year
        holders = current.copyright_holders
    else:
        license_id = None
        year = None
        holders = []
    if 'license' in validated_data:
        incoming = validated_data['license']
        year = incoming.get('year', year)
        holders = incoming.get('copyright_holders', holders)
    if 'license_type' in validated_data:
        license_id = validated_data['license_type'].license_id
    return {
        'id': license_id,
        'year': year,
        'copyrightHolders': holders
    }
class NodeSerializer(JSONAPISerializer):
    """JSON-API serializer for nodes (projects/components).

    Exposes writable node attributes (title, description, category, tags,
    license, visibility) alongside a large set of relationship fields to
    related resources (children, contributors, files, registrations, ...).
    """
    # TODO: If we have to redo this implementation in any of the other serializers, subclass ChoiceField and make it
    # handle blank choices properly. Currently DRF ChoiceFields ignore blank options, which is incorrect in this
    # instance
    # Fields clients may reference in filter[...] query parameters.
    filterable_fields = frozenset([
        'id',
        'title',
        'description',
        'public',
        'tags',
        'category',
        'date_created',
        'date_modified',
        'root',
        'parent',
        'contributors',
        'preprint'
    ])
    # NOTE(review): per the name, these fields remain serialized in
    # anonymized views — confirm against the handling in JSONAPISerializer.
    non_anonymized_fields = [
        'id',
        'title',
        'description',
        'category',
        'date_created',
        'date_modified',
        'registration',
        'tags',
        'public',
        'license',
        'links',
        'children',
        'comments',
        'contributors',
        'files',
        'node_links',
        'parent',
        'root',
        'logs',
        'wikis'
    ]
    id = IDField(source='_id', read_only=True)
    type = TypeField()
    category_choices = settings.NODE_CATEGORY_MAP.items()
    category_choices_string = ', '.join(["'{}'".format(choice[0]) for choice in category_choices])
    title = ser.CharField(required=True)
    description = ser.CharField(required=False, allow_blank=True, allow_null=True)
    category = ser.ChoiceField(choices=category_choices, help_text='Choices: ' + category_choices_string)
    date_created = DateByVersion(read_only=True)
    date_modified = DateByVersion(read_only=True)
    registration = ser.BooleanField(read_only=True, source='is_registration')
    preprint = ser.BooleanField(read_only=True, source='is_preprint')
    fork = ser.BooleanField(read_only=True, source='is_fork')
    collection = ser.BooleanField(read_only=True, source='is_collection')
    tags = JSONAPIListField(child=NodeTagField(), required=False)
    node_license = NodeLicenseSerializer(required=False, source='license')
    template_from = ser.CharField(required=False, allow_blank=False, allow_null=False,
                                  help_text='Specify a node id for a node you would like to use as a template for the '
                                            'new node. Templating is like forking, except that you do not copy the '
                                            'files, only the project structure. Some information is changed on the top '
                                            'level project by submitting the appropriate fields in the request body, '
                                            'and some information will not change. By default, the description will '
                                            'be cleared and the project will be made private.')
    current_user_can_comment = ser.SerializerMethodField(help_text='Whether the current user is allowed to post comments')
    current_user_permissions = ser.SerializerMethodField(help_text='List of strings representing the permissions '
                                                                   'for the current user on this node.')
    # Public is only write-able by admins--see update method
    public = ser.BooleanField(source='is_public', required=False,
                              help_text='Nodes that are made public will give read-only access '
                                        'to everyone. Private nodes require explicit read '
                                        'permission. Write and admin access are the same for '
                                        'public and private nodes. Administrators on a parent '
                                        'node have implicit read permissions for all child nodes')
    links = LinksField({'html': 'get_absolute_html_url'})
    # TODO: When we have osf_permissions.ADMIN permissions, make this writable for admins
    license = NodeLicenseRelationshipField(
        related_view='licenses:license-detail',
        related_view_kwargs={'license_id': '<license.node_license._id>'},
        read_only=False
    )
    children = RelationshipField(
        related_view='nodes:node-children',
        related_view_kwargs={'node_id': '<_id>'},
        related_meta={'count': 'get_node_count'},
    )
    comments = RelationshipField(
        related_view='nodes:node-comments',
        related_view_kwargs={'node_id': '<_id>'},
        related_meta={'unread': 'get_unread_comments_count'},
        filter={'target': '<_id>'}
    )
    contributors = RelationshipField(
        related_view='nodes:node-contributors',
        related_view_kwargs={'node_id': '<_id>'},
        related_meta={'count': 'get_contrib_count'},
    )
    files = RelationshipField(
        related_view='nodes:node-providers',
        related_view_kwargs={'node_id': '<_id>'}
    )
    wikis = RelationshipField(
        related_view='nodes:node-wikis',
        related_view_kwargs={'node_id': '<_id>'}
    )
    # The source may be a registration, so the related view is chosen lazily.
    forked_from = RelationshipField(
        related_view=lambda n: 'registrations:registration-detail' if getattr(n, 'is_registration', False) else 'nodes:node-detail',
        related_view_kwargs={'node_id': '<forked_from_guid>'}
    )
    template_node = RelationshipField(
        related_view='nodes:node-detail',
        related_view_kwargs={'node_id': '<template_node._id>'}
    )
    forks = RelationshipField(
        related_view='nodes:node-forks',
        related_view_kwargs={'node_id': '<_id>'}
    )
    node_links = ShowIfVersion(RelationshipField(
        related_view='nodes:node-pointers',
        related_view_kwargs={'node_id': '<_id>'},
        related_meta={'count': 'get_pointers_count'},
        help_text='This feature is deprecated as of version 2.1. Use linked_nodes instead.'
    ), min_version='2.0', max_version='2.0')
    parent = RelationshipField(
        related_view='nodes:node-detail',
        related_view_kwargs={'node_id': '<parent_node._id>'},
        filter_key='parent_node'
    )
    identifiers = RelationshipField(
        related_view='nodes:identifier-list',
        related_view_kwargs={'node_id': '<_id>'}
    )
    draft_registrations = HideIfRegistration(RelationshipField(
        related_view='nodes:node-draft-registrations',
        related_view_kwargs={'node_id': '<_id>'}
    ))
    registrations = HideIfRegistration(RelationshipField(
        related_view='nodes:node-registrations',
        related_view_kwargs={'node_id': '<_id>'},
        related_meta={'count': 'get_registration_count'}
    ))
    affiliated_institutions = RelationshipField(
        related_view='nodes:node-institutions',
        related_view_kwargs={'node_id': '<_id>'},
        self_view='nodes:node-relationships-institutions',
        self_view_kwargs={'node_id': '<_id>'}
    )
    root = RelationshipField(
        related_view='nodes:node-detail',
        related_view_kwargs={'node_id': '<root._id>'}
    )
    logs = RelationshipField(
        related_view='nodes:node-logs',
        related_view_kwargs={'node_id': '<_id>'},
        related_meta={'count': 'get_logs_count'}
    )
    linked_nodes = RelationshipField(
        related_view='nodes:linked-nodes',
        related_view_kwargs={'node_id': '<_id>'},
        related_meta={'count': 'get_node_links_count'},
        self_view='nodes:node-pointer-relationship',
        self_view_kwargs={'node_id': '<_id>'},
        self_meta={'count': 'get_node_links_count'}
    )
    linked_registrations = RelationshipField(
        related_view='nodes:linked-registrations',
        related_view_kwargs={'node_id': '<_id>'},
        related_meta={'count': 'get_registration_links_count'},
        self_view='nodes:node-registration-pointer-relationship',
        self_view_kwargs={'node_id': '<_id>'},
        self_meta={'count': 'get_node_links_count'}
    )
    view_only_links = RelationshipField(
        related_view='nodes:node-view-only-links',
        related_view_kwargs={'node_id': '<_id>'},
    )
    citation = RelationshipField(
        related_view='nodes:node-citation',
        related_view_kwargs={'node_id': '<_id>'}
    )
    preprints = HideIfRegistration(RelationshipField(
        related_view='nodes:node-preprints',
        related_view_kwargs={'node_id': '<_id>'}
    ))
    def get_current_user_permissions(self, obj):
        """Return the requesting user's permission strings on this node.

        Anonymous users, and users with no explicit permissions, get
        ``['read']``.
        """
        user = self.context['request'].user
        if user.is_anonymous:
            return ['read']
        permissions = obj.get_permissions(user=user)
        if not permissions:
            permissions = ['read']
        return permissions
    def get_current_user_can_comment(self, obj):
        """Return whether the requesting user may comment on this node."""
        user = self.context['request'].user
        auth = Auth(user if not user.is_anonymous else None)
        return obj.can_comment(auth)
    class Meta:
        type_ = 'nodes'
    def get_absolute_url(self, obj):
        return obj.get_absolute_url()
    # TODO: See if we can get the count filters into the filter rather than the serializer.
    def get_logs_count(self, obj):
        return obj.logs.count()
    def get_node_count(self, obj):
        """Count children of ``obj`` visible to the requesting user.

        Uses a single recursive SQL query: a child counts when it is not
        deleted and either is public, or the requester is an admin on any
        ancestor, or is a contributor with read access, or presents a
        valid private-link key.
        """
        auth = get_user_auth(self.context['request'])
        user_id = getattr(auth.user, 'id', None)
        with connection.cursor() as cursor:
            cursor.execute('''
            WITH RECURSIVE parents AS (
              SELECT parent_id, child_id
              FROM osf_noderelation
              WHERE child_id = %s AND is_node_link IS FALSE
            UNION ALL
              SELECT osf_noderelation.parent_id, parents.parent_id AS child_id
              FROM parents JOIN osf_noderelation ON parents.PARENT_ID = osf_noderelation.child_id
              WHERE osf_noderelation.is_node_link IS FALSE
            ), has_admin AS (SELECT * FROM osf_contributor WHERE (node_id IN (SELECT parent_id FROM parents) OR node_id = %s) AND user_id = %s AND admin IS TRUE LIMIT 1)
            SELECT DISTINCT
              COUNT(child_id)
            FROM
              osf_noderelation
            JOIN osf_abstractnode ON osf_noderelation.child_id = osf_abstractnode.id
            JOIN osf_contributor ON osf_abstractnode.id = osf_contributor.node_id
            LEFT JOIN osf_privatelink_nodes ON osf_abstractnode.id = osf_privatelink_nodes.abstractnode_id
            LEFT JOIN osf_privatelink ON osf_privatelink_nodes.privatelink_id = osf_privatelink.id
            WHERE parent_id = %s AND is_node_link IS FALSE
            AND osf_abstractnode.is_deleted IS FALSE
            AND (
              osf_abstractnode.is_public
              OR (TRUE IN (SELECT TRUE FROM has_admin))
              OR (osf_contributor.user_id = %s AND osf_contributor.read IS TRUE)
              OR (osf_privatelink.key = %s AND osf_privatelink.is_deleted = FALSE)
            );
            ''', [obj.id, obj.id, user_id, obj.id, user_id, auth.private_key])
            return int(cursor.fetchone()[0])
    def get_contrib_count(self, obj):
        return len(obj.contributors)
    def get_registration_count(self, obj):
        """Count registrations of this node visible to the requester."""
        auth = get_user_auth(self.context['request'])
        registrations = [node for node in obj.registrations_all if node.can_view(auth)]
        return len(registrations)
    def get_pointers_count(self, obj):
        return obj.linked_nodes.count()
    def get_node_links_count(self, obj):
        """Count visible linked nodes, excluding collections/registrations."""
        count = 0
        auth = get_user_auth(self.context['request'])
        for pointer in obj.linked_nodes.filter(is_deleted=False).exclude(type='osf.collection').exclude(type='osf.registration'):
            if pointer.can_view(auth):
                count += 1
        return count
    def get_registration_links_count(self, obj):
        """Count visible linked registrations."""
        count = 0
        auth = get_user_auth(self.context['request'])
        for pointer in obj.linked_nodes.filter(is_deleted=False, type='osf.registration').exclude(type='osf.collection'):
            if pointer.can_view(auth):
                count += 1
        return count
    def get_unread_comments_count(self, obj):
        """Return the requester's unread comment count for the node page."""
        user = get_user_auth(self.context['request']).user
        node_comments = Comment.find_n_unread(user=user, node=obj, page='node')
        return {
            'node': node_comments
        }
    def create(self, validated_data):
        """Create a node, optionally templated from an existing node.

        Pops 'tags' and 'template_from' out of the validated data before
        constructing the node; with ?inherit_contributors=true, also copies
        the parent's contributors onto the new node.
        """
        request = self.context['request']
        user = request.user
        Node = apps.get_model('osf.Node')
        tag_instances = []
        if 'tags' in validated_data:
            tags = validated_data.pop('tags')
            for tag in tags:
                tag_instance, created = Tag.objects.get_or_create(name=tag, defaults=dict(system=False))
                tag_instances.append(tag_instance)
        if 'template_from' in validated_data:
            template_from = validated_data.pop('template_from')
            template_node = Node.load(template_from)
            if template_node is None:
                raise exceptions.NotFound
            if not template_node.has_permission(user, 'read', check_parent=False):
                raise exceptions.PermissionDenied
            validated_data.pop('creator')
            changed_data = {template_from: validated_data}
            node = template_node.use_as_template(auth=get_user_auth(request), changes=changed_data)
        else:
            node = Node(**validated_data)
        try:
            node.save()
        except ValidationError as e:
            raise InvalidModelValueError(detail=e.messages[0])
        if len(tag_instances):
            node.tags.add(*tag_instances)
        if is_truthy(request.GET.get('inherit_contributors')) and validated_data['parent'].has_permission(user, 'write'):
            auth = get_user_auth(request)
            parent = validated_data['parent']
            contributors = []
            for contributor in parent.contributor_set.exclude(user=user):
                contributors.append({
                    'user': contributor.user,
                    'permissions': parent.get_permissions(contributor.user),
                    'visible': contributor.visible
                })
                if not contributor.user.is_registered:
                    # NOTE(review): unregistered users are pre-added here,
                    # presumably to create their unclaimed record on the new
                    # node — confirm in add_unregistered_contributor.
                    node.add_unregistered_contributor(
                        fullname=contributor.user.fullname, email=contributor.user.email, auth=auth,
                        permissions=parent.get_permissions(contributor.user), existing_user=contributor.user
                    )
            node.add_contributors(contributors, auth=auth, log=True, save=True)
        return node
    def update(self, node, validated_data):
        """Update instance with the validated data. Requires
        the request to be in the serializer context.
        """
        assert isinstance(node, Node), 'node must be a Node'
        auth = get_user_auth(self.context['request'])
        old_tags = set(node.tags.values_list('name', flat=True))
        if 'tags' in validated_data:
            current_tags = set(validated_data.pop('tags', []))
        elif self.partial:
            # PATCH without 'tags' leaves the tag set untouched.
            current_tags = set(old_tags)
        else:
            # PUT without 'tags' clears all tags.
            current_tags = set()
        # Apply the tag diff instead of rewriting the whole set.
        for new_tag in (current_tags - old_tags):
            node.add_tag(new_tag, auth=auth)
        for deleted_tag in (old_tags - current_tags):
            node.remove_tag(deleted_tag, auth=auth)
        if validated_data:
            if 'license_type' in validated_data or 'license' in validated_data:
                license_details = get_license_details(node, validated_data)
                validated_data['node_license'] = license_details
            try:
                node.update(validated_data, auth=auth)
            except ValidationError as e:
                raise InvalidModelValueError(detail=e.message)
            except PermissionsError:
                raise exceptions.PermissionDenied
            except NodeUpdateError as e:
                raise exceptions.ValidationError(detail=e.reason)
            except NodeStateError as e:
                raise InvalidModelValueError(detail=e.message)
        return node
class NodeAddonSettingsSerializerBase(JSONAPISerializer):
    """Common fields and helpers for node addon-settings serializers."""
    class Meta:
        type_ = 'node_addons'
    # Addon identity and auth/config state, sourced from the settings object.
    id = ser.CharField(source='config.short_name', read_only=True)
    node_has_auth = ser.BooleanField(source='has_auth', read_only=True)
    configured = ser.BooleanField(read_only=True)
    external_account_id = ser.CharField(source='external_account._id', required=False, allow_null=True)
    folder_id = ser.CharField(required=False, allow_null=True)
    folder_path = ser.CharField(required=False, allow_null=True)
    # Forward-specific
    label = ser.CharField(required=False, allow_null=True)
    url = ser.CharField(required=False, allow_null=True)
    links = LinksField({
        'self': 'get_absolute_url',
    })
    def get_absolute_url(self, obj):
        """Return the addon-detail URL, fixing the provider kwarg when it is
        missing or does not match this settings object's addon."""
        kwargs = self.context['request'].parser_context['kwargs']
        if 'provider' not in kwargs or (obj and obj.config.short_name != kwargs.get('provider')):
            kwargs.update({'provider': obj.config.short_name})
        return absolute_reverse(
            'nodes:node-addon-detail',
            kwargs=kwargs
        )
    def create(self, validated_data):
        """Enable the addon named by the URL's provider kwarg on the node."""
        auth = Auth(self.context['request'].user)
        node = self.context['view'].get_node()
        addon = self.context['request'].parser_context['kwargs']['provider']
        return node.get_or_add_addon(addon, auth=auth)
class ForwardNodeAddonSettingsSerializer(NodeAddonSettingsSerializerBase):
    """Addon-settings serializer specific to the 'forward' (redirect) addon."""
    def update(self, instance, validated_data):
        """Set, change or reset the forward addon's url and label.

        An empty url with a label is rejected; an empty url alone resets
        the addon; any url change is logged on the owning node.
        """
        auth = Auth(self.context['request'].user)
        set_url = 'url' in validated_data
        set_label = 'label' in validated_data
        url_changed = False
        url = validated_data.get('url')
        label = validated_data.get('label')
        if set_url and not url and label:
            raise exceptions.ValidationError(detail='Cannot set label without url')
        if not instance:
            # First configuration: lazily enable the addon on the node.
            node = self.context['view'].get_node()
            instance = node.get_or_add_addon('forward', auth)
        if instance and instance.url:
            # url required, label optional
            if set_url and not url:
                instance.reset()
            elif set_url and url:
                instance.url = url
                url_changed = True
            if set_label:
                instance.label = label
        elif instance and not instance.url:
            instance.url = url
            instance.label = label
            url_changed = True
        instance.save()
        if url_changed:
            # add log here because forward architecture isn't great
            # TODO [OSF-6678]: clean this up
            instance.owner.add_log(
                action='forward_url_changed',
                params=dict(
                    node=instance.owner._id,
                    project=instance.owner.parent_id,
                    forward_url=instance.url,
                ),
                auth=auth,
                save=True,
            )
        return instance
class NodeAddonSettingsSerializer(NodeAddonSettingsSerializerBase):
    """Generic addon-settings serializer handling account auth and folder
    configuration updates."""
    def check_for_update_errors(self, node_settings, folder_info, external_account_id):
        """Reject setting a folder on settings that have no authorization."""
        if (not node_settings.has_auth and folder_info and not external_account_id):
            raise Conflict('Cannot set folder without authorization')
    def get_account_info(self, data):
        """Return (set_account, external_account_id) from the request data."""
        try:
            external_account_id = data['external_account']['_id']
            set_account = True
        except KeyError:
            external_account_id = None
            set_account = False
        return set_account, external_account_id
    def get_folder_info(self, data, addon_name):
        """Return (set_folder, folder_info) from the request data.

        For googledrive, folder_id and folder_path must be supplied
        together and are combined into a single dict.
        """
        try:
            folder_info = data['folder_id']
            set_folder = True
        except KeyError:
            folder_info = None
            set_folder = False
        if addon_name == 'googledrive':
            folder_id = folder_info
            try:
                folder_path = data['folder_path']
            except KeyError:
                folder_path = None
            if (folder_id or folder_path) and not (folder_id and folder_path):
                raise exceptions.ValidationError(detail='Must specify both folder_id and folder_path for {}'.format(addon_name))
            folder_info = {
                'id': folder_id,
                'path': folder_path
            }
        return set_folder, folder_info
    def get_account_or_error(self, addon_name, external_account_id, auth):
        """Load the external account, enforcing existence, ownership by the
        requesting user, and a provider matching the addon."""
        external_account = ExternalAccount.load(external_account_id)
        if not external_account:
            raise exceptions.NotFound('Unable to find requested account.')
        if not auth.user.external_accounts.filter(id=external_account.id).exists():
            raise exceptions.PermissionDenied('Requested action requires account ownership.')
        if external_account.provider != addon_name:
            raise Conflict('Cannot authorize the {} addon with an account for {}'.format(addon_name, external_account.provider))
        return external_account
    def should_call_set_folder(self, folder_info, instance, auth, node_settings):
        """Return True when folder_info names a folder different from the
        currently configured one; enforces addon ownership first."""
        if (folder_info and not (  # If we have folder information to set
                instance and getattr(instance, 'folder_id', False) and (  # and the settings aren't already configured with this folder
                    instance.folder_id == folder_info or (hasattr(folder_info, 'get') and instance.folder_id == folder_info.get('id', False))
                ))):
            if auth.user._id != node_settings.user_settings.owner._id:  # And the user is allowed to do this
                raise exceptions.PermissionDenied('Requested action requires addon ownership.')
            return True
        return False
    def update(self, instance, validated_data):
        """Apply account (de)authorization and folder configuration changes."""
        addon_name = instance.config.short_name
        if addon_name not in ADDONS_FOLDER_CONFIGURABLE:
            raise EndpointNotImplementedError('Requested addon not currently configurable via API.')
        auth = get_user_auth(self.context['request'])
        set_account, external_account_id = self.get_account_info(validated_data)
        set_folder, folder_info = self.get_folder_info(validated_data, addon_name)
        # Maybe raise errors
        self.check_for_update_errors(instance, folder_info, external_account_id)
        if instance and instance.configured and set_folder and not folder_info:
            # Enabled and configured, user requesting folder unset
            instance.clear_settings()
            instance.save()
        if instance and instance.has_auth and set_account and not external_account_id:
            # Settings authorized, User requesting deauthorization
            instance.deauthorize(auth=auth)  # clear_auth performs save
            return instance
        elif external_account_id:
            # Settings may or may not be authorized, user requesting to set instance.external_account
            account = self.get_account_or_error(addon_name, external_account_id, auth)
            if instance.external_account and external_account_id != instance.external_account._id:
                # Ensure node settings are deauthorized first, logs
                instance.deauthorize(auth=auth)
            instance.set_auth(account, auth.user)
        if set_folder and self.should_call_set_folder(folder_info, instance, auth, instance):
            # Enabled, user requesting to set folder
            try:
                instance.set_folder(folder_info, auth)
                instance.save()
            except InvalidFolderError:
                raise exceptions.NotFound('Unable to find requested folder.')
            except InvalidAuthError:
                raise exceptions.PermissionDenied('Addon credentials are invalid.')
        return instance
class NodeDetailSerializer(NodeSerializer):
    """
    Overrides NodeSerializer to make id required.
    """
    # Detail endpoints address an existing node, so '_id' is mandatory.
    id = IDField(source='_id', required=True)
class NodeForksSerializer(NodeSerializer):
    """Serializer used when forking a node.

    Unlike the base serializer, ``title`` is optional and ``category``
    is read-only; ``forked_date`` is exposed read-only.
    """
    category_choices = settings.NODE_CATEGORY_MAP.items()
    category_choices_string = ', '.join(["'{}'".format(choice[0]) for choice in category_choices])
    title = ser.CharField(required=False)
    category = ser.ChoiceField(read_only=True, choices=category_choices, help_text='Choices: ' + category_choices_string)
    forked_date = DateByVersion(read_only=True)
    def create(self, validated_data):
        """Fork the node supplied by the view, optionally retitling it."""
        node = validated_data.pop('node')
        fork_title = validated_data.pop('title', None)
        request = self.context['request']
        auth = get_user_auth(request)
        fork = node.fork_node(auth, title=fork_title)
        try:
            fork.save()
        except ValidationError as e:
            raise InvalidModelValueError(detail=e.message)
        return fork
class ContributorIDField(IDField):
    """ID field to use with the contributor resource. Contributor IDs have the form "<node-id>-<user-id>"."""
    def __init__(self, *args, **kwargs):
        # Default the source and help text; explicit kwargs win.
        kwargs.setdefault('source', '_id')
        kwargs.setdefault(
            'help_text',
            'Unique contributor ID. Has the form "<node-id>-<user-id>". Example: "abc12-xyz34"')
        super(ContributorIDField, self).__init__(*args, **kwargs)
    def _get_node_id(self):
        # The node id comes from the URL kwargs of the current request.
        return self.context['request'].parser_context['kwargs']['node_id']
    # override IDField
    def get_id(self, obj):
        return '{}-{}'.format(self._get_node_id(), obj._id)
    def to_representation(self, value):
        user_part = super(ContributorIDField, self).to_representation(value)
        return '{}-{}'.format(self._get_node_id(), user_part)
class NodeContributorsSerializer(JSONAPISerializer):
    """ Separate from UserSerializer due to necessity to override almost every field as read only
    """
    # Fields hidden from anonymized (view-only link) requests.
    non_anonymized_fields = ['bibliographic', 'permission']
    filterable_fields = frozenset([
        'id',
        'bibliographic',
        'permission',
        'index'
    ])
    id = IDField(source='_id', read_only=True)
    type = TypeField()
    # Position of the contributor within the node's contributor list.
    index = ser.IntegerField(required=False, read_only=True, source='_order')
    bibliographic = ser.BooleanField(help_text='Whether the user will be included in citations for this node or not.',
                                     default=True)
    permission = ser.ChoiceField(choices=osf_permissions.PERMISSIONS, required=False, allow_null=True,
                                 default=osf_permissions.reduce_permissions(osf_permissions.DEFAULT_CONTRIBUTOR_PERMISSIONS),
                                 help_text='User permission level. Must be "read", "write", or "admin". Defaults to "write".')
    unregistered_contributor = ser.SerializerMethodField()
    links = LinksField({
        'self': 'get_absolute_url'
    })
    users = RelationshipField(
        related_view='users:user-detail',
        related_view_kwargs={'user_id': '<user._id>'},
        always_embed=True
    )
    node = RelationshipField(
        related_view='nodes:node-detail',
        related_view_kwargs={'node_id': '<node._id>'}
    )
    class Meta:
        type_ = 'contributors'
    def get_absolute_url(self, obj):
        # Contributor detail URLs are nested under the parent node.
        return absolute_reverse(
            'nodes:node-contributor-detail',
            kwargs={
                'user_id': obj.user._id,
                'node_id': self.context['request'].parser_context['kwargs']['node_id'],
                'version': self.context['request'].parser_context['kwargs']['version']
            }
        )
    def get_unregistered_contributor(self, obj):
        """Return the recorded name of an unregistered contributor for this node, or None."""
        unclaimed_records = obj.user.unclaimed_records.get(obj.node._id, None)
        if unclaimed_records:
            return unclaimed_records.get('name', None)
class NodeContributorsCreateSerializer(NodeContributorsSerializer):
    """
    Overrides NodeContributorsSerializer to add email, full_name, send_email, and non-required index and users field.
    """
    id = IDField(source='_id', required=False, allow_null=True)
    full_name = ser.CharField(required=False)
    email = ser.EmailField(required=False, source='user.email')
    index = ser.IntegerField(required=False, source='_order')
    users = RelationshipField(
        related_view='users:user-detail',
        related_view_kwargs={'user_id': '<user._id>'},
        required=False
    )
    # Accepted values for the `send_email` query parameter.
    email_preferences = ['default', 'preprint', 'false']
    def validate_data(self, node, user_id=None, full_name=None, email=None, index=None):
        """Validate the mutually-exclusive id/name+email inputs and the index.

        Raises Conflict when both a user id and name/email are given, and
        ValidationError when neither is given or the index is out of range.
        """
        if user_id and (full_name or email):
            raise Conflict(detail='Full name and/or email should not be included with a user ID.')
        if not user_id and not full_name:
            raise exceptions.ValidationError(detail='A user ID or full name must be provided to add a contributor.')
        # An omitted index is always valid; guard it explicitly so we never
        # compare None against an int (a TypeError on Python 3).
        if index is not None and index > len(node.contributors):
            raise exceptions.ValidationError(detail='{} is not a valid contributor index for node with id {}'.format(index, node._id))
    def create(self, validated_data):
        """Add a registered or unregistered contributor to the node."""
        contrib_id = validated_data.get('_id')  # renamed local: don't shadow builtin `id`
        email = validated_data.get('user', {}).get('email', None)
        index = None
        if '_order' in validated_data:
            index = validated_data.pop('_order')
        node = self.context['view'].get_node()
        auth = Auth(self.context['request'].user)
        full_name = validated_data.get('full_name')
        bibliographic = validated_data.get('bibliographic')
        # `send_email` comes from the query string, not the request body.
        send_email = self.context['request'].GET.get('send_email') or 'default'
        permissions = osf_permissions.expand_permissions(validated_data.get('permission')) or osf_permissions.DEFAULT_CONTRIBUTOR_PERMISSIONS
        self.validate_data(node, user_id=contrib_id, full_name=full_name, email=email, index=index)
        if send_email not in self.email_preferences:
            raise exceptions.ValidationError(detail='{} is not a valid email preference.'.format(send_email))
        try:
            contributor_obj = node.add_contributor_registered_or_not(
                auth=auth, user_id=contrib_id, email=email, full_name=full_name, send_email=send_email,
                permissions=permissions, bibliographic=bibliographic, index=index, save=True
            )
        except ValidationError as e:
            raise exceptions.ValidationError(detail=e.messages[0])
        except ValueError as e:
            # Raised when the referenced user does not exist.
            raise exceptions.NotFound(detail=e.args[0])
        return contributor_obj
class NodeContributorDetailSerializer(NodeContributorsSerializer):
    """
    Overrides node contributor serializer to add additional methods
    """
    id = IDField(required=True, source='_id')
    # Unlike the list serializer, `index` is writable so a contributor can be reordered.
    index = ser.IntegerField(required=False, read_only=False, source='_order')
    def update(self, instance, validated_data):
        """Update a contributor's order, permission and/or bibliographic flag.

        Raises ValidationError when the node rejects the change (e.g. an
        out-of-range index, or removing the last admin).
        """
        index = None
        if '_order' in validated_data:
            index = validated_data.pop('_order')
        auth = Auth(self.context['request'].user)
        node = self.context['view'].get_node()
        if 'bibliographic' in validated_data:
            bibliographic = validated_data.get('bibliographic')
        else:
            # Keep the current visibility when the field is omitted.
            bibliographic = node.get_visible(instance.user)
        permission = validated_data.get('permission') or instance.permission
        try:
            if index is not None:
                node.move_contributor(instance.user, auth, index, save=True)
            node.update_contributor(instance.user, permission, bibliographic, auth, save=True)
        except NodeStateError as e:
            raise exceptions.ValidationError(detail=e.message)
        except ValueError as e:
            raise exceptions.ValidationError(detail=e.message)
        instance.refresh_from_db()
        return instance
class NodeLinksSerializer(JSONAPISerializer):
    """Serializer for node links (pointers from one node to another)."""
    id = IDField(source='_id')
    type = TypeField()
    target_type = TargetTypeField(target_type='nodes')
    # TODO: We don't show the title because the current user may not have access to this node. We may want to conditionally
    # include this field in the future.
    # title = ser.CharField(read_only=True, source='node.title', help_text='The title of the node that this Node Link '
    #                                                                     'points to')
    target_node = RelationshipField(
        related_view='nodes:node-detail',
        related_view_kwargs={'node_id': '<child._id>'},
        always_embed=True
    )
    class Meta:
        type_ = 'node_links'
    links = LinksField({
        'self': 'get_absolute_url'
    })
    def get_absolute_url(self, obj):
        return absolute_reverse(
            'nodes:node-pointer-detail',
            kwargs={
                'node_link_id': obj._id,
                'node_id': self.context['request'].parser_context['kwargs']['node_id'],
                'version': self.context['request'].parser_context['kwargs']['version']
            }
        )
    def create(self, validated_data):
        """Create a pointer from the current node to the node given by `_id`.

        Raises InvalidModelValueError when the target is missing, is a
        collection, or is already pointed to by this node.
        """
        request = self.context['request']
        user = request.user
        auth = Auth(user)
        node = self.context['view'].get_node()
        target_node_id = validated_data['_id']
        pointer_node = Node.load(target_node_id)
        if not pointer_node or pointer_node.is_collection:
            raise InvalidModelValueError(
                source={'pointer': '/data/relationships/node_links/data/id'},
                detail='Target Node \'{}\' not found.'.format(target_node_id)
            )
        try:
            pointer = node.add_pointer(pointer_node, auth, save=True)
            return pointer
        except ValueError:
            # add_pointer raises ValueError for duplicate pointers.
            raise InvalidModelValueError(
                source={'pointer': '/data/relationships/node_links/data/id'},
                detail='Target Node \'{}\' already pointed to by \'{}\'.'.format(target_node_id, node._id)
            )
    def update(self, instance, validated_data):
        # Node links are immutable; the detail view only supports GET/DELETE.
        pass
class NodeProviderSerializer(JSONAPISerializer):
    """Serializer for a node's storage providers (roots of the file tree)."""
    id = ser.SerializerMethodField(read_only=True)
    kind = ser.CharField(read_only=True)
    name = ser.CharField(read_only=True)
    path = ser.CharField(read_only=True)
    node = ser.CharField(source='node_id', read_only=True)
    provider = ser.CharField(read_only=True)
    files = NodeFileHyperLinkField(
        related_view='nodes:node-files',
        related_view_kwargs={'node_id': '<node._id>', 'path': '<path>', 'provider': '<provider>'},
        kind='folder',
        never_embed=True
    )
    links = LinksField({
        'upload': WaterbutlerLink(),
        'new_folder': WaterbutlerLink(kind='folder'),
        'storage_addons': 'get_storage_addons_url'
    })
    class Meta:
        type_ = 'files'
    @staticmethod
    def get_id(obj):
        # Providers have no GUID of their own; synthesize "<node-id>:<provider>".
        return '{}:{}'.format(obj.node._id, obj.provider)
    def get_absolute_url(self, obj):
        return absolute_reverse(
            'nodes:node-provider-detail',
            kwargs={
                'node_id': obj.node._id,
                'provider': obj.provider,
                'version': self.context['request'].parser_context['kwargs']['version']
            }
        )
    def get_storage_addons_url(self, obj):
        # Link to the list of all addons in the "storage" category.
        return absolute_reverse(
            'addons:addon-list',
            kwargs={
                'version': self.context['request'].parser_context['kwargs']['version']
            },
            query_kwargs={
                'filter[categories]': 'storage'
            }
        )
class InstitutionRelated(JSONAPIRelationshipSerializer):
    """Relationship payload entry for a single institution."""
    # Institution GUID; optional/null so an empty relationship list is valid.
    id = ser.CharField(source='_id', required=False, allow_null=True)
    class Meta:
        type_ = 'institutions'
class NodeInstitutionsRelationshipSerializer(BaseAPISerializer):
    """Serializer for the node <-> affiliated-institutions relationship endpoint."""
    data = ser.ListField(child=InstitutionRelated())
    links = LinksField({'self': 'get_self_url',
                        'html': 'get_related_url'})
    def get_self_url(self, obj):
        return obj['self'].institutions_relationship_url
    def get_related_url(self, obj):
        return obj['self'].institutions_url
    class Meta:
        type_ = 'institutions'
    def get_institutions_to_add_remove(self, institutions, new_institutions):
        """Diff the current institutions against the requested set.

        Returns (insts_to_add, insts_to_remove); raises NotFound when a
        requested institution id does not exist.
        """
        diff = relationship_diff(
            current_items={inst._id: inst for inst in institutions.all()},
            new_items={inst['_id']: inst for inst in new_institutions}
        )
        insts_to_add = []
        for inst_id in diff['add']:
            inst = Institution.load(inst_id)
            if not inst:
                raise exceptions.NotFound(detail='Institution with id "{}" was not found'.format(inst_id))
            insts_to_add.append(inst)
        return insts_to_add, diff['remove'].values()
    def make_instance_obj(self, obj):
        # Relationship serializers operate on a {'data': ..., 'self': ...} wrapper.
        return {
            'data': obj.affiliated_institutions.all(),
            'self': obj
        }
    def update(self, instance, validated_data):
        """Replace the node's affiliations (PUT/PATCH semantics)."""
        node = instance['self']
        user = self.context['request'].user
        add, remove = self.get_institutions_to_add_remove(
            institutions=instance['data'],
            new_institutions=validated_data['data']
        )
        for inst in remove:
            # Node admins may detach institutions they do not belong to.
            if not user.is_affiliated_with_institution(inst) and not node.has_permission(user, 'admin'):
                raise exceptions.PermissionDenied(detail='User needs to be affiliated with {}'.format(inst.name))
            node.remove_affiliated_institution(inst, user)
        for inst in add:
            if not user.is_affiliated_with_institution(inst):
                raise exceptions.PermissionDenied(detail='User needs to be affiliated with {}'.format(inst.name))
            node.add_affiliated_institution(inst, user)
        node.save()
        return self.make_instance_obj(node)
    def create(self, validated_data):
        """Add affiliations to the node (POST semantics); nothing is removed."""
        instance = self.context['view'].get_object()
        user = self.context['request'].user
        node = instance['self']
        add, remove = self.get_institutions_to_add_remove(
            institutions=instance['data'],
            new_institutions=validated_data['data']
        )
        if not add:  # idiomatic emptiness check (was `not len(add)`)
            raise RelationshipPostMakesNoChanges
        # Validate affiliation for every institution before mutating the node,
        # so a failure partway through leaves the node unchanged.
        for inst in add:
            if not user.is_affiliated_with_institution(inst):
                raise exceptions.PermissionDenied(detail='User needs to be affiliated with {}'.format(inst.name))
        for inst in add:
            node.add_affiliated_institution(inst, user)
        node.save()
        return self.make_instance_obj(node)
class NodeAlternativeCitationSerializer(JSONAPISerializer):
    """Serializer for a node's alternative (custom) citations."""
    id = IDField(source='_id', read_only=True)
    type = TypeField()
    name = ser.CharField(required=True)
    text = ser.CharField(required=True)
    class Meta:
        type_ = 'citations'
    def create(self, validated_data):
        """Add a citation to the node after checking name/text uniqueness."""
        errors = self.error_checker(validated_data)
        if len(errors) > 0:
            raise exceptions.ValidationError(detail=errors)
        node = self.context['view'].get_node()
        auth = Auth(self.context['request']._user)
        citation = node.add_citation(auth, save=True, **validated_data)
        return citation
    def update(self, instance, validated_data):
        """Edit an existing citation, re-running the uniqueness checks."""
        errors = self.error_checker(validated_data)
        if len(errors) > 0:
            raise exceptions.ValidationError(detail=errors)
        node = self.context['view'].get_node()
        auth = Auth(self.context['request']._user)
        instance = node.edit_citation(auth, instance, save=True, **validated_data)
        return instance
    def error_checker(self, data):
        """Return human-readable errors for duplicate citation name or text.

        A citation is allowed to keep its own name/text on update, hence the
        `self.instance` comparisons.
        """
        errors = []
        name = data.get('name', None)
        text = data.get('text', None)
        citations = self.context['view'].get_node().alternative_citations
        if not (self.instance and self.instance.name == name) and citations.filter(name=name).count() > 0:
            errors.append("There is already a citation named '{}'".format(name))
        if not (self.instance and self.instance.text == text):
            matching_citations = citations.filter(text=text)
            if matching_citations.count() > 0:
                names = "', '".join([str(citation.name) for citation in matching_citations])
                errors.append("Citation matches '{}'".format(names))
        return errors
    def get_absolute_url(self, obj):
        # Citations don't have urls
        raise NotImplementedError
class DraftRegistrationSerializer(JSONAPISerializer):
    """Serializer for draft registrations of a node."""
    id = IDField(source='_id', read_only=True)
    type = TypeField()
    registration_supplement = ser.CharField(source='registration_schema._id', required=True)
    registration_metadata = ser.DictField(required=False)
    datetime_initiated = DateByVersion(read_only=True)
    datetime_updated = DateByVersion(read_only=True)
    branched_from = RelationshipField(
        related_view='nodes:node-detail',
        related_view_kwargs={'node_id': '<branched_from._id>'}
    )
    initiator = RelationshipField(
        related_view='users:user-detail',
        related_view_kwargs={'user_id': '<initiator._id>'},
    )
    registration_schema = RelationshipField(
        related_view='metaschemas:metaschema-detail',
        related_view_kwargs={'metaschema_id': '<registration_schema._id>'}
    )
    links = LinksField({
        'html': 'get_absolute_url'
    })
    def get_absolute_url(self, obj):
        return obj.absolute_url
    def create(self, validated_data):
        """Create a draft registration from `node` using the given schema.

        The schema must be the latest, active version. Metadata, when
        provided, is validated before being saved.
        """
        node = validated_data.pop('node')
        initiator = validated_data.pop('initiator')
        metadata = validated_data.pop('registration_metadata', None)
        schema_id = validated_data.pop('registration_schema').get('_id')
        schema = get_object_or_error(MetaSchema, schema_id)
        if schema.schema_version != LATEST_SCHEMA_VERSION or not schema.active:
            raise exceptions.ValidationError('Registration supplement must be an active schema.')
        draft = DraftRegistration.create_from_node(node=node, user=initiator, schema=schema)
        reviewer = is_prereg_admin_not_project_admin(self.context['request'], draft)
        if metadata:
            try:
                # Required fields are only required when creating the actual registration, not updating the draft.
                draft.validate_metadata(metadata=metadata, reviewer=reviewer, required_fields=False)
            except ValidationError as e:
                raise exceptions.ValidationError(e.message)
            draft.update_metadata(metadata)
            draft.save()
        return draft
    class Meta:
        type_ = 'draft_registrations'
class DraftRegistrationDetailSerializer(DraftRegistrationSerializer):
    """
    Overrides DraftRegistrationSerializer to make id and registration_metadata required.
    registration_supplement cannot be changed after draft has been created.
    Also makes registration_supplement read-only.
    """
    id = IDField(source='_id', required=True)
    registration_metadata = ser.DictField(required=True)
    registration_supplement = ser.CharField(read_only=True, source='registration_schema._id')
    def update(self, draft, validated_data):
        """
        Update draft instance with the validated metadata.
        """
        metadata = validated_data.pop('registration_metadata', None)
        reviewer = is_prereg_admin_not_project_admin(self.context['request'], draft)
        if metadata:
            try:
                # Required fields are only required when creating the actual registration, not updating the draft.
                draft.validate_metadata(metadata=metadata, reviewer=reviewer, required_fields=False)
            except ValidationError as e:
                raise exceptions.ValidationError(e.message)
            draft.update_metadata(metadata)
            draft.save()
        return draft
class NodeVOL(ser.Field):
    """Field that renders a node as its GUID string."""
    def to_representation(self, obj):
        # A missing node serializes as None rather than raising.
        return None if obj is None else obj._id
    def to_internal_value(self, data):
        # Accept the raw id unchanged; resolution happens elsewhere.
        return data
class NodeViewOnlyLinkSerializer(JSONAPISerializer):
    """Serializer for a node's view-only (shareable, optionally anonymous) links."""
    filterable_fields = frozenset([
        'anonymous',
        'name',
        'date_created'
    ])
    key = ser.CharField(read_only=True)
    id = IDField(source='_id', read_only=True)
    date_created = DateByVersion(read_only=True)
    anonymous = ser.BooleanField(required=False, default=False)
    name = ser.CharField(required=False, default='Shared project link')
    links = LinksField({
        'self': 'get_absolute_url'
    })
    creator = RelationshipField(
        related_view='users:user-detail',
        related_view_kwargs={'user_id': '<creator._id>'},
    )
    nodes = RelationshipField(
        related_view='view-only-links:view-only-link-nodes',
        related_view_kwargs={'link_id': '<_id>'},
        self_view='view-only-links:view-only-link-nodes-relationships',
        self_view_kwargs={'link_id': '<_id>'}
    )
    def create(self, validated_data):
        """Create a private (view-only) link attached to the current node."""
        name = validated_data.pop('name')
        user = get_user_auth(self.context['request']).user
        anonymous = validated_data.pop('anonymous')
        node = self.context['view'].get_node()
        try:
            view_only_link = new_private_link(
                name=name,
                user=user,
                nodes=[node],
                anonymous=anonymous
            )
        except ValidationError:
            raise exceptions.ValidationError('Invalid link name.')
        return view_only_link
    def get_absolute_url(self, obj):
        return absolute_reverse(
            'nodes:node-view-only-link-detail',
            kwargs={
                'link_id': obj._id,
                'node_id': self.context['request'].parser_context['kwargs']['node_id'],
                'version': self.context['request'].parser_context['kwargs']['version']
            }
        )
    class Meta:
        type_ = 'view_only_links'
class NodeViewOnlyLinkUpdateSerializer(NodeViewOnlyLinkSerializer):
    """
    Overrides NodeViewOnlyLinkSerializer to not default name and anonymous on update.
    """
    name = ser.CharField(required=False)
    anonymous = ser.BooleanField(required=False)
    def update(self, link, validated_data):
        """Update the name and/or anonymity of an existing view-only link."""
        assert isinstance(link, PrivateLink), 'link must be a PrivateLink'
        name = validated_data.get('name')
        if name:
            link.name = name
        # Check for key presence rather than truthiness so that an explicit
        # `anonymous: false` actually disables anonymity (previously a falsy
        # value was silently ignored and the link stayed anonymous).
        if 'anonymous' in validated_data:
            link.anonymous = validated_data['anonymous']
        link.save()
        return link
|
{
"content_hash": "952010b55366e4163867dc19359e412b",
"timestamp": "",
"source": "github",
"line_count": 1307,
"max_line_length": 173,
"avg_line_length": 38.82861514919664,
"alnum_prop": 0.6166821021103864,
"repo_name": "cwisecarver/osf.io",
"id": "3f2d5a9ffcc3bd19da2b7211a1d1aa5745180bfa",
"size": "50749",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "api/nodes/serializers.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "144027"
},
{
"name": "HTML",
"bytes": "217501"
},
{
"name": "JavaScript",
"bytes": "1712859"
},
{
"name": "Mako",
"bytes": "622293"
},
{
"name": "Perl",
"bytes": "13885"
},
{
"name": "Python",
"bytes": "7621431"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals, division, absolute_import
import logging
import re
from urllib import quote
from flexget import plugin
from flexget import validator
from flexget.entry import Entry
from flexget.event import event
from flexget.utils.soup import get_soup
from flexget.utils.search import torrent_availability, normalize_unicode, clean_title
from flexget.utils.requests import Session
log = logging.getLogger('search_sceneaccess')
# Site category ids keyed first by search scope (the site section the search
# runs in), then by human-readable category name. The numeric values are the
# `?c<N>=<N>` ids used in the site's browse URLs.
CATEGORIES = {
    'browse':
        {
            'Movies/DVD-R': 8,
            'Movies/x264': 22,
            'Movies/XviD': 7,
            'TV/HD-x264': 27,
            'TV/SD-x264': 17,
            'TV/XviD': 11,
            'Games/PC': 3,
            'Games/PS3': 5,
            'Games/PSP': 20,
            'Games/WII': 28,
            'Games/XBOX360': 23,
            'APPS/ISO': 1,
            'DOX': 14,
            'MISC': 21
        },
    'nonscene':
        {
            'Movies/HD-x264': 41,
            'Movies/SD-x264': 42,
            'Movies/XviD': 43,
            'TV/HD': 44,
            'TV/SD': 45
        },
    'mp3/0day':
        {
            '0DAY/APPS': 2,
            'FLAC': 40,
            'MP3': 13,
            'MVID': 15,
        },
    'archive':
        {
            'Movies/Packs': 4,
            'TV/Packs': 26,
            'Games/Packs': 29,
            'XXX/Packs': 37,
            'Music/Packs': 38
        },
    'foreign':
        {
            'Movies/DVD-R': 31,
            'Movies/x264': 32,
            'Movies/XviD': 30,
            'TV/x264': 34,
            'TV/XviD': 33,
        },
    'xxx':
        {
            'XXX/XviD': 12,
            'XXX/x264': 35,
            'XXX/0DAY': 36
        }
}
# Base URL of the tracker; all login/search requests are made relative to it.
URL = 'https://sceneaccess.eu/'
class SceneAccessSearch(object):
    """ Scene Access Search plugin
    == Basic usage:
    sceneaccess:
        username: XXXX          (required)
        password: XXXX          (required)
        category: Movies/x264   (optional)
        gravity_multiplier: 200 (optional)
    == Categories:
    +---------------+----------------+-----------+--------------+--------------+----------+
    |    browse     |    nonscene    | mp3/0day  |   archive    |   foreign    |   xxx    |
    +---------------+----------------+-----------+--------------+--------------+----------+
    | APPS/ISO      | Movies/HD-x264 | 0DAY/APPS | Games/Packs  | Movies/DVD-R | XXX/0DAY |
    | DOX           | Movies/SD-x264 | FLAC      | Movies/Packs | Movies/x264  | XXX/x264 |
    | Games/PC      | Movies/XviD    | MP3       | Music/Packs  | Movies/XviD  | XXX/XviD |
    | Games/PS3     | TV/HD          | MVID      | TV/Packs     | TV/x264      |          |
    | Games/PSP     | TV/SD          |           | XXX/Packs    | TV/XviD      |          |
    | Games/WII     |                |           |              |              |          |
    | Games/XBOX360 |                |           |              |              |          |
    | MISC          |                |           |              |              |          |
    | Movies/DVD-R  |                |           |              |              |          |
    | Movies/x264   |                |           |              |              |          |
    | Movies/XviD   |                |           |              |              |          |
    | TV/HD-x264    |                |           |              |              |          |
    | TV/SD-x264    |                |           |              |              |          |
    | TV/XviD       |                |           |              |              |          |
    +---------------+----------------+-----------+--------------+--------------+----------+
    You can combine the categories almost any way you want, here are some examples:
    category:
      archive: yes              => Will search all categories within archive section
    category: Movies/x264       => Search Movies/x264 within 'browse' section (browse is always default if unspecified)
    category:
      browse:
        - 22                    => This is custom category ID
        - Movies/XviD
      foreign:
        - Movies/x264
        - Movies/XviD
    Specifying specific category ID is also possible, you can extract ID from URL, for example
    if you hover or click on category on the site you'll see similar address:
    http://sceneaccess.URL/browse?cat=22
    In this example, according to this bit ?cat=22 , category id is 22.
    == Priority
    gravity_multiplier is optional parameter that increases odds of downloading found matches from sceneaccess
    instead of other search providers, that may have higher odds due to their higher number of peers.
    Although sceneaccess does not have many peers as some public trackers, the torrents are usually faster.
    By default, Flexget give higher priority to found matches according to following formula:
    gravity = number of seeds * 2 + number of leechers
    gravity_multiplier will multiply the above number by specified amount.
    If you use public trackers for searches, you may want to use this feature.
    """
    def validator(self):
        """Return config validator."""
        root = validator.factory('dict')
        root.accept('text', key='username', required=True)
        root.accept('text', key='password', required=True)
        root.accept('number', key='gravity_multiplier')
        # Scope as in pages like `browse`, `mp3/0day`, `foreign`, etc.
        # Will only accept categories from `browse` which will it default to, unless user specifies other scopes
        # via dict
        root.accept('choice', key='category').accept_choices(CATEGORIES['browse'])
        root.accept('number', key='category')
        categories = root.accept('dict', key='category')
        category_list = root.accept('list', key='category')
        category_list.accept('choice').accept_choices(CATEGORIES['browse'])
        for category in CATEGORIES:
            categories.accept('choice', key=category).accept_choices(CATEGORIES[category])
            categories.accept('boolean', key=category)
            categories.accept('number', key=category)
            category_list = categories.accept('list', key=category)
            category_list.accept('choice', key=category).accept_choices(CATEGORIES[category])
            category_list.accept('number', key=category)
        return root
    def processCategories(self, config):
        """Turn the `category` config into a list of search-URL fragments.

        Returns a list of dicts with `url_path` (the site section to query)
        and `category_url_string` ('&c<id>=<id>' pairs; empty string means
        all categories of that section).
        """
        scope = 'browse'  # Default scope to search in
        toProcess = dict()
        try:
            category = config['category']
            if isinstance(category, dict):  # Categories have search scope specified.
                for scope in category:
                    if isinstance(category[scope], bool):  # Boolean means "all categories of this scope".
                        category[scope] = []
                    elif not isinstance(category[scope], list):  # Convert single category into list
                        category[scope] = [category[scope]]
                    toProcess[scope] = category[scope]
            else:  # Single category specified, will default to `browse` scope.
                toProcess[scope] = [category]
        except KeyError:  # Category was not set, will default to `browse` scope and all categories.
            toProcess[scope] = []
        # Resolve category names to site ids and build the URL fragments.
        ret = list()
        for scope, categories in toProcess.iteritems():
            cat_ids = list()
            for category in categories:
                try:
                    resolved = CATEGORIES[scope][category]
                except KeyError:  # User provided category id directly
                    resolved = category
                if isinstance(resolved, list):
                    cat_ids.extend(resolved)
                else:
                    cat_ids.append(resolved)
            if scope == 'mp3/0day':  # mp3/0day is actually /spam?search= in URL, can safely change it now
                scope = 'spam'
            category_url_string = ''.join(['&c' + str(cid) + '=' + str(cid) for cid in cat_ids])  # &c<id>=<id>&...
            ret.append({'url_path': scope, 'category_url_string': category_url_string})
        return ret
    @plugin.internet(log)
    def search(self, entry, config=None):
        """
        Search for entries on SceneAccess
        """
        try:
            multip = int(config['gravity_multiplier'])
        except KeyError:
            multip = 1
        # Login...
        params = {'username': config['username'],
                  'password': config['password'],
                  'submit': 'come on in'}
        session = Session()
        # Header name fixed: was 'User agent', which the server would not
        # recognize; the standard request header is 'User-Agent'.
        session.headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:27.0) Gecko/20100101 Firefox/27.0'}
        log.debug('Logging in to %s...' % URL)
        session.post(URL + 'login', data=params)
        # Prepare queries...
        BASE_URLS = list()
        entries = set()
        for category in self.processCategories(config):
            BASE_URLS.append(URL + '%(url_path)s?method=2%(category_url_string)s' % category)
        # Search...
        for search_string in entry.get('search_strings', [entry['title']]):
            search_string_normalized = normalize_unicode(clean_title(search_string))
            search_string_url_fragment = '&search=' + quote(search_string_normalized.encode('utf8'))
            for url in BASE_URLS:
                url += search_string_url_fragment
                log.debug('Search URL for `%s`: %s' % (search_string, url))
                page = session.get(url).content
                soup = get_soup(page)
                for result in soup.findAll('tr', attrs={'class': 'tt_row'}):
                    # Build a fresh entry per result row; do not clobber the
                    # `entry` argument (the original rebound it in this loop).
                    new_entry = Entry()
                    new_entry['title'] = result.find('a', href=re.compile(r'details\?id=\d+'))['title']
                    new_entry['url'] = URL + result.find('a', href=re.compile(r'.torrent$'))['href']
                    new_entry['torrent_seeds'] = result.find('td', attrs={'class': 'ttr_seeders'}).string
                    new_entry['torrent_leeches'] = result.find('td', attrs={'class': 'ttr_leechers'}).string
                    new_entry['search_sort'] = torrent_availability(new_entry['torrent_seeds'], new_entry['torrent_leeches']) * multip
                    size = result.find('td', attrs={'class': 'ttr_size'}).next
                    size = re.search('(\d+(?:[.,]\d+)*)\s?([KMG]B)', size)
                    if size:
                        # Site reports decimal units; normalize to MiB.
                        if size.group(2) == 'GB':
                            new_entry['content_size'] = int(float(size.group(1)) * 1000 ** 3 / 1024 ** 2)
                        elif size.group(2) == 'MB':
                            new_entry['content_size'] = int(float(size.group(1)) * 1000 ** 2 / 1024 ** 2)
                        elif size.group(2) == 'KB':
                            new_entry['content_size'] = int(float(size.group(1)) * 1000 / 1024 ** 2)
                        else:
                            new_entry['content_size'] = int(float(size.group(1)) / 1024 ** 2)
                    entries.add(new_entry)
        return entries
@event('plugin.register')
def register_plugin():
    # Register this class with FlexGet as the `sceneaccess` search plugin (API v2).
    plugin.register(SceneAccessSearch, 'sceneaccess', groups=['search'], api_ver=2)
|
{
"content_hash": "ed59080342c33244bc14d1e60a96106e",
"timestamp": "",
"source": "github",
"line_count": 283,
"max_line_length": 120,
"avg_line_length": 40.82332155477032,
"alnum_prop": 0.4901757119362936,
"repo_name": "voriux/Flexget",
"id": "cf044f92d53db85cf60fb6bcfbf0bcd57cdb6cfb",
"size": "11553",
"binary": false,
"copies": "5",
"ref": "refs/heads/develop",
"path": "flexget/plugins/search_sceneaccess.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "56725"
},
{
"name": "JavaScript",
"bytes": "455222"
},
{
"name": "Python",
"bytes": "1849035"
}
],
"symlink_target": ""
}
|
# Example `requests` invocation for a GET with custom headers and repeated
# query parameters. NOTE(review): this looks like generated export-fixture
# output — values (including a content-length header on a bodyless GET and the
# placeholder URL) are reproduced verbatim; do not "fix" them without checking
# the generator.
import requests
url = 'http://address/path'
headers = {
    'header': 'qvalue',
    'content-length': '7',
}
# List values expand to repeated query keys: ?a=foo&a=bar&b=baz
params = {
    'a': ['foo', 'bar'],
    'b': 'baz',
}
response = requests.request(
    method='GET',
    url=url,
    headers=headers,
    params=params,
)
print(response.text)
|
{
"content_hash": "da317673866a7543a015b9364887cdba",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 28,
"avg_line_length": 13.227272727272727,
"alnum_prop": 0.5601374570446735,
"repo_name": "tdickers/mitmproxy",
"id": "af8f7c81b9112e187c0423effbe776c30ebe4356",
"size": "291",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "test/mitmproxy/data/test_flow_export/python_get.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "405"
},
{
"name": "CSS",
"bytes": "194361"
},
{
"name": "HTML",
"bytes": "2878"
},
{
"name": "JavaScript",
"bytes": "127316"
},
{
"name": "Python",
"bytes": "1248282"
},
{
"name": "Shell",
"bytes": "4087"
}
],
"symlink_target": ""
}
|
from IMAP import IMAPRepository
from offlineimap import folder, imaputil
from offlineimap.imapserver import IMAPServer
class GmailRepository(IMAPRepository):
    """Gmail IMAP repository.
    Uses hard-coded host name and port, see:
    http://mail.google.com/support/bin/answer.py?answer=78799&topic=12814
    """
    #: Gmail IMAP server hostname
    HOSTNAME = "imap.gmail.com"
    #: Gmail IMAP server port
    PORT = 993
    def __init__(self, reposname, account):
        """Initialize a GmailRepository object."""
        # Force Gmail's fixed connection settings into the account config
        # before delegating to the generic IMAP repository initializer.
        account.getconfig().set('Repository ' + reposname,
                                'remotehost', GmailRepository.HOSTNAME)
        account.getconfig().set('Repository ' + reposname,
                                'remoteport', GmailRepository.PORT)
        account.getconfig().set('Repository ' + reposname,
                                'ssl', 'yes')
        IMAPRepository.__init__(self, reposname, account)
    def gethost(self):
        # Always Gmail's IMAP host, regardless of user config.
        return GmailRepository.HOSTNAME
    def getport(self):
        # Always 993, regardless of user config.
        return GmailRepository.PORT
    def getssl(self):
        # Gmail requires SSL (truthy flag).
        return 1
    def getpreauthtunnel(self):
        # Pre-authenticated tunnels are not applicable to Gmail.
        return None
    def getfolder(self, foldername):
        return self.getfoldertype()(self.imapserver, foldername,
                                    self.nametrans(foldername),
                                    self.accountname, self)
    def getfoldertype(self):
        # Use the Gmail-specific folder class (handles Gmail delete semantics).
        return folder.Gmail.GmailFolder
    def getrealdelete(self, foldername):
        # XXX: `foldername` is currently ignored - the `realdelete`
        # setting is repository-wide
        return self.getconfboolean('realdelete', 0)
    def gettrashfolder(self, foldername):
        #: Where deleted mail should be moved
        return self.getconf('trashfolder','[Gmail]/Trash')
    def getspamfolder(self):
        #: Gmail also deletes messages upon EXPUNGE in the Spam folder
        return self.getconf('spamfolder','[Gmail]/Spam')
|
{
"content_hash": "19cbb7c0ba28d8f674f0614020fa31d5",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 75,
"avg_line_length": 32.93333333333333,
"alnum_prop": 0.6204453441295547,
"repo_name": "alexissmirnov/donomo",
"id": "4793db7798f13cdf7fdd69cd194318d286189840",
"size": "2807",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "donomo_archive/lib/offlineimap/repository/Gmail.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "360712"
},
{
"name": "Python",
"bytes": "7155992"
},
{
"name": "Shell",
"bytes": "391"
}
],
"symlink_target": ""
}
|
from os.path import dirname, realpath, exists
from setuptools import setup
import sys
# Package metadata for fcswrite.
author = u"Paul Müller"
authors = ["Philipp Rosendahl", author]
name = 'fcswrite'
description = 'Write .fcs files (flow cytometry)'
year = "2016"
long_description = """
This package provides basic functionalities for writing flow cytometry
standard (.fcs) files.
"""
# Make the package directory importable so the version can be read from
# fcswrite/_version.py without installing the package first.
sys.path.insert(0, realpath(dirname(__file__)) + "/" + name)
from _version import version  # noqa: E402
setup(
    name=name,
    author=author,
    author_email='dev@craban.de',
    url='https://github.com/ZELLMECHANIK-DRESDEN/fcswrite',
    version=version,
    packages=[name],
    package_dir={name: name},
    license="BSD (3 clause)",
    description=description,
    # Fall back to an empty long description when README.rst is absent
    # (e.g. when building from an sdist that omits it).
    long_description=open('README.rst').read() if exists('README.rst') else '',
    install_requires=["numpy>=1.7.0",
                      ],
    setup_requires=['pytest-runner'],
    python_requires=">=3.6",
    tests_require=["pytest",
                   "fcsparser",
                   ],
    include_package_data=True,
    keywords=["fcs", "flow cytometry", "flow cytometry standard"],
    classifiers=['Operating System :: OS Independent',
                 'Programming Language :: Python :: 3',
                 'Topic :: Scientific/Engineering :: Visualization',
                 'Intended Audience :: Science/Research',
                 ],
    platforms=['ALL'],
)
|
{
"content_hash": "1f4d4a06e01b1937b9e79ba94a29d4ea",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 79,
"avg_line_length": 30.565217391304348,
"alnum_prop": 0.6173541963015647,
"repo_name": "ZELLMECHANIK-DRESDEN/fcswrite",
"id": "47b0a2c31fc6a59295bad8a5f998c0085df3fdfa",
"size": "1453",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "22445"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import datetime
import time
import sys
from email.header import Header
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse
from django.conf import settings
from django.core import signals
from django.core import signing
from django.core.exceptions import DisallowedRedirect
from django.http.cookie import SimpleCookie
from django.utils import six, timezone
from django.utils.encoding import force_bytes, force_text, iri_to_uri
from django.utils.http import cookie_date
from django.utils.six.moves import map
# See http://www.iana.org/assignments/http-status-codes
# Default reason phrases keyed by status code; includes WebDAV (RFC 4918/5842)
# and additional registered codes. Used by HttpResponseBase when no explicit
# `reason` is given.
REASON_PHRASES = {
    # 1xx: informational
    100: 'CONTINUE',
    101: 'SWITCHING PROTOCOLS',
    102: 'PROCESSING',
    # 2xx: success
    200: 'OK',
    201: 'CREATED',
    202: 'ACCEPTED',
    203: 'NON-AUTHORITATIVE INFORMATION',
    204: 'NO CONTENT',
    205: 'RESET CONTENT',
    206: 'PARTIAL CONTENT',
    207: 'MULTI-STATUS',
    208: 'ALREADY REPORTED',
    226: 'IM USED',
    # 3xx: redirection
    300: 'MULTIPLE CHOICES',
    301: 'MOVED PERMANENTLY',
    302: 'FOUND',
    303: 'SEE OTHER',
    304: 'NOT MODIFIED',
    305: 'USE PROXY',
    306: 'RESERVED',
    307: 'TEMPORARY REDIRECT',
    # 4xx: client error
    400: 'BAD REQUEST',
    401: 'UNAUTHORIZED',
    402: 'PAYMENT REQUIRED',
    403: 'FORBIDDEN',
    404: 'NOT FOUND',
    405: 'METHOD NOT ALLOWED',
    406: 'NOT ACCEPTABLE',
    407: 'PROXY AUTHENTICATION REQUIRED',
    408: 'REQUEST TIMEOUT',
    409: 'CONFLICT',
    410: 'GONE',
    411: 'LENGTH REQUIRED',
    412: 'PRECONDITION FAILED',
    413: 'REQUEST ENTITY TOO LARGE',
    414: 'REQUEST-URI TOO LONG',
    415: 'UNSUPPORTED MEDIA TYPE',
    416: 'REQUESTED RANGE NOT SATISFIABLE',
    417: 'EXPECTATION FAILED',
    418: "I'M A TEAPOT",
    422: 'UNPROCESSABLE ENTITY',
    423: 'LOCKED',
    424: 'FAILED DEPENDENCY',
    426: 'UPGRADE REQUIRED',
    428: 'PRECONDITION REQUIRED',
    429: 'TOO MANY REQUESTS',
    431: 'REQUEST HEADER FIELDS TOO LARGE',
    # 5xx: server error
    500: 'INTERNAL SERVER ERROR',
    501: 'NOT IMPLEMENTED',
    502: 'BAD GATEWAY',
    503: 'SERVICE UNAVAILABLE',
    504: 'GATEWAY TIMEOUT',
    505: 'HTTP VERSION NOT SUPPORTED',
    506: 'VARIANT ALSO NEGOTIATES',
    507: 'INSUFFICIENT STORAGE',
    508: 'LOOP DETECTED',
    510: 'NOT EXTENDED',
    511: 'NETWORK AUTHENTICATION REQUIRED',
}
class BadHeaderError(ValueError):
    """Raised when a header value contains newline characters, which would
    allow HTTP response splitting (see _convert_to_charset)."""
    pass
class HttpResponseBase(six.Iterator):
    """
    An HTTP response base class with dictionary-accessed headers.
    This class doesn't handle content. It should not be used directly.
    Use the HttpResponse and StreamingHttpResponse subclasses instead.
    """
    status_code = 200
    reason_phrase = None        # Use default reason phrase for status code.
    def __init__(self, content_type=None, status=None, reason=None):
        # _headers is a mapping of the lower-case name to the original case of
        # the header (required for working with legacy systems) and the header
        # value. Both the name of the header and its value are ASCII strings.
        self._headers = {}
        self._charset = settings.DEFAULT_CHARSET
        # objects registered here get close() called when the response closes
        self._closable_objects = []
        # This parameter is set by the handler. It's necessary to preserve the
        # historical behavior of request_finished.
        self._handler_class = None
        if not content_type:
            # fall back to the configured default content type and charset
            content_type = "%s; charset=%s" % (settings.DEFAULT_CONTENT_TYPE,
                    self._charset)
        self.cookies = SimpleCookie()
        if status is not None:
            self.status_code = status
        if reason is not None:
            self.reason_phrase = reason
        elif self.reason_phrase is None:
            # derive the reason phrase from the status code when not given
            self.reason_phrase = REASON_PHRASES.get(self.status_code,
                    'UNKNOWN STATUS CODE')
        self['Content-Type'] = content_type
    def serialize_headers(self):
        """HTTP headers as a bytestring."""
        def to_bytes(val, encoding):
            return val if isinstance(val, bytes) else val.encode(encoding)
        headers = [
            (b': '.join([to_bytes(key, 'ascii'), to_bytes(value, 'latin-1')]))
            for key, value in self._headers.values()
        ]
        return b'\r\n'.join(headers)
    # bytes(response) on py3 / str(response) on py2 yields the raw headers
    if six.PY3:
        __bytes__ = serialize_headers
    else:
        __str__ = serialize_headers
    def _convert_to_charset(self, value, charset, mime_encode=False):
        """Converts headers key/value to ascii/latin-1 native strings.
        `charset` must be 'ascii' or 'latin-1'. If `mime_encode` is True and
        `value` can't be represented in the given charset, MIME-encoding
        is applied.
        """
        if not isinstance(value, (bytes, six.text_type)):
            value = str(value)
        try:
            if six.PY3:
                if isinstance(value, str):
                    # Ensure string is valid in given charset
                    value.encode(charset)
                else:
                    # Convert bytestring using given charset
                    value = value.decode(charset)
            else:
                if isinstance(value, str):
                    # Ensure string is valid in given charset
                    value.decode(charset)
                else:
                    # Convert unicode string to given charset
                    value = value.encode(charset)
        except UnicodeError as e:
            if mime_encode:
                # Wrapping in str() is a workaround for #12422 under Python 2.
                value = str(Header(value, 'utf-8', maxlinelen=sys.maxsize).encode())
            else:
                e.reason += ', HTTP response headers must be in %s format' % charset
                raise
        if str('\n') in value or str('\r') in value:
            # reject embedded newlines to prevent HTTP response splitting
            raise BadHeaderError("Header values can't contain newlines (got %r)" % value)
        return value
    def __setitem__(self, header, value):
        # header names must be ASCII; values latin-1 (MIME-encoded on demand)
        header = self._convert_to_charset(header, 'ascii')
        value = self._convert_to_charset(value, 'latin-1', mime_encode=True)
        self._headers[header.lower()] = (header, value)
    def __delitem__(self, header):
        # deleting a missing header is a silent no-op
        try:
            del self._headers[header.lower()]
        except KeyError:
            pass
    def __getitem__(self, header):
        # case-insensitive lookup; returns the header value only
        return self._headers[header.lower()][1]
    def __getstate__(self):
        # SimpleCookie is not pickeable with pickle.HIGHEST_PROTOCOL, so we
        # serialize to a string instead
        state = self.__dict__.copy()
        state['cookies'] = str(state['cookies'])
        return state
    def __setstate__(self, state):
        # reverse of __getstate__: rebuild the SimpleCookie from its string form
        self.__dict__.update(state)
        self.cookies = SimpleCookie(self.cookies)
    def has_header(self, header):
        """Case-insensitive check for a header."""
        return header.lower() in self._headers
    __contains__ = has_header
    def items(self):
        # (original-case name, value) pairs for all headers
        return self._headers.values()
    def get(self, header, alternate=None):
        # case-insensitive header lookup with a default
        return self._headers.get(header.lower(), (None, alternate))[1]
    def set_cookie(self, key, value='', max_age=None, expires=None, path='/',
                   domain=None, secure=False, httponly=False):
        """
        Sets a cookie.
        ``expires`` can be:
        - a string in the correct format,
        - a naive ``datetime.datetime`` object in UTC,
        - an aware ``datetime.datetime`` object in any time zone.
        If it is a ``datetime.datetime`` object then ``max_age`` will be calculated.
        """
        self.cookies[key] = value
        if expires is not None:
            if isinstance(expires, datetime.datetime):
                if timezone.is_aware(expires):
                    expires = timezone.make_naive(expires, timezone.utc)
                # expires.utcnow() is datetime.datetime.utcnow() called via the
                # instance; delta is the remaining lifetime from "now"
                delta = expires - expires.utcnow()
                # Add one second so the date matches exactly (a fraction of
                # time gets lost between converting to a timedelta and
                # then the date string).
                delta = delta + datetime.timedelta(seconds=1)
                # Just set max_age - the max_age logic will set expires.
                expires = None
                max_age = max(0, delta.days * 86400 + delta.seconds)
            else:
                self.cookies[key]['expires'] = expires
        if max_age is not None:
            self.cookies[key]['max-age'] = max_age
            # IE requires expires, so set it if hasn't been already.
            if not expires:
                self.cookies[key]['expires'] = cookie_date(time.time() +
                        max_age)
        if path is not None:
            self.cookies[key]['path'] = path
        if domain is not None:
            self.cookies[key]['domain'] = domain
        if secure:
            self.cookies[key]['secure'] = True
        if httponly:
            self.cookies[key]['httponly'] = True
    def set_signed_cookie(self, key, value, salt='', **kwargs):
        # sign the value (key is part of the salt) before storing the cookie
        value = signing.get_cookie_signer(salt=key + salt).sign(value)
        return self.set_cookie(key, value, **kwargs)
    def delete_cookie(self, key, path='/', domain=None):
        # expire the cookie in the past so clients discard it
        self.set_cookie(key, max_age=0, path=path, domain=domain,
                expires='Thu, 01-Jan-1970 00:00:00 GMT')
    # Common methods used by subclasses
    def make_bytes(self, value):
        """Turn a value into a bytestring encoded in the output charset."""
        # Per PEP 3333, this response body must be bytes. To avoid returning
        # an instance of a subclass, this function returns `bytes(value)`.
        # This doesn't make a copy when `value` already contains bytes.
        # If content is already encoded (eg. gzip), assume bytes.
        if self.has_header('Content-Encoding'):
            return bytes(value)
        # Handle string types -- we can't rely on force_bytes here because:
        # - under Python 3 it attemps str conversion first
        # - when self._charset != 'utf-8' it re-encodes the content
        if isinstance(value, bytes):
            return bytes(value)
        if isinstance(value, six.text_type):
            return bytes(value.encode(self._charset))
        # Handle non-string types (#16494)
        return force_bytes(value, self._charset)
    # These methods partially implement the file-like object interface.
    # See http://docs.python.org/lib/bltin-file-objects.html
    # The WSGI server must call this method upon completion of the request.
    # See http://blog.dscpl.com.au/2012/10/obligations-for-calling-close-on.html
    def close(self):
        # close all registered closable objects, ignoring individual failures,
        # then signal that the request is finished
        for closable in self._closable_objects:
            try:
                closable.close()
            except Exception:
                pass
        signals.request_finished.send(sender=self._handler_class)
    def write(self, content):
        # base class is not writable; HttpResponse overrides this
        raise Exception("This %s instance is not writable" % self.__class__.__name__)
    def flush(self):
        # file-like no-op
        pass
    def tell(self):
        # base class has no position; HttpResponse overrides this
        raise Exception("This %s instance cannot tell its position" % self.__class__.__name__)
class HttpResponse(HttpResponseBase):
    """
    An HTTP response class with a string as content.
    The content can be read, appended to or replaced.
    """
    streaming = False
    def __init__(self, content=b'', *args, **kwargs):
        super(HttpResponse, self).__init__(*args, **kwargs)
        # Content is a bytestring. See the `content` property methods.
        self.content = content
    def serialize(self):
        """Full HTTP message, including headers, as a bytestring."""
        return self.serialize_headers() + b'\r\n\r\n' + self.content
    # bytes(response) on py3 / str(response) on py2 yields the full message
    if six.PY3:
        __bytes__ = serialize
    else:
        __str__ = serialize
    @property
    def content(self):
        # join the internal chunk list into a single bytestring
        return b''.join(self._container)
    @content.setter
    def content(self, value):
        # Consume iterators upon assignment to allow repeated iteration.
        if hasattr(value, '__iter__') and not isinstance(value, (bytes, six.string_types)):
            if hasattr(value, 'close'):
                self._closable_objects.append(value)
            value = b''.join(self.make_bytes(chunk) for chunk in value)
        else:
            value = self.make_bytes(value)
        # Create a list of properly encoded bytestrings to support write().
        self._container = [value]
    def __iter__(self):
        # iterate over the encoded content chunks
        return iter(self._container)
    def write(self, content):
        # file-like append; encodes via make_bytes
        self._container.append(self.make_bytes(content))
    def tell(self):
        # current position equals the total content length
        return len(self.content)
class StreamingHttpResponse(HttpResponseBase):
    """
    A streaming HTTP response class with an iterator as content.
    This should only be iterated once, when the response is streamed to the
    client. However, it can be appended to or replaced with a new iterator
    that wraps the original content (or yields entirely new content).
    """
    streaming = True
    def __init__(self, streaming_content=(), *args, **kwargs):
        super(StreamingHttpResponse, self).__init__(*args, **kwargs)
        # `streaming_content` should be an iterable of bytestrings.
        # See the `streaming_content` property methods.
        self.streaming_content = streaming_content
    @property
    def content(self):
        # streaming responses deliberately expose no `content` attribute
        raise AttributeError("This %s instance has no `content` attribute. "
                "Use `streaming_content` instead." % self.__class__.__name__)
    @property
    def streaming_content(self):
        # lazily encode each chunk as it is consumed
        return map(self.make_bytes, self._iterator)
    @streaming_content.setter
    def streaming_content(self, value):
        # Ensure we can never iterate on "value" more than once.
        self._iterator = iter(value)
        if hasattr(value, 'close'):
            self._closable_objects.append(value)
    def __iter__(self):
        return self.streaming_content
class HttpResponseRedirectBase(HttpResponse):
    """Base class for redirect responses; validates the target URL scheme
    and sets the Location header."""
    # schemes that are considered safe redirect targets
    allowed_schemes = ['http', 'https', 'ftp']
    def __init__(self, redirect_to, *args, **kwargs):
        parsed = urlparse(force_text(redirect_to))
        if parsed.scheme and parsed.scheme not in self.allowed_schemes:
            # refuse redirects to unexpected protocols (e.g. javascript:)
            raise DisallowedRedirect("Unsafe redirect to URL with protocol '%s'" % parsed.scheme)
        super(HttpResponseRedirectBase, self).__init__(*args, **kwargs)
        self['Location'] = iri_to_uri(redirect_to)
    # convenience accessor for the redirect target
    url = property(lambda self: self['Location'])
class HttpResponseRedirect(HttpResponseRedirectBase):
    """Temporary redirect (HTTP 302 Found)."""
    status_code = 302
class HttpResponsePermanentRedirect(HttpResponseRedirectBase):
    """Permanent redirect (HTTP 301 Moved Permanently)."""
    status_code = 301
class HttpResponseNotModified(HttpResponse):
    """HTTP 304 Not Modified; must not carry content or a Content-Type."""
    status_code = 304
    def __init__(self, *args, **kwargs):
        super(HttpResponseNotModified, self).__init__(*args, **kwargs)
        # a 304 must not include a Content-Type header
        del self['content-type']
    @HttpResponse.content.setter
    def content(self, value):
        # forbid any body; only an empty container is allowed
        if value:
            raise AttributeError("You cannot set content to a 304 (Not Modified) response")
        self._container = []
class HttpResponseBadRequest(HttpResponse):
    """HTTP 400 Bad Request."""
    status_code = 400
class HttpResponseNotFound(HttpResponse):
    """HTTP 404 Not Found."""
    status_code = 404
class HttpResponseForbidden(HttpResponse):
    """HTTP 403 Forbidden."""
    status_code = 403
class HttpResponseNotAllowed(HttpResponse):
    """HTTP 405 Method Not Allowed; advertises the permitted methods via
    the Allow header."""
    status_code = 405
    def __init__(self, permitted_methods, *args, **kwargs):
        super(HttpResponseNotAllowed, self).__init__(*args, **kwargs)
        # e.g. "GET, POST"
        self['Allow'] = ', '.join(permitted_methods)
class HttpResponseGone(HttpResponse):
    """HTTP 410 Gone."""
    status_code = 410
class HttpResponseServerError(HttpResponse):
    """HTTP 500 Internal Server Error."""
    status_code = 500
class Http404(Exception):
    """Exception raised to signal a 404 Not Found condition."""
    pass
|
{
"content_hash": "97d624ca2e4606b84fad2f9bcc965b5a",
"timestamp": "",
"source": "github",
"line_count": 458,
"max_line_length": 97,
"avg_line_length": 34.11353711790393,
"alnum_prop": 0.6116231438812084,
"repo_name": "IRI-Research/django",
"id": "f38c4918cdafc8b0284fbf22b5be6aba4b73d44e",
"size": "15624",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "django/http/response.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "51177"
},
{
"name": "JavaScript",
"bytes": "102377"
},
{
"name": "Python",
"bytes": "9012121"
},
{
"name": "Shell",
"bytes": "12137"
}
],
"symlink_target": ""
}
|
import datetime
import pandas as pd
from rqalpha.utils.py2 import lru_cache
def _to_timestamp(d):
return pd.Timestamp(d).replace(hour=0, minute=0, second=0, microsecond=0)
class TradingDatesMixin(object):
def __init__(self, dates):
self._dates = dates
def get_trading_dates(self, start_date, end_date):
# 只需要date部分
start_date = _to_timestamp(start_date)
end_date = _to_timestamp(end_date)
left = self._dates.searchsorted(start_date)
right = self._dates.searchsorted(end_date, side='right')
return self._dates[left:right]
def get_previous_trading_date(self, date, n=1):
date = _to_timestamp(date)
pos = self._dates.searchsorted(date)
if pos >= n:
return self._dates[pos - n]
else:
return self._dates[0]
def get_next_trading_date(self, date, n=1):
date = _to_timestamp(date)
pos = self._dates.searchsorted(date, side='right')
if pos + n > len(self._dates):
return self._dates[-1]
else:
return self._dates[pos + n - 1]
def is_trading_date(self, date):
date = _to_timestamp(date)
pos = self._dates.searchsorted(date)
return pos < len(self._dates) and self._dates[pos] == date
@lru_cache(512)
def _get_future_trading_date(self, dt):
dt1 = dt - datetime.timedelta(hours=4)
td = pd.Timestamp(dt1.date())
pos = self._dates.searchsorted(td)
if self._dates[pos] != td:
raise RuntimeError('invalid future calendar datetime: {}'.format(dt))
if dt1.hour >= 16:
return self._dates[pos + 1]
return td
def get_trading_dt(self, calendar_dt):
trading_date = self.get_future_trading_date(calendar_dt)
return datetime.datetime.combine(trading_date, calendar_dt.time())
def get_future_trading_date(self, dt):
return self._get_future_trading_date(dt.replace(minute=0, second=0, microsecond=0))
get_nth_previous_trading_date = get_previous_trading_date
def get_n_trading_dates_until(self, dt, n):
date = _to_timestamp(dt)
pos = self._dates.searchsorted(date, side='right')
if pos >= n:
return self._dates[pos - n:pos]
return self._dates[:pos]
def count_trading_dates(self, start_date, end_date):
start_date = _to_timestamp(start_date)
end_date = _to_timestamp(end_date)
return self._dates.searchsorted(end_date, side='right') - self._dates.searchsorted(start_date)
|
{
"content_hash": "7a17f78e92228fd35254158399e25a19",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 102,
"avg_line_length": 32.93589743589744,
"alnum_prop": 0.6091864538731023,
"repo_name": "xclxxl414/rqalpha",
"id": "f5e4555b134e967eb68265c428dcf4d3e57b444c",
"size": "3184",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rqalpha/data/trading_dates_mixin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "2333905"
},
{
"name": "Python",
"bytes": "2597438"
},
{
"name": "Shell",
"bytes": "1154"
}
],
"symlink_target": ""
}
|
"""
Abstract defintions that enable task sandboxing.
"""
__all__ = ["Sandbox", "SandboxTask"]
import os
import sys
from abc import ABCMeta, abstractmethod, abstractproperty
from contextlib import contextmanager
from fnmatch import fnmatch
from collections import OrderedDict
import luigi
import six
from law.config import Config
from law.task.base import Task
from law.task.proxy import ProxyTask, ProxyCommand, get_proxy_attribute
from law.target.local import LocalDirectoryTarget
from law.target.collection import TargetCollection
from law.parameter import NO_STR
from law.parser import root_task
from law.util import (
colored, is_pattern, multi_match, mask_struct, map_struct, interruptable_popen, patch_object,
flatten,
)
from law.logger import get_logger
logger = get_logger(__name__)
# sandbox state as communicated by the outer law process through environment
# variables; LAW_SANDBOX may hold a comma-separated list of sandbox keys
_current_sandbox = os.getenv("LAW_SANDBOX", "").split(",")
# "1" when this process already runs inside a sandbox
_sandbox_switched = os.getenv("LAW_SANDBOX_SWITCHED", "") == "1"
_sandbox_task_id = os.getenv("LAW_SANDBOX_TASK_ID", "")
_sandbox_worker_id = os.getenv("LAW_SANDBOX_WORKER_ID", "")
_sandbox_worker_first_task_id = os.getenv("LAW_SANDBOX_WORKER_FIRST_TASK_ID", "")
_sandbox_is_root_task = os.getenv("LAW_SANDBOX_IS_ROOT_TASK", "") == "1"
# directories used for file stage-in/out between host and sandbox
_sandbox_stagein_dir = os.getenv("LAW_SANDBOX_STAGEIN_DIR", "")
_sandbox_stageout_dir = os.getenv("LAW_SANDBOX_STAGEOUT_DIR", "")
# certain values must be present in a sandbox
if _sandbox_switched:
    if not _current_sandbox or not _current_sandbox[0]:
        raise Exception("LAW_SANDBOX must not be empty in a sandbox")
    if not _sandbox_task_id:
        raise Exception("LAW_SANDBOX_TASK_ID must not be empty in a sandbox")
    elif not _sandbox_worker_id:
        raise Exception("LAW_SANDBOX_WORKER_ID must not be empty in a sandbox")
    elif not _sandbox_worker_first_task_id:
        raise Exception("LAW_SANDBOX_WORKER_FIRST_TASK_ID must not be empty in a sandbox")
class StageInfo(object):
    """Plain container that couples original targets with their staging
    directory and the staged counterparts used inside a sandbox."""
    def __init__(self, targets, stage_dir, stage_targets):
        super(StageInfo, self).__init__()
        # original targets, the staging directory target, and the struct of
        # staged (copied) targets
        self.targets = targets
        self.stage_dir = stage_dir
        self.stage_targets = stage_targets
    def __str__(self):
        return "{}.{} object at {}:\n targets : {}\n stage_dir : {}\n stage_targets: {}".format(
            self.__class__.__module__,
            self.__class__.__name__,
            hex(id(self)),
            self.targets,
            self.stage_dir.path,
            self.stage_targets,
        )
    def __repr__(self):
        return self.__str__()
class Sandbox(six.with_metaclass(ABCMeta, object)):
    """Abstract base class for sandboxes. Concrete subclasses define a
    ``sandbox_type`` and implement :py:attr:`env` and :py:meth:`cmd`.
    Instances are created from keys of the form "<type>::<name>" via
    :py:meth:`new`."""
    # separator between sandbox type and name in a sandbox key
    delimiter = "::"
    @staticmethod
    def check_key(key, silent=False):
        """Validate the format of a sandbox *key*; raises ValueError unless
        *silent* is True, in which case the validity flag is returned."""
        valid = True
        if "," in key:
            valid = False
        if not valid and not silent:
            raise ValueError("invalid sandbox key format '{}'".format(key))
        else:
            return valid
    @staticmethod
    def split_key(key):
        """Split a key "<type>::<name>" into a (type, name) tuple; raises
        ValueError when either part is missing or empty."""
        parts = str(key).split(Sandbox.delimiter, 1)
        if len(parts) != 2 or any(not p.strip() for p in parts):
            raise ValueError("invalid sandbox key '{}'".format(key))
        return tuple(parts)
    @staticmethod
    def join_key(_type, name):
        """Join a sandbox *_type* and *name* into a key "<type>::<name>"."""
        return str(_type) + Sandbox.delimiter + str(name)
    @classmethod
    def new(cls, key, *args, **kwargs):
        """Factory: instantiate the Sandbox subclass whose ``sandbox_type``
        matches the type encoded in *key*."""
        # check for key format
        cls.check_key(key, silent=False)
        # split the key into the sandbox type and name
        _type, name = cls.split_key(key)
        # loop recursively through subclasses and find class that matches the sandbox_type
        classes = list(cls.__subclasses__())
        while classes:
            _cls = classes.pop(0)
            if getattr(_cls, "sandbox_type", None) == _type:
                return _cls(name, *args, **kwargs)
            else:
                classes.extend(_cls.__subclasses__())
        raise Exception("no sandbox with type '{}' found".format(_type))
    def __init__(self, name, task=None):
        super(Sandbox, self).__init__()
        # when a task is set, it must be a SandboxTask instance
        if task and not isinstance(task, SandboxTask):
            raise TypeError("sandbox task must be a SandboxTask instance, got {}".format(task))
        self.name = name
        self.task = task
        # target staging info
        self.stagein_info = None
        self.stageout_info = None
    @property
    def key(self):
        # full sandbox key, "<type>::<name>"
        return self.join_key(self.sandbox_type, self.name)
    def scheduler_on_host(self):
        """Whether the luigi scheduler runs on the local host."""
        config = luigi.interface.core()
        return multi_match(config.scheduler_host, ["0.0.0.0", "127.0.0.1", "localhost"])
    def force_local_scheduler(self):
        """Subclass hook: return True to force a local scheduler."""
        return False
    @abstractproperty
    def env(self):
        # environment variables visible inside the sandbox
        return
    @abstractmethod
    def cmd(self, proxy_cmd):
        # build the shell command that runs proxy_cmd inside the sandbox
        return
    def run(self, cmd, stdout=None, stderr=None):
        """Run *cmd* through bash with the sandbox env; returns the
        (code, out, err) result of interruptable_popen."""
        if stdout is None:
            stdout = sys.stdout
        if stderr is None:
            stderr = sys.stderr
        return interruptable_popen(cmd, shell=True, executable="/bin/bash", stdout=stdout,
            stderr=stderr, env=self.env)
    def get_config_section(self, postfix=None):
        """Return the law config section for this sandbox, preferring the
        name-specific section when it exists."""
        section = self.sandbox_type + "_sandbox"
        if postfix:
            section += "_" + postfix
        image_section = section + "_" + self.name
        cfg = Config.instance()
        return image_section if cfg.has_section(image_section) else section
    def _get_env(self):
        """Collect environment variables for the sandbox from defaults, the
        law config and the task."""
        # environment variables to set
        env = OrderedDict()
        # default sandboxing variables
        env["LAW_SANDBOX"] = self.key.replace("$", r"\$")
        env["LAW_SANDBOX_SWITCHED"] = "1"
        if self.task:
            env["LAW_SANDBOX_TASK_ID"] = self.task.live_task_id
            env["LAW_SANDBOX_ROOT_TASK_ID"] = root_task().task_id
            env["LAW_SANDBOX_IS_ROOT_TASK"] = str(int(self.task.is_root_task()))
            if getattr(self.task, "_worker_id", None):
                env["LAW_SANDBOX_WORKER_ID"] = self.task._worker_id
            if getattr(self.task, "_worker_first_task_id", None):
                env["LAW_SANDBOX_WORKER_FIRST_TASK_ID"] = self.task._worker_first_task_id
        # extend by variables from the config file
        cfg = Config.instance()
        section = self.get_config_section(postfix="env")
        for name, value in cfg.items(section):
            if is_pattern(name):
                names = [key for key in os.environ.keys() if fnmatch(key, name)]
            else:
                names = [name]
            for name in names:
                # when there is only a key present, i.e., no value is set,
                # get it from the current environment
                env[name] = value if value is not None else os.getenv(name, "")
        # extend by variables defined on task level
        if self.task:
            task_env = self.task.sandbox_env(env)
            if task_env:
                env.update(task_env)
        return env
    def _get_volumes(self):
        """Collect host -> sandbox volume mappings from the law config and
        the task."""
        volumes = OrderedDict()
        # extend by volumes from the config file
        cfg = Config.instance()
        section = self.get_config_section(postfix="volumes")
        for hdir, cdir in cfg.items(section, expand_vars=False, expand_user=False):
            volumes[os.path.expandvars(os.path.expanduser(hdir))] = cdir
        # extend by volumes defined on task level
        if self.task:
            task_volumes = self.task.sandbox_volumes(volumes)
            if task_volumes:
                volumes.update(task_volumes)
        return volumes
    def _expand_volume(self, vol, bin_dir=None, python_dir=None):
        """Substitute the {{LAW_FORWARD_BIN}} / {{LAW_FORWARD_PY}}
        placeholders in a volume definition."""
        def replace(vol, name, repl):
            # warn about the deprecation of the legacy format "${name}" (until v0.1)
            var = "{{LAW_FORWARD_" + name + "}}"
            vol = vol.replace(var, repl)
            return vol
        if bin_dir:
            vol = replace(vol, "BIN", bin_dir)
        if python_dir:
            vol = replace(vol, "PY", python_dir)
        return vol
    def _build_setup_cmds(self, env):
        """Turn *env* into export statements and append task-level setup
        commands."""
        # commands that are used to setup the env and actual run commands
        setup_cmds = []
        for tpl in six.iteritems(env):
            setup_cmds.append("export {}=\"{}\"".format(*tpl))
        if self.task:
            setup_cmds.extend(self.task.sandbox_setup_cmds())
        return setup_cmds
class SandboxProxy(ProxyTask):
    """Proxy task that runs the actual task inside its sandbox, handling
    optional file stage-in and stage-out around the sandboxed run."""
    def output(self):
        # the proxy itself produces no output
        return None
    @property
    def sandbox_inst(self):
        return self.task.sandbox_inst
    def create_proxy_cmd(self):
        # command that re-invokes the task inside the sandbox, stripped of
        # sandbox-specific and worker arguments
        return ProxyCommand(self.task, exclude_task_args=self.task.exclude_params_sandbox,
            exclude_global_args=["workers"])
    def run(self):
        """Stage files, run the sandboxed command, then stage out."""
        # before_run hook
        if callable(self.task.sandbox_before_run):
            self.task.sandbox_before_run()
        # create a temporary direction for file staging
        tmp_dir = LocalDirectoryTarget(is_tmp=True)
        tmp_dir.touch()
        # stage-in input files
        stagein_info = self.stagein(tmp_dir)
        if stagein_info:
            # tell the sandbox
            self.sandbox_inst.stagein_info = stagein_info
            logger.debug("configured sandbox stage-in data")
        # prepare stage-out
        stageout_info = self.prepare_stageout(tmp_dir)
        if stageout_info:
            # tell the sandbox
            self.sandbox_inst.stageout_info = stageout_info
            logger.debug("configured sandbox stage-out data")
        # create the actual command to run
        cmd = self.sandbox_inst.cmd(self.create_proxy_cmd())
        # run with log section before and after actual run call
        with self._run_context(cmd):
            code, out, err = self.sandbox_inst.run(cmd)
        if code != 0:
            raise Exception("sandbox '{}' failed with exit code {}".format(
                self.sandbox_inst.key, code))
        # actual stage_out
        if stageout_info:
            self.stageout(stageout_info)
        # after_run hook
        if callable(self.task.sandbox_after_run):
            self.task.sandbox_after_run()
    def stagein(self, tmp_dir):
        """Copy masked task inputs into a stage-in directory below *tmp_dir*;
        returns a StageInfo or None when stage-in is disabled."""
        # check if the stage-in dir is set
        cfg = Config.instance()
        section = self.sandbox_inst.get_config_section()
        stagein_dir_name = cfg.get_expanded(section, "stagein_dir_name")
        if not stagein_dir_name:
            return None
        # get the sandbox stage-in mask
        stagein_mask = self.task.sandbox_stagein()
        if not stagein_mask:
            return None
        # determine inputs as seen from outside and within the sandbox
        inputs = self.task.input()
        with patch_object(os, "environ", self.task.env, lock=True):
            sandbox_inputs = self.task.input()
        # apply the mask to both structs
        inputs = mask_struct(stagein_mask, inputs)
        sandbox_inputs = mask_struct(stagein_mask, sandbox_inputs)
        if not inputs:
            return None
        # create a lookup for input -> sandbox input
        sandbox_targets = dict(zip(flatten(inputs), flatten(sandbox_inputs)))
        # create the stage-in directory
        stagein_dir = tmp_dir.child(stagein_dir_name, type="d")
        stagein_dir.touch()
        # create the structure of staged inputs
        def stagein_target(target):
            sandbox_target = sandbox_targets[target]
            staged_target = make_staged_target(stagein_dir, sandbox_target)
            logger.debug("stage-in {} to {}".format(target.path, staged_target.path))
            target.copy_to_local(staged_target)
            return staged_target
        def map_collection(func, collection, **kwargs):
            map_struct(func, collection.targets, **kwargs)
        staged_inputs = map_struct(stagein_target, inputs,
            custom_mappings={TargetCollection: map_collection})
        logger.info("staged-in {} file(s)".format(len(stagein_dir.listdir())))
        return StageInfo(inputs, stagein_dir, staged_inputs)
    def prepare_stageout(self, tmp_dir):
        """Create the stage-out directory and the output -> sandbox-output
        lookup; returns a StageInfo or None when stage-out is disabled."""
        # check if the stage-out dir is set
        cfg = Config.instance()
        section = self.sandbox_inst.get_config_section()
        stageout_dir_name = cfg.get_expanded(section, "stageout_dir_name")
        if not stageout_dir_name:
            return None
        # get the sandbox stage-out mask
        stageout_mask = self.task.sandbox_stageout()
        if not stageout_mask:
            return None
        # determine outputs as seen from outside and within the sandbox
        outputs = self.task.output()
        with patch_object(os, "environ", self.task.env, lock=True):
            sandbox_outputs = self.task.output()
        # apply the mask to both structs
        outputs = mask_struct(stageout_mask, outputs)
        sandbox_outputs = mask_struct(stageout_mask, sandbox_outputs)
        if not outputs:
            return None
        # create the stage-out directory
        stageout_dir = tmp_dir.child(stageout_dir_name, type="d")
        stageout_dir.touch()
        # create a lookup for input -> sandbox input
        sandbox_targets = dict(zip(flatten(outputs), flatten(sandbox_outputs)))
        return StageInfo(outputs, stageout_dir, sandbox_targets)
    def stageout(self, stageout_info):
        """Move files the sandbox wrote into the stage-out directory to
        their actual output locations."""
        # traverse actual outputs, try to identify them in tmp_dir
        # and move them to their proper location
        def stageout_target(target):
            sandbox_target = stageout_info.stage_targets[target]
            staged_target = make_staged_target(stageout_info.stage_dir, sandbox_target)
            logger.debug("stage-out {} to {}".format(staged_target.path, target))
            if staged_target.exists():
                target.copy_from_local(staged_target)
            else:
                logger.warning("could not find output target at {} for stage-out".format(
                    staged_target.path))
        def map_collection(func, collection, **kwargs):
            map_struct(func, collection.targets, **kwargs)
        map_struct(stageout_target, stageout_info.targets,
            custom_mappings={TargetCollection: map_collection})
        logger.info("staged-out {} file(s)".format(len(stageout_info.stage_dir.listdir())))
    @contextmanager
    def _run_context(self, cmd=None):
        """Print entering/leaving banners around the sandboxed run and log
        the command being executed."""
        def print_banner(msg, color):
            print("")
            print(colored(" {} ".format(msg).center(80, "="), color=color))
            print(colored("task : ", color=color) + colored(self.task.task_id, style="bright"))
            print(colored("sandbox: ", color=color) + colored(self.sandbox_inst.key, style="bright"))
            print(colored(80 * "=", color=color))
            print("")
        # start banner
        print_banner("entering sandbox", "magenta")
        # log the command
        if cmd:
            self.task.logger.info("sandbox command:\n{}".format(cmd))
        sys.stdout.flush()
        try:
            yield
        finally:
            # end banner
            print_banner("leaving sandbox", "cyan")
            sys.stdout.flush()
class SandboxTask(Task):
sandbox = luigi.Parameter(
default=_current_sandbox[0] or NO_STR,
description="name of the sandbox to run the task in; default: $LAW_SANDBOX when set, "
"otherwise empty",
)
allow_empty_sandbox = False
valid_sandboxes = ["*"]
exclude_params_sandbox = {"sandbox", "log_file"}
def __init__(self, *args, **kwargs):
super(SandboxTask, self).__init__(*args, **kwargs)
# store whether sandbox objects have been setup, which is done lazily,
# and predefine all attributes that are set by it
self._sandbox_initialized = False
self._effective_sandbox = None
self._sandbox_inst = None
self._sandbox_proxy = None
def _initialize_sandbox(self, force=False):
if self._sandbox_initialized and not force:
return
self._sandbox_initialized = True
# when we are already in a sandbox, this task is placed inside it, i.e., there is no nesting
if _sandbox_switched:
self._effective_sandbox = _current_sandbox[0]
# when the sandbox is set via a parameter and not hard-coded,
# check if the value is among the valid sandboxes, otherwise determine the fallback
elif isinstance(self.__class__.sandbox, luigi.Parameter):
if multi_match(self.sandbox, self.valid_sandboxes, mode=any):
self._effective_sandbox = self.sandbox
else:
self._effective_sandbox = self.fallback_sandbox(self.sandbox)
# just set the effective sandbox
else:
self._effective_sandbox = self.sandbox
# at this point, the sandbox must be set unless it is explicitely allowed to be empty
if self._effective_sandbox in (None, NO_STR):
if not self.allow_empty_sandbox:
raise Exception("task {!r} requires the sandbox parameter to be set".format(self))
self._effective_sandbox = NO_STR
# create the sandbox proxy when required
if not self.is_sandboxed():
self._sandbox_inst = Sandbox.new(self._effective_sandbox, self)
self._sandbox_proxy = SandboxProxy(task=self)
logger.debug("created sandbox proxy instance of type '{}'".format(
self._effective_sandbox))
@property
def effective_sandbox(self):
self._initialize_sandbox()
return self._effective_sandbox
@property
def sandbox_inst(self):
self._initialize_sandbox()
return self._sandbox_inst
@property
def sandbox_proxy(self):
self._initialize_sandbox()
return self._sandbox_proxy
def __getattribute__(self, attr, proxy=True):
return get_proxy_attribute(self, attr, proxy=proxy, super_cls=Task)
def is_sandboxed(self):
return self.effective_sandbox == NO_STR or self.effective_sandbox in _current_sandbox
def is_root_task(self):
is_root = super(SandboxTask, self).is_root_task()
if _sandbox_switched:
return is_root and _sandbox_is_root_task
else:
return is_root
def _staged_input(self):
if not _sandbox_stagein_dir:
raise Exception("LAW_SANDBOX_STAGEIN_DIR must not be empty in a sandbox when target "
"stage-in is required")
# get the original inputs
inputs = self.__getattribute__("input", proxy=False)()
# create the struct of staged inputs
staged_inputs = make_staged_target_struct(_sandbox_stagein_dir, inputs)
# apply the stage-in mask
return mask_struct(self.sandbox_stagein(), staged_inputs, inputs)
def _staged_output(self):
if not _sandbox_stageout_dir:
raise Exception("LAW_SANDBOX_STAGEOUT_DIR must not be empty in a sandbox when target "
"stage-out is required")
# get the original outputs
outputs = self.__getattribute__("output", proxy=False)()
# create the struct of staged outputs
staged_outputs = make_staged_target_struct(_sandbox_stageout_dir, outputs)
# apply the stage-out mask
return mask_struct(self.sandbox_stageout(), staged_outputs, outputs)
@property
def env(self):
return os.environ if self.is_sandboxed() else self.sandbox_inst.env
def fallback_sandbox(self, sandbox):
    # Hook: return an alternative sandbox key to use instead of *sandbox*;
    # None (the default) means there is no fallback.
    return None
def sandbox_user(self):
    """
    Return the ``(uid, gid)`` pair to use inside the sandbox, preferring
    values from the sandbox's config section over the current process ids.
    """
    uid, gid = os.getuid(), os.getgid()

    # check if there is a config section that defines the user and group ids
    if self.sandbox_inst:
        cfg = Config.instance()
        section = self.sandbox_inst.get_config_section()
        uid = cfg.get_expanded_int(section, "uid", default=uid)
        gid = cfg.get_expanded_int(section, "gid", default=gid)

    return uid, gid
def sandbox_stagein(self):
    # Hook: mask describing which inputs to stage in;
    # disable stage-in by default
    return False
def sandbox_stageout(self):
    # Hook: mask describing which outputs to stage out;
    # disable stage-out by default
    return False
def sandbox_env(self, env):
    # Hook: additional environment variables to set inside the sandbox,
    # given the base environment *env*.
    return {}
def sandbox_volumes(self, volumes):
    # Hook: additional volumes to mount into the sandbox, given the base
    # volume mapping *volumes*.
    return {}
def sandbox_setup_cmds(self):
    # Hook: list of shell commands to set up the environment inside a sandbox.
    return []
def sandbox_before_run(self):
    # Hook invoked before the run method of the sandbox proxy is called.
    return
def sandbox_after_run(self):
    # Hook invoked after the run method of the sandbox proxy is called.
    return
def make_staged_target_struct(stage_dir, struct):
    """
    Map an arbitrary (possibly nested) target *struct* to staged targets
    located under *stage_dir*, preserving target collection types.
    """
    def map_target(target):
        return make_staged_target(stage_dir, target)

    def map_collection(func, collection, **kwargs):
        # rebuild the collection around the staged targets
        staged_targets = map_struct(func, collection.targets, **kwargs)
        return collection.__class__(staged_targets, **collection._copy_kwargs())

    return map_struct(map_target, struct, custom_mappings={TargetCollection: map_collection})
def make_staged_target(stage_dir, target):
    # Create a counterpart of *target* (identified by its unique basename)
    # inside *stage_dir*, coercing stage_dir to a LocalDirectoryTarget first.
    if not isinstance(stage_dir, LocalDirectoryTarget):
        stage_dir = LocalDirectoryTarget(stage_dir)

    return stage_dir.child(target.unique_basename, type=target.type, **target._copy_kwargs())
|
{
"content_hash": "c511c50a727c7602841f8f9c8e5ee83a",
"timestamp": "",
"source": "github",
"line_count": 621,
"max_line_length": 101,
"avg_line_length": 34.42673107890499,
"alnum_prop": 0.6129847046166799,
"repo_name": "riga/law",
"id": "1dd16497364e4811b50420e419e232bba7fccc96",
"size": "21397",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "law/sandbox/base.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "851844"
},
{
"name": "Shell",
"bytes": "58608"
}
],
"symlink_target": ""
}
|
from msrest.serialization import Model
class TopLevelDomainAgreementOption(Model):
    """Options for retrieving the list of top level domain legal agreements.

    :param include_privacy: If <code>true</code>, then the list of agreements
     will include agreements for domain privacy as well; otherwise,
     <code>false</code>.
    :type include_privacy: bool
    """

    # Maps attribute names to their wire (JSON) keys and types for
    # msrest (de)serialization.
    _attribute_map = {
        'include_privacy': {'key': 'includePrivacy', 'type': 'bool'},
    }

    def __init__(self, include_privacy=None):
        self.include_privacy = include_privacy
|
{
"content_hash": "5fcec9b3f108c49c63e3a6e459a9f653",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 77,
"avg_line_length": 31.555555555555557,
"alnum_prop": 0.6795774647887324,
"repo_name": "rjschwei/azure-sdk-for-python",
"id": "332c7df7ddc1e990d967143a781b6820649c202d",
"size": "1042",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "azure-mgmt-web/azure/mgmt/web/models/top_level_domain_agreement_option.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "8317911"
}
],
"symlink_target": ""
}
|
from gi.repository import Gio # pylint: disable=E0611
from locale import gettext as _
import logging
logger = logging.getLogger('spindl')
from spindl_lib.PreferencesDialog import PreferencesDialog
class PreferencesSpindlDialog(PreferencesDialog):
    # GObject type name registered for this dialog subclass
    __gtype_name__ = "PreferencesSpindlDialog"

    def finish_initializing(self, builder):  # pylint: disable=E1002
        """Set up the preferences dialog"""
        super(PreferencesSpindlDialog, self).finish_initializing(builder)

        # Bind each preference widget to gsettings
        settings = Gio.Settings("net.launchpad.spindl")
        widget = self.builder.get_object('example_entry')
        settings.bind("example", widget, "text", Gio.SettingsBindFlags.DEFAULT)

        # Code for other initialization actions should be added here.
|
{
"content_hash": "ad2f19d2eee1a890152372d134f10abd",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 79,
"avg_line_length": 36.45454545454545,
"alnum_prop": 0.7319201995012469,
"repo_name": "Snesi/spindl",
"id": "699c79d8437e8b924b9341ebbc8b6e03bf865464",
"size": "1716",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spindl/PreferencesSpindlDialog.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
import collections as _collections
import fnmatch as _fnmatch
import importlib as _importlib
import os as _os
import shutil as _shutil
import sys as _sys
from plano import *
class _Project:
def __init__(self):
self.name = None
self.source_dir = "python"
self.included_modules = ["*"]
self.excluded_modules = ["plano", "bullseye"]
self.data_dirs = []
self.build_dir = "build"
self.test_modules = []
# Shared project configuration, filled in by the project's build script.
project = _Project()

# Installation prefix used when none is given (~/.local).
_default_prefix = join(get_home_dir(), ".local")
def check_project():
    # Sanity-check that the build script configured the required fields.
    # NOTE(review): these asserts are stripped under `python -O`.
    assert project.name
    assert project.source_dir
    assert project.build_dir
class project_env(working_env):
    """
    Environment context manager that exposes the build output: sets
    <NAME>_HOME and prepends the build dir's bin/ and source dir to
    PATH and PYTHONPATH respectively.
    """
    def __init__(self):
        check_project()

        # e.g. "my-project" -> "MY_PROJECT_HOME"
        home_var = "{0}_HOME".format(project.name.upper().replace("-", "_"))

        env = {
            home_var: get_absolute_path(join(project.build_dir, project.name)),
            "PATH": get_absolute_path(join(project.build_dir, "bin")) + ":" + ENV["PATH"],
            "PYTHONPATH": get_absolute_path(join(project.build_dir, project.name, project.source_dir)),
        }

        super(project_env, self).__init__(**env)
def configure_file(input_file, output_file, substitutions, quiet=False):
    """
    Render *input_file* to *output_file*, replacing each ``@name@`` token
    with its value from *substitutions* and preserving the file mode.

    Fix: the *quiet* parameter was previously accepted but ignored; it now
    suppresses the progress notice as its name implies.

    Returns the output file path.
    """
    if not quiet:
        notice("Configuring '{0}' for output '{1}'", input_file, output_file)

    content = read(input_file)

    for name, value in substitutions.items():
        content = content.replace("@{0}@".format(name), value)

    write(output_file, content)
    # carry over the input file's permission bits (e.g. executable scripts)
    _shutil.copymode(input_file, output_file)

    return output_file
# Reusable command arguments shared by several commands below.
_prefix_arg = CommandArgument("prefix", help="The base path for installed files", default=_default_prefix)
_clean_arg = CommandArgument("clean_", help="Clean before starting", display_name="clean")
_verbose_arg = CommandArgument("verbose", help="Print detailed logging to the console")
@command(args=(_prefix_arg, _clean_arg))
def build(prefix=None, clean_=False):
    """
    Build the project into ``project.build_dir``.

    The work is skipped when the recorded build state (prefix plus the
    newest source mtime) matches the current state.

    Fix: removed a leftover debug ``print(111, ...)`` statement.
    """
    check_project()

    if clean_:
        clean()

    build_file = join(project.build_dir, "build.json")
    build_data = {}

    if exists(build_file):
        build_data = read_json(build_file)

    # newest modification time across the source tree
    mtime = _os.stat(project.source_dir).st_mtime

    for path in find(project.source_dir):
        mtime = max(mtime, _os.stat(path).st_mtime)

    if prefix is None:
        prefix = build_data.get("prefix", _default_prefix)

    new_build_data = {"prefix": prefix, "mtime": mtime}

    debug("Existing build data: {0}", pformat(build_data))
    debug("New build data: {0}", pformat(new_build_data))

    if build_data == new_build_data:
        debug("Already built")
        return

    write_json(build_file, new_build_data)

    default_home = join(prefix, "lib", project.name)

    # render bin/*.in templates and copy the plain scripts under bin/
    for path in find("bin", "*.in"):
        configure_file(path, join(project.build_dir, path[:-3]), {"default_home": default_home})

    for path in find("bin", exclude="*.in"):
        copy(path, join(project.build_dir, path), inside=False, symlinks=False)

    # copy the Python modules selected by the include/exclude patterns
    for path in find(project.source_dir, "*.py"):
        module_name = get_name_stem(path)
        included = any([_fnmatch.fnmatchcase(module_name, x) for x in project.included_modules])
        excluded = any([_fnmatch.fnmatchcase(module_name, x) for x in project.excluded_modules])

        if included and not excluded:
            copy(path, join(project.build_dir, project.name, path), inside=False, symlinks=False)

    # copy auxiliary data directories verbatim
    for dir_name in project.data_dirs:
        for path in find(dir_name):
            copy(path, join(project.build_dir, project.name, path), inside=False, symlinks=False)
@command(args=(CommandArgument("include", help="Run tests with names matching PATTERN", metavar="PATTERN"),
               CommandArgument("exclude", help="Do not run tests with names matching PATTERN", metavar="PATTERN"),
               CommandArgument("unskip", help="Run skipped tests matching PATTERN", metavar="PATTERN"),
               CommandArgument("list_", help="Print the test names and exit", display_name="list"),
               _verbose_arg, _clean_arg))
def test_(include="*", exclude=None, unskip=None, list_=False, verbose=False, clean_=False):
    """
    Run the project's test modules inside the project environment, or
    just print the test names when ``list_`` is set.

    Fix: removed a dead local (``args = list()``) that was never used.
    """
    check_project()

    if clean_:
        clean()

    # listing does not require a build
    if not list_:
        build()

    with project_env():
        modules = [_importlib.import_module(x) for x in project.test_modules]

        if not modules: # pragma: nocover
            notice("No tests found")
            return

        if list_:
            print_tests(modules)
            return

        exclude = nvl(exclude, ())
        unskip = nvl(unskip, ())

        run_tests(modules, include=include, exclude=exclude, unskip=unskip, verbose=verbose)
@command(args=(CommandArgument("staging_dir", help="A path prepended to installed files"),
               _prefix_arg, _clean_arg))
def install(staging_dir="", prefix=None, clean_=False):
    """Build the project, then copy the build output under staging_dir + prefix."""
    check_project()

    build(prefix=prefix, clean_=clean_)

    assert is_dir(project.build_dir), list_dir()

    build_file = join(project.build_dir, "build.json")
    build_data = read_json(build_file)
    # the prefix recorded at build time wins over the command-line one
    build_prefix = project.build_dir + "/"
    install_prefix = staging_dir + build_data["prefix"]

    # XXX Windows trouble
    # > plano-self-test: notice: Copying 'build\\bin\\chucker' to 'stagingC:\\Users\\runneradmin\\.local\\build\\bin\\chucker'
    for path in find(join(project.build_dir, "bin")):
        copy(path, join(install_prefix, remove_prefix(path, build_prefix)), inside=False, symlinks=False)

    for path in find(join(project.build_dir, project.name)):
        copy(path, join(install_prefix, "lib", remove_prefix(path, build_prefix)), inside=False, symlinks=False)
@command
def clean():
    """Remove the build directory and Python bytecode artifacts."""
    check_project()

    remove(project.build_dir)
    remove(find(".", "__pycache__"))
    remove(find(".", "*.pyc"))
@command(args=(CommandArgument("undo", help="Generate settings that restore the previous environment"),))
def env(undo=False):
    """
    Generate shell settings for the project environment

    To apply the settings, source the output from your shell:

        $ source <(plano env)
    """
    check_project()

    project_dir = get_current_dir() # XXX Needs some checking
    home_var = "{0}_HOME".format(project.name.upper().replace("-", "_"))
    old_home_var = "OLD_{0}".format(home_var)
    home_dir = join(project_dir, project.build_dir, project.name)

    if undo:
        # restore the saved values and drop the OLD_* variables
        print("[[ ${0} ]] && export {1}=${2} && unset {3}".format(old_home_var, home_var, old_home_var, old_home_var))
        print("[[ $OLD_PATH ]] && export PATH=$OLD_PATH && unset OLD_PATH")
        print("[[ $OLD_PYTHONPATH ]] && export PYTHONPATH=$OLD_PYTHONPATH && unset OLD_PYTHONPATH")
        return

    # save the current values so they can be restored with --undo
    print("[[ ${0} ]] && export {1}=${2}".format(home_var, old_home_var, home_var))
    print("[[ $PATH ]] && export OLD_PATH=$PATH")
    print("[[ $PYTHONPATH ]] && export OLD_PYTHONPATH=$PYTHONPATH")

    print("export {0}={1}".format(home_var, home_dir))

    path = [
        join(project_dir, project.build_dir, "bin"),
        ENV.get("PATH", ""),
    ]

    print("export PATH={0}".format(join_path_var(*path)))

    python_path = [
        join(home_dir, project.source_dir),
        join(project_dir, project.source_dir),
        ENV.get("PYTHONPATH", ""),
    ]

    print("export PYTHONPATH={0}".format(join_path_var(*python_path)))
@command(args=(CommandArgument("filename", help="Which file to generate"),
               CommandArgument("stdout", help="Print to stdout instead of writing the file directly")))
def generate(filename, stdout=False):
    """
    Generate standard project files

    Use one of the following filenames:

        .gitignore
        LICENSE.txt
        README.md
        VERSION.txt

    Use the special filename "all" to generate all of them.
    """
    assert project.name

    # the templates live in the .strings file next to this module
    project_files = _StringCatalog(__file__)

    if filename == "all":
        for name in project_files:
            _generate_file(project_files, name, stdout)
    else:
        _generate_file(project_files, filename, stdout)
def _generate_file(project_files, filename, stdout):
    # Render one template from the string catalog, substituting the
    # project name/title placeholders, then print it or write it to disk.
    try:
        content = project_files[filename]
    except KeyError:
        exit("File {0} is not one of the options".format(repr(filename)))

    content = content.lstrip()
    content = content.format(project_title=project.name.capitalize(), project_name=project.name)

    if stdout:
        print(content, end="")
    else:
        write(filename, content)
# @command
# def coverage():
# check_program("coverage3")
# with project_env():
# run("coverage3 run --include python/qtools/\* build/scripts-3.9/qtools-self-test")
# run("coverage3 report")
# run("coverage3 html")
# print(f"file:{get_current_dir()}/htmlcov/index.html")
class _StringCatalog(dict):
    """
    Dict of named text templates parsed from the ``<module>.strings``
    file that sits next to *path*; lines of the form ``[name]`` start a
    new catalog entry.
    """
    def __init__(self, path):
        super(_StringCatalog, self).__init__()

        self.path = "{0}.strings".format(split_extension(path)[0])

        check_file(self.path)

        key = None
        out = list()

        for line in read_lines(self.path):
            line = line.rstrip()

            if line.startswith("[") and line.endswith("]"):
                # a new [section] header: store the previous section first
                if key:
                    self[key] = "".join(out).strip() + "\n"
                    out = list()

                key = line[1:-1]

                continue

            out.append(line)
            # NOTE(review): lines are rejoined with CRLF — confirm intended
            out.append("\r\n")

        # flush the final section
        # NOTE(review): raises if the file has no [section] header (key is None)
        self[key] = "".join(out).strip() + "\n"

    def __repr__(self):
        return format_repr(self)
|
{
"content_hash": "7c921c1803788e54b19e57ae4dba9109",
"timestamp": "",
"source": "github",
"line_count": 303,
"max_line_length": 126,
"avg_line_length": 31.462046204620464,
"alnum_prop": 0.6151264030210847,
"repo_name": "ssorj/transom",
"id": "0285eedc776eeb03c135330e48a47da8df25ac49",
"size": "10323",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "subrepos/bullseye/src/bullseye/main.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "4978"
},
{
"name": "HTML",
"bytes": "6864"
},
{
"name": "Makefile",
"bytes": "4005"
},
{
"name": "Python",
"bytes": "296463"
}
],
"symlink_target": ""
}
|
'''
Offline utility tests
'''
from unittest import TestCase
from scutils.argparse_helper import ArgparseHelper
import argparse
from mock import MagicMock
import sys
# from http://stackoverflow.com/questions/4219717/how-to-assert-output-with-nosetest-unittest-in-python
from contextlib import contextmanager
from StringIO import StringIO
@contextmanager
def captured_output():
    """Temporarily replace sys.stdout/sys.stderr with StringIO buffers.

    Yields the (stdout, stderr) buffer pair and always restores the
    original streams on exit.
    """
    buf_out, buf_err = StringIO(), StringIO()
    saved_out, saved_err = sys.stdout, sys.stderr
    try:
        sys.stdout, sys.stderr = buf_out, buf_err
        yield sys.stdout, sys.stderr
    finally:
        sys.stdout, sys.stderr = saved_out, saved_err
class TestArgparseHelper(TestCase):
    """Checks that ArgparseHelper prints the parser help plus the help of
    every subcommand before exiting."""
    def test_output(self):
        # build a parser with two subcommands sharing a common base parser
        parser = argparse.ArgumentParser(description='Desc here',
                                         add_help=False)
        parser.add_argument('-h', '--help', action=ArgparseHelper, help='show this help message and exit')
        subparsers = parser.add_subparsers(help='commands', dest='command')

        base_parser = argparse.ArgumentParser(add_help=False)
        base_parser.add_argument('-s', '--settings', action='store',
                                 required=False,
                                 help="The settings file to read from",
                                 default="localsettings.py")

        feed_parser = subparsers.add_parser('feed', help='Feed the script',
                                            parents=[base_parser])
        feed_parser.add_argument('json', help='The JSON object as a string')
        # registered for its side effect; the variable itself is unused
        run_parser = subparsers.add_parser('run', help='Run the script',
                                           parents=[base_parser])
        a = ArgparseHelper(MagicMock())
        # NOTE(review): the expected text's blank lines/indentation may have
        # been normalized in transit; verify against real argparse output.
        expected = '''usage: nosetests [-h] {feed,run} ...
Desc here
positional arguments:
{feed,run} commands
feed Feed the script
run Run the script
optional arguments:
-h, --help show this help message and exit
Command 'feed'
usage: nosetests feed [-h] [-s SETTINGS] json
Command 'run'
usage: nosetests run [-h] [-s SETTINGS]'''
        try:
            # invoking the action prints all help text and raises SystemExit
            with captured_output() as (out, err):
                a(parser, MagicMock(), MagicMock())
        except SystemExit:
            pass
        output = out.getvalue().strip()
        self.assertEqual(output, expected)
|
{
"content_hash": "105001db05f5d55830697503a5601fa2",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 106,
"avg_line_length": 31.506849315068493,
"alnum_prop": 0.611304347826087,
"repo_name": "istresearch/scrapy-cluster",
"id": "8892402d31d779e069f6a8ac8b16d3b0b3b45039",
"size": "2300",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "utils/tests/test_argparse_helper.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "46003"
},
{
"name": "Python",
"bytes": "450091"
},
{
"name": "Shell",
"bytes": "11968"
}
],
"symlink_target": ""
}
|
"""
The Email Cleanse Package.
This package contains tools for dealing with some of the common issues
surrounding email consistency.
"""
|
{
"content_hash": "3063d8d767d747980ab23bff1b627ccb",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 70,
"avg_line_length": 23,
"alnum_prop": 0.782608695652174,
"repo_name": "sergedomk/email_cleanse",
"id": "00ca7ac1e6ea95347ff547382332c4eabcfce913",
"size": "138",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "email_cleanse/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "26173"
}
],
"symlink_target": ""
}
|
class Create:
    """REST 'create' action: POST against the collection (no slug in URL)."""
    slug = False
    method = 'POST'
class Update:
    """REST 'update' action: PATCH against a single resource (slug in URL)."""
    slug = True
    method = 'PATCH'
class BulkUpdate:
    """REST bulk-update action: PUT against the collection (no slug in URL)."""
    slug = False
    method = 'PUT'
class Fetch:
    """REST 'fetch' action: GET against a single resource (slug in URL)."""
    slug = True
    method = 'GET'
class List:
    """REST 'list' action: GET against the collection (no slug in URL)."""
    slug = False
    method = 'GET'
class Delete:
    """REST 'delete' action: DELETE against a single resource (slug in URL)."""
    slug = True
    method = 'DELETE'
|
{
"content_hash": "a5a5f535e820a232490f3ca759898fa5",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 21,
"avg_line_length": 11.214285714285714,
"alnum_prop": 0.5668789808917197,
"repo_name": "saurabh1e/SuperFlaskSeed",
"id": "dc0f8692b71d446c54def71c95937296674361cb",
"size": "314",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "web/src/utils/methods.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "749"
},
{
"name": "Python",
"bytes": "51752"
}
],
"symlink_target": ""
}
|
import torch
from collections import defaultdict
from os.path import join
from torch.autograd import Variable
from ibeis.algo.verif.torch import netmath
import tensorboard_logger
# from ibeis.algo.verif.torch import gpu_util
class FitHarness(object):
    """
    Epoch-based training harness: runs train/validation loops over the
    given data loaders, logs averaged metrics (optionally to tensorboard),
    and snapshots the model after every epoch.

    NOTE(review): the string defaults for criterion/lr_scheduler/
    optimizer_cls are never resolved to callables here (the netmath lookup
    calls below are commented out), yet they are called/attribute-accessed
    later — callers apparently must pass actual objects; confirm.
    """
    def __init__(harn, model, train_loader, vali_loader=None, test_loader=None,
                 criterion='cross_entropy', lr_scheduler='exp',
                 optimizer_cls='Adam', class_weights=None, gpu_num=None,
                 workdir=None):
        harn.workdir = workdir
        harn.train_loader = train_loader
        harn.vali_loader = vali_loader
        harn.test_loader = test_loader
        harn.model = model
        harn.optimizer_cls = optimizer_cls
        harn.criterion = criterion
        harn.lr_scheduler = lr_scheduler
        # netmath.Optimizers.lookup(optimizer_cls)
        # netmath.Criterions.lookup(criterion)
        # netmath.LRSchedules.lookup(lr_scheduler)
        harn.class_weights = class_weights
        # gpu_num selects the CUDA device; None means run on the CPU
        harn.gpu_num = gpu_num
        harn.use_cuda = harn.gpu_num is not None
        # harn.model = torch.nn.DataParallel(model, device_ids=[0, 1]).cuda()
        harn.config = {
            'maxIterations': 10000,
            'displayInterval': 1,
            'vail_displayInterval': 1,
            'model_dir': '.',
            'margin': 1.0,
        }
        harn.lr = harn.lr_scheduler.init_lr
        harn.epoch = 0

    def log(harn, msg):
        # console logging hook
        print(msg)

    def log_value(harn, key, value, n_iter):
        # Record a scalar metric for iteration *n_iter* via tensorboard.
        if False:
            print('{}={} @ {}'.format(key, value, n_iter))
        if tensorboard_logger:
            tensorboard_logger.log_value(key, value, n_iter)

    def _to_xpu(harn, *args):
        """ Puts data on the GPU if available """
        if harn.use_cuda:
            args = [Variable(item.cuda(harn.gpu_num)) for item in args]
            # input_batch = [Variable(item.cuda()) for item in input_batch]
        else:
            args = [Variable(item) for item in args]
        return args

    def run(harn):
        """Top-level fit loop: set up device/optimizer, then alternate
        train and validation epochs until check_termination() is true,
        snapshotting after each epoch."""
        harn.log('Begin training')
        if False:
            # TODO: can we run this as a subprocess that dies when we die?
            # or do we need to run externally?
            # tensorboard --logdir runs
            # http://aretha:6006
            pass
        if tensorboard_logger:
            harn.log('Initializing tensorboard')
            tensorboard_logger.configure("runs/ibeis", flush_secs=2)
        if harn.use_cuda:
            harn.log('Fitting model on GPU({})'.format(harn.gpu_num))
            harn.model.cuda(harn.gpu_num)
        else:
            harn.log('Fitting model on the CPU')
        if harn.class_weights is not None:
            harn.class_weights, = harn._to_xpu(harn.class_weights)
        lr = harn.lr_scheduler(harn.epoch)
        harn.optimizer = harn.optimizer_cls(harn.model.parameters(), lr=lr)
        # train loop
        while not harn.check_termination():
            harn.train_epoch()
            if harn.vali_loader:
                harn.validation_epoch()
            harn.save_snapshot()
            harn.epoch += 1

    def check_termination(harn):
        # check for termination
        if harn.epoch > harn.config['maxIterations']:
            harn.log('Maximum harn.epoch reached, terminating ...')
            return True
        return False

    def train_epoch(harn):
        """Run one pass over the training loader, averaging and logging
        metrics every displayInterval batches."""
        ave_metrics = defaultdict(lambda: 0)
        # change learning rate (modified optimizer inplace)
        lr = harn.lr_scheduler(harn.epoch, harn.optimizer)
        # train batch
        for batch_idx, input_batch in enumerate(harn.train_loader):
            input_batch = harn._to_xpu(*input_batch)
            # print('Begin batch {}'.format(batch_idx))
            t_cur_metrics = harn.train_batch(input_batch)
            for k, v in t_cur_metrics.items():
                ave_metrics[k] += v
            # display training info
            if (batch_idx + 1) % harn.config['displayInterval'] == 0:
                for k in ave_metrics.keys():
                    ave_metrics[k] /= harn.config['displayInterval']
                n_train = len(harn.train_loader)
                harn.log('Epoch {0}: {1} / {2} | lr:{3} - tloss:{4:.5f} acc:{5:.2f} | sdis:{6:.3f} ddis:{7:.3f}'.format(
                    harn.epoch, batch_idx, n_train, lr,
                    ave_metrics['loss'], ave_metrics['accuracy'],
                    ave_metrics['pos_dist'], ave_metrics['neg_dist']))
                iter_idx = harn.epoch * n_train + batch_idx
                for key, value in ave_metrics.items():
                    harn.log_value('train ' + key, value, iter_idx)
                # diagnoseGradients(model.parameters())
                # reset the running averages for the next interval
                for k in ave_metrics.keys():
                    ave_metrics[k] = 0

    def validation_epoch(harn):
        """Run one pass over the validation loader, logging interval
        averages and a final epoch-level summary."""
        ave_metrics = defaultdict(lambda: 0)
        # final_metrics accumulates over the whole epoch
        final_metrics = ave_metrics.copy()
        for vali_idx, input_batch in enumerate(harn.vali_loader):
            input_batch = harn._to_xpu(*input_batch)
            # print('Begin batch {}'.format(vali_idx))
            v_cur_metrics = harn.validation_batch(input_batch)
            for k, v in v_cur_metrics.items():
                ave_metrics[k] += v
                final_metrics[k] += v
            if (vali_idx + 1) % harn.config['vail_displayInterval'] == 0:
                for k in ave_metrics.keys():
                    ave_metrics[k] /= harn.config['displayInterval']
                harn.log('Epoch {0}: {1} / {2} | vloss:{3:.5f} acc:{4:.2f} | sdis:{5:.3f} ddis:{6:.3f}'.format(
                    harn.epoch, vali_idx, len(harn.vali_loader),
                    ave_metrics['loss'], ave_metrics['accuracy'],
                    ave_metrics['pos_dist'], ave_metrics['neg_dist']))
                for k in ave_metrics.keys():
                    ave_metrics[k] = 0
        for k in final_metrics.keys():
            final_metrics[k] /= len(harn.vali_loader)
        harn.log('Epoch {0}: final vloss:{1:.5f} acc:{2:.2f} | sdis:{3:.3f} ddis:{4:.3f}'.format(
            harn.epoch, final_metrics['loss'], final_metrics['accuracy'],
            final_metrics['pos_dist'], final_metrics['neg_dist']))
        iter_idx = harn.epoch * len(harn.vali_loader) + vali_idx
        for key, value in final_metrics.items():
            harn.log_value('validation ' + key, value, iter_idx)

    # def display_metrics():
    #     pass

    def load_snapshot(harn, load_path):
        # Restore model weights and the epoch counter from a snapshot file.
        snapshot = torch.load(load_path)
        harn.model.load_state_dict(snapshot['model_state_dict'])
        harn.epoch = snapshot['epoch']
        harn.log('Model loaded from {}'.format(load_path))

    def save_snapshot(harn):
        # save snapshot
        save_path = join(harn.config['model_dir'], 'snapshot_epoch_{}.pt'.format(harn.epoch))
        snapshot = {
            'epoch': harn.epoch,
            'model_state_dict': harn.model.state_dict(),
        }
        torch.save(snapshot, save_path)
        harn.log('Snapshot saved to {}'.format(save_path))

    def train_batch(harn, input_batch):
        """
        Forward/backward/step on one training batch; returns the metrics.

        https://github.com/meetshah1995/pytorch-semseg/blob/master/train.py
        """
        harn.model.train(True)
        *inputs, label = input_batch
        # Forward prop through the model
        output = harn.model(*inputs)
        # Compute the loss
        loss = harn.criterion(output, label, weight=harn.class_weights)
        # Measure train accuracy and other informative metrics
        t_metrics = harn._measure_metrics(output, label, loss)
        # Backprop and learn
        harn.optimizer.zero_grad()
        loss.backward()
        harn.optimizer.step()
        return t_metrics

    def validation_batch(harn, input_batch):
        # Forward-only pass on one validation batch; returns the metrics.
        harn.model.train(False)
        *inputs, label = input_batch
        output = harn.model(*inputs)
        loss = harn.criterion(output, label, weight=harn.class_weights)
        # Measure validation accuracy and other informative metrics
        v_metrics = harn._measure_metrics(output, label, loss)
        return v_metrics

    def _measure_metrics(harn, output, label, loss):
        # Combine the siamese metrics with the (inf-guarded) loss value.
        metrics = netmath.Metrics._siamese_metrics(output, label, margin=harn.criterion.margin)
        assert 'loss' not in metrics, 'cannot compute loss as an extra metric'
        loss_sum = loss.data.sum()
        inf = float("inf")
        if loss_sum == inf or loss_sum == -inf:
            harn.log("WARNING: received an inf loss, setting loss value to 0")
            loss_value = 0
        else:
            loss_value = loss.data[0]
        metrics['loss'] = loss_value
        # metrics = {
        #     'tpr': netmath.Metrics.tpr(output, label)
        # }
        return metrics
|
{
"content_hash": "42952cf9e26e1ca058bae78016e23f2d",
"timestamp": "",
"source": "github",
"line_count": 254,
"max_line_length": 120,
"avg_line_length": 34.44094488188976,
"alnum_prop": 0.5626428898033836,
"repo_name": "Erotemic/ibeis",
"id": "bdb0cd9e12b61339502d71ed06aab3f7d45db7c6",
"size": "8748",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "ibeis/algo/verif/torch/fit_harness.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CMake",
"bytes": "331"
},
{
"name": "CSS",
"bytes": "4676"
},
{
"name": "Dockerfile",
"bytes": "13018"
},
{
"name": "Inno Setup",
"bytes": "1585"
},
{
"name": "Python",
"bytes": "6661573"
},
{
"name": "Shell",
"bytes": "56171"
}
],
"symlink_target": ""
}
|
"""
couch models go here
"""
from __future__ import absolute_import
from datetime import datetime
import logging
import re
from django.utils import html, safestring
from restkit.errors import NoMoreData
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.core.exceptions import ValidationError
from django.template.loader import render_to_string
from couchdbkit.ext.django.schema import *
from couchdbkit.resource import ResourceNotFound
from dimagi.utils.couch.database import get_safe_write_kwargs, iter_docs
from dimagi.utils.logging import notify_exception
from dimagi.utils.decorators.memoized import memoized
from dimagi.utils.make_uuid import random_hex
from dimagi.utils.modules import to_function
from casexml.apps.case.models import CommCareCase
from casexml.apps.phone.models import User as CaseXMLUser
from corehq.apps.domain.shortcuts import create_user
from corehq.apps.domain.utils import normalize_domain_name, domain_restricts_superusers
from corehq.apps.domain.models import LicenseAgreement
from corehq.apps.users.util import normalize_username, user_data_from_registration_form, format_username, raw_username
from corehq.apps.users.xml import group_fixture
from corehq.apps.sms.mixin import CommCareMobileContactMixin, VerifiedNumber, PhoneNumberInUseException, InvalidFormatException
from corehq.elastic import es_wrapper
from couchforms.models import XFormInstance
from dimagi.utils.couch.undo import DeleteRecord, DELETED_SUFFIX
from dimagi.utils.django.email import send_HTML_email
from dimagi.utils.mixins import UnicodeMixIn
from dimagi.utils.dates import force_to_datetime
from dimagi.utils.django.database import get_unique_value
from couchdbkit.exceptions import ResourceConflict, NoResultFound
COUCH_USER_AUTOCREATED_STATUS = 'autocreated'
def _add_to_list(list, obj, default):
if obj in list:
list.remove(obj)
if default:
ret = [obj]
ret.extend(list)
return ret
else:
list.append(obj)
return list
def _get_default(list):
return list[0] if list else None
class OldPermissions(object):
    """Legacy string permission constants and their mapping to the new
    attribute-style names (e.g. 'edit-users' -> 'edit_web_users')."""
    EDIT_WEB_USERS = 'edit-users'
    EDIT_COMMCARE_USERS = 'edit-commcare-users'
    EDIT_DATA = 'edit-data'
    EDIT_APPS = 'edit-apps'

    VIEW_REPORTS = 'view-reports'
    VIEW_REPORT = 'view-report'

    AVAILABLE_PERMISSIONS = [EDIT_DATA, EDIT_WEB_USERS, EDIT_COMMCARE_USERS, EDIT_APPS, VIEW_REPORTS, VIEW_REPORT]

    perms = 'EDIT_DATA, EDIT_WEB_USERS, EDIT_COMMCARE_USERS, EDIT_APPS, VIEW_REPORTS, VIEW_REPORT'.split(', ')
    # old string value -> new lowercase attribute name
    # NOTE(review): locals() inside a list comprehension only sees the class
    # namespace on Python 2; this construct would break under Python 3.
    old_to_new = dict([(locals()[attr], attr.lower()) for attr in perms])

    @classmethod
    def to_new(cls, old_permission):
        # Translate an old-style permission string to its new attribute name.
        return cls.old_to_new[old_permission]
class OldRoles(object):
    """Legacy role presets: (key, label, permission-set) triples plus
    helpers to expose them as choice labels or a key->permissions map."""

    ROLES = (
        ('edit-apps', 'App Editor', set([OldPermissions.EDIT_APPS])),
        ('field-implementer', 'Field Implementer', set([OldPermissions.EDIT_COMMCARE_USERS])),
        ('read-only', 'Read Only', set([]))
    )

    @classmethod
    def get_role_labels(cls):
        """Return (key, label) pairs, with the synthetic admin role first."""
        labels = [('admin', 'Admin')]
        labels.extend((key, label) for (key, label, _) in cls.ROLES)
        return tuple(labels)

    @classmethod
    def get_role_mapping(cls):
        """Return a dict mapping each role key to its permission set."""
        return {key: perms for (key, _, perms) in cls.ROLES}
class Permissions(DocumentSchema):
    """Flat per-domain permission flags plus an explicit report whitelist
    (view_report_list) used when view_reports is off."""
    edit_web_users = BooleanProperty(default=False)
    edit_commcare_users = BooleanProperty(default=False)
    edit_data = BooleanProperty(default=False)
    edit_apps = BooleanProperty(default=False)

    view_reports = BooleanProperty(default=False)
    view_report_list = StringListProperty(default=[])

    @classmethod
    def wrap(cls, data):
        # this is why you don't store module paths in the database...
        # rewrite stored report class paths that moved between modules
        MOVED_REPORT_MAPPING = {
            'corehq.apps.reports.standard.inspect.CaseListReport': 'corehq.apps.reports.standard.cases.basic.CaseListReport'
        }
        reports = data.get('view_report_list', [])
        for i, report_name in enumerate(reports):
            if report_name in MOVED_REPORT_MAPPING:
                reports[i] = MOVED_REPORT_MAPPING[report_name]

        return super(Permissions, cls).wrap(data)

    def view_report(self, report, value=None):
        """Both a getter (when value=None) and setter (when value=True|False)"""
        if value is None:
            return self.view_reports or report in self.view_report_list
        else:
            if value:
                if report not in self.view_report_list:
                    self.view_report_list.append(report)
            else:
                try:
                    self.view_report_list.remove(report)
                except ValueError:
                    pass

    def has(self, permission, data=None):
        # With *data*, the named permission is a getter method (e.g.
        # view_report); without it, a plain boolean attribute.
        if data:
            return getattr(self, permission)(data)
        else:
            return getattr(self, permission)

    def set(self, permission, value, data=None):
        # No-op when the permission already has the requested value.
        if self.has(permission, data) == value:
            return
        if data:
            getattr(self, permission)(data, value)
        else:
            setattr(self, permission, value)

    def _getattr(self, name):
        # Read an attribute, normalizing lists to sets for comparison/merge.
        a = getattr(self, name)
        if isinstance(a, list):
            a = set(a)
        return a

    def _setattr(self, name, value):
        # Write an attribute, converting sets back to lists for storage.
        if isinstance(value, set):
            value = list(value)
        setattr(self, name, value)

    def __or__(self, other):
        # Union of two permission sets: boolean OR for flags, set union
        # for the report whitelist.
        permissions = Permissions()
        for name, value in permissions.properties().items():
            if isinstance(value, (BooleanProperty, ListProperty)):
                permissions._setattr(name, self._getattr(name) | other._getattr(name))
        return permissions

    def __eq__(self, other):
        # Property-wise equality (list-valued properties compared as sets).
        for name in self.properties():
            if self._getattr(name) != other._getattr(name):
                return False
        return True

    @classmethod
    def max(cls):
        # Every permission granted (used for admin roles).
        return Permissions(
            edit_web_users=True,
            edit_commcare_users=True,
            edit_data=True,
            edit_apps=True,
            view_reports=True,
        )
class UserRole(Document):
    """A named, per-domain bundle of Permissions stored in couch."""
    domain = StringProperty()
    name = StringProperty()
    permissions = SchemaProperty(Permissions)

    def get_qualified_id(self):
        # prefixed id; distinguishes stored roles from the synthetic 'admin'
        return 'user-role:%s' % self.get_id

    @classmethod
    def by_domain(cls, domain):
        # All roles defined in *domain*.
        return cls.view('users/roles_by_domain',
            startkey=[domain],
            endkey=[domain, {}],
            include_docs=True,
            reduce=False,
        )

    @classmethod
    def by_domain_and_name(cls, domain, name):
        # Roles in *domain* with exactly the given name.
        return cls.view('users/roles_by_domain',
            key=[domain, name],
            include_docs=True,
            reduce=False,
        )

    @classmethod
    def get_or_create_with_permissions(cls, domain, permissions, name=None):
        """Return an existing role in *domain* whose permissions equal
        *permissions*, creating one (named *name* or a preset label) when
        none matches."""
        if isinstance(permissions, dict):
            permissions = Permissions.wrap(permissions)
        roles = cls.by_domain(domain)
        # try to get a matching role from the db
        for role in roles:
            if role.permissions == permissions:
                return role
        # otherwise create it
        def get_name():
            # falls through to None for unrecognized permission combos
            if name:
                return name
            elif permissions == Permissions():
                return "Read Only (No Reports)"
            elif permissions == Permissions(edit_apps=True, view_reports=True):
                return "App Editor"
            elif permissions == Permissions(view_reports=True):
                return "Read Only"
            elif permissions == Permissions(edit_commcare_users=True, view_reports=True):
                return "Field Implementer"
        role = cls(domain=domain, permissions=permissions, name=get_name())
        role.save()
        return role

    @classmethod
    def init_domain_with_presets(cls, domain):
        # Seed a new domain with the three standard roles.
        cls.get_or_create_with_permissions(domain, Permissions(edit_apps=True, view_reports=True), 'App Editor')
        cls.get_or_create_with_permissions(domain, Permissions(edit_commcare_users=True, view_reports=True), 'Field Implementer')
        cls.get_or_create_with_permissions(domain, Permissions(view_reports=True), 'Read Only')

    @classmethod
    def get_default(cls, domain=None):
        # An unsaved role with no permissions.
        return cls(permissions=Permissions(), domain=domain, name=None)

    @classmethod
    def role_choices(cls, domain):
        # (qualified_id, label) pairs including the synthetic admin role.
        return [(role.get_qualified_id(), role.name or '(No Name)') for role in [AdminUserRole(domain=domain)] + list(cls.by_domain(domain))]

    @classmethod
    def commcareuser_role_choices(cls, domain):
        # Like role_choices, but with a 'none' option and without admin.
        return [('none','(none)')] + [(role.get_qualified_id(), role.name or '(No Name)') for role in list(cls.by_domain(domain))]
# Canned permission bundles; keys are the qualified ids accepted by set_role().
# NOTE(review): 'no-permissions' maps to the same name/permissions as
# 'read-only' (view_reports=True) -- confirm this is intentional.
PERMISSIONS_PRESETS = {
    'edit-apps': {'name': 'App Editor', 'permissions': Permissions(edit_apps=True, view_reports=True)},
    'field-implementer': {'name': 'Field Implementer', 'permissions': Permissions(edit_commcare_users=True, view_reports=True)},
    'read-only': {'name': 'Read Only', 'permissions': Permissions(view_reports=True)},
    'no-permissions': {'name': 'Read Only', 'permissions': Permissions(view_reports=True)},
}
class AdminUserRole(UserRole):
    """Synthetic, never-persisted role granting every permission in a domain."""

    def __init__(self, domain):
        every_permission = Permissions.max()
        super(AdminUserRole, self).__init__(
            domain=domain,
            name='Admin',
            permissions=every_permission,
        )

    def get_qualified_id(self):
        # Admins use the fixed id 'admin' instead of 'user-role:<doc_id>'.
        return 'admin'
class DomainMembershipError(Exception):
    """Raised when a user has no membership for the requested domain."""
    pass
class Membership(DocumentSchema):
    """Base schema for a user's membership in a scope (domain or organization)."""
    # If we find a need for making UserRoles more general and decoupling it from domain then most of the role stuff from
    # Domain membership can be put in here
    is_admin = BooleanProperty(default=False)
class DomainMembership(Membership):
    """
    Each user can have multiple accounts on the
    web domain. This is primarily for Dimagi staff.
    """
    domain = StringProperty()
    timezone = StringProperty(default=getattr(settings, "TIME_ZONE", "UTC"))
    override_global_tz = BooleanProperty(default=False)
    role_id = StringProperty()
    @property
    def permissions(self):
        """Permissions granted by this membership's role (empty if roleless)."""
        if self.role:
            return self.role.permissions
        else:
            return Permissions()
    @classmethod
    def wrap(cls, data):
        """Wrap, migrating legacy docs on the fly:

        - old 'subject' key is renamed to 'domain'
        - old-style 'permissions'/'permissions_data' lists are converted into
          a UserRole (created if necessary) referenced via role_id
        """
        if data.get('subject'):
            data['domain'] = data['subject']
            del data['subject']
        # Do a just-in-time conversion of old permissions
        old_permissions = data.get('permissions')
        if old_permissions is not None:
            del data['permissions']
            if data.has_key('permissions_data'):
                permissions_data = data['permissions_data']
                del data['permissions_data']
            else:
                permissions_data = {}
            # NOTE(review): assumes old docs always carry 'is_admin'; a
            # missing key would raise KeyError here -- confirm against data.
            if not data['is_admin']:
                view_report_list = permissions_data.get('view-report')
                custom_permissions = {}
                for old_permission in old_permissions:
                    # 'view-report' entries are handled via the list below
                    if old_permission == 'view-report':
                        continue
                    new_permission = OldPermissions.to_new(old_permission)
                    custom_permissions[new_permission] = True
                if not view_report_list:
                    # Anyone whose report permissions haven't been explicitly taken away/reduced
                    # should be able to see reports by default
                    custom_permissions['view_reports'] = True
                else:
                    custom_permissions['view_report_list'] = view_report_list
                self = super(DomainMembership, cls).wrap(data)
                self.role_id = UserRole.get_or_create_with_permissions(self.domain, custom_permissions).get_id
                return self
        return super(DomainMembership, cls).wrap(data)
    @property
    def role(self):
        """Effective role: AdminUserRole for admins, the referenced UserRole
        otherwise, or None when no role is assigned."""
        if self.is_admin:
            return AdminUserRole(self.domain)
        elif self.role_id:
            return UserRole.get(self.role_id)
        else:
            return None
    def has_permission(self, permission, data=None):
        # Admins short-circuit; otherwise defer to the role's permissions.
        return self.is_admin or self.permissions.has(permission, data)
    def viewable_reports(self):
        """Report paths this membership may view (may be empty)."""
        return self.permissions.view_report_list
    class Meta:
        app_label = 'users'
class OrgMembership(Membership):
    """A user's membership in an organization."""
    organization = StringProperty()
    # NOTE(review): mutable default ([]) is shared at class level unless
    # couchdbkit copies it per instance -- confirm with the library.
    team_ids = StringListProperty(default=[]) # a set of ids corresponding to which teams the user is a member of
class OrgMembershipError(Exception):
    """Raised for invalid organization-membership operations."""
    pass
class CustomDomainMembership(DomainMembership):
    """Membership whose role is stored inline rather than referenced by role_id."""
    custom_role = SchemaProperty(UserRole)
    @property
    def role(self):
        # Unlike DomainMembership, the role lives on the membership itself.
        if self.is_admin:
            return AdminUserRole(self.domain)
        else:
            return self.custom_role
    def set_permission(self, permission, value, data=None):
        """Toggle a single permission on the inline role."""
        self.custom_role.domain = self.domain
        self.custom_role.permissions.set(permission, value, data)
class IsMemberOfMixin(DocumentSchema):

    def _is_member_of(self, domain):
        """True if the user belongs to ``domain`` directly, or is a global
        admin and the domain does not lock out superusers."""
        if domain in self.get_domains():
            return True
        return self.is_global_admin() and not domain_restricts_superusers(domain)

    def is_member_of(self, domain_qs):
        """
        Accept either a domain name or a domain object and return whether the
        user is part of that domain, natively or through a team.
        """
        if hasattr(domain_qs, 'name'):
            domain_name = domain_qs.name
        else:
            domain_name = domain_qs
        return self._is_member_of(domain_name)

    def is_global_admin(self):
        # Subclasses override this to grant superuser-style access.
        return False
class _AuthorizableMixin(IsMemberOfMixin):
    """
    Use either SingleMembershipMixin or MultiMembershipMixin instead of this
    """
    def get_domain_membership(self, domain):
        """Return this user's DomainMembership for ``domain``, or None.

        If ``domains`` and ``domain_memberships`` disagree, log the
        inconsistency and rebuild ``domains`` from the memberships."""
        domain_membership = None
        try:
            for d in self.domain_memberships:
                if d.domain == domain:
                    domain_membership = d
                    if domain not in self.domains:
                        raise self.Inconsistent("Domain '%s' is in domain_memberships but not domains" % domain)
            if not domain_membership and domain in self.domains:
                raise self.Inconsistent("Domain '%s' is in domain but not in domain_memberships" % domain)
        except self.Inconsistent as e:
            logging.warning(e)
            self.domains = [d.domain for d in self.domain_memberships]
        return domain_membership
    def add_domain_membership(self, domain, timezone=None, **kwargs):
        """Add a membership for ``domain`` (no-op if already a member),
        creating and saving the Domain doc itself if it does not exist yet."""
        for d in self.domain_memberships:
            if d.domain == domain:
                if domain not in self.domains:
                    raise self.Inconsistent("Domain '%s' is in domain_memberships but not domains" % domain)
                return
        domain_obj = Domain.get_by_name(domain, strict=True)
        if not domain_obj:
            domain_obj = Domain(is_active=True, name=domain, date_created=datetime.utcnow())
            domain_obj.save()
        if timezone:
            domain_membership = DomainMembership(domain=domain, timezone=timezone, **kwargs)
        else:
            domain_membership = DomainMembership(domain=domain,
                                            timezone=domain_obj.default_timezone,
                                            **kwargs)
        self.domain_memberships.append(domain_membership)
        self.domains.append(domain)
    def delete_domain_membership(self, domain, create_record=False):
        """Remove the membership (and domain entry) for ``domain``.

        If ``create_record`` is True and a membership was found, save and
        return a DomainRemovalRecord; otherwise return None.
        """
        record = None
        for i, dm in enumerate(self.domain_memberships):
            if dm.domain == domain:
                if create_record:
                    record = DomainRemovalRecord(
                        domain=domain,
                        user_id=self.user_id,
                        domain_membership=dm,
                    )
                del self.domain_memberships[i]
                break
        for i, domain_name in enumerate(self.domains):
            if domain_name == domain:
                del self.domains[i]
                break
        if record is not None:
            # Fix: ``record`` used to be referenced whenever create_record was
            # True, raising UnboundLocalError if the user had no membership
            # for ``domain``. Now we only save/return a record that was made.
            record.save()
            return record
    def is_domain_admin(self, domain=None):
        """True if the user administers ``domain`` (or current_domain)."""
        if not domain:
            # hack for template
            if hasattr(self, 'current_domain'):
                # this is a hack needed because we can't pass parameters from views
                domain = self.current_domain
            else:
                return False # no domain, no admin
        if self.is_global_admin() and (domain is None or not domain_restricts_superusers(domain)):
            return True
        dm = self.get_domain_membership(domain)
        if dm:
            return dm.is_admin
        else:
            return False
    def get_domains(self):
        """Domain names the user is a member of; raises Inconsistent if the
        two membership lists disagree."""
        domains = [dm.domain for dm in self.domain_memberships]
        if set(domains) == set(self.domains):
            return domains
        else:
            raise self.Inconsistent("domains and domain_memberships out of sync")
    @memoized
    def has_permission(self, domain, permission, data=None):
        """Whether the user holds ``permission`` in ``domain`` (memoized)."""
        # is_admin is the same as having all the permissions set
        if self.is_global_admin():
            return True
        elif self.is_domain_admin(domain):
            return True
        dm = self.get_domain_membership(domain)
        if dm:
            return dm.has_permission(permission, data)
        else:
            return False
    @memoized
    def get_role(self, domain=None, checking_global_admin=True):
        """
        Get the role object for this user

        Raises DomainMembershipError if the user is not a member of ``domain``.
        """
        if domain is None:
            # default to current_domain for django templates
            if hasattr(self, 'current_domain'):
                domain = self.current_domain
            else:
                domain = None
        if checking_global_admin and self.is_global_admin():
            return AdminUserRole(domain=domain)
        if self.is_member_of(domain): #need to have a way of seeing is_member_of
            return self.get_domain_membership(domain).role
        else:
            raise DomainMembershipError()
    def set_role(self, domain, role_qualified_id):
        """
        role_qualified_id is either 'admin' 'user-role:[id]'

        (also accepts a PERMISSIONS_PRESETS key or 'none')
        """
        dm = self.get_domain_membership(domain)
        dm.is_admin = False
        if role_qualified_id == "admin":
            dm.is_admin = True
        elif role_qualified_id.startswith('user-role:'):
            dm.role_id = role_qualified_id[len('user-role:'):]
        elif role_qualified_id in PERMISSIONS_PRESETS:
            preset = PERMISSIONS_PRESETS[role_qualified_id]
            dm.role_id = UserRole.get_or_create_with_permissions(domain, preset['permissions'], preset['name']).get_id
        elif role_qualified_id == 'none':
            dm.role_id = None
        else:
            raise Exception("unexpected role_qualified_id is %r" % role_qualified_id)
        # permissions/roles are memoized per instance; drop stale entries
        self.has_permission.reset_cache(self)
        self.get_role.reset_cache(self)
    def role_label(self, domain=None):
        """Human-readable role name for ``domain`` (or current_domain)."""
        if not domain:
            try:
                domain = self.current_domain
            except (AttributeError, KeyError):
                return None
        try:
            return self.get_role(domain, checking_global_admin=False).name
        except TypeError:
            return "Unknown User"
        except DomainMembershipError:
            return "Unauthorized User"
        except Exception:
            return None
class SingleMembershipMixin(_AuthorizableMixin):
    """For users belonging to exactly one domain; adapts the single
    ``domain_membership`` to the plural _AuthorizableMixin interface."""
    domain_membership = SchemaProperty(DomainMembership)
    @property
    def domains(self):
        # ``self.domain`` is expected to be supplied by the concrete class.
        return [self.domain]
    @property
    def domain_memberships(self):
        return [self.domain_membership]
    def add_domain_membership(self, domain, timezone=None, **kwargs):
        raise NotImplementedError
    def delete_domain_membership(self, domain, create_record=False):
        raise NotImplementedError
class MultiMembershipMixin(_AuthorizableMixin):
    """For users that can belong to many domains (e.g. web users)."""
    domains = StringListProperty()
    domain_memberships = SchemaListProperty(DomainMembership)
class LowercaseStringProperty(StringProperty):
    """A StringProperty whose value is lowercased when serialized to JSON."""

    def _adjust_value(self, value):
        # None passes through untouched; anything else is lowercased.
        return value.lower() if value is not None else None

    def to_json(self, value):
        lowered = self._adjust_value(value)
        return super(LowercaseStringProperty, self).to_json(lowered)
class DjangoUserMixin(DocumentSchema):
    """Mirrors django.contrib.auth User fields on the couch document so the
    two records can be kept in sync (see sync_to/from_django_user)."""
    username = LowercaseStringProperty()
    first_name = StringProperty()
    last_name = StringProperty()
    email = LowercaseStringProperty()
    password = StringProperty()
    is_staff = BooleanProperty()
    is_active = BooleanProperty()
    is_superuser = BooleanProperty()
    last_login = DateTimeProperty()
    date_joined = DateTimeProperty()
    # Attribute names synced between the couch doc and the django User row.
    ATTRS = (
        'username',
        'first_name',
        'last_name',
        'email',
        'password',
        'is_staff',
        'is_active',
        'is_superuser',
        'last_login',
        'date_joined',
    )
    def set_password(self, raw_password):
        # Hash via a throwaway django User so we reuse django's hasher.
        dummy = User()
        dummy.set_password(raw_password)
        self.password = dummy.password
    def check_password(self, password):
        """ Currently just for debugging"""
        dummy = User()
        dummy.password = self.password
        return dummy.check_password(password)
class EulaMixin(DocumentSchema):
    """Tracks which versions of the End User License Agreement a user signed."""
    CURRENT_VERSION = '2.0' # Set this to the most up to date version of the eula
    eulas = SchemaListProperty(LicenseAgreement)
    @classmethod
    def migrate_eula(cls, data):
        """Convert a legacy single-'eula' doc to the 'eulas' list, tagging the
        old agreement as version '1.0'. Mutates and returns ``data``."""
        if 'eula' in data:
            data['eulas'] = [data['eula']]
            data['eulas'][0]['version'] = '1.0'
            del data['eula']
        return data
    def is_eula_signed(self, version=CURRENT_VERSION):
        """True if ``version`` was signed; superusers always count as signed."""
        # is_superuser is supplied by DjangoUserMixin on concrete user classes
        if self.is_superuser:
            return True
        for eula in self.eulas:
            if eula.version == version:
                return eula.signed
        return False
    def get_eula(self, version):
        """Return the LicenseAgreement for ``version``, or None."""
        for eula in self.eulas:
            if eula.version == version:
                return eula
        return None
    @property
    def eula(self):
        """The current-version EULA, created (unsigned) on first access.

        Fix: the old signature declared a ``version=CURRENT_VERSION``
        parameter, but properties can only ever receive ``self``, so the
        parameter was dead; it always behaved as CURRENT_VERSION.
        """
        current_eula = self.get_eula(self.CURRENT_VERSION)
        if not current_eula:
            current_eula = LicenseAgreement(type="End User License Agreement", version=self.CURRENT_VERSION)
            self.eulas.append(current_eula)
        assert current_eula.type == "End User License Agreement"
        return current_eula
class KeyboardShortcutsConfig(DocumentSchema):
    """Per-user preference for HQ keyboard shortcuts."""
    # Fix: ``BooleanProperty(False)`` passed False as the first positional
    # argument (couchdbkit's ``verbose_name``), not as the default value.
    enabled = BooleanProperty(default=False)
    main_key = StringProperty(choices=["ctrl", "option", "command", "alt", "shift", "control"])
    main_keycode = IntegerProperty()
class CouchUser(Document, DjangoUserMixin, IsMemberOfMixin, UnicodeMixIn, EulaMixin):
    """
    A user (for web and commcare)
    """
    base_doc = 'CouchUser'
    device_ids = ListProperty()
    phone_numbers = ListProperty()
    created_on = DateTimeProperty(default=datetime(year=1900, month=1, day=1))
    # For now, 'status' is things like:
    #     ('auto_created', 'Automatically created from form submission.'),
    #     ('phone_registered', 'Registered from phone'),
    #     ('site_edited', 'Manually added or edited from the HQ website.'),
    status = StringProperty()
    language = StringProperty()
    email_opt_out = BooleanProperty(default=False)
    announcements_seen = ListProperty()
    keyboard_shortcuts = SchemaProperty(KeyboardShortcutsConfig)
    _user = None
    _user_checked = False
    @classmethod
    def wrap(cls, data, should_save=False):
        """Wrap, dropping the obsolete 'organizations' field and migrating
        legacy EULA data; resaves the doc if anything was migrated."""
        if "organizations" in data:
            del data["organizations"]
            should_save = True
        data = cls.migrate_eula(data)
        couch_user = super(CouchUser, cls).wrap(data)
        if should_save:
            couch_user.save()
        return couch_user
    @classmethod
    def es_fakes(cls, domain, fields=None, start_at=None, size=None, wrap=True):
        """
        Get users from ES. Use instead of by_domain()
        This is faster than big db calls, but only returns partial data.
        Set wrap to False to get a raw dict object (much faster).
        This raw dict can be passed to _report_user_dict.
        The save method has been disabled.
        """
        fields = fields or ['_id', 'username', 'first_name', 'last_name',
                            'doc_type', 'is_active', 'email']
        raw = es_wrapper('users', domain=domain, doc_type=cls.__name__,
                         fields=fields, start_at=start_at, size=size)
        if not wrap:
            return raw
        def save(*args, **kwargs):
            raise NotImplementedError("This is a fake user, don't save it!")
        # Subclass on the fly so the fakes cannot be persisted accidentally.
        ESUser = type(cls.__name__, (cls,), {'save': save})
        return [ESUser(u) for u in raw]
    class AccountTypeError(Exception):
        pass
    class Inconsistent(Exception):
        pass
    class InvalidID(Exception):
        pass
    @property
    def is_dimagi(self):
        return self.username.endswith('@dimagi.com')
    @property
    def raw_username(self):
        # Mobile usernames are 'name@domain.commcarehq.org'; strip the suffix.
        if self.doc_type == "CommCareUser":
            return self.username.split("@")[0]
        else:
            return self.username
    def html_username(self):
        """HTML snippet for the username, splitting out the @domain part."""
        username = self.raw_username
        if '@' in username:
            html = "<span class='user_username'>%s</span><span class='user_domainname'>@%s</span>" % \
                   tuple(username.split('@'))
        else:
            html = "<span class='user_username'>%s</span>" % username
        return html
    @property
    def userID(self):
        return self._id
    user_id = userID
    class Meta:
        app_label = 'users'
    def __unicode__(self):
        return "<%s '%s'>" % (self.__class__.__name__, self.get_id)
    def get_email(self):
        return self.email
    @property
    def projects(self):
        return map(Domain.get_by_name, self.get_domains())
    @property
    def full_name(self):
        return (u"%s %s" % (self.first_name or u'', self.last_name or u'')).strip()
    @property
    def human_friendly_name(self):
        return self.full_name if self.full_name else self.username
    formatted_name = full_name
    name = full_name
    def set_full_name(self, full_name):
        # First token becomes first_name; the remainder becomes last_name.
        data = full_name.split()
        self.first_name = data.pop(0)
        self.last_name = ' '.join(data)
    def delete(self):
        """Delete the paired django user (if any), then this doc."""
        try:
            user = self.get_django_user()
            user.delete()
        except User.DoesNotExist:
            pass
        super(CouchUser, self).delete() # Call the "real" delete() method.
        couch_user_post_save.send_robust(sender='couch_user', couch_user=self)
    def delete_phone_number(self, phone_number):
        """Remove the first occurrence of ``phone_number``, save, and drop its
        verified-number record."""
        for i in range(0,len(self.phone_numbers)):
            if self.phone_numbers[i] == phone_number:
                del self.phone_numbers[i]
                break
        self.save()
        self.delete_verified_number(phone_number)
    def get_django_user(self):
        return User.objects.get(username__iexact=self.username)
    def add_phone_number(self, phone_number, default=False, **kwargs):
        """ Don't add phone numbers if they already exist """
        if not isinstance(phone_number, basestring):
            phone_number = str(phone_number)
        self.phone_numbers = _add_to_list(self.phone_numbers, phone_number, default)
    def set_default_phone_number(self, phone_number):
        self.add_phone_number(phone_number, True)
        self.save()
    @property
    def default_phone_number(self):
        return _get_default(self.phone_numbers)
    phone_number = default_phone_number
    def phone_numbers_extended(self, active_user=None):
        """Return [{'number', 'status', 'contact', ['dup_url']}] for each of
        the user's phone numbers; status is one of verified/pending/
        unverified/duplicate/invalid."""
        # TODO: what about web users... do we not want to verify phone numbers
        # for them too? if so, CommCareMobileContactMixin should be on CouchUser,
        # not CommCareUser
        # hack to work around the above issue
        if not isinstance(self, CommCareMobileContactMixin):
            return [{'number': phone, 'status': 'unverified', 'contact': None} for phone in self.phone_numbers]
        verified = self.get_verified_numbers(True)
        def extend_phone(phone):
            extended_info = {}
            contact = verified.get(phone)
            if contact:
                status = 'verified' if contact.verified else 'pending'
            else:
                try:
                    self.verify_unique_number(phone)
                    status = 'unverified'
                except PhoneNumberInUseException:
                    status = 'duplicate'
                    duplicate = VerifiedNumber.by_phone(phone, include_pending=True)
                    assert duplicate is not None, 'expected duplicate VerifiedNumber entry'
                    # TODO seems like this could be a useful utility function? where to put it...
                    try:
                        doc_type = {
                            'CouchUser': 'user',
                            'CommCareUser': 'user',
                            'CommCareCase': 'case',
                            'CommConnectCase': 'case',
                        }[duplicate.owner_doc_type]
                        from corehq.apps.users.views.mobile import EditCommCareUserView
                        url_ref, doc_id_param = {
                            'user': (EditCommCareUserView.urlname, 'couch_user_id'),
                            'case': ('case_details', 'case_id'),
                        }[doc_type]
                        dup_url = reverse(url_ref, kwargs={'domain': duplicate.domain, doc_id_param: duplicate.owner_id})
                        if active_user is None or active_user.is_member_of(duplicate.domain):
                            extended_info['dup_url'] = dup_url
                    except Exception:
                        # best-effort: dup_url is omitted if anything goes wrong
                        pass
                except InvalidFormatException:
                    status = 'invalid'
            extended_info.update({'number': phone, 'status': status, 'contact': contact})
            return extended_info
        return [extend_phone(phone) for phone in self.phone_numbers]
    @property
    def couch_id(self):
        return self._id
    # Couch view wrappers
    @classmethod
    def all(cls):
        return CouchUser.view("users/by_username", include_docs=True)
    @classmethod
    def by_domain(cls, domain, is_active=True, reduce=False, limit=None, skip=0, strict=False, doc_type=None):
        """Users of ``domain``; the view key includes doc_type unless called
        on CouchUser itself (which matches all user types)."""
        flag = "active" if is_active else "inactive"
        doc_type = doc_type or cls.__name__
        if cls.__name__ == "CouchUser":
            key = [flag, domain]
        else:
            key = [flag, domain, doc_type]
        extra_args = dict()
        if not reduce:
            extra_args.update(include_docs=True)
            if limit is not None:
                extra_args.update(
                    limit=limit,
                    skip=skip
                )
        return cls.view("users/by_domain",
            reduce=reduce,
            startkey=key,
            endkey=key + [{}],
            #stale=None if strict else settings.COUCH_STALE_QUERY,
            **extra_args
        ).all()
    @classmethod
    def ids_by_domain(cls, domain, is_active=True):
        """Just the doc ids of ``domain``'s users (no doc fetch)."""
        flag = "active" if is_active else "inactive"
        if cls.__name__ == "CouchUser":
            key = [flag, domain]
        else:
            key = [flag, domain, cls.__name__]
        return [r['id'] for r in cls.get_db().view("users/by_domain",
            startkey=key,
            endkey=key + [{}],
            reduce=False,
            include_docs=False,
        )]
    @classmethod
    def total_by_domain(cls, domain, is_active=True):
        """Count of ``domain``'s users via the reduce view."""
        data = cls.by_domain(domain, is_active, reduce=True)
        return data[0].get('value', 0) if data else 0
    @classmethod
    def phone_users_by_domain(cls, domain):
        return CouchUser.view("users/phone_users_by_domain",
            startkey=[domain],
            endkey=[domain, {}],
            include_docs=True,
        )
    def is_previewer(self):
        from django.conf import settings
        return (self.is_superuser or
                re.compile(settings.PREVIEWER_RE).match(self.username))
    def sync_from_django_user(self, django_user):
        """Copy synced attributes from the django user onto this doc."""
        if not django_user:
            django_user = self.get_django_user()
        for attr in DjangoUserMixin.ATTRS:
            # name might be truncated so don't backwards sync
            one_way_attrs = ['first_name', 'last_name']
            if attr not in one_way_attrs or not getattr(self, attr):
                # don't sync one-way attrs back to couch unless we didn't have
                # something there in the first place. this is hack to allow
                # unit test workflows that create the django user first to work
                setattr(self, attr, getattr(django_user, attr))
    def sync_to_django_user(self):
        """Return an (unsaved) django User mirroring this doc's attributes."""
        try:
            django_user = self.get_django_user()
        except User.DoesNotExist:
            django_user = User(username=self.username)
        for attr in DjangoUserMixin.ATTRS:
            attr_val = getattr(self, attr) or ''
            # truncate names when saving to django
            if attr == 'first_name' or attr == 'last_name':
                attr_val = attr_val[:30]
            setattr(django_user, attr, attr_val)
        # marker checked by django_user_post_save_signal to avoid a sync loop
        django_user.DO_NOT_SAVE_COUCH_USER= True
        return django_user
    def sync_from_old_couch_user(self, old_couch_user):
        """Copy fields from a legacy (pre-split) couch user doc."""
        login = old_couch_user.default_account.login
        self.sync_from_django_user(login)
        for attr in (
            'device_ids',
            'phone_numbers',
            'created_on',
            'status',
        ):
            setattr(self, attr, getattr(old_couch_user, attr))
    @classmethod
    def from_old_couch_user(cls, old_couch_user, copy_id=True):
        """Build a WebUser/CommCareUser from a legacy couch user doc."""
        if old_couch_user.account_type == "WebAccount":
            couch_user = WebUser()
        else:
            couch_user = CommCareUser()
        couch_user.sync_from_old_couch_user(old_couch_user)
        if old_couch_user.email:
            couch_user.email = old_couch_user.email
        if copy_id:
            couch_user._id = old_couch_user.default_account.login_id
        return couch_user
    @classmethod
    def wrap_correctly(cls, source):
        """Wrap ``source`` as the concrete user class named in its doc_type,
        redirecting legacy pre-split docs to their migrated counterpart."""
        if source['doc_type'] == 'CouchUser' and \
                'commcare_accounts' in source and \
                'web_accounts' in source:
            from . import old_couch_user_models
            # todo: remove this functionality and the old user models module
            logging.error('still accessing old user models')
            user_id = old_couch_user_models.CouchUser.wrap(source).default_account.login_id
            return cls.get_by_user_id(user_id)
        else:
            return {
                'WebUser': WebUser,
                'CommCareUser': CommCareUser,
                'FakeUser': FakeUser,
            }[source['doc_type']].wrap(source)
    @classmethod
    def get_by_username(cls, username):
        """Fetch a user by exact username, retrying with a fresh (non-stale)
        view read if the stale read misses or returns the wrong doc."""
        def get(stale, raise_if_none):
            result = cls.get_db().view('users/by_username',
                key=username,
                include_docs=True,
                #stale=stale,
            )
            return result.one(except_all=raise_if_none)
        try:
            result = get(stale=settings.COUCH_STALE_QUERY, raise_if_none=True)
            if result['doc'] is None or result['doc']['username'] != username:
                raise NoResultFound
        except NoMoreData:
            logging.exception('called get_by_username(%r) and it failed pretty bad' % username)
            raise
        except NoResultFound:
            result = get(stale=None, raise_if_none=False)
        if result:
            return cls.wrap_correctly(result['doc'])
        else:
            return None
    @classmethod
    def get_by_default_phone(cls, phone_number):
        result = cls.get_db().view('users/by_default_phone', key=phone_number, include_docs=True).one()
        if result:
            return cls.wrap_correctly(result['doc'])
        else:
            return None
    @classmethod
    def get_by_user_id(cls, userID, domain=None):
        """
        if domain is given, checks to make sure the user is a member of that domain
        returns None if there's no user found or if the domain check fails
        """
        try:
            couch_user = cls.wrap_correctly(cls.get_db().get(userID))
        except ResourceNotFound:
            return None
        if couch_user.doc_type != cls.__name__ and cls.__name__ != "CouchUser":
            raise CouchUser.AccountTypeError()
        if domain:
            if not couch_user.is_member_of(domain):
                return None
        return couch_user
    @classmethod
    def from_django_user(cls, django_user):
        return cls.get_by_username(django_user.username)
    @classmethod
    def create(cls, domain, username, password, email=None, uuid='', date='',
               first_name='', last_name='', **kwargs):
        """Create the django user (if missing) and a matching couch user.
        ``uuid`` forces the couch doc id; ``date`` forces created_on."""
        try:
            django_user = User.objects.get(username=username)
        except User.DoesNotExist:
            django_user = create_user(
                username, password=password, email=email,
                first_name=first_name, last_name=last_name
            )
        if uuid:
            # NOTE(review): re.match only anchors at the start, so a uuid
            # with a valid prefix but trailing junk passes -- confirm intent.
            if not re.match(r'[\w-]+', uuid):
                raise cls.InvalidID('invalid id %r' % uuid)
            couch_user = cls(_id=uuid)
        else:
            couch_user = cls()
        if date:
            couch_user.created_on = force_to_datetime(date)
        else:
            couch_user.created_on = datetime.utcnow()
        couch_user.sync_from_django_user(django_user)
        return couch_user
    def to_be_deleted(self):
        return self.base_doc.endswith(DELETED_SUFFIX)
    def change_username(self, username):
        """Rename both the django user and this doc (no-op if unchanged).
        Raises Inconsistent if the new username is already taken."""
        if username == self.username:
            return
        if User.objects.filter(username=username).exists():
            # Fix: the message used to interpolate self.username (the OLD
            # name) instead of the conflicting new one.
            raise self.Inconsistent("User with username %s already exists" % username)
        django_user = self.get_django_user()
        django_user.DO_NOT_SAVE_COUCH_USER = True
        django_user.username = username
        django_user.save()
        self.username = username
        self.save()
    def save(self, **params):
        """Save, keeping the django user in sync and firing post-save signals;
        receiver errors are reported via notify_exception, not raised."""
        # test no username conflict
        by_username = self.get_db().view('users/by_username', key=self.username).first()
        if by_username and by_username['id'] != self._id:
            raise self.Inconsistent("CouchUser with username %s already exists" % self.username)
        if not self.to_be_deleted():
            django_user = self.sync_to_django_user()
            django_user.save()
        super(CouchUser, self).save(**params)
        results = couch_user_post_save.send_robust(sender='couch_user', couch_user=self)
        for result in results:
            # Second argument is None if there was no error
            if result[1]:
                notify_exception(
                    None,
                    message="Error occured while syncing user %s: %s" %
                            (self.username, str(result[1]))
                )
    @classmethod
    def django_user_post_save_signal(cls, sender, django_user, created, max_tries=3):
        """Mirror a django-user save back onto the couch doc, retrying on
        conflict up to ``max_tries`` times."""
        if hasattr(django_user, 'DO_NOT_SAVE_COUCH_USER'):
            del django_user.DO_NOT_SAVE_COUCH_USER
        else:
            couch_user = cls.from_django_user(django_user)
            if couch_user:
                couch_user.sync_from_django_user(django_user)
                try:
                    # avoid triggering cyclical sync
                    super(CouchUser, couch_user).save(**get_safe_write_kwargs())
                except ResourceConflict:
                    cls.django_user_post_save_signal(sender, django_user, created, max_tries - 1)
    def is_deleted(self):
        return self.base_doc.endswith(DELETED_SUFFIX)
    def get_viewable_reports(self, domain=None, name=True, slug=False):
        """Report identifiers this user may view in ``domain`` (names by
        default, slugs if ``slug``, dotted paths otherwise)."""
        try:
            domain = domain or self.current_domain
        except AttributeError:
            domain = None
        try:
            if self.is_commcare_user():
                role = self.get_role(domain)
                if role is None:
                    models = []
                else:
                    models = role.permissions.view_report_list
            else:
                models = self.get_domain_membership(domain).viewable_reports()
            if slug:
                return [to_function(m).slug for m in models]
            if name:
                return [to_function(m).name for m in models]
            return models
        except AttributeError:
            # todo: what is this here for? we should really be catching something
            # more specific and the try/catch should be more isolated.
            return []
    def get_exportable_reports(self, domain=None):
        """Viewable reports that are also data-export interfaces."""
        viewable_reports = self.get_viewable_reports(domain=domain, slug=True)
        from corehq.apps.data_interfaces.dispatcher import DataInterfaceDispatcher
        export_reports = set(DataInterfaceDispatcher().get_reports_dict(domain).keys())
        return list(export_reports.intersection(viewable_reports))
    def can_export_data(self, domain=None):
        can_see_exports = self.can_view_reports()
        if not can_see_exports:
            can_see_exports = bool(self.get_exportable_reports(domain))
        return can_see_exports
    def is_current_web_user(self, request):
        return self.user_id == request.couch_user.user_id
    def __getattr__(self, item):
        # Synthesize can_<permission>([domain], [data]) helpers on the fly,
        # backed by has_permission(); everything else defers to Document.
        if item.startswith('can_'):
            perm = item[len('can_'):]
            if perm:
                def fn(domain=None, data=None):
                    try:
                        domain = domain or self.current_domain
                    except AttributeError:
                        domain = None
                    return self.has_permission(domain, perm, data)
                fn.__name__ = item
                return fn
        return super(CouchUser, self).__getattr__(item)
class CommCareUser(CouchUser, SingleMembershipMixin, CommCareMobileContactMixin):
domain = StringProperty()
registering_device_id = StringProperty()
user_data = DictProperty()
    @classmethod
    def wrap(cls, data):
        """Wrap, rebuilding the domain_membership for legacy docs that stored
        a bare role_id or had no membership domain."""
        # migrations from using role_id to using the domain_memberships
        role_id = None
        should_save = False
        if not data.has_key('domain_membership') or not data['domain_membership'].get('domain', None):
            should_save = True
        if data.has_key('role_id'):
            role_id = data["role_id"]
            del data['role_id']
            should_save = True
        self = super(CommCareUser, cls).wrap(data)
        if should_save:
            self.domain_membership = DomainMembership(domain=data.get('domain', ""))
            if role_id:
                self.domain_membership.role_id = role_id
            # self.save() # will uncomment when I figure out what's happening with sheels commcareuser
        return self
    def save(self, **params):
        """Save, then fire commcare_user_post_save; receiver errors are
        reported via notify_exception rather than raised."""
        super(CommCareUser, self).save(**params)
        from corehq.apps.users.signals import commcare_user_post_save
        results = commcare_user_post_save.send_robust(sender='couch_user', couch_user=self)
        for result in results:
            # Second argument is None if there was no error
            if result[1]:
                notify_exception(
                    None,
                    message="Error occured while syncing user %s: %s" %
                            (self.username, str(result[1]))
                )
    def is_domain_admin(self, domain=None):
        # cloudcare workaround
        # Mobile workers are never domain admins.
        return False
    def sync_from_old_couch_user(self, old_couch_user):
        """Copy mobile-specific fields from a legacy (pre-split) user doc."""
        super(CommCareUser, self).sync_from_old_couch_user(old_couch_user)
        self.domain                 = normalize_domain_name(old_couch_user.default_account.domain)
        self.registering_device_id  = old_couch_user.default_account.registering_device_id
        self.user_data              = old_couch_user.default_account.user_data
    @classmethod
    def create(cls, domain, username, password, email=None, uuid='', date='', phone_number=None, **kwargs):
        """
        used to be a function called `create_hq_user_from_commcare_registration_info`
        """
        # Build the base user, then fill in the mobile-specific fields
        # (device, user_data, domain membership) and save.
        commcare_user = super(CommCareUser, cls).create(domain, username, password, email, uuid, date, **kwargs)
        if phone_number is not None:
            commcare_user.add_phone_number(phone_number)
        device_id = kwargs.get('device_id', '')
        user_data = kwargs.get('user_data', {})
        # populate the couch user
        commcare_user.domain = domain
        commcare_user.device_ids = [device_id]
        commcare_user.registering_device_id = device_id
        commcare_user.user_data = user_data
        commcare_user.domain_membership = DomainMembership(domain=domain, **kwargs)
        commcare_user.save(**get_safe_write_kwargs())
        return commcare_user
    @property
    def filter_flag(self):
        """Report-filter category for this user type."""
        from corehq.apps.reports.models import HQUserType
        return HQUserType.REGISTERED
    @property
    def project(self):
        # The Domain object this mobile user belongs to.
        return Domain.get_by_name(self.domain)
    @property
    def username_in_report(self):
        """HTML-safe display name: raw username, plus full name if set."""
        def parts():
            yield u'%s' % html.escape(self.raw_username)
            if self.full_name:
                yield u' "%s"' % html.escape(self.full_name)
        return safestring.mark_safe(''.join(parts()))
    @classmethod
    def create_or_update_from_xform(cls, xform):
        """Create (or, on uuid conflict, update) a mobile user from a phone
        registration xform. Returns (user, created_flag). Username conflicts
        are resolved by appending a numeric suffix."""
        # if we have 1,000,000 users with the same name in a domain
        # then we have bigger problems then duplicate user accounts
        MAX_DUPLICATE_USERS = 1000000
        def create_or_update_safe(username, password, uuid, date, registering_phone_id, domain, user_data, **kwargs):
            # check for uuid conflicts, if one exists, respond with the already-created user
            conflicting_user = CommCareUser.get_by_user_id(uuid)
            # we need to check for username conflicts, other issues
            # and make sure we send the appropriate conflict response to the phone
            try:
                username = normalize_username(username, domain)
            except ValidationError:
                raise Exception("Username (%s) is invalid: valid characters include [a-z], "
                                "[0-9], period, underscore, and single quote" % username)
            if conflicting_user:
                # try to update. If there are username conflicts, we have to resolve them
                if conflicting_user.domain != domain:
                    raise Exception("Found a conflicting user in another domain. This is not allowed!")
                saved = False
                to_append = 2
                prefix, suffix = username.split("@")
                while not saved and to_append < MAX_DUPLICATE_USERS:
                    try:
                        conflicting_user.change_username(username)
                        conflicting_user.password = password
                        conflicting_user.date = date
                        conflicting_user.device_id = registering_phone_id
                        conflicting_user.user_data = user_data
                        conflicting_user.save()
                        saved = True
                    except CouchUser.Inconsistent:
                        # username taken; retry as '<prefix><n>@<suffix>'
                        username = "%(pref)s%(count)s@%(suff)s" % {
                                     "pref": prefix, "count": to_append,
                                     "suff": suffix}
                        to_append = to_append + 1
                if not saved:
                    raise Exception("There are over 1,000,000 users with that base name in your domain. REALLY?!? REALLY?!?!")
                return (conflicting_user, False)
            try:
                User.objects.get(username=username)
            except User.DoesNotExist:
                # Desired outcome
                pass
            else:
                # Come up with a suitable username
                prefix, suffix = username.split("@")
                username = get_unique_value(User.objects, "username", prefix, sep="", suffix="@%s" % suffix)
            couch_user = cls.create(domain, username, password,
                uuid=uuid,
                device_id=registering_phone_id,
                date=date,
                user_data=user_data
            )
            return (couch_user, True)
        # will raise TypeError if xform.form doesn't have all the necessary params
        return create_or_update_safe(
            domain=xform.domain,
            user_data=user_data_from_registration_form(xform),
            **dict([(arg, xform.form[arg]) for arg in (
                'username',
                'password',
                'uuid',
                'date',
                'registering_phone_id'
            )])
        )
    def is_commcare_user(self):
        # This class represents mobile workers.
        return True
    def is_web_user(self):
        # Mobile workers are never web users.
        return False
    def add_commcare_account(self, domain, device_id, user_data=None):
        """
        Adds a commcare account to this.
        """
        # A mobile user belongs to exactly one domain; re-initializing to a
        # different one is an error.
        if self.domain and self.domain != domain:
            raise self.Inconsistent("Tried to reinitialize commcare account to a different domain")
        self.domain = domain
        self.registering_device_id = device_id
        self.user_data = user_data or {}
        self.add_device_id(device_id=device_id)
    def add_device_id(self, device_id, default=False, **kwargs):
        """ Don't add phone devices if they already exist """
        self.device_ids = _add_to_list(self.device_ids, device_id, default)
def to_casexml_user(self):
    """Build the lightweight CaseXMLUser used for case XML / restore payloads."""
    user = CaseXMLUser(
        user_id=self.userID,
        username=self.raw_username,
        password=self.password,
        date_joined=self.date_joined,
        user_data=self.user_data,
        domain=self.domain,
    )

    # graft this user's owner-id lookup onto the CaseXMLUser instance
    def get_owner_ids():
        return self.get_owner_ids()
    user.get_owner_ids = get_owner_ids
    user._hq_user = self # don't tell anyone that we snuck this here
    return user
def get_forms(self, deleted=False, wrap=True):
    """Yield this user's XFormInstances (or bare doc ids when wrap=False).

    deleted=True fetches forms soft-deleted via retire() instead of live ones.
    """
    if deleted:
        view_name = 'users/deleted_forms_by_user'
    else:
        view_name = 'couchforms/by_user'

    # fetch ids only; docs are bulk-loaded via iter_docs below
    db = XFormInstance.get_db()
    doc_ids = [r['id'] for r in db.view(view_name,
        startkey=[self.user_id],
        endkey=[self.user_id, {}],
        reduce=False,
        include_docs=False,
    )]
    if wrap:
        for doc in iter_docs(db, doc_ids):
            yield XFormInstance.wrap(doc)
    else:
        for id in doc_ids:
            yield id
@property
def form_count(self):
    """Total number of forms this user has submitted (0 when none)."""
    reduced = XFormInstance.view('couchforms/by_user',
        startkey=[self.user_id],
        endkey=[self.user_id, {}],
        group_level=0,
    ).one()
    return reduced['value'] if reduced else 0
def get_cases(self, deleted=False, last_submitter=False, wrap=True):
    """Yield this user's CommCareCases (raw dicts when wrap=False).

    deleted=True fetches soft-deleted cases; last_submitter=True selects
    cases the user last submitted against rather than cases they own.
    """
    if deleted:
        view_name = 'users/deleted_cases_by_user'
    elif last_submitter:
        view_name = 'case/by_user'
    else:
        view_name = 'case/by_owner'

    db = CommCareCase.get_db()
    case_ids = [r["id"] for r in db.view(view_name,
        startkey=[self.user_id],
        endkey=[self.user_id, {}],
        reduce=False,
    )]
    for doc in iter_docs(db, case_ids):
        yield CommCareCase.wrap(doc) if wrap else doc
@property
def case_count(self):
    """Number of cases this user last submitted against (0 when none)."""
    reduced = CommCareCase.view('case/by_user',
        startkey=[self.user_id],
        endkey=[self.user_id, {}], group_level=0,
    ).one()
    return reduced['value'] if reduced else 0
def get_owner_ids(self):
    """Ids that may own this user's cases: the user itself plus their groups."""
    from corehq.apps.groups.models import Group
    return [self.user_id] + list(Group.by_user(self, wrap=False))
def retire(self):
    """Soft-delete this user along with their forms, cases and verified numbers.

    Appends DELETED_SUFFIX and stamps a shared '-deletion_id' on every doc so
    unretire() can later find and restore the same deletion batch. Also
    deletes the linked django auth user, if any.
    """
    suffix = DELETED_SUFFIX
    deletion_id = random_hex()
    # doc_type remains the same, since the views use base_doc instead
    if not self.base_doc.endswith(suffix):
        self.base_doc += suffix
        self['-deletion_id'] = deletion_id
    for form in self.get_forms():
        form.doc_type += suffix
        form['-deletion_id'] = deletion_id
        form.save()
    for case in self.get_cases():
        case.doc_type += suffix
        case['-deletion_id'] = deletion_id
        case.save()
    for phone_number in self.get_verified_numbers(True).values():
        phone_number.retire(deletion_id)
    try:
        django_user = self.get_django_user()
    except User.DoesNotExist:
        # no linked django account -- nothing to delete
        pass
    else:
        django_user.delete()
    self.save()
def unretire(self):
    """Reverse retire(): strip the deletion suffix from this user and their
    soft-deleted forms and cases."""
    def strip_suffix(value, suffix=DELETED_SUFFIX):
        return value[:-len(suffix)] if value.endswith(suffix) else value

    self.base_doc = strip_suffix(self.base_doc)
    for form in self.get_forms(deleted=True):
        form.doc_type = strip_suffix(form.doc_type)
        form.save()
    for case in self.get_cases(deleted=True):
        case.doc_type = strip_suffix(case.doc_type)
        case.save()
    self.save()
def transfer_to_domain(self, domain, app_id):
    """Move this user and all their forms and cases into another domain/app.

    Rewrites the username for the target domain and replaces the user's
    domain membership with a fresh one.
    """
    username = format_username(raw_username(self.username), domain)
    self.change_username(username)
    self.domain = domain
    for form in self.get_forms():
        form.domain = domain
        form.app_id = app_id
        form.save()
    for case in self.get_cases():
        case.domain = domain
        case.save()
    # membership in the old domain is discarded, not transferred
    self.domain_membership = DomainMembership(domain=domain)
    self.save()
def get_group_fixture(self):
    """Build the group fixture for this user's case-sharing groups."""
    sharing_groups = self.get_case_sharing_groups()
    return group_fixture(sharing_groups, self)
@memoized
def get_case_sharing_groups(self):
    """Groups this user belongs to that have case sharing enabled."""
    from corehq.apps.groups.models import Group
    return [g for g in Group.by_user(self) if g.case_sharing]
@classmethod
def cannot_share(cls, domain, limit=None, skip=0):
    """Users in `domain` who cannot share cases (case-sharing group count != 1).

    When `limit` is given, recursively fetches further pages so the caller
    still receives up to `limit` matching users even when some users in the
    current page were eligible sharers.
    """
    users_checked = list(cls.by_domain(domain, limit=limit, skip=skip))
    if not users_checked:
        # stop fetching when you come back with none
        return []
    users = [user for user in users_checked if len(user.get_case_sharing_groups()) != 1]
    if limit is not None:
        total = cls.total_by_domain(domain)
        max_limit = min(total - skip, limit)
        if len(users) < max_limit:
            # NOTE(review): new_limit subtracts the full page size rather than
            # the shortfall (max_limit - len(users)), so it can go non-positive
            # -- confirm this is the intended paging arithmetic.
            new_limit = max_limit - len(users_checked)
            new_skip = skip + len(users_checked)
            users.extend(cls.cannot_share(domain, new_limit, new_skip))
            return users
    return users
def get_group_ids(self):
    """Ids of the groups this user belongs to."""
    from corehq.apps.groups.models import Group
    group_ids = Group.by_user(self, wrap=False)
    return group_ids
def set_groups(self, group_ids):
    """Make this user's group membership exactly `group_ids`.

    Adds/removes the user from groups as needed and bulk-saves every
    group that was modified.
    """
    from corehq.apps.groups.models import Group
    wanted = set(group_ids)
    existing = set(self.get_group_ids())
    modified = []
    for group_id in wanted - existing:
        group = Group.get(group_id)
        group.add_user(self._id, save=False)
        modified.append(group)
    for group_id in existing - wanted:
        group = Group.get(group_id)
        group.remove_user(self._id, save=False)
        modified.append(group)
    Group.bulk_save(modified)
def get_time_zone(self):
    """Return the user's time zone string from user_data, or None if unset.

    user_data may be None (TypeError on subscription) or missing the
    "time_zone" key (KeyError); both cases gracefully yield None. The
    original blanket `except Exception` is narrowed so genuine bugs
    (e.g. attribute errors) are no longer silently swallowed.
    """
    try:
        return self.user_data["time_zone"]
    except (KeyError, TypeError):
        # user_data is None or has no "time_zone" entry
        return None
def get_language_code(self):
    """Return the user's language code from user_data, or None if unset.

    user_data may be None (TypeError) or missing "language_code" (KeyError);
    both yield None. The original blanket `except Exception` is narrowed so
    unrelated bugs are no longer silently swallowed.
    """
    try:
        return self.user_data["language_code"]
    except (KeyError, TypeError):
        # user_data is None or has no "language_code" entry
        return None
def __repr__(self):
    """Debug representation: ClassName(username='...')."""
    return "%s(username=%r)" % (self.__class__.__name__, self.username)
class OrgMembershipMixin(DocumentSchema):
    """Mixin storing a user's organization memberships and team assignments."""
    org_memberships = SchemaListProperty(OrgMembership)

    @property
    def organizations(self):
        """Names of every organization this user belongs to."""
        return [om.organization for om in self.org_memberships]

    def get_organizations(self):
        """Resolve membership names to Organization docs, dropping any that
        no longer exist."""
        from corehq.apps.orgs.models import Organization
        return filter(None, [Organization.get_by_name(org) for org in self.organizations])

    def is_member_of_org(self, org_name_or_model):
        """
        takes either a organization name or an organization object and returns whether the user is part of that org
        """
        # Accept either an Organization doc (use its .name) or a plain name
        # string -- getattr with a fallback replaces the old broad try/except.
        org = getattr(org_name_or_model, "name", org_name_or_model)
        return org in self.organizations

    def get_org_membership(self, org):
        """Return the OrgMembership for `org`, or None if not a member."""
        for om in self.org_memberships:
            if om.organization == org:
                return om
        return None

    def add_org_membership(self, org, **kwargs):
        """Add a membership in `org` (no-op if already a member).

        Raises OrgMembershipError when the organization does not exist.
        """
        from corehq.apps.orgs.models import Organization
        if self.get_org_membership(org):
            return
        organization = Organization.get_by_name(org, strict=True)
        if not organization:
            raise OrgMembershipError("Cannot add org membership -- Organization %s does not exist" % org)
        kwargs.pop("organization", None) # prevents next line from raising an error due to two organization values being given to OrgMembership
        self.org_memberships.append(OrgMembership(organization=org, **kwargs))

    def delete_org_membership(self, org, create_record=False):
        """Remove the membership for `org`.

        With create_record=True, saves and returns an OrgRemovalRecord for
        undo, raising OrgMembershipError if the user wasn't a member. With
        create_record=False a missing membership is silently ignored.
        """
        record = None
        for i, om in enumerate(self.org_memberships):
            if om.organization == org:
                if create_record:
                    record = OrgRemovalRecord(org_membership=om, user_id=self.user_id)
                del self.org_memberships[i]
                break
        if create_record:
            if record:
                record.save()
                return record
            else:
                raise OrgMembershipError("Cannot delete org membership -- Organization %s does not exist" % org)

    def is_org_admin(self, org):
        """True if the user is a member of `org` and flagged as its admin."""
        om = self.get_org_membership(org)
        return om and om.is_admin

    def is_member_of_team(self, org, team_id):
        """True if the user's membership in `org` includes `team_id`."""
        om = self.get_org_membership(org)
        return om and team_id in om.team_ids

    def add_to_team(self, org, team_id):
        """Record team membership; the team must exist and belong to `org`."""
        om = self.get_org_membership(org)
        if not om:
            raise OrgMembershipError("Cannot add team -- %s is not a member of the %s organization" %
                                     (self.username, org))
        from corehq.apps.orgs.models import Team
        team = Team.get(team_id)
        if not team or team.organization != org:
            raise OrgMembershipError("Cannot add team -- Team(%s) does not exist in organization %s" % (team_id, org))
        om.team_ids.append(team_id)

    def remove_from_team(self, org, team_id):
        """Drop `team_id` from the user's membership in `org` (no-op if absent)."""
        om = self.get_org_membership(org)
        if om:
            om.team_ids.remove(team_id)

    def set_org_admin(self, org):
        """Flag the user as an admin of `org`; must already be a member."""
        om = self.get_org_membership(org)
        if not om:
            raise OrgMembershipError("Cannot set admin -- %s is not a member of the %s organization" %
                                     (self.username, org))
        om.is_admin = True
class WebUser(CouchUser, MultiMembershipMixin, OrgMembershipMixin, CommCareMobileContactMixin):
    """A web (browser) user who may belong to multiple domains, orgs and teams."""
    #do sync and create still work?
    location_id = StringProperty()
    program_id = StringProperty()

    def sync_from_old_couch_user(self, old_couch_user):
        """Copy (normalized) domain memberships over from a legacy couch user doc."""
        super(WebUser, self).sync_from_old_couch_user(old_couch_user)
        for dm in old_couch_user.web_account.domain_memberships:
            dm.domain = normalize_domain_name(dm.domain)
            self.domain_memberships.append(dm)
            self.domains.append(dm.domain)

    def is_global_admin(self):
        # override this function to pass global admin rights off to django
        return self.is_superuser

    @classmethod
    def by_organization(cls, org, team_id=None):
        """Users in an org (optionally one team), with duplicate docs removed."""
        key = [org] if team_id is None else [org, team_id]
        users = cls.view("users/by_org_and_team",
            startkey=key,
            endkey=key + [{}],
            include_docs=True,
        ).all()
        # return a list of users with the duplicates removed
        return dict([(u.get_id, u) for u in users]).values()

    @classmethod
    def create(cls, domain, username, password, email=None, uuid='', date='', **kwargs):
        """Create and save a WebUser; when a domain is given, add a membership."""
        web_user = super(WebUser, cls).create(domain, username, password, email, uuid, date, **kwargs)
        if domain:
            web_user.add_domain_membership(domain, **kwargs)
        web_user.save()
        return web_user

    def is_commcare_user(self):
        return False

    def is_web_user(self):
        return True

    def get_email(self):
        # fall back to the username, which for web users is an email address
        return self.email or self.username

    def get_time_zone(self):
        """Time zone name for the user's current (or first) domain, else None."""
        from corehq.apps.reports import util as report_utils
        if hasattr(self, 'current_domain'):
            domain = self.current_domain
        elif len(self.domains) > 0:
            domain = self.domains[0]
        else:
            return None
        timezone = report_utils.get_timezone(self.user_id, domain)
        return timezone.zone

    def get_language_code(self):
        return self.language

    def get_teams(self, ids_only=False):
        """All teams across the user's org memberships.

        Returns Team docs, or just team ids when ids_only=True.
        """
        from corehq.apps.orgs.models import Team
        teams = []

        def get_valid_teams(team_ids):
            # skip dangling team ids instead of crashing on a missing doc
            team_db = Team.get_db()
            for t_id in team_ids:
                if team_db.doc_exist(t_id):
                    yield Team.get(t_id)
                else:
                    logging.info("Note: team %s does not exist for %s" % (t_id, self))

        for om in self.org_memberships:
            if not ids_only:
                teams.extend(list(get_valid_teams(om.team_ids)))
            else:
                teams.extend(om.team_ids)
        return teams

    def get_domains(self):
        """Domains from direct memberships plus those inherited via teams (deduped)."""
        domains = [dm.domain for dm in self.domain_memberships]
        for team in self.get_teams():
            team_domains = [dm.domain for dm in team.domain_memberships]
            for domain in team_domains:
                if domain not in domains:
                    domains.append(domain)
        return domains

    @memoized
    def has_permission(self, domain, permission, data=None):
        """True if the user has `permission` in `domain`, directly or via a team."""
        # is_admin is the same as having all the permissions set
        if (self.is_global_admin() and (domain is None or not domain_restricts_superusers(domain))):
            return True
        elif self.is_domain_admin(domain):
            return True

        # collect [membership, source-label] pairs: direct first, then teams
        dm_list = list()

        dm = self.get_domain_membership(domain)
        if dm:
            dm_list.append([dm, ''])

        for team in self.get_teams():
            if team.get_domain_membership(domain) and team.get_domain_membership(domain).role:
                dm_list.append([team.get_domain_membership(domain), '(' + team.name + ')'])

        #now find out which dm has the highest permissions
        if dm_list:
            role = self.total_domain_membership(dm_list, domain)
            dm = CustomDomainMembership(domain=domain, custom_role=role)
            return dm.has_permission(permission, data)
        else:
            return False

    @memoized
    def get_role(self, domain=None, include_teams=True, checking_global_admin=True):
        """
        Get the role object for this user

        Raises DomainMembershipError when the user has no membership (direct
        or team-derived) in the domain.
        """
        if domain is None:
            # default to current_domain for django templates
            domain = self.current_domain

        if checking_global_admin and self.is_global_admin():
            return AdminUserRole(domain=domain)
        if not include_teams:
            return super(WebUser, self).get_role(domain)

        dm_list = list()

        dm = self.get_domain_membership(domain)
        if dm:
            dm_list.append([dm, ''])

        for team in self.get_teams():
            if team.get_domain_membership(domain) and team.get_domain_membership(domain).role:
                dm_list.append([team.get_domain_membership(domain), ' (' + team.name + ')'])

        #now find out which dm has the highest permissions
        if dm_list:
            return self.total_domain_membership(dm_list, domain)
        else:
            raise DomainMembershipError()

    def total_domain_membership(self, domain_memberships, domain):
        """Combine several memberships into one UserRole holding the union of
        their permissions; the role name lists each contributing role/source."""
        #sort out the permissions
        total_permission = Permissions()
        if domain_memberships:
            for domain_membership, membership_source in domain_memberships:
                permission = domain_membership.permissions
                total_permission |= permission

            #set up a user role
            return UserRole(domain=domain, permissions=total_permission,
                name=', '.join(["%s %s" % (dm.role.name, ms) for dm, ms in domain_memberships if dm.role]))
    #set up a domain_membership
class FakeUser(WebUser):
    """Base for in-memory user types that must never be persisted."""

    def save(self, **kwargs):
        # These users don't exist in the database; refuse to write them.
        raise NotImplementedError("You aren't allowed to do that!")
class PublicUser(FakeUser):
    """
    Public users have read-only access to certain domains
    """
    domain_memberships = None  # replaced with a per-instance list in __init__

    def __init__(self, domain, **kwargs):
        super(PublicUser, self).__init__(**kwargs)
        self.domain = domain
        self.domains = [domain]
        # one synthetic, non-admin membership that can only view reports
        dm = CustomDomainMembership(domain=domain, is_admin=False)
        dm.set_permission('view_reports', True)
        self.domain_memberships = [dm]

    @memoized
    def get_role(self, domain=None, checking_global_admin=None):
        # a public user only ever has a role in its own single domain
        assert(domain == self.domain)
        return super(PublicUser, self).get_role(domain)

    def is_eula_signed(self):
        return True # hack for public domain so eula modal doesn't keep popping up

    def get_domains(self):
        return []
class InvalidUser(FakeUser):
    """Fake user that is a member of no domain whatsoever."""

    def is_member_of(self, domain_qs):
        # An invalid user never belongs to any domain.
        return False
#
# Django models go here
#
class Invitation(Document):
    """Base couch doc for an emailed invitation; subclasses implement delivery."""
    email = StringProperty()
    invited_by = StringProperty()  # user_id of the inviting CouchUser
    invited_on = DateTimeProperty()
    is_accepted = BooleanProperty(default=False)

    _inviter = None  # lazily cached CouchUser; see get_inviter()

    def get_inviter(self):
        """Return the inviting CouchUser, repairing invited_by if the id was stale."""
        if self._inviter is None:
            self._inviter = CouchUser.get_by_user_id(self.invited_by)
            # NOTE(review): assumes get_by_user_id never returns None here --
            # a missing inviter would raise AttributeError; confirm acceptable.
            if self._inviter.user_id != self.invited_by:
                # lookup resolved to a different canonical id; persist the fix
                self.invited_by = self._inviter.user_id
                self.save()
        return self._inviter

    def send_activation_email(self):
        raise NotImplementedError
class DomainInvitation(Invitation):
    """
    When we invite someone to a domain it gets stored here.
    """
    domain = StringProperty()
    role = StringProperty()
    doc_type = "Invitation"  # keep the base doc_type so existing docs/views still match

    def send_activation_email(self):
        """Email the invitee an acceptance link, CC'ing the inviter."""
        url = "http://%s%s" % (Site.objects.get_current().domain,
                               reverse("domain_accept_invitation", args=[self.domain, self.get_id]))
        params = {"domain": self.domain, "url": url, "inviter": self.get_inviter().formatted_name}
        text_content = render_to_string("domain/email/domain_invite.txt", params)
        html_content = render_to_string("domain/email/domain_invite.html", params)
        subject = 'Invitation from %s to join CommCareHQ' % self.get_inviter().formatted_name
        send_HTML_email(subject, self.email, html_content, text_content=text_content,
                        cc=[self.get_inviter().get_email()],
                        email_from=settings.DEFAULT_FROM_EMAIL)

    @classmethod
    def by_domain(cls, domain, is_active=True):
        """Open invitations for `domain`.

        NOTE(review): is_active is accepted but never used -- confirm whether
        the view should filter on it or the parameter should be dropped.
        """
        key = [domain]
        return cls.view("users/open_invitations_by_domain",
            reduce=False,
            startkey=key,
            endkey=key + [{}],
            include_docs=True,
        ).all()
class DomainRemovalRecord(DeleteRecord):
    """Undo-record saved when a user's domain membership is removed."""
    user_id = StringProperty()
    domain_membership = SchemaProperty(DomainMembership)

    def undo(self):
        """Re-attach the stored domain membership to the user."""
        user = WebUser.get_by_user_id(self.user_id)
        membership_kwargs = self.domain_membership._doc
        user.add_domain_membership(**membership_kwargs)
        user.save()
class OrgRemovalRecord(DeleteRecord):
    """Undo-record saved when a user's org membership is removed."""
    user_id = StringProperty()
    org_membership = SchemaProperty(OrgMembership)

    def undo(self):
        """Re-attach the stored org membership to the user."""
        user = WebUser.get_by_user_id(self.user_id)
        membership_doc = self.org_membership._doc
        # add_org_membership pops the duplicate "organization" kwarg itself
        user.add_org_membership(membership_doc["organization"], **membership_doc)
        user.save()
class UserCache(object):
    """Memoizes CouchUser lookups by user id for the lifetime of the cache."""

    def __init__(self):
        self.cache = {}

    def get(self, user_id):
        """Return the CouchUser for `user_id` (None for falsy ids), caching results."""
        if not user_id:
            return None
        try:
            return self.cache[user_id]
        except KeyError:
            user = CouchUser.get_by_user_id(user_id)
            self.cache[user_id] = user
            return user
from .signals import *
from corehq.apps.domain.models import Domain
|
{
"content_hash": "68c1add7fb6d4fbaf70139f8f1bbd1a6",
"timestamp": "",
"source": "github",
"line_count": 1968,
"max_line_length": 143,
"avg_line_length": 35.6885162601626,
"alnum_prop": 0.5896347974656511,
"repo_name": "gmimano/commcaretest",
"id": "076081ca2f3a822002ee52c5eff949be87e48895",
"size": "70235",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "corehq/apps/users/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ActionScript",
"bytes": "15950"
},
{
"name": "CSS",
"bytes": "282577"
},
{
"name": "JavaScript",
"bytes": "2731012"
},
{
"name": "Python",
"bytes": "4738450"
},
{
"name": "Shell",
"bytes": "22454"
}
],
"symlink_target": ""
}
|
"""
A simple description of flat fuel costs for the SWITCH-Pyomo model that
serves as an alternative to the more complex fuel_markets with tiered
supply curves. This is mutually exclusive with the fuel_markets module.
SYNOPSIS
>>> from switch_mod.utilities import define_AbstractModel
>>> model = define_AbstractModel(
... 'timescales', 'load_zones', 'financials', 'fuels', 'gen_tech',
... 'project.build', 'project.dispatch', 'fuel_cost')
>>> instance = model.load_inputs(inputs_dir='test_dat')
"""
import os
from pyomo.environ import *
def define_components(mod):
    """
    Augments a Pyomo abstract model object with sets and parameters to
    describe simple fuel costs. Unless otherwise stated, each set and
    parameter is mandatory. Unless otherwise specified, all dollar
    values are real dollars in BASE_YEAR.
    FUEL_AVAILABILITY is a set that describes fuel availability. Each
    element of the set is (load_zone, fuel, period).
    fuel_cost[(lz, f, p) in FUEL_AVAILABILITY] describes flat fuel costs
    for each supply of fuel. Costs can vary by load zone and period.
    PROJ_FUEL_DISPATCH_POINTS_UNAVAILABLE is a subset of
    PROJ_FUEL_DISPATCH_POINTS that describes which points don't have fuel
    available.
    Enforce_Fuel_Availability[(proj, t) in
    PROJ_FUEL_DISPATCH_POINTS_UNAVAILABLE] is a constraint that restricts
    ProjFuelUseRate to 0 for in load zones and periods where the
    projects' fuel is unavailable.
    Fuel_Costs_TP[t in TIMEPOINTS] is an expression that summarizes fuel
    costs for the objective function.
    """
    mod.FUEL_AVAILABILITY = Set(
        dimen=3,
        # each member must be a valid (load_zone, fuel, period) triple
        validate=lambda m, lz, f, p: (
            lz in m.LOAD_ZONES and
            f in m.FUELS and
            p in m.PERIODS))
    mod.fuel_cost = Param(
        mod.FUEL_AVAILABILITY,
        within=PositiveReals)
    mod.min_data_check('FUEL_AVAILABILITY', 'fuel_cost')
    mod.PROJ_FUEL_DISPATCH_POINTS_UNAVAILABLE = Set(
        initialize=mod.PROJ_FUEL_DISPATCH_POINTS,
        # keep only dispatch points whose fuel has no cost entry for that
        # load zone and period
        filter=lambda m, pr, t, f: (
            (m.proj_load_zone[pr], f, m.tp_period[t])
            not in m.FUEL_AVAILABILITY))
    mod.Enforce_Fuel_Availability = Constraint(
        mod.PROJ_FUEL_DISPATCH_POINTS_UNAVAILABLE,
        rule=lambda m, pr, t, f: m.ProjFuelUseRate[pr, t, f] == 0)
    # Summarize total fuel costs in each timepoint for the objective function
    mod.Fuel_Costs_TP = Expression(
        mod.TIMEPOINTS,
        rule=lambda m, t: sum(
            m.ProjFuelUseRate[proj, t2, f]
            * m.fuel_cost[(m.proj_load_zone[proj], f, m.tp_period[t2])]
            for (proj, t2, f) in m.PROJ_FUEL_DISPATCH_POINTS
            if((t2 == t) and (
                (m.proj_load_zone[proj], f, m.tp_period[t2]) in
                m.FUEL_AVAILABILITY))))
    mod.cost_components_tp.append('Fuel_Costs_TP')
def load_inputs(mod, switch_data, inputs_dir):
    """
    Import simple fuel cost data. The following files are expected in
    the input directory:
    fuel_cost.tab
    load_zone, fuel, period, fuel_cost
    """
    # the first three columns index FUEL_AVAILABILITY; the last fills fuel_cost
    switch_data.load(
        filename=os.path.join(inputs_dir, 'fuel_cost.tab'),
        select=('load_zone', 'fuel', 'period', 'fuel_cost'),
        index=mod.FUEL_AVAILABILITY,
        param=[mod.fuel_cost])
|
{
"content_hash": "bfeb680eb8a915abba0c5905221d29f0",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 77,
"avg_line_length": 34.322916666666664,
"alnum_prop": 0.6558421851289833,
"repo_name": "bmaluenda/SWITCH-Pyomo-Chile",
"id": "aab538651d64a7838182de8c7cb71062071379e3",
"size": "3432",
"binary": false,
"copies": "1",
"ref": "refs/heads/Chile",
"path": "switch_mod/fuel_cost.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "358251"
},
{
"name": "Shell",
"bytes": "33876"
}
],
"symlink_target": ""
}
|
import RPi.GPIO as GPIO  # Raspberry Pi GPIO access
import time              # for the timed run

# Motor driver pins (BCM numbering): left motor on 7/8, right motor on 9/10.
MOTOR_PINS = (7, 8, 9, 10)

# Set the GPIO modes
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)

# Configure every motor pin as an output...
for pin in MOTOR_PINS:
    GPIO.setup(pin, GPIO.OUT)

# ...and start with all motors switched off.
for pin in MOTOR_PINS:
    GPIO.output(pin, 0)

# Drive both motors forwards: right motor (9 low, 10 high),
# left motor (7 low, 8 high).
GPIO.output(9, 0)
GPIO.output(10, 1)
GPIO.output(7, 0)
GPIO.output(8, 1)

# Let the robot run for one second.
time.sleep(1)

# Reset the GPIO pins (this also turns the motors off).
GPIO.cleanup()
|
{
"content_hash": "342cdf09e8d0cb149b7c929fca63fd96",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 50,
"avg_line_length": 19.125,
"alnum_prop": 0.7189542483660131,
"repo_name": "CamJam-EduKit/EduKit3",
"id": "54d54f28a0fdf69b2047c98da60bcf93ed489d89",
"size": "674",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CamJam Edukit 3 - RPi.GPIO/Code/3-motors.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "29463"
}
],
"symlink_target": ""
}
|
import sys
import os.path
import unittest
#sys.path.append
this_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append( os.path.join(os.path.dirname(this_dir),'bin') )
from cocagne.paxos.essential import ProposalID
from cocagne.paxos import essential
import test_essential
def PID(proposalID):
    """Convert a Java-style proposal id to the test-suite PID, passing None through."""
    if proposalID is None:
        return None
    return test_essential.PID(proposalID.getNumber(), proposalID.getUID())
class MessengerAdapter (object):
    """Bridges Java-style camelCase messenger callbacks to the snake_case
    python test API, converting proposal ids with PID() along the way."""
    def sendPrepare(self, proposalID):
        self.send_prepare(PID(proposalID))

    def sendPromise(self, proposerUID, proposalID, previousID, acceptedValue):
        self.send_promise(proposerUID, PID(proposalID), PID(previousID), acceptedValue)

    def sendAccept(self, proposalID, proposalValue):
        self.send_accept(PID(proposalID), proposalValue)

    def sendAccepted(self, proposalID, acceptedValue):
        self.send_accepted(PID(proposalID), acceptedValue)

    def onResolution(self, proposalID, value):
        self.on_resolution(PID(proposalID), value)
class EssentialMessengerAdapter(MessengerAdapter, essential.EssentialMessenger, test_essential.EssentialMessenger):
    # Concrete messenger combining the camelCase adapter with both the
    # implementation and test messenger mixins.
    pass
class ProposerAdapter(object):
    """Exposes the snake_case proposer API expected by the python tests on
    top of a Java-style (getter-based) proposer implementation."""
    @property
    def proposer_uid(self):
        return self.getProposerUID()

    @property
    def quorum_size(self):
        return self.getQuorumSize()

    @property
    def proposed_value(self):
        return self.getProposedValue()

    @property
    def proposal_id(self):
        return PID(self.getProposalID())

    @property
    def last_accepted_id(self):
        return PID(self.getLastAcceptedID())

    def set_proposal(self, value):
        self.setProposal(value)

    def recv_promise(self, from_uid, proposal_id, prev_accepted_id, prev_accepted_value):
        # convert test-side PIDs back into implementation ProposalIDs
        if prev_accepted_id is not None:
            prev_accepted_id = essential.ProposalID(prev_accepted_id.number, prev_accepted_id.uid)
        self.receivePromise(from_uid, essential.ProposalID(proposal_id.number, proposal_id.uid),
                            prev_accepted_id, prev_accepted_value)
class AcceptorAdapter(object):
    """snake_case acceptor API adapter over a Java-style acceptor."""
    @property
    def promised_id(self):
        return PID(self.getPromisedID())

    @property
    def accepted_id(self):
        return PID(self.getAcceptedID())

    @property
    def accepted_value(self):
        return self.getAcceptedValue()

    def recv_prepare(self, from_uid, proposal_id):
        self.receivePrepare(from_uid, essential.ProposalID(proposal_id.number, proposal_id.uid))

    def recv_accept_request(self, from_uid, proposal_id, value):
        self.receiveAcceptRequest(from_uid, essential.ProposalID(proposal_id.number, proposal_id.uid), value)
class LearnerAdapter(object):
    """snake_case learner API adapter over a Java-style learner."""
    @property
    def quorum_size(self):
        return self.getQuorumSize()

    @property
    def complete(self):
        return self.isComplete()

    @property
    def final_value(self):
        return self.getFinalValue()

    @property
    def final_proposal_id(self):
        return PID(self.getFinalProposalID())

    def recv_accepted(self, from_uid, proposal_id, accepted_value):
        self.receiveAccepted(from_uid,
                             essential.ProposalID(proposal_id.number, proposal_id.uid),
                             accepted_value)
class EssentialProposerAdapter(essential.EssentialProposerImpl, ProposerAdapter):
    # Concrete proposer: implementation class plus camelCase->snake_case adapter.
    pass
class EssentialAcceptorAdapter(essential.EssentialAcceptorImpl, AcceptorAdapter):
    # Concrete acceptor: implementation class plus camelCase->snake_case adapter.
    pass
class EssentialLearnerAdapter(essential.EssentialLearnerImpl, LearnerAdapter):
    # Concrete learner: implementation class plus camelCase->snake_case adapter.
    pass
class EssentialProposerTester(test_essential.EssentialProposerTests, EssentialMessengerAdapter, unittest.TestCase):
    """Runs the shared proposer test suite against the adapted implementation."""
    def __init__(self, test_name):
        unittest.TestCase.__init__(self, test_name)

    def proposer_factory(self, messenger, uid, quorum_size):
        return EssentialProposerAdapter(messenger, uid, quorum_size)
class EssentialAcceptorTester(test_essential.EssentialAcceptorTests, EssentialMessengerAdapter, unittest.TestCase):
    """Runs the shared acceptor test suite against the adapted implementation."""
    def __init__(self, test_name):
        unittest.TestCase.__init__(self, test_name)

    def acceptor_factory(self, messenger, uid, quorum_size):
        # NOTE(review): uid and quorum_size are dropped here, unlike the other
        # factories -- presumably the acceptor ctor only takes a messenger; confirm.
        return EssentialAcceptorAdapter(messenger)
class EssentialLearnerTester(test_essential.EssentialLearnerTests, EssentialMessengerAdapter, unittest.TestCase):
    """Runs the shared learner test suite against the adapted implementation."""
    def __init__(self, test_name):
        unittest.TestCase.__init__(self, test_name)

    def learner_factory(self, messenger, uid, quorum_size):
        return EssentialLearnerAdapter(messenger, quorum_size)
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "d6274555585888811f5af2ad922372fc",
"timestamp": "",
"source": "github",
"line_count": 161,
"max_line_length": 115,
"avg_line_length": 28.919254658385093,
"alnum_prop": 0.6935137457044673,
"repo_name": "cocagne/paxos",
"id": "4f7375408cec8553b3630fdb8c9882a799861a42",
"size": "4679",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/java_test_essential.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "25894"
},
{
"name": "Python",
"bytes": "95573"
}
],
"symlink_target": ""
}
|
from _common import *
from ..generators.gen import make_coll, generate, get_dims
from ..generators import gen as generator
'''
some global definitions
'''
NEPOCH = 50      # epochs per fit_generator call
VERSION = 4      # model version tag baked into _APOSTLE
MODELDIR = environ.get('MODELDIR', 'models/') + '/particles/'
BASEDIR = environ['BASEDIR']  # required env var: where partitioned .npy data lives
OPTIMIZER = 'Adam'            # keras optimizer class name, resolved via getattr
_APOSTLE = None               # config identifier; set by instantiate()
# generator options shared by every training/validation/test generator
train_opts = {
    'learn_mass' : True,
    'learn_pt' : True,
}
# must be called!
def instantiate(trunc=4, limit=50):
    """Configure the generators, create the model directory, and load data.

    trunc: number of particle features kept per particle;
    limit: max particles per jet. Returns (data, dims) where data is the
    [top, qcd] collection list and dims comes from get_dims(top).
    Must be called before any other function in this module (sets _APOSTLE).
    """
    global _APOSTLE
    generator.truncate = trunc
    config.limit = limit
    _APOSTLE = 'v%s_trunc%i_limit%i'%(str(VERSION), generator.truncate, config.limit)
    system('mkdir -p %s/%s/'%(MODELDIR,_APOSTLE))
    # snapshot the training script and this library for reproducibility
    system('cp -v %s %s/%s/trainer.py'%(sys.argv[0], MODELDIR, _APOSTLE))
    system('cp -v %s %s/%s/lib.py'%(__file__.replace('.pyc','.py'), MODELDIR, _APOSTLE))

    # instantiate data loaders
    top = make_coll(BASEDIR + '/PARTITION/Top_*_CATEGORY.npy')
    qcd = make_coll(BASEDIR + '/PARTITION/QCD_*_CATEGORY.npy')
    data = [top, qcd]
    dims = get_dims(top)

    # record the generator settings so later inference can restore them
    with open('%s/%s/setup.py'%(MODELDIR, _APOSTLE),'w') as fsetup:
        fsetup.write('''
from subtlenet import config
from subtlenet.generators import gen as generator
config.limit = %i
generator.truncate = %i
'''%(config.limit, generator.truncate))

    return data, dims
'''
first build the classifier!
'''
# set up data
def setup_data(data):
    """Build train/validation/test generators for plain classifier training."""
    opts = dict(train_opts)
    return {
        'train': generate(data, partition='train', batch=500, **opts),
        'validation': generate(data, partition='validate', batch=2000, **opts),
        'test': generate(data, partition='test', batch=10, **opts),
    }
def setup_adv_data(data):
    """Build train/validation/test generators for adversarial (mass-decorrelated)
    training."""
    opts = {'decorr_mass': True}
    opts.update(train_opts)
    return {
        'train': generate(data, partition='train', batch=1000, **opts),
        'validation': generate(data, partition='validate', batch=2000, **opts),
        'test': generate(data, partition='test', batch=10, **opts),
    }
# this is purely a discriminatory classifier
def build_classifier(dims):
    """Build and compile the particle-level discriminator.

    dims: shape tuple where dims[1] is particles per jet and dims[2] is
    features per particle (only those two entries are used here).
    Inputs are (particles, mass, pt); output is a softmax over truth classes.
    """
    input_particles = Input(shape=(dims[1], dims[2]), name='input_particles')
    input_mass = Input(shape=(1,), name='input_mass')
    input_pt = Input(shape=(1,), name='input_pt')
    inputs = [input_particles, input_mass, input_pt]

    # now build the particle network: conv stack -> LSTM -> dense embedding
    h = BatchNormalization(momentum=0.6)(input_particles)
    h = Conv1D(32, 2, activation='relu', kernel_initializer='lecun_uniform', padding='same')(h)
    h = BatchNormalization(momentum=0.6)(h)
    h = Conv1D(16, 4, activation='relu', kernel_initializer='lecun_uniform', padding='same')(h)
    h = BatchNormalization(momentum=0.6)(h)
    h = CuDNNLSTM(100)(h)
    h = BatchNormalization(momentum=0.6)(h)
    h = Dense(100, activation='relu', kernel_initializer='lecun_uniform')(h)
    particles_final = BatchNormalization(momentum=0.6)(h)

    # merge everything: particle embedding plus scalar mass and pt
    to_merge = [particles_final, input_mass, input_pt]
    h = concatenate(to_merge)

    for i in xrange(1,5):
        h = Dense(50, activation='tanh')(h)
#        if i%2:
#            h = Dropout(0.1)(h)
        h = BatchNormalization(momentum=0.6)(h)

    y_hat = Dense(config.n_truth, activation='softmax', name='y_hat')(h)
    classifier = Model(inputs=inputs, outputs=[y_hat])
    #classifier.compile(optimizer=Adam(lr=0.0002),
    classifier.compile(optimizer=getattr(keras_objects, OPTIMIZER)(lr=0.0005),
                       loss='categorical_crossentropy',
                       metrics=['accuracy'])

    print '########### CLASSIFIER ############'
    classifier.summary()
    print '###################################'

    return classifier
def build_adversary(clf, loss, scale, w_clf, w_adv):
    """Wrap classifier `clf` with an adversary that predicts kinematics from
    its output.

    loss: adversary loss name; scale: adversary scale passed to Adversary;
    w_clf / w_adv: loss weights for the classifier vs each adversary head.
    """
    if loss == 'mean_squared_error':
        # regression adversary uses a single target rather than binned outputs
        config.n_decorr_bins = 1
    y_hat = clf.outputs[0]
    inputs= clf.inputs
    kin_hats = Adversary(config.n_decorr_bins, n_outputs=1, scale=scale)(y_hat)
    adversary = Model(inputs=inputs,
                      outputs=[y_hat]+kin_hats)
    adversary.compile(optimizer=getattr(keras_objects, OPTIMIZER)(lr=0.00025),
                      loss=['categorical_crossentropy']+[loss for _ in kin_hats],
                      loss_weights=[w_clf]+[w_adv for _ in kin_hats])

    print '########### ADVERSARY ############'
    adversary.summary()
    print '###################################'

    return adversary
# train any model
def train(model, name, train_gen, validation_gen, save_clf_params=None):
    """Fit `model` with best-checkpoint callbacks and save the result.

    save_clf_params, when given, is the kwargs for PartialModelCheckpoint
    and must contain 'partial_model' -- the classifier submodel that is
    checkpointed (and finally saved) instead of the full model.
    """
    if save_clf_params is not None:
        callbacks = [PartialModelCheckpoint(filepath='%s/%s/%s_clf_best.h5'%(MODELDIR,_APOSTLE,name),
                                            save_best_only=True, verbose=True,
                                            **save_clf_params)]
        save_clf = save_clf_params['partial_model']
    else:
        save_clf = model
        callbacks = []
    callbacks += [ModelCheckpoint('%s/%s/%s_best.h5'%(MODELDIR,_APOSTLE,name),
                                  save_best_only=True, verbose=True)]

    def save_classifier(name_=name, model_=save_clf):
        model_.save('%s/%s/%s.h5'%(MODELDIR,_APOSTLE,name_))

    def save_and_exit(signal=None, frame=None):
        # SIGINT handler: persist the current weights before dying
        save_classifier()
        exit(1)
    signal.signal(signal.SIGINT, save_and_exit)

    model.fit_generator(train_gen,
                        steps_per_epoch=3000,
                        epochs=NEPOCH,
                        validation_data=validation_gen,
                        validation_steps=2000,
                        callbacks = callbacks,
                       )
    save_classifier()
def infer(modelh5, name):
    """Load a saved model and record its per-event score on the test partition.

    Scores are attached to the collection under `name` via coll.infer.
    """
    model = load_model(modelh5,
                       custom_objects={'DenseBroadcast':DenseBroadcast,
                                       'GradReverseLayer':GradReverseLayer})
    model.summary()

    coll = generator.make_coll(BASEDIR + '/PARTITION/*_CATEGORY.npy')

    # normalize mass and pT the same way the training generator does
    msd_norm_factor = 1. / config.max_mass
    pt_norm_factor = 1. / (config.max_pt - config.min_pt)
    msd_index = config.gen_singletons['msd']
    pt_index = config.gen_singletons['pt']

    def predict_t(data):
        msd = data['singletons'][:,msd_index] * msd_norm_factor
        pt = (data['singletons'][:,pt_index] - config.min_pt) * pt_norm_factor
        if msd.shape[0] > 0:
            particles = data['particles'][:,:config.limit,:generator.truncate]
            # score = probability of the last truth class
            r_t = model.predict([particles,msd,pt])[:,config.n_truth-1]
        else:
            # empty batch: emit an empty score array of matching length
            r_t = np.empty((0,))
        return r_t

    print 'loaded from',modelh5,
    print 'saving to',name

    coll.infer(['singletons','particles'], f=predict_t, name=name, partition='test')
|
{
"content_hash": "5e4288783d1e416086f9040c7b7fbb77",
"timestamp": "",
"source": "github",
"line_count": 192,
"max_line_length": 102,
"avg_line_length": 34.770833333333336,
"alnum_prop": 0.5895745955662073,
"repo_name": "sidnarayanan/BAdNet",
"id": "cf400d132c1383ea09d32639f4d6a13e2dbf567d",
"size": "6702",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "train/gen/adv/models/particles/v4_Adam_trunc7_limit100/lib.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "326584"
},
{
"name": "Shell",
"bytes": "900"
}
],
"symlink_target": ""
}
|
import os
import re
from wic import msger
from wic.pluginbase import SourcePlugin
from wic.utils.oe.misc import *
class BootimgPartitionPlugin(SourcePlugin):
    """Wic source plugin that populates a boot partition from the files
    listed in the bitbake IMAGE_BOOT_FILES variable."""
    name = 'bootimg-partition'

    @classmethod
    def do_install_disk(self, disk, disk_name, cr, workdir, oe_builddir,
                        bootimg_dir, kernel_dir, native_sysroot):
        """
        Called after all partitions have been prepared and assembled into a
        disk image. Do nothing.
        """
        pass

    @classmethod
    def do_configure_partition(self, part, source_params, cr, cr_workdir,
                               oe_builddir, bootimg_dir, kernel_dir,
                               native_sysroot):
        """
        Called before do_prepare_partition(). Possibly prepare
        configuration files of some sort.
        """
        pass

    @classmethod
    def do_prepare_partition(self, part, source_params, cr, cr_workdir,
                             oe_builddir, bootimg_dir, kernel_dir,
                             rootfs_dir, native_sysroot):
        """
        Called to do the actual content population for a partition i.e. it
        'prepares' the partition to be incorporated into the image.
        In this case, does the following:
        - sets up a vfat partition
        - copies all files listed in IMAGE_BOOT_FILES variable
        """
        hdddir = "%s/boot" % cr_workdir
        # start from a clean workdir so stale files never leak into the image
        rm_cmd = "rm -rf %s" % cr_workdir
        exec_cmd(rm_cmd)

        install_cmd = "install -d %s" % hdddir
        exec_cmd(install_cmd)

        if not bootimg_dir:
            # fall back to the bitbake deploy dir when no source dir is given
            bootimg_dir = get_bitbake_var("DEPLOY_DIR_IMAGE")
            if not bootimg_dir:
                msger.error("Couldn't find DEPLOY_DIR_IMAGE, exiting\n")

        msger.debug('Bootimg dir: %s' % bootimg_dir)

        boot_files = get_bitbake_var("IMAGE_BOOT_FILES")

        if not boot_files:
            msger.error('No boot files defined, IMAGE_BOOT_FILES unset')

        msger.debug('Boot files: %s' % boot_files)

        # list of tuples (src_name, dst_name)
        deploy_files = []
        for src_entry in re.findall(r'[\w;\-\./]+', boot_files):
            if ';' in src_entry:
                # "src;dst" syntax renames the file as it is deployed
                dst_entry = tuple(src_entry.split(';'))
                if not dst_entry[0] or not dst_entry[1]:
                    msger.error('Malformed boot file entry: %s' % (src_entry))
            else:
                dst_entry = (src_entry, src_entry)

            msger.debug('Destination entry: %r' % (dst_entry,))
            deploy_files.append(dst_entry)

        for deploy_entry in deploy_files:
            src, dst = deploy_entry
            src_path = os.path.join(bootimg_dir, src)
            dst_path = os.path.join(hdddir, dst)

            msger.debug('Install %s as %s' % (os.path.basename(src_path),
                                              dst_path))

            # install -D creates any missing parent directories for dst
            install_cmd = "install -m 0644 -D %s %s" \
                          % (src_path, dst_path)
            exec_cmd(install_cmd)

        msger.debug('Prepare boot partition using rootfs in %s' % (hdddir))
        part.prepare_rootfs(cr_workdir, oe_builddir, hdddir,
                            native_sysroot)
|
{
"content_hash": "32ccf2f3f00f177c56d2a19215f41606",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 78,
"avg_line_length": 35.422222222222224,
"alnum_prop": 0.5486198243412798,
"repo_name": "wwright2/dcim3-angstrom1",
"id": "564118ad8ba8f53ae5dfe298825be3a4c10e874a",
"size": "4191",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sources/openembedded-core/scripts/lib/wic/plugins/source/bootimg-partition.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "73541"
},
{
"name": "Awk",
"bytes": "286"
},
{
"name": "Batchfile",
"bytes": "19960"
},
{
"name": "BitBake",
"bytes": "2875212"
},
{
"name": "BlitzBasic",
"bytes": "6367"
},
{
"name": "C",
"bytes": "1598095"
},
{
"name": "C++",
"bytes": "2198121"
},
{
"name": "CMake",
"bytes": "7277"
},
{
"name": "CSS",
"bytes": "28636"
},
{
"name": "Groff",
"bytes": "502999"
},
{
"name": "HTML",
"bytes": "210823"
},
{
"name": "JavaScript",
"bytes": "23100"
},
{
"name": "Lua",
"bytes": "1194"
},
{
"name": "Makefile",
"bytes": "32539"
},
{
"name": "Nginx",
"bytes": "2744"
},
{
"name": "PHP",
"bytes": "829048"
},
{
"name": "Pascal",
"bytes": "17352"
},
{
"name": "Perl",
"bytes": "66339"
},
{
"name": "Python",
"bytes": "3672452"
},
{
"name": "QMake",
"bytes": "165"
},
{
"name": "Ruby",
"bytes": "10695"
},
{
"name": "Shell",
"bytes": "820076"
},
{
"name": "SourcePawn",
"bytes": "259600"
},
{
"name": "Tcl",
"bytes": "4897"
},
{
"name": "VimL",
"bytes": "8483"
},
{
"name": "XSLT",
"bytes": "9089"
}
],
"symlink_target": ""
}
|
'''
Run this script from the root of the repository to update all translations from
transifex.
It will do the following automatically:
- fetch all translations using the tx tool
- post-process them into valid and committable format
- remove invalid control characters
- remove location tags (makes diffs less noisy)
TODO:
- auto-add new translations to the build system according to the translation process
'''
from __future__ import division, print_function
import subprocess
import re
import sys
import os
import io
import xml.etree.ElementTree as ET
# Name of transifex tool
TX = 'tx'
# Name of source language file
SOURCE_LANG = 'ulm_en.ts'
# Directory with locale files
LOCALE_DIR = 'src/qt/locale'
# Minimum number of messages for translation to be considered at all
MIN_NUM_MESSAGES = 10
def check_at_repository_root():
    """Abort with exit code 1 unless the working directory has a .git dir."""
    if os.path.exists('.git'):
        return
    print('No .git directory found')
    print('Execute this script at the root of the repository', file=sys.stderr)
    exit(1)
def fetch_all_translations():
    """Pull all translations via the Transifex tool; abort on failure."""
    exit_code = subprocess.call([TX, 'pull', '-f', '-a'])
    if exit_code:
        print('Error while fetching translations', file=sys.stderr)
        exit(1)
def find_format_specifiers(s):
    '''Find all format specifiers in a string.

    Returns the list of characters immediately following each '%'.
    Raises IndexError when the string ends in a dangling '%', so the
    caller (check_format_specifiers) can report a parse error.
    '''
    pos = 0
    specifiers = []
    while True:
        percent = s.find('%', pos)
        if percent < 0:
            break
        # BUGFIX: previously a bare `except:` here swallowed the
        # IndexError for a trailing '%' (just printing a message), which
        # made the caller's `except IndexError` handler unreachable and
        # silently produced a truncated specifier list. Let it propagate.
        specifiers.append(s[percent + 1])
        pos = percent + 2
    return specifiers
def split_format_specifiers(specifiers):
    '''Split format specifiers between numeric (Qt) and others (strprintf)'''
    qt_digits = set('123456789')
    numeric = [spec for spec in specifiers if spec in qt_digits]
    other = [spec for spec in specifiers if spec not in qt_digits]
    # numeric (Qt) can be present in any order, others (strprintf) must be in specified order
    return set(numeric), other
def sanitize_string(s):
    '''Sanitize string for printing'''
    # Collapse newlines to spaces so messages stay on one line.
    return ' '.join(s.split('\n'))
def check_format_specifiers(source, translation, errors, numerus):
    """Verify a translation uses the same format specifiers as its source.

    Appends a human-readable message to *errors* and returns False on a
    parse failure or mismatch; returns True when the translation is usable.
    """
    source_f = split_format_specifiers(find_format_specifiers(source))
    # assert that no source messages contain both Qt and strprintf format specifiers
    # if this fails, go change the source as this is hacky and confusing!
    #assert(not(source_f[0] and source_f[1]))
    try:
        translation_f = split_format_specifiers(find_format_specifiers(translation))
    except IndexError:
        errors.append("Parse error in translation for '%s': '%s'" % (sanitize_string(source), sanitize_string(translation)))
        return False
    if source_f == translation_f:
        return True
    # Allow numerus translations to omit %n specifier (usually when it only has one possible value)
    if numerus and source_f == (set(), ['n']) and translation_f == (set(), []) and translation.find('%') == -1:
        return True
    errors.append("Mismatch between '%s' and '%s'" % (sanitize_string(source), sanitize_string(translation)))
    return False
def all_ts_files(suffix=''):
    """Yield (filename, filepath) for each translation file in LOCALE_DIR.

    Only files matching '.ts'+suffix are yielded; the source language file
    is skipped. Any provided suffix is stripped from the yielded name.
    """
    wanted_ext = '.ts' + suffix
    skip_name = SOURCE_LANG + suffix
    for entry in os.listdir(LOCALE_DIR):
        # process only language files, and do not process source language
        if not entry.endswith(wanted_ext) or entry == skip_name:
            continue
        if suffix:  # remove provided suffix
            entry = entry[0:-len(suffix)]
        yield entry, os.path.join(LOCALE_DIR, entry)
# Matches ASCII control bytes that are invalid in XML 1.0 — everything
# below 0x20 except newline (\x0a) and carriage return (\x0d).
FIX_RE = re.compile(b'[\x00-\x09\x0b\x0c\x0e-\x1f]')
def remove_invalid_characters(s):
    '''Remove invalid characters from translation string'''
    cleaned = FIX_RE.sub(b'', s)
    return cleaned
# Override cdata escape function to make our output match Qt's (optional, just for cleaner diffs for
# comparison, disable by default)
_orig_escape_cdata = None
def escape_cdata(text):
    """Apply ElementTree's default escaping, then Qt-style quote entities."""
    escaped = _orig_escape_cdata(text)
    for char, entity in (("'", '&apos;'), ('"', '&quot;')):
        escaped = escaped.replace(char, entity)
    return escaped
def postprocess_translations(reduce_diff_hacks=False):
    """Check and clean up all fetched .ts translation files in place.

    For each translation file: strips invalid control characters, validates
    every translation's format specifiers against the source string (broken
    ones are cleared and marked unfinished), removes location tags and
    unfinished messages, drops files with fewer than MIN_NUM_MESSAGES
    messages, and writes the cleaned tree back.

    Args:
        reduce_diff_hacks: when True, patch ElementTree's cdata escaping
            and tweak the serialized output to better match Qt's formatting
            (cleaner diffs only; off by default).

    Returns:
        True if any format-specifier errors were found, else False.
    """
    print('Checking and postprocessing...')
    if reduce_diff_hacks:
        # Swap in the Qt-style escape function, keeping the original so
        # escape_cdata can delegate to it.
        global _orig_escape_cdata
        _orig_escape_cdata = ET._escape_cdata
        ET._escape_cdata = escape_cdata
    # Move every file aside first; the cleaned version is written back under
    # the original name (files that fail the size check simply stay .orig).
    for (filename,filepath) in all_ts_files():
        os.rename(filepath, filepath+'.orig')
    have_errors = False
    for (filename,filepath) in all_ts_files('.orig'):
        # pre-fixups to cope with transifex output
        parser = ET.XMLParser(encoding='utf-8') # need to override encoding because 'utf8' is not understood only 'utf-8'
        with open(filepath + '.orig', 'rb') as f:
            data = f.read()
        # remove control characters; this must be done over the entire file otherwise the XML parser will fail
        data = remove_invalid_characters(data)
        tree = ET.parse(io.BytesIO(data), parser=parser)
        # iterate over all messages in file
        root = tree.getroot()
        for context in root.findall('context'):
            for message in context.findall('message'):
                numerus = message.get('numerus') == 'yes'
                source = message.find('source').text
                translation_node = message.find('translation')
                # pick all numerusforms
                if numerus:
                    translations = [i.text for i in translation_node.findall('numerusform')]
                else:
                    translations = [translation_node.text]
                for translation in translations:
                    if translation is None:
                        continue
                    errors = []
                    valid = check_format_specifiers(source, translation, errors, numerus)
                    for error in errors:
                        print('%s: %s' % (filename, error))
                    if not valid: # set type to unfinished and clear string if invalid
                        translation_node.clear()
                        translation_node.set('type', 'unfinished')
                        have_errors = True
                # Remove location tags
                for location in message.findall('location'):
                    message.remove(location)
                # Remove entire message if it is an unfinished translation
                if translation_node.get('type') == 'unfinished':
                    context.remove(message)
        # check if document is (virtually) empty, and remove it if so
        num_messages = 0
        for context in root.findall('context'):
            for message in context.findall('message'):
                num_messages += 1
        if num_messages < MIN_NUM_MESSAGES:
            # Skipping the write leaves only the .orig copy, effectively
            # removing the file from the locale directory.
            print('Removing %s, as it contains only %i messages' % (filepath, num_messages))
            continue
        # write fixed-up tree
        # if diff reduction requested, replace some XML to 'sanitize' to qt formatting
        if reduce_diff_hacks:
            out = io.BytesIO()
            tree.write(out, encoding='utf-8')
            out = out.getvalue()
            out = out.replace(b' />', b'/>')
            with open(filepath, 'wb') as f:
                f.write(out)
        else:
            tree.write(filepath, encoding='utf-8')
    return have_errors
if __name__ == '__main__':
    check_at_repository_root()
    # NOTE(review): the Transifex fetch step is disabled here; translations
    # must already be present in LOCALE_DIR. Re-enable to pull before
    # postprocessing.
    # fetch_all_translations()
    postprocess_translations()
|
{
"content_hash": "e19ae69a1abb8b606a96b5a7adb205ab",
"timestamp": "",
"source": "github",
"line_count": 199,
"max_line_length": 124,
"avg_line_length": 37.46733668341709,
"alnum_prop": 0.6103809012875536,
"repo_name": "unlimitedcoin-dev/unlimited-core",
"id": "d2af4f43202fb9f2f18d532e8c087d3dc5baef0a",
"size": "7657",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "contrib/devtools/update-translations.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1315034"
},
{
"name": "C++",
"bytes": "5283466"
},
{
"name": "CSS",
"bytes": "123890"
},
{
"name": "HTML",
"bytes": "50621"
},
{
"name": "Java",
"bytes": "2100"
},
{
"name": "M4",
"bytes": "147843"
},
{
"name": "Makefile",
"bytes": "97098"
},
{
"name": "Objective-C",
"bytes": "4930"
},
{
"name": "Objective-C++",
"bytes": "7222"
},
{
"name": "Protocol Buffer",
"bytes": "2308"
},
{
"name": "Python",
"bytes": "706101"
},
{
"name": "QMake",
"bytes": "2054"
},
{
"name": "Roff",
"bytes": "3649"
},
{
"name": "Shell",
"bytes": "35569"
}
],
"symlink_target": ""
}
|
"""
Copyright 2018 Google LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import tensorflow as tf
import numpy as np
def SVD_Conv_Tensor(conv, inp_shape):
    """ Find the singular values of the linear transformation
    corresponding to the convolution represented by conv on
    an n x n x depth input. """
    # Bring the channel axes to the front and cast to complex so the 2-D
    # FFT can be taken over the (padded) spatial axes.
    kernel = tf.cast(tf.transpose(conv, perm=[2, 3, 0, 1]), tf.complex64)
    kernel_shape = conv.get_shape().as_list()
    # Zero-pad the kernel spatially up to the full input size.
    pad_spec = tf.constant([[0, 0], [0, 0],
                            [0, inp_shape[0] - kernel_shape[0]],
                            [0, inp_shape[1] - kernel_shape[1]]])
    coeffs = tf.fft2d(tf.pad(kernel, pad_spec))
    # One SVD per frequency: spatial axes become the batch dimensions of
    # the (in-channel, out-channel) matrices.
    return tf.svd(tf.transpose(coeffs, perm=[2, 3, 0, 1]),
                  compute_uv=False)
def Clip_OperatorNorm(conv, inp_shape, clip_to):
    """Clip every singular value of the convolution's operator to clip_to.

    Returns a tuple (clipped_conv, norm): the clipped filter with the same
    shape as conv, and the pre-clipping operator norm (largest singular
    value).
    """
    kernel = tf.cast(tf.transpose(conv, perm=[2, 3, 0, 1]), tf.complex64)
    kernel_shape = conv.get_shape().as_list()
    pad_spec = tf.constant([[0, 0], [0, 0],
                            [0, inp_shape[0] - kernel_shape[0]],
                            [0, inp_shape[1] - kernel_shape[1]]])
    coeffs = tf.fft2d(tf.pad(kernel, pad_spec))
    # Full SVD per frequency so the singular values can be clipped and the
    # transform coefficients re-assembled.
    D, U, V = tf.svd(tf.transpose(coeffs, perm=[2, 3, 0, 1]))
    norm = tf.reduce_max(D)
    D_clipped = tf.cast(tf.minimum(D, clip_to), tf.complex64)
    clipped_coeff = tf.matmul(U, tf.matmul(tf.linalg.diag(D_clipped),
                                           V, adjoint_b=True))
    # Inverse FFT back to the spatial domain, then crop to the original
    # kernel shape and restore the original axis order.
    clipped_padded = tf.real(tf.ifft2d(
        tf.transpose(clipped_coeff, perm=[2, 3, 0, 1])))
    clipped_conv = tf.slice(tf.transpose(clipped_padded, perm=[2, 3, 0, 1]),
                            [0] * len(kernel_shape), kernel_shape)
    return clipped_conv, norm
def SVD_Conv_Tensor_NP(filter, inp_size):
    """Singular values of the convolution's linear operator, via FFT.

    The filter is zero-padded spatially to inp_size and transformed; for
    each 2-D frequency, the singular values of the resulting
    (in-channel x out-channel) matrix are returned.
    """
    padded_transform = np.fft.fft2(filter, inp_size, axes=[0, 1])
    return np.linalg.svd(padded_transform, compute_uv=False)
def Clip_OperatorNorm_NP(filter, inp_shape, clip_to):
    """Clip the convolution's singular values to at most clip_to (NumPy).

    Returns a real filter of the same shape as the input filter whose
    corresponding linear operator has every singular value <= clip_to.
    """
    # FFT over the spatial axes yields one (in, out) matrix per frequency.
    coeffs = np.fft.fft2(filter, inp_shape, axes=[0, 1])
    U, D, V = np.linalg.svd(coeffs, compute_uv=True, full_matrices=False)
    D_clipped = np.minimum(D, clip_to)
    # Recombine U * diag(D_clipped) * V; the broadcasting pattern depends
    # on which channel dimension is smaller (thin SVD factors).
    if filter.shape[2] > filter.shape[3]:
        clipped_coeffs = np.matmul(U, D_clipped[..., None] * V)
    else:
        clipped_coeffs = np.matmul(U * D_clipped[..., None, :], V)
    spatial = np.fft.ifft2(clipped_coeffs, axes=[0, 1]).real
    # Crop the padded inverse transform back to the original filter shape.
    index_ranges = [range(d) for d in filter.shape]
    return spatial[np.ix_(*index_ranges)]
|
{
"content_hash": "d5d473192f6d1b86c59c0730886cbd42",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 80,
"avg_line_length": 44.175,
"alnum_prop": 0.6604414261460102,
"repo_name": "brain-research/conv-sv",
"id": "3aa8ad0384c14d542e495f1d9dac2cbc194da10d",
"size": "3534",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "conv2d_singular_values.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "15094"
}
],
"symlink_target": ""
}
|
from corehq.apps.reports.standard import CustomProjectReport, ProjectReportParametersMixin, DatespanMixin
from corehq.const import USER_DATE_FORMAT, USER_MONTH_FORMAT
from dimagi.utils.couch.database import get_db
class PathIndiaKrantiReport(CustomProjectReport, ProjectReportParametersMixin, DatespanMixin):
    """Custom "Key Indicators" report for the PATH India (Kranti) project.

    Sums per-user results from the "pathindia/kranti_report" couch view over
    the selected date span and renders antenatal / intranatal / postnatal
    indicator counts plus percentage coverage figures.
    """
    name = "Key Indicators"
    slug = "pathindia_key_indicators"
    # Report filter panel: user type, group, and date range.
    fields = ['corehq.apps.reports.filters.users.UserTypeFilter',
              'corehq.apps.reports.filters.select.GroupFilter',
              'corehq.apps.reports.filters.dates.DatespanFilter']
    report_template_path = "pathindia/reports/kranti_report.html"
    flush_layout = True
    @property
    def report_context(self):
        """Build the template context: merged raw counts (kranti),
        percentage strings (percentages), and general reporting info."""
        report_data = dict()
        # Accumulate the reduced view output for every selected user over
        # the requested (UTC) date range.
        for user in self.users:
            key = [user.user_id]
            data = get_db().view("pathindia/kranti_report",
                reduce=True,
                startkey = key+[self.datespan.startdate_param_utc],
                endkey = key+[self.datespan.enddate_param_utc]
            ).all()
            for item in data:
                report_data = self._merge_data(report_data, item.get('value', {}))
        # Derived totals used as denominators for the percentage table.
        delivery_place_total = report_data.get("antenatal", {}).get("delivery_place", {}).get("govt", 0) + \
                               report_data.get("antenatal", {}).get("delivery_place", {}).get("priv", 0)
        complications = report_data.get("postnatal", {}).get("complications", {})
        complications_total = complications.get("bleeding", 0) + \
                              complications.get("fever", 0) + \
                              complications.get("convulsions", 0)
        if "postnatal" in report_data:
            report_data["postnatal"]["complications_total"] = complications_total
        reg_preg_total = report_data.get("antenatal", {}).get("registered_preg", 0)
        anc_exam_total = report_data.get("antenatal", {}).get("anc_examination", 0)
        live_birth_total = report_data.get("intranatal", {}).get("outcome", {}).get("live_birth", 0)
        sexed_total = report_data.get("intranatal", {}).get("sex", {}).get("female", 0) +\
                      report_data.get("intranatal", {}).get("sex", {}).get("male", 0)
        hb_exam_total = report_data.get("antenatal", {}).get("stats", {}).get("hb_exam", 0)
        # Expected (denominator) values, mirroring report_data's structure;
        # _get_percentages replaces each leaf with an "actual/expected"
        # percentage string.
        kranti_expected = dict(
            antenatal=dict(
                reg_place=dict(
                    govt=reg_preg_total,
                    priv=reg_preg_total
                ),
                early_registration=reg_preg_total,
                stats=dict(
                    bp=anc_exam_total,
                    weight=anc_exam_total,
                    abdominal_exam=anc_exam_total,
                    hb_exam=anc_exam_total
                ),
                hb=dict(
                    low=hb_exam_total,
                    avg=hb_exam_total,
                    high=hb_exam_total
                ),
                tt_booster=reg_preg_total,
                ifa_tabs=reg_preg_total,
                injection_syrup=reg_preg_total,
                delivery_place=dict(
                    govt=delivery_place_total,
                    priv=delivery_place_total
                )
            ),
            intranatal=dict(
                place=dict(
                    govt=live_birth_total,
                    priv=live_birth_total,
                    home=live_birth_total
                ),
                type=dict(
                    normal=live_birth_total,
                    lscs=live_birth_total,
                    forceps=live_birth_total
                ),
                sex=dict(
                    male=sexed_total,
                    female=sexed_total
                ),
                weight=dict(
                    low=live_birth_total,
                    avg=live_birth_total,
                    high=live_birth_total
                )
            ),
            postnatal=dict(
                currently_breastfeeding=live_birth_total,
                at_least_one_pnc=live_birth_total,
                no_pnc=live_birth_total,
                complications=dict(
                    bleeding=complications_total,
                    fever=complications_total,
                    convulsions=complications_total
                ),
                jsy=live_birth_total
            )
        )
        kranti_percentages = self._get_percentages(report_data, kranti_expected)
        # Human-readable reporting period; show a range when the span
        # crosses month boundaries.
        month_reporting_range = self.datespan.enddate.strftime(USER_MONTH_FORMAT)
        if self.datespan.enddate.strftime(USER_MONTH_FORMAT) != self.datespan.startdate.strftime(USER_MONTH_FORMAT):
            month_reporting_range = "%s to %s" % (self.datespan.startdate.strftime(USER_MONTH_FORMAT), month_reporting_range)
        return dict(
            kranti=report_data,
            percentages=kranti_percentages,
            general_info=dict(
                total_link_workers=len(self.users),
                month_of_reporting=month_reporting_range,
                date_of_sending_report=self.datespan.enddate.strftime(USER_DATE_FORMAT),
                # NOTE(review): this count is not filtered by date span or
                # group — it is the view's overall total; confirm intended.
                total_preg_women_monitored=get_db().view("pathindia/kranti_cases",
                    reduce=True
                ).first().get('value', 0),
                uhp=self.group.name if self.group else "All UHPs"
            )
        )
    def _merge_data(self, dict1, dict2):
        """Recursively add dict2's integer leaves into dict1 (mutating
        dict1 in place) and return dict1; nested dicts merge key by key."""
        for key, val in dict2.items():
            if isinstance(val, dict):
                if key not in dict1:
                    dict1[key] = val
                else:
                    dict1[key] = self._merge_data(dict1[key], val)
            elif isinstance(val, int):
                if key not in dict1:
                    dict1[key] = val
                else:
                    dict1[key] += val
        return dict1
    def _get_percentages(self, data, expected,
                         compute_percent=lambda x,expected: (float(x)/expected)*100 if expected != 0 else 0):
        """Replace each integer leaf of *expected* (in place) with the
        percentage string of data's value over the expected denominator.

        NOTE(review): the recursive call does not forward a custom
        compute_percent — only the default formula is ever applied below
        the top level; harmless here since callers use the default.
        """
        for key, val in expected.items():
            if isinstance(val, dict):
                self._get_percentages(data.get(key, {}), expected[key])
            elif isinstance(val, int):
                expected[key] = "%.2f%%" % compute_percent(data.get(key, 0), val)
        return expected
|
{
"content_hash": "82523ae69a6f28cf3b335e146cf5b454",
"timestamp": "",
"source": "github",
"line_count": 145,
"max_line_length": 125,
"avg_line_length": 43.42068965517242,
"alnum_prop": 0.5184243964421855,
"repo_name": "puttarajubr/commcare-hq",
"id": "32532ad66b28fa9b42c52bcd1c2f21dda566c867",
"size": "6296",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "custom/_legacy/pathindia/reports.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ActionScript",
"bytes": "15950"
},
{
"name": "CSS",
"bytes": "581878"
},
{
"name": "HTML",
"bytes": "2790361"
},
{
"name": "JavaScript",
"bytes": "2572023"
},
{
"name": "Makefile",
"bytes": "3999"
},
{
"name": "Python",
"bytes": "11275678"
},
{
"name": "Shell",
"bytes": "23890"
}
],
"symlink_target": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.