text stringlengths 81 112k |
|---|
Send a dweet to dweet.io for a thing with a known name
def dweet_for(thing_name, payload, key=None, session=None):
    """Post *payload* as a dweet for the named thing.

    If *key* is given it is sent as the unlock key for a locked thing.
    """
    params = {'key': key} if key is not None else None
    endpoint = '/dweet/for/{0}'.format(thing_name)
    return _send_dweet(payload, endpoint, params=params, session=session)
Read all the dweets for a dweeter
def get_dweets_for(thing_name, key=None, session=None):
    """Read all the dweets for a dweeter.

    :param thing_name: name of the thing whose dweets are fetched
    :param key: optional unlock key for a locked thing
    :param session: optional requests session to reuse connections
    """
    params = {'key': key} if key is not None else None
    # BUG FIX: previously passed session=None, so a caller-supplied
    # session was silently ignored (unlike dweet_for, which forwards it).
    return _request('get', '/get/dweets/for/{0}'.format(thing_name),
                    params=params, session=session)
Remove a lock (no matter what it's connected to).
def remove_lock(lock, key, session=None):
    """Remove a lock, regardless of which thing it is attached to."""
    endpoint = '/remove/lock/{0}'.format(lock)
    return _request('get', endpoint, params={'key': key}, session=session)
Lock a thing (prevents unauthed dweets for the locked thing)
def lock(thing_name, lock, key, session=None):
    """Lock a thing so that unauthenticated dweets for it are rejected."""
    query = {'key': key, 'lock': lock}
    return _request('get', '/lock/{0}'.format(thing_name), params=query,
                    session=session)
Unlock a thing
def unlock(thing_name, key, session=None):
    """Unlock a previously locked thing."""
    endpoint = '/unlock/{0}'.format(thing_name)
    return _request('get', endpoint, params={'key': key}, session=session)
Set an alert on a thing with the given condition
def set_alert(thing_name, who, condition, key, session=None):
    """Set an alert on a thing with the given condition.

    *who* is an iterable of recipients; *condition* is URL-quoted before
    being embedded in the path.
    """
    recipients = ','.join(who)
    endpoint = '/alert/{0}/when/{1}/{2}'.format(
        recipients,
        thing_name,
        quote(condition),
    )
    return _request('get', endpoint, params={'key': key}, session=session)
Get the alert currently set on a thing
def get_alert(thing_name, key, session=None):
    """Get the alert currently set on a thing.

    (Docstring previously said "Set an alert", copied from set_alert;
    the endpoint is /get/alert/for/, a read operation.)
    """
    return _request('get', '/get/alert/for/{0}'.format(thing_name), params={'key': key}, session=session)
Remove an alert for the given thing
def remove_alert(thing_name, key, session=None):
    """Remove the alert configured for the given thing."""
    endpoint = '/remove/alert/for/{0}'.format(thing_name)
    return _request('get', endpoint, params={'key': key}, session=session)
list all product sets for current user
def get_product_sets(self):
    """List all product sets belonging to the current user."""
    # The parent class's base_url is the collection endpoint, i.e. the
    # API URL without a specific product set id appended.
    collection_url = super(ProductSetAPI, self).base_url
    return self.client.get(collection_url)
BE NOTICED: this will delete all product sets for current user
def delete_all_product_sets(self):
    """Delete every product set of the current user. Destructive -- be warned."""
    # Use the parent class's base_url: the collection endpoint with no
    # specific product set id, so the DELETE applies to all sets.
    collection_url = super(ProductSetAPI, self).base_url
    return self.client.delete(collection_url)
This function (and backend API) is being obsoleted. Don't use it anymore.
def get_products(self, product_ids):
    """Fetch products by id from the current product set.

    Deprecated: this function (and its backend API) is being obsoleted.
    Don't use it anymore.
    """
    if self.product_set_id is None:
        raise ValueError('product_set_id must be specified')
    # NOTE: a GET request carrying a JSON body is unusual but is what the
    # (deprecated) backend expects.
    return self.client.get(self.base_url + '/products',
                           json={'ids': product_ids})
Check if the timeout has been reached and raise a `StopIteration` if so.
def _check_stream_timeout(started, timeout):
"""Check if the timeout has been reached and raise a `StopIteration` if so.
"""
if timeout:
elapsed = datetime.datetime.utcnow() - started
if elapsed.seconds > timeout:
raise StopIteration |
Yields dweets as received from dweet.io's streaming API
def _listen_for_dweets_from_response(response):
    """Yields dweets as received from dweet.io's streaming API.

    :param response: a streaming ``requests`` response object
    """
    streambuffer = ''
    # iter_content() with default chunk size yields the body byte by byte;
    # bytes are accumulated until a full payload can be parsed.
    for byte in response.iter_content():
        if byte:
            streambuffer += byte.decode('ascii')
            try:
                # The chunked stream interleaves framing and payload lines;
                # line index 1 is expected to hold the JSON payload.
                # IndexError/ValueError simply mean "not a full message yet".
                dweet = json.loads(streambuffer.splitlines()[1])
            except (IndexError, ValueError):
                continue
            # The payload is double-encoded: a JSON string containing the
            # dweet's JSON document, so a string result is decoded again.
            if isstr(dweet):
                yield json.loads(dweet)
            # Reset the buffer for the next message.
            streambuffer = ''
Create a real-time subscription to dweets
def listen_for_dweets_from(thing_name, timeout=900, key=None, session=None):
    """Create a real-time subscription to dweets.

    Generator that yields dweets for *thing_name* as they arrive,
    reconnecting on transient network errors until *timeout* seconds
    have elapsed overall.
    """
    url = BASE_URL + '/listen/for/dweets/from/{0}'.format(thing_name)
    session = session or requests.Session()
    if key is not None:
        params = {'key': key}
    else:
        params = None
    start = datetime.datetime.utcnow()
    while True:
        # A fresh prepared request per iteration so reconnects start clean.
        # NOTE: *timeout* doubles as the per-request socket timeout here.
        request = requests.Request("GET", url, params=params).prepare()
        resp = session.send(request, stream=True, timeout=timeout)
        try:
            for x in _listen_for_dweets_from_response(resp):
                yield x
                # Checked after every yielded dweet so an active stream
                # still terminates once the overall timeout has passed.
                # NOTE(review): _check_stream_timeout raises StopIteration
                # inside this generator -- RuntimeError under PEP 479;
                # confirm behavior on Python 3.7+.
                _check_stream_timeout(start, timeout)
        except (ChunkedEncodingError, requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout):
            # Transient stream errors: fall through and reconnect.
            pass
        _check_stream_timeout(start, timeout)
curl -X POST \
-H 'x-ca-version: 1.0' \
-H 'x-ca-accesskeyid: YourAccessId' \
-d "service_id=p4dkh2sg&request_id=c13ed5aa-d6d2-11e8-ba11-02420a582a05&description=blahlblah" \
https://api.productai.cn/bad_cases/_0000204
def add(self, service_id, request_id, description=None, details=None):
    """Report a bad case for a service request.

    Equivalent HTTP call::

        curl -X POST \
            -H 'x-ca-version: 1.0' \
            -H 'x-ca-accesskeyid: YourAccessId' \
            -d "service_id=p4dkh2sg&request_id=...&description=blahlblah" \
            https://api.productai.cn/bad_cases/_0000204

    The API responds with the created bad-case record, e.g.::

        {
            "created_at": "2018-10-24T03:30:51Z",
            "description": "...",
            "details": "",
            "id": 34,
            "image_path": null,
            "modified_at": "2018-10-24T03:30:51Z",
            "reporter_id": 1632,
            "request_id": "34954696-d73d-11e8-9419-0242ac1c2b04",
            "service_id": "p4dkh2sg",
            "status": "open"
        }

    :param service_id: id of the service the bad case belongs to (required)
    :param request_id: id of the offending request (required)
    :param description: optional human-readable description
    :param details: optional extra details
    :raises ValueError: if service_id or request_id is missing
    """
    # FIX: the curl example and sample response used to sit as bare string
    # statements in the middle of the function body (no-op expressions);
    # they are now part of the docstring.
    if not service_id:
        raise ValueError('service_id is required')
    if not request_id:
        raise ValueError('request_id is required')
    data = {'service_id': service_id, 'request_id': request_id}
    # Optional fields are only sent when truthy, matching the original
    # behavior (empty strings are omitted).
    if description:
        data['description'] = description
    if details:
        data['details'] = details
    return self.client.post(self.base_url, data=data)
Executes a `packer build`
:param bool parallel: Run builders in parallel
:param bool debug: Run in debug mode
:param bool force: Force artifact output even if exists
:param bool machine_readable: Make output machine-readable
def build(self, parallel=True, debug=False, force=False,
          machine_readable=False):
    """Run ``packer build`` on the configured template.

    :param bool parallel: Run builders in parallel
    :param bool debug: Run in debug mode
    :param bool force: Force artifact output even if exists
    :param bool machine_readable: Make output machine-readable
    """
    self.packer_cmd = self.packer.build
    # Each entry is either the flag string or None (skipped by _add_opt).
    flags = [
        '-parallel=true' if parallel else None,
        '-debug' if debug else None,
        '-force' if force else None,
        '-machine-readable' if machine_readable else None,
    ]
    for flag in flags:
        self._add_opt(flag)
    self._append_base_arguments()
    self._add_opt(self.packerfile)
    return self.packer_cmd()
Implements the `packer fix` function
:param string to_file: File to output fixed template to
def fix(self, to_file=None):
    """Run ``packer fix`` on the configured template.

    :param string to_file: File to output fixed template to
    """
    self.packer_cmd = self.packer.fix
    self._add_opt(self.packerfile)
    result = self.packer_cmd()
    # Decode once; reuse for both the optional file dump and the parse.
    fixed_template = result.stdout.decode()
    if to_file:
        with open(to_file, 'w') as f:
            f.write(fixed_template)
    result.fixed = json.loads(fixed_template)
    return result
Inspects a Packer Templates file (`packer inspect -machine-readable`)
To return the output in a readable form, the `-machine-readable` flag
is appended automatically, after which the output is parsed and returned
as a dict of the following format:
"variables": [
{
"name": "aws_access_key",
"value": "{{env `AWS_ACCESS_KEY_ID`}}"
},
{
"name": "aws_secret_key",
"value": "{{env `AWS_ACCESS_KEY`}}"
}
],
"provisioners": [
{
"type": "shell"
}
],
"builders": [
{
"type": "amazon-ebs",
"name": "amazon"
}
]
:param bool mrf: output in machine-readable form.
def inspect(self, mrf=True):
    """Run ``packer inspect`` on the template.

    With machine-readable output enabled the raw output is parsed into a
    dict shaped like::

        {
            "variables": [{"name": ..., "value": ...}, ...],
            "builders": [{"type": ..., "name": ...}, ...],
            "provisioners": [{"type": ...}, ...]
        }

    and attached to the result as ``parsed_output``; otherwise
    ``parsed_output`` is None.

    :param bool mrf: output in machine-readable form.
    """
    self.packer_cmd = self.packer.inspect
    self._add_opt('-machine-readable' if mrf else None)
    self._add_opt(self.packerfile)
    result = self.packer_cmd()
    result.parsed_output = (
        self._parse_inspection_output(result.stdout.decode())
        if mrf else None
    )
    return result
Implements the `packer push` function
UNTESTED! Must be used alongside an Atlas account
def push(self, create=True, token=False):
    """Implements the `packer push` function.

    UNTESTED! Must be used alongside an Atlas account.

    :param bool create: pass ``-create=true`` to create the build
        configuration if it doesn't exist
    :param token: Atlas token string, or False to omit the flag
    """
    self.packer_cmd = self.packer.push
    self._add_opt('-create=true' if create else None)
    # BUG FIX: the flag was misspelled '-tokn=...', which packer rejects;
    # the correct CLI flag is '-token='.
    self._add_opt('-token={0}'.format(token) if token else None)
    self._add_opt(self.packerfile)
    return self.packer_cmd()
Validates a Packer Template file (`packer validate`)
If the validation failed, an `sh` exception will be raised.
:param bool syntax_only: Whether to validate the syntax only
without validating the configuration itself.
def validate(self, syntax_only=False):
    """Validates a Packer Template file (`packer validate`).

    If the validation failed, an `sh` exception will be raised internally
    and converted into a failed ValidationObject.

    :param bool syntax_only: Whether to validate the syntax only
        without validating the configuration itself.
    """
    self.packer_cmd = self.packer.validate
    self._add_opt('-syntax-only' if syntax_only else None)
    self._append_base_arguments()
    self._add_opt(self.packerfile)
    # as sh raises an exception rather than return a value when execution
    # fails we create an object to return the exception and the validation
    # state
    try:
        validation = self.packer_cmd()
        validation.succeeded = validation.exit_code == 0
        validation.error = None
    except Exception as ex:
        validation = ValidationObject()
        validation.succeeded = False
        validation.failed = True
        # BUG FIX: Exception.message does not exist on Python 3 (and was
        # deprecated on 2.6+); str(ex) is the portable representation.
        validation.error = str(ex)
    return validation
Appends base arguments to packer commands.
-except, -only, -var and -var-file are appended to almost
all subcommands in packer. As such this can be called to add
these flags to the subcommand.
def _append_base_arguments(self):
"""Appends base arguments to packer commands.
-except, -only, -var and -var-file are appeneded to almost
all subcommands in packer. As such this can be called to add
these flags to the subcommand.
"""
if self.exc and self.only:
raise PackerException('Cannot provide both "except" and "only"')
elif self.exc:
self._add_opt('-except={0}'.format(self._join_comma(self.exc)))
elif self.only:
self._add_opt('-only={0}'.format(self._join_comma(self.only)))
for var, value in self.vars.items():
self._add_opt("-var")
self._add_opt("{0}={1}".format(var, value))
if self.var_file:
self._add_opt('-var-file={0}'.format(self.var_file)) |
Parses the machine-readable output `packer inspect` provides.
See the inspect method for more info.
This has been tested vs. Packer v0.7.5
def _parse_inspection_output(self, output):
"""Parses the machine-readable output `packer inspect` provides.
See the inspect method for more info.
This has been tested vs. Packer v0.7.5
"""
parts = {'variables': [], 'builders': [], 'provisioners': []}
for line in output.splitlines():
line = line.split(',')
if line[2].startswith('template'):
del line[0:2]
component = line[0]
if component == 'template-variable':
variable = {"name": line[1], "value": line[2]}
parts['variables'].append(variable)
elif component == 'template-builder':
builder = {"name": line[1], "type": line[2]}
parts['builders'].append(builder)
elif component == 'template-provisioner':
provisioner = {"type": line[1]}
parts['provisioners'].append(provisioner)
return parts |
Perform an HTTP POST request for a given url.
Returns the response object.
def post(self, url, data, headers=None):
    """Issue an HTTP POST to *url* and return the response object."""
    return self._request('POST', url, data, headers=headers)
Perform an HTTP PUT request for a given url.
Returns the response object.
def put(self, url, data, headers=None):
    """Issue an HTTP PUT to *url* and return the response object."""
    return self._request('PUT', url, data, headers=headers)
Query a fulltext index by key and query or just a plain Lucene query,
i1 = gdb.nodes.indexes.get('people',type='fulltext', provider='lucene')
i1.query('name','do*')
i1.query('name:do*')
In this example, the last two line are equivalent.
def query(self, *args):
    """Query a fulltext index.

    Accepts either a key and a query, or a single plain Lucene query::

        i1 = gdb.nodes.indexes.get('people', type='fulltext',
                                   provider='lucene')
        i1.query('name', 'do*')
        i1.query('name:do*')

    The last two calls are equivalent.
    """
    if not args or len(args) > 2:
        # +1 accounts for the implicit self in the reported arity.
        raise TypeError('query() takes 2 or 3 arguments (a query or a key '
                        'and a query) (%d given)' % (len(args) + 1))
    if len(args) == 1:
        # Plain Lucene query string: run it against the 'text' key.
        (lucene_query,) = args
        return self.get('text').query(text_type(lucene_query))
    key, q = args
    index_key = self.get(key)
    if isinstance(q, string_types):
        return index_key.query(q)
    if q.fielded:
        raise ValueError('Queries with an included key should '
                         'not include a field.')
    return index_key.query(text_type(q))
Return a HTML representation for a particular QuerySequence.
Mainly for IPython Notebook.
def _plot_graph(self, graph, title=None, width=None, height=None):
    """
    Return a HTML representation for a particular QuerySequence.
    Mainly for IPython Notebook.
    """
    if not self._elements_row and not self._elements_graph:
        raise ValueError('Unable to display the graph or the table')
    title = title or self.q
    # json.dumps(None) -> "null": a valid JS literal meaning "use the
    # container's width" in the script below.
    width = width or json.dumps(None)
    height = height or 300
    # Unique id per rendering so multiple plots can coexist in one notebook.
    d3_uuid = text_type(uuid.uuid1())
    d3_graph = self._transform_graph_to_d3(graph)
    d3_id = "d3_id_" + d3_uuid
    d3_title = title
    d3_container_id = d3_id + "_d3c"
    # CSS scoped to this plot's element id; doubled braces survive .format().
    style = """
    #{d3_id} path.link {{
        fill: none;
        stroke-width: 1.5px;
    }}
    #{d3_id} .node {{
        /*fill: #ccc;*/
        stroke: #333;
        stroke-width: 1.5px;
    }}
    #{d3_id} text {{
        font: 10px sans-serif;
        pointer-events: none;
    }}
    #{d3_id} text.shadow {{
        stroke: #fff;
        stroke-width: 3px;
        stroke-opacity: .8;
    }}
    #{d3_id} .node.sticky {{
        /* stroke-width: 2px; */
    }}
    """.format(d3_id=d3_id)
    # D3 force-layout script. Inserted as a .format() *argument* into the
    # outer template, so its braces are left unescaped on purpose.
    js = """
    var links = graph.links;
    var nodes = graph.nodes;
    // Compute the distinct nodes from the links.
    links.forEach(function(link) {
        link.source = (nodes[link.source] ||
            (nodes[link.source] = {name: link.source}));
        link.target = (nodes[link.target] ||
            (nodes[link.target] = {name: link.target}));
    });
    var w = width || $(container).width(), h = height;
    var force = d3.layout.force()
        .nodes(d3.values(nodes))
        .links(links)
        .size([w, h])
        .linkDistance(60)
        .charge(-300)
        .on("tick", tick)
        .start();
    var svg = d3.select(container).append("svg:svg")
        .attr("width", w)
        .attr("height", h);
    // Per-type markers, as they don't inherit styles.
    svg.append("svg:defs").selectAll("marker")
        .data(["arrow"])
        .enter().append("svg:marker")
        .attr("id", String)
        .attr("viewBox", "0 -5 10 10")
        .attr("refX", 15)
        .attr("refY", -1.5)
        .attr("markerWidth", 6)
        .attr("markerHeight", 6)
        .attr("orient", "auto")
        .append("svg:path")
        .attr("d", "M0,-5L10,0L0,5");
    var path = svg.append("svg:g").selectAll("path")
        .data(force.links())
        .enter().append("svg:path")
        .attr("class", function(d) { return "link " + d.stroke; })
        .attr("stroke", function(d) { return d.stroke; })
        .attr("marker-end", function(d) { return "url(#arrow)"; });
    var circle = svg.append("svg:g").selectAll("circle")
        .data(force.nodes())
        .enter().append("svg:circle")
        .attr("fill", function(d) { return d.fill; })
        .attr("r", 6)
        .attr("class", "node")
        .call(force.drag)
        .on("mousedown", function(d) {
            d.fixed = true;
            d3.select(this).classed("sticky", true);
        });
    var text = svg.append("svg:g").selectAll("g")
        .data(force.nodes())
        .enter().append("svg:g");
    // A copy of the text with a thick white stroke for legibility.
    text.append("svg:text")
        .attr("x", 8)
        .attr("y", ".31em")
        .attr("class", "shadow")
        .text(function(d) { return d.label; });
    text.append("svg:text")
        .attr("x", 8)
        .attr("y", ".31em")
        .attr("class", "front")
        .text(function(d) { return d.label; });
    // Use elliptical arc path segments to doubly-encode directionality.
    function tick() {
        path.attr("d", function(d) {
            var dx = d.target.x - d.source.x,
                dy = d.target.y - d.source.y,
                dr = Math.sqrt(dx * dx + dy * dy);
            return ("M" + d.source.x + "," + d.source.y + "A"
                + dr + "," + dr + " 0 0,1 " + d.target.x + ","
                + d.target.y);
        });
        circle.attr("transform", function(d) {
            return "translate(" + d.x + "," + d.y + ")";
        });
        text.attr("transform", function(d) {
            return "translate(" + d.x + "," + d.y + ")";
        });
    }
    // Display options
    var display = $(container + "_display");
    graph.properties.forEach(function (property) {
        var option = $("<OPTION/>");
        option.text(property);
        option.attr("value", property);
        display.append(option);
    });
    display.on("change", function () {
        var selected = $(this).find(":selected").val(),
            displayFunc;
        if (selected.length !== 0) {
            displayFunc = function(d) {
                return d.properties[selected];
            }
        } else {
            displayFunc = function(d) {
                return d.label;
            }
        }
        text.select("text.front").text(displayFunc);
        text.select("text.shadow").text(displayFunc);
    });
    """
    # Outer HTML template: literal JS braces are doubled ({{ }}) because
    # this string goes through .format(); {js}, {style}, etc. are filled in.
    # d3 is lazily loaded from d3js.org only when not already present.
    return ("""
    <style type="text/css">
    {style}
    </style>
    <div class="accordion">
        <div class="accordion-group">
            <div class="accordion-heading">
                <a class="accordion-toggle collapsed"
                   data-toggle="collapse" data-parent=""
                   href="#{d3_id}">
                    {d3_title}
                </a>
            </div>
            <div id="{d3_id}" class="accordion-body in collapse">
                <div class="accordion-inner">
                    <div id="{d3_container_id}">
                        <select id="{d3_container_id}_display">
                            <option value="">ID</option>
                        </select>
                    </div>
                </div>
            </div>
        </div>
    </div>
    <script>
    var neo4jrestclient = window.neo4jrestclient || {{}};
    neo4jrestclient['{d3_uuid}'] = {{}};
    neo4jrestclient['{d3_uuid}'].graph = {d3_graph};
    neo4jrestclient['{d3_uuid}'].container_id = "{d3_container_id}";
    neo4jrestclient['{d3_uuid}'].container = "#{d3_container_id}";
    neo4jrestclient['{d3_uuid}'].render = function () {{
        (function (graph, container, width, height) {{
            {js}
        }})(
            neo4jrestclient['{d3_uuid}'].graph,
            neo4jrestclient['{d3_uuid}'].container,
            {width},
            {height}
        );
    }}
    if (!window.d3) {{
        $.getScript(
            "//d3js.org/d3.v2.js?2.9.1",
            neo4jrestclient['{d3_uuid}'].render
        );
    }} else {{
        neo4jrestclient['{d3_uuid}'].render();
    }}
    </script>
    """.format(
        style=style,
        js=js,
        d3_graph=json.dumps(d3_graph),
        d3_id=d3_id,
        d3_uuid=d3_uuid,
        d3_title=d3_title,
        d3_container_id=d3_container_id,
        width=width,
        height=height,
    ))
Send an HTTP request to the REST API.
:param string path: A URL
:param string method: The HTTP method (GET, POST, etc.) to use
in the request.
:param string body: A string representing any data to be sent in the
body of the HTTP request.
:param dictionary headers:
"{header-name: header-value}" dictionary.
def do_call(self, path, method, body=None, headers=None):
    """
    Send an HTTP request to the REST API.

    :param string path: A URL
    :param string method: The HTTP method (GET, POST, etc.) to use
        in the request.
    :param string body: A string representing any data to be sent in the
        body of the HTTP request.
    :param dictionary headers:
        "{header-name: header-value}" dictionary.
    :returns: decoded JSON content, or None when the body is empty/not JSON
    :raises NetworkError: on timeouts or any other transport failure
    :raises HTTPError: on non-2xx (outside 200-206) responses
    """
    url = urljoin(self.base_url, path)
    try:
        resp = requests.request(method, url, data=body, headers=headers,
                                auth=self.auth, timeout=self.timeout)
    # FIX: the exception bindings ('as out') were unused; drop them.
    except requests.exceptions.Timeout:
        raise NetworkError("Timeout while trying to connect to RabbitMQ")
    except requests.exceptions.RequestException as err:
        # All other requests exceptions inherit from RequestException
        raise NetworkError("Error during request %s %s" % (type(err), err))
    try:
        content = resp.json()
    except ValueError:
        # Non-JSON (or empty) body; treated as "no content".
        content = None
    # 'success' HTTP status codes are 200-206
    if resp.status_code < 200 or resp.status_code > 206:
        raise HTTPError(content, resp.status_code, resp.text, path, body)
    # Falsy content ({} / None) is normalized to None, as before.
    return content if content else None
Wrapper around http.do_call that transforms some HTTPError into
our own exceptions
def _call(self, path, method, body=None, headers=None):
"""
Wrapper around http.do_call that transforms some HTTPError into
our own exceptions
"""
try:
resp = self.http.do_call(path, method, body, headers)
except http.HTTPError as err:
if err.status == 401:
raise PermissionError('Insufficient permissions to query ' +
'%s with user %s :%s' % (path, self.user, err))
raise
return resp |
Uses the aliveness-test API call to determine if the
server is alive and the vhost is active. The broker (not this code)
creates a queue and then sends/consumes a message from it.
:param string vhost: There should be no real reason to ever change
this from the default value, but it's there if you need to.
:returns bool: True if alive, False otherwise
:raises: HTTPError if *vhost* doesn't exist on the broker.
def is_alive(self, vhost='%2F'):
    """
    Uses the aliveness-test API call to check that the broker is up and
    the vhost is active; the broker itself creates a queue and
    sends/consumes a message through it.

    :param string vhost: URL-encoded vhost name; the default ('%2F', i.e.
        '/') is almost always what you want.
    :returns bool: True if alive, False otherwise
    :raises: APIError if *vhost* doesn't exist on the broker.
    """
    uri = Client.urls['live_test'] % vhost
    try:
        resp = self._call(uri, 'GET')
    except http.HTTPError as err:
        if err.status == 404:
            raise APIError("No vhost named '%s'" % vhost)
        raise
    return resp['status'] == 'ok'
A convenience function used in the event that you need to confirm that
the broker thinks you are who you think you are.
:returns dict whoami: Dict structure contains:
* administrator: whether the user is has admin privileges
* name: user name
* auth_backend: backend used to determine admin rights
def get_whoami(self):
    """
    Ask the broker who it thinks you are.

    :returns dict whoami: contains:
        * administrator: whether the user has admin privileges
        * name: user name
        * auth_backend: backend used to determine admin rights
    """
    return self._call(Client.urls['whoami'], 'GET')
A convenience function for getting back only the vhost names instead of
the larger vhost dicts.
:returns list vhost_names: A list of just the vhost names.
def get_vhost_names(self):
    """
    Convenience wrapper returning just the vhost names instead of the
    full vhost dicts.

    :returns list vhost_names: A list of just the vhost names.
    """
    return [vhost['name'] for vhost in self.get_all_vhosts()]
Returns the attributes of a single named vhost in a dict.
:param string vname: Name of the vhost to get.
:returns dict vhost: Attribute dict for the named vhost
def get_vhost(self, vname):
    """
    Return the attribute dict of a single named vhost.

    :param string vname: Name of the vhost to get.
    :returns dict vhost: Attribute dict for the named vhost
    """
    # quote with safe='' so '/' in vhost names is percent-encoded too.
    path = Client.urls['vhosts_by_name'] % quote(vname, '')
    return self._call(path, 'GET', headers=Client.json_headers)
Creates a vhost on the server to house exchanges.
:param string vname: The name to give to the vhost on the server
:returns: boolean
def create_vhost(self, vname):
    """
    Create a vhost on the server to house exchanges.

    :param string vname: The name to give to the vhost on the server
    :returns: boolean
    """
    path = Client.urls['vhosts_by_name'] % quote(vname, '')
    return self._call(path, 'PUT', headers=Client.json_headers)
Deletes a vhost from the server. Note that this also deletes any
exchanges or queues that belong to this vhost.
:param string vname: Name of the vhost to delete from the server.
def delete_vhost(self, vname):
    """
    Delete a vhost from the server. This also deletes any exchanges or
    queues that belong to the vhost.

    :param string vname: Name of the vhost to delete from the server.
    """
    path = Client.urls['vhosts_by_name'] % quote(vname, '')
    return self._call(path, 'DELETE')
:returns: list of dicts, or an empty list if there are no permissions.
def get_permissions(self):
    """
    :returns: list of dicts, or an empty list if there are no permissions.
    """
    return self._call(Client.urls['all_permissions'], 'GET')
:returns: list of dicts, or an empty list if there are no permissions.
:param string vname: Name of the vhost to set perms on.
def get_vhost_permissions(self, vname):
    """
    :param string vname: Name of the vhost to fetch permissions for.
    :returns: list of dicts, or an empty list if there are no permissions.
    """
    path = Client.urls['vhost_permissions_get'] % (quote(vname, ''),)
    return self._call(path, 'GET')
:returns: list of dicts, or an empty list if there are no permissions.
:param string username: User to set permissions for.
def get_user_permissions(self, username):
    """
    :param string username: User to fetch permissions for.
    :returns: list of dicts, or an empty list if there are no permissions.
    """
    path = Client.urls['user_permissions'] % (username,)
    return self._call(path, 'GET')
Set permissions for a given username on a given vhost. Both
must already exist.
:param string vname: Name of the vhost to set perms on.
:param string username: User to set permissions for.
:param string config: Permission pattern for configuration operations
for this user in this vhost.
:param string rd: Permission pattern for read operations for this user
in this vhost
:param string wr: Permission pattern for write operations for this user
in this vhost.
Permission patterns are regex strings. If you're unfamiliar with this,
you should definitely check out this section of the RabbitMQ docs:
http://www.rabbitmq.com/admin-guide.html#access-control
def set_vhost_permissions(self, vname, username, config, rd, wr):
    """
    Set permissions for a given username on a given vhost. Both must
    already exist.

    Permission patterns are regex strings; see the RabbitMQ access
    control guide: http://www.rabbitmq.com/admin-guide.html#access-control

    :param string vname: Name of the vhost to set perms on.
    :param string username: User to set permissions for.
    :param string config: Permission pattern for configuration operations
        for this user in this vhost.
    :param string rd: Permission pattern for read operations for this user
        in this vhost
    :param string wr: Permission pattern for write operations for this user
        in this vhost.
    """
    permissions = {"configure": config, "read": rd, "write": wr}
    path = Client.urls['vhost_permissions'] % (quote(vname, ''), username)
    return self._call(path, 'PUT', json.dumps(permissions),
                      headers=Client.json_headers)
Delete permission for a given username on a given vhost. Both
must already exist.
:param string vname: Name of the vhost to set perms on.
:param string username: User to set permissions for.
def delete_permission(self, vname, username):
    """
    Delete permission for a given username on a given vhost. Both must
    already exist.

    :param string vname: Name of the vhost to delete perms from.
    :param string username: User whose permissions are removed.
    """
    path = Client.urls['vhost_permissions'] % (quote(vname, ''), username)
    return self._call(path, 'DELETE')
:returns: A list of dicts
:param string vhost: A vhost to query for exchanges, or None (default),
which triggers a query for all exchanges in all vhosts.
def get_exchanges(self, vhost=None):
    """
    :param string vhost: A vhost to query for exchanges, or None (default),
        which triggers a query for all exchanges in all vhosts.
    :returns: A list of dicts
    """
    if vhost:
        path = Client.urls['exchanges_by_vhost'] % quote(vhost, '')
    else:
        path = Client.urls['all_exchanges']
    return self._call(path, 'GET')
Gets a single exchange which requires a vhost and name.
:param string vhost: The vhost containing the target exchange
:param string name: The name of the exchange
:returns: dict
def get_exchange(self, vhost, name):
    """
    Fetch a single exchange, addressed by vhost and name.

    :param string vhost: The vhost containing the target exchange
    :param string name: The name of the exchange
    :returns: dict
    """
    path = Client.urls['exchange_by_name'] % (quote(vhost, ''),
                                              quote(name, ''))
    return self._call(path, 'GET')
Creates an exchange in the given vhost with the given name. As per the
RabbitMQ API documentation, a JSON body also needs to be included that
"looks something like this":
{"type":"direct",
"auto_delete":false,
"durable":true,
"internal":false,
"arguments":[]}
On success, the API returns a 204 with no content, in which case this
function returns True. If any other response is received, it's raised.
:param string vhost: Vhost to create the exchange in.
:param string name: Name of the proposed exchange.
:param string type: The AMQP exchange type.
:param bool auto_delete: Whether or not the exchange should be
dropped when the no. of consumers drops to zero.
:param bool durable: Whether you want this exchange to persist a
broker restart.
:param bool internal: Whether or not this is a queue for use by the
broker only.
:param list arguments: If given, should be a list. If not given, an
empty list is sent.
def create_exchange(self,
                    vhost,
                    name,
                    xtype,
                    auto_delete=False,
                    durable=True,
                    internal=False,
                    arguments=None):
    """
    Create an exchange named *name* in *vhost*. Per the RabbitMQ API
    docs the request carries a JSON body shaped like::

        {"type":"direct",
         "auto_delete":false,
         "durable":true,
         "internal":false,
         "arguments":[]}

    On success the API returns 204 with no content and this function
    returns True; any other response is raised.

    :param string vhost: Vhost to create the exchange in.
    :param string name: Name of the proposed exchange.
    :param string xtype: The AMQP exchange type.
    :param bool auto_delete: Whether or not the exchange should be
        dropped when the no. of consumers drops to zero.
    :param bool durable: Whether you want this exchange to persist a
        broker restart.
    :param bool internal: Whether or not this is a queue for use by the
        broker only.
    :param list arguments: If given, should be a list. If not given, an
        empty list is sent.
    """
    path = Client.urls['exchange_by_name'] % (quote(vhost, ''),
                                              quote(name, ''))
    body = json.dumps({
        "type": xtype,
        "auto_delete": auto_delete,
        "durable": durable,
        "internal": internal,
        "arguments": arguments or list(),
    })
    self._call(path, 'PUT', body, headers=Client.json_headers)
    return True
Publish a message to an exchange.
:param string vhost: vhost housing the target exchange
:param string xname: name of the target exchange
:param string rt_key: routing key for message
:param string payload: the message body for publishing
:param string payload_enc: encoding of the payload. The only choices
here are 'string' and 'base64'.
:param dict properties: a dict of message properties
:returns: boolean indicating success or failure.
def publish(self, vhost, xname, rt_key, payload, payload_enc='string',
            properties=None):
    """
    Publish a message to an exchange.

    :param string vhost: vhost housing the target exchange
    :param string xname: name of the target exchange
    :param string rt_key: routing key for message
    :param string payload: the message body for publishing
    :param string payload_enc: encoding of the payload; either 'string'
        or 'base64'.
    :param dict properties: a dict of message properties
    :returns: boolean indicating whether the message was routed.
    """
    path = Client.urls['publish_to_exchange'] % (quote(vhost, ''),
                                                 quote(xname, ''))
    message = {
        'routing_key': rt_key,
        'payload': payload,
        'payload_encoding': payload_enc,
        'properties': properties or {},
    }
    result = self._call(path, 'POST', json.dumps(message))
    return result['routed']
Delete the named exchange from the named vhost. The API returns a 204
on success, in which case this method returns True, otherwise the
error is raised.
:param string vhost: Vhost where target exchange was created
:param string name: The name of the exchange to delete.
:returns bool: True on success.
def delete_exchange(self, vhost, name):
    """
    Delete the named exchange from the named vhost. The API returns a 204
    on success, in which case this method returns True, otherwise the
    error is raised.

    :param string vhost: Vhost where target exchange was created
    :param string name: The name of the exchange to delete.
    :returns bool: True on success.
    """
    path = Client.urls['exchange_by_name'] % (quote(vhost, ''),
                                              quote(name, ''))
    # _call raises on any non-success status, so reaching the return
    # means the exchange was removed.
    self._call(path, 'DELETE')
    return True
Get all queues, or all queues in a vhost if vhost is not None.
Returns a list.
:param string vhost: The virtual host to list queues for. If This is
None (the default), all queues for the broker instance
are returned.
:returns: A list of dicts, each representing a queue.
:rtype: list of dicts
def get_queues(self, vhost=None):
    """
    Get all queues, or all queues in a vhost if vhost is not None.
    Returns a list.

    :param string vhost: The virtual host to list queues for. If this is
        None (the default), all queues for the broker instance
        are returned.
    :returns: A list of dicts, each representing a queue.
    :rtype: list of dicts
    """
    if not vhost:
        path = Client.urls['all_queues']
    else:
        path = Client.urls['queues_by_vhost'] % quote(vhost, '')
    # Normalize a falsy API response to an empty list.
    return self._call(path, 'GET') or []
Get a single queue, which requires both vhost and name.
:param string vhost: The virtual host for the queue being requested.
If the vhost is '/', note that it will be translated to '%2F' to
conform to URL encoding requirements.
:param string name: The name of the queue being requested.
:returns: A dictionary of queue properties.
:rtype: dict
def get_queue(self, vhost, name):
    """
    Get a single queue, which requires both vhost and name.

    :param string vhost: The virtual host for the queue being requested.
        If the vhost is '/', note that it will be translated to '%2F' to
        conform to URL encoding requirements.
    :param string name: The name of the queue being requested.
    :returns: A dictionary of queue properties.
    :rtype: dict
    """
    path = Client.urls['queues_by_name'] % (quote(vhost, ''),
                                            quote(name, ''))
    return self._call(path, 'GET')
Get the number of messages currently in a queue. This is a convenience
function that just calls :meth:`Client.get_queue` and pulls
out/returns the 'messages' field from the dictionary it returns.
:param string vhost: The vhost of the queue being queried.
:param string name: The name of the queue to query.
:returns: Number of messages in the queue
:rtype: integer
def get_queue_depth(self, vhost, name):
    """
    Get the number of messages currently in a queue. This is a convenience
    function that just calls :meth:`Client.get_queue` and pulls
    out/returns the 'messages' field from the dictionary it returns.

    :param string vhost: The vhost of the queue being queried.
    :param string name: The name of the queue to query.
    :returns: Number of messages in the queue
    :rtype: integer
    """
    # Delegate to get_queue instead of duplicating its quoting and
    # request logic, exactly as the docstring promises.
    queue = self.get_queue(vhost, name)
    return queue['messages']
Get the number of messages currently sitting in either the queue
names listed in 'names', or all queues in 'vhost' if no 'names' are
given.
:param str vhost: Vhost where queues in 'names' live.
:param list names: OPTIONAL - Specific queues to show depths for. If
None, show depths for all queues in 'vhost'.
def get_queue_depths(self, vhost, names=None):
    """
    Print the number of messages currently sitting in either the queue
    names listed in 'names', or all queues in 'vhost' if no 'names' are
    given. Output format is one "\\tname: depth" line per queue.

    :param str vhost: Vhost where queues in 'names' live.
    :param list names: OPTIONAL - Specific queues to show depths for. If
        None, show depths for all queues in 'vhost'.
    """
    if not names:
        # get all queues in vhost
        path = Client.urls['queues_by_vhost'] % quote(vhost, '')
        queues = self._call(path, 'GET')
        for queue in queues:
            # Print the queue's name rather than the whole queue dict so
            # this branch matches the named-queue output format below.
            depth = queue['messages']
            print("\t%s: %s" % (queue['name'], depth))
    else:
        # get the named queues only. Pass the raw vhost through:
        # get_queue_depth() quotes it itself; quoting here as well would
        # double-encode it (e.g. '/' -> '%2F' -> '%252F').
        for name in names:
            depth = self.get_queue_depth(vhost, name)
            print("\t%s: %s" % (name, depth))
Purge all messages from one or more queues.
:param list queues: A list of ('qname', 'vhost') tuples.
:returns: True on success
def purge_queues(self, queues):
    """
    Purge all messages from one or more queues.

    :param list queues: A list of ('qname', 'vhost') tuples.
    :returns: True on success
    """
    for qname, vh in queues:
        # Each tuple is (queue name, vhost); both go into the URL.
        path = Client.urls['purge_queue'] % (quote(vh, ''),
                                             quote(qname, ''))
        self._call(path, 'DELETE')
    return True
Purge all messages from a single queue. This is a convenience method
so you aren't forced to supply a list containing a single tuple to
the purge_queues method.
:param string vhost: The vhost of the queue being purged.
:param string name: The name of the queue being purged.
:rtype: None
def purge_queue(self, vhost, name):
    """
    Purge all messages from a single queue. This is a convenience method
    so you aren't forced to supply a list containing a single tuple to
    the purge_queues method.

    :param string vhost: The vhost of the queue being purged.
    :param string name: The name of the queue being purged.
    :rtype: None
    """
    path = Client.urls['purge_queue'] % (quote(vhost, ''),
                                         quote(name, ''))
    return self._call(path, 'DELETE')
Create a queue. The API documentation specifies that all of the body
elements are optional, so this method only requires arguments needed
to form the URI
:param string vhost: The vhost to create the queue in.
:param string name: The name of the queue
More on these operations can be found at:
http://www.rabbitmq.com/amqp-0-9-1-reference.html
def create_queue(self, vhost, name, **kwargs):
    """
    Create a queue. The API documentation specifies that all of the body
    elements are optional, so this method only requires arguments needed
    to form the URI; any keyword arguments become the JSON request body.

    :param string vhost: The vhost to create the queue in.
    :param string name: The name of the queue

    More on these operations can be found at:
    http://www.rabbitmq.com/amqp-0-9-1-reference.html
    """
    path = Client.urls['queues_by_name'] % (quote(vhost, ''),
                                            quote(name, ''))
    return self._call(path, 'PUT', json.dumps(kwargs),
                      headers=Client.json_headers)
Deletes the named queue from the named vhost.
:param string vhost: Vhost housing the queue to be deleted.
:param string qname: Name of the queue to delete.
Note that if you just want to delete the messages from a queue, you
should use purge_queue instead of deleting/recreating a queue.
def delete_queue(self, vhost, qname):
    """
    Deletes the named queue from the named vhost.

    :param string vhost: Vhost housing the queue to be deleted.
    :param string qname: Name of the queue to delete.

    Note that if you just want to delete the messages from a queue, you
    should use purge_queue instead of deleting/recreating a queue.
    """
    path = Client.urls['queues_by_name'] % (quote(vhost, ''),
                                            quote(qname, ''))
    return self._call(path, 'DELETE', headers=Client.json_headers)
Gets <count> messages from the queue.
:param string vhost: Name of vhost containing the queue
:param string qname: Name of the queue to consume from
:param int count: Number of messages to get.
:param bool requeue: Whether to requeue the message after getting it.
This will cause the 'redelivered' flag to be set in the message on
the queue.
:param int truncate: The length, in bytes, beyond which the server will
truncate the message before returning it.
:returns: list of dicts. messages[msg-index]['payload'] will contain
the message body.
def get_messages(self, vhost, qname, count=1,
                 requeue=False, truncate=None, encoding='auto'):
    """
    Gets <count> messages from the queue.

    :param string vhost: Name of vhost containing the queue
    :param string qname: Name of the queue to consume from
    :param int count: Number of messages to get.
    :param bool requeue: Whether to requeue the message after getting it.
        This will cause the 'redelivered' flag to be set in the message on
        the queue.
    :param int truncate: The length, in bytes, beyond which the server will
        truncate the message before returning it.
    :returns: list of dicts. messages[msg-index]['payload'] will contain
        the message body.
    """
    payload = {'count': count, 'requeue': requeue, 'encoding': encoding}
    # 'truncate' is only sent when the caller asked for it.
    if truncate:
        payload['truncate'] = truncate
    path = Client.urls['get_from_queue'] % (quote(vhost, ''),
                                            quote(qname, ''))
    return self._call(path, 'POST', json.dumps(payload),
                      headers=Client.json_headers)
:returns: list of dicts, or an empty list if there are no connections.
def get_connections(self):
    """
    List all open connections on the broker.

    :returns: list of dicts, or an empty list if there are no connections.
    """
    return self._call(Client.urls['all_connections'], 'GET')
Get a connection by name. To get the names, use get_connections.
:param string name: Name of connection to get
:returns dict conn: A connection attribute dictionary.
def get_connection(self, name):
    """
    Get a connection by name. To get the names, use get_connections.

    :param string name: Name of connection to get
    :returns dict conn: A connection attribute dictionary.
    """
    path = Client.urls['connections_by_name'] % quote(name, '')
    return self._call(path, 'GET')
Close the named connection. The API returns a 204 on success,
in which case this method returns True, otherwise the
error is raised.
:param string name: The name of the connection to delete.
:returns bool: True on success.
def delete_connection(self, name):
    """
    Close the named connection. The API returns a 204 on success,
    in which case this method returns True, otherwise the
    error is raised.

    :param string name: The name of the connection to delete.
    :returns bool: True on success.
    """
    path = Client.urls['connections_by_name'] % quote(name, '')
    # _call raises on error, so falling through means success.
    self._call(path, 'DELETE')
    return True
Return a list of dicts containing details about broker connections.
:returns: list of dicts
def get_channels(self):
    """
    Return a list of dicts containing details about broker channels.

    :returns: list of dicts
    """
    # Docstring fixed: this hits the 'all_channels' endpoint, so it
    # lists channels, not connections (see get_connections for those).
    path = Client.urls['all_channels']
    chans = self._call(path, 'GET')
    return chans
Get a channel by name. To get the names, use get_channels.
:param string name: Name of channel to get
:returns dict conn: A channel attribute dictionary.
def get_channel(self, name):
    """
    Get a channel by name. To get the names, use get_channels.

    :param string name: Name of channel to get
    :returns dict conn: A channel attribute dictionary.
    """
    path = Client.urls['channels_by_name'] % quote(name, '')
    return self._call(path, 'GET')
:returns: list of dicts
def get_bindings(self):
    """
    List every binding known to the broker.

    :returns: list of dicts
    """
    return self._call(Client.urls['all_bindings'], 'GET')
Return a list of dicts, one dict per binding. The dict format coming
from RabbitMQ for queue named 'testq' is:
{"source":"sourceExch","vhost":"/","destination":"testq",
"destination_type":"queue","routing_key":"*.*","arguments":{},
"properties_key":"%2A.%2A"}
def get_queue_bindings(self, vhost, qname):
    """
    Return a list of dicts, one dict per binding. The dict format coming
    from RabbitMQ for queue named 'testq' is:

    {"source":"sourceExch","vhost":"/","destination":"testq",
     "destination_type":"queue","routing_key":"*.*","arguments":{},
     "properties_key":"%2A.%2A"}
    """
    path = Client.urls['bindings_on_queue'] % (quote(vhost, ''),
                                               quote(qname, ''))
    return self._call(path, 'GET')
Creates a binding between an exchange and a queue on a given vhost.
:param string vhost: vhost housing the exchange/queue to bind
:param string exchange: the target exchange of the binding
:param string queue: the queue to bind to the exchange
:param string rt_key: the routing key to use for the binding
:param list args: extra arguments to associate w/ the binding.
:returns: boolean
def create_binding(self, vhost, exchange, queue, rt_key=None, args=None):
    """
    Creates a binding between an exchange and a queue on a given vhost.

    :param string vhost: vhost housing the exchange/queue to bind
    :param string exchange: the target exchange of the binding
    :param string queue: the queue to bind to the exchange
    :param string rt_key: the routing key to use for the binding
    :param dict args: extra arguments to associate w/ the binding.
    :returns: boolean
    """
    vhost = quote(vhost, '')
    exchange = quote(exchange, '')
    queue = quote(queue, '')
    # The management API defines 'arguments' as a JSON object, so the
    # default must be {} rather than [] (a list is rejected by newer
    # RabbitMQ versions). Callers passing their own value are unaffected.
    body = json.dumps({'routing_key': rt_key, 'arguments': args or {}})
    path = Client.urls['bindings_between_exch_queue'] % (vhost,
                                                         exchange,
                                                         queue)
    binding = self._call(path, 'POST', body=body,
                         headers=Client.json_headers)
    return binding
Deletes a binding between an exchange and a queue on a given vhost.
:param string vhost: vhost housing the exchange/queue to bind
:param string exchange: the target exchange of the binding
:param string queue: the queue to bind to the exchange
:param string rt_key: the routing key to use for the binding
def delete_binding(self, vhost, exchange, queue, rt_key):
    """
    Deletes a binding between an exchange and a queue on a given vhost.

    :param string vhost: vhost housing the exchange/queue to bind
    :param string exchange: the target exchange of the binding
    :param string queue: the queue to bind to the exchange
    :param string rt_key: the routing key to use for the binding.
        NOTE(review): rt_key is interpolated into the URL unencoded;
        the server-side properties_key may differ for keys containing
        special characters — confirm against the management API.
    """
    vhost = quote(vhost, '')
    exchange = quote(exchange, '')
    queue = quote(queue, '')
    # Removed the dead `body = ''` local: DELETE here sends no body.
    path = Client.urls['rt_bindings_between_exch_queue'] % (vhost,
                                                            exchange,
                                                            queue,
                                                            rt_key)
    return self._call(path, 'DELETE', headers=Client.json_headers)
Creates a user.
:param string username: The name to give to the new user
:param string password: Password for the new user
:param string tags: Comma-separated list of tags for the user
:returns: boolean
def create_user(self, username, password, tags=""):
    """
    Creates a user.

    :param string username: The name to give to the new user
    :param string password: Password for the new user
    :param string tags: Comma-separated list of tags for the user
    :returns: boolean
    """
    # Percent-encode the username like every other name-based path in
    # this client, so names containing '/', spaces, etc. form valid URLs.
    path = Client.urls['users_by_name'] % quote(username, '')
    body = json.dumps({'password': password, 'tags': tags})
    return self._call(path, 'PUT', body=body,
                      headers=Client.json_headers)
Deletes a user from the server.
:param string username: Name of the user to delete from the server.
def delete_user(self, username):
    """
    Deletes a user from the server.

    :param string username: Name of the user to delete from the server.
    """
    # Quote the username for URL safety, consistent with the other
    # path-building methods in this client.
    path = Client.urls['users_by_name'] % quote(username, '')
    return self._call(path, 'DELETE')
Redirects to the default wiki index name.
def index(request):
    """
    Redirects to the default wiki index name.
    """
    # The index slug is configurable via settings, with a sane fallback.
    slug = getattr(settings, 'WAKAWAKA_DEFAULT_INDEX', 'WikiIndex')
    return HttpResponseRedirect(
        reverse('wakawaka_page', kwargs={'slug': slug})
    )
Displays a wiki page. Redirects to the edit view if the page doesn't exist.
def page(
    request,
    slug,
    rev_id=None,
    template_name='wakawaka/page.html',
    extra_context=None,
):
    """
    Displays a wiki page. Redirects to the edit view if the page doesn't exist.
    """
    try:
        page = WikiPage.objects.all().get(slug=slug)
    except WikiPage.DoesNotExist:
        # The Page does not exist, redirect to the edit form or
        # deny, if the user has no permission to add pages
        if request.user.is_authenticated:
            return HttpResponseRedirect(
                reverse('wakawaka_edit', kwargs={'slug': slug})
            )
        raise Http404

    rev = page.current
    # Display an older revision if rev_id is given
    if rev_id:
        rev_specific = Revision.objects.all().get(pk=rev_id)
        if rev.pk != rev_specific.pk:
            rev_specific.is_not_current = True
            rev = rev_specific

    template_context = {'page': page, 'rev': rev}
    template_context.update(extra_context or {})
    return render(request, template_name, template_context)
Displays the form for editing and deleting a page.
def edit(
    request,
    slug,
    rev_id=None,
    template_name='wakawaka/edit.html',
    extra_context=None,
    wiki_page_form=WikiPageForm,
    wiki_delete_form=DeleteWikiPageForm,
):
    """
    Displays the form for editing and deleting a page.

    Handles four flows in one view: editing an existing page, reverting
    to an older revision (``rev_id``), creating a page that does not yet
    exist, and deleting a page/revision via the delete form. Permission
    checks gate each flow and return 403 on failure.
    """
    # Get the page for slug and get a specific revision, if given
    try:
        queryset = WikiPage.objects.all()
        page = queryset.get(slug=slug)
        rev = page.current
        initial = {'content': page.current.content}

        # Do not allow editing wiki pages if the user has no permission
        if not request.user.has_perms(
            ('wakawaka.change_wikipage', 'wakawaka.change_revision')
        ):
            return HttpResponseForbidden(
                ugettext('You don\'t have permission to edit pages.')
            )

        if rev_id:
            # There is a specific revision, fetch this
            rev_specific = Revision.objects.get(pk=rev_id)
            if rev.pk != rev_specific.pk:
                rev = rev_specific
                rev.is_not_current = True
                # Pre-fill the form with the old revision's content so
                # saving it effectively reverts the page.
                initial = {
                    'content': rev.content,
                    'message': _('Reverted to "%s"' % rev.message),
                }

    # This page does not exist, create a dummy page
    # Note that it's not saved here
    except WikiPage.DoesNotExist:
        # Do not allow adding wiki pages if the user has no permission
        if not request.user.has_perms(
            ('wakawaka.add_wikipage', 'wakawaka.add_revision')
        ):
            return HttpResponseForbidden(
                ugettext('You don\'t have permission to add wiki pages.')
            )

        page = WikiPage(slug=slug)
        page.is_initial = True
        rev = None
        initial = {
            'content': _('Describe your new page %s here...' % slug),
            'message': _('Initial revision'),
        }

    # Don't display the delete form if the user has nor permission
    delete_form = None
    # The user has permission, then do
    if request.user.has_perm(
        'wakawaka.delete_wikipage'
    ) or request.user.has_perm('wakawaka.delete_revision'):
        delete_form = wiki_delete_form(request)
        if request.method == 'POST' and request.POST.get('delete'):
            delete_form = wiki_delete_form(request, request.POST)
            if delete_form.is_valid():
                return delete_form.delete_wiki(request, page, rev)

    # Page add/edit form
    form = wiki_page_form(initial=initial)
    if request.method == 'POST':
        form = wiki_page_form(data=request.POST)
        if form.is_valid():
            # Check if the content is changed, except there is a rev_id and the
            # user possibly only reverted the HEAD to it
            if (
                not rev_id
                and initial['content'] == form.cleaned_data['content']
            ):
                form.errors['content'] = (_('You have made no changes!'),)
            # Save the form and redirect to the page view
            else:
                try:
                    # Check that the page already exist
                    queryset = WikiPage.objects.all()
                    page = queryset.get(slug=slug)
                except WikiPage.DoesNotExist:
                    # Must be a new one, create that page
                    page = WikiPage(slug=slug)
                    page.save()

                form.save(request, page)
                kwargs = {'slug': page.slug}
                redirect_to = reverse('wakawaka_page', kwargs=kwargs)
                messages.success(
                    request,
                    ugettext('Your changes to %s were saved' % page.slug),
                )
                return HttpResponseRedirect(redirect_to)

    template_context = {
        'form': form,
        'delete_form': delete_form,
        'page': page,
        'rev': rev,
    }
    template_context.update(extra_context or {})
    return render(request, template_name, template_context)
Displays the list of all revisions for a specific WikiPage
def revisions(
    request, slug, template_name='wakawaka/revisions.html', extra_context=None
):
    """
    Displays the list of all revisions for a specific WikiPage
    """
    page = get_object_or_404(WikiPage.objects.all(), slug=slug)
    context = {'page': page}
    context.update(extra_context or {})
    return render(request, template_name, context)
Displays the changes between two revisions.
def changes(
    request, slug, template_name='wakawaka/changes.html', extra_context=None
):
    """
    Displays the changes between two revisions.

    Expects the revision pks in the 'a' and 'b' GET parameters; returns
    400 if either is missing, 404 if either revision or the page itself
    does not exist.
    """
    rev_a_id = request.GET.get('a', None)
    rev_b_id = request.GET.get('b', None)

    # Some stinky fingers manipulated the url
    if not rev_a_id or not rev_b_id:
        return HttpResponseBadRequest('Bad Request')

    try:
        revision_queryset = Revision.objects.all()
        wikipage_queryset = WikiPage.objects.all()
        rev_a = revision_queryset.get(pk=rev_a_id)
        rev_b = revision_queryset.get(pk=rev_b_id)
        page = wikipage_queryset.get(slug=slug)
    except ObjectDoesNotExist:
        raise Http404

    if rev_a.content != rev_b.content:
        d = difflib.unified_diff(
            rev_b.content.splitlines(),
            rev_a.content.splitlines(),
            'Original',
            'Current',
            lineterm='',
        )
        difftext = '\n'.join(d)
    else:
        # Grammar fixed in the user-facing message ("this two" -> "these two").
        difftext = _(u'No changes were made between these two files.')

    template_context = {
        'page': page,
        'diff': difftext,
        'rev_a': rev_a,
        'rev_b': rev_b,
    }
    template_context.update(extra_context or {})
    return render(request, template_name, template_context)
Displays a list of all recent revisions.
def revision_list(
    request, template_name='wakawaka/revision_list.html', extra_context=None
):
    """
    Displays a list of all recent revisions.
    """
    context = {'revision_list': Revision.objects.all()}
    context.update(extra_context or {})
    return render(request, template_name, context)
Displays all Pages
def page_list(
    request, template_name='wakawaka/page_list.html', extra_context=None
):
    """
    Displays all Pages
    """
    context = {
        # Alphabetical listing of every wiki page.
        'page_list': WikiPage.objects.all().order_by('slug'),
        'index_slug': getattr(settings, 'WAKAWAKA_DEFAULT_INDEX', 'WikiIndex'),
    }
    context.update(extra_context or {})
    return render(request, template_name, context)
Deletes the page with all revisions or the revision, based on the
users choice.
Returns a HttpResponseRedirect.
def delete_wiki(self, request, page, rev):
    """
    Deletes the page with all revisions or the revision, based on the
    users choice.

    Returns a HttpResponseRedirect.

    NOTE(review): two permission combinations fall through every branch
    and implicitly return None (e.g. 'rev' chosen with >1 revisions but
    no delete_revision permission) — confirm whether callers rely on
    that before adding a fallback.
    """
    # Delete the page
    if (
        self.cleaned_data.get('delete') == 'page'
        and request.user.has_perm('wakawaka.delete_revision')
        and request.user.has_perm('wakawaka.delete_wikipage')
    ):
        self._delete_page(page)
        messages.success(
            request, ugettext('The page %s was deleted' % page.slug)
        )
        return HttpResponseRedirect(reverse('wakawaka_index'))

    # Revision handling
    if self.cleaned_data.get('delete') == 'rev':
        revision_length = len(page.revisions.all())

        # Delete the revision if there are more than 1 and the user has permission
        if revision_length > 1 and request.user.has_perm(
            'wakawaka.delete_revision'
        ):
            self._delete_revision(rev)
            messages.success(
                request,
                ugettext('The revision for %s was deleted' % page.slug),
            )
            return HttpResponseRedirect(
                reverse('wakawaka_page', kwargs={'slug': page.slug})
            )

        # Do not allow deleting the revision, if it's the only one and the user
        # has no permisson to delete the page.
        if revision_length <= 1 and not request.user.has_perm(
            'wakawaka.delete_wikipage'
        ):
            messages.error(
                request,
                ugettext(
                    # Typo fixed: 'revison' -> 'revision'.
                    'You can not delete this revision for %s because it\'s the '
                    'only one and you have no permission to delete the whole page.'
                    % page.slug
                ),
            )
            return HttpResponseRedirect(
                reverse('wakawaka_page', kwargs={'slug': page.slug})
            )

        # Delete the page and the revision if the user has both permissions
        if (
            revision_length <= 1
            and request.user.has_perm('wakawaka.delete_revision')
            and request.user.has_perm('wakawaka.delete_wikipage')
        ):
            self._delete_page(page)
            messages.success(
                request,
                ugettext(
                    'The page for %s was deleted because you deleted the only revision'
                    % page.slug
                ),
            )
            return HttpResponseRedirect(reverse('wakawaka_index'))
Get the real field from a model given its name.
Handle nested models recursively (aka. ``__`` lookups)
def get_real_field(model, field_name):
    '''
    Get the real field from a model given its name.

    Handle nested models recursively (aka. ``__`` lookups)

    :raises Exception: when an intermediate part of the lookup is not a
        ForeignKey (nested traversal is only possible through FKs).
    '''
    parts = field_name.split('__')
    field = model._meta.get_field(parts[0])
    if len(parts) == 1:
        # Single-part name: ``field`` already is the answer — the
        # original looked it up a second time for no reason.
        return field
    elif isinstance(field, models.ForeignKey):
        # Recurse into the related model with the remaining lookup path.
        return get_real_field(field.rel.to, '__'.join(parts[1:]))
    else:
        raise Exception('Unhandled field: %s' % field_name)
Test if a given field supports regex lookups
def can_regex(self, field):
    '''Test if a given field supports regex lookups'''
    from django.conf import settings
    engine = settings.DATABASES['default']['ENGINE']
    # Only SQLite has field types that cannot take regex lookups here.
    if not engine.endswith('sqlite3'):
        return True
    real_field = get_real_field(self.model, field)
    return not isinstance(real_field, UNSUPPORTED_REGEX_FIELDS)
Get ordering fields for ``QuerySet.order_by``
def get_orders(self):
    '''Get ordering fields for ``QuerySet.order_by``'''
    orders = []
    sorting_cols = self.dt_data['iSortingCols']
    for i in xrange(sorting_cols):
        col_idx = self.dt_data['iSortCol_%s' % i]
        sort_dir = self.dt_data['sSortDir_%s' % i]
        prefix = '-' if sort_dir == DESC else ''
        if hasattr(self, 'sort_col_%s' % col_idx):
            # A custom sort hook exists for this column; it may return a
            # single field name or an iterable of them.
            result = getattr(self, 'sort_col_%s' % col_idx)(prefix)
            if isinstance(result, (bytes, text_type)):
                orders.append(result)
            else:
                orders.extend(result)
        else:
            field = self.get_field(col_idx)
            if RE_FORMATTED.match(field):
                # Formatted spec: order by each embedded token.
                orders.extend('%s%s' % (prefix, token)
                              for token in RE_FORMATTED.findall(field))
            else:
                orders.append('%s%s' % (prefix, field))
    return orders
Filter a queryset with global search
def global_search(self, queryset):
    '''Filter a queryset with global search'''
    needle = self.dt_data['sSearch']
    if needle:
        if self.dt_data['bRegex']:
            # Regex mode: OR one case-insensitive regex filter per field
            # that supports regex lookups on this backend.
            filters = [
                Q(**{'%s__iregex' % field: needle})
                for field in self.get_db_fields()
                if self.can_regex(field)
            ]
            if filters:
                queryset = queryset.filter(reduce(or_, filters))
        else:
            # Plain mode: every whitespace-separated term must match at
            # least one field (icontains), terms AND-ed via chained filters.
            for word in needle.split():
                filters = (Q(**{'%s__icontains' % field: word})
                           for field in self.get_db_fields())
                queryset = queryset.filter(reduce(or_, filters))
    return queryset
Filter a queryset with column search
def column_search(self, queryset):
    '''Filter a queryset with column search'''
    for col in xrange(self.dt_data['iColumns']):
        needle = self.dt_data['sSearch_%s' % col]
        if not needle:
            continue
        if hasattr(self, 'search_col_%s' % col):
            # Delegate to the per-column custom search hook if defined.
            queryset = getattr(self, 'search_col_%s' % col)(needle, queryset)
            continue
        spec = self.get_field(col)
        if RE_FORMATTED.match(spec):
            fields = RE_FORMATTED.findall(spec)
        else:
            fields = [spec]
        if self.dt_data['bRegex_%s' % col]:
            filters = [Q(**{'%s__iregex' % f: needle})
                       for f in fields if self.can_regex(f)]
            if filters:
                queryset = queryset.filter(reduce(or_, filters))
        else:
            for word in needle.split():
                filters = (Q(**{'%s__icontains' % f: word}) for f in fields)
                queryset = queryset.filter(reduce(or_, filters))
    return queryset
Apply Datatables sort and search criterion to QuerySet
def get_queryset(self):
    '''Apply Datatables sort and search criterion to QuerySet'''
    queryset = super(DatatablesView, self).get_queryset()
    # Global search first, then per-column search, then ordering.
    queryset = self.column_search(self.global_search(queryset))
    return queryset.order_by(*self.get_orders())
Get the requested page
def get_page(self, form):
    '''Get the requested page'''
    page_size = form.cleaned_data['iDisplayLength']
    start_index = form.cleaned_data['iDisplayStart']
    paginator = Paginator(self.object_list, page_size)
    # Use floor division: on Python 3 plain `/` yields a float page
    # number, which Paginator.validate_number rejects in newer Django.
    num_page = (start_index // page_size) + 1
    return paginator.page(num_page)
Format a single row (if necessary)
def get_row(self, row):
    '''Format a single row (if necessary)'''
    def render(spec):
        # Formatted specs ('{a} {b}') are interpolated from the row;
        # plain specs are direct column lookups.
        if RE_FORMATTED.match(spec):
            return text_type(spec).format(**row)
        return row[spec]

    if isinstance(self.fields, dict):
        return dict((key, render(value))
                    for key, value in self.fields.items())
    return [render(field) for field in self.fields]
Render Datatables expected JSON format
def render_to_response(self, form, **kwargs):
    '''Render Datatables expected JSON format'''
    page = self.get_page(form)
    total = page.paginator.count
    payload = {
        'iTotalRecords': total,
        'iTotalDisplayRecords': total,
        'sEcho': form.cleaned_data['sEcho'],
        'aaData': self.get_rows(page.object_list),
    }
    return self.json_response(payload)
Grant types:
- token:
An authorization is requested to the end-user by redirecting it to an authorization page hosted
on Dailymotion. Once authorized, a refresh token is requested by the API client to the token
server and stored in the end-user's cookie (or other storage technique implemented by subclasses).
The refresh token is then used to request time limited access token to the token server.
- none / client_credentials:
This grant type is a 2 legs authentication: it doesn't allow to act on behalf of another user.
With this grant type, all API requests will be performed with the user identity of the API key owner.
- password:
This grant type allows to authenticate end-user by directly providing its credentials.
This profile is highly discouraged for web-server workflows. If used, the username and password
MUST NOT be stored by the client.
def set_grant_type(self, grant_type = 'client_credentials', api_key=None, api_secret=None, scope=None, info=None):
    """
    Grant types:
    - token:
      An authorization is requested to the end-user by redirecting it to an authorization page hosted
      on Dailymotion. Once authorized, a refresh token is requested by the API client to the token
      server and stored in the end-user's cookie (or other storage technique implemented by subclasses).
      The refresh token is then used to request time limited access token to the token server.

    - none / client_credentials:
      This grant type is a 2 legs authentication: it doesn't allow to act on behalf of another user.
      With this grant type, all API requests will be performed with the user identity of the API key owner.

    - password:
      This grant type allows to authenticate end-user by directly providing its credentials.
      This profile is highly discouraged for web-server workflows. If used, the username and password
      MUST NOT be stored by the client.

    :param grant_type: 'token'/'authorization', 'none'/'client_credentials'
        or 'password'. Note: any other value is stored as-is — there is no
        catch-all validation branch below.
    :param api_key: API key; required together with api_secret.
    :param api_secret: API secret; required together with api_key.
    :param scope: optional list/tuple of scope strings.
    :param info: optional dict of extra grant info (e.g. redirect_uri,
        username, password); merged into self._grant_info.
    :raises DailymotionClientError: if key/secret are missing, if the
        token grant lacks redirect_uri, if the password grant lacks
        username/password, or if scope is not a list/tuple.
    """
    # Changing the grant invalidates any previously obtained token.
    self.access_token = None
    if api_key and api_secret:
        self._grant_info['key'] = api_key
        self._grant_info['secret'] = api_secret
    else:
        raise DailymotionClientError('Missing API key/secret')
    # Merge caller-provided info into the stored grant info; normalize
    # non-dict values to {} so the membership tests below are safe.
    if isinstance(info, dict):
        self._grant_info.update(info)
    else:
        info = {}
    if self._session_store_enabled and isinstance(info, dict) and info.get('username') is not None:
        self._session_store.set_user(info.get('username'))
    # Canonicalize grant type aliases and validate required info fields.
    if grant_type in ('authorization', 'token'):
        grant_type = 'authorization'
        if 'redirect_uri' not in info:
            raise DailymotionClientError('Missing redirect_uri in grant info for token grant type.')
    elif grant_type in ('client_credentials', 'none'):
        grant_type = 'client_credentials'
    elif grant_type == 'password':
        if 'username' not in info or 'password' not in info:
            raise DailymotionClientError('Missing username or password in grant info for password grant type.')
    self._grant_type = grant_type
    if scope:
        if not isinstance(scope, (list, tuple)):
            raise DailymotionClientError('Invalid scope type: must be a list of valid scopes')
        self._grant_info['scope'] = scope
Decorator to check if Smappee's access token has expired.
If it has, use the refresh token to request a new access token
def authenticated(func):
    """
    Decorator to check if Smappee's access token has expired.
    If it has, use the refresh token to request a new access token
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        instance = args[0]
        # Only re-authenticate when a refresh token exists AND the token
        # expiry has passed (UTC comparison).
        token_expired = (
            instance.refresh_token is not None
            and instance.token_expiration_time <= dt.datetime.utcnow()
        )
        if token_expired:
            instance.re_authenticate()
        return func(*args, **kwargs)
    return wrapper
Join terms together with forward slashes
Parameters
----------
parts
Returns
-------
str
def urljoin(*parts):
    """
    Join terms together with forward slashes

    Parameters
    ----------
    parts

    Returns
    -------
    str
    """
    cleaned = []
    for raw in parts:
        text = str(raw)
        if text.endswith('//'):
            # Scheme-like endings ('http://') keep one trailing slash so
            # the '/'-join below restores the double slash.
            text = text[:-1]
        else:
            text = text.strip('/')
        cleaned.append(text)
    return '/'.join(cleaned)
Uses a Smappee username and password to request an access token,
refresh token and expiry date.
Parameters
----------
username : str
password : str
Returns
-------
requests.Response
access token is saved in self.access_token
refresh token is saved in self.refresh_token
expiration time is set in self.token_expiration_time as
datetime.datetime
def authenticate(self, username, password):
    """
    Uses a Smappee username and password to request an access token,
    refresh token and expiry date.

    Parameters
    ----------
    username : str
    password : str

    Returns
    -------
    requests.Response
        access token is saved in self.access_token
        refresh token is saved in self.refresh_token
        expiration time is set in self.token_expiration_time as
        datetime.datetime
    """
    payload = {
        "grant_type": "password",
        "client_id": self.client_id,
        "client_secret": self.client_secret,
        "username": username,
        "password": password
    }
    response = requests.post(URLS['token'], data=payload)
    response.raise_for_status()
    tokens = response.json()
    self.access_token = tokens['access_token']
    self.refresh_token = tokens['refresh_token']
    self._set_token_expiration_time(expires_in=tokens['expires_in'])
    return response
Saves the token expiration time by adding the 'expires in' parameter
to the current datetime (in utc).
Parameters
----------
expires_in : int
number of seconds from the time of the request until expiration
Returns
-------
nothing
saves expiration time in self.token_expiration_time as
datetime.datetime
def _set_token_expiration_time(self, expires_in):
"""
Saves the token expiration time by adding the 'expires in' parameter
to the current datetime (in utc).
Parameters
----------
expires_in : int
number of seconds from the time of the request until expiration
Returns
-------
nothing
saves expiration time in self.token_expiration_time as
datetime.datetime
"""
self.token_expiration_time = dt.datetime.utcnow() + \
dt.timedelta(0, expires_in) |
Request service locations
Returns
-------
dict
def get_service_locations(self):
    """
    Request service locations

    Returns
    -------
    dict
    """
    auth_header = {"Authorization": "Bearer {}".format(self.access_token)}
    response = requests.get(URLS['servicelocation'], headers=auth_header)
    response.raise_for_status()
    return response.json()
Request service location info
Parameters
----------
service_location_id : int
Returns
-------
dict
def get_service_location_info(self, service_location_id):
    """
    Request service location info

    Parameters
    ----------
    service_location_id : int

    Returns
    -------
    dict
    """
    endpoint = urljoin(URLS['servicelocation'], service_location_id, "info")
    auth_header = {"Authorization": "Bearer {}".format(self.access_token)}
    response = requests.get(endpoint, headers=auth_header)
    response.raise_for_status()
    return response.json()
Request Elektricity consumption and Solar production
for a given service location.
Parameters
----------
service_location_id : int
start : int | dt.datetime | pd.Timestamp
end : int | dt.datetime | pd.Timestamp
start and end support epoch (in milliseconds),
datetime and Pandas Timestamp
aggregation : int
1 = 5 min values (only available for the last 14 days)
2 = hourly values
3 = daily values
4 = monthly values
5 = quarterly values
raw : bool
default False
if True: Return the data "as is" from the server
if False: convert the 'alwaysOn' value to Wh.
(the server returns this value as the sum of the power,
measured in 5 minute blocks. This means that it is 12 times
higher than the consumption in Wh.
See https://github.com/EnergieID/smappy/issues/24)
Returns
-------
dict
def get_consumption(self, service_location_id, start, end, aggregation, raw=False):
    """
    Request Electricity consumption and Solar production
    for a given service location.

    Parameters
    ----------
    service_location_id : int
    start : int | dt.datetime | pd.Timestamp
    end : int | dt.datetime | pd.Timestamp
        start and end support epoch (in milliseconds),
        datetime and Pandas Timestamp
    aggregation : int
        1 = 5 min values (only available for the last 14 days)
        2 = hourly values
        3 = daily values
        4 = monthly values
        5 = quarterly values
    raw : bool
        default False
        if True: Return the data "as is" from the server
        if False: convert the 'alwaysOn' value to Wh.
        (the server returns this value as the sum of the power,
        measured in 5 minute blocks. This means that it is 12 times
        higher than the consumption in Wh.
        See https://github.com/EnergieID/smappy/issues/24)

    Returns
    -------
    dict
    """
    endpoint = urljoin(URLS['servicelocation'], service_location_id,
                       "consumption")
    data = self._get_consumption(url=endpoint, start=start, end=end,
                                 aggregation=aggregation)
    if raw:
        return data
    for entry in data['consumptions']:
        # if the server omits 'alwaysOn', stop converting (matches the
        # original all-or-nothing behaviour of the payload)
        if 'alwaysOn' not in entry:
            break
        entry['alwaysOn'] = entry['alwaysOn'] / 12
    return data
Request consumption for a given sensor in a given service location
Parameters
----------
service_location_id : int
sensor_id : int
start : int | dt.datetime | pd.Timestamp
end : int | dt.datetime | pd.Timestamp
start and end support epoch (in milliseconds),
datetime and Pandas Timestamp
timezone-naive datetimes are assumed to be in UTC
aggregation : int
1 = 5 min values (only available for the last 14 days)
2 = hourly values
3 = daily values
4 = monthly values
5 = quarterly values
Returns
-------
dict
def get_sensor_consumption(self, service_location_id, sensor_id, start,
                           end, aggregation):
    """
    Request consumption for a given sensor in a given service location

    Parameters
    ----------
    service_location_id : int
    sensor_id : int
    start : int | dt.datetime | pd.Timestamp
    end : int | dt.datetime | pd.Timestamp
        start and end support epoch (in milliseconds),
        datetime and Pandas Timestamp
        timezone-naive datetimes are assumed to be in UTC
    aggregation : int
        1 = 5 min values (only available for the last 14 days)
        2 = hourly values
        3 = daily values
        4 = monthly values
        5 = quarterly values

    Returns
    -------
    dict
    """
    endpoint = urljoin(URLS['servicelocation'], service_location_id,
                       "sensor", sensor_id, "consumption")
    return self._get_consumption(url=endpoint, start=start, end=end,
                                 aggregation=aggregation)
Request for both the get_consumption and
get_sensor_consumption methods.
Parameters
----------
url : str
start : dt.datetime
end : dt.datetime
aggregation : int
Returns
-------
dict
def _get_consumption(self, url, start, end, aggregation):
    """
    Shared request logic for the get_consumption and
    get_sensor_consumption methods.

    Parameters
    ----------
    url : str
    start : dt.datetime
    end : dt.datetime
    aggregation : int

    Returns
    -------
    dict
    """
    auth_header = {"Authorization": "Bearer {}".format(self.access_token)}
    query = {
        "aggregation": aggregation,
        "from": self._to_milliseconds(start),
        "to": self._to_milliseconds(end)
    }
    response = requests.get(url, headers=auth_header, params=query)
    response.raise_for_status()
    return response.json()
Request events for a given appliance
Parameters
----------
service_location_id : int
appliance_id : int
start : int | dt.datetime | pd.Timestamp
end : int | dt.datetime | pd.Timestamp
start and end support epoch (in milliseconds),
datetime and Pandas Timestamp
timezone-naive datetimes are assumed to be in UTC
max_number : int, optional
The maximum number of events that should be returned by this query
Default returns all events in the selected period
Returns
-------
dict
def get_events(self, service_location_id, appliance_id, start, end,
               max_number=None):
    """
    Request events for a given appliance

    Parameters
    ----------
    service_location_id : int
    appliance_id : int
    start : int | dt.datetime | pd.Timestamp
    end : int | dt.datetime | pd.Timestamp
        start and end support epoch (in milliseconds),
        datetime and Pandas Timestamp
        timezone-naive datetimes are assumed to be in UTC
    max_number : int, optional
        The maximum number of events that should be returned by this query
        Default returns all events in the selected period

    Returns
    -------
    dict
    """
    endpoint = urljoin(URLS['servicelocation'], service_location_id, "events")
    auth_header = {"Authorization": "Bearer {}".format(self.access_token)}
    query = {
        "from": self._to_milliseconds(start),
        "to": self._to_milliseconds(end),
        "applianceId": appliance_id,
        "maxNumber": max_number
    }
    response = requests.get(endpoint, headers=auth_header, params=query)
    response.raise_for_status()
    return response.json()
Turn actuator on
Parameters
----------
service_location_id : int
actuator_id : int
duration : int, optional
300,900,1800 or 3600 , specifying the time in seconds the actuator
should be turned on. Any other value results in turning on for an
undetermined period of time.
Returns
-------
requests.Response
def actuator_on(self, service_location_id, actuator_id, duration=None):
    """
    Turn actuator on

    Parameters
    ----------
    service_location_id : int
    actuator_id : int
    duration : int, optional
        300,900,1800 or 3600 , specifying the time in seconds the actuator
        should be turned on. Any other value results in turning on for an
        undetermined period of time.

    Returns
    -------
    requests.Response
    """
    # thin wrapper around the shared on/off implementation
    return self._actuator_on_off(
        on_off='on', service_location_id=service_location_id,
        actuator_id=actuator_id, duration=duration)
Turn actuator off
Parameters
----------
service_location_id : int
actuator_id : int
duration : int, optional
300,900,1800 or 3600 , specifying the time in seconds the actuator
should be turned on. Any other value results in turning on for an
undetermined period of time.
Returns
-------
requests.Response
def actuator_off(self, service_location_id, actuator_id, duration=None):
    """
    Turn actuator off

    Parameters
    ----------
    service_location_id : int
    actuator_id : int
    duration : int, optional
        300,900,1800 or 3600 , specifying the duration in seconds for the
        switch action. Any other value results in an undetermined period
        of time.

    Returns
    -------
    requests.Response
    """
    # thin wrapper around the shared on/off implementation
    return self._actuator_on_off(
        on_off='off', service_location_id=service_location_id,
        actuator_id=actuator_id, duration=duration)
Turn actuator on or off
Parameters
----------
on_off : str
'on' or 'off'
service_location_id : int
actuator_id : int
duration : int, optional
300,900,1800 or 3600 , specifying the time in seconds the actuator
should be turned on. Any other value results in turning on for an
undetermined period of time.
Returns
-------
requests.Response
def _actuator_on_off(self, on_off, service_location_id, actuator_id,
                     duration=None):
    """
    Turn actuator on or off

    Parameters
    ----------
    on_off : str
        'on' or 'off'
    service_location_id : int
    actuator_id : int
    duration : int, optional
        300,900,1800 or 3600 , specifying the time in seconds the actuator
        should be switched. Any other value results in switching for an
        undetermined period of time.

    Returns
    -------
    requests.Response
    """
    endpoint = urljoin(URLS['servicelocation'], service_location_id,
                       "actuator", actuator_id, on_off)
    auth_header = {"Authorization": "Bearer {}".format(self.access_token)}
    payload = {} if duration is None else {"duration": duration}
    response = requests.post(endpoint, headers=auth_header, json=payload)
    response.raise_for_status()
    return response
Extends get_consumption() AND get_sensor_consumption(),
parses the results in a Pandas DataFrame
Parameters
----------
service_location_id : int
start : dt.datetime | int
end : dt.datetime | int
timezone-naive datetimes are assumed to be in UTC
epoch timestamps need to be in milliseconds
aggregation : int
sensor_id : int, optional
If a sensor id is passed, api method get_sensor_consumption will
be used otherwise (by default),
the get_consumption method will be used: this returns Electricity
and Solar consumption and production.
localize : bool
default False
default returns timestamps in UTC
if True, timezone is fetched from service location info and
Data Frame is localized
raw : bool
default False
if True: Return the data "as is" from the server
if False: convert the 'alwaysOn' value to Wh.
(the server returns this value as the sum of the power,
measured in 5 minute blocks. This means that it is 12 times
higher than the consumption in Wh.
See https://github.com/EnergieID/smappy/issues/24)
Returns
-------
pd.DataFrame
def get_consumption_dataframe(self, service_location_id, start, end,
                              aggregation, sensor_id=None, localize=False,
                              raw=False):
    """
    Extends get_consumption() AND get_sensor_consumption(),
    parses the results in a Pandas DataFrame

    Parameters
    ----------
    service_location_id : int
    start : dt.datetime | int
    end : dt.datetime | int
        timezone-naive datetimes are assumed to be in UTC
        epoch timestamps need to be in milliseconds
    aggregation : int
    sensor_id : int, optional
        If a sensor id is passed, api method get_sensor_consumption will
        be used otherwise (by default),
        the get_consumption method will be used: this returns Electricity
        and Solar consumption and production.
    localize : bool
        default False
        default returns timestamps in UTC
        if True, timezone is fetched from service location info and
        Data Frame is localized
    raw : bool
        default False
        if True: Return the data "as is" from the server
        if False: convert the 'alwaysOn' value to Wh.
        (the server returns this value as the sum of the power,
        measured in 5 minute blocks. This means that it is 12 times
        higher than the consumption in Wh.
        See https://github.com/EnergieID/smappy/issues/24)

    Returns
    -------
    pd.DataFrame
    """
    import pandas as pd
    if sensor_id is None:
        data = self.get_consumption(
            service_location_id=service_location_id, start=start,
            end=end, aggregation=aggregation, raw=raw)
        # the electricity endpoint nests its values under 'consumptions'
        records = data['consumptions']
    else:
        data = self.get_sensor_consumption(
            service_location_id=service_location_id, sensor_id=sensor_id,
            start=start, end=end, aggregation=aggregation)
        # ...while the sensor endpoint names the same payload 'records'
        records = data['records']
    df = pd.DataFrame.from_dict(records)
    if df.empty:
        return df
    df.set_index('timestamp', inplace=True)
    df.index = pd.to_datetime(df.index, unit='ms', utc=True)
    if localize:
        info = self.get_service_location_info(
            service_location_id=service_location_id)
        df = df.tz_convert(info['timezone'])
    return df
Converts a datetime-like object to epoch, in milliseconds
Timezone-naive datetime objects are assumed to be in UTC
Parameters
----------
time : dt.datetime | pd.Timestamp | int
Returns
-------
int
epoch milliseconds
def _to_milliseconds(self, time):
"""
Converts a datetime-like object to epoch, in milliseconds
Timezone-naive datetime objects are assumed to be in UTC
Parameters
----------
time : dt.datetime | pd.Timestamp | int
Returns
-------
int
epoch milliseconds
"""
if isinstance(time, dt.datetime):
if time.tzinfo is None:
time = time.replace(tzinfo=pytz.UTC)
return int(time.timestamp() * 1e3)
elif isinstance(time, numbers.Number):
return time
else:
raise NotImplementedError("Time format not supported. Use milliseconds since epoch,\
Datetime or Pandas Datetime") |
Because basically every post request is the same
Parameters
----------
url : str
data : str, optional
Returns
-------
requests.Response
def _basic_post(self, url, data=None):
    """
    Shared POST helper: every post request follows the same pattern.

    Parameters
    ----------
    url : str
    data : str, optional

    Returns
    -------
    requests.Response
    """
    full_url = urljoin(self.base_url, url)
    response = self.session.post(full_url, data=data,
                                 headers=self.headers, timeout=5)
    response.raise_for_status()
    return response
Parameters
----------
password : str
default 'admin'
Returns
-------
dict
def logon(self, password='admin'):
    """
    Log on to the local device.

    Parameters
    ----------
    password : str
        default 'admin'

    Returns
    -------
    dict
    """
    response = self._basic_post(url='logon', data=password)
    return response.json()
Takes the sum of all instantaneous active power values
Returns them in kWh
Returns
-------
float
def active_power(self):
    """
    Sum all instantaneous active power readings and convert
    the total from W to kW.

    Returns
    -------
    float
    """
    readings = self.load_instantaneous()
    total_watts = sum(float(entry['value'])
                      for entry in readings
                      if entry['key'].endswith('ActivePower'))
    return total_watts / 1000
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.