text stringlengths 81 112k |
|---|
Creates some aliases for attributes of ``current``.
Args:
current: :attr:`~zengine.engine.WFCurrent` object.
def set_current(self, current):
    """
    Binds shortcut attributes for commonly used members of ``current``.

    Args:
        current: :attr:`~zengine.engine.WFCurrent` object.
    """
    self.current = current
    self.input = current.input
    # self.req = current.request
    # self.resp = current.response
    self.output = current.output
    cmd = current.task_data['cmd']
    self.next_cmd = None
    # A command string may carry a chained follow-up command after the splitter.
    if cmd and NEXT_CMD_SPLITTER in cmd:
        cmd, self.next_cmd = cmd.split(NEXT_CMD_SPLITTER)
    self.cmd = cmd
Renders form. Applies form modifiers, then writes
result to response payload. If supplied, given form
object instance will be used instead of view's
default ObjectForm.
Args:
_form (:py:attr:`~zengine.forms.json_form.JsonForm`):
Form object to override `self.object_form`
def form_out(self, _form=None):
    """
    Serializes a form into the response payload, applying form modifiers.

    If a form instance is supplied it is rendered instead of the view's
    default ObjectForm.

    Args:
        _form (:py:attr:`~zengine.forms.json_form.JsonForm`):
            Form object to override `self.object_form`
    """
    form = _form or self.object_form
    serialized = form.serialize()
    self.output['forms'] = serialized
    self._add_meta_props(form)
    serialized['grouping'] = form.Meta.grouping
    serialized['constraints'] = form.Meta.constraints
    self._patch_form(serialized)
    self.set_client_cmd('form')
Adds given cmd(s) to ``self.output['client_cmd']``
Args:
*args: Client commands.
def set_client_cmd(self, *args):
    """
    Registers the given client command(s) in ``self.output['client_cmd']``.

    Args:
        *args: Client commands.
    """
    for cmd in args:
        self.client_cmd.add(cmd)
    self.output['client_cmd'] = list(self.client_cmd)
Creates new permissions.
def run(self):
    """
    Creates new permissions.

    Pulls permission definitions from the configured permission provider and
    saves any missing ones to the configured permission model. With the
    ``--dry`` flag nothing is written; the run only reports what exists.
    """
    from pyoko.lib.utils import get_object_from_path
    from zengine.config import settings
    model = get_object_from_path(settings.PERMISSION_MODEL)
    perm_provider = get_object_from_path(settings.PERMISSION_PROVIDER)
    existing_perms = []
    new_perms = []
    for code, name, desc in perm_provider():
        code = six.text_type(code)
        if self.manager.args.dry:
            # Dry run: only probe for existence, never save.
            # NOTE(review): neither list is appended to on this path, so the
            # report below shows 0 found / no new perms in dry mode — confirm
            # whether that is intended.
            exists = model.objects.filter(code=code, name=name)
            if exists:
                perm = exists[0]
                new = False
            else:
                new = True
                perm = model(code=code, name=name)
        else:
            try:
                # Lookup by key: perm.key is set to `code` when created below,
                # so the positional arg is presumably treated as the record key
                # — TODO confirm against pyoko's Manager.get().
                perm = model.objects.get(code)
                existing_perms.append(perm)
            except ObjectDoesNotExist:
                perm = model(description=desc, code=code, name=name)
                perm.key = code
                perm.save()
                new_perms.append(perm)
        # perm, new = model.objects.get_or_create({'description': desc}, code=code, name=name)
        # if new:
        #     new_perms.append(perm)
        # else:
        #     existing_perms.append(perm)
    report = "\n\n%s permission(s) were found in DB. " % len(existing_perms)
    if new_perms:
        report += "\n%s new permission record added. " % len(new_perms)
    else:
        report += 'No new perms added. '
    if new_perms:
        if not self.manager.args.dry:
            # New permissions invalidate any cached select-box listing of this model.
            SelectBoxCache.flush(model.__name__)
        report += 'Total %s perms exists.' % (len(existing_perms) + len(new_perms))
        report = "\n + " + "\n + ".join([p.name or p.code for p in new_perms]) + report
    if self.manager.args.dry:
        print("\n~~~~~~~~~~~~~~ DRY RUN ~~~~~~~~~~~~~~\n")
    print(report + "\n")
Creates user, encrypts password.
def run(self):
    """
    Creates user, encrypts password.
    """
    from zengine.models import User
    new_user = User(
        username=self.manager.args.username,
        superuser=self.manager.args.super,
    )
    # set_password stores the hash, not the plain text.
    new_user.set_password(self.manager.args.password)
    new_user.save()
    print("New user created with ID: %s" % new_user.key)
Starts a development server for the zengine application
def run(self):
    """
    Starts a development server for the zengine application
    """
    print("Development server started on http://%s:%s. \n\nPress Ctrl+C to stop\n" % (
        self.manager.args.addr,
        self.manager.args.port)
    )
    server_type = self.manager.args.server_type
    if server_type == 'falcon':
        self.run_with_falcon()
    elif server_type == 'tornado':
        self.run_with_tornado()
runs the tornado/websockets based test server
def run_with_tornado(self):
    """Runs the tornado/websockets based test server."""
    from zengine.tornado_server.server import runserver
    args = self.manager.args
    runserver(args.addr, int(args.port))
runs the falcon/http based test server
def run_with_falcon(self):
    """Runs the falcon/http based test server."""
    from wsgiref import simple_server
    from zengine.server import app
    args = self.manager.args
    server = simple_server.make_server(args.addr, int(args.port), app)
    server.serve_forever()
Starts a development server for the zengine application
def run(self):
    """
    Starts zengine worker process(es) for the application.
    """
    from zengine.wf_daemon import run_workers, Worker
    args = self.manager.args
    worker_count = int(args.workers or 1)
    if not args.daemonize:
        print("Starting worker(s)")
    # Multiple workers (or autoreload) need the supervising runner;
    # a single plain worker runs inline.
    if worker_count > 1 or args.autoreload:
        run_workers(worker_count, args.paths.split(' '), args.daemonize)
    else:
        Worker().run()
Prepare a helper dictionary for the domain to temporarily hold some information.
def _prepare_domain(mapping):
    """Prepare a helper dictionary for the domain to temporarily hold some information."""
    # Parse the '<domain>:<directory>' mapping.
    try:
        domain, source_dir = mapping.split(':')
    except ValueError:
        print("Please provide the sources in the form of '<domain>:<directory>'")
        sys.exit(1)
    try:
        default_language = settings.TRANSLATION_DOMAINS[domain]
    except KeyError:
        print("Unknown domain {domain}, check the settings file to make sure"
              " this domain is set in TRANSLATION_DOMAINS".format(domain=domain))
        sys.exit(1)
    # A temporary `.pot` file will collect the messages extracted for this domain.
    _, pot_path = tempfile.mkstemp(prefix='zengine_i18n_', suffix='.pot')
    return (domain, {
        'default': default_language,
        'pot': pot_path,
        'source': source_dir,
    })
Check that all domains specified in the settings was provided in the options.
def _validate_domains(domains):
    """Check that all domains specified in the settings was provided in the options."""
    configured = set(settings.TRANSLATION_DOMAINS.keys())
    provided = set(domains.keys())
    missing = configured - provided
    if not missing:
        return
    print('The following domains have been set in the configuration, '
          'but their sources were not provided, use the `--source` '
          'option to specify their sources: {domains}'.format(domains=', '.join(missing)))
    sys.exit(1)
Extract the translations into `.pot` files
def _extract_translations(self, domains):
    """Extract the translations into `.pot` files

    Args:
        domains (dict): Maps each domain name to its option dict as built by
            ``_prepare_domain`` (the ``pot`` and ``source`` keys are used here).
    """
    for domain, options in domains.items():
        # Create the extractor
        extractor = babel_frontend.extract_messages()
        extractor.initialize_options()
        # The temporary location to write the `.pot` file
        extractor.output_file = options['pot']
        # Add the comments marked with 'tn:' to the translation file for translators to read. Strip the marker.
        extractor.add_comments = ['tn:']
        extractor.strip_comments = True
        # The directory where the sources for this domain are located
        extractor.input_paths = [options['source']]
        # Pass the metadata to the translator
        extractor.msgid_bugs_address = self.manager.args.contact
        extractor.copyright_holder = self.manager.args.copyright
        extractor.version = self.manager.args.version
        extractor.project = self.manager.args.project
        # finalize_options must run after all options are set and before
        # keywords are touched, per babel's command protocol.
        extractor.finalize_options()
        # Add keywords for lazy translation functions, based on their non-lazy variants
        extractor.keywords.update({
            'gettext_lazy': extractor.keywords['gettext'],
            'ngettext_lazy': extractor.keywords['ngettext'],
            '__': extractor.keywords['gettext'],  # double underscore for lazy
        })
        # Do the extraction
        _run_babel_command(extractor)
Update or initialize the `.po` translation files
def _init_update_po_files(self, domains):
    """Update or initialize the `.po` translation files"""
    for language in settings.TRANSLATIONS:
        for domain, options in domains.items():
            # The domain's default language needs no translation file.
            if language == options['default']:
                continue
            pot = options['pot']
            if os.path.isfile(_po_path(language, domain)):
                # Existing translation: merge updates, keeping finished work.
                self._update_po_file(language, domain, pot)
            else:
                # First time this language appears for the domain: bootstrap it.
                self._init_po_file(language, domain, pot)
Remove the temporary '.pot' files that were created for the domains.
def _cleanup(self, domains):
    """Remove the temporary '.pot' files that were created for the domains."""
    for options in domains.values():
        try:
            os.remove(options['pot'])
        except (IOError, OSError):
            # Best effort: a leftover temp file is harmless.
            pass
read workflows, checks if it's updated,
tries to update if there aren't any running instances of that wf
def run(self):
    """
    read workflows, checks if it's updated,
    tries to update if there aren't any running instances of that wf

    With ``--clear`` only clears the stored workflow models and returns.
    With ``--wf_path`` a single diagram is loaded; otherwise all diagrams
    under the configured workflow package paths are scanned. Diagrams are
    loaded concurrently, then the cached WF spec name list is refreshed.
    """
    from zengine.lib.cache import WFSpecNames
    if self.manager.args.clear:
        self._clear_models()
        return
    if self.manager.args.wf_path:
        paths = self.get_wf_from_path(self.manager.args.wf_path)
    else:
        paths = self.get_workflows()
    # self.count is presumably incremented by load_diagram per loaded file
    # — TODO confirm in load_diagram.
    self.count = 0
    self.do_with_submit(self.load_diagram, paths, threads=self.manager.args.threads)
    WFSpecNames().refresh()
    print("%s BPMN file loaded" % self.count)
load xml from given path
Args:
path: diagram path
Returns:
def get_wf_from_path(self, path):
    """
    load xml from given path

    Args:
        path: diagram path
    Returns:
        Single-item list holding a (diagram name, xml content) tuple.
    """
    with open(path) as diagram_file:
        xml_content = diagram_file.read()
    diagram_name = os.path.basename(os.path.splitext(path)[0])
    return [(diagram_name, xml_content)]
Scans and loads all wf found under WORKFLOW_PACKAGES_PATHS
Yields: XML content of diagram file
def get_workflows(self):
    """
    Scans and loads every workflow found under WORKFLOW_PACKAGES_PATHS.

    Yields:
        (diagram name, XML content) tuples, one per ``.bpmn`` file.
    """
    for package_path in settings.WORKFLOW_PACKAGES_PATHS:
        for bpmn_file in glob.glob("%s/*.bpmn" % package_path):
            diagram_name = os.path.basename(os.path.splitext(bpmn_file)[0])
            with open(bpmn_file) as fp:
                yield diagram_name, fp.read()
The model or models are checked for migrations that need to be done.
Solr is also checked.
def check_migration_and_solr(self):
    """
    The model or models are checked for migrations that need to be done.
    Solr is also checked.
    """
    from pyoko.db.schema_update import SchemaUpdater
    from socket import error as socket_error
    from pyoko.conf import settings
    from importlib import import_module
    # Importing the models module populates pyoko's model registry.
    import_module(settings.MODELS_MODULE)
    registry = import_module('pyoko.model').model_registry
    models = [model for model in registry.get_base_models()]
    try:
        print(__(u"Checking migration and solr ..."))
        # check_only=True: report pending schema changes without applying them.
        updater = SchemaUpdater(models, 1, False)
        updater.run(check_only=True)
    except socket_error as e:
        # NOTE(review): the message mentions redis/rabbitmq although this path
        # checks migration/solr connectivity — confirm the intended wording.
        print(__(u"{0}Error not connected, open redis and rabbitmq{1}").format(CheckList.FAIL,
                                                                               CheckList.ENDC))
Redis checks the connection
It displays on the screen whether or not you have a connection.
def check_redis():
    """
    Checks the Redis connection.

    Prints a colored success/failure line, matching the style of the other
    connectivity checks.
    """
    from pyoko.db.connection import cache
    from redis.exceptions import ConnectionError
    try:
        cache.ping()
        # Fix: the success message previously printed the literal "{0}...{1}"
        # placeholders (string concatenation instead of .format()); format it
        # like every sibling check.
        print(__(u"{0}Redis is working{1}").format(CheckList.OKGREEN, CheckList.ENDC))
    except ConnectionError as e:
        # str(e) instead of e.message: BaseException.message was removed in Python 3.
        print(__(u"{0}Redis is not working{1} ").format(CheckList.FAIL,
                                                        CheckList.ENDC), str(e))
Riak checks the connection
It displays on the screen whether or not you have a connection.
def check_riak():
    """
    Checks the Riak connection.

    Prints a colored success/failure line depending on whether the Riak
    client answers a ping.
    """
    from pyoko.db.connection import client
    from socket import error as socket_error
    try:
        if client.ping():
            print(__(u"{0}Riak is working{1}").format(CheckList.OKGREEN, CheckList.ENDC))
        else:
            print(__(u"{0}Riak is not working{1}").format(CheckList.FAIL, CheckList.ENDC))
    except socket_error as e:
        # str(e) instead of e.message: BaseException.message was removed in Python 3.
        print(__(u"{0}Riak is not working{1}").format(CheckList.FAIL,
                                                      CheckList.ENDC), str(e))
RabbitMQ checks the connection
It displays on the screen whether or not you have a connection.
def check_mq_connection(self):
    """
    Checks the RabbitMQ connection.

    Prints a colored success/failure line; authentication failures are
    reported separately from plain connection failures.
    """
    import pika
    from zengine.client_queue import BLOCKING_MQ_PARAMS
    from pika.exceptions import ProbableAuthenticationError, ConnectionClosed
    try:
        connection = pika.BlockingConnection(BLOCKING_MQ_PARAMS)
        channel = connection.channel()
        if channel.is_open:
            print(__(u"{0}RabbitMQ is working{1}").format(CheckList.OKGREEN, CheckList.ENDC))
        elif channel.is_closed or channel.is_closing:
            # Bug fix: this branch previously read `self.channel`, which is never
            # set on this command object — the channel under test is the local
            # variable created above.
            print(__(u"{0}RabbitMQ is not working!{1}").format(CheckList.FAIL, CheckList.ENDC))
    except ConnectionClosed as e:
        print(__(u"{0}RabbitMQ is not working!{1}").format(CheckList.FAIL, CheckList.ENDC), e)
    except ProbableAuthenticationError as e:
        print(__(u"{0}RabbitMQ username and password wrong{1}").format(CheckList.FAIL,
                                                                       CheckList.ENDC))
It brings the environment variables to the screen.
The user checks to see if they are using the correct variables.
def check_encoding_and_env():
    """
    Prints the filesystem encoding status and the relevant environment
    variables so the user can verify their configuration.
    """
    import sys
    import os
    if sys.getfilesystemencoding() in ['utf-8', 'UTF-8']:
        print(__(u"{0}File system encoding correct{1}").format(CheckList.OKGREEN,
                                                               CheckList.ENDC))
    else:
        print(__(u"{0}File system encoding wrong!!{1}").format(CheckList.FAIL,
                                                               CheckList.ENDC))
    # Only these variables matter for a zengine deployment.
    relevant_vars = {'RIAK_PROTOCOL', 'RIAK_SERVER', 'RIAK_PORT', 'REDIS_SERVER',
                     'DEFAULT_BUCKET_TYPE', 'PYOKO_SETTINGS',
                     'MQ_HOST', 'MQ_PORT', 'MQ_USER', 'MQ_VHOST'}
    for key, value in os.environ.items():
        if key in relevant_vars:
            print(__(u"{0}{1} : {2}{3}").format(CheckList.BOLD, key, value, CheckList.ENDC))
Finds if the game is over.
:type: position: Board
:rtype: bool
def no_moves(position):
    """
    Finds if the game is over.

    :type: position: Board
    :rtype: bool
    """
    # Short-circuits exactly like the original `or`: white is checked first.
    return any(position.no_moves(side) for side in (color.white, color.black))
Finds if particular King is checkmated.
:type: position: Board
:type: input_color: Color
:rtype: bool
def is_checkmate(position, input_color):
    """
    Finds if particular King is checkmated.

    :type: position: Board
    :type: input_color: Color
    :rtype: bool
    """
    # Checkmate requires both: no legal moves AND the king currently in check.
    if not position.no_moves(input_color):
        return False
    return position.get_king(input_color).in_check(position)
Handles pagination of object listings.
Args:
current_page int:
Current page number
query_set (:class:`QuerySet<pyoko:pyoko.db.queryset.QuerySet>`):
Object listing queryset.
per_page int:
Objects per page.
Returns:
QuerySet object, pagination data dict as a tuple
def _paginate(self, current_page, query_set, per_page=10):
    """
    Handles pagination of object listings.

    Args:
        current_page int:
            Current page number
        query_set (:class:`QuerySet<pyoko:pyoko.db.queryset.QuerySet>`):
            Object listing queryset.
        per_page int:
            Objects per page.

    Returns:
        QuerySet object, pagination data dict as a tuple
    """
    total_objects = query_set.count()
    total_pages = int(total_objects / per_page or 1)
    # Orphan objects (the division remainder) are appended to the final page
    # rather than spilling onto an extra, nearly-empty page.
    on_last_page = current_page == total_pages
    current_per_page = per_page + (total_objects % per_page if on_last_page else 0)
    pagination_data = {
        'page': current_page,
        'total_pages': total_pages,
        'total_objects': total_objects,
        'per_page': current_per_page,
    }
    offset = (current_page - 1) * per_page
    return query_set.set_params(rows=current_per_page, start=offset), pagination_data
Creates a message for the given channel.
.. code-block:: python
# request:
{
'view':'_zops_create_message',
'message': {
'channel': key, # of channel
'body': string, # message text.,
'type': int, # zengine.messaging.model.MSG_TYPES,
'attachments': [{
'description': string, # can be blank,
'name': string, # file name with extension,
'content': string, # base64 encoded file content
}]}
# response:
{
'status': 'Created',
'code': 201,
'msg_key': key, # key of the message object,
}
def create_message(current):
    """
    Creates a message for the given channel.

    .. code-block:: python

        # request:
        {
            'view':'_zops_create_message',
            'message': {
                'channel': key,     # of channel
                'body': string,     # message text.,
                'type': int,        # zengine.messaging.model.MSG_TYPES,
                'attachments': [{
                    'description': string,  # can be blank,
                    'name': string,         # file name with extension,
                    'content': string,      # base64 encoded file content
                }]}

        # response:
        {
            'status': 'Created',
            'code': 201,
            'msg_key': key,    # key of the message object,
        }
    """
    msg = current.input['message']
    # NOTE(review): 'title' and 'receiver' are read from the message dict even
    # though the documented request payload above does not list them — confirm.
    msg_obj = Channel.add_message(msg['channel'], body=msg['body'], typ=msg['type'],
                                  sender=current.user,
                                  title=msg['title'], receiver=msg['receiver'] or None)
    current.output = {
        'msg_key': msg_obj.key,
        'status': 'Created',
        'code': 201
    }
    # Bug fix: the key is 'attachments' (plural), as documented above and as
    # read in the loop; the old check for 'attachment' meant attachments were
    # silently never saved.
    if 'attachments' in msg:
        for atch in msg['attachments']:
            typ = current._dedect_file_type(atch['name'], atch['content'])
            Attachment(channel_id=msg['channel'], msg=msg_obj, name=atch['name'],
                       file=atch['content'], description=atch['description'], typ=typ).save()
Initial display of channel content.
Returns channel description, members, no of members, last 20 messages etc.
.. code-block:: python
# request:
{
'view':'_zops_show_channel',
'key': key,
}
# response:
{
'channel_key': key,
'description': string,
'no_of_members': int,
'member_list': [
{'name': string,
'is_online': bool,
'avatar_url': string,
}],
'name': string,
'last_messages': [MSG_DICT]
'status': 'OK',
'code': 200
}
def show_channel(current, waited=False):
    """
    Initial display of channel content.
    Returns channel description, members, no of members, last 20 messages etc.

    .. code-block:: python

        # request:
        {
            'view':'_zops_show_channel',
            'key': key,
        }

        # response:
        {
            'channel_key': key,
            'description': string,
            'no_of_members': int,
            'member_list': [
                {'name': string,
                 'is_online': bool,
                 'avatar_url': string,
                }],
            'name': string,
            'last_messages': [MSG_DICT]
            'status': 'OK',
            'code': 200
        }

    Args:
        current: request/session object carrying ``input`` and ``output``.
        waited (bool): not referenced in this body — presumably used by a
            caller or decorator; TODO confirm its purpose.
    """
    ch = Channel(current).objects.get(current.input['key'])
    # The subscription carries the user-specific view of the channel
    # (display name, available actions).
    sbs = ch.get_subscription_for_user(current.user_id)
    current.output = {'key': current.input['key'],
                      'description': ch.description,
                      'name': sbs.name,
                      'actions': sbs.get_actions(),
                      'avatar_url': ch.get_avatar(current.user),
                      'no_of_members': len(ch.subscriber_set),
                      'member_list': [{'name': sb.user.full_name,
                                       'is_online': sb.user.is_online(),
                                       'avatar_url': sb.user.get_avatar_url()
                                       } for sb in ch.subscriber_set.objects.all()],
                      'last_messages': [],
                      'status': 'OK',
                      'code': 200
                      }
    # insert(0, ...) flips the iteration order — presumably get_last_messages()
    # yields newest-first, producing a chronological list; confirm.
    for msg in ch.get_last_messages():
        current.output['last_messages'].insert(0, msg.serialize(current.user))
Get old messages for a channel. 20 messages per request
.. code-block:: python
# request:
{
'view':'_zops_channel_history,
'channel_key': key,
'timestamp': datetime, # timestamp data of oldest shown message
}
# response:
{
'messages': [MSG_DICT, ],
'status': 'OK',
'code': 200
}
def channel_history(current):
    """
    Get old messages for a channel. 20 messages per request

    .. code-block:: python

        # request:
        {
            'view':'_zops_channel_history,
            'channel_key': key,
            'timestamp': datetime, # timestamp data of oldest shown message
        }

        # response:
        {
            'messages': [MSG_DICT, ],
            'status': 'OK',
            'code': 200
        }

    NOTE(review): the implementation sets ``code`` to 201 although the
    docstring above advertises 200 — confirm which one clients expect.
    """
    current.output = {
        'status': 'OK',
        'code': 201,
        'messages': []
    }
    # Take the 20 messages at or before the given timestamp; insert(0, ...)
    # flips them into chronological order.
    for msg in list(Message.objects.filter(channel_id=current.input['channel_key'],
                                           updated_at__lte=current.input['timestamp'])[:20]):
        current.output['messages'].insert(0, msg.serialize(current.user))
    # FIXME: looks like pyoko's __lt is broken
    # TODO: convert lte to lt and remove this block, when __lt filter fixed
    # Because __lte includes the boundary message the client already has,
    # drop the newest entry from the result.
    if current.output['messages']:
        current.output['messages'].pop(-1)
Push timestamp of latest message of an ACTIVE channel.
This view should be called with timestamp of latest message;
- When user opens (clicks on) a channel.
- Periodically (eg: setInterval for 15secs) while user staying in a channel.
.. code-block:: python
# request:
{
'view':'_zops_last_seen_msg',
'channel_key': key,
'key': key,
'timestamp': datetime,
}
# response:
{
'status': 'OK',
'code': 200,
}
def report_last_seen_message(current):
    """
    Push timestamp of latest message of an ACTIVE channel.

    This view should be called with timestamp of latest message;

    - When user opens (clicks on) a channel.
    - Periodically (eg: setInterval for 15secs) while user staying in a channel.

    .. code-block:: python

        # request:
        {
            'view':'_zops_last_seen_msg',
            'channel_key': key,
            'key': key,
            'timestamp': datetime,
        }

        # response:
        {
            'status': 'OK',
            'code': 200,
        }
    """
    subscription = Subscriber(current).objects.filter(
        channel_id=current.input['channel_key'],
        user_id=current.user_id)[0]
    subscription.last_seen_msg_time = current.input['timestamp']
    subscription.save()
    current.output = {'status': 'OK', 'code': 200}
List channel memberships of current user
.. code-block:: python
# request:
{
'view':'_zops_list_channels',
}
# response:
{
'channels': [
{'name': string, # name of channel
'key': key, # key of channel
'unread': int, # unread message count
'type': int, # channel type,
# 15: public channels (chat room/broadcast channel distinction
comes from "read_only" flag)
# 10: direct channels
# 5: one and only private channel which is "Notifications"
'read_only': boolean,
# true if this is a read-only subscription to a broadcast channel
# false if it's a public chat room
'actions':[('action name', 'view name'),]
},]
}
def list_channels(current):
    """
    List channel memberships of current user

    .. code-block:: python

        # request:
        {
            'view':'_zops_list_channels',
        }

        # response:
        {
            'channels': [
                {'name': string,    # name of channel
                 'key': key,        # key of channel
                 'unread': int,     # unread message count
                 'type': int,       # channel type,
                                    # 15: public channels (chat room/broadcast channel
                                    #     distinction comes from "read_only" flag)
                                    # 10: direct channels
                                    # 5: one and only private channel which is "Notifications"
                 'read_only': boolean,
                                    # true if this is a read-only subscription to a
                                    # broadcast channel, false if it's a public chat room
                 'actions':[('action name', 'view name'),]
                },]
        }
    """
    listings = []
    current.output = {
        'status': 'OK',
        'code': 200,
        'channels': listings}
    for sbs in current.user.subscriptions.objects.filter(is_visible=True):
        try:
            listings.append(sbs.get_channel_listing())
        except ObjectDoesNotExist:
            # FIXME: This should not happen,
            log.exception("UNPAIRED DIRECT EXCHANGES!!!!")
            sbs.delete()
Number of unread messages for current user
.. code-block:: python
# request:
{
'view':'_zops_unread_count',
}
# response:
{
'status': 'OK',
'code': 200,
'notifications': int,
'messages': int,
}
def unread_count(current):
    """
    Number of unread messages for current user

    .. code-block:: python

        # request:
        {
            'view':'_zops_unread_count',
        }

        # response:
        {
            'status': 'OK',
            'code': 200,
            'notifications': int,
            'messages': int,
        }
    """
    counts = {'notifications': 0, 'messages': 0}
    for sbs in current.user.subscriptions.objects.filter(is_visible=True):
        try:
            # The user's private exchange is the notification channel;
            # everything else counts as regular messages.
            is_notification = sbs.channel.key == current.user.prv_exchange
            bucket = 'notifications' if is_notification else 'messages'
            counts[bucket] += sbs.unread_count()
        except ObjectDoesNotExist:
            # FIXME: This should not happen,
            log.exception("MULTIPLE PRV EXCHANGES!!!!")
            sbs.delete()
    current.output = {
        'status': 'OK',
        'code': 200,
        'notifications': counts['notifications'],
        'messages': counts['messages']
    }
Returns last N notifications for current user
.. code-block:: python
# request:
{
'view':'_zops_unread_messages',
'amount': int, # Optional, defaults to 8
}
# response:
{
'status': 'OK',
'code': 200,
'notifications': [{'title':string,
'body': string,
'channel_key': key,
'type': int,
'url': string, # could be a in app JS URL prefixed with "#" or
# full blown URL prefixed with "http"
'message_key': key,
'timestamp': datetime},],
}
def get_notifications(current):
    """
    Returns last N notifications for current user

    .. code-block:: python

        # request:
        {
            'view':'_zops_unread_messages',
            'amount': int, # Optional, defaults to 8
        }

        # response:
        {
            'status': 'OK',
            'code': 200,
            'notifications': [{'title':string,
                               'body': string,
                               'channel_key': key,
                               'type': int,
                               'url': string, # could be a in app JS URL prefixed with "#" or
                                              # full blown URL prefixed with "http"
                               'message_key': key,
                               'timestamp': datetime},],
        }
    """
    current.output = {
        'status': 'OK',
        'code': 200,
        'notifications': [],
    }
    amount = current.input.get('amount', 8)
    try:
        # Each user is expected to have exactly one subscription to their
        # private notification exchange.
        notif_sbs = current.user.subscriptions.objects.get(channel_id=current.user.prv_exchange)
    except MultipleObjectsReturned:
        # FIXME: This should not happen,
        log.exception("MULTIPLE PRV EXCHANGES!!!!")
        # Self-heal: drop one duplicate subscription and carry on with the other.
        sbs = current.user.subscriptions.objects.filter(channel_id=current.user.prv_exchange)
        sbs[0].delete()
        notif_sbs = sbs[1]
    # insert(0, ...) flips the iteration order — presumably the query yields
    # newest-first; confirm against Message's default ordering.
    for msg in notif_sbs.channel.message_set.objects.all()[:amount]:
        current.output['notifications'].insert(0, {
            'title': msg.msg_title,
            'body': msg.body,
            'type': msg.typ,
            'url': msg.url,
            'channel_key': msg.channel.key,
            'message_key': msg.key,
            'timestamp': msg.updated_at})
Create a public channel. Can be a broadcast channel or normal chat room.
Chat room and broadcast distinction will be made at user subscription phase.
.. code-block:: python
# request:
{
'view':'_zops_create_channel',
'name': string,
'description': string,
}
# response:
{
'description': string,
'name': string,
'no_of_members': int,
'member_list': [
{'name': string,
'is_online': bool,
'avatar_url': string,
}],
'last_messages': [MSG_DICT]
'status': 'Created',
'code': 201,
'key': key, # of just created channel
}
def create_channel(current):
    """
    Create a public channel. Can be a broadcast channel or normal chat room.

    Chat room and broadcast distinction will be made at user subscription phase.

    .. code-block:: python

        # request:
        {
            'view':'_zops_create_channel',
            'name': string,
            'description': string,
        }

        # response:
        {
            'description': string,
            'name': string,
            'no_of_members': int,
            'member_list': [
                {'name': string,
                 'is_online': bool,
                 'avatar_url': string,
                }],
            'last_messages': [MSG_DICT]
            'status': 'Created',
            'code': 201,
            'key': key, # of just created channel
        }
    """
    new_channel = Channel(name=current.input['name'],
                          description=current.input['description'],
                          owner=current.user,
                          typ=15).save()
    # The owner is auto-subscribed as a manager who cannot leave the channel.
    with BlockSave(Subscriber):
        Subscriber.objects.get_or_create(user=new_channel.owner,
                                         channel=new_channel,
                                         can_manage=True,
                                         can_leave=False)
    # Delegate the bulk of the response payload to show_channel.
    current.input['key'] = new_channel.key
    show_channel(current)
    current.output.update({'status': 'Created', 'code': 201})
Subscribe member(s) to a channel
.. code-block:: python
# request:
{
'view':'_zops_add_members',
'channel_key': key,
'read_only': boolean, # true if this is a Broadcast channel,
# false if it's a normal chat room
'members': [key, key],
}
# response:
{
'existing': [key,], # existing members
'newly_added': [key,], # newly added members
'status': 'Created',
'code': 201
}
def add_members(current):
    """
    Subscribe member(s) to a channel

    .. code-block:: python

        # request:
        {
            'view':'_zops_add_members',
            'channel_key': key,
            'read_only': boolean, # true if this is a Broadcast channel,
                                  # false if it's a normal chat room
            'members': [key, key],
        }

        # response:
        {
            'existing': [key,],    # existing members
            'newly_added': [key,], # newly added members
            'status': 'Created',
            'code': 201
        }
    """
    existing, newly_added = [], []
    read_only = current.input['read_only']
    channel_key = current.input['channel_key']
    for member_key in current.input['members']:
        _, created = Subscriber(current).objects.get_or_create(
            user_id=member_key, read_only=read_only, channel_id=channel_key)
        (newly_added if created else existing).append(member_key)
    current.output = {
        'existing': existing,
        'newly_added': newly_added,
        'status': 'OK',
        'code': 201
    }
Subscribe users of a given unit to given channel
JSON API:
.. code-block:: python
# request:
{
'view':'_zops_add_unit_to_channel',
'unit_key': key,
'channel_key': key,
'read_only': boolean, # true if this is a Broadcast channel,
# false if it's a normal chat room
}
# response:
{
'existing': [key,], # existing members
'newly_added': [key,], # newly added members
'status': 'Created',
'code': 201
}
def add_unit_to_channel(current):
    """
    Subscribe users of a given unit to given channel

    JSON API:

    .. code-block:: python

        # request:
        {
            'view':'_zops_add_unit_to_channel',
            'unit_key': key,
            'channel_key': key,
            'read_only': boolean, # true if this is a Broadcast channel,
                                  # false if it's a normal chat room
        }

        # response:
        {
            'existing': [key,],    # existing members
            'newly_added': [key,], # newly added members
            'status': 'Created',
            'code': 201
        }
    """
    existing, newly_added = [], []
    read_only = current.input['read_only']
    channel_key = current.input['channel_key']
    for member_key in UnitModel.get_user_keys(current, current.input['unit_key']):
        _, created = Subscriber(current).objects.get_or_create(
            user_id=member_key, read_only=read_only, channel_id=channel_key)
        (newly_added if created else existing).append(member_key)
    current.output = {
        'existing': existing,
        'newly_added': newly_added,
        'status': 'OK',
        'code': 201
    }
Search users for adding to a public room
or creating one to one direct messaging
.. code-block:: python
# request:
{
'view':'_zops_search_user',
'query': string,
}
# response:
{
'results': [('full_name', 'key', 'avatar_url'), ],
'status': 'OK',
'code': 200
}
def search_user(current):
    """
    Search users for adding to a public room or creating one to one
    direct messaging.

    .. code-block:: python

        # request:
        {
            'view':'_zops_search_user',
            'query': string,
        }

        # response:
        {
            'results': [('full_name', 'key', 'avatar_url'), ],
            'status': 'OK',
            'code': 200
        }
    """
    results = []
    current.output = {
        'results': results,
        'status': 'OK',
        'code': 201
    }
    matches = UserModel(current).objects.exclude(key=current.user_id).search_on(
        *settings.MESSAGING_USER_SEARCH_FIELDS,
        contains=current.input['query'])
    # FIXME: somehow exclude(key=current.user_id) not working with search_on()
    for user in matches:
        if user.key == current.user_id:
            continue
        results.append((user.full_name, user.key, user.get_avatar_url()))
Search on units for subscribing it's users to a channel
.. code-block:: python
# request:
{
'view':'_zops_search_unit',
'query': string,
}
# response:
{
'results': [('name', 'key'), ],
'status': 'OK',
'code': 200
}
def search_unit(current):
    """
    Search on units for subscribing it's users to a channel

    .. code-block:: python

        # request:
        {
            'view':'_zops_search_unit',
            'query': string,
        }

        # response:
        {
            'results': [('name', 'key'), ],
            'status': 'OK',
            'code': 200
        }
    """
    results = []
    current.output = {
        'results': results,
        'status': 'OK',
        'code': 201
    }
    matches = UnitModel(current).objects.search_on(*settings.MESSAGING_UNIT_SEARCH_FIELDS,
                                                   contains=current.input['query'])
    for unit in matches:
        results.append((unit.name, unit.key))
Create a One-To-One channel between current and selected user.
.. code-block:: python
# request:
{
'view':'_zops_create_direct_channel',
'user_key': key,
}
# response:
{
'description': string,
'no_of_members': int,
'member_list': [
{'name': string,
'is_online': bool,
'avatar_url': string,
}],
'last_messages': [MSG_DICT]
'status': 'Created',
'code': 201,
'channel_key': key, # of just created channel
'name': string, # name of subscribed channel
}
def create_direct_channel(current):
    """
    Create a One-To-One channel between current and selected user.

    .. code-block:: python

        # request:
        {
            'view':'_zops_create_direct_channel',
            'user_key': key,
        }

        # response:
        {
            'description': string,
            'no_of_members': int,
            'member_list': [
                {'name': string,
                 'is_online': bool,
                 'avatar_url': string,
                }],
            'last_messages': [MSG_DICT]
            'status': 'Created',
            'code': 201,
            'channel_key': key, # of just created channel
            'name': string,     # name of subscribed channel
        }
    """
    channel, sub_name = Channel.get_or_create_direct_channel(current.user_id,
                                                             current.input['user_key'])
    # Delegate the bulk of the response payload to show_channel.
    current.input['key'] = channel.key
    show_channel(current)
    current.output.update({'status': 'Created', 'code': 201})
Search in messages. If "channel_key" given, search will be limited to that channel,
otherwise search will be performed on all of user's subscribed channels.
.. code-block:: python
# request:
{
'view':'_zops_search_unit,
'channel_key': key,
'query': string,
'page': int,
}
# response:
{
'results': [MSG_DICT, ],
'pagination': {
'page': int, # current page
'total_pages': int,
'total_objects': int,
'per_page': int, # object per page
},
'status': 'OK',
'code': 200
}
def find_message(current):
    """
    Full-text search over messages (``msg_title``, ``body``, ``url``).

    When ``channel_key`` is supplied the search is limited to that channel,
    otherwise it spans every channel the user subscribes to. Results are
    paginated via ``_paginate``.

    NOTE(review): the response carries ``code`` 201 although the original
    request doc advertised 200 — behavior kept as-is for client compatibility.
    """
    current.output = {
        'results': [],
        'status': 'OK',
        'code': 201
    }
    matches = Message(current).objects.search_on(['msg_title', 'body', 'url'],
                                                 contains=current.input['query'])
    channel_key = current.input['channel_key']
    if channel_key:
        matches = matches.filter(channel_id=channel_key)
    else:
        my_channels = Subscriber.objects.filter(user_id=current.user_id).values_list(
            "channel_id", flatten=True)
        matches = matches.filter(channel_id__in=my_channels)
    matches, pagination_data = _paginate(current_page=current.input['page'], query_set=matches)
    current.output['pagination'] = pagination_data
    current.output['results'] = [msg.serialize(current.user) for msg in matches]
Delete a channel
.. code-block:: python
# request:
{
'view':'_zops_delete_channel',
'channel_key': key,
}
# response:
{
'status': 'OK',
'code': 200
}
def delete_channel(current):
    """
    Delete a channel owned by the current user, along with all of its
    subscriptions and messages.

    Expects ``current.input['channel_key']``. An ObjectDoesNotExist from the
    ownership lookup propagates unchanged.
    """
    channel_key = current.input['channel_key']
    # Ownership check and delete in one go: get() raises if not the owner.
    Channel(current).objects.get(owner_id=current.user_id, key=channel_key).delete()
    Subscriber.objects.filter(channel_id=channel_key).delete()
    Message.objects.filter(channel_id=channel_key).delete()
    current.output = {'status': 'Deleted', 'code': 200}
Update channel name or description
.. code-block:: python
# request:
{
'view':'_zops_edit_channel',
'channel_key': key,
'name': string,
'description': string,
}
# response:
{
'status': 'OK',
'code': 200
}
def edit_channel(current):
    """
    Rename a channel and/or update its description.

    Only the channel owner may edit. The new name is copied onto every
    subscriber record so channel listings stay in sync.
    """
    channel = Channel(current).objects.get(owner_id=current.user_id,
                                           key=current.input['channel_key'])
    channel.name = current.input['name']
    channel.description = current.input['description']
    channel.save()
    # Propagate the rename to all subscriptions of this channel.
    for subscription in channel.subscriber_set.objects.all():
        subscription.name = channel.name
        subscription.save()
    current.output = {'status': 'OK', 'code': 200}
Pin a channel to top of channel list
.. code-block:: python
# request:
{
'view':'_zops_pin_channel',
'channel_key': key,
}
# response:
{
'status': 'OK',
'code': 200
}
def pin_channel(current):
    """
    Pin one of the user's subscriptions so the channel sorts to the top of
    the channel list. Responds 404 when the subscription does not exist.
    """
    try:
        subscription_qs = Subscriber(current).objects.filter(
            user_id=current.user_id,
            channel_id=current.input['channel_key'])
        subscription_qs.update(pinned=True)
        current.output = {'status': 'OK', 'code': 200}
    except ObjectDoesNotExist:
        raise HTTPError(404, "")
Delete a message
.. code-block:: python
# request:
{
'view':'_zops_delete_message',
'message_key': key,
}
# response:
{
'key': key,
'status': 'OK',
'code': 200
}
def delete_message(current):
    """
    Delete a message sent by the current user; 404 when it is missing or
    not owned by the user.

    NOTE(review): the message key is read from ``current.input['key']``,
    although the original request doc advertised ``message_key`` — clients
    must send ``key``.
    """
    msg_key = current.input['key']
    try:
        Message(current).objects.get(sender_id=current.user_id, key=msg_key).delete()
    except ObjectDoesNotExist:
        raise HTTPError(404, "")
    current.output = {'status': 'Deleted', 'code': 200, 'key': msg_key}
Edit a message a user own.
.. code-block:: python
# request:
{
'view':'_zops_edit_message',
'message': {
'body': string, # message text
'key': key
}
}
# response:
{
'status': string, # 'OK' for success
'code': int, # 200 for success
}
def edit_message(current):
    """
    Update the body of a message the current user owns.

    Expects ``current.input['message']`` containing ``key`` and the new
    ``body``. Responds 404 when the message does not exist or belongs to
    someone else.
    """
    current.output = {'status': 'OK', 'code': 200}
    payload = current.input['message']
    try:
        message = Message(current).objects.get(sender_id=current.user_id,
                                               key=payload['key'])
        message.body = payload['body']
        message.save()
    except ObjectDoesNotExist:
        raise HTTPError(404, "")
Flag inappropriate messages
.. code-block:: python
# request:
{
'view':'_zops_flag_message',
'message_key': key,
}
# response:
{
'status': 'Created',
'code': 201,
}
def flag_message(current):
    """
    Flag a message as inappropriate (idempotent via get_or_create).

    NOTE(review): the message key is read from ``current.input['key']``,
    although the original request doc said ``message_key`` — clients must
    send ``key``.
    """
    current.output = {'status': 'Created', 'code': 201}
    flag_data = dict(user_id=current.user_id,
                     message_id=current.input['key'])
    FlaggedMessage.objects.get_or_create(**flag_data)
remove flag of a message
.. code-block:: python
# request:
{
'view':'_zops_flag_message',
'key': key,
}
# response:
{
'status': 'OK',
'code': 200,
}
def unflag_message(current):
    """
    Remove the current user's 'inappropriate' flag from a message.

    Reads the message key from ``current.input['key']``.
    """
    current.output = {'status': 'OK', 'code': 200}
    flags = FlaggedMessage(current).objects.filter(user_id=current.user_id,
                                                   message_id=current.input['key'])
    flags.delete()
Returns applicable actions for current user for given message key
.. code-block:: python
# request:
{
'view':'_zops_get_message_actions',
'key': key,
}
# response:
{
'actions':[('name_string', 'cmd_string'),]
'status': string, # 'OK' for success
'code': int, # 200 for success
}
def get_message_actions(current):
    """
    Return the actions the current user may perform on the message whose
    key is in ``current.input['key']``, as (name, cmd) pairs.
    """
    message = Message.objects.get(current.input['key'])
    current.output = {
        'status': 'OK',
        'code': 200,
        'actions': message.get_actions_for(current.user),
    }
Favorite a message
.. code-block:: python
# request:
{
'view':'_zops_add_to_favorites',
'key': key,
}
# response:
{
'status': 'Created',
'code': 201
'favorite_key': key
}
def add_to_favorites(current):
    """
    Mark a message as a favorite of the current user (idempotent via
    get_or_create). Responds with the favorite's key.
    """
    message = Message.objects.get(current.input['key'])
    favorite, _created = Favorite.objects.get_or_create(user_id=current.user_id,
                                                        message=message)
    current.output = {'status': 'Created',
                      'code': 201,
                      'favorite_key': favorite.key}
Remove a message from favorites
.. code-block:: python
# request:
{
'view':'_zops_remove_from_favorites',
'key': key,
}
# response:
{
'status': 'OK',
'code': 200
}
def remove_from_favorites(current):
    """
    Delete one of the current user's favorites; 404 when it does not exist.
    """
    try:
        favorite = Favorite(current).objects.get(user_id=current.user_id,
                                                 key=current.input['key'])
        favorite.delete()
        current.output = {'status': 'OK', 'code': 200}
    except ObjectDoesNotExist:
        raise HTTPError(404, "")
List user's favorites. If "channel_key" given, will return favorites belong to that channel.
.. code-block:: python
# request:
{
'view':'_zops_list_favorites',
'channel_key': key,
}
# response:
{
'status': 'OK',
'code': 200
'favorites':[{'key': key,
'channel_key': key,
'message_key': key,
'message_summary': string, # max 60 char
'channel_name': string,
},]
}
def list_favorites(current):
    """
    List the user's favorites, optionally restricted to one channel via
    ``current.input['channel_key']``.
    """
    favorites = Favorite(current).objects.filter(user_id=current.user_id)
    channel_key = current.input['channel_key']
    if channel_key:
        favorites = favorites.filter(channel_id=channel_key)
    current.output = {
        'status': 'OK',
        'code': 200,
        'favorites': [{'key': fav.key,
                       'channel_key': fav.channel.key,
                       'message_key': fav.message.key,
                       'message_summary': fav.summary,
                       'channel_name': fav.channel_name} for fav in favorites],
    }
Creates a direct messaging channel between two user
Args:
initiator: User, who want's to make first contact
receiver: User, other party
Returns:
(Channel, receiver_name)
def get_or_create_direct_channel(cls, initiator_key, receiver_key):
    """
    Get or create the one-to-one messaging channel between two users.

    Args:
        initiator_key: Key of the user making first contact.
        receiver_key: Key of the other party.

    Returns:
        (Channel, str): The channel and the receiver's full name, which is
        used as the subscription name shown to the initiator.
    """
    # A direct channel's code_name is "<a>_<b>"; either ordering may exist
    # depending on who initiated first, so query both via an OR filter.
    existing = cls.objects.OR().filter(
        code_name='%s_%s' % (initiator_key, receiver_key)).filter(
        code_name='%s_%s' % (receiver_key, initiator_key))
    receiver_name = UserModel.objects.get(receiver_key).full_name
    if existing:
        channel = existing[0]
    else:
        channel_name = '%s_%s' % (initiator_key, receiver_key)
        # typ=10 together with is_direct=True marks the direct-channel kind
        # (presumably a project-wide constant — confirm against the model).
        channel = cls(is_direct=True, code_name=channel_name, typ=10).blocking_save()
    # Each party gets a subscription named after the *other* user, so the
    # channel shows up under the counterpart's name in each user's list.
    with BlockSave(Subscriber):
        Subscriber.objects.get_or_create(channel=channel,
                                         user_id=initiator_key,
                                         name=receiver_name)
        Subscriber.objects.get_or_create(channel=channel,
                                         user_id=receiver_key,
                                         name=UserModel.objects.get(initiator_key).full_name)
    return channel, receiver_name
Creates MQ exchange for this channel
Needs to be defined only once.
def create_exchange(self):
    """
    Declare the fanout MQ exchange for this channel.

    Needs to be done only once per channel.
    """
    mq = self._connect_mq()
    mq.exchange_declare(exchange=self.code_name,
                        exchange_type='fanout',
                        durable=True)
Deletes MQ exchange for this channel
Needs to be defined only once.
def delete_exchange(self):
    """
    Remove this channel's MQ exchange from the broker.

    Needs to be done only once per channel.
    """
    mq = self._connect_mq()
    mq.exchange_delete(exchange=self.code_name)
serialized form for channel listing
def get_channel_listing(self):
    """Serialize this subscription for the channel-list view."""
    channel = self.channel
    return {
        'name': self.name,
        'key': channel.key,
        'type': channel.typ,
        'read_only': self.read_only,
        'is_online': self.is_online(),
        'actions': self.get_actions(),
        'unread': self.unread_count(),
    }
Creates user's private exchange
Actually user's private channel needed to be defined only once,
and this should be happened when user first created.
But since this has a little performance cost,
to be safe we always call it before binding to the channel we currently subscribe
def create_exchange(self):
    """
    Declare the user's private MQ exchange.

    Strictly this is needed only once, when the user is first created, but
    the declare call is cheap, so it is re-issued before every channel bind
    to be safe.
    """
    mq_channel = self._connect_mq()
    mq_channel.exchange_declare(exchange=self.user.prv_exchange,
                                exchange_type='fanout',
                                durable=True)
Binds (subscribes) users private exchange to channel exchange
Automatically called at creation of subscription record.
def bind_to_channel(self):
    """
    Bind (subscribe) the user's private exchange to the channel's exchange.

    Called automatically when a subscription record is created. Binding the
    private exchange to itself is skipped.
    """
    if self.channel.code_name == self.user.prv_exchange:
        return
    mq = self._connect_mq()
    mq.exchange_bind(source=self.channel.code_name,
                     destination=self.user.prv_exchange)
Serializes message for given user.
Note:
Should be called before first save(). Otherwise "is_update" will get wrong value.
Args:
user: User object
Returns:
Dict. JSON serialization ready dictionary object
def serialize(self, user=None):
    """
    Build a JSON-ready dict representation of this message.

    Note:
        Must be called before the first save(); afterwards the 'unsaved'
        marker attribute is gone and 'is_update' flips to True.

    Args:
        user: User object (not used by this method; kept for interface
            compatibility).

    Returns:
        dict: JSON-serializable message payload.
    """
    sender = self.sender
    return {
        'content': self.body,
        'type': self.typ,
        'updated_at': self.updated_at,
        'timestamp': self.updated_at,
        'is_update': not hasattr(self, 'unsaved'),
        'attachments': [att.serialize() for att in self.attachment_set],
        'title': self.msg_title,
        'url': self.url,
        'sender_name': sender.full_name,
        'sender_key': sender.key,
        'channel_key': self.channel.key,
        'cmd': 'message',
        'avatar_url': sender.avatar,
        'key': self.key,
    }
Re-publishes updated message
def _republish(self):
    """Broadcast the updated message again on its channel's exchange."""
    mq_channel = self.channel._connect_mq()
    payload = json.dumps(self.serialize())
    mq_channel.basic_publish(exchange=self.channel.key,
                             routing_key='',
                             body=payload)
Provide a reasonable default crawl name using the user name and date
def defaultCrawlId():
    """
    Build a default crawl id from the user name and the current time,
    e.g. ``crawl_alice_2019-04-24T00_00_02.123456`` (colons replaced so the
    id is filesystem-safe).
    """
    stamp = datetime.now().isoformat().replace(':', '_')
    return 'crawl_%s_%s' % (getuser(), stamp)
Run Nutch command using REST API.
def main(argv=None):
    """Run Nutch command using REST API."""
    global Verbose, Mock
    if argv is None:
        argv = sys.argv
    # Requires at least: prog cmd crawlId confId urlDir
    if len(argv) < 5: die('Bad args')
    try:
        opts, argv = getopt.getopt(argv[1:], 'hs:p:mv',
                                   ['help', 'server=', 'port=', 'mock', 'verbose'])
    except getopt.GetoptError as err:
        # print help information and exit:
        print(err)  # will print something like "option -a not recognized"
        die()
    serverEndpoint = DefaultServerEndpoint
    # TODO: Fix this
    for opt, val in opts:
        if opt in ('-h', '--help'): echo2(USAGE); sys.exit()
        elif opt in ('-s', '--server'): serverEndpoint = val
        elif opt in ('-p', '--port'): serverEndpoint = 'http://localhost:%s' % val
        elif opt in ('-m', '--mock'): Mock = 1
        elif opt in ('-v', '--verbose'): Verbose = 1
        else: die(USAGE)
    # Positional arguments remaining after getopt stripped the options.
    cmd = argv[0]
    crawlId = argv[1]
    confId = argv[2]
    urlDir = argv[3]
    args = {}
    # NOTE(review): eval() of a CLI argument — acceptable only for trusted
    # input; consider ast.literal_eval for dict literals.
    if len(argv) > 4: args = eval(argv[4])
    nt = Nutch(crawlId, confId, serverEndpoint, urlDir)
    nt.Jobs().create(cmd, **args)
Call the Nutch Server, do some error checking, and return the response.
:param verb: One of nutch.RequestVerbs
:param servicePath: path component of URL to append to endpoint, e.g. '/config'
:param data: Data to attach to this request
:param headers: headers to attach to this request, default are JsonAcceptHeader
:param forceText: don't trust the response headers and just get the text
:param sendJson: Whether to treat attached data as JSON or not
def call(self, verb, servicePath, data=None, headers=None, forceText=False, sendJson=True):
    """Call the Nutch Server, do some error checking, and return the response.

    :param verb: One of nutch.RequestVerbs
    :param servicePath: path component of URL to append to endpoint, e.g. '/config'
    :param data: Data to attach to this request
    :param headers: headers to attach to this request, default are JsonAcceptHeader
    :param forceText: don't trust the response headers and just get the text
    :param sendJson: Whether to treat attached data as JSON or not
    """
    # Empty payload depends on transport mode: dict for JSON, str for text.
    default_data = {} if sendJson else ""
    data = data if data else default_data
    # Copy so the module-level default header dict is never mutated below.
    headers = headers if headers else JsonAcceptHeader.copy()
    if not sendJson:
        headers.update(TextSendHeader)
    if verb not in RequestVerbs:
        die('Server call verb must be one of %s' % str(RequestVerbs.keys()))
    if Verbose:
        echo2("%s Endpoint:" % verb.upper(), servicePath)
        echo2("%s Request data:" % verb.upper(), data)
        echo2("%s Request headers:" % verb.upper(), headers)
    verbFn = RequestVerbs[verb]
    # requests: json= serializes the payload, data= sends it raw.
    if sendJson:
        resp = verbFn(self.serverEndpoint + servicePath, json=data, headers=headers)
    else:
        resp = verbFn(self.serverEndpoint + servicePath, data=data, headers=headers)
    if Verbose:
        echo2("Response headers:", resp.headers)
        echo2("Response status:", resp.status_code)
    # Non-200: either raise (with status_code attached) or just warn and
    # fall through to normal response handling.
    if resp.status_code != 200:
        if self.raiseErrors:
            error = NutchException("Unexpected server response: %d" % resp.status_code)
            error.status_code = resp.status_code
            raise error
        else:
            warn('Nutch server returned status:', resp.status_code)
    # Plain text (or missing content-type, or caller-forced text) is
    # returned as-is.
    if forceText or 'content-type' not in resp.headers or resp.headers['content-type'] == 'text/plain':
        if Verbose:
            echo2("Response text:", resp.text)
        return resp.text
    content_type = resp.headers['content-type']
    if content_type == 'application/json' and not forceText:
        if Verbose:
            echo2("Response JSON:", resp.json())
        return resp.json()
    else:
        die('Did not understand server response: %s' % resp.headers)
Create a new named (cid) configuration from a parameter dictionary (config_data).
def create(self, cid, configData):
    """
    Create a named (cid) configuration on the server from a parameter dict
    and return a Config handle for it.
    """
    configArgs = {'configId': cid, 'params': configData, 'force': True}
    cid = self.server.call('post', "/config/create", configArgs,
                           forceText=True, headers=TextAcceptHeader)
    return Config(cid, self.server)
Return list of jobs at this endpoint.
Call get(allJobs=True) to see all jobs, not just the ones managed by this Client
def list(self, allJobs=False):
    """
    Return the jobs at this endpoint.

    :param allJobs: include jobs not managed by this client as well
    """
    return [Job(info['id'], self.server)
            for info in self.server.call('get', '/job')
            if allJobs or self._job_owned(info)]
Create a job given a command
:param command: Nutch command, one of nutch.LegalJobs
:param args: Additional arguments to pass to the job
:return: The created Job
def create(self, command, **args):
    """
    Create and start a job for the given Nutch command.

    :param command: Nutch command, one of nutch.LegalJobs (case-insensitive)
    :param args: extra arguments merged into the job's 'args' parameter
    :return: the created Job
    """
    command = command.upper()
    if command not in LegalJobs:
        # NOTE(review): historically this only warns and still submits the
        # job below; behavior preserved — confirm whether it should abort.
        warn('Nutch command must be one of: %s' % ', '.join(LegalJobs))
    else:
        echo2('Starting %s job with args %s' % (command, str(args)))
    parameters = self.parameters.copy()
    parameters['type'] = command
    parameters['crawlId'] = self.crawlId
    parameters['confId'] = self.confId
    # Bug fix: copy the nested 'args' dict before merging. The shallow
    # parameters.copy() shared it with self.parameters, so extra args from
    # one job leaked into every later job created by this client.
    parameters['args'] = dict(parameters['args'], **args)
    job_info = self.server.call('post', "/job/create", parameters, JsonAcceptHeader)
    return Job(job_info['id'], self.server)
:param seed: A Seed object (this or urlDir must be specified)
:param urlDir: The directory on the server containing the seed list (this or urlDir must be specified)
:param args: Extra arguments for the job
:return: a created Job object
def inject(self, seed=None, urlDir=None, **args):
    """
    Start an INJECT job from a Seed object or a server-side seed directory.

    :param seed: a Seed object (this or urlDir must be specified)
    :param urlDir: seed-list directory on the server (this or seed must be
        specified)
    :param args: extra arguments for the job
    :return: the created Job
    :raises NutchException: when neither source is given, or both are given
        and disagree
    """
    if seed:
        if urlDir and urlDir != seed.seedPath:
            raise NutchException("Can't specify both seed and urlDir")
        urlDir = seed.seedPath
    elif not urlDir:
        raise NutchException("Must specify seed or urlDir")
    args['url_dir'] = urlDir
    return self.create('INJECT', **args)
Create a new named (sid) Seed from a list of seed URLs
:param sid: the name to assign to the new seed list
:param seedList: the list of seeds to use
:return: the created Seed object
def create(self, sid, seedList):
    """
    Create a named (sid) seed list on the server from one or more URLs.

    :param sid: name for the new seed list
    :param seedList: a single URL or a tuple of URLs
    :return: the created Seed object
    """
    if not isinstance(seedList, tuple):
        seedList = (seedList,)
    seedListData = {
        "id": "12345",
        "name": sid,
        "seedUrls": [{"id": uid, "url": url} for uid, url in enumerate(seedList)]
    }
    # As per resolution of https://issues.apache.org/jira/browse/NUTCH-2123
    seedPath = self.server.call('post', "/seed/create", seedListData, TextAcceptHeader)
    return Seed(sid, seedPath, self.server)
return new_seed |
Create a new named (sid) Seed from a file containing URLs
It's assumed URLs are whitespace seperated.
:param sid: the name to assign to the new seed list
:param filename: the name of the file that contains URLs
:return: the created Seed object
def createFromFile(self, sid, filename):
    """
    Create a named (sid) seed list from a file of whitespace-separated URLs.

    :param sid: name for the new seed list
    :param filename: path of the file containing the URLs
    :return: the created Seed object
    """
    # 'with' closes the file deterministically (the original leaked the
    # handle until GC); str.split() with no args tokenizes on any
    # whitespace, including newlines, matching the old per-line split.
    with open(filename) as f:
        urls = f.read().split()
    return self.create(sid, tuple(urls))
Given a completed job, start the next job in the round, or return None
:param nextRound: whether to start jobs from the next round if the current round is completed.
:return: the newly started Job, or None if no job was started
def _nextJob(self, job, nextRound=True):
"""
Given a completed job, start the next job in the round, or return None
:param nextRound: whether to start jobs from the next round if the current round is completed.
:return: the newly started Job, or None if no job was started
"""
jobInfo = job.info()
assert jobInfo['state'] == 'FINISHED'
roundEnd = False
if jobInfo['type'] == 'INJECT':
nextCommand = 'GENERATE'
elif jobInfo['type'] == 'GENERATE':
nextCommand = 'FETCH'
elif jobInfo['type'] == 'FETCH':
nextCommand = 'PARSE'
elif jobInfo['type'] == 'PARSE':
nextCommand = 'UPDATEDB'
elif jobInfo['type'] == 'UPDATEDB':
nextCommand = 'INVERTLINKS'
elif jobInfo['type'] == 'INVERTLINKS':
nextCommand = 'DEDUP'
elif jobInfo['type'] == 'DEDUP':
if self.enable_index:
nextCommand = 'INDEX'
else:
roundEnd = True
elif jobInfo['type'] == 'INDEX':
roundEnd = True
else:
raise NutchException("Unrecognized job type {}".format(jobInfo['type']))
if roundEnd:
if nextRound and self.currentRound < self.totalRounds:
nextCommand = 'GENERATE'
self.currentRound += 1
else:
return None
return self.jobClient.create(nextCommand) |
Check the status of the current job, activate the next job if it's finished, and return the active job
If the current job has failed, a NutchCrawlException will be raised with no jobs attached.
:param nextRound: whether to start jobs from the next round if the current job/round is completed.
:return: the currently running Job, or None if no jobs are running.
def progress(self, nextRound=True):
    """
    Check the current job; if it has finished, start the next one.

    :param nextRound: whether a completed round may roll over into the next
    :return: the currently running Job, or None when no jobs remain
    :raises NutchCrawlException: when the current job is in an unexpected
        state (e.g. failed); the exception carries the job in ``current_job``.
    """
    currentJob = self.currentJob
    if currentJob is None:
        return currentJob
    jobInfo = currentJob.info()
    if jobInfo['state'] == 'RUNNING':
        return currentJob
    elif jobInfo['state'] == 'FINISHED':
        nextJob = self._nextJob(currentJob, nextRound)
        self.currentJob = nextJob
        return nextJob
    else:
        error = NutchCrawlException("Unexpected job state: {}".format(jobInfo['state']))
        error.current_job = currentJob
        # Bug fix: previously raised the bare NutchCrawlException class,
        # discarding the message and the attached current_job.
        raise error
Execute all jobs in the current round and return when they have finished.
If a job fails, a NutchCrawlException will be raised, with all completed jobs from this round attached
to the exception.
:return: a list of all completed Jobs
def nextRound(self):
    """
    Run every job in the current round to completion and return when done.

    :return: the list of Jobs finished during this round
    :raises NutchCrawlException: propagated when a job fails mid-round;
        jobs completed so far are attached to the exception by the caller's
        contract.
    """
    if self.currentJob is None:
        self.currentJob = self.jobClient.create('GENERATE')
    finishedJobs = []
    activeJob = self.progress(nextRound=False)
    while activeJob:
        previousJob = activeJob
        activeJob = self.progress(nextRound=False)  # updates self.currentJob
        if previousJob and previousJob != activeJob:
            finishedJobs.append(previousJob)
        sleep(self.sleepTime)
    self.currentRound += 1
    return finishedJobs
Execute all queued rounds and return when they have finished.
If a job fails, a NutchCrawlException will be raised, with all completed jobs attached
to the exception
:return: a list of jobs completed for each round, organized by round (list-of-lists)
def waitAll(self):
    """
    Run every queued round to completion and return when all are finished.

    :return: a list of per-round lists of finished Jobs
    :raises NutchCrawlException: propagated when a job fails
    """
    rounds = [self.nextRound()]
    while self.currentRound < self.totalRounds:
        rounds.append(self.nextRound())
    return rounds
Create a JobClient for listing and creating jobs.
The JobClient inherits the confId from the Nutch client.
:param crawlId: crawlIds to use for this client. If not provided, will be generated
by nutch.defaultCrawlId()
:return: a JobClient
def Jobs(self, crawlId=None):
    """
    Build a JobClient that inherits this Nutch client's confId.

    :param crawlId: crawl id for the client; generated via defaultCrawlId()
        when omitted
    :return: a JobClient
    """
    if crawlId is None:
        crawlId = defaultCrawlId()
    return JobClient(self.server, crawlId, self.confId)
Launch a crawl using the given seed
:param seed: Type (Seed or SeedList) - used for crawl
:param seedClient: if a SeedList is given, the SeedClient to upload, if None a default will be created
:param jobClient: the JobClient to be used, if None a default will be created
:param rounds: the number of rounds in the crawl
:return: a CrawlClient to monitor and control the crawl
def Crawl(self, seed, seedClient=None, jobClient=None, rounds=1, index=True):
    """
    Launch a crawl using the given seed.

    :param seed: a Seed object, or raw seed data to upload first
    :param seedClient: SeedClient used to upload raw seed data; a default is
        created when omitted
    :param jobClient: JobClient driving the crawl; a default is created when
        omitted
    :param rounds: number of crawl rounds
    :param index: whether rounds end with an INDEX job
    :return: a CrawlClient to monitor and control the crawl
    """
    seedClient = self.Seeds() if seedClient is None else seedClient
    jobClient = self.Jobs() if jobClient is None else jobClient
    # Anything that is not already a Seed gets uploaded as a new seed list.
    if type(seed) != Seed:
        seed = seedClient.create(jobClient.crawlId + '_seeds', seed)
    return CrawlClient(self.server, seed, jobClient, rounds, index)
>>> import pprint
>>> input_line1 = 'Apr 24 00:00:02 node haproxy[12298]: 1.1.1.1:48660 [24/Apr/2019:00:00:02.358] pre-staging~ pre-staging_doc/pre-staging_active 261/0/2/8/271 200 2406 - - ---- 4/4/0/1/0 0/0 {AAAAAA:AAAAA_AAAAA:AAAAA_AAAAA_AAAAA:300A||| user@mail.net:sdasdasdasdsdasAHDivsjd=|user@mail.net|2018} "GET /doc/api/get?call=apple HTTP/1.1"'
>>> output_line1 = haproxy(input_line1)
>>> pprint.pprint(output_line1)
{'data': {'Tc': 2.0,
'Tq': 261.0,
'Tr': 8.0,
'Tw': 0.0,
'_api': '/doc/api/get?call=apple',
'_headers': ['AAAAAA:AAAAA_AAAAA:AAAAA_AAAAA_AAAAA:300A||| user@mail.net:sdasdasdasdsdasAHDivsjd=|user@mail.net|2018'],
'actconn': 4,
'backend': 'pre-staging_doc/pre-staging_active',
'backend_queue': 0,
'beconn': 1,
'bytes_read': 2406.0,
'client_port': '48660',
'client_server': '1.1.1.1',
'feconn': 4,
'front_end': 'pre-staging~',
'haproxy_server': 'node',
'method': 'GET',
'resp_time': 271.0,
'retries': 0,
'srv_conn': 0,
'srv_queue': 0,
'status': '200',
'timestamp': '2019-04-24T00:00:02.358000'},
'event': 'haproxy_event',
'timestamp': '2019-04-24T00:00:02.358000',
'type': 'metric'}
def haproxy(line):
    # TODO Handle all message formats
    '''
    Parse a single haproxy HTTP log line into a metric event dict.

    Field positions follow haproxy's default HTTP log format; the
    connection-counter field is actconn/feconn/beconn/srv_conn/retries.
    (Bug fix: beconn and srv_conn were previously swapped.)

    >>> input_line1 = 'Apr 24 00:00:02 node haproxy[12298]: 1.1.1.1:48660 [24/Apr/2019:00:00:02.358] pre-staging~ pre-staging_doc/pre-staging_active 261/0/2/8/271 200 2406 - - ---- 4/4/0/1/0 0/0 {AAAAAA:AAAAA_AAAAA:AAAAA_AAAAA_AAAAA:300A||| user@mail.net:sdasdasdasdsdasAHDivsjd=|user@mail.net|2018} "GET /doc/api/get?call=apple HTTP/1.1"'
    >>> out = haproxy(input_line1)
    >>> out['event'], out['type']
    ('haproxy_event', 'metric')
    >>> out['data']['timestamp']
    '2019-04-24T00:00:02.358000'
    >>> out['data']['status'], out['data']['resp_time'], out['data']['bytes_read']
    ('200', 271.0, 2406.0)
    >>> (out['data']['actconn'], out['data']['feconn'], out['data']['beconn'],
    ...  out['data']['srv_conn'], out['data']['retries'])
    (4, 4, 0, 1, 0)
    >>> out['data']['method'], out['data']['_api']
    ('GET', '/doc/api/get?call=apple')
    '''
    tokens = line.strip().split()
    log = {}
    # Client address is "ip:port".
    address = tokens[5].split(':')
    log['client_server'] = address[0].strip()
    log['client_port'] = address[1].strip()
    # Accept timestamp, e.g. [24/Apr/2019:00:00:02.358].
    raw_ts = re.findall(r'\[(.*?)\]', tokens[6])[0]
    log['timestamp'] = datetime.datetime.strptime(raw_ts, '%d/%b/%Y:%H:%M:%S.%f').isoformat()
    log['front_end'] = tokens[7].strip()
    log['backend'] = tokens[8].strip()
    # Timers field is "Tq/Tw/Tc/Tr/Tt" — split once instead of per value.
    timers = tokens[9].split('/')
    log['Tq'] = float(timers[0].strip())
    log['Tw'] = float(timers[1].strip())
    log['Tc'] = float(timers[2].strip())
    log['Tr'] = float(timers[3].strip())
    log['resp_time'] = float(timers[-1].strip())
    log['status'] = tokens[10].strip()
    log['bytes_read'] = float(tokens[11].strip())
    # Captured request headers between {...} (may contain spaces, so match
    # against the whole line).
    log['_headers'] = re.findall(r'{(.*)}', line)
    log['haproxy_server'] = tokens[3].strip()
    log['method'] = tokens[-3].strip('"').strip()
    log['_api'] = tokens[-2].strip()
    # Connection counters: actconn/feconn/beconn/srv_conn/retries.
    conns = tokens[15].split('/')
    log['actconn'] = int(conns[0].strip())
    log['feconn'] = int(conns[1].strip())
    log['beconn'] = int(conns[2].strip())
    log['srv_conn'] = int(conns[3].strip())
    log['retries'] = int(conns[-1].strip())
    # Queue counters: srv_queue/backend_queue.
    queues = tokens[16].split('/')
    log['srv_queue'] = int(queues[0].strip())
    log['backend_queue'] = int(queues[1].strip())
    return dict(
        data=log,
        event='haproxy_event',
        timestamp=log.get('timestamp'),
        type='metric'
    )
>>> import pprint
>>> input_line1 = '{ \
"remote_addr": "127.0.0.1","remote_user": "-","timestamp": "1515144699.201", \
"request": "GET / HTTP/1.1","status": "200","request_time": "0.000", \
"body_bytes_sent": "396","http_referer": "-","http_user_agent": "python-requests/2.18.4", \
"http_x_forwarded_for": "-","upstream_response_time": "-" \
}'
>>> output_line1 = nginx_access(input_line1)
>>> pprint.pprint(output_line1)
{'data': {u'body_bytes_sent': 396.0,
u'http_referer': u'-',
u'http_user_agent': u'python-requests/2.18.4',
u'http_x_forwarded_for': u'-',
u'remote_addr': u'127.0.0.1',
u'remote_user': u'-',
u'request': u'GET / HTTP/1.1',
u'request_time': 0.0,
u'status': u'200',
u'timestamp': '2018-01-05T09:31:39.201000',
u'upstream_response_time': 0.0},
'event': 'nginx_event',
'timestamp': '2018-01-05T09:31:39.201000',
'type': 'metric'}
>>> input_line2 = '{ \
"remote_addr": "192.158.0.51","remote_user": "-","timestamp": "1515143686.415", \
"request": "POST /mpub?topic=heartbeat HTTP/1.1","status": "404","request_time": "0.000", \
"body_bytes_sent": "152","http_referer": "-","http_user_agent": "python-requests/2.18.4", \
"http_x_forwarded_for": "-","upstream_response_time": "-" \
}'
>>> output_line2 = nginx_access(input_line2)
>>> pprint.pprint(output_line2)
{'data': {u'body_bytes_sent': 152.0,
u'http_referer': u'-',
u'http_user_agent': u'python-requests/2.18.4',
u'http_x_forwarded_for': u'-',
u'remote_addr': u'192.158.0.51',
u'remote_user': u'-',
u'request': u'POST /mpub?topic=heartbeat HTTP/1.1',
u'request_time': 0.0,
u'status': u'404',
u'timestamp': '2018-01-05T09:14:46.415000',
u'upstream_response_time': 0.0},
'event': 'nginx_event',
'timestamp': '2018-01-05T09:14:46.415000',
'type': 'metric'}
def nginx_access(line):
    '''
    Parse one JSON-formatted nginx access-log line into a metric event.

    The raw unix ``timestamp`` is replaced by its ISO-8601 UTC form,
    byte counts and timings are coerced to floats, and a dash (nginx's
    marker for "no upstream involved") in ``upstream_response_time`` is
    treated as 0.0.

    :param line: one JSON access-log line
    :return: dict with keys timestamp / data / type / event
    '''
    # TODO Handle nginx error logs
    record = json.loads(line)
    iso_timestamp = datetime.datetime.utcfromtimestamp(
        float(record['timestamp'])).isoformat()
    record['timestamp'] = iso_timestamp
    # nginx writes '-' when there was no upstream response
    if '-' in record.get('upstream_response_time'):
        record['upstream_response_time'] = 0.0
    for numeric_field in ('body_bytes_sent', 'request_time', 'upstream_response_time'):
        record[numeric_field] = float(record[numeric_field])
    return dict(
        timestamp=record.get('timestamp', ' '),
        data=record,
        type='metric',
        event='nginx_event',
    )
>>> import pprint
>>> input_line1 = '2017-08-17T07:56:33.489+0200 I REPL [signalProcessingThread] shutting down replication subsystems'
>>> output_line1 = mongodb(input_line1)
>>> pprint.pprint(output_line1)
{'data': {'component': 'REPL',
'context': '[signalProcessingThread]',
'message': 'shutting down replication subsystems',
'severity': 'I',
'timestamp': '2017-08-17T07:56:33.489+0200'},
'timestamp': '2017-08-17T07:56:33.489+0200',
'type': 'log'}
>>> input_line2 = '2017-08-17T07:56:33.515+0200 W NETWORK [initandlisten] No primary detected for set confsvr_repl1'
>>> output_line2 = mongodb(input_line2)
>>> pprint.pprint(output_line2)
{'data': {'component': 'NETWORK',
'context': '[initandlisten]',
'message': 'No primary detected for set confsvr_repl1',
'severity': 'W',
'timestamp': '2017-08-17T07:56:33.515+0200'},
'timestamp': '2017-08-17T07:56:33.515+0200',
'type': 'log'}
def mongodb(line):
    '''
    Parse a single mongodb log line into a structured record.

    A line consists of a timestamp, severity letter, component, bracketed
    context and the free-text message, separated by whitespace.

    :param line: raw mongodb log line
    :return: dict with the original timestamp, the parsed fields and type 'log'
    '''
    field_names = ['timestamp', 'severity', 'component', 'context', 'message']
    # maxsplit=4 keeps the whole message (which may contain spaces) intact
    field_values = re.split(r'\s+', line, maxsplit=4)
    parsed = dict(zip(field_names, field_values))
    return dict(
        timestamp=field_values[0],
        data=parsed,
        type='log',
    )
>>> import pprint
>>> input_line1 = '[23/Aug/2017 11:35:25] INFO [app.middleware_log_req:50]View func called:{"exception": null,"processing_time": 0.00011801719665527344, "url": "<url>",host": "localhost", "user": "testing", "post_contents": "", "method": "POST" }'
>>> output_line1 = django(input_line1)
>>> pprint.pprint(output_line1)
{'data': {'loglevel': 'INFO',
'logname': '[app.middleware_log_req:50]',
'message': 'View func called:{"exception": null,"processing_time": 0.00011801719665527344, "url": "<url>",host": "localhost", "user": "testing", "post_contents": "", "method": "POST" }',
'timestamp': '2017-08-23T11:35:25'},
'level': 'INFO',
'timestamp': '2017-08-23T11:35:25'}
>>> input_line2 = '[22/Sep/2017 06:32:15] INFO [app.function:6022] {"UUID": "c47f3530-9f5f-11e7-a559-917d011459f7", "timestamp":1506061932546, "misc": {"status": 200, "ready_state": 4, "end_time_ms": 1506061932546, "url": "/api/function?", "start_time_ms": 1506061932113, "response_length": 31, "status_message": "OK", "request_time_ms": 433}, "user": "root", "host_url": "localhost:8888", "message": "ajax success"}'
>>> output_line2 = django(input_line2)
>>> pprint.pprint(output_line2)
{'data': {'loglevel': 'INFO',
'logname': '[app.function:6022]',
'message': {u'UUID': u'c47f3530-9f5f-11e7-a559-917d011459f7',
u'host_url': u'localhost:8888',
u'message': u'ajax success',
u'misc': {u'end_time_ms': 1506061932546L,
u'ready_state': 4,
u'request_time_ms': 433,
u'response_length': 31,
u'start_time_ms': 1506061932113L,
u'status': 200,
u'status_message': u'OK',
u'url': u'/api/function?'},
u'timestamp': 1506061932546L,
u'user': u'root'},
'timestamp': '2017-09-22T06:32:15'},
'level': 'INFO',
'timestamp': '2017-09-22T06:32:15'}
Case2:
[18/Sep/2017 05:40:36] ERROR [app.apps:78] failed to get the record, collection = Collection(Database(MongoClient(host=['localhost:27017'], document_class=dict, tz_aware=False, connect=True, serverselectiontimeoutms=3000), u'collection_cache'), u'function_dummy_version')
Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/mongo_cache/mongocache.py", line 70, in __getitem__
result = self.collection.find_one({"_id": key})
OperationFailure: not authorized on collection_cache to execute command { find: "function", filter: { _id: "zydelig-cosine-20" }, limit: 1, singleBatch: true }
def django(line):
    '''
    Parse a django log line into a structured event.

    Lines look like ``[23/Aug/2017 11:35:25] INFO [app.module:50] message``.
    If the message part looks like JSON it is decoded; otherwise it is kept
    as plain text. Lines that do not start with a bracketed timestamp
    (e.g. traceback continuation lines) are returned verbatim under the
    ``raw`` key with the current UTC time.

    :param line: one django log line
    :return: dict with timestamp/level/data, or timestamp/data for raw lines
    '''
    # TODO handle multi-line tracebacks (Case2 in the original notes)
    data = {}
    log = re.findall(r'^(\[\d+/\w+/\d+ \d+:\d+:\d+\].*)', line)
    if len(log) == 1:
        data['timestamp'] = datetime.datetime.strptime(re.findall(r'(\d+/\w+/\d+ \d+:\d+:\d+)',\
                log[0])[0],"%d/%b/%Y %H:%M:%S").isoformat()
        # index [1]: index [0] is the month abbreviation inside the timestamp
        data['loglevel'] = re.findall('[A-Z]+', log[0])[1]
        data['logname'] = re.findall('\[\D+.\w+:\d+\]', log[0])[0]
        message = re.findall('\{.+\}', log[0])
        try:
            if len(message) > 0:
                message = json.loads(message[0])
            else:
                # no braces at all: keep everything after the logname bracket
                message = re.split(']', log[0])
                message = ''.join(message[2:])
        except ValueError:
            # braces present but not valid JSON: fall back to plain text
            message = re.split(']', log[0])
            message = ''.join(message[2:])
        data['message'] = message
        return dict(
            timestamp=data['timestamp'],
            level=data['loglevel'],
            data=data,
        )
    else:
        # BUG FIX: the key was previously the undefined name ``raw``,
        # which raised NameError for every non-standard line.
        return dict(
            timestamp=datetime.datetime.isoformat(datetime.datetime.utcnow()),
            data={'raw': line}
        )
>>> import pprint
>>> input_line = '{"level": "warning", "timestamp": "2018-02-07T06:37:00.297610Z", "event": "exited via keyboard interrupt", "type": "log", "id": "20180207T063700_4d03fe800bd111e89ecb96000007bc65", "_": {"ln": 58, "file": "/usr/local/lib/python2.7/dist-packages/basescript/basescript.py", "name": "basescript.basescript", "fn": "start"}}'
>>> output_line1 = basescript(input_line)
>>> pprint.pprint(output_line1)
{'data': {u'_': {u'file': u'/usr/local/lib/python2.7/dist-packages/basescript/basescript.py',
u'fn': u'start',
u'ln': 58,
u'name': u'basescript.basescript'},
u'event': u'exited via keyboard interrupt',
u'id': u'20180207T063700_4d03fe800bd111e89ecb96000007bc65',
u'level': u'warning',
u'timestamp': u'2018-02-07T06:37:00.297610Z',
u'type': u'log'},
'event': u'exited via keyboard interrupt',
'id': u'20180207T063700_4d03fe800bd111e89ecb96000007bc65',
'level': u'warning',
'timestamp': u'2018-02-07T06:37:00.297610Z',
'type': u'log'}
def basescript(line):
    '''
    Parse a basescript JSON log line.

    The line is already structured JSON; the whole decoded record is kept
    under ``data`` while the common envelope fields (timestamp, id, type,
    level, event) are promoted to the top level.

    :param line: one JSON log line emitted by basescript
    :return: dict with timestamp/data/id/type/level/event
    '''
    record = json.loads(line)
    return {
        'timestamp': record['timestamp'],
        'data': record,
        'id': record['id'],
        'type': record['type'],
        'level': record['level'],
        'event': record['event'],
    }
>>> import pprint
>>> input_line = '[2017-08-30T06:27:19,158] [WARN ][o.e.m.j.JvmGcMonitorService] [Glsuj_2] [gc][296816] overhead, spent [1.2s] collecting in the last [1.3s]'
>>> output_line = elasticsearch(input_line)
>>> pprint.pprint(output_line)
{'data': {'garbage_collector': 'gc',
'gc_count': 296816.0,
'level': 'WARN',
'message': 'o.e.m.j.JvmGcMonitorService',
'plugin': 'Glsuj_2',
'query_time_ms': 1200.0,
'resp_time_ms': 1300.0,
'timestamp': '2017-08-30T06:27:19,158'},
'event': 'o.e.m.j.JvmGcMonitorService',
'level': 'WARN ',
'timestamp': '2017-08-30T06:27:19,158',
'type': 'metric'}
Case 2:
[2017-09-13T23:15:00,415][WARN ][o.e.i.e.Engine ] [Glsuj_2] [filebeat-2017.09.09][3] failed engine [index]
java.nio.file.FileSystemException: /home/user/elasticsearch/data/nodes/0/indices/jsVSO6f3Rl-wwBpQyNRCbQ/3/index/_0.fdx: Too many open files
at sun.nio.fs.UnixException.translateToIOException(UnixException.java:91) ~[?:?]
def elasticsearch(line):
    '''
    >>> import pprint
    >>> input_line = '[2017-08-30T06:27:19,158] [WARN ][o.e.m.j.JvmGcMonitorService] [Glsuj_2] [gc][296816] overhead, spent [1.2s] collecting in the last [1.3s]'
    >>> output_line = elasticsearch(input_line)
    >>> pprint.pprint(output_line)
    {'data': {'garbage_collector': 'gc',
              'gc_count': 296816.0,
              'level': 'WARN',
              'message': 'o.e.m.j.JvmGcMonitorService',
              'plugin': 'Glsuj_2',
              'query_time_ms': 1200.0,
              'resp_time_ms': 1300.0,
              'timestamp': '2017-08-30T06:27:19,158'},
     'event': 'o.e.m.j.JvmGcMonitorService',
     'level': 'WARN ',
     'timestamp': '2017-08-30T06:27:19,158',
     'type': 'metric'}
    Case 2:
    [2017-09-13T23:15:00,415][WARN ][o.e.i.e.Engine ] [Glsuj_2] [filebeat-2017.09.09][3] failed engine [index]
    java.nio.file.FileSystemException: /home/user/elasticsearch/data/nodes/0/indices/jsVSO6f3Rl-wwBpQyNRCbQ/3/index/_0.fdx: Too many open files
        at sun.nio.fs.UnixException.translateToIOException(UnixException.java:91) ~[?:?]
    '''
    # TODO we need to handle case2 logs
    elasticsearch_log = line
    # Accept only lines beginning with a bracketed [YYYY-MM-DDThh:mm:ss,mmm] timestamp.
    actuallog = re.findall(r'(\[\d+\-+\d+\d+\-+\d+\w+\d+:\d+:\d+,+\d\d\d+\].*)', elasticsearch_log)
    if len(actuallog) == 1:
        # Positional mapping: the bracketed fields appear in this order in GC lines.
        keys = ['timestamp','level','message','plugin','garbage_collector','gc_count','query_time_ms', 'resp_time_ms']
        values = re.findall(r'\[(.*?)\]', actuallog[0])
        # Normalise duration fields to milliseconds: "1.2s" -> 1200.0, "15ms" -> 15.0.
        for index, i in enumerate(values):
            if not isinstance(i, str):
                continue
            if len(re.findall(r'.*ms$', i)) > 0 and 'ms' in re.findall(r'.*ms$', i)[0]:
                num = re.split('ms', i)[0]
                values[index] = float(num)
                continue
            # The 'ms' case was handled above, so a trailing 's' here means seconds.
            if len(re.findall(r'.*s$', i)) > 0 and 's' in re.findall(r'.*s$', i)[0]:
                num = re.split('s', i)[0]
                values[index] = float(num) * 1000
                continue
        data = dict(zip(keys,values))
        # The raw level is padded ("WARN "); strip one trailing space in data only.
        # NOTE(review): the top-level 'level' below intentionally keeps the raw
        # padded value (see doctest) — confirm whether that asymmetry is wanted.
        if 'level' in data and data['level'][-1] == ' ':
            data['level'] = data['level'][:-1]
        if 'gc_count' in data:
            data['gc_count'] = float(data['gc_count'])
        event = data['message']
        level=values[1]
        timestamp=values[0]
        return dict(
            timestamp=timestamp,
            level=level,
            type='metric',
            data=data,
            event=event
        )
    else:
        # Not a recognised record (continuation/traceback): return it raw
        # with the current local time as timestamp.
        return dict(
            timestamp=datetime.datetime.isoformat(datetime.datetime.now()),
            data={'raw': line}
        )
>>> line1 = ' [2018-04-03T00:22:38,048][DEBUG][o.e.c.u.c.QueueResizingEsThreadPoolExecutor] [search17/search]: there were [2000] tasks in [809ms], avg task time [28.4micros], EWMA task execution [790nanos], [35165.36 tasks/s], optimal queue is [35165], current capacity [1000]'
>>> line2 = ' org.elasticsearch.ResourceAlreadyExistsException: index [media_corpus_refresh/6_3sRAMsRr2r63J6gbOjQw] already exists'
>>> line3 = ' at org.elasticsearch.cluster.metadata.MetaDataCreateIndexService.validateIndexName(MetaDataCreateIndexService.java:151) ~[elasticsearch-6.2.0.jar:6.2.0]'
>>> elasticsearch_ispartial_log(line1)
False
>>> elasticsearch_ispartial_log(line2)
True
>>> elasticsearch_ispartial_log(line3)
True
def elasticsearch_ispartial_log(line):
    '''
    Return True when *line* does not start a new elasticsearch log record,
    i.e. it is a continuation line such as a Java traceback.

    >>> line1 = ' [2018-04-03T00:22:38,048][DEBUG][o.e.c.u.c.QueueResizingEsThreadPoolExecutor] [search17/search]: there were [2000] tasks in [809ms], avg task time [28.4micros], EWMA task execution [790nanos], [35165.36 tasks/s], optimal queue is [35165], current capacity [1000]'
    >>> line2 = ' org.elasticsearch.ResourceAlreadyExistsException: index [media_corpus_refresh/6_3sRAMsRr2r63J6gbOjQw] already exists'
    >>> line3 = ' at org.elasticsearch.cluster.metadata.MetaDataCreateIndexService.validateIndexName(MetaDataCreateIndexService.java:151) ~[elasticsearch-6.2.0.jar:6.2.0]'
    >>> elasticsearch_ispartial_log(line1)
    False
    >>> elasticsearch_ispartial_log(line2)
    True
    >>> elasticsearch_ispartial_log(line3)
    True
    '''
    # A line is a complete record iff it matches one of the known
    # start-of-record patterns. The previous version built an unused
    # ``match_result`` list and compared against None with ``!=``.
    return not any(re.match(p, line) for p in LOG_BEGIN_PATTERN)
Get an attribute defined by this session
def get(self, attr, default=None):
"""Get an attribute defined by this session"""
attrs = self.body.get('attributes') or {}
return attrs.get(attr, default) |
If the data can't be found in the cache, it will be fetched from the db,
parsed and stored to cache for each lang_code.
:param cat: cat of catalog data
:return:
def get_all(self, cat):
"""
if data can't found in cache then it will be fetched from db,
parsed and stored to cache for each lang_code.
:param cat: cat of catalog data
:return:
"""
return self._get_from_local_cache(cat) or self._get_from_cache(cat) or self._get_from_db(cat) |
get from redis, cache locally then return
:param catalog: catalog name
:param key:
:return:
def _fill_get_item_cache(self, catalog, key):
"""
get from redis, cache locally then return
:param catalog: catalog name
:param key:
:return:
"""
lang = self._get_lang()
keylist = self.get_all(catalog)
self.ITEM_CACHE[lang][catalog] = dict([(i['value'], i['name']) for i in keylist])
return self.ITEM_CACHE[lang][catalog].get(key) |
Utility method to quickly get a server up and running.
:param debug: turns on Werkzeug debugger, code reloading, and full
logging.
:param validate_requests: whether or not to ensure that requests are
sent by Amazon. This can be useful for manually testing the server.
def run(self, host, port, debug=True, validate_requests=True):
"""Utility method to quickly get a server up and running.
:param debug: turns on Werkzeug debugger, code reloading, and full
logging.
:param validate_requests: whether or not to ensure that requests are
sent by Amazon. This can be usefulfor manually testing the server.
"""
if debug:
# Turn on all alexandra log output
logging.basicConfig(level=logging.DEBUG)
app = self.create_wsgi_app(validate_requests)
run_simple(host, port, app, use_reloader=debug, use_debugger=debug) |
Given a parsed JSON request object, call the correct Intent, Launch,
or SessionEnded function.
This function is called after request parsing and validation and will
raise a `ValueError` if an unknown request type comes in.
:param body: JSON object loaded from incoming request's POST data.
def dispatch_request(self, body):
"""Given a parsed JSON request object, call the correct Intent, Launch,
or SessionEnded function.
This function is called after request parsing and validaion and will
raise a `ValueError` if an unknown request type comes in.
:param body: JSON object loaded from incoming request's POST data.
"""
req_type = body.get('request', {}).get('type')
session_obj = body.get('session')
session = Session(session_obj) if session_obj else None
if req_type == 'LaunchRequest':
return self.launch_fn(session)
elif req_type == 'IntentRequest':
intent = body['request']['intent']['name']
intent_fn = self.intent_map.get(intent, self.unknown_intent_fn)
slots = {
slot['name']: slot.get('value')
for _, slot in
body['request']['intent'].get('slots', {}).items()
}
arity = intent_fn.__code__.co_argcount
if arity == 2:
return intent_fn(slots, session)
return intent_fn()
elif req_type == 'SessionEndedRequest':
return self.session_end_fn()
log.error('invalid request type: %s', req_type)
raise ValueError('bad request: %s', body) |
Decorator to register a handler for the given intent.
The decorated function can either take 0 or 2 arguments. If two are
specified, it will be provided a dictionary of `{slot_name: value}` and
a :py:class:`alexandra.session.Session` instance.
If no session was provided in the request, the session object will be
`None`. ::
@alexa_app.intent('FooBarBaz')
def foo_bar_baz_intent(slots, session):
pass
@alexa_app.intent('NoArgs')
def noargs_intent():
pass
def intent(self, intent_name):
"""Decorator to register a handler for the given intent.
The decorated function can either take 0 or 2 arguments. If two are
specified, it will be provided a dictionary of `{slot_name: value}` and
a :py:class:`alexandra.session.Session` instance.
If no session was provided in the request, the session object will be
`None`. ::
@alexa_app.intent('FooBarBaz')
def foo_bar_baz_intent(slots, session):
pass
@alexa_app.intent('NoArgs')
def noargs_intent():
pass
"""
# nested decorator so we can have params.
def _decorator(func):
arity = func.__code__.co_argcount
if arity not in [0, 2]:
raise ValueError("expected 0 or 2 argument function")
self.intent_map[intent_name] = func
return func
return _decorator |
Kullanıcı şifresini encrypt ederek set eder.
Args:
raw_password (str)
def set_password(self, raw_password):
"""
Kullanıcı şifresini encrypt ederek set eder.
Args:
raw_password (str)
"""
self.password = pbkdf2_sha512.encrypt(raw_password, rounds=10000,
salt_size=10) |
encrypt password if not already encrypted
def encrypt_password(self):
""" encrypt password if not already encrypted """
if self.password and not self.password.startswith('$pbkdf2'):
self.set_password(self.password) |
sends message to users private mq exchange
Args:
title:
message:
sender:
url:
typ:
def send_notification(self, title, message, typ=1, url=None, sender=None):
"""
sends message to users private mq exchange
Args:
title:
message:
sender:
url:
typ:
"""
self.created_channels.channel.add_message(
channel_key=self.prv_exchange,
body=message,
title=title,
typ=typ,
url=url,
sender=sender,
receiver=self
) |
Send arbitrary cmd and data to client
if queue name passed by "via_queue" parameter,
that queue will be used instead of users private exchange.
Args:
data: dict
cmd: string
via_queue: queue name,
def send_client_cmd(self, data, cmd=None, via_queue=None):
"""
Send arbitrary cmd and data to client
if queue name passed by "via_queue" parameter,
that queue will be used instead of users private exchange.
Args:
data: dict
cmd: string
via_queue: queue name,
"""
mq_channel = self._connect_mq()
if cmd:
data['cmd'] = cmd
if via_queue:
mq_channel.basic_publish(exchange='',
routing_key=via_queue,
body=json.dumps(data))
else:
mq_channel.basic_publish(exchange=self.prv_exchange,
routing_key='',
body=json.dumps(data)) |
shifts on a given number of record in the original file
:param offset: number of record
    def seek(self, offset):
        """
        Position the reader at the start of record number *offset* in the
        original file.

        :param offset: zero-based record index
        :raises IndexError: if *offset* is outside the indexed records
        :raises: ``self._implement_error`` when no shift index was built
        """
        if self._shifts:
            if 0 <= offset < len(self._shifts):
                current_pos = self._file.tell()
                new_pos = self._shifts[offset]
                if current_pos != new_pos:
                    if current_pos == self._shifts[-1]:  # reached the end of the file
                        # Restart from scratch: rebuild the parsing generator,
                        # rewind the file, then prime the generator once.
                        # NOTE(review): presumably __reader() is the record
                        # generator primed by next() -- confirm in the class.
                        self._data = self.__reader()
                        self.__file = iter(self._file.readline, '')
                        self._file.seek(0)
                        next(self._data)
                        if offset:  # move not to the beginning of the file
                            self._file.seek(new_pos)
                    else:
                        if not self.__already_seeked:
                            if self._shifts[0] < current_pos:  # in the middle of the file
                                # Notify the generator that a seek happened so
                                # it resynchronizes; only done once per seek.
                                self._data.send(True)
                                self.__already_seeked = True
                        self._file.seek(new_pos)
            else:
                raise IndexError('invalid offset')
        else:
            raise self._implement_error
:return: number of records processed from the original file
def tell(self):
"""
:return: number of records processed from the original file
"""
if self._shifts:
t = self._file.tell()
if t == self._shifts[0]:
return 0
elif t == self._shifts[-1]:
return len(self._shifts) - 1
elif t in self._shifts:
return bisect_left(self._shifts, t)
else:
return bisect_left(self._shifts, t) - 1
raise self._implement_error |
Assigning the workflow to itself.
The selected job is checked to see if there is an assigned role.
If it does not have a role assigned to it, it takes the job to itself
and displays a message that the process is successful.
If there is a role assigned to it, it does not do any operation
and the message is displayed on the screen.
.. code-block:: python
# request:
{
'task_inv_key': string,
}
def assign_yourself(self):
"""
Assigning the workflow to itself.
The selected job is checked to see if there is an assigned role.
If it does not have a role assigned to it, it takes the job to itself
and displays a message that the process is successful.
If there is a role assigned to it, it does not do any operation
and the message is displayed on the screen.
.. code-block:: python
# request:
{
'task_inv_key': string,
}
"""
task_invitation = TaskInvitation.objects.get(self.task_invitation_key)
wfi = task_invitation.instance
if not wfi.current_actor.exist:
wfi.current_actor = self.current.role
wfi.save()
[inv.delete() for inv in TaskInvitation.objects.filter(instance=wfi) if
not inv == task_invitation]
title = _(u"Successful")
msg = _(u"You have successfully assigned the job to yourself.")
else:
title = _(u"Unsuccessful")
msg = _(u"Unfortunately, this job is already taken by someone else.")
self.current.msg_box(title=title, msg=msg) |
The workflow method to be assigned to the person with the same role and unit as the user.
.. code-block:: python
# request:
{
'task_inv_key': string,
}
def select_role(self):
"""
The workflow method to be assigned to the person with the same role and unit as the user.
.. code-block:: python
# request:
{
'task_inv_key': string,
}
"""
roles = [(m.key, m.__unicode__()) for m in RoleModel.objects.filter(
abstract_role=self.current.role.abstract_role,
unit=self.current.role.unit) if m != self.current.role]
if roles:
_form = forms.JsonForm(title=_(u'Assign to workflow'))
_form.select_role = fields.Integer(_(u"Chose Role"), choices=roles)
_form.explain_text = fields.String(_(u"Explain Text"), required=False)
_form.send_button = fields.Button(_(u"Send"))
self.form_out(_form)
else:
title = _(u"Unsuccessful")
msg = _(u"Assign role not found")
self.current.msg_box(title=title, msg=msg) |
With the workflow instance and the task invitation is assigned a role.
def send_workflow(self):
"""
With the workflow instance and the task invitation is assigned a role.
"""
task_invitation = TaskInvitation.objects.get(self.task_invitation_key)
wfi = task_invitation.instance
select_role = self.input['form']['select_role']
if wfi.current_actor == self.current.role:
task_invitation.role = RoleModel.objects.get(select_role)
wfi.current_actor = RoleModel.objects.get(select_role)
wfi.save()
task_invitation.save()
[inv.delete() for inv in TaskInvitation.objects.filter(instance=wfi) if
not inv == task_invitation]
title = _(u"Successful")
msg = _(u"The workflow was assigned to someone else with success.")
else:
title = _(u"Unsuccessful")
msg = _(u"This workflow does not belong to you, you cannot assign it to someone else.")
self.current.msg_box(title=title, msg=msg) |
The time intervals at which the workflow is to be extended are determined.
.. code-block:: python
# request:
{
'task_inv_key': string,
}
def select_postponed_date(self):
"""
The time intervals at which the workflow is to be extended are determined.
.. code-block:: python
# request:
{
'task_inv_key': string,
}
"""
_form = forms.JsonForm(title="Postponed Workflow")
_form.start_date = fields.DateTime("Start Date")
_form.finish_date = fields.DateTime("Finish Date")
_form.save_button = fields.Button("Save")
self.form_out(_form) |
Invitations with the same workflow status are deleted.
Workflow instance and invitation roles change.
def save_date(self):
"""
Invitations with the same workflow status are deleted.
Workflow instance and invitation roles change.
"""
task_invitation = TaskInvitation.objects.get(self.task_invitation_key)
wfi = task_invitation.instance
if wfi.current_actor.exist and wfi.current_actor == self.current.role:
dt_start = datetime.strptime(self.input['form']['start_date'], "%d.%m.%Y")
dt_finish = datetime.strptime(self.input['form']['finish_date'], "%d.%m.%Y")
task_invitation.start_date = dt_start
task_invitation.finish_date = dt_finish
task_invitation.save()
wfi.start_date = dt_start
wfi.finish_date = dt_finish
wfi.save()
title = _(u"Successful")
msg = _(u"You've extended the workflow time.")
else:
title = _(u"Unsuccessful")
msg = _(u"This workflow does not belong to you.")
self.current.msg_box(title=title, msg=msg) |
If there is a role assigned to the workflow and
it is the same as the user, it can drop the workflow.
If it does not exist, it can not do anything.
.. code-block:: python
# request:
{
'task_inv_key': string,
}
    def suspend(self):
        """
        Release the workflow held by the current user.

        If the workflow has an assigned role and it matches the current
        user's role, the workflow is dropped: invitations are re-issued to
        the other roles in the same unit/abstract role and the instance's
        current actor is cleared. Otherwise nothing happens and an error
        message is shown.

        .. code-block:: python

            # request:
            {
                'task_inv_key': string,
            }
        """
        task_invitation = TaskInvitation.objects.get(self.task_invitation_key)
        wfi = task_invitation.instance
        if wfi.current_actor.exist and wfi.current_actor == self.current.role:
            # Re-issue the invitation to every peer role.
            # NOTE(review): clearing ``key`` before each save presumably makes
            # the ORM create a fresh invitation record per peer role rather
            # than overwriting one object repeatedly -- confirm with pyoko's
            # save semantics.
            for m in RoleModel.objects.filter(abstract_role=self.current.role.abstract_role,
                                              unit=self.current.role.unit):
                if m != self.current.role:
                    task_invitation.key = ''
                    task_invitation.role = m
                    task_invitation.save()
            # An empty RoleModel marks the instance as having no actor.
            wfi.current_actor = RoleModel()
            wfi.save()
            title = _(u"Successful")
            msg = _(u"You left the workflow.")
        else:
            title = _(u"Unsuccessful")
            msg = _(u"Unfortunately, this workflow does not belong to you or is already idle.")
        self.current.msg_box(title=title, msg=msg)
Finds out if the piece is on the home row.
:return: bool for whether piece is on home row or not
def on_home_row(self, location=None):
"""
Finds out if the piece is on the home row.
:return: bool for whether piece is on home row or not
"""
location = location or self.location
return (self.color == color.white and location.rank == 1) or \
(self.color == color.black and location.rank == 6) |
Finds if move from current get_location would result in promotion
:type: location: Location
:rtype: bool
def would_move_be_promotion(self, location=None):
"""
Finds if move from current get_location would result in promotion
:type: location: Location
:rtype: bool
"""
location = location or self.location
return (location.rank == 1 and self.color == color.black) or \
(location.rank == 6 and self.color == color.white) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.