repo_name stringlengths 7 65 | path stringlengths 5 185 | copies stringlengths 1 4 | size stringlengths 4 6 | content stringlengths 977 990k | license stringclasses 14 values | hash stringlengths 32 32 | line_mean float64 7.18 99.4 | line_max int64 31 999 | alpha_frac float64 0.25 0.95 | ratio float64 1.5 7.84 | autogenerated bool 1 class | config_or_test bool 2 classes | has_no_keywords bool 2 classes | has_few_assignments bool 1 class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
jupyterhub/oauthenticator | oauthenticator/gitlab.py | 1 | 7764 | """
Custom Authenticator to use GitLab OAuth with JupyterHub
"""
import os
import warnings
from urllib.parse import quote
from jupyterhub.auth import LocalAuthenticator
from tornado.escape import url_escape
from tornado.httpclient import HTTPRequest
from traitlets import CUnicode, Set, Unicode, default
from .oauth2 import OAuthenticator
def _api_headers(access_token):
return {
"Accept": "application/json",
"User-Agent": "JupyterHub",
"Authorization": "Bearer {}".format(access_token),
}
class GitLabOAuthenticator(OAuthenticator):
    """JupyterHub authenticator that delegates login to a GitLab instance via OAuth."""
    # see gitlab_scopes.md for details about scope config
    # set scopes via config, e.g.
    # c.GitLabOAuthenticator.scope = ['read_user']
    # Deprecated config names mapped to (replacement, version deprecated in);
    # resolution is handled by the OAuthenticator base class.
    _deprecated_oauth_aliases = {
        "gitlab_group_whitelist": ("allowed_gitlab_groups", "0.12.0"),
        "gitlab_project_id_whitelist": ("allowed_project_ids", "0.12.0"),
        **OAuthenticator._deprecated_oauth_aliases,
    }
    login_service = "GitLab"
    # Environment variables the base class reads for OAuth client credentials.
    client_id_env = 'GITLAB_CLIENT_ID'
    client_secret_env = 'GITLAB_CLIENT_SECRET'
    @default("user_auth_state_key")
    def _user_auth_state_key_default(self):
        # Key under auth_state where the GitLab user-info dict is stored.
        return "gitlab_user"
    # Base URL of the GitLab deployment (configurable for self-hosted instances).
    gitlab_url = Unicode("https://gitlab.com", config=True)
    @default("gitlab_url")
    def _default_gitlab_url(self):
        """get default gitlab url from env"""
        gitlab_url = os.getenv('GITLAB_URL')
        gitlab_host = os.getenv('GITLAB_HOST')
        # GITLAB_HOST is the legacy variable; honor it only when GITLAB_URL
        # is not set, and warn so operators migrate.
        if not gitlab_url and gitlab_host:
            warnings.warn(
                'Use of GITLAB_HOST might be deprecated in the future. '
                'Rename GITLAB_HOST environment variable to GITLAB_URL.',
                PendingDeprecationWarning,
            )
            if gitlab_host.startswith(('https:', 'http:')):
                gitlab_url = gitlab_host
            else:
                # Hides common mistake of users which set the GITLAB_HOST
                # without a protocol specification.
                gitlab_url = 'https://{0}'.format(gitlab_host)
                warnings.warn(
                    'The https:// prefix has been added to GITLAB_HOST.'
                    'Set GITLAB_URL="{0}" instead.'.format(gitlab_host)
                )
        # default to gitlab.com
        if not gitlab_url:
            gitlab_url = 'https://gitlab.com'
        return gitlab_url
    # REST API version to use; CUnicode so an int from config coerces cleanly.
    gitlab_api_version = CUnicode('4', config=True)
    @default('gitlab_api_version')
    def _gitlab_api_version_default(self):
        return os.environ.get('GITLAB_API_VERSION') or '4'
    gitlab_api = Unicode(config=True)
    @default("gitlab_api")
    def _default_gitlab_api(self):
        # e.g. https://gitlab.com/api/v4
        return '%s/api/v%s' % (self.gitlab_url, self.gitlab_api_version)
    @default("authorize_url")
    def _authorize_url_default(self):
        return "%s/oauth/authorize" % self.gitlab_url
    @default("token_url")
    def _token_url_default(self):
        return "%s/oauth/token" % self.gitlab_url
    @default("userdata_url")
    def _userdata_url_default(self):
        return "%s/user" % self.gitlab_api
    # Deprecated alias for allowed_gitlab_groups (see _deprecated_oauth_aliases).
    gitlab_group_whitelist = Set(
        help="Deprecated, use `GitLabOAuthenticator.allowed_gitlab_groups`",
        config=True,
    )
    allowed_gitlab_groups = Set(
        config=True, help="Automatically allow members of selected groups"
    )
    # Deprecated alias for allowed_project_ids (see _deprecated_oauth_aliases).
    gitlab_project_id_whitelist = Set(
        help="Deprecated, use `GitLabOAuthenticator.allowed_project_ids`",
        config=True,
    )
    allowed_project_ids = Set(
        config=True,
        help="Automatically allow members with Developer access to selected project ids",
    )
    # Cached [major, minor, patch] of the GitLab server; filled lazily on
    # first authorization and shared for the lifetime of the class instance.
    gitlab_version = None
    async def user_is_authorized(self, auth_model):
        """Return True if the authenticated user passes group/project checks.

        With neither allowed_gitlab_groups nor allowed_project_ids
        configured, everyone who authenticates is authorized.
        """
        access_token = auth_model["auth_state"]["token_response"]["access_token"]
        user_id = auth_model["auth_state"][self.user_auth_state_key]["id"]
        # memoize gitlab version for class lifetime
        if self.gitlab_version is None:
            self.gitlab_version = await self._get_gitlab_version(access_token)
            # GitLab >= 12.4 supports the members/all/ endpoint, which also
            # reports inherited membership.
            self.member_api_variant = 'all/' if self.gitlab_version >= [12, 4] else ''
        # Check if user is a member of any allowed groups or projects.
        # These checks are performed here, as it requires `access_token`.
        user_in_group = user_in_project = False
        is_group_specified = is_project_id_specified = False
        if self.allowed_gitlab_groups:
            is_group_specified = True
            user_in_group = await self._check_membership_allowed_groups(
                user_id, access_token
            )
        # We skip project_id check if user is in allowed group.
        if self.allowed_project_ids and not user_in_group:
            is_project_id_specified = True
            user_in_project = await self._check_membership_allowed_project_ids(
                user_id, access_token
            )
        no_config_specified = not (is_group_specified or is_project_id_specified)
        if (
            (is_group_specified and user_in_group)
            or (is_project_id_specified and user_in_project)
            or no_config_specified
        ):
            return True
        self.log.warning(
            "%s not in group or project allowed list",
            auth_model["name"],
        )
        return False
    async def _get_gitlab_version(self, access_token):
        """Query /version and return [major, minor, patch] as ints."""
        url = '%s/version' % self.gitlab_api
        req = HTTPRequest(
            url,
            method="GET",
            headers=_api_headers(access_token),
            validate_cert=self.validate_server_cert,
        )
        resp_json = await self.fetch(req)
        # Strip any "-ee"/"-rc" style suffix, then take at most three components.
        version_strings = resp_json['version'].split('-')[0].split('.')[:3]
        version_ints = list(map(int, version_strings))
        return version_ints
    async def _check_membership_allowed_groups(self, user_id, access_token):
        """Return True if user_id is a member of any allowed GitLab group.

        NOTE(review): assumes self.member_api_variant was already set by
        user_is_authorized — confirm no other caller reaches this first.
        """
        headers = _api_headers(access_token)
        # Check if user is a member of any group in the allowed list
        # NOTE(review): group names are escaped twice (url_escape, then
        # quote) — '%' from the first pass gets re-encoded; verify against
        # the GitLab groups API for names containing special characters.
        for group in map(url_escape, self.allowed_gitlab_groups):
            url = "%s/groups/%s/members/%s%d" % (
                self.gitlab_api,
                quote(group, safe=''),
                self.member_api_variant,
                user_id,
            )
            req = HTTPRequest(
                url,
                method="GET",
                headers=headers,
                validate_cert=self.validate_server_cert,
            )
            # A 200 means the user is a member; any other code is treated as
            # "not a member" (raise_error=False suppresses HTTP errors).
            resp = await self.fetch(req, raise_error=False, parse_json=False)
            if resp.code == 200:
                return True  # user _is_ in group
        return False
    async def _check_membership_allowed_project_ids(self, user_id, access_token):
        """Return True if user_id has Developer+ access to any allowed project."""
        headers = _api_headers(access_token)
        # Check if user has developer access to any project in the allowed list
        for project in self.allowed_project_ids:
            url = "%s/projects/%s/members/%s%d" % (
                self.gitlab_api,
                project,
                self.member_api_variant,
                user_id,
            )
            req = HTTPRequest(
                url,
                method="GET",
                headers=headers,
                validate_cert=self.validate_server_cert,
            )
            resp_json = await self.fetch(req, raise_error=False)
            if resp_json:
                access_level = resp_json.get('access_level', 0)
                # We only allow access level Developer and above
                # Reference: https://docs.gitlab.com/ee/api/members.html
                if access_level >= 30:
                    return True
        return False
class LocalGitLabOAuthenticator(LocalAuthenticator, GitLabOAuthenticator):
    """GitLab OAuth authenticator that also manages local system users."""
| bsd-3-clause | 7d05d881fac97aac98d1d65b43d9bd7f | 33.202643 | 89 | 0.588614 | 3.983581 | false | false | false | false |
samuel/kokki | kokki/cookbooks/cloudkick/recipes/default.py | 1 | 2131 |
from kokki import Execute, Fail, File, Template, Package, Service
# Fail fast if the required Cloudkick settings are missing.
# NOTE(review): `env` is injected into recipe globals by the kokki runtime.
assert env.config.cloudkick.oauth_key and env.config.cloudkick.oauth_secret and env.config.cloudkick.hostname
# APT source-list fragment that will hold the Cloudkick repository line.
apt_list_path = '/etc/apt/sources.list.d/cloudkick.list'
apt = None
# Choose the Cloudkick APT repository matching the distribution release.
if env.system.platform == "ubuntu":
    ver = env.system.lsb['release']
    if ver in ("10.10", "11.04", "11.10"):
        # NOTE(review): 11.04/11.10 reuse the maverick repo — presumably no
        # newer Cloudkick packages exist for those releases; confirm.
        apt = "deb http://packages.cloudkick.com/ubuntu maverick main"
    elif ver == "10.04":
        apt = "deb http://packages.cloudkick.com/ubuntu lucid main"
    elif ver == "9.10":
        apt = "deb http://packages.cloudkick.com/ubuntu karmic main"
    elif ver == "9.04":
        apt = "deb http://packages.cloudkick.com/ubuntu jaunty main"
    elif ver == "8.10":
        apt = "deb http://packages.cloudkick.com/ubuntu intrepid main"
    elif ver == "8.04":
        apt = "deb http://packages.cloudkick.com/ubuntu hardy main"
    elif ver == "6.04":
        apt = "deb http://packages.cloudkick.com/ubuntu dapper main"
elif env.system.platform == "debian":
    ver = env.system.lsb['release']
    # Debian falls back to the Ubuntu lucid repository.
    apt = "deb http://packages.cloudkick.com/ubuntu lucid main"
    # if ver == '5.0':
    #     apt = "deb http://apt.librato.com/debian/ lenny non-free"
if not apt:
    raise Fail("Can't find a cloudkick package for your platform/version")
# Declared with action "nothing" so it only runs when notified by the
# sources.list file resource below.
Execute("apt-update-cloudkick",
    command = "apt-get update",
    action = "nothing")
# Import the Cloudkick signing key once (guarded by the apt-key grep).
Execute("curl http://packages.cloudkick.com/cloudkick.packages.key | apt-key add -",
    not_if = "(apt-key list | grep 'Cloudkick' > /dev/null)")
# Write the repository entry; triggers apt-get update when it changes.
File(apt_list_path,
    owner = "root",
    group ="root",
    mode = 0644,
    content = apt+"\n",
    notifies = [("run", env.resources["Execute"]["apt-update-cloudkick"], True)])
# Agent configuration rendered from a Jinja template.
File("/etc/cloudkick.conf",
    owner = "root",
    group = "root",
    mode = 0644,
    content = Template("cloudkick/cloudkick.conf.j2"))
Package("cloudkick-agent",
    action = "upgrade")
# Restart the agent whenever its configuration file changes.
Service("cloudkick-agent",
    supports_restart = True,
    subscribes = [("restart", env.resources["File"]["/etc/cloudkick.conf"])])
Package("libssl0.9.8") # This seems to not get installed for some reason
| bsd-3-clause | c998108a99c7d8ac224723d0b22845d0 | 34.516667 | 109 | 0.644768 | 3.194903 | false | false | false | false |
samuel/kokki | kokki/cookbooks/ssh/libraries/utils.py | 1 | 5290 |
import hashlib
import hmac
import os
from base64 import b64decode, b64encode
from kokki import Fail, Environment
class SSHKnownHostsFile(object):
    """Parse, query and rewrite an OpenSSH ``known_hosts`` file.

    Entries are stored in ``self.hosts`` as tuples:

    * ``(0, hostname, keytype, key)`` for plain-text entries
    * ``(1, salt, hosthash, keytype, key)`` for hashed ``|1|...`` entries

    Hostnames are compared case-insensitively (lower-cased on the way in).
    """

    def __init__(self, path=None):
        # Fix: parse() used to be called unconditionally, so the default
        # path=None crashed inside open(). Mirror the guard used by
        # SSHAuthorizedKeysFile: start empty unless a path is supplied.
        self.hosts = []
        if path is not None:
            self.parse(path)

    def parse(self, path):
        """Load entries from *path*, replacing any current contents."""
        self.hosts = []
        with open(path, "r") as fp:
            for line in fp:
                line = line.strip()
                if not line:
                    continue
                addr, keytype, key = line.split(' ')
                if addr.startswith('|1|'):
                    # Hashed host entry: |1|base64(salt)|base64(hmac-sha1(host))
                    salt, hosthash = addr.split('|')[2:]
                    self.hosts.append((1, b64decode(salt), b64decode(hosthash), keytype, key))
                else:
                    # Unhashed entry; may list several comma-separated hosts.
                    for a in addr.split(','):
                        self.hosts.append((0, a, keytype, key))

    def save(self, path):
        """Serialize the current entries back to *path*."""
        with open(path, "w") as fp:
            fp.write(str(self))

    def includes(self, host):
        """Return True if *host* matches any plain or hashed entry."""
        host = host.lower()
        for h in self.hosts:
            if h[0] == 0:
                if h[1] == host:
                    return True
            elif h[0] == 1:
                # Re-hash with the entry's own salt and compare digests.
                hosthash = self.hash(host, h[1])[0]
                if hosthash == h[2]:
                    return True
        return False

    def hash(self, host, salt=None):
        """Return ``(digest, salt)`` for *host*, generating a salt if needed.

        NOTE(review): hmac.new() needs bytes under Python 3, but *host* is a
        str here — this module appears to target Python 2; confirm before
        relying on hashed entries on Python 3.
        """
        if not salt:
            salt = self.generate_salt()
        return hmac.new(salt, host, digestmod=hashlib.sha1).digest(), salt

    def generate_salt(self):
        """Random 20-byte salt, matching OpenSSH's hashed-host format."""
        return os.urandom(20)

    def add_host(self, host, keytype, key, hashed=True, verify=True):
        """Add an entry for *host*.

        Returns False (without adding) when *verify* is set and the host is
        already present; True otherwise.
        """
        host = host.lower()
        if verify and self.includes(host):
            return False
        if hashed:
            hosthash, salt = self.hash(host)
            self.hosts.append((1, salt, hosthash, keytype, key))
        else:
            self.hosts.append((0, host, keytype, key))
        return True

    def remove_host(self, host):
        """Remove all entries matching *host*; True if anything was removed."""
        host = host.lower()
        remaining = []
        for h in self.hosts:
            if h[0] == 0:
                if h[1] == host:
                    continue
            elif h[0] == 1:
                if self.hash(host, h[1])[0] == h[2]:
                    continue
            remaining.append(h)
        removed = len(remaining) != len(self.hosts)
        self.hosts = remaining
        return removed

    def __str__(self):
        out = []
        unhashed = {}  # group plain hosts sharing the same (keytype, key)
        for h in self.hosts:
            if h[0] == 0:
                k = (h[2], h[3])
                if k not in unhashed:
                    unhashed[k] = [h[1]]
                else:
                    unhashed[k].append(h[1])
            elif h[0] == 1:
                out.append("|1|%s|%s %s %s" % (b64encode(h[1]), b64encode(h[2]), h[3], h[4]))
        for k, host in unhashed.items():
            out.append("%s %s %s" % (",".join(host), k[0], k[1]))
        out.append("")
        return "\n".join(out)
class SSHAuthorizedKeysFile(object):
    """In-memory model of an OpenSSH ``authorized_keys`` file.

    Entries live in ``self.keys``, a dict mapping ``(keytype, key)`` to the
    trailing name/comment field.
    """

    def __init__(self, path=None):
        self.keys = {}
        if path:
            self.parse(path)

    def parse(self, path):
        """Load entries from *path*; a missing file leaves the map empty."""
        self.keys = {}
        try:
            with open(path, "r") as fp:
                for raw in fp:
                    entry = raw.strip()
                    if not entry:
                        continue
                    if entry.startswith("command="):
                        # Drop the options prefix, keeping from the key type on.
                        entry = entry[entry.find("ssh-"):]
                    parts = entry.split(' ')
                    if len(parts) == 3:
                        ktype, kdata, label = parts
                    else:
                        ktype, kdata = parts
                        label = ""
                    self.keys[(ktype, kdata)] = label
        except IOError as exc:
            if exc.errno != 2:  # anything but "No such file or directory"
                raise

    def save(self, path):
        """Write the current entries to *path*."""
        with open(path, "w") as fp:
            fp.write(str(self))

    def includes(self, keytype, key):
        """Return True if an entry for (keytype, key) exists."""
        return (keytype, key) in self.keys

    def add_key(self, keytype, key, name, verify=True):
        """Add an entry; False if *verify* is set and it already exists."""
        if verify and (keytype, key) in self.keys:
            return False
        self.keys[(keytype, key)] = name
        return True

    def remove_key(self, keytype, key):
        """Remove an entry; True if it was present."""
        if (keytype, key) not in self.keys:
            return False
        del self.keys[(keytype, key)]
        return True

    def __str__(self):
        lines = ["%s %s %s" % (ktype, kdata, label)
                 for (ktype, kdata), label in self.keys.items()]
        lines.append("")
        return "\n".join(lines)
def ssh_path_for_user(user):
    """Return the .ssh directory path for *user* on the current platform."""
    env = Environment.get_instance()
    if env.system.os == "linux":
        # root's home is not under /home on Linux.
        return "/root/.ssh/" if user == "root" else "/home/%s/.ssh/" % user
    if env.system.platform == "mac_os_x":
        return "/Users/%s/.ssh/" % user
    raise Fail("Unable to determine ssh path for user %s on os %s platform %s" % (user, env.system.os, env.system.platform))
| bsd-3-clause | 86e9a329830079537e9b0948ea6231b9 | 30.117647 | 124 | 0.462571 | 3.956619 | false | false | false | false |
samuel/kokki | kokki/cookbooks/aws/libraries/volume.py | 1 | 1206 |
from kokki import Environment, Mount, Execute, Package
def setup_ebs_volume(name=None, availability_zone=None, volume_id=None, device=None, linux_device=None, snapshot_id=None, size=None, fstype=None, mount_point=None, fsoptions=None):
    """Declare kokki resources to attach, format, and mount an EBS volume.

    With volume_id the existing volume is only attached; otherwise a new
    volume is created (optionally from snapshot_id / with size) and attached.
    If fstype is given the device is formatted (guarded so existing
    filesystems are kept), and if mount_point is given it is mounted and
    enabled in fstab.
    """
    env = Environment.get_instance()
    # The OS may expose the device under a different name than EC2 uses.
    if linux_device is None:
        linux_device = device
    env.cookbooks.aws.EBSVolume(name or volume_id,
        volume_id = volume_id,
        availability_zone = availability_zone or env.config.aws.availability_zone,
        device = device,
        linux_device = linux_device,
        snapshot_id = snapshot_id,
        size = size,
        action = "attach" if volume_id else ["create", "attach"])
    if fstype:
        if fstype == "xfs":
            # mkfs.xfs lives in xfsprogs, which is not installed by default.
            Package("xfsprogs")
        # Only format when `file -s` still reports raw "data" (no filesystem).
        Execute("mkfs.%(fstype)s -f %(device)s" % dict(fstype=fstype, device=linux_device),
            not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % dict(device=linux_device))
    if mount_point:
        Mount(mount_point,
            device = linux_device,
            fstype = fstype,
            options = fsoptions if fsoptions is not None else ["noatime"],
            action = ["mount", "enable"])
| bsd-3-clause | 408de29da1ddda0bdbe79ae9e7604d7d | 39.2 | 180 | 0.61194 | 3.526316 | false | false | false | false |
samuel/kokki | kokki/providers/package/emerge.py | 1 | 1953 |
from subprocess import Popen, STDOUT, PIPE, check_call
from kokki.base import Fail
from kokki.providers.package import PackageProvider
class GentooEmergeProvider(PackageProvider):
    """Package provider for Gentoo's portage tools (qlist/emerge).

    NOTE(review): output parsing assumes Popen returns text (Python 2);
    under Python 3 stdout would be bytes — confirm the target interpreter.
    """
    def get_current_status(self):
        # Determine installed (current) and available (candidate) versions.
        self.current_version = None
        self.candidate_version = None
        # qlist prints installed atoms as "category/name-version".
        proc = Popen("qlist --installed --exact --verbose --nocolor %s"
            % self.resource.package_name, shell=True, stdout=PIPE)
        out = proc.communicate()[0]
        for line in out.split("\n"):
            line = line.split('/', 1)
            if len(line) != 2:
                continue
            _category, nameversion = line
            # NOTE(review): split on the first '-' mis-parses package names
            # that themselves contain hyphens — confirm against qlist output.
            _name, version = nameversion.split('-', 1)
            self.current_version = version
        self.log.debug("Current version of package %s is %s",
            self.resource.package_name, self.current_version)
        # A dry-run emerge reveals what version would be installed.
        proc = Popen("emerge --pretend --quiet --color n %s" % self.resource.package_name, shell=True, stdout=PIPE)
        out = proc.communicate()[0]
        for line in out.split("\n"):
            # Lines look like "[ebuild  N ] category/name-version ...".
            line = line.strip(' [').split(']', 1)
            if len(line) != 2:
                continue
            # kind, flag = line[0].split()
            _category, nameversion = line[1].split('/', 1)
            _name, version = nameversion.split('-', 1)
            self.candidate_version = version
        self.log.debug("Candidate version of package %s is %s",
            self.resource.package_name, self.candidate_version)
        if self.candidate_version is None:
            raise Fail("emerge does not provide a version of package %s" % self.resource.package_name)
    def install_package(self, name, version):
        # check_call raises on non-zero exit, so this returns True on success
        # and propagates CalledProcessError on failure.
        return 0 == check_call("emerge --color n =%s-%s" % (name, version),
            shell=True, stdout=PIPE, stderr=STDOUT)
    def upgrade_package(self, name, version):
        # Portage treats upgrade as installing the candidate version.
        return self.install_package(name, version)
| bsd-3-clause | 20402337303c3667e89d5a0bbd33b76d | 41.456522 | 115 | 0.582693 | 4.077244 | false | false | false | false |
samuel/kokki | kokki/cookbooks/java/recipes/default.py | 2 | 1968 |
import os
from kokki import Package, Execute, File, Script
# debconf-utils provides debconf-set-selections, used below to preseed the
# Sun Java license acceptance.
Package("debconf-utils")
# Declared with action "nothing" so it only runs when notified by a change
# to the multiverse sources list below.
Execute("apt-update-java",
    command = "apt-get update",
    action = "nothing")
if env.system.lsb['codename'] == 'karmic':
    def enter_the_multiverse():
        # Derive the mirror URL from the first entry in sources.list and
        # emit matching multiverse repository lines.
        with open("/etc/apt/sources.list", "r") as fp:
            source = fp.read().split(' ')[1]
        return (
            "deb {source} karmic multiverse\n"
            "deb-src {source} karmic multiverse\n"
            "deb {source} karmic-updates multiverse\n"
            "deb-src {source} karmic-updates multiverse\n"
            "deb http://security.ubuntu.com/ubuntu karmic-security multiverse\n"
        ).format(source=source)
    # Write the multiverse list once and trigger an apt-get update.
    File("/etc/apt/sources.list.d/multiverse.list",
        owner = "root",
        group = "root",
        mode = 0644,
        not_if = lambda:os.path.exists("/etc/apt/sources.list.d/multiverse.list"),
        content = enter_the_multiverse,
        notifies = [("run", env.resources["Execute"]["apt-update-java"], True)])
# On lucid/maverick the Sun Java packages live in the Canonical partner repo.
ubuntu_sources = ("lucid", "maverick")
if env.system.lsb['codename'] in ubuntu_sources:
    Execute('add-apt-repository "deb http://archive.canonical.com/ %s partner" ; apt-get update' % env.system.lsb['codename'],
        not_if = "grep '%s partner' /etc/apt/sources.list > /dev/null" % env.system.lsb['codename'])
# Preseed debconf so the Sun license prompt never blocks the install;
# guarded so it only runs until the acceptance is recorded.
Script("accept-java-license",
    not_if = "debconf-show sun-java6-jre | grep accepted > /dev/null",
    cwd = "/usr/local/src",
    code = """#!/bin/sh
echo 'sun-java6-bin shared/accepted-sun-dlj-v1-1 boolean true
sun-java6-jdk shared/accepted-sun-dlj-v1-1 boolean true
sun-java6-jre shared/accepted-sun-dlj-v1-1 boolean true
sun-java6-jre sun-java6-jre/stopthread boolean true
sun-java6-jre sun-java6-jre/jcepolicy note
sun-java6-bin shared/present-sun-dlj-v1-1 note
sun-java6-jdk shared/present-sun-dlj-v1-1 note
sun-java6-jre shared/present-sun-dlj-v1-1 note
'|debconf-set-selections""")
| bsd-3-clause | 3338be4fe88eefae0b131df16706a657 | 40 | 126 | 0.640244 | 2.995434 | false | false | false | false |
samuel/kokki | kokki/providers/package/__init__.py | 1 | 2458 |
from kokki.base import Fail
from kokki.providers import Provider
class PackageProvider(Provider):
    """Base class for platform package providers.

    Subclasses implement the low-level package operations and status
    detection; this class supplies the ``action_*`` entry points dispatched
    by the resource framework, deciding from the current vs. candidate
    version whether any work is needed.
    """

    def __init__(self, *args, **kwargs):
        super(PackageProvider, self).__init__(*args, **kwargs)
        # Populate self.current_version / self.candidate_version up front.
        self.get_current_status()

    def get_current_status(self):
        """Set self.current_version and self.candidate_version. Subclass hook."""
        raise NotImplementedError()

    def install_package(self, name, version):
        """Install *name* at *version*. Subclass hook."""
        raise NotImplementedError()

    def remove_package(self, name):
        """Remove *name*. Subclass hook."""
        raise NotImplementedError()

    def purge_package(self, name):
        """Purge *name*, including configuration. Subclass hook."""
        raise NotImplementedError()

    def upgrade_package(self, name, version):
        """Upgrade *name* to *version*. Subclass hook."""
        raise NotImplementedError()

    def action_install(self):
        """Install the requested (or candidate) version if not already present."""
        # Idiom fix: compare to None with `is not` (PEP 8), not `!=`.
        if self.resource.version is not None and self.resource.version != self.current_version:
            install_version = self.resource.version
        elif self.current_version is None:
            install_version = self.candidate_version
        else:
            # Already installed and no explicit version requested.
            return
        if not install_version:
            raise Fail("No version specified, and no candidate version available for package %s." % self.resource.package_name)
        self.log.info("Install %s version %s (resource %s, current %s, candidate %s) location %s",
            self.resource.package_name, install_version, self.resource.version,
            self.current_version, self.candidate_version, self.resource.location)
        status = self.install_package(self.resource.location, install_version)
        if status:
            self.resource.updated()

    def action_upgrade(self):
        """Upgrade to the candidate version when it differs from the current one."""
        if self.current_version != self.candidate_version:
            orig_version = self.current_version or "uninstalled"
            self.log.info("Upgrading %s from version %s to %s",
                str(self.resource), orig_version, self.candidate_version)
            status = self.upgrade_package(self.resource.location, self.candidate_version)
            if status:
                self.resource.updated()

    def action_remove(self):
        """Remove the package if it is currently installed."""
        if self.current_version:
            self.log.info("Remove %s version %s", self.resource.package_name, self.current_version)
            self.remove_package(self.resource.package_name)
            self.resource.updated()

    def action_purge(self):
        """Purge the package if it is currently installed."""
        if self.current_version:
            self.log.info("Purging %s version %s", self.resource.package_name, self.current_version)
            self.purge_package(self.resource.package_name)
            self.resource.updated()
| bsd-3-clause | 1ce36f30dc8b7996c4885d2da7858b0b | 37.40625 | 127 | 0.643613 | 4.289703 | false | false | false | false |
hydroshare/hydroshare | hs_core/views/resource_rest_api.py | 1 | 37299 | import os
import mimetypes
import copy
import tempfile
import shutil
import logging
import json
from django.urls import reverse
from django.core.exceptions import ObjectDoesNotExist, SuspiciousFileOperation
from django.core.exceptions import ValidationError as CoreValidationError
from django.http import HttpResponseRedirect
from django.shortcuts import redirect
from django.contrib.sites.models import Site
from rest_framework.pagination import PageNumberPagination
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import generics, status
from rest_framework.request import Request
from rest_framework.exceptions import ValidationError, NotAuthenticated, PermissionDenied, NotFound
from hs_core import hydroshare
from hs_core.models import AbstractResource
from hs_core.hydroshare.utils import get_resource_by_shortkey, get_resource_types, \
get_content_types
from hs_core.views import utils as view_utils
from hs_core.views.utils import ACTION_TO_AUTHORIZE
from hs_core.views import serializers
from hs_core.hydroshare.utils import get_file_storage, resource_modified
from hs_core.serialization import GenericResourceMeta, HsDeserializationDependencyException, \
HsDeserializationException
from hs_core.hydroshare.hs_bagit import create_bag_metadata_files
from drf_yasg.utils import swagger_auto_schema
from drf_yasg import openapi
from rest_framework.parsers import MultiPartParser
# Module-level logger, named after this module per the logging convention.
logger = logging.getLogger(__name__)
# Mixins shared by the API views below
class ResourceFileToListItemMixin(object):
    """Mixin converting a resource file object into a serializer list item."""

    def resourceFileToListItem(self, f):
        """Build a ResourceFileItem describing resource file *f*."""
        # URLs in metadata should be fully qualified.
        # ALWAYS qualify them with www.hydroshare.org, rather than the local server name.
        site_url = hydroshare.utils.current_site_url()
        full_url = site_url + f.url
        # Guess the MIME type from the URL; fall back to the string 'None'
        # (repr(None)) when no type can be determined.
        guessed_type = mimetypes.guess_type(full_url)[0]
        ftype = guessed_type if guessed_type else repr(None)
        return serializers.ResourceFileItem(
            url=full_url,
            file_name=os.path.basename(f.resource_file.name),
            size=f.size,
            content_type=ftype,
            logical_file_type=f.logical_file_type_name,
            modified_time=f.modified_time,
            checksum=f.checksum,
        )
class ResourceTypes(generics.ListAPIView):
    """Read-only endpoint listing the names of all registered resource types."""

    # The full list is short, so it is returned unpaginated.
    pagination_class = None

    @swagger_auto_schema(operation_description="List Resource Types",
                         responses={200: serializers.ResourceTypesSerializer})
    def get(self, request):
        """Return the resource type list."""
        return self.list(request)

    def get_queryset(self):
        """Wrap every registered resource type class in a ResourceType item."""
        type_classes = get_resource_types()
        return [serializers.ResourceType(resource_type=cls.__name__)
                for cls in type_classes]

    def get_serializer_class(self):
        return serializers.ResourceTypesSerializer
class ContentTypes(generics.ListAPIView):
    """Read-only endpoint listing the names of all registered content types."""

    # The full list is short, so it is returned unpaginated.
    pagination_class = None

    @swagger_auto_schema(operation_description="List Content Types",
                         responses={200: serializers.ContentTypesSerializer})
    def get(self, request):
        """Return the content type list."""
        return self.list(request)

    def get_queryset(self):
        """Wrap every registered content type class in a ContentType item."""
        wrapped = []
        for cls in get_content_types():
            wrapped.append(serializers.ContentType(content_type=cls.__name__))
        return wrapped

    def get_serializer_class(self):
        return serializers.ContentTypesSerializer
class CheckTaskStatus(generics.RetrieveAPIView):
    """Endpoint redirecting to the status page of an asynchronous task."""

    # TODO, setup a serializer for in, figure out if redirect is needed...
    tid = openapi.Parameter('task_id', openapi.IN_PATH, description="id of the task", type=openapi.TYPE_STRING)

    @swagger_auto_schema(operation_description="Get the status of an asynchronous task",
                         responses={200: serializers.CheckStatusSerializer}, manual_parameters=[tid])
    def get(self, request, task_id):
        """Redirect the caller to the task-status view for *task_id*.

        :param request: the incoming request
        :param task_id: id of the task
        :return: redirect to the JSON task-status endpoint
        """
        status_url = reverse('rest_check_task_status', kwargs={'task_id': task_id})
        return HttpResponseRedirect(status_url)
class ResourceReadUpdateDelete(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve a resource as a zipped BagIt archive, or delete it.

    PUT (replace a resource from an uploaded bag) is declared but not
    yet implemented.
    """
    # pagination doesn't make sense as there is only one resource
    pagination_class = None
    allowed_methods = ('GET', 'PUT', 'DELETE')
    @swagger_auto_schema(operation_description="Get a resource in zipped BagIt format",
                         responses={200: serializers.TaskStatusSerializer})
    def get(self, request, pk):
        # Raises if the requesting user may not view the resource.
        res, _, _ = view_utils.authorize(request, pk,
                                         needed_permission=ACTION_TO_AUTHORIZE.VIEW_RESOURCE)
        # Redirect to the download endpoint serving the bag zip for this id.
        bag_url = reverse('rest_download',
                          kwargs={'path': 'bags/{}.zip'.format(pk)})
        return HttpResponseRedirect(bag_url)
    @swagger_auto_schema(operation_description="Not Implemented")
    def put(self, request, pk):
        # TODO: update resource - involves overwriting a resource from the provided bag file
        raise NotImplementedError()
    def delete(self, request, pk):
        """Delete the resource identified by *pk* (owners only)."""
        # only resource owners are allowed to delete
        view_utils.authorize(request, pk, needed_permission=ACTION_TO_AUTHORIZE.DELETE_RESOURCE)
        hydroshare.delete_resource(pk, request_username=request.user.username)
        return Response(status=status.HTTP_204_NO_CONTENT)
class ResourceListCreate(generics.ListCreateAPIView):
    """List resources (GET) and create a new resource (POST)."""
    @swagger_auto_schema(request_body=serializers.ResourceCreateRequestValidator,
                         operation_description="Create a resource",
                         responses={201: serializers.ResourceCreatedSerializer})
    def post(self, request):
        return self.create(request)
    # Override the create() method from the CreateAPIView class
    def create(self, request, *args, **kwargs):
        """Validate the request, then run the pre-create / create /
        post-create pipeline. Returns 201 with the new resource id; any
        post-create failure is reported in the response 'message' field
        rather than failing the request.
        """
        if not request.user.is_authenticated:
            raise NotAuthenticated()
        resource_create_request_validator = serializers.ResourceCreateRequestValidator(
            data=request.data)
        if not resource_create_request_validator.is_valid():
            raise ValidationError(detail=resource_create_request_validator.errors)
        validated_request_data = resource_create_request_validator.validated_data
        resource_type = validated_request_data['resource_type']
        if resource_type in ["RasterResource", "GeographicFeatureResource", "RefTimeSeriesResource",
                             "NetcdfResource", "TimeSeriesResource", "GenericResource"]:
            # force deprecated resource types to composite
            resource_type = "CompositeResource"
        res_title = validated_request_data.get('title', 'Untitled resource')
        keywords = validated_request_data.get('keywords', [])
        abstract = validated_request_data.get('abstract', None)
        metadata = validated_request_data.get('metadata', None)
        extra_metadata = validated_request_data.get('extra_metadata', None)
        num_files = len(request.FILES)
        # TODO: (Couch) reconsider whether multiple file upload should be
        # supported when multipart bug fixed.
        if num_files > 0:
            if num_files > 1:
                raise ValidationError(detail={'file': 'Multiple file upload is not allowed on '
                                                      'resource creation. Add additional files '
                                                      'after the resource is created.'})
            # Place files into format expected by hydroshare.utils.resource_pre_create_actions and
            # hydroshare.create_resource, i.e. a tuple of
            # django.core.files.uploadedfile.TemporaryUploadedFile objects.
            files = [request.FILES['file'], ]
        else:
            files = []
        # metadata / extra_metadata arrive as JSON strings; decode before use.
        if metadata is not None:
            metadata = json.loads(metadata)
            _validate_metadata(metadata)
        if extra_metadata is not None:
            extra_metadata = json.loads(extra_metadata)
            # TODO: validate extra metadata here
        try:
            _, res_title, metadata = hydroshare.utils.resource_pre_create_actions(
                resource_type=resource_type, resource_title=res_title,
                page_redirect_url_key=None, files=files, metadata=metadata,
                **kwargs)
        except Exception as ex:
            error_msg = {'resource': "Resource creation failed. %s" % str(ex)}
            raise ValidationError(detail=error_msg)
        try:
            resource = hydroshare.create_resource(
                resource_type=resource_type,
                owner=request.user,
                title=res_title,
                edit_users=validated_request_data.get('edit_users', None),
                view_users=validated_request_data.get('view_users', None),
                edit_groups=validated_request_data.get('edit_groups', None),
                view_groups=validated_request_data.get('view_groups', None),
                keywords=keywords,
                metadata=metadata,
                extra_metadata=extra_metadata,
                files=files
            )
            if abstract:
                resource.metadata.create_element('description', abstract=abstract)
        except Exception as ex:
            error_msg = {'resource': "Resource creation failed. %s" % str(ex)}
            raise ValidationError(detail=error_msg)
        # Post-create failures do not roll back the resource; the error text
        # is surfaced in the response payload instead.
        post_creation_error_msg = ''
        try:
            hydroshare.utils.resource_post_create_actions(request=request, resource=resource,
                                                          user=request.user,
                                                          metadata=metadata, **kwargs)
        # NOTE(review): Exception already subsumes ResourceFileValidationException,
        # so this tuple is redundant — confirm whether a narrower catch was intended.
        except (hydroshare.utils.ResourceFileValidationException, Exception) as ex:
            post_creation_error_msg = str(ex)
        response_data = {'resource_type': resource_type, 'resource_id': resource.short_id,
                         'message': post_creation_error_msg}
        return Response(data=response_data, status=status.HTTP_201_CREATED)
    pagination_class = PageNumberPagination
    # NOTE(review): this mutates the shared PageNumberPagination class
    # attribute, affecting every view using that class — confirm intended.
    pagination_class.page_size_query_param = 'count'
    @swagger_auto_schema(query_serializer=serializers.ResourceListRequestValidator,
                         operation_description="List resources")
    def get(self, request):
        return self.list(request)
    # needed for list of resources
    # copied from ResourceList
    def get_queryset(self):
        """Validate query params and return the filtered resource list."""
        resource_list_request_validator = serializers.ResourceListRequestValidator(
            data=self.request.query_params)
        if not resource_list_request_validator.is_valid():
            raise ValidationError(detail=resource_list_request_validator.errors)
        filter_parms = resource_list_request_validator.validated_data
        filter_parms['user'] = (self.request.user if self.request.user.is_authenticated else None)
        if len(filter_parms['type']) == 0:
            filter_parms['type'] = None
        else:
            filter_parms['type'] = list(filter_parms['type'])
        # Anonymous callers only see public resources.
        filter_parms['public'] = not self.request.user.is_authenticated
        return hydroshare.get_resource_list(**filter_parms)
    # covers serialization of output from GET request
    def get_serializer_class(self):
        return serializers.ResourceListItemSerializer
    # covers serialization of output from POST request
    def post_serializer_class(self):
        return serializers.ResourceCreatedSerializer
class SystemMetadataRetrieve(APIView):
    """GET-only endpoint returning a resource's system metadata."""

    allowed_methods = ('GET',)

    @swagger_auto_schema(operation_description="Get resource system metadata, as well as URLs to "
                                               "the bag and science metadata",
                         responses={200: serializers.ResourceListItemSerializer})
    def get(self, request, pk):
        """Serialize and return the system metadata of resource *pk*."""
        resource, _, _ = view_utils.authorize(
            request, pk, needed_permission=ACTION_TO_AUTHORIZE.VIEW_METADATA)
        serializer = self.get_serializer_class()(resource)
        return Response(data=serializer.data, status=status.HTTP_200_OK)

    def get_serializer_class(self):
        return serializers.ResourceListItemSerializer
class AccessRulesUpdate(APIView):
    """
    Set access rules for a resource

    REST URL: hsapi/resource/{pk}/access
    DEPRECATED: hsapi/resource/accessRules/{pk}
    HTTP method: PUT

    :type pk: str
    :param pk: id of the resource
    :return: No content. Status code will 200 (OK)
    """
    # TODO: (Couch) Need GET as well.
    allowed_methods = ('PUT',)
    def put(self, request, pk):
        """ Update access rules
        """
        # only resource owners are allowed to change resource flags (e.g., public)
        view_utils.authorize(request, pk, needed_permission=ACTION_TO_AUTHORIZE.SET_RESOURCE_FLAG)
        # Validate the request body ({'public': bool}).
        access_rules_validator = serializers.AccessRulesRequestValidator(data=request.data)
        if not access_rules_validator.is_valid():
            raise ValidationError(detail=access_rules_validator.errors)
        validated_request_data = access_rules_validator.validated_data
        res = get_resource_by_shortkey(pk)
        try:
            res.set_public(validated_request_data['public'], request.user)
        except CoreValidationError:
            # The resource does not meet the requirements to be made public.
            return Response(data={'resource_id': pk}, status=status.HTTP_403_FORBIDDEN)
        return Response(data={'resource_id': pk}, status=status.HTTP_200_OK)
class ScienceMetadataRetrieveUpdate(APIView):
    """
    Retrieve or update resource science metadata

    REST URL: hsapi/scimeta/{pk}
    HTTP method: GET
    :type pk: str
    :param pk: id of the resource
    :return: science metadata as XML document
    :rtype: str
    :raises:
    NotFound: return json format: {'detail': 'No resource was found for resource id:pk'}
    PermissionDenied: return json format: {'detail': 'You do not have permission to perform
    this action.'}

    REST URL: hsapi/scimeta/{pk}
    HTTP method: PUT
    :type pk: str
    :param pk: id of the resource
    :type metadata: json
    :param metadata: resource metadata
    :return: resource id
    :rtype: json of the format: {'resource_id':pk}
    :raises:
    NotFound: return json format: {'detail': 'No resource was found for resource id':pk}
    PermissionDenied: return json format: {'detail': 'You do not have permission to perform
    this action.'}
    ValidationError: return json format: {parameter-1': ['error message-1'],
    'parameter-2': ['error message-2'], .. }
    """
    # MIME types accepted for an uploaded resourcemetadata.xml document
    ACCEPT_FORMATS = ('application/xml', 'application/rdf+xml')

    allowed_methods = ('GET', 'PUT')

    def get(self, request, pk):
        # Viewing science metadata requires VIEW_METADATA permission; authorize()
        # raises PermissionDenied / NotFound on failure.
        view_utils.authorize(request, pk, needed_permission=ACTION_TO_AUTHORIZE.VIEW_METADATA)
        scimeta_url = AbstractResource.scimeta_url(pk)
        # Delegate document delivery to the resource's science-metadata URL.
        return redirect(scimeta_url)

    def put(self, request, pk):
        # Update science metadata based on resourcemetadata.xml uploaded
        resource, authorized, user = view_utils.authorize(
            request, pk,
            needed_permission=ACTION_TO_AUTHORIZE.EDIT_RESOURCE,
            raises_exception=False)
        if not authorized:
            raise PermissionDenied()

        # Exactly one uploaded file, named resourcemetadata.xml with an accepted
        # content type, is required.
        files = list(request.FILES.values())
        if len(files) == 0:
            error_msg = {'file': 'No resourcemetadata.xml file was found to update resource '
                                 'metadata.'}
            raise ValidationError(detail=error_msg)
        elif len(files) > 1:
            error_msg = {'file': ('More than one file was found. Only one file, named '
                                  'resourcemetadata.xml, '
                                  'can be used to update resource metadata.')}
            raise ValidationError(detail=error_msg)

        scimeta = files[0]
        if scimeta.content_type not in self.ACCEPT_FORMATS:
            error_msg = {'file': ("Uploaded file has content type {t}, "
                                  "but only these types are accepted: {e}.").format(
                t=scimeta.content_type, e=",".join(self.ACCEPT_FORMATS))}
            raise ValidationError(detail=error_msg)
        expect = 'resourcemetadata.xml'
        if scimeta.name != expect:
            error_msg = {'file': "Uploaded file has name {n}, but expected {e}.".format(
                n=scimeta.name, e=expect)}
            raise ValidationError(detail=error_msg)

        # Temp directory to store resourcemetadata.xml
        tmp_dir = tempfile.mkdtemp()
        try:
            # Fake the bag structure so that GenericResourceMeta.read_metadata_from_resource_bag
            # can read and validate the system and science metadata for us.
            bag_data_path = os.path.join(tmp_dir, 'data')
            os.mkdir(bag_data_path)
            # Copy new science metadata to bag data path
            scimeta_path = os.path.join(bag_data_path, 'resourcemetadata.xml')
            shutil.copy(scimeta.temporary_file_path(), scimeta_path)
            # Copy existing resource map to bag data path
            # (use a file-like object as the file may be in iRODS, so we can't
            # just copy it to a local path)
            resmeta_path = os.path.join(bag_data_path, 'resourcemap.xml')
            with open(resmeta_path, 'wb') as resmeta:
                storage = get_file_storage()
                resmeta_irods = storage.open(AbstractResource.sysmeta_path(pk))
                shutil.copyfileobj(resmeta_irods, resmeta)
                resmeta_irods.close()
            try:
                # Read resource system and science metadata
                domain = Site.objects.get_current().domain
                rm = GenericResourceMeta.read_metadata_from_resource_bag(tmp_dir,
                                                                         hydroshare_host=domain)
                # Update resource metadata
                rm.write_metadata_to_resource(resource, update_title=True, update_keywords=True)
                create_bag_metadata_files(resource)
            except HsDeserializationDependencyException as e:
                # A dependent resource referenced by the metadata could not be resolved.
                msg = ("HsDeserializationDependencyException encountered when updating "
                       "science metadata for resource {pk}; depedent resource was {dep}.")
                msg = msg.format(pk=pk, dep=e.dependency_resource_id)
                logger.error(msg)
                raise ValidationError(detail=msg)
            except HsDeserializationException as e:
                raise ValidationError(detail=str(e))

            resource_modified(resource, request.user, overwrite_bag=False)
            return Response(data={'resource_id': pk}, status=status.HTTP_202_ACCEPTED)
        finally:
            # Always clean up the temporary bag directory, success or failure.
            shutil.rmtree(tmp_dir)
class ResourceMapRetrieve(APIView):
    """
    Retrieve resource map

    REST URL: hsapi/resource/{pk}/map
    HTTP method: GET
    :type pk: str
    :param pk: id of the resource
    :return: resource map as XML document
    :rtype: str
    :raises:
    NotFound: return json format: {'detail': 'No resource was found for resource id:pk'}
    PermissionDenied: return json format: {'detail': 'You do not have permission to perform
    this action.'}
    """
    allowed_methods = ('GET',)

    def get(self, request, pk):
        """Redirect the caller to the resource-map XML document for resource *pk*."""
        # authorize() raises NotFound/PermissionDenied when access is not allowed.
        view_utils.authorize(request, pk,
                             needed_permission=ACTION_TO_AUTHORIZE.VIEW_METADATA)
        return redirect(AbstractResource.resmap_url(pk))
class ResourceFileCRUD(APIView):
    """
    Retrieve, add, update or delete a resource file

    REST URL: hsapi/resource/{pk}/files/{pathname}
    HTTP method: GET
    :type pk: str
    :type pathname: str
    :param pk: resource id
    :param pathname: name (with optional folder path) of the file to retrieve/download
    :return: resource file data
    :rtype: file data bytes

    REST URL: POST hsapi/resource/{pk}/files/{pathname}
    HTTP method: POST
    Request post data: file data (required)
    :type pk: str
    :param pk: resource id
    :return: id of the resource and name of the file added
    :rtype: json string of format: {'resource_id':pk, 'file_name': name of the file added}
    Leaving out pathname in the URI routes to ResourceFileListCreate instead,
    which stores the file in the root directory.

    REST URL: hsapi/resource/{pk}/files/{pathname}
    HTTP method: PUT
    Not implemented (raises NotImplementedError).

    REST URL: hsapi/resource/{pk}/files/{pathname}
    HTTP method: DELETE
    :type pk: str
    :type pathname: str
    :param pk: resource id
    :param pathname: name (with optional folder path) of the file to delete
    :return: id of the resource and name of the file
    :rtype: json string of format: {'resource_id':pk, 'file_name': name of the file deleted}

    :raises:
    NotFound: return json format: {'detail': 'No resource was found for resource id':pk}
    PermissionDenied: return json format: {'detail': 'You do not have permission to perform
    this action.'}
    ValidationError: return json format: {'parameter-1':['error message-1'],
    'parameter-2': ['error message-2'], .. }
    """
    allowed_methods = ('GET', 'POST', 'PUT', 'DELETE')

    parser_classes = (MultiPartParser,)

    def initialize_request(self, request, *args, **kwargs):
        """
        Hack to work around the following issue in django-rest-framework:

        https://github.com/tomchristie/django-rest-framework/issues/3951

        Couch: This issue was recently closed (10/12/2016, 2 days before this writing)
        and is slated to be incorporated in the Django REST API 3.5.0 release.
        At that time, we should remove this hack.

        :param request: incoming Django HttpRequest (or already-wrapped DRF Request)
        :param args: positional args passed through to the parent implementation
        :param kwargs: keyword args passed through to the parent implementation
        :return: the DRF Request with the original POST/FILES data preserved
        """
        if not isinstance(request, Request):
            # Don't deep copy the file data as it may contain an open file handle
            old_file_data = copy.copy(request.FILES)
            old_post_data = copy.deepcopy(request.POST)
            request = super(ResourceFileCRUD, self).initialize_request(request, *args, **kwargs)
            request.POST._mutable = True
            request.POST.update(old_post_data)
            request.FILES.update(old_file_data)
        return request

    def get(self, request, pk, pathname):
        """Redirect to the download URL for file *pathname* of resource *pk*."""
        resource, _, _ = view_utils.authorize(
            request, pk,
            needed_permission=ACTION_TO_AUTHORIZE.VIEW_RESOURCE)

        if not resource.supports_folders and '/' in pathname:
            return Response("Resource type does not support folders", status.HTTP_403_FORBIDDEN)

        try:
            # Reject suspicious paths (e.g. directory traversal attempts).
            view_utils.irods_path_is_allowed(pathname)
        except (ValidationError, SuspiciousFileOperation) as ex:
            # BUG FIX: Response() takes 'status', not 'status_code'. The old keyword
            # argument raised a TypeError at runtime instead of returning a 400
            # response (compare the correct usage in delete() below).
            return Response(str(ex), status=status.HTTP_400_BAD_REQUEST)

        try:
            f = hydroshare.get_resource_file(pk, pathname).resource_file
        except ObjectDoesNotExist:
            err_msg = 'File with file name {file_name} does not exist for resource with ' \
                      'resource id {res_id}'.format(file_name=pathname, res_id=pk)
            raise NotFound(detail=err_msg)

        # redirects to django_irods/views.download function
        # use new internal url for rest call
        # TODO: (Couch) Migrate model (with a "data migration") so that this hack is not needed.
        redirect_url = f.url.replace('django_irods/download/', 'django_irods/rest_download/')
        return HttpResponseRedirect(redirect_url)

    @swagger_auto_schema(request_body=serializers.ResourceFileValidator)
    def post(self, request, pk, pathname):
        """
        Add a file to a resource.

        :param request: must carry exactly one uploaded file
        :param pk: Primary key of the resource (i.e. resource short ID)
        :param pathname: the path to the containing folder in the folder hierarchy
        :return: 201 with {'resource_id', 'file_name', 'file_path'}

        Leaving out pathname in the URI calls a different class function in ResourceFileListCreate
        that stores in the root directory instead.
        """
        resource, _, _ = view_utils.authorize(request, pk,
                                              needed_permission=ACTION_TO_AUTHORIZE.EDIT_RESOURCE)
        resource_files = list(request.FILES.values())
        if len(resource_files) == 0:
            error_msg = {'file': 'No file was found to add to the resource.'}
            raise ValidationError(detail=error_msg)
        elif len(resource_files) > 1:
            error_msg = {'file': 'More than one file was found. Only one file can be '
                                 'added at a time.'}
            raise ValidationError(detail=error_msg)

        # TODO: (Brian) I know there has been some discussion when to validate a file
        # I agree that we should not validate and extract metadata as part of the file add api
        # Once we have a decision, I will change this implementation accordingly. In that case
        # we have to implement additional rest endpoints for file validation and extraction.
        try:
            hydroshare.utils.resource_file_add_pre_process(resource=resource,
                                                           files=[resource_files[0]],
                                                           folder=pathname,
                                                           user=request.user,
                                                           extract_metadata=True)
        except Exception as ex:
            # NOTE: the original except tuple listed ResourceFileSizeException and
            # ResourceFileValidationException alongside Exception, making the tuple
            # redundant; any failure here is reported to the client as a 400.
            error_msg = {'file': 'Adding file to resource failed. %s' % str(ex)}
            raise ValidationError(detail=error_msg)

        try:
            res_file_objects = hydroshare.utils.resource_file_add_process(resource=resource,
                                                                          files=[resource_files[0]],
                                                                          folder=pathname,
                                                                          user=request.user,
                                                                          extract_metadata=True)
        except Exception as ex:
            error_msg = {'file': 'Adding file to resource failed. %s' % str(ex)}
            raise ValidationError(detail=error_msg)

        # prepare response data
        file_name = os.path.basename(res_file_objects[0].resource_file.name)
        file_path = res_file_objects[0].resource_file.name.split('/data/contents/')[1]
        response_data = {'resource_id': pk, 'file_name': file_name, 'file_path': file_path}
        resource_modified(resource, request.user, overwrite_bag=False)
        return Response(data=response_data, status=status.HTTP_201_CREATED)

    def delete(self, request, pk, pathname):
        """Delete file *pathname* from resource *pk* and return its identity."""
        resource, _, user = view_utils.authorize(
            request, pk, needed_permission=ACTION_TO_AUTHORIZE.EDIT_RESOURCE)

        if not resource.supports_folders and '/' in pathname:
            return Response("Resource type does not support folders", status.HTTP_403_FORBIDDEN)

        try:
            view_utils.irods_path_is_allowed(pathname)  # check for hacking attempts
        except (ValidationError, SuspiciousFileOperation) as ex:
            return Response(str(ex), status=status.HTTP_400_BAD_REQUEST)

        try:
            hydroshare.delete_resource_file(pk, pathname, user)
        except ObjectDoesNotExist as ex:  # matching file not found
            raise NotFound(detail=str(ex))

        # prepare response data
        response_data = {'resource_id': pk, 'file_name': pathname}
        resource_modified(resource, request.user, overwrite_bag=False)
        return Response(data=response_data, status=status.HTTP_200_OK)

    @swagger_auto_schema(auto_schema=None)
    def put(self, request, pk, pathname):
        """Not implemented for the REST API yet."""
        # TODO: (Brian) Currently we do not have this action for the front end. Will implement
        # in the next iteration. Implement only after we have a decision on when to validate a file
        raise NotImplementedError()
class ResourceFileListCreate(ResourceFileToListItemMixin, generics.ListCreateAPIView):
    """
    Create a resource file or retrieve a list of resource files

    REST URL: hsapi/resource/{pk}/files/
    DEPRECATED: hsapi/resource/{pk}/file_list/
    HTTP method: GET
    :type pk: str
    :param pk: resource id
    :return: paginated JSON representation of the list of files, of the form:

    {
        "count": 2,
        "next": null,
        "previous": null,
        "results": [
            {
                "url": "http://mill24.cep.unc.edu/django_irods/
                download/bd88d2a152894134928c587d38cf0272/data/contents/
                mytest_resource/text_file.txt",
                "size": 21,
                "content_type": "text/plain",
                "modified_time": "2020-02-25T08:28:14",
                "checksum": "7265548b8f345605113bd9539313b4e7"
            },
            {
                "url": "http://mill24.cep.unc.edu/django_irods/download/
                bd88d2a152894134928c587d38cf0272/data/contents/mytest_resource/a_directory/cea.tif",
                "size": 270993,
                "content_type": "image/tiff",
                "modified_time": "2020-02-25T08:28:14",
                "checksum": "ed06b456c22f7123d20888d16bcd181d"
            }
        ]
    }

    REST URL: POST hsapi/resource/{pk}/files/
    HTTP method: POST
    Request post data: file data (required); optional 'folder' form field
    :type pk: str
    :param pk: resource id
    :return: id of the resource and name of the file added
    :rtype: json string of format: {'resource_id':pk, 'file_name': name of the file added}

    :raises:
    NotFound: return json format: {'detail': 'No resource was found for resource id':pk}
    PermissionDenied: return json format: {'detail': 'You do not have permission to perform
    this action.'}
    """
    allowed_methods = ('GET', 'POST')

    def initialize_request(self, request, *args, **kwargs):
        """
        Hack to work around the following issue in django-rest-framework:

        https://github.com/tomchristie/django-rest-framework/issues/3951

        Couch: This issue was recently closed (10/12/2016, 2 days before this writing)
        and is slated to be incorporated in the Django REST API 3.5.0 release.
        At that time, we should remove this hack.

        :param request: incoming Django HttpRequest (or already-wrapped DRF Request)
        :param args: positional args passed through to the parent implementation
        :param kwargs: keyword args passed through to the parent implementation
        :return: the DRF Request with the original POST/FILES data preserved
        """
        if not isinstance(request, Request):
            # Don't deep copy the file data as it may contain an open file handle
            old_file_data = copy.copy(request.FILES)
            old_post_data = copy.deepcopy(request.POST)
            request = super(ResourceFileListCreate, self).initialize_request(
                request, *args, **kwargs)
            request.POST._mutable = True
            request.POST.update(old_post_data)
            request.FILES.update(old_file_data)
        return request

    def get(self, request, pk):
        """
        Get a listing of files within a resource.

        :param request:
        :param pk: Primary key of the resource (i.e. resource short ID)
        :return: paginated list response (see class docstring for shape)
        """
        return self.list(request)

    def get_queryset(self):
        # Authorization happens here because ListCreateAPIView drives list()
        # through get_queryset(); raises PermissionDenied/NotFound on failure.
        resource, _, _ = view_utils.authorize(self.request, self.kwargs['pk'],
                                              needed_permission=ACTION_TO_AUTHORIZE.VIEW_RESOURCE)
        resource_file_info_list = []
        for f in resource.files.all():
            resource_file_info_list.append(self.resourceFileToListItem(f))
        return resource_file_info_list

    def get_serializer_class(self):
        return serializers.ResourceFileSerializer

    def post(self, request, pk):
        """
        Add a file to a resource.

        :param request: must carry exactly one uploaded file; an optional
            'folder' form field selects the destination folder (default: root)
        :param pk: Primary key of the resource (i.e. resource short ID)
        :return: 201 with {'resource_id'} (metadata ingestion) or
            {'resource_id', 'file_name', 'file_path'} (file added)
        """
        resource, _, _ = view_utils.authorize(request, pk,
                                              needed_permission=ACTION_TO_AUTHORIZE.EDIT_RESOURCE)
        resource_files = list(request.FILES.values())
        if len(resource_files) == 0:
            error_msg = {'file': 'No file was found to add to the resource.'}
            raise ValidationError(detail=error_msg)
        elif len(resource_files) > 1:
            error_msg = {'file': 'More than one file was found. Only one file can be '
                                 'added at a time.'}
            raise ValidationError(detail=error_msg)

        # TODO: (Brian) I know there has been some discussion when to validate a file
        # I agree that we should not validate and extract metadata as part of the file add api
        # Once we have a decision, I will change this implementation accordingly. In that case
        # we have to implement additional rest endpoints for file validation and extraction.
        folder = request.POST.get('folder', '')
        try:
            hydroshare.utils.resource_file_add_pre_process(resource=resource,
                                                           files=[resource_files[0]],
                                                           user=request.user,
                                                           folder=folder,
                                                           extract_metadata=True)
        except Exception as ex:
            # NOTE: the original except tuple listed ResourceFileSizeException and
            # ResourceFileValidationException alongside Exception, making the tuple
            # redundant; any failure here is reported to the client as a 400.
            error_msg = {'file': 'Adding file to resource failed. %s' % str(ex)}
            raise ValidationError(detail=error_msg)

        try:
            res_file_objects = hydroshare.utils.resource_file_add_process(resource=resource,
                                                                          files=[resource_files[0]],
                                                                          user=request.user,
                                                                          folder=folder,
                                                                          extract_metadata=True)
        except Exception as ex:
            error_msg = {'file': 'Adding file to resource failed. %s' % str(ex)}
            raise ValidationError(detail=error_msg)

        if len(res_file_objects) == 0:
            # metadata ingestion: the upload updated metadata rather than adding a file
            response_data = {'resource_id': pk}
        else:
            file_name = os.path.basename(res_file_objects[0].resource_file.name)
            file_path = res_file_objects[0].resource_file.name.split('/data/contents/')[1]
            response_data = {'resource_id': pk, 'file_name': file_name, 'file_path': file_path}
        resource_modified(resource, request.user, overwrite_bag=False)
        return Response(data=response_data, status=status.HTTP_201_CREATED)
def _validate_metadata(metadata_list):
"""
Make sure the metadata_list does not have data for the following
core metadata elements. Exception is raised if any of the following elements is present
in metadata_list:
title - (endpoint has a title parameter which should be used for specifying resource title)
subject (keyword) - (endpoint has a keywords parameter which should be used for specifying
resource keywords)
description (abstract)- (endpoint has a abstract parameter which should be used for specifying
resource abstract)
publisher - this element is created upon resource publication
format - this element is created by the system based on the resource content files
date - this element is created by the system
type - this element is created by the system
:param metadata_list: list of dicts each representing data for a specific metadata element
:return:
"""
err_message = "Metadata validation failed. Metadata element '{}' was found in value passed " \
"for parameter 'metadata'. Though it's a valid element it can't be passed " \
"as part of 'metadata' parameter."
for element in metadata_list:
# here k is the name of the element
# v is a dict of all element attributes/field names and field values
k, v = list(element.items())[0]
if k.lower() in ('title', 'subject', 'description', 'publisher', 'format', 'date', 'type'):
err_message = err_message.format(k.lower())
raise ValidationError(detail=err_message)
| bsd-3-clause | 981703ed133fb25835e337ca465003e6 | 42.573598 | 111 | 0.617738 | 4.410951 | false | false | false | false |
hydroshare/hydroshare | hs_core/urls.py | 1 | 8536 | from django.conf.urls import url
from hs_core import views
from hs_core.views.autocomplete import autocomplete
# URLconf for the internal (AJAX/front-end) API of hs_core.
urlpatterns = [
    # internal API
    url(r'^_internal/(?P<shortkey>[0-9a-f-]+)/add-files-to-resource/$',
        views.add_files_to_resource, name='add_files_to_resource'),
    url(r'^_internal/(?P<shortkey>[0-9a-f-]+)/change-quota-holder/$',
        views.change_quota_holder, name='change_quota_holder'),

    # metadata element CRUD
    url(r'^_internal/(?P<shortkey>[0-9a-f-]+)/(?P<element_name>[A-z]+)/add-metadata/$',
        views.add_metadata_element, name='add_metadata_element'),
    url(r'^_internal/(?P<shortkey>[0-9a-f-]+)/(?P<element_name>[A-z]+)/(?P<element_id>[A-z0-9]+)/update-metadata/$',
        views.update_metadata_element, name='update_metadata_element'),
    url(r'^_internal/(?P<shortkey>[0-9a-f-]+)/(?P<element_name>[A-z]+)/(?P<element_id>[A-z0-9]+)/delete-metadata/$',
        views.delete_metadata_element, name='delete_metadata_element'),
    url(r'^_internal/(?P<shortkey>[0-9a-f-]+)/delete-author/(?P<element_id>[A-z0-9]+)/$',
        views.delete_author, name='delete_author'),
    url(r'^_internal/(?P<shortkey>[0-9a-f-]+)/get-metadata/$',
        views.get_resource_metadata, name='get_metadata'),
    url(r'^_internal/(?P<shortkey>[0-9a-f-]+)/update-key-value-metadata/$',
        views.update_key_value_metadata, name="update_key_value_metadata"),
    url(r'^_internal/(?P<shortkey>[0-9a-f-]+)/relevant-tools/$',
        views.get_relevant_tools, name="get_relevant_tools"),

    # resource and resource-file lifecycle
    url(r'^_internal/(?P<shortkey>[0-9a-f-]+)/delete-resource-file/(?P<f>[0-9]+)/$',
        views.delete_file, name='delete_file'),
    url(r'^_internal/(?P<shortkey>[A-z0-9]+)/delete-multiple-files/$',
        views.delete_multiple_files, name='delete_multiple_files'),
    url(r'^_internal/(?P<shortkey>[0-9a-f-]+)/delete-resource/(?P<usertext>[A-z]+)/$',
        views.delete_resource, name='delete_resource'),
    url(r'^_internal/(?P<shortkey>[0-9a-f-]+)/create-new-version-resource/$',
        views.create_new_version_resource, name='create_resource_version'),
    url(r'^_internal/(?P<shortkey>[0-9a-f-]+)/copy-resource/$', views.copy_resource,
        name='copy_resource'),
    url(r'^_internal/(?P<shortkey>[0-9a-f-]+)/list-referenced-content/$', views.list_referenced_content,
        name='list_referenced_content'),
    url(r'^_internal/(?P<shortkey>[0-9a-f-]+)/rep-res-bag-to-irods-user-zone/$',
        views.rep_res_bag_to_irods_user_zone, name='replicate_bag_user_zone'),
    url(r'^_internal/(?P<shortkey>[0-9a-f-]+)/set-resource-flag/$',
        views.set_resource_flag, name='set_resource_flag'),

    # sharing resources with users and groups
    url(r'^_internal/(?P<shortkey>[0-9a-f-]+)/share-resource-with-user/(?P<privilege>[a-z]+)/(?P<user_id>[0-9]+)/$',
        views.share_resource_with_user, name='share_resource_with_user'),
    url(r'^_internal/(?P<shortkey>[0-9a-f-]+)/unshare-resource-with-user/(?P<user_id>[0-9]+)/$',
        views.unshare_resource_with_user, name='unshare_resource_with_user'),
    url(r'^_internal/(?P<shortkey>[0-9a-f-]+)/undo-share-resource-with-user/(?P<user_id>[0-9]+)/$',
        views.undo_share_resource_with_user, name='undo_share_resource_with_user'),
    url(r'^_internal/(?P<shortkey>[0-9a-f-]+)/share-resource-with-group/(?P<privilege>[a-z]+)/(?P<group_id>[0-9]+)/$',
        views.share_resource_with_group, name='share_resource_with_group'),
    url(r'^_internal/(?P<shortkey>[0-9a-f-]+)/unshare-resource-with-group/(?P<group_id>[0-9]+)/$',
        views.unshare_resource_with_group, name='unshare_resource_with_group'),
    url(r'^_internal/(?P<shortkey>[0-9a-f-]+)/undo-share-resource-with-group/(?P<group_id>[0-9]+)/$',
        views.undo_share_resource_with_group, name='undo_share_resource_with_group'),

    # user group management and membership
    url(r'^_internal/create-user-group/$', views.create_user_group, name='create_user_group'),
    url(r'^_internal/update-user-group/(?P<group_id>[0-9]+)$', views.update_user_group,
        name='update_user_group'),
    url(r'^_internal/delete-user-group/(?P<group_id>[0-9]+)$', views.delete_user_group,
        name='delete_user_group'),
    url(r'^_internal/restore-user-group/(?P<group_id>[0-9]+)$', views.restore_user_group,
        name='restore_user_group'),
    url(r'^_internal/share-group-with-user/(?P<group_id>[0-9]+)/(?P<user_id>[0-9]+)/(?P<privilege>[a-z]+)/$',
        views.share_group_with_user, name='share_group_with_user'),
    url(r'^_internal/unshare-group-with-user/(?P<group_id>[0-9]+)/(?P<user_id>[0-9]+)/$',
        views.unshare_group_with_user, name='unshare_group_with_user'),
    # NOTE: two routes map to the same view/name; the first carries an explicit
    # user_id, the second defaults to the requesting user.
    url(r'^_internal/make-group-membership-request/(?P<group_id>[0-9]+)/(?P<user_id>[0-9]+)/$',
        views.make_group_membership_request, name='make_group_membership_request'),
    url(r'^_internal/make-group-membership-request/(?P<group_id>[0-9]+)/$',
        views.make_group_membership_request, name='make_group_membership_request'),
    url(r'^_internal/act-on-group-membership-request/(?P<membership_request_id>[0-9]+)/(?P<action>[a-z]+)/$',
        views.act_on_group_membership_request, name='act_on_group_membership_request'),
    url(r'^_internal/group_membership/(?P<token>[-\w]+)/(?P<uidb36>[-\w]+)/(?P<membership_request_id>[0-9]+)/',
        views.group_membership,
        name='group_membership'),

    # metadata review / publication workflow
    url(r'^_internal/metadata_review/(?P<shortkey>[0-9a-f-]+)/(?P<action>[a-z]+)/',
        views.metadata_review,
        name='metadata_review_noauth'),
    url(r'^_internal/metadata_review/(?P<shortkey>[0-9a-f-]+)/(?P<action>[a-z]+)/(?P<uidb36>[-\w]+)/(?P<token>[-\w]+)/',
        views.metadata_review,
        name='metadata_review'),
    url(r'^_internal/get-user-or-group-data/(?P<user_or_group_id>[0-9]+)/(?P<is_group>[a-z]+)$',
        views.get_user_or_group_data, name='get_user_or_group_data'),
    url(r'^_internal/(?P<shortkey>[0-9a-f-]+)/publish/$', views.publish),
    url(r'^_internal/(?P<shortkey>[0-9a-f-]+)/submit-for-review/$', views.submit_for_review),
    url(r'^_internal/create-resource/do/$', views.create_resource, name='create_resource'),

    # account verification and resource-type capability queries
    url(r'^_internal/verify-account/$', views.verify_account),
    url(r'^_internal/resend_verification_email/$', views.resend_verification_email),
    url(r'^_internal/(?P<resource_type>[A-z]+)/supported-file-types/$',
        views.get_supported_file_types_for_resource_type, name='resource_type_file_types'),
    url(r'^_internal/(?P<resource_type>[A-z]+)/allow-multiple-file/$',
        views.is_multiple_file_upload_allowed, name="resource_type_multiple_file_upload"),
    url(r'^_internal/search/autocomplete/', autocomplete),

    # iRODS data-store / folder operations
    url(r'^_internal/data-store-structure/$', views.resource_folder_hierarchy.data_store_structure),
    url(r'^_internal/data-store-folder-zip/$',
        views.resource_folder_hierarchy.data_store_folder_zip, name="zip_folder"),
    url(r'^_internal/zip-by-aggregation-file/$',
        views.resource_folder_hierarchy.zip_aggregation_file, name="zip_by_aggr_file"),
    url(r'^_internal/data-store-folder-unzip/$',
        views.resource_folder_hierarchy.data_store_folder_unzip),
    url(r'^_internal/data-store-create-folder/$',
        views.resource_folder_hierarchy.data_store_create_folder),
    url(r'^_internal/data-store-add-reference/$',
        views.resource_folder_hierarchy.data_store_add_reference),
    url(r'^_internal/data-store-edit-reference-url/$',
        views.resource_folder_hierarchy.data_store_edit_reference_url),
    url(r'^_internal/data-store-move-or-rename/$',
        views.resource_folder_hierarchy.data_store_file_or_folder_move_or_rename),
    url(r'^_internal/data-store-move-to-folder/$',
        views.resource_folder_hierarchy.data_store_move_to_folder),
    url(r'^_internal/data-store-rename-file-or-folder/$',
        views.resource_folder_hierarchy.data_store_rename_file_or_folder),
    url(r'^_internal/data-store-delete-folder/$',
        views.resource_folder_hierarchy.data_store_remove_folder),

    # quota and background-task management
    url(r'^_internal/update_quota_usage/(?P<username>[\w.@+-]+)/$',
        views.update_quota_usage, name='update_quota_usage'),
    url(r'^_internal/get_tasks_by_user/$', views.get_tasks_by_user, name='get_tasks_by_user'),
    url(r'^_internal/get_task/(?P<task_id>[0-9a-f-]+)$', views.get_task, name='get_task'),
    url(r'^_internal/abort_task/(?P<task_id>[0-9a-f-]+)$', views.abort_task, name='abort_task'),
    url(r'^_internal/dismiss_task/(?P<task_id>[0-9a-f-]+)$', views.dismiss_task, name='dismiss_task'),
    url(r'^_internal/set_task_delivered/(?P<task_id>[0-9a-f-]+)$', views.set_task_delivered, name='set_task_delivered'),
]
| bsd-3-clause | cf722bc861e6c3c33b6eb176c4cfc17a | 70.731092 | 120 | 0.645619 | 2.966979 | false | false | true | false |
hydroshare/hydroshare | hs_core/views/autocomplete.py | 4 | 2796 | from django.contrib.auth.models import User
from hs_core.models import Contributor, Creator, Subject
from hs_core.hydroshare.utils import get_resource_types
from hs_core.views.utils import json_or_jsonp
def autocomplete(request):
    """Return autocomplete suggestions for the search term in GET parameter 'term'.

    Suggestions are gathered from resource types, parties (authors/contributors),
    users (owners), and subjects (keywords), and returned via json_or_jsonp.
    """
    term = request.GET.get('term')
    resp = []

    # Resource types whose class name contains the term (case-insensitive).
    types = [t for t in get_resource_types() if term.lower() in t.__name__.lower()]
    resp += [{'label': 'type', 'value': t.__name__, 'id': t.__name__} for t in types]

    # Party calculations are expensive and complicated. Deferring to focus on lower hanging fruit
    #
    parties = []  # NOTE(review): unused — appears to be leftover scaffolding; confirm before removing

    def get_party_type(party):
        # Classify a party record as 'Contributor' or 'Author' (Creator); None if neither.
        if Contributor.objects.filter(id=party.id).exists():
            return 'Contributor'
        elif Creator.objects.filter(id=party.id).exists():
            return 'Author'
        else:
            return None

    # (label, value) pairs already emitted, to suppress duplicate suggestions.
    seen = set()
    # Map of party attribute name -> Django ORM filter expression applied to it.
    filter_types = {
        'name': 'name__istartswith',
        'email': 'email__iexact',
    }
    for model in (Creator, Contributor):
        for filter_type in filter_types:
            for party in model.objects.filter(**{filter_types[filter_type]: term}):
                party_type = get_party_type(party)
                if party_type:
                    name = model.__name__
                    if model is Creator:
                        # Creators are presented to users under the label "Author".
                        name = "Author"
                    if (name, party.name) not in seen:
                        seen.add((name, party.name))
                        resp.append({
                            'label': name,
                            'type': 'party',
                            # NOTE(review): the getattr default is the literal string
                            # 'id'; party.id may have been intended — confirm.
                            'id': getattr(party, filter_type, 'id'),
                            'value': party.name,
                        })

    # Users whose username starts with the term; display name falls back through
    # "First Last (username)" -> "First (username)" -> "Last (username)" -> username.
    owners = User.objects.filter(username__istartswith=term)
    for owner in owners:
        if owner.first_name and owner.last_name:
            name = "%s %s (%s)" % (owner.first_name, owner.last_name, owner.username)
        elif owner.first_name:
            name = "%s (%s)" % (owner.first_name, owner.username)
        elif owner.last_name:
            name = "%s (%s)" % (owner.last_name, owner.username)
        else:
            name = owner.username
        resp.append({
            'label': 'Owner',
            'type': 'owner',
            'id': owner.username,
            'value': name,
        })

    # Subjects (keywords) starting with the term, de-duplicated via `seen`.
    subjects = Subject.objects.filter(value__istartswith=term)
    for subject in subjects:
        if ('subject', subject.value) not in seen:
            seen.add(('subject', subject.value))
            resp.append({
                'label': 'Subject',
                'type': 'subject',
                'id': subject.value,
                'value': subject.value,
            })

    # todo: users
    # todo: groups
    # todo: other conditions?
    return json_or_jsonp(request, resp)
| bsd-3-clause | 8c2e55225961e9bb8594f2eceb054665 | 34.392405 | 97 | 0.519313 | 4.099707 | false | false | false | false |
hydroshare/hydroshare | hs_core/management/commands/fix_missing_license_statement.py | 1 | 1255 | from django.core.management.base import BaseCommand
from hs_core.models import BaseResource
from hs_core.hydroshare import set_dirty_bag_flag
class Command(BaseCommand):
    """Management command that backfills an empty license statement from the license URL."""

    help = "Copies license URL as license statement if a value for license is missing in any resource"

    def handle(self, *args, **options):
        fixed_count = 0
        print(f"Total resources found:{BaseResource.objects.count()}")
        # iterator() streams rows instead of caching the whole queryset in memory.
        for res in BaseResource.objects.all().iterator():
            res = res.get_content_model()
            if res.metadata is None:
                print(f"Not fixing license. Metadata object is missing for this resource:{res.short_id}")
                continue
            rights = res.metadata.rights
            if rights is None:
                print(f"Not fixing license. Rights metadata object is missing for this resource:{res.short_id}")
                continue
            # A blank (whitespace-only) statement is treated as missing.
            if not rights.statement.strip():
                rights.statement = rights.url
                rights.save()
                # Flag the resource so its bag gets regenerated with the new metadata.
                set_dirty_bag_flag(res)
                fixed_count += 1
                print(f"Fixed license for resource:{res.short_id}")
        print(f"Number of resources for which licence was fixed:{fixed_count}")
| bsd-3-clause | 393beedf5cb2cd7aa3a79936344d1f9f | 40.833333 | 112 | 0.622311 | 4.498208 | false | false | false | false |
hydroshare/hydroshare | hs_core/views/resource_access_api.py | 1 | 7766 | from django.contrib.auth.models import Group
from rest_framework.response import Response
from rest_framework import generics, serializers
from rest_framework import status
from hs_core import hydroshare
from hs_core.hydroshare import utils
from hs_access_control.models import UserResourcePrivilege, GroupResourcePrivilege, \
PrivilegeCodes, UserAccess
from hs_core.views import utils as view_utils
from hs_core.views.utils import ACTION_TO_AUTHORIZE
class PrivilegeField(serializers.Field):
    """Read-only serializer field rendering an integer privilege code as its label."""

    def to_representation(self, privilege):
        # CHOICES is ordered by privilege code starting at 1, hence the -1 offset;
        # each entry is a (code, label) pair.
        _, label = PrivilegeCodes.CHOICES[privilege - 1]
        return label
class GroupResourcePrivilegeSerializer(serializers.ModelSerializer):
    """Serialize a group's privilege over a resource with a human-readable privilege label."""

    # Render the integer privilege code as its display label.
    privilege = PrivilegeField()

    class Meta:
        model = GroupResourcePrivilege
        fields = ('id', 'privilege', 'group', 'resource', 'grantor')
class UserResourcePrivilegeSerializer(serializers.ModelSerializer):
    """Serialize a user's privilege over a resource with a human-readable privilege label."""

    # Render the integer privilege code as its display label.
    privilege = PrivilegeField()

    class Meta:
        model = UserResourcePrivilege
        fields = ('id', 'privilege', 'user', 'resource', 'grantor')
class ResourceAccessUpdateDelete(generics.RetrieveUpdateDestroyAPIView):
"""
Read, update, or delete access permission for a resource
REST URL: hsapi/resource/{pk}/access
HTTP method: GET
:return: (on success): JSON representation of resource access with 'groups' and 'users' keys.
REST URL: hsapi/resource/{pk}/access?(user_id=#|group_id=#)
HTTP method: DELETE
:type int
:param user_id: user ID to remove
:type int
:param group_id: group ID to remove
:return: (on success): Success or Error JSON object
REST URL: hsapi/resource/{pk}/access
HTTP method: PUT
:return: (on success): Success or Error JSON object
:type int
:param user_id: user ID to remove
:type int
:param group_id: group ID to remove
:type PrivilegeCode int
:param privilege: PrivilegeCode to specifiy access level
:return: (on success): Success or Error JSON objectit
"""
serializer_class = UserResourcePrivilegeSerializer
allowed_methods = ('GET', 'PUT', 'DELETE')
def get(self, request, pk):
    """Return the users and groups holding access privileges on resource *pk*."""
    view_utils.authorize(request, pk,
                         needed_permission=ACTION_TO_AUTHORIZE.VIEW_RESOURCE_ACCESS)
    user_serializer_cls, group_serializer_cls = self.get_serializer_classes()
    user_privileges, group_privileges = self.get_queryset(pk, request.user)
    response_data = {
        'users': user_serializer_cls(user_privileges, many=True).data,
        'groups': group_serializer_cls(group_privileges, many=True).data,
    }
    return Response(data=response_data, status=status.HTTP_200_OK)
def put(self, request, pk):
view_utils.authorize(request, pk,
needed_permission=ACTION_TO_AUTHORIZE.EDIT_RESOURCE_ACCESS)
user_access = UserAccess(user=request.user)
resource = hydroshare.get_resource_by_shortkey(shortkey=pk)
keys = list(request.data.keys())
if "user_id" in keys and "group_id" in keys:
return Response(
data={
'error': "Request cannot contain both a 'user_id' and a 'group_id' parameter."
},
status=status.HTTP_400_BAD_REQUEST
)
if "user_id" in keys and "privilege" in keys:
if int(request.data['privilege']) in (1, 2, 3, 4):
try:
user_to_add = utils.user_from_id(request.data['user_id'])
user_access.share_resource_with_user(resource,
user_to_add,
request.data['privilege'])
return Response(
data={'success': "Resource access privileges added."},
status=status.HTTP_202_ACCEPTED
)
except Exception:
return Response(
data={'error': "This resource may not be shared with that user."},
status=status.HTTP_400_BAD_REQUEST
)
if "group_id" in keys and "privilege" in keys:
if int(request.data['privilege']) in (1, 2, 3, 4):
group_to_add = utils.group_from_id(request.data['group_id'])
try:
user_access.share_resource_with_group(resource,
group_to_add,
request.data['privilege'])
return Response(
data={'success': "Resource access privileges added."},
status=status.HTTP_202_ACCEPTED
)
except Exception:
return Response(
data={'error': "This group may not be added to any resources."},
status=status.HTTP_400_BAD_REQUEST
)
message = "Request must contain a 'resource' ID as well as a 'user_id' or " \
"'group_id', and 'privilege' must be one of 1, 2, or 3."
return Response(
data={'error': message},
status=status.HTTP_400_BAD_REQUEST
)
def delete(self, request, pk):
view_utils.authorize(request, pk,
needed_permission=ACTION_TO_AUTHORIZE.EDIT_RESOURCE_ACCESS)
keys = list(request.query_params.keys())
user_access = UserAccess(user=request.user)
resource = hydroshare.get_resource_by_shortkey(shortkey=pk)
if "user_id" in keys and "group_id" in keys:
message = "Request cannot contain both a 'user_id' and a 'group_id' parameter."
return Response(
data={'error': message},
status=status.HTTP_400_BAD_REQUEST
)
if "user_id" in keys:
user_to_remove = utils.user_from_id(request.query_params['user_id'])
user_access.unshare_resource_with_user(resource, user_to_remove)
return Response(
data={'success': "Resource access privileges removed."},
status=status.HTTP_202_ACCEPTED
)
if "group_id" in keys:
group_to_remove = utils.group_from_id(request.query_params['group_id'])
user_access.unshare_resource_with_group(resource, group_to_remove)
return Response(
data={'success': "Resource access privileges removed."},
status=status.HTTP_202_ACCEPTED
)
message = "Request must contain a 'resource' ID as well as a 'user_id' or 'group_id'"
return Response(
data={'error': message},
status=status.HTTP_400_BAD_REQUEST
)
def get_serializer_classes(self):
return (UserResourcePrivilegeSerializer, GroupResourcePrivilegeSerializer,)
def get_queryset(self, pk, user):
resource = hydroshare.get_resource_by_shortkey(shortkey=pk)
if user in resource.raccess.owners:
querysets = (
UserResourcePrivilege.objects.filter(resource=resource),
GroupResourcePrivilege.objects.filter(resource=resource)
)
else:
user_groups = Group.objects.filter(gaccess__g2ugp__user=user)
querysets = (
UserResourcePrivilege.objects.filter(resource=resource, user=user),
GroupResourcePrivilege.objects.filter(resource=resource, group__in=user_groups)
)
return querysets
| bsd-3-clause | 29f3ff050562f61b4acb5a5c3d9bdd76 | 39.030928 | 98 | 0.589235 | 4.3507 | false | false | false | false |
hydroshare/hydroshare | hs_file_types/nc_functions/nc_dump.py | 1 | 5845 | """
Module used to get the header info of netcdf file
WORKFLOW:
There are two ways to get the netcdf header string.
1) method1 run ncdump -h by python subprocess module: get_nc_dump_string_by_ncdump()
2) method2 use the netCDF4 python lib to look into the netcdf to extract the the header info:
get_nc_dump_string()
3) get_netcdf_header_file() will try the first method and if it fails it will call the second method
NOTES:
1) make sure the 'ncdump' is registered by the system path. otherwise suprocess won't recoganize
the ncdump command
REF
ncdump c code:
http://www.unidata.ucar.edu/software/netcdf/docs/ncdump_8c_source.html
json dump dict in pretty format:
http://stackoverflow.com/questions/3229419/pretty-printing-nested-dictionaries-in-python
subprocess call:
https://docs.python.org/2/library/subprocess.html
http://stackoverflow.com/questions/923079/how-can-i-capture-the-stdout-output-of-a-child
-process/923108#923108
"""
from collections import OrderedDict
from os.path import basename
import os
import json
import subprocess
import netCDF4
from .nc_utils import get_nc_dataset
def get_netcdf_header_file(nc_file_name, dump_folder=''):
"""
(string,string) -> file
Return: given the full netcdf file path name, return text file for the netcdf header information
"""
# create a new text file
# name with no file extension
nc_file_basename = '.'.join(basename(nc_file_name).split('.')[:-1])
nc_dump_file_folder = dump_folder if dump_folder else os.getcwd()
nc_dump_file_name = nc_dump_file_folder + '/' + nc_file_basename + '_header_info.txt'
nc_dump_file = open(nc_dump_file_name, 'w')
# write the nc_dump string in text fle
dump_string = get_nc_dump_string_by_ncdump(nc_file_name) \
if get_nc_dump_string_by_ncdump(nc_file_name) else get_nc_dump_string(nc_file_name)
if dump_string:
nc_dump_file.write(dump_string)
def get_nc_dump_string_by_ncdump(nc_file_name):
"""
(string) -> string
Return: string create by running "ncdump -h" command for netcdf file.
"""
try:
process = subprocess.Popen(['ncdump', '-h', nc_file_name], stdout=subprocess.PIPE, encoding="UTF-8")
nc_dump_string = process.communicate()[0]
except Exception:
nc_dump_string = ''
return nc_dump_string
def get_nc_dump_string(nc_file_name):
"""
(string) -> string
Return: string created by python netCDF4 lib similar as the "ncdump -h" command for netcdf file.
"""
try:
nc_dataset = get_nc_dataset(nc_file_name)
nc_file_basename = '.'.join(basename(nc_file_name).split('.')[:-1])
nc_dump_dict = get_nc_dump_dict(nc_dataset)
if nc_dump_dict:
nc_dump_string = 'netcdf {0} \n'.format(nc_file_basename)
nc_dump_string += json.dumps(nc_dump_dict, indent=4)
else:
nc_dump_string = ''
except Exception:
nc_dump_string = ''
return nc_dump_string
def get_nc_dump_dict(nc_group):
"""
(obj) -> dict
Return: Dictionary storing the header information of netcdf similar as running 'ncdump -h'
"""
info = OrderedDict()
if isinstance(nc_group, netCDF4.Dataset):
if get_dimensions_info(nc_group):
info['dimensions'] = get_dimensions_info(nc_group)
if get_variables_info(nc_group):
info['variables'] = get_variables_info(nc_group)
if get_global_attr_info(nc_group):
info['global attributes'] = get_global_attr_info(nc_group)
if nc_group.groups:
for group_name, group_obj in list(nc_group.groups.items()):
try:
info['group: ' + group_name] = get_nc_dump_dict(group_obj)
except Exception:
continue
return info
def get_dimensions_info(nc_group):
"""
(obj) -> dict
Return: Dimension info of a netcdf group object.
"""
dimensions_info = OrderedDict()
for dim_name, dim_obj in list(nc_group.dimensions.items()):
try:
if dim_obj.isunlimited():
dimensions_info[dim_name] = 'UNLIMITED; // ({0} currently)'.format(len(dim_obj))
else:
dimensions_info[dim_name] = len(dim_obj)
except:
continue
return dimensions_info
def get_global_attr_info(nc_group):
"""
(obj) -> dict
Return: global attribute info of a netcdf group object.
"""
global_attr_info = OrderedDict()
if nc_group.__dict__:
for name, val in list(nc_group.__dict__.items()):
value = str(val).split('\n') if '\n' in str(val) else str(val)
global_attr_info[name] = value
return global_attr_info
def get_variables_info(nc_group):
"""
(obj) -> dict
Return: global attribute info of a netcdf group object.
"""
variables_info = OrderedDict()
if nc_group.variables:
for var_name, var_obj in list(nc_group.variables.items()):
try:
if isinstance(var_obj.datatype, netCDF4.CompoundType):
var_type = 'compound'
elif isinstance(var_obj.datatype, netCDF4.VLType):
var_type = 'variable length'
else:
var_type = var_obj.datatype.name
var_dimensions = '({0})'.format(','.join(var_obj.dimensions).encode())
var_title = '{0} {1}{2}'.format(var_type, var_name, var_dimensions)
variables_info[var_title] = OrderedDict()
for name, val in list(var_obj.__dict__.items()):
value = str(val).split('\n') if '\n' in str(val) else str(val)
variables_info[var_title][name] = value
except Exception:
continue
return variables_info
| bsd-3-clause | 37275416b57a8ae69d9605fb79e0b479 | 31.653631 | 108 | 0.616424 | 3.568376 | false | false | false | false |
hydroshare/hydroshare | hs_core/tests/api/views/test_group.py | 1 | 42071 | import os
import shutil
import json
from mock import patch
from django.test import Client
from django.urls import reverse
from django.contrib.auth.models import Group
from django.db import transaction
from django.utils.http import int_to_base36
from rest_framework import status
from hs_core import hydroshare
from hs_core.views import create_user_group, update_user_group, share_group_with_user, unshare_group_with_user, \
make_group_membership_request, act_on_group_membership_request, share_resource_with_group, \
unshare_resource_with_group, delete_user_group, restore_user_group
from hs_core.testing import MockIRODSTestCaseMixin, ViewTestCase
from hs_access_control.models import PrivilegeCodes
from theme.backends import without_login_date_token_generator
class TestGroup(MockIRODSTestCaseMixin, ViewTestCase):
    def setUp(self):
        """Set up shared fixtures: two users, the default group, and a resource.

        john owns the groups/resources under test; mike is the user things get
        shared with. The action-email helper is patched so no mail is sent.
        """
        super(TestGroup, self).setUp()
        # patch the email helper for the whole test; the patch is undone by addCleanup
        patcher_email_send_call = patch('hs_core.views.send_action_to_take_email')
        patcher_email_send_call.start()
        self.addCleanup(patcher_email_send_call.stop)
        # default group that hydroshare accounts are associated with
        self.group, _ = Group.objects.get_or_create(name='Hydroshare Author')
        self.username = 'john'
        self.password = 'jhmypassword'
        # john: owner of the groups/resources created in these tests
        self.john = hydroshare.create_account(
            'john@gmail.com',
            username=self.username,
            first_name='John',
            last_name='Clarson',
            superuser=False,
            password=self.password,
            groups=[]
        )
        # mike: plain user that groups/resources are shared with
        self.mike = hydroshare.create_account(
            'mike@gmail.com',
            username='mike',
            first_name='Mike',
            last_name='Jensen',
            superuser=False,
            groups=[]
        )
        # create a resource for sharing with group
        self.resource = hydroshare.create_resource(resource_type='GenericResource',
                                                   owner=self.john,
                                                   title='Test Resource',
                                                   metadata=[]
                                                   )
def tearDown(self):
if os.path.exists(self.temp_dir):
shutil.rmtree(self.temp_dir)
super(TestGroup, self).tearDown()
def test_create_group(self):
# TODO: test with picture file upload for the group
url = reverse('create_user_group')
# test passing privacy_level = 'public'
grp_data = {'name': 'Test Group-1', 'description': 'This is a cool group-1', 'privacy_level': 'public'}
request = self.factory.post(url, data=grp_data)
self.set_request_message_attributes(request)
request.user = self.john
response = create_user_group(request)
new_group = Group.objects.filter(name='Test Group-1').first()
self.assertNotEqual(new_group, None)
self.assertEqual(new_group.gaccess.description, 'This is a cool group-1')
self.assertEqual(new_group.gaccess.public, True)
self.assertEqual(new_group.gaccess.discoverable, True)
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
self.assertEqual(response['Location'], reverse('group', args=[new_group.id]))
# test passing privacy_level = 'private'
grp_data = {'name': 'Test Group-2', 'description': 'This is a cool group-2', 'privacy_level': 'private'}
request = self.factory.post(url, data=grp_data)
self.set_request_message_attributes(request)
request.user = self.john
create_user_group(request)
new_group = Group.objects.filter(name='Test Group-2').first()
self.assertNotEqual(new_group, None)
self.assertEqual(new_group.gaccess.description, 'This is a cool group-2')
self.assertEqual(new_group.gaccess.public, False)
self.assertEqual(new_group.gaccess.discoverable, False)
# test passing privacy_level = 'discoverable'
grp_data = {'name': 'Test Group-3', 'description': 'This is a cool group-3', 'privacy_level': 'discoverable'}
request = self.factory.post(url, data=grp_data)
self.set_request_message_attributes(request)
request.user = self.john
create_user_group(request)
new_group = Group.objects.filter(name='Test Group-3').first()
self.assertNotEqual(new_group, None)
self.assertEqual(new_group.gaccess.description, 'This is a cool group-3')
self.assertEqual(new_group.gaccess.public, False)
self.assertEqual(new_group.gaccess.discoverable, True)
    def test_group_create_failures(self):
        """Group creation must fail without required fields or with a duplicate name.

        Required POST fields: 'name', 'description', and 'privacy_level'
        (one of 'public', 'private', 'discoverable'). Each failure should
        redirect back to the referring page and create no group.
        """
        # test that post data for 'name' and 'description' are required
        # for creating a group. Also post data must have a key 'privacy_level'
        # with one of these values ('public', 'private', 'discoverable'). Duplicate group names are
        # not allowed
        # at this point there should be only one group
        self.assertEqual(Group.objects.count(), 1)
        url = reverse('create_user_group')
        # test 'name' is required
        grp_data = {'description': 'This is a cool group', 'privacy_level': 'public'}
        request = self.factory.post(url, data=grp_data)
        self.set_request_message_attributes(request)
        request.user = self.john
        request.META['HTTP_REFERER'] = "/some_url/"
        response = create_user_group(request)
        # a failed create redirects back to the referring page
        self.assertEqual(response.status_code, status.HTTP_302_FOUND)
        self.assertEqual(response['Location'], request.META['HTTP_REFERER'])
        new_group = Group.objects.filter(gaccess__description='This is a cool group').first()
        self.assertEqual(new_group, None)
        # at this point there should be only one group
        self.assertEqual(Group.objects.count(), 1)
        # test 'description' is required
        grp_data = {'name': 'Test Group', 'privacy_level': 'public'}
        request = self.factory.post(url, data=grp_data)
        self.set_request_message_attributes(request)
        request.user = self.john
        request.META['HTTP_REFERER'] = "/some_url/"
        response = create_user_group(request)
        self.assertEqual(response.status_code, status.HTTP_302_FOUND)
        self.assertEqual(response['Location'], request.META['HTTP_REFERER'])
        new_group = Group.objects.filter(name='Test Group').first()
        self.assertEqual(new_group, None)
        # at this point there should be only one group
        self.assertEqual(Group.objects.count(), 1)
        # test 'privacy_level' is required
        grp_data = {'name': 'Test Group', 'description': 'This is a cool group'}
        request = self.factory.post(url, data=grp_data)
        self.set_request_message_attributes(request)
        request.user = self.john
        request.META['HTTP_REFERER'] = "/some_url/"
        response = create_user_group(request)
        self.assertEqual(response.status_code, status.HTTP_302_FOUND)
        self.assertEqual(response['Location'], request.META['HTTP_REFERER'])
        new_group = Group.objects.filter(name='Test Group').first()
        self.assertEqual(new_group, None)
        # at this point there should be only one group
        self.assertEqual(Group.objects.count(), 1)
        # test 'privacy_level' should have one of these values (public, private, discoverable)
        grp_data = {'name': 'Test Group', 'description': 'This is a cool group', 'privacy_level': 'some-level'}
        request = self.factory.post(url, data=grp_data)
        self.set_request_message_attributes(request)
        request.user = self.john
        request.META['HTTP_REFERER'] = "/some_url/"
        response = create_user_group(request)
        self.assertEqual(response.status_code, status.HTTP_302_FOUND)
        self.assertEqual(response['Location'], request.META['HTTP_REFERER'])
        new_group = Group.objects.filter(name='Test Group').first()
        self.assertEqual(new_group, None)
        # at this point there should be only one group
        self.assertEqual(Group.objects.count(), 1)
        # test that duplicate group names are not allowed
        # first create 'Test Group' successfully with all required fields
        grp_data = {'name': 'Test Group', 'description': 'This is a cool group', 'privacy_level': 'private'}
        request = self.factory.post(url, data=grp_data)
        self.set_request_message_attributes(request)
        request.user = self.john
        response = create_user_group(request)
        self.assertEqual(response.status_code, status.HTTP_302_FOUND)
        # at this point there should be 2 groups
        self.assertEqual(Group.objects.count(), 2)
        # create a group with duplicate name
        grp_data = {'name': 'Test Group', 'description': 'This is a very cool group', 'privacy_level': 'private'}
        request = self.factory.post(url, data=grp_data)
        self.set_request_message_attributes(request)
        request.user = self.john
        request.META['HTTP_REFERER'] = "/some_url/"
        # had to do this as a transaction for some reason, otherwise the last statement
        # of this function generates a transaction error
        with transaction.atomic():
            response = create_user_group(request)
        self.assertEqual(response.status_code, status.HTTP_302_FOUND)
        self.assertEqual(response['Location'], request.META['HTTP_REFERER'])
        # at this point there should be still 2 groups
        self.assertEqual(Group.objects.count(), 2)
def test_update_group(self):
# TODO: test with picture file upload for the group
# first create a group to test updating group
url = reverse('create_user_group')
grp_data = {'name': 'Test Group-1', 'description': 'This is a cool group-1',
'purpose': 'This group has no purpose', 'privacy_level': 'public'}
request = self.factory.post(url, data=grp_data)
self.set_request_message_attributes(request)
request.user = self.john
response = create_user_group(request)
new_group = Group.objects.filter(name='Test Group-1').first()
self.assertEqual(new_group.gaccess.active, True)
self.assertNotEqual(new_group, None)
self.assertEqual(new_group.gaccess.description, 'This is a cool group-1')
self.assertEqual(new_group.gaccess.purpose, 'This group has no purpose')
self.assertEqual(new_group.gaccess.public, True)
self.assertEqual(new_group.gaccess.discoverable, True)
self.assertEqual(new_group.gaccess.active, True)
self.assertEqual(new_group.gaccess.shareable, True)
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
self.assertEqual(response['Location'], reverse('group', args=[new_group.id]))
# now test updating new_group
url_params = {'group_id': new_group.id}
url = reverse('update_user_group', kwargs=url_params)
# update name, description, purpose
grp_data = {'name': 'Test Group-2', 'description': 'This is a cool group-2',
'purpose': 'This group now has purpose', 'privacy_level': 'public'}
request = self.factory.post(url, data=grp_data)
self.set_request_message_attributes(request)
request.user = self.john
request.META['HTTP_REFERER'] = "/some_url/"
response = update_user_group(request, group_id=new_group.id)
new_group = Group.objects.filter(name='Test Group-2').first()
self.assertNotEqual(new_group, None)
self.assertEqual(new_group.gaccess.description, 'This is a cool group-2')
self.assertEqual(new_group.gaccess.purpose, 'This group now has purpose')
self.assertEqual(new_group.gaccess.public, True)
self.assertEqual(new_group.gaccess.discoverable, True)
self.assertEqual(new_group.gaccess.active, True)
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
self.assertEqual(response['Location'], request.META['HTTP_REFERER'])
# update group to remove purpose
grp_data = {'name': 'Test Group-2', 'description': 'This is a cool group-2',
'privacy_level': 'public'}
request = self.factory.post(url, data=grp_data)
self.set_request_message_attributes(request)
request.user = self.john
request.META['HTTP_REFERER'] = "/some_url/"
response = update_user_group(request, group_id=new_group.id)
new_group = Group.objects.filter(name='Test Group-2').first()
self.assertNotEqual(new_group, None)
self.assertEqual(new_group.gaccess.description, 'This is a cool group-2')
self.assertEqual(new_group.gaccess.purpose, '')
self.assertEqual(new_group.gaccess.public, True)
self.assertEqual(new_group.gaccess.discoverable, True)
self.assertEqual(new_group.gaccess.active, True)
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
self.assertEqual(response['Location'], request.META['HTTP_REFERER'])
# update privacy_level (set to private)- this set public to false and discoverable to false
grp_data = {'name': 'Test Group-2', 'description': 'This is a cool group-2',
'privacy_level': 'private'}
request = self.factory.post(url, data=grp_data)
self.set_request_message_attributes(request)
request.user = self.john
request.META['HTTP_REFERER'] = "/some_url/"
response = update_user_group(request, group_id=new_group.id)
new_group = Group.objects.filter(name='Test Group-2').first()
self.assertNotEqual(new_group, None)
self.assertEqual(new_group.gaccess.description, 'This is a cool group-2')
self.assertEqual(new_group.gaccess.purpose, '')
self.assertEqual(new_group.gaccess.public, False)
self.assertEqual(new_group.gaccess.discoverable, False)
self.assertEqual(new_group.gaccess.active, True)
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
self.assertEqual(response['Location'], request.META['HTTP_REFERER'])
# update privacy_level (set to public) - this set public to true and discoverable to true
grp_data = {'name': 'Test Group-2', 'description': 'This is a cool group-2',
'privacy_level': 'public'}
request = self.factory.post(url, data=grp_data)
self.set_request_message_attributes(request)
request.user = self.john
request.META['HTTP_REFERER'] = "/some_url/"
response = update_user_group(request, group_id=new_group.id)
new_group = Group.objects.filter(name='Test Group-2').first()
self.assertNotEqual(new_group, None)
self.assertEqual(new_group.gaccess.description, 'This is a cool group-2')
self.assertEqual(new_group.gaccess.purpose, '')
self.assertEqual(new_group.gaccess.public, True)
self.assertEqual(new_group.gaccess.discoverable, True)
self.assertEqual(new_group.gaccess.active, True)
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
self.assertEqual(response['Location'], request.META['HTTP_REFERER'])
# update privacy_level (set to discoverable) - this should set discoverable to
# true and public to false
grp_data = {'name': 'Test Group-2', 'description': 'This is a cool group-2',
'privacy_level': 'discoverable'}
request = self.factory.post(url, data=grp_data)
self.set_request_message_attributes(request)
request.user = self.john
request.META['HTTP_REFERER'] = "/some_url/"
response = update_user_group(request, group_id=new_group.id)
new_group = Group.objects.filter(name='Test Group-2').first()
self.assertNotEqual(new_group, None)
self.assertEqual(new_group.gaccess.description, 'This is a cool group-2')
self.assertEqual(new_group.gaccess.purpose, '')
self.assertEqual(new_group.gaccess.public, False)
self.assertEqual(new_group.gaccess.discoverable, True)
self.assertEqual(new_group.gaccess.active, True)
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
self.assertEqual(response['Location'], request.META['HTTP_REFERER'])
def test_delete_restore_group(self):
# test a group can be deleted or restored
# first create a group to test updating group
url = reverse('create_user_group')
grp_data = {'name': 'Test Group-1', 'description': 'This is a cool group-1',
'purpose': 'This group has no purpose', 'privacy_level': 'public'}
request = self.factory.post(url, data=grp_data)
self.set_request_message_attributes(request)
request.user = self.john
create_user_group(request)
new_group = Group.objects.filter(name='Test Group-1').first()
self.assertEqual(new_group.gaccess.active, True)
post_data = {'group_id': new_group.id}
url = reverse('delete_user_group', kwargs=post_data)
request = self.factory.post(url, data=post_data)
self.set_request_message_attributes(request)
request.user = self.john
request.META['HTTP_REFERER'] = "/some_url/"
response = delete_user_group(request, group_id=new_group.id)
new_group = Group.objects.filter(name='Test Group-1').first()
self.assertEqual(new_group.gaccess.active, False)
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
self.assertEqual(response['Location'], request.META['HTTP_REFERER'])
# test undeleting the group
url = reverse('restore_user_group', kwargs=post_data)
request = self.factory.post(url, data=post_data)
self.set_request_message_attributes(request)
request.user = self.john
request.META['HTTP_REFERER'] = "/some_url/"
response = restore_user_group(request, group_id=new_group.id)
new_group = Group.objects.filter(name='Test Group-1').first()
self.assertEqual(new_group.gaccess.active, True)
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
self.assertEqual(response['Location'], request.META['HTTP_REFERER'])
def test_group_update_failure(self):
# test that post data for 'name', 'description', and 'privacy_level' are required
# when updating a group
# first create a group to test updating group
url = reverse('create_user_group')
grp_data = {'name': 'Test Group-1', 'description': 'This is a cool group-1',
'purpose': 'This group has purpose', 'privacy_level': 'public'}
request = self.factory.post(url, data=grp_data)
self.set_request_message_attributes(request)
request.user = self.john
response = create_user_group(request)
new_group = Group.objects.filter(name='Test Group-1').first()
# now test updating new_group
url_params = {'group_id': new_group.id}
url = reverse('update_user_group', kwargs=url_params)
# test name is required -> update should fail
grp_data = {'description': 'This is a cool group-2', 'purpose': 'This group has purpose'}
request = self.factory.post(url, data=grp_data)
self.set_request_message_attributes(request)
request.user = self.john
request.META['HTTP_REFERER'] = "/some_url/"
response = update_user_group(request, group_id=new_group.id)
# description has not changed proves update failed
self.assertNotEqual(new_group.gaccess.description, 'This is a cool group-2')
self.assertEqual(new_group.gaccess.description, 'This is a cool group-1')
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
self.assertEqual(response['Location'], request.META['HTTP_REFERER'])
# test description is required -> update should fail
grp_data = {'name': 'Test Group-2', 'purpose': 'This group has purpose',
'privacy_level': 'public'}
request = self.factory.post(url, data=grp_data)
self._update_failure(new_group, request)
# test privacy_level is required -> update should fail
grp_data = {'name': 'Test Group-2', 'description': 'This is a cool group',
'purpose': 'This group has purpose'}
request = self.factory.post(url, data=grp_data)
self._update_failure(new_group, request)
# test trying to update group with a duplicate name ('HydroShare Author') should fail
grp_data = {'name': 'Hydroshare Author', 'description': 'This is a cool group-1',
'purpose': 'This group has purpose'}
request = self.factory.post(url, data=grp_data)
self.set_request_message_attributes(request)
request.user = self.john
request.META['HTTP_REFERER'] = "/some_url/"
response = update_user_group(request, group_id=new_group.id)
# name has not changed proves update failed
self.assertEqual(Group.objects.filter(name='Hydroshare Author').count(), 1)
updated_group = Group.objects.filter(name='Hydroshare Author').first()
self.assertNotEqual(updated_group.id, new_group.id)
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
self.assertEqual(response['Location'], request.META['HTTP_REFERER'])
def test_share_group_with_user(self):
# create a group to share
new_group = self._create_group()
# check mike is not a member of the group
self.assertNotIn(self.mike, new_group.gaccess.members)
# John to share 'Test Group' with user Mike with 'view' privilege
self._share_group_with_user(new_group, 'view')
# John to share 'Test Group' with user Mike with 'edit' privilege
self._share_group_with_user(new_group, 'edit')
# John to share 'Test Group' with user Mike with 'edit' privilege
self._share_group_with_user(new_group, 'edit')
# John to share 'Test Group' with user Mike with 'owner' privilege
self._share_group_with_user(new_group, 'owner')
def test_share_group_with_user_invalid_privilege(self):
# a group can shared with a user with privilege of one of these (view, edit or owner)
# create a group to share
new_group = self._create_group()
# John to share 'Test Group' with user Mike with invalid privilege
url_params = {'group_id': new_group.id, 'user_id': self.mike.id, 'privilege': "badprivilege"}
url = reverse('share_group_with_user', kwargs=url_params)
request = self.factory.post(url)
request.META['HTTP_REFERER'] = "/some_url/"
self.set_request_message_attributes(request)
request.user = self.john
response = share_group_with_user(request, group_id=new_group.id, user_id=self.mike.id, privilege="badprivilege")
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
# check mike is not a member of the group
self.assertNotIn(self.mike, new_group.gaccess.members)
def test_unshare_group_with_user(self):
# create a group to share
new_group = self._create_group()
# John to share 'Test Group' with user Mike with 'view' privilege
self._share_group_with_user(new_group, 'view')
# check mike is a member of the group
self.assertIn(self.mike, new_group.gaccess.members)
# unshare test group with mike
url_params = {'group_id': new_group.id, 'user_id': self.mike.id}
url = reverse('unshare_group_with_user', kwargs=url_params)
request = self.factory.post(url)
request.META['HTTP_REFERER'] = "/some_url/"
self.set_request_message_attributes(request)
request.user = self.john
response = unshare_group_with_user(request, group_id=new_group.id, user_id=self.mike.id)
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
# check mike is not a member of the group
self.assertNotIn(self.mike, new_group.gaccess.members)
def test_share_resource_with_group(self):
# create a group to share with a resource
new_group = self._create_group()
# let group owner john share resource with view privilege
response = self._share_resource_with_group(group=new_group, privilege='view')
self.assertEqual(response.status_code, status.HTTP_200_OK)
response_content = json.loads(response.content.decode())
self.assertEqual(response_content['status'], 'success')
self.assertIn(self.resource, new_group.gaccess.view_resources)
# share resource with group with edit privilege
# first unshare resource with group
self.john.uaccess.unshare_resource_with_group(self.resource, new_group)
self.assertNotIn(self.resource, new_group.gaccess.view_resources)
response = self._share_resource_with_group(group=new_group, privilege='edit')
self.assertEqual(response.status_code, status.HTTP_200_OK)
response_content = json.loads(response.content.decode())
self.assertEqual(response_content['status'], 'success')
self.assertIn(self.resource, new_group.gaccess.edit_resources)
# test a group can't have owner privilege over a resource
# first unshare resource with group
self.john.uaccess.unshare_resource_with_group(self.resource, new_group)
self.assertNotIn(self.resource, new_group.gaccess.view_resources)
response = self._share_resource_with_group(group=new_group, privilege='owner')
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
response_content = json.loads(response.content.decode())
self.assertEqual(response_content['status'], 'error')
self.assertNotIn(self.resource, new_group.gaccess.view_resources)
    def test_unshare_resource_with_group(self):
        """Owner can unshare a resource from a group; a plain member cannot.

        The view returns HTTP 200 in both cases; success vs failure is carried
        in the JSON 'status' field of the response body.
        """
        # create a group to share/unshare with a resource
        new_group = self._create_group()
        # first share the resource with the group
        self.john.uaccess.share_resource_with_group(self.resource, new_group, PrivilegeCodes.VIEW)
        self.assertIn(self.resource, new_group.gaccess.view_resources)
        # now unshare the resource with the group
        url_params = {'shortkey': self.resource.short_id, 'group_id': new_group.id}
        url = reverse('unshare_resource_with_group', kwargs=url_params)
        request = self.factory.post(url)
        request.user = self.john
        response = unshare_resource_with_group(request, shortkey=self.resource.short_id,
                                               group_id=new_group.id)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        response_content = json.loads(response.content.decode())
        self.assertEqual(response_content['status'], 'success')
        self.assertNotIn(self.resource, new_group.gaccess.view_resources)
        # test group member (non-owner) unsharing a resource with a group
        # returns response status as 'error' and the group is not unshared
        # let make mike a member of group
        self.john.uaccess.share_group_with_user(new_group, self.mike, PrivilegeCodes.VIEW)
        self.assertIn(new_group, self.mike.uaccess.view_groups)
        # let john share the resource with group
        self.john.uaccess.share_resource_with_group(self.resource, new_group, PrivilegeCodes.VIEW)
        self.assertIn(self.resource, new_group.gaccess.view_resources)
        # let mike unshare the resource with group
        # NOTE: the same request object from above is reused here; only its
        # user is swapped to mike before dispatching again
        request.user = self.mike
        response = unshare_resource_with_group(request, shortkey=self.resource.short_id,
                                               group_id=new_group.id)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        response_content = json.loads(response.content.decode())
        self.assertEqual(response_content['status'], 'error')
        # resource is still shared with the group
        self.assertIn(self.resource, new_group.gaccess.view_resources)
def test_make_group_membership_request(self):
    """A user can request to join a group; a duplicate request is a no-op."""
    # test that user can make request to join a group
    # create a group
    new_group = self._create_group()
    # now there should be no GroupMembershipRequest associated with Mike
    self.assertEqual(self.mike.uaccess.group_membership_requests.count(), 0)
    # test that user mike can make a request to join the new_group
    url_params = {'group_id': new_group.id}
    url = reverse('make_group_membership_request', kwargs=url_params)
    request = self.factory.post(url)
    self.set_request_message_attributes(request)
    request.META['HTTP_REFERER'] = "/some_url/"
    request.user = self.mike
    response = make_group_membership_request(request, group_id=new_group.id)
    # the view redirects back to the referring page
    self.assertEqual(response.status_code, status.HTTP_302_FOUND)
    self.assertEqual(response['Location'], request.META['HTTP_REFERER'])
    # now there should be one GroupMembershipRequest associated with Mike
    self.assertEqual(self.mike.uaccess.group_membership_requests.count(), 1)
    # test user making request more than once for the same group should fail
    response = make_group_membership_request(request, group_id=new_group.id)
    self.assertEqual(response.status_code, status.HTTP_302_FOUND)
    self.assertEqual(response['Location'], request.META['HTTP_REFERER'])
    # there should be still one GroupMembershipRequest associated with Mike
    self.assertEqual(self.mike.uaccess.group_membership_requests.count(), 1)
def test_make_group_membership_invitation(self):
    """Group owner can invite a user once; a repeat invitation is a no-op."""
    invited_group = self._create_group()
    # the owner should have no pending membership requests yet
    self.assertEqual(self.john.uaccess.group_membership_requests.count(), 0)

    kwargs = {'group_id': invited_group.id, 'user_id': self.mike.id}
    request = self.factory.post(reverse('make_group_membership_request', kwargs=kwargs))
    self.set_request_message_attributes(request)
    request.META['HTTP_REFERER'] = "/some_url/"
    request.user = self.john

    # the first invitation succeeds and redirects back to the referer
    response = make_group_membership_request(request, group_id=invited_group.id,
                                             user_id=self.mike.id)
    self.assertEqual(response.status_code, status.HTTP_302_FOUND)
    self.assertEqual(response['Location'], request.META['HTTP_REFERER'])
    self.assertEqual(self.john.uaccess.group_membership_requests.count(), 1)

    # inviting the same user to the same group again must not create a
    # second membership request
    response = make_group_membership_request(request, group_id=invited_group.id,
                                             user_id=self.mike.id)
    self.assertEqual(response.status_code, status.HTTP_302_FOUND)
    self.assertEqual(response['Location'], request.META['HTTP_REFERER'])
    self.assertEqual(self.john.uaccess.group_membership_requests.count(), 1)
def test_act_on_group_membership_request(self):
    """Group owner can accept or decline a user's request to join a group."""
    # test group owner accepting/declining a request from a user to join a group
    # let user mike make a request
    # create a group
    new_group = self._create_group()
    # let user mike make a request to join the new_group
    membership_request = self._generate_user_request_to_join_group(new_group)
    # test john can accept the request
    # check mike is not a member of the group yet
    self.assertNotIn(self.mike, new_group.gaccess.members)
    # john accepts mike's request
    self._owner_act_on_request(membership_request, 'accept')
    # check mike is now a member of the group
    self.assertIn(self.mike, new_group.gaccess.members)
    # test owner decline user request
    # remove mike from group
    self.john.uaccess.unshare_group_with_user(new_group, self.mike)
    # check mike is no more a member of the group
    self.assertNotIn(self.mike, new_group.gaccess.members)
    # let mike again make a request
    membership_request = self._generate_user_request_to_join_group(new_group)
    # let john decline mike's request
    self._owner_act_on_request(membership_request, 'decline')
    # check mike is not a member of the group
    self.assertNotIn(self.mike, new_group.gaccess.members)
def test_act_on_group_membership_invitation(self):
    """Invited user may accept or decline a group membership invitation."""
    target_group = self._create_group()

    # accept path: john invites mike, mike accepts, mike becomes a member
    invitation = self._generate_owner_invitation_to_join_group(target_group)
    self.assertNotIn(self.mike, target_group.gaccess.members)
    self._user_act_on_invitation(invitation, 'accept')
    self.assertIn(self.mike, target_group.gaccess.members)

    # decline path: remove mike, invite again, decline -> still not a member
    self.john.uaccess.unshare_group_with_user(target_group, self.mike)
    self.assertNotIn(self.mike, target_group.gaccess.members)
    invitation = self._generate_owner_invitation_to_join_group(target_group)
    self._user_act_on_invitation(invitation, 'decline')
    self.assertNotIn(self.mike, target_group.gaccess.members)
def test_group_membership_acceptance_via_email_link(self):
    """Clicking the tokenized email link accepts an invitation or request.

    Covers both directions: the invited user (mike) following his link,
    and the group owner (john) following his link to approve a request.
    """
    # here we are testing group_membership view function which is invoked
    # when the user clicks the link provided in the email
    # create a group
    new_group = self._create_group()
    # test user accepting group owner's invitation
    # check mike is no more a member of the group
    self.assertNotIn(self.mike, new_group.gaccess.members)
    # let john invite mike to join group
    membership_request = self.john.uaccess.create_group_membership_request(new_group, self.mike)
    # create the link that mike should find in his email
    uidb36 = int_to_base36(self.mike.id)
    token = without_login_date_token_generator.make_token(self.mike)
    url_params = {"uidb36": uidb36, "token": token, "membership_request_id": membership_request.id}
    url = reverse('group_membership', kwargs=url_params)
    # due to session requirement of the view being tested, using the Client class
    client = Client()
    # let mike click the link in the email
    response = client.get(url)
    redirect_url = '/group/{}/'.format(new_group.id)
    self.assertEqual(response.status_code, status.HTTP_302_FOUND)
    self.assertTrue(response['Location'].endswith(redirect_url))
    # check mike is now a member of the group
    self.assertIn(self.mike, new_group.gaccess.members)
    # test group owner (john) accepting user (mike) request to join a group
    # remove mike from group
    self.john.uaccess.unshare_group_with_user(new_group, self.mike)
    # check mike is no more a member of the group
    self.assertNotIn(self.mike, new_group.gaccess.members)
    # let mike make a request to join group
    membership_request = self.mike.uaccess.create_group_membership_request(new_group)
    # create the link that john should find in his email
    uidb36 = int_to_base36(self.john.id)
    token = without_login_date_token_generator.make_token(self.john)
    url_params = {"uidb36": uidb36, "token": token, "membership_request_id": membership_request.id}
    url = reverse('group_membership', kwargs=url_params)
    # let john click the link
    response = client.get(url)
    redirect_url = '/group/{}/'.format(new_group.id)
    self.assertEqual(response.status_code, status.HTTP_302_FOUND)
    self.assertTrue(response['Location'].endswith(redirect_url))
    # check mike is now a member of the group
    self.assertIn(self.mike, new_group.gaccess.members)
def _update_failure(self, group, request):
    """Run update_user_group expecting failure: name unchanged, redirect back."""
    original_name = group.name
    self.set_request_message_attributes(request)
    request.user = self.john
    request.META['HTTP_REFERER'] = "/some_url/"
    response = update_user_group(request, group_id=group.id)
    # the new name must not exist and the old one must still be present,
    # which proves the update did not go through
    self.assertEqual(Group.objects.filter(name='Test Group-2').first(), None)
    self.assertNotEqual(Group.objects.filter(name=original_name).first(), None)
    self.assertEqual(response.status_code, status.HTTP_302_FOUND)
    self.assertEqual(response['Location'], request.META['HTTP_REFERER'])
def _share_resource_with_group(self, group, privilege):
    """POST share_resource_with_group as john and return the view response."""
    kwargs = {'shortkey': self.resource.short_id, 'privilege': privilege,
              'group_id': group.id}
    request = self.factory.post(reverse('share_resource_with_group', kwargs=kwargs))
    self.set_request_message_attributes(request)
    request.user = self.john
    return share_resource_with_group(request, shortkey=self.resource.short_id,
                                     privilege=privilege, group_id=group.id)
def _owner_act_on_request(self, membership_request, action):
    """Group owner (john) performs *action* ('accept'/'decline') on a request.

    Asserts the view redirects back to the referring page.
    """
    kwargs = {'membership_request_id': membership_request.id, 'action': action}
    request = self.factory.post(reverse('act_on_group_membership_request', kwargs=kwargs))
    self.set_request_message_attributes(request)
    request.user = self.john
    request.META['HTTP_REFERER'] = "/some_url/"
    response = act_on_group_membership_request(
        request, membership_request_id=membership_request.id, action=action)
    self.assertEqual(response.status_code, status.HTTP_302_FOUND)
    self.assertEqual(response['Location'], request.META['HTTP_REFERER'])
def _user_act_on_invitation(self, membership_request, action):
    """Invited user (mike) performs *action* ('accept'/'decline') on an invitation.

    Asserts the view redirects back to the referring page.
    """
    kwargs = {'membership_request_id': membership_request.id, 'action': action}
    request = self.factory.post(reverse('act_on_group_membership_request', kwargs=kwargs))
    self.set_request_message_attributes(request)
    request.user = self.mike
    request.META['HTTP_REFERER'] = "/some_url/"
    response = act_on_group_membership_request(
        request, membership_request_id=membership_request.id, action=action)
    self.assertEqual(response.status_code, status.HTTP_302_FOUND)
    self.assertEqual(response['Location'], request.META['HTTP_REFERER'])
def _generate_user_request_to_join_group(self, group):
    """Have mike POST a membership request for *group*; return the request object."""
    url_params = {'group_id': group.id}
    url = reverse('make_group_membership_request', kwargs=url_params)
    request = self.factory.post(url)
    self.set_request_message_attributes(request)
    request.META['HTTP_REFERER'] = "/some_url/"
    request.user = self.mike
    make_group_membership_request(request, group_id=group.id)
    # the freshly created request is the only one mike has
    membership_request = self.mike.uaccess.group_membership_requests.first()
    return membership_request
def _generate_owner_invitation_to_join_group(self, group):
    """Have john invite mike to *group*; return the resulting request object."""
    url_params = {'group_id': group.id, 'user_id': self.mike.id}
    url = reverse('make_group_membership_request', kwargs=url_params)
    request = self.factory.post(url)
    self.set_request_message_attributes(request)
    request.META['HTTP_REFERER'] = "/some_url/"
    request.user = self.john
    make_group_membership_request(request, group_id=group.id, user_id=self.mike.id)
    # the freshly created invitation is the only request john has
    membership_request = self.john.uaccess.group_membership_requests.first()
    return membership_request
def _create_group(self):
    """Create a public group named 'Test Group' via the view; return it."""
    payload = {'name': 'Test Group', 'description': 'This is a cool group',
               'privacy_level': 'public'}
    request = self.factory.post(reverse('create_user_group'), data=payload)
    self.set_request_message_attributes(request)
    request.user = self.john
    create_user_group(request)
    return Group.objects.filter(name='Test Group').first()
def _share_group_with_user(self, group, privilege):
    """Share *group* with mike at *privilege* ('view'|'edit'|other=owner) and verify."""
    url_params = {'group_id': group.id, 'user_id': self.mike.id, 'privilege': privilege}
    url = reverse('share_group_with_user', kwargs=url_params)
    request = self.factory.post(url)
    request.META['HTTP_REFERER'] = "/some_url/"
    self.set_request_message_attributes(request)
    request.user = self.john
    response = share_group_with_user(request, group_id=group.id, user_id=self.mike.id, privilege=privilege)
    self.assertEqual(response.status_code, status.HTTP_302_FOUND)
    self.assertEqual(response['Location'], request.META['HTTP_REFERER'])
    # check mike is a member of the group
    self.assertIn(self.mike, group.gaccess.members)
    # check mike has the specified privilege over the group
    if privilege == 'view':
        self.assertIn(self.mike, group.gaccess.get_users_with_explicit_access(PrivilegeCodes.VIEW))
    elif privilege == 'edit':
        self.assertIn(self.mike, group.gaccess.get_users_with_explicit_access(PrivilegeCodes.CHANGE))
    else:
        # any other value is treated as ownership
        self.assertIn(self.mike, group.gaccess.get_users_with_explicit_access(PrivilegeCodes.OWNER))
| bsd-3-clause | c340c1af246591242aa3901f48acab2a | 48.495294 | 120 | 0.656509 | 3.830556 | false | true | false | false |
hydroshare/hydroshare | hs_communities/management/commands/check_czo_groups.py | 1 | 13799 | """
Check that CZO groups are set up properly.
If a resource is owned by a CZO owner, and not part of the CZO group,
then add it to the group.
If a resource is in a CZO group and not owned by the corresponding group owner,
then make it owned by that owner.
If a resource owned by a CZO group owner is not owned by CZO national,
then make it owned by CZO national.
If a resource has an inappropriate prefix for a group,
then unshare it with that group and share with the appropriate group.
"""
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User, Group
from hs_access_control.models import UserResourcePrivilege, GroupResourcePrivilege, \
GroupCommunityPrivilege, PrivilegeCodes, Community
from hs_core.models import BaseResource
from django_irods.icommands import SessionException
# Details of CZO setup.
# This should be updated as groups are added.
# Each row is [owner username, group name, resource title prefix].
czo_setup = [
    # czo username       group name            title prefix
    ["czo_national", "CZO National", "Cross-CZO"],
    ["czo_boulder", "CZO Boulder", "BCCZO"],
    ["czo_calhoun", "CZO Calhoun", "CCZO"],
    ["czo_catalina-jemez", "CZO Catalina-Jemez", "CJCZO"],
    ["czo_eel", "CZO Eel", "ERCZO"],
    ["czo_luquillo", "CZO Luquillo", "LCZO"],
    ["czo_reynolds", "CZO Reynolds", "RCCZO"],
    ["czo_shale-hills", "CZO Shale Hills", "SSHCZO"],
    ["czo_sierra", "CZO Southern Sierra", "SSCZO"],
    ["czo_christina", "CZO Christina", "CRBCZO"],
    ["czo_iml", "CZO IML", "IMLCZO"],
]
def set_quota_holder(resource, user):
    """Make *user* the quota holder of *resource*, tolerating iRODS failures.

    :param resource: BaseResource whose quota holder is checked/updated
    :param user: User who should hold the quota
    :return: True when the holder is (or already was) correct; False when
        the storage layer raised a known exception.
    """
    try:
        # skip the iRODS round trip when the holder is already correct
        if resource.get_quota_holder() != user:
            print(" SET QUOTA HOLDER FOR {} {} TO {}"
                  .format(resource.short_id,
                          resource.title.encode('ascii', 'ignore'),
                          user.username))
            resource.set_quota_holder(user, user)
    except SessionException as ex:
        # some resources copied from www for testing do not exist in the iRODS backend,
        # hence need to skip these test artifects
        print(resource.short_id + ' raised SessionException when setting quota holder: ' +
              ex.stderr)
        return False
    except AttributeError as ex:
        # when federation is not set up correctly, istorage does not have a session
        # attribute, hence raise AttributeError - ignore for testing
        # BUGFIX: Python 3 exceptions have no .message attribute; str(ex)
        # avoids a secondary AttributeError while reporting the error
        print(resource.short_id + ' raised AttributeError when setting quota holder: ' +
              str(ex))
        return False
    except ValueError as ex:
        # when federation is not set up correctly, a ValueError can also be
        # raised - ignore for testing (comment fixed: this branch handles
        # ValueError, not AttributeError)
        print(resource.short_id + ' raised ValueError when setting quota holder: ' +
              str(ex))
        return False
    return True
def check_resource_prefix(user, group, resource, prefix, mapper, grantor):
    """Refile *resource* when its title does not start with *prefix*.

    A mismatched resource is unshared from this CZO's *user* and *group*;
    the title's first word is then looked up in *mapper*
    (prefix -> [username, group name, prefix]) and, when found, the
    resource is shared with that CZO's owner and group instead.
    *grantor* is the user recorded as granting/revoking the privileges.
    """
    if not resource.title.startswith(prefix):
        print(" UNSHARING {} {}: prefix not {} (UNSHARING)"
              .format(resource.short_id,
                      resource.title.encode('ascii', 'ignore'),
                      prefix))
        # not in the user's resources
        UserResourcePrivilege.unshare(resource=resource,
                                      user=user,
                                      grantor=grantor)
        # not in the group's resources
        GroupResourcePrivilege.unshare(resource=resource,
                                       group=group,
                                       grantor=grantor)
        # Where does it really go?
        new_prefix = resource.title.split(" ")[0]
        if new_prefix in mapper:
            new_username = mapper[new_prefix][0]
            new_groupname = mapper[new_prefix][1]
            new_user = User.objects.get(username=new_username)
            new_group = Group.objects.get(name=new_groupname)
            print(" SHARING {} {} with user={} group={}"
                  .format(resource.short_id, resource.title.encode('ascii', 'ignore'),
                          new_username, new_groupname))
            UserResourcePrivilege.share(resource=resource,
                                        user=new_user,
                                        privilege=PrivilegeCodes.OWNER,
                                        grantor=grantor)
            GroupResourcePrivilege.share(resource=resource,
                                         group=new_group,
                                         privilege=PrivilegeCodes.VIEW,
                                         grantor=grantor)
        else:
            # the title's first word matches no known CZO; report only
            print(" ERROR {} {} unknown prefix {}"
                  .format(resource.short_id,
                          resource.title.encode('ascii', 'ignore'),
                          new_prefix))
def check_resource_owners(user1, user2, resource, grantor):
    """check that each resource has the proper number of owners"""
    # user1 (CZO national) and user2 (the per-CZO owner) must be the only
    # owners of *resource*; user1 also becomes the quota holder.
    owners = User.objects.filter(u2urp__resource=resource,
                                 u2urp__privilege=PrivilegeCodes.OWNER)
    if user1 not in owners:
        # fix it NOW
        print(" SHARING {} {} with first owner {}"
              .format(resource.short_id,
                      resource.title.encode('ascii', 'ignore'),
                      user1.username))
        UserResourcePrivilege.share(user=user1, resource=resource,
                                    privilege=PrivilegeCodes.OWNER, grantor=grantor)
        # first argument is also quota holder.
        set_quota_holder(resource, user1)
    # for CZO national group, there's only one owner.
    if user1 != user2 and user2 not in owners:
        # fix it NOW
        print(" SHARING {} {} with second owner {}"
              .format(resource.short_id,
                      resource.title.encode('ascii', 'ignore'),
                      user2.username))
        UserResourcePrivilege.share(user=user2, resource=resource,
                                    privilege=PrivilegeCodes.OWNER, grantor=grantor)
    # strip any owner who is neither user1 nor user2
    for o in owners:
        if o.username != user1.username and o.username != user2.username:
            # fix it NOW
            print(" UNSHARING {} {} with owner {}"
                  .format(resource.short_id,
                          resource.title.encode('ascii', 'ignore'),
                          o.username))
            UserResourcePrivilege.unshare(user=o, resource=resource, grantor=grantor)
def check_resource_group(group, resource, grantor):
    """Ensure *resource* is shared with exactly one group: *group*.

    Shares the resource with *group* when missing, and unshares it from
    every other group it is found in. *grantor* is the user recorded as
    granting/revoking the privilege.
    """
    groups = Group.objects.filter(g2grp__resource=resource)
    if group not in groups:
        # fix it NOW
        print(" SHARING {} {} with group {}"
              .format(resource.short_id,
                      resource.title.encode('ascii', 'ignore'),
                      group.name))
        GroupResourcePrivilege.share(resource=resource,
                                     group=group,
                                     grantor=grantor,
                                     privilege=PrivilegeCodes.VIEW)
    for g in groups:
        if g != group:
            # fix it NOW
            print(" UNSHARING {} {} with group {}"
                  .format(resource.short_id,
                          resource.title.encode('ascii', 'ignore'),
                          g.name))
            # BUGFIX: unshare from the *offending* group g; the original
            # passed group=group, which removed the resource from the
            # intended group while leaving it in the wrong one.
            GroupResourcePrivilege.unshare(resource=resource,
                                           group=g,
                                           grantor=grantor)
class Command(BaseCommand):
    help = "check czo setup for proper group and owners of each resource"

    def handle(self, *args, **options):
        """Audit and repair ownership/group sharing for every configured CZO.

        For each row of czo_setup: ensure the group is in the CZO National
        Community, ensure every resource is owned by czo_national, refile
        resources whose title prefix belongs to a different CZO, and make
        group membership and ownership agree.
        """
        national_user = User.objects.get(username='czo_national')
        czo_community = Community.objects.get(name='CZO National Community')
        czo_mapper = {}
        for czo in czo_setup:  # index by prefix
            czo_mapper[czo[2]] = czo
        # check each group in turn
        for czo in czo_setup:
            czo_username = czo[0]
            czo_groupname = czo[1]
            czo_prefix = czo[2]  # prefix for all titles for this group.
            print("CHECKING user {} against group {}".format(czo_username, czo_groupname))
            czo_user = User.objects.get(username=czo_username)
            czo_group = Group.objects.get(name=czo_groupname)
            user_resources = set(BaseResource.objects.filter(r2urp__user=czo_user))
            print(" There are {} user resources".format(len(user_resources)))
            # for r in user_resources:
            #     print("   {} {}".format(r.short_id, r.title.encode('ascii', 'ignore')))
            group_resources = set(BaseResource.objects.filter(r2grp__group=czo_group))
            print(" There are {} group resources".format(len(group_resources)))
            # for r in group_resources:
            #     print("   {} {}".format(r.short_id, r.title.encode('ascii', 'ignore')))
            # check that group is in the community
            if not Community.objects.filter(c2gcp__community=czo_community,
                                            c2gcp__group=czo_group).exists():
                print(" SHARING group {} with community {}"
                      .format(czo_group.name, czo_community.name))
                # fix it NOW
                GroupCommunityPrivilege.share(group=czo_group, community=czo_community,
                                              privilege=PrivilegeCodes.VIEW,
                                              grantor=national_user)
            # check whether all resources are owned by czo national
            for r in user_resources | group_resources:
                if not UserResourcePrivilege.objects.filter(user=national_user,
                                                            privilege=PrivilegeCodes.OWNER,
                                                            resource=r).exists():
                    print(" SHARING {} {} with czo national user"
                          .format(r.short_id, r.title.encode('ascii', 'ignore')))
                    UserResourcePrivilege.share(user=national_user,
                                                resource=r,
                                                privilege=PrivilegeCodes.OWNER,
                                                grantor=national_user)
                    # set quota holder to CZO national
                    set_quota_holder(r, national_user)
            # Now everything is owned by CZO national so we can remove other owners safely.
            if czo_user != national_user:
                # Check that all resources have the appropriate prefix
                for r in user_resources | group_resources:  # or r in user_resources for non-czo
                    check_resource_prefix(czo_user, czo_group, r, czo_prefix, czo_mapper, national_user)
                # refresh for user and group changes from above
                user_resources = set(BaseResource.objects.filter(r2urp__user=czo_user))
                group_resources = set(BaseResource.objects.filter(r2grp__group=czo_group))
                # Now every resource is filed in the appropriate group,
                # and non-matching resources are owned by CZO National.
                # group owner should own all group resources and vice versa.
                # This will only pick up changes for resources that had the proper prefix.
                if len(user_resources - group_resources) != 0:
                    print(" The following user resources are not group resources")
                    for r in (user_resources - group_resources):
                        check_resource_group(czo_group, r, national_user)
                    # refresh group membership
                    group_resources = set(BaseResource.objects.filter(r2grp__group=czo_group))
                if len(group_resources - user_resources) != 0:
                    print(" The following group resources are not user resources:")
                    for r in (group_resources - user_resources):
                        check_resource_owners(national_user, czo_user, r, national_user)
                    # refresh ownership
                    user_resources = set(BaseResource.objects.filter(r2urp__user=czo_user))
            else:
                # czo national user and group only runs this clause
                # no assumption that user resources and group resources are the same.
                # * user resources are all resources.
                # * group resources are those that come from multiple sources.
                # Check that all resources have the appropriate prefix
                for r in group_resources:  # no user_resources because that's everything
                    check_resource_prefix(czo_user, czo_group, r, czo_prefix,
                                          czo_mapper, national_user)
                # pick up changes from above
                group_resources = set(BaseResource.objects.filter(r2grp__group=czo_group))
                for r in group_resources:
                    check_resource_group(czo_group, r, national_user)
                # pick up changes from above
                group_resources = set(BaseResource.objects.filter(r2grp__group=czo_group))
                for r in group_resources:
                    check_resource_owners(national_user, czo_user, r, national_user)
def is_equal_to_as_set(l1, l2):
    """Return True when l1 and l2 hold the same elements, ignoring order
    and duplicates.

    :param l1: first list
    :param l2: second list
    :return: whether the two lists match as sets
    """
    # An empty symmetric difference means neither side holds an element
    # the other lacks.
    first, second = set(l1), set(l2)
    return len(first.symmetric_difference(second)) == 0
| bsd-3-clause | 3181b33da9b3874e40b821f465a35fc3 | 46.747405 | 104 | 0.558446 | 4.347511 | false | false | false | false |
hydroshare/hydroshare | hs_core/tests/api/rest/test_create_resource_version.py | 1 | 1071 | from rest_framework import status
from hs_core.hydroshare import resource
from .base import HSRESTTestCase
class TestCreateResourceVersion(HSRESTTestCase):
    """REST tests for the create-resource-version endpoint."""

    def setUp(self):
        super(TestCreateResourceVersion, self).setUp()
        self.rtype = 'GenericResource'
        self.title = 'My Test resource'
        self.pid = resource.create_resource(self.rtype,
                                            self.user,
                                            self.title).short_id

    def test_create_resource_version(self):
        """Versioning an existing resource is accepted (HTTP 202)."""
        response = self.client.post("/hsapi/resource/%s/version/" % self.pid,
                                    {}, format='json')
        # the body carries the new version's id; schedule it for cleanup
        self.resources_to_delete.append(response.content.decode())
        self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)

    def test_create_version_bad_resource(self):
        """Versioning a nonexistent resource yields HTTP 404."""
        response = self.client.post("/hsapi/resource/%s/version/" % "fafafa",
                                    {}, format='json')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
| bsd-3-clause | 6b96b38c25cd5c0828b9e4a67182a496 | 35.931034 | 73 | 0.637722 | 3.9375 | false | true | false | false |
hydroshare/hydroshare | hs_core/management/commands/check_bag.py | 1 | 12581 | # -*- coding: utf-8 -*-
"""
Generate metadata and bag for a resource from Django
"""
import os
import requests
from django.conf import settings
from django.core.management.base import BaseCommand
from hs_core.models import BaseResource
from hs_core.hydroshare import hs_requests
from hs_core.hydroshare.hs_bagit import create_bag_metadata_files
from hs_core.tasks import create_bag_by_irods
from django_irods.icommands import SessionException
def _fetch_bag(rid, options, open_only):
    """GET the bag for *rid* over HTTPS with basic auth from *options*.

    Streams the response into tmp/check_bag_block. When *open_only* is
    True only the first chunk is written -- enough to prove the bag can
    be opened without downloading it all.
    """
    server = getattr(settings, 'FQDN_OR_IP', 'www.hydroshare.org')
    uri = "https://{}/hsapi/resource/{}/".format(server, rid)
    print("download uri is {}".format(uri))
    r = hs_requests.get(uri, verify=False, stream=True,
                        auth=requests.auth.HTTPBasicAuth(options['login'],
                                                         options['password']))
    print("download return status is {}".format(str(r.status_code)))
    print("redirects:")
    for thing in r.history:
        print("...url: {}".format(thing.url))
    filename = 'tmp/check_bag_block'
    with open(filename, 'wb') as fd:
        for chunk in r.iter_content(chunk_size=128):
            fd.write(chunk)
            if open_only:
                break


def check_bag(rid, options):
    """Report and optionally repair the metadata/bag state of one resource.

    :param rid: short_id of the resource to check
    :param options: parsed management-command options; the boolean flags
        (reset*, generate*, download_bag, open_bag, if_needed) select what
        to do, and login/password authenticate bag downloads.
    """
    requests.packages.urllib3.disable_warnings()
    try:
        resource = BaseResource.objects.get(short_id=rid)
        istorage = resource.get_irods_storage()
        root_exists = istorage.exists(resource.root_path)

        if root_exists:
            # print status of metadata/bag system
            scimeta_path = os.path.join(resource.root_path, 'data',
                                        'resourcemetadata.xml')
            scimeta_exists = istorage.exists(scimeta_path)
            if scimeta_exists:
                print("resource metadata {} found".format(scimeta_path))
            else:
                print("resource metadata {} NOT FOUND".format(scimeta_path))

            resmap_path = os.path.join(resource.root_path, 'data', 'resourcemap.xml')
            resmap_exists = istorage.exists(resmap_path)
            if resmap_exists:
                print("resource map {} found".format(resmap_path))
            else:
                print("resource map {} NOT FOUND".format(resmap_path))

            bag_exists = istorage.exists(resource.bag_path)
            if bag_exists:
                print("bag {} found".format(resource.bag_path))
            else:
                print("bag {} NOT FOUND".format(resource.bag_path))

            dirty = resource.getAVU('metadata_dirty')
            print("{}.metadata_dirty is {}".format(rid, str(dirty)))
            modified = resource.getAVU('bag_modified')
            print("{}.bag_modified is {}".format(rid, str(modified)))

            if options['reset']:  # reset all data to pristine
                resource.setAVU('metadata_dirty', 'true')
                print("{}.metadata_dirty set to true".format(rid))
                try:
                    istorage.delete(resource.scimeta_path)
                    print("{} deleted".format(resource.scimeta_path))
                except SessionException as ex:
                    print("{} delete failed: {}"
                          .format(resource.scimeta_path,
                                  ex.stderr))
                try:
                    istorage.delete(resource.resmap_path)
                    print("{} deleted".format(resource.resmap_path))
                except SessionException as ex:
                    print("{} delete failed: {}"
                          .format(resource.resmap_path,
                                  ex.stderr))
                resource.setAVU('bag_modified', 'true')
                print("{}.bag_modified set to true".format(rid))
                try:
                    istorage.delete(resource.bag_path)
                    print("{} deleted".format(resource.bag_path))
                except SessionException as ex:
                    print("{} delete failed: {}"
                          .format(resource.bag_path,
                                  ex.stderr))

            if options['reset_metadata']:
                resource.setAVU('metadata_dirty', 'true')
                print("{}.metadata_dirty set to true".format(rid))
                try:
                    istorage.delete(resource.scimeta_path)
                    print("{} deleted".format(resource.scimeta_path))
                except SessionException as ex:
                    print("delete of {} failed: {}"
                          .format(resource.scimeta_path,
                                  ex.stderr))
                try:
                    istorage.delete(resource.resmap_path)
                    print("{} deleted".format(resource.resmap_path))
                except SessionException as ex:
                    print("{} delete failed: {}"
                          .format(resource.resmap_path,
                                  ex.stderr))

            if options['reset_bag']:
                resource.setAVU('bag_modified', 'true')
                print("{}.bag_modified set to true".format(rid))
                try:
                    istorage.delete(resource.bag_path)
                    print("{} deleted".format(resource.bag_path))
                except SessionException as ex:
                    print("{} delete failed: {}"
                          .format(resource.bag_path,
                                  ex.stderr))

            if options['generate']:  # generate usable bag
                if not options['if_needed'] or dirty or not scimeta_exists or not resmap_exists:
                    try:
                        create_bag_metadata_files(resource)
                    except ValueError as e:
                        print("{}: value error encountered: {}".format(rid, str(e)))
                        return
                    print("{} metadata generated from Django".format(rid))
                    resource.setAVU('metadata_dirty', 'false')
                    resource.setAVU('bag_modified', 'true')
                    print("{}.metadata_dirty set to false".format(rid))
                if not options['if_needed'] or modified or not bag_exists:
                    create_bag_by_irods(rid)
                    print("{} bag generated from iRODs".format(rid))
                    resource.setAVU('bag_modified', 'false')
                    print("{}.bag_modified set to false".format(rid))

            if options['generate_metadata']:
                if not options['if_needed'] or dirty or not scimeta_exists or not resmap_exists:
                    try:
                        create_bag_metadata_files(resource)
                    except ValueError as e:
                        print("{}: value error encountered: {}".format(rid, str(e)))
                        return
                    print("{}: metadata generated from Django".format(rid))
                    resource.setAVU('metadata_dirty', 'false')
                    print("{}.metadata_dirty set to false".format(rid))
                    resource.setAVU('bag_modified', 'true')
                    # BUGFIX: the AVU is set to 'true'; the old message
                    # incorrectly reported 'false'
                    print("{}.bag_modified set to true".format(rid))

            if options['generate_bag']:
                if not options['if_needed'] or modified or not bag_exists:
                    create_bag_by_irods(rid)
                    print("{}: bag generated from iRODs".format(rid))
                    resource.setAVU('bag_modified', 'false')
                    print("{}.bag_modified set to false".format(rid))

            if options['download_bag']:
                if options['password']:
                    _fetch_bag(rid, options, open_only=False)
                else:
                    print("cannot download bag without username and password.")

            if options['open_bag']:
                if options['password']:
                    _fetch_bag(rid, options, open_only=True)
                else:
                    print("cannot open bag without username and password.")

        else:
            print("Resource with id {} does not exist in iRODS".format(rid))
    except BaseResource.DoesNotExist:
        print("Resource with id {} NOT FOUND in Django".format(rid))
class Command(BaseCommand):
    help = "Create metadata files and bag for a resource."

    def add_arguments(self, parser):
        """Register positional resource ids and the action/credential flags."""
        # a list of resource id's, or none to check all resources
        parser.add_argument('resource_ids', nargs='*', type=str)

        # Named (optional) arguments
        parser.add_argument(
            '--reset',
            action='store_true',  # True for presence, False for absence
            dest='reset',  # value is options['reset']
            help='delete metadata and bag and start over'
        )

        parser.add_argument(
            '--reset_metadata',
            action='store_true',  # True for presence, False for absence
            dest='reset_metadata',  # value is options['reset_metadata']
            help='delete metadata files and start over'
        )

        parser.add_argument(
            '--reset_bag',
            action='store_true',  # True for presence, False for absence
            dest='reset_bag',  # value is options['reset_bag']
            help='delete bag and start over'
        )

        parser.add_argument(
            '--generate',
            action='store_true',  # True for presence, False for absence
            dest='generate',  # value is options['generate']
            help='force generation of metadata and bag'
        )

        parser.add_argument(
            '--generate_metadata',
            action='store_true',  # True for presence, False for absence
            dest='generate_metadata',  # value is options['generate_metadata']
            help='force generation of metadata and bag'
        )

        parser.add_argument(
            '--generate_bag',
            action='store_true',  # True for presence, False for absence
            dest='generate_bag',  # value is options['generate_bag']
            help='force generation of metadata and bag'
        )

        parser.add_argument(
            '--if_needed',
            action='store_true',  # True for presence, False for absence
            dest='if_needed',  # value is options['if_needed']
            help='generate only if not present'
        )

        parser.add_argument(
            '--download_bag',
            action='store_true',  # True for presence, False for absence
            dest='download_bag',  # value is options['download_bag']
            help='try downloading the bag'
        )

        parser.add_argument(
            '--open_bag',
            action='store_true',  # True for presence, False for absence
            dest='open_bag',  # value is options['open_bag']
            help='try opening the bag in http without downloading'
        )

        parser.add_argument(
            '--login',
            default='admin',
            dest='login',  # value is options['login']
            help='HydroShare login name'
        )

        parser.add_argument(
            '--password',
            default=None,
            dest='password',  # value is options['password']
            help='HydroShare password'
        )

    def handle(self, *args, **options):
        """Check the named resources, or every resource when none are given."""
        if len(options['resource_ids']) > 0:  # an array of resource short_id to check.
            for rid in options['resource_ids']:
                check_bag(rid, options)
        else:
            for r in BaseResource.objects.all():
                check_bag(r.short_id, options)
| bsd-3-clause | 9333949270f038c91856deaf7b818632 | 42.382759 | 96 | 0.507193 | 4.678691 | false | false | false | false |
hydroshare/hydroshare | theme/management/commands/email_users.py | 1 | 1227 | from django.core.management.base import BaseCommand
from django.core.mail import send_mail
from django.conf import settings
from django.contrib.auth.models import User
from theme.utils import get_quota_message
class Command(BaseCommand):
    """Management command that emails users their quota warnings."""

    help = "Send users emails for reporting over-quota usages and warnings, etc."

    def add_arguments(self, parser):
        # zero or more usernames, e.g. --username alice bob
        parser.add_argument('--username', nargs='*')

    def handle(self, *args, **options):
        # BUGFIX: with nargs='*' argparse stores None when --username is
        # omitted entirely, so iterate an empty list instead of crashing.
        unames = options['username'] or []
        for uname in unames:
            # single lookup instead of the original exists() + first() pair
            user = User.objects.filter(username=uname).first()
            if user is None:
                continue  # silently skip unknown usernames, as before
            uqs = user.quotas.all()
            if not uqs:
                continue  # user has no quota records, nothing to report
            msg_str = 'Dear ' + uname + ':\n\n'
            msg_str += get_quota_message(user)
            msg_str += '\n\nHydroShare Support'
            subject = 'HydroShare Quota warning'
            # send email for people monitoring and follow-up as needed
            send_mail(subject, msg_str, settings.DEFAULT_FROM_EMAIL,
                      [user.email])
| bsd-3-clause | d8e763c99aa5fa9607346773b3e9b70f | 37.34375 | 81 | 0.568052 | 4.320423 | false | false | false | false |
hydroshare/hydroshare | hs_core/tests/api/rest/test_set_file_type.py | 1 | 7109 | import os
from django.core.files.uploadedfile import UploadedFile
from rest_framework import status
from hs_core.hydroshare import resource
from hs_core.hydroshare.utils import resource_file_add_process
from hs_core.views.utils import create_folder, move_or_rename_file_or_folder
from .base import HSRESTTestCase
class TestSetFileTypeEndPoint(HSRESTTestCase):
    """REST tests for the set-file-type resource function endpoint.

    Each test uploads the same raster file and then posts to the endpoint;
    the shared setup is factored into private helpers.
    """

    # URL pattern of the endpoint under test
    URL_TEMPLATE = "/hsapi/resource/{res_id}/functions/set-file-type/{file_path}/{file_type}/"

    def setUp(self):
        super(TestSetFileTypeEndPoint, self).setUp()
        self.raster_file_name = 'cea.tif'
        self.raster_file_path = 'hs_core/tests/data/cea.tif'
        self.rtype = 'CompositeResource'
        self.title = 'My Test resource'
        self.resource = resource.create_resource(self.rtype,
                                                 self.user,
                                                 self.title)
        self.resources_to_delete.append(self.resource.short_id)

    def _add_raster_file(self):
        """Upload the test raster into the (empty) resource and return its ResourceFile."""
        # resource should have no file at this point
        self.assertEqual(self.resource.files.count(), 0)
        tif_file_obj = open(self.raster_file_path, "rb")
        try:
            uploaded_file = UploadedFile(file=tif_file_obj,
                                         name=os.path.basename(tif_file_obj.name))
            resource_file_add_process(resource=self.resource, files=(uploaded_file,),
                                      user=self.user, auto_aggregate=False)
        finally:
            # BUGFIX: the original tests leaked this file handle
            # (assumes resource_file_add_process consumes the upload during the call)
            tif_file_obj.close()
        # resource should have one file at this point
        self.assertEqual(self.resource.files.count(), 1)
        res_file = self.resource.files.all().first()
        self.assertEqual(res_file.file_name, self.raster_file_name)
        return res_file

    def _post_set_file_type(self, file_path, file_type):
        """POST to the set-file-type endpoint and return the response."""
        url = self.URL_TEMPLATE.format(res_id=self.resource.short_id,
                                       file_path=file_path,
                                       file_type=file_type)
        return self.client.post(url, {}, format='json')

    def test_set_file_type_success_1(self):
        # setting a tif file at the resource root to GeoRaster should succeed
        self._add_raster_file()
        response = self._post_set_file_type(self.raster_file_name, "GeoRaster")
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

    def test_set_file_type_success_2(self):
        # setting a tif file that lives in a sub folder should also succeed
        self._add_raster_file()
        create_folder(self.resource.short_id, 'data/contents/sub_test_dir')
        # move the uploaded file into the new folder
        move_or_rename_file_or_folder(self.user, self.resource.short_id,
                                      'data/contents/' + self.raster_file_name,
                                      'data/contents/sub_test_dir/' + self.raster_file_name)
        res_file = self.resource.files.all().first()
        self.assertEqual(res_file.short_path, "sub_test_dir/" + self.raster_file_name)
        response = self._post_set_file_type(res_file.short_path, "GeoRaster")
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)

    def test_set_file_type_failure_1(self):
        # a tif file cannot be set to the NetCDF aggregation type
        self._add_raster_file()
        response = self._post_set_file_type(self.raster_file_name, "NetCDF")
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_set_file_type_failure_2(self):
        # a non-existent file path must be rejected
        self._add_raster_file()
        file_path = os.path.join("no-such-folder", self.raster_file_name)
        response = self._post_set_file_type(file_path, "GeoRaster")
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
| bsd-3-clause | ec564c292923c6ef438d7824121734e8 | 51.272059 | 98 | 0.604867 | 3.916804 | false | true | false | false |
hydroshare/hydroshare | theme/migrations/0017_phone_validation_message.py | 1 | 1384 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2021-05-10 21:40
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: replace the RegexValidator error message on
    UserProfile.phone_1 and phone_2 to explain the new digits-only format.
    Do not edit the field definitions by hand.
    """

    dependencies = [
        ('theme', '0016_userprofile_phone_validations'),
    ]

    operations = [
        # phone_1: same field shape as 0016, updated validator message
        migrations.AlterField(
            model_name='userprofile',
            name='phone_1',
            field=models.CharField(blank=True, max_length=16, null=True, validators=[django.core.validators.RegexValidator(message="Our validation for phone numbers has recently changed. Please ensure that your phone number is entered in the following format: '999999999'. Up to 16 digits are allowed. If you made other changes to your profile - please make sure they saved successfully.", regex='^\\d{8,15}$')]),
        ),
        # phone_2: identical treatment
        migrations.AlterField(
            model_name='userprofile',
            name='phone_2',
            field=models.CharField(blank=True, max_length=16, null=True, validators=[django.core.validators.RegexValidator(message="Our validation for phone numbers has recently changed. Please ensure that your phone number is entered in the following format: '999999999'. Up to 16 digits are allowed. If you made other changes to your profile - please make sure they saved successfully.", regex='^\\d{8,15}$')]),
        ),
    ]
| bsd-3-clause | b359a4b6a1aedb5f060556396cc1e128 | 52.230769 | 414 | 0.690751 | 4.245399 | false | false | false | false |
hydroshare/hydroshare | hs_access_control/models/group.py | 1 | 17912 | from django.contrib.auth.models import User, Group
from django.db import models
from django.db.models import Q, F, Exists, OuterRef
from django.contrib.contenttypes.models import ContentType
from hs_core.models import BaseResource
from hs_access_control.models.privilege import PrivilegeCodes, UserGroupPrivilege
from hs_access_control.models.community import Community
from sorl.thumbnail import ImageField as ThumbnailImageField
from theme.utils import get_upload_path_group
#############################################
# Group access data.
#
# GroupAccess has a one-to-one correspondence with the Group object
# and contains access control flags and methods specific to groups.
#
# To avoid UI difficulties, there has been an explicit decision not to modify
# the display routines for groups to display communities of groups.
# Rather, communities are exposed through a separate module community.py
# Only access-list functions have been modified for communities.
# * GroupAccess.view_resources and GroupAccess.edit_resources
# do not reflect community privileges.
# * GroupAccess.get_resources_with_explicit_access does *not* reflect
# community privileges.
# (Revised Sept 17, 2021)
#############################################
class GroupMembershipRequest(models.Model):
    """A pending group-membership request or invitation.

    A row represents either a user asking to join a group, or a group owner
    inviting a user; the two cases are distinguished by ``invitation_to``.
    """

    # the user the request/invitation concerns (the prospective member)
    request_from = models.ForeignKey(User, on_delete=models.CASCADE, related_name='ru2gmrequest')

    # when user is requesting to join a group this will be blank
    # when a group owner is sending an invitation, this field will represent the inviting user
    invitation_to = models.ForeignKey(User, on_delete=models.CASCADE, null=True, blank=True,
                                      related_name='iu2gmrequest')

    # the group being joined
    group_to_join = models.ForeignKey(Group, on_delete=models.CASCADE, related_name='g2gmrequest')
    date_requested = models.DateTimeField(editable=False, auto_now_add=True)
    # optional free text supplied with the request (see GroupAccess.requires_explanation)
    explanation = models.TextField(null=True, blank=True, max_length=300)
    # True once the request has been acted upon (accepted/declined)
    redeemed = models.BooleanField(default=False)
class GroupAccess(models.Model):
    """
    Group profile object: access-control flags and query helpers for one Group.

    Members are actually recorded in a separate model (UserGroupPrivilege);
    membership is equivalent with holding some privilege over the group.
    There is a well-defined notion of PrivilegeCodes.NONE for a group, which
    would be a member with no privileges over the group (not even viewing the
    member list); however, this is currently disallowed.

    NOTE: per the module header, view_resources/edit_resources and
    get_resources_with_explicit_access deliberately do NOT reflect
    community-based privileges; communities are exposed via community.py.
    """

    # Django Group object: this has a side effect of creating Group.gaccess back relation.
    group = models.OneToOneField(Group, on_delete=models.CASCADE,
                                 editable=False,
                                 null=False,
                                 related_name='gaccess',
                                 related_query_name='gaccess',
                                 help_text='group object that this object protects')

    # access-control flags; all are code-managed (editable=False), not set via admin forms
    active = models.BooleanField(default=True,
                                 editable=False,
                                 help_text='whether group is currently active')

    discoverable = models.BooleanField(default=True,
                                       editable=False,
                                       help_text='whether group description is discoverable by everyone')

    public = models.BooleanField(default=True,
                                 editable=False,
                                 help_text='whether group members can be listed by everyone')

    shareable = models.BooleanField(default=True,
                                    editable=False,
                                    help_text='whether group can be shared by non-owners')

    auto_approve = models.BooleanField(default=False,
                                       editable=False,
                                       help_text='whether group membership can be auto approved')

    requires_explanation = models.BooleanField(default=False, editable=False,
                                               help_text='whether membership requests include explanation')

    # free-form profile fields
    description = models.TextField(null=False, blank=False)
    purpose = models.TextField(null=True, blank=True)
    date_created = models.DateTimeField(editable=False, auto_now_add=True)
    picture = ThumbnailImageField(upload_to=get_upload_path_group, null=True, blank=True)

    ####################################
    # group membership: owners, edit_users, view_users are parallel to those in resources
    ####################################

    @property
    def owners(self):
        """
        Return list of owners for a group.

        :return: QuerySet of users

        Users can only own groups via direct links. Community-based ownership is not possible.
        """
        return User.objects.filter(is_active=True,
                                   u2ugp__group=self.group,
                                   u2ugp__privilege=PrivilegeCodes.OWNER).select_related('userprofile')

    @property
    def __edit_users_of_group(self):
        """
        Q expression for users who can edit a group according to group privilege
        (privilege CHANGE or better).
        """
        return Q(is_active=True,
                 u2ugp__group=self.group,
                 u2ugp__privilege__lte=PrivilegeCodes.CHANGE)

    @property
    def edit_users(self):
        """
        Return list of users who can add members to a group.

        :return: QuerySet of users

        This eliminates duplicates due to multiple invitations.
        """
        return User.objects.filter(self.__edit_users_of_group)

    @property
    def __view_users_of_group(self):
        """
        Q expression for users who can view a group according to group privilege
        (privilege VIEW or better).
        """
        return Q(is_active=True,
                 u2ugp__group=self.group,
                 u2ugp__privilege__lte=PrivilegeCodes.VIEW)

    @property
    def view_users(self):
        """
        Return list of users who can view a group.

        :return: QuerySet of users

        This eliminates duplicates due to multiple memberships,
        unlike members, which just lists explicit group members.
        """
        return User.objects.filter(self.__view_users_of_group)

    @property
    def members(self):
        """
        Return list of members for a group. This does not include communities.

        :return: QuerySet of users

        This eliminates duplicates due to multiple invitations.
        """
        return User.objects.filter(is_active=True,
                                   u2ugp__group=self.group,
                                   u2ugp__privilege__lte=PrivilegeCodes.VIEW).select_related('userprofile')

    @property
    def viewers(self):
        """ viewers are group members (of an active group) """
        return User.objects.filter(
            Q(is_active=True) &
            (Q(u2ugp__group__gaccess__active=True,
               u2ugp__group=self.group))).distinct()

    # NOTE(review): unlike the surrounding accessors this is a plain method,
    # not a property — callers must use parentheses.
    def communities(self):
        """
        Return list of communities of which this group is a member.

        :return: QuerySet of communities
        """
        return Community.objects.filter(c2gcp__group=self.group)

    @property
    def __view_resources_of_group(self):
        """
        Q expression: resources viewable according to group privileges.
        Used in queries of BaseResource.
        """
        return Q(r2grp__group=self.group)

    @property
    def __edit_resources_of_group(self):
        """
        Q expression: resources editable according to group privileges
        (CHANGE or better, excluding immutable resources).
        Used in queries of BaseResource.
        """
        return Q(r2grp__group=self.group,
                 raccess__immutable=False,
                 r2grp__privilege__lte=PrivilegeCodes.CHANGE)

    @property
    def __owned_resources_of_group(self):
        """
        Q expression: resources owned by some group member.
        Used in queries of BaseResource.
        """
        return Q(r2grp__group=self.group,
                 r2urp__user__u2ugp__group=self.group,
                 r2urp__privilege=PrivilegeCodes.OWNER)

    @property
    def view_resources(self):
        """
        QuerySet of resources held by group.

        :return: QuerySet of resource objects held by group.
        """
        return BaseResource.objects.filter(self.__view_resources_of_group).select_related('raccess')

    @property
    def edit_resources(self):
        """
        QuerySet of resources that can be edited by group.

        :return: QuerySet of resource objects that can be edited by this group.
        """
        return BaseResource.objects.filter(self.__edit_resources_of_group)

    @property
    def owned_resources(self):
        """
        QuerySet of resources that are owned by some group member.

        :return: QuerySet of resource objects owned by some group member.

        This is independent of whether the resource is editable by the group.
        """
        return BaseResource.objects.filter(self.__owned_resources_of_group)

    @property
    def group_membership_requests(self):
        """
        get a list of pending group membership requests for this group (self)

        :return: QuerySet of unredeemed requests for this (active) group
        """
        return GroupMembershipRequest.objects.filter(group_to_join=self.group,
                                                     group_to_join__gaccess__active=True,
                                                     redeemed=False)

    def get_resources_with_explicit_access(self, this_privilege):
        """
        Get a list of resources for which the group has the specified privilege

        :param this_privilege: one of the PrivilegeCodes
        :return: QuerySet of resource objects (QuerySet)

        This routine is an attempt to organize resources for displayability. It looks at the
        effective privilege rather than declared privilege, and squashes privilege that is in
        conflict with resource flags. If the resource is immutable, it is reported as a "VIEW"
        resource when the permission is "CHANGE", and as the original resource otherwise.
        """
        if __debug__:
            assert this_privilege >= PrivilegeCodes.OWNER and this_privilege <= PrivilegeCodes.VIEW

        # this query computes resources with privilege X as follows:
        # a) There is a privilege of X for the object for group.
        # b) There is no lower privilege in either group privileges for the object.
        # c) Thus X is the effective privilege of the object.
        if this_privilege == PrivilegeCodes.OWNER:
            return BaseResource.objects.none()  # groups cannot own resources
        elif this_privilege == PrivilegeCodes.CHANGE:
            # CHANGE does not include immutable resources
            return BaseResource.objects.filter(raccess__immutable=False,
                                               r2grp__privilege=this_privilege,
                                               r2grp__group=self.group)
            # there are no excluded resources; maximum privilege is CHANGE
        else:  # this_privilege == PrivilegeCodes.VIEW
            # VIEW includes CHANGE & immutable as well as explicit VIEW
            return BaseResource.objects.filter(Q(r2grp__privilege=PrivilegeCodes.VIEW,
                                                 r2grp__group=self.group) |
                                               Q(raccess__immutable=True,
                                                 r2grp__privilege=PrivilegeCodes.CHANGE,
                                                 r2grp__group=self.group)).distinct()

    def get_users_with_explicit_access(self, this_privilege):
        """
        Get a list of users for which the group has the specified privilege

        :param this_privilege: one of the PrivilegeCodes
        :return: QuerySet of user objects (QuerySet)

        This does not account for community privileges. Just group privileges.
        """
        if __debug__:
            assert this_privilege >= PrivilegeCodes.OWNER and this_privilege <= PrivilegeCodes.VIEW

        if this_privilege == PrivilegeCodes.OWNER:
            return self.owners
        elif this_privilege == PrivilegeCodes.CHANGE:
            return User.objects.filter(is_active=True,
                                       u2ugp__group=self.group,
                                       u2ugp__privilege=PrivilegeCodes.CHANGE)
        else:  # this_privilege == PrivilegeCodes.VIEW
            return User.objects.filter(is_active=True,
                                       u2ugp__group=self.group,
                                       u2ugp__privilege=PrivilegeCodes.VIEW)

    def get_effective_privilege(self, this_user):
        """
        Return cumulative privilege for a user over a group

        :param this_user: User to check
        :return: Privilege code 1-4 (PrivilegeCodes.NONE for inactive users
                 and non-members)

        This does not account for community privileges. Just group privileges.
        """
        if not this_user.is_active:
            return PrivilegeCodes.NONE
        try:
            p = UserGroupPrivilege.objects.get(group=self.group,
                                               user=this_user)
            return p.privilege
        except UserGroupPrivilege.DoesNotExist:
            return PrivilegeCodes.NONE

    @classmethod
    def groups_with_public_resources(cls):
        """ Return the list of groups that have discoverable or public resources

        These must contain at least one resource that is discoverable and
        is owned by a group member.

        This query is subtle. See
        https://medium.com/@hansonkd/\
        the-dramatic-benefits-of-django-subqueries-and-annotations-4195e0dafb16
        for details of how this improves performance.

        As a short summary, all we need to know is that one resource exists.
        This is not possible to notate in the main query except through an annotation.
        However, that annotation is really efficient, and is implemented as a postgres
        subquery. This is a Django 1.11 extension.
        """
        return Group.objects\
            .annotate(
                has_public_resources=Exists(
                    BaseResource.objects.filter(
                        raccess__discoverable=True,
                        r2grp__group__id=OuterRef('id'),
                        r2urp__user__u2ugp__group__id=OuterRef('id'),
                        r2urp__privilege=PrivilegeCodes.OWNER)))\
            .filter(has_public_resources=True)\
            .order_by('name')

    @property
    def public_resources(self):
        """
        prepare a list of everything that gets displayed about each resource in a group.

        Based upon hs_access_control/models/community.py:Community:public_resources
        """
        # visible (public/published/discoverable) resources of this active group
        # that are owned by some member of this group; annotated with display fields
        res = BaseResource.objects.filter(r2grp__group__gaccess=self,
                                          r2grp__group__gaccess__active=True)\
            .filter(Q(raccess__public=True) |
                    Q(raccess__published=True) |
                    Q(raccess__discoverable=True))\
            .filter(r2urp__privilege=PrivilegeCodes.OWNER,
                    r2urp__user__u2ugp__group=self.group)\
            .annotate(group_name=F("r2grp__group__name"),
                      group_id=F("r2grp__group__id"),
                      public=F("raccess__public"),
                      published=F("raccess__published"),
                      discoverable=F("raccess__discoverable"))

        res = res.only('title', 'resource_type', 'created', 'updated')

        # # Can't do the following because the content model is polymorphic.
        # # This is documented as only working for monomorphic content_type
        # res = res.prefetch_related("content_object___title",
        #                            "content_object___description",
        #                            "content_object__creators")
        # We want something that is not O(# resources + # content types).
        # O(# content types) is sufficiently faster.
        # The following strategy is documented here:
        # https://blog.roseman.org.uk/2010/02/22/django-patterns-part-4-forwards-generic-relations/

        # collect generics from resources: content_type id -> set of object ids
        generics = {}
        for item in res:
            generics.setdefault(item.content_type.id, set()).add(item.object_id)

        # fetch all content types in one query
        content_types = ContentType.objects.in_bulk(list(generics.keys()))

        # build a map between content types and the objects that use them.
        relations = {}
        for ct, fk_list in list(generics.items()):
            ct_model = content_types[ct].model_class()
            relations[ct] = ct_model.objects.in_bulk(list(fk_list))

        # force-populate the cache of content type objects.
        for item in res:
            setattr(item, '_content_object_cache',
                    relations[item.content_type.id][item.object_id])

        # Detailed notes:
        # This subverts chained lookup by pre-populating the content object cache
        # that is populated by an object reference. It is very dependent upon the
        # implementation of GenericRelation and its pre-fetching strategy.
        # Thus it is quite brittle and vulnerable to major revisions of Generics.
        return res

    @property
    def first_owner(self):
        """Return the earliest-granted owner of this group, or None if there is no owner."""
        opriv = UserGroupPrivilege.objects.filter(group=self.group, privilege=PrivilegeCodes.OWNER)\
            .order_by('start')
        opriv = list(opriv)
        if opriv:
            return opriv[0].user
        else:
            return None
| bsd-3-clause | a7c67395955711f1c0995d002207c0c8 | 40.752914 | 107 | 0.593904 | 4.662155 | false | false | false | false |
hydroshare/hydroshare | hs_tools_resource/utils.py | 1 | 1912 | import logging
from string import Template
from hs_collection_resource.models import CollectionResource
from hs_composite_resource.models import CompositeResource
logger = logging.getLogger(__name__)
def parse_app_url_template(url_template_string, term_dict_list=()):
    """
    Replace pre-defined HS Terms in url_template_string with real values.

    Example: http://www.myapps.com/app1/?res_type=${HS_RES_TYPE}
         --> http://www.myapps.com/app1/?res_type=GenericResource

    :param url_template_string: The url template string containing HS Terms
    :param term_dict_list: a list of dicts mapping Term Name to Term Value;
                           later dicts override earlier ones on key collision
    :return: the updated url string, or None if the template is empty or
             contains undefined/invalid terms
    """
    if not url_template_string:
        return None

    merged_term_dic = {}
    for term_dict in term_dict_list:
        merged_term_dic.update(term_dict)

    try:
        # Template.substitute raises KeyError for an undefined placeholder
        # and ValueError for malformed template syntax.
        return Template(url_template_string).substitute(merged_term_dic)
    except (KeyError, ValueError):
        # BUGFIX: the original used a bare `except Exception` plus a
        # `return` inside `finally`, which silently swallowed ANY exception
        # (even from the logging call itself). Now only template errors are
        # treated as "cannot be parsed".
        log_msg = "[WebApp] '{0}' cannot be parsed by term_dict {1}, skipping."
        log_msg = log_msg.format(url_template_string, str(merged_term_dic))
        logger.debug(log_msg)
        return None
def get_SupportedResTypes_choices():
    """Return [class name, verbose name] pairs for resource types a web app may support."""
    return [[res_class.__name__, res_class._meta.verbose_name]
            for res_class in (CompositeResource, CollectionResource)]
def get_SupportedSharingStatus_choices():
    """Return [value, label] pairs for the sharing statuses a web app may support."""
    statuses = ('Published', 'Public', 'Discoverable', 'Private')
    return [[status, status] for status in statuses]
| bsd-3-clause | 20ae771cda2e64ce4443cc7943d17c0c | 33.763636 | 101 | 0.664226 | 3.975052 | false | false | false | false |
hydroshare/hydroshare | hs_core/management/commands/modify_resource_id.py | 1 | 4041 | # -*- coding: utf-8 -*-
"""
Modify the resource id of an existing resource
"""
from django.core.management.base import BaseCommand, CommandError
from django.core.exceptions import ObjectDoesNotExist
from hs_core.models import BaseResource, short_id
from hs_core.hydroshare.utils import get_resource_by_shortkey
from hs_core.hydroshare.hs_bagit import create_bag
from uuid import UUID
from django.db import transaction, IntegrityError
from django_irods.icommands import SessionException
class Command(BaseCommand):
    """Rename a resource's short_id, updating every place that embeds it.

    Sequence: validate ids, delete the old bag, update DB state inside one
    transaction (short_id, slug, file paths, identifier URLs, aggregation
    dirty flags), then move the iRODS files and regenerate the bag.
    NOTE(review): the iRODS move and bag creation happen OUTSIDE the DB
    transaction, so a failure there leaves DB and storage inconsistent.
    """

    help = "Modify the resource id of an existing resource"

    def add_arguments(self, parser):
        # a list of resource id's, or none to check all resources
        parser.add_argument('resource_id', type=str, help=('Required. The existing id (short_id) of'
                                                           ' the resource'))
        # NOTE(review): declared as a positional argument, so argparse actually
        # requires it despite the "Optional" help text — confirm intent.
        parser.add_argument('new_resource_id', type=str,
                            help=('Optional. The new id (short_id) for the resource. A random one '
                                  'is generated if none is provided. Must be a valid string '
                                  'representation of a uuid hex'))

    def handle(self, *args, **options):
        """Perform the rename; raises CommandError on bad input, EnvironmentError on storage/DB failure."""
        if not options['resource_id']:
            raise CommandError('resource_id argument is required')
        res_id = options['resource_id']
        try:
            res = get_resource_by_shortkey(res_id, or_404=False)
        except ObjectDoesNotExist:
            raise CommandError("No Resource found for id {}".format(res_id))
        if options['new_resource_id']:
            # validate that the requested id parses as a uuid hex string
            try:
                UUID(options['new_resource_id'])
                new_res_id = options['new_resource_id']
            except Exception as e:
                raise CommandError('new_resource_id {} must be a valid uuid hex string'
                                   .format(options['new_resource_id']), e)
            # refuse to clobber an existing resource with the same id
            try:
                if BaseResource.objects.get(short_id=new_res_id):
                    raise CommandError('resource with id {} already exists'.format(new_res_id))
            except ObjectDoesNotExist:
                pass
        else:
            # no id supplied: generate a fresh random short_id
            new_res_id = short_id()

        # remove the old bag file from iRODS before renaming
        storage = res.get_irods_storage()
        if storage.exists(res.bag_path):
            try:
                storage.delete(res.bag_path)
                print("{} deleted".format(res.bag_path))
            except SessionException as ex:
                print("{} delete failed: {}".format(res.bag_path, ex.stderr))
                raise EnvironmentError()

        # all DB updates happen atomically; any IntegrityError rolls them back
        try:
            with transaction.atomic():
                print("Deleting existing bag")
                res.setAVU("bag_modified", True)
                res.setAVU('metadata_dirty', 'true')
                print("Updating BaseResource short_id from {} to {}".format(res_id, new_res_id))
                res.short_id = new_res_id
                res.save()
                print("Updating resource slug")
                res.set_slug('resource/{}'.format(new_res_id))
                print("Updating Resource files short_path")
                for file in res.files.all():
                    # short_path is everything after 'data/contents/'
                    file_name = file.short_path.split('data/contents/')[1]
                    file.set_short_path(file_name)
                print("Updating metadata identifiers")
                for i in res.metadata.identifiers.all():
                    # identifier URLs embed the short_id; rewrite them in place
                    i.url = i.url.replace(res_id, new_res_id)
                    i.save()
                print("Updating logical_files metadata")
                for aggregation in res.logical_files:
                    # force aggregation metadata regeneration on next access
                    aggregation.metadata.is_dirty = True
                    aggregation.metadata.save()
        except IntegrityError:
            raise EnvironmentError("Error occurred while updating")

        # move the files in iRODS to the new collection and rebuild the bag
        print("Moving Resource files")
        storage.moveFile(res_id, new_res_id)
        print("Creating Bag")
        create_bag(res)
        print(("Resource id successfully update from {} to {}".format(res_id, new_res_id)))
| bsd-3-clause | d192f8f22f14d261d5387137fc610ddb | 38.23301 | 100 | 0.564712 | 4.540449 | false | false | false | false |
hydroshare/hydroshare | hs_core/management/commands/delete_hanging_logical_files.py | 1 | 2806 | """Removes hanging LogicalFiles in composite resources. Hanging LogicalFiles do not have a
Resource nor reference any files.
"""
from django.core.management.base import BaseCommand
from hs_file_types.models.generic import GenericLogicalFile
from hs_file_types.models.geofeature import GeoFeatureLogicalFile
from hs_file_types.models.netcdf import NetCDFLogicalFile
from hs_file_types.models.raster import GeoRasterLogicalFile
from hs_file_types.models.reftimeseries import RefTimeseriesLogicalFile
from hs_file_types.models.timeseries import TimeSeriesLogicalFile
from hs_file_types.models.model_instance import ModelInstanceLogicalFile
from hs_file_types.models.model_program import ModelProgramLogicalFile
from hs_file_types.models.fileset import FileSetLogicalFile
def delete_hanging_logical_files(logical_files):
    """Delete logical files that have no resource or reference no files.

    Fileset aggregations and folder-based model-instance aggregations are
    legitimately allowed to be empty and are left untouched.

    :param logical_files: iterable of logical file (aggregation) objects
    :return: number of logical files deleted
    """
    deleted = 0
    for logical_file in logical_files:
        if hasattr(logical_file, 'resource'):
            if logical_file.files.all():
                continue  # has a resource and files: not hanging
            if logical_file.is_fileset:
                continue  # filesets may be empty
            if logical_file.is_model_instance and logical_file.folder:
                continue  # folder-based model instances may be empty
        # either no resource at all, or an empty non-exempt aggregation
        logical_file.delete()
        deleted += 1
    return deleted
class Command(BaseCommand):
    """Delete hanging logical files (aggregations) of every known type."""

    help = "Removes Logical Files without a resource and a file"

    # every aggregation (logical file) model that must be swept; the original
    # repeated the same two statements once per class — a data-driven loop is
    # equivalent and keeps the class list in one place.
    LOGICAL_FILE_CLASSES = (
        GenericLogicalFile,
        GeoFeatureLogicalFile,
        NetCDFLogicalFile,
        GeoRasterLogicalFile,
        RefTimeseriesLogicalFile,
        TimeSeriesLogicalFile,
        ModelInstanceLogicalFile,
        ModelProgramLogicalFile,
        FileSetLogicalFile,
    )

    def handle(self, *args, **options):
        """Sweep each aggregation type and report how many were deleted."""
        for lf_class in self.LOGICAL_FILE_CLASSES:
            count = delete_hanging_logical_files(lf_class.objects.all())
            # also normalizes the inconsistent pluralization of the original
            # first message (">> N GenericLogicalFiles deleted")
            print(">> {} {} deleted".format(count, lf_class.__name__))
| bsd-3-clause | dca77381cfc697f52ffd119d7099f7c9 | 48.22807 | 92 | 0.709195 | 4.163205 | false | false | false | false |
hydroshare/hydroshare | hs_access_control/management/commands/access_provenance.py | 1 | 1539 | """
This prints the provenance of an access control relationship between a user and a resource.
This is invaluable for access control debugging.
"""
from django.core.management.base import BaseCommand
from hs_core.models import BaseResource
from hs_access_control.models.utilities import access_provenance
from hs_core.hydroshare.utils import get_resource_by_shortkey
from hs_access_control.management.utilities import user_from_name
def usage():
    """Print command-line usage for the access-provenance management command."""
    print("Provenance usage:")
    print(" provenance {username} {resource-id}")
    print("Where:")
    print(" {username} is a user name.")
    print(" {resource-id} is a 32-character resource guid.")
class Command(BaseCommand):
    help = """Print access control provenance."""

    def add_arguments(self, parser):
        # both arguments are required positionals
        parser.add_argument('username', type=str)
        parser.add_argument('resource_id', type=str)

    def handle(self, *args, **options):
        """Resolve the user and resource, then print the access provenance."""
        username, resource_id = options['username'], options['resource_id']

        # guard clauses: bail out with usage on any unresolvable input
        if username is None or resource_id is None:
            usage()
            exit(1)

        user = user_from_name(username)
        if user is None:
            usage()
            exit(1)

        try:
            resource = get_resource_by_shortkey(resource_id, or_404=False)
        except BaseResource.DoesNotExist:
            print("No such resource {}.".format(resource_id))
            usage()
            exit(1)

        print(access_provenance(user, resource))
| bsd-3-clause | 66054f592b2dcb9f6fce78a1d16e9aaf | 29.176471 | 91 | 0.647823 | 4.007813 | false | false | false | false |
hydroshare/hydroshare | theme/templatetags/ratings_tags.py | 1 | 1295 |
from mezzanine import template
from mezzanine.generic.models import Rating
from theme.forms import RatingForm
register = template.Library()


@register.inclusion_tag("generic/includes/rating.html", takes_context=True)
def rating_for(context, obj):
    """
    Populate the template context needed to render the rating widget for
    *obj*: the object itself (under two aliases), a bound rating form, the
    cookie-based "already rated" flag, the authenticated user's own rating
    flag, and the aggregate rating statistics.
    """
    request = context["request"]
    context["rating_object"] = context["rating_obj"] = obj
    context["rating_form"] = RatingForm(request, obj, auto_id=False)
    # The "mezzanine-rating" cookie records every object this browser has
    # rated as "<meta>.<pk>" tokens; substring membership marks it rated.
    cookie_ratings = request.COOKIES.get("mezzanine-rating", "")
    context["rated"] = "%s.%s" % (obj._meta, obj.pk) in cookie_ratings
    field_name = obj.get_ratingfield_name()
    manager = getattr(obj, field_name)
    user = request.user
    you_rated = False
    if user.is_authenticated:
        try:
            # existence check only; the Rating instance itself is not needed
            manager.get(user=user)
        except Rating.DoesNotExist:
            you_rated = False
        else:  # rating for the requesting user exists
            you_rated = True
    context["you_rated"] = you_rated
    for suffix in ("average", "count", "sum"):
        context["rating_" + suffix] = getattr(obj, "%s_%s" % (field_name, suffix))
    return context.flatten()
hydroshare/hydroshare | hs_access_control/migrations/0006_auto_add_new_fields.py | 1 | 3421 | # -*- coding: utf-8 -*-
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    # Auto-generated schema migration.
    # Adds parallel, nullable "*new" foreign-key columns alongside the
    # existing privilege FKs on the three privilege tables; presumably a
    # transitional step so data can be copied before the old columns are
    # dropped — TODO confirm against the follow-up migrations.

    dependencies = [
        ('auth', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('hs_core', '0014_auto_20151123_1451'),
        ('hs_access_control', '0005_remove_useraccess_active'),
    ]

    operations = [
        migrations.AddField(
            model_name='groupresourceprivilege',
            name='grantornew',
            field=models.ForeignKey(related_name='x2grpnew', editable=False, on_delete=models.CASCADE, to=settings.AUTH_USER_MODEL, help_text='grantor of privilege', null=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='groupresourceprivilege',
            name='groupnew',
            field=models.ForeignKey(related_name='g2grpnew', editable=False, on_delete=models.CASCADE, to='auth.Group', help_text='group to be granted privilege', null=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='groupresourceprivilege',
            name='resourcenew',
            field=models.ForeignKey(related_name='r2grpnew', editable=False, on_delete=models.CASCADE, to='hs_core.BaseResource', help_text='resource to which privilege applies', null=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='usergroupprivilege',
            name='grantornew',
            field=models.ForeignKey(related_name='x2ugpnew', editable=False, on_delete=models.CASCADE, to=settings.AUTH_USER_MODEL, help_text='grantor of privilege', null=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='usergroupprivilege',
            name='groupnew',
            field=models.ForeignKey(related_name='g2ugpnew', editable=False, on_delete=models.CASCADE, to='auth.Group', help_text='group to which privilege applies', null=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='usergroupprivilege',
            name='usernew',
            field=models.ForeignKey(related_name='u2ugpnew', editable=False, on_delete=models.CASCADE, to=settings.AUTH_USER_MODEL, help_text='user to be granted privilege', null=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='userresourceprivilege',
            name='grantornew',
            field=models.ForeignKey(related_name='x2urpnew', editable=False, on_delete=models.CASCADE, to=settings.AUTH_USER_MODEL, help_text='grantor of privilege', null=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='userresourceprivilege',
            name='resourcenew',
            field=models.ForeignKey(related_name='r2urpnew', editable=False, on_delete=models.CASCADE, to='hs_core.BaseResource', help_text='resource to which privilege applies', null=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='userresourceprivilege',
            name='usernew',
            field=models.ForeignKey(related_name='u2urpnew', editable=False, on_delete=models.CASCADE, to=settings.AUTH_USER_MODEL, help_text='user to be granted privilege', null=True),
            preserve_default=True,
        ),
    ]
| bsd-3-clause | 824a73a0db84ff0ad952ce872e2e6a52 | 46.513889 | 190 | 0.636656 | 4.005855 | false | false | false | false |
hydroshare/hydroshare | hs_dictionary/views.py | 1 | 1048 | from django.db.models import Q
from rest_framework.views import APIView
from rest_framework.response import Response
from .models import University
from drf_yasg.utils import swagger_auto_schema
from functools import reduce
class ListUniversities(APIView):
    """
    View to list all known universities in the system
    """

    @swagger_auto_schema(auto_schema=None)
    def get(self, request, format=None, query=None):
        """
        Return university names matching every whitespace-separated word of
        the ``term`` query parameter (case-insensitive substring match on
        the university name).

        :return: Response with a list of matching university-name strings;
            an empty list for a blank query; or a single hint string when
            more than 50 names match.
        """
        terms = request.GET.get('term', '')
        # No-argument split() collapses runs of whitespace and returns []
        # for a blank query. The previous split(' ') produced [''] for an
        # empty term, which made the guard below always true and ran an
        # icontains='' filter over every row.
        term_list = terms.split()
        if term_list:
            # AND together one icontains condition per query word
            filtered_unis = University.objects.filter(
                reduce(lambda x, y: x & y, [Q(name__icontains=word) for word in term_list])
            )
            universities = [uni.name for uni in filtered_unis]
        else:
            universities = []
        if len(universities) > 50:
            universities = ['Too many items to list, please continue typing...']
        return Response(universities)
| bsd-3-clause | 875b523224eea53c26d4bb07030ff69c | 28.111111 | 91 | 0.621183 | 3.867159 | false | false | false | false |
hydroshare/hydroshare | hs_file_types/migrations/0005_reftimeseriesfilemetadata_reftimeserieslogicalfile.py | 1 | 1580 | # -*- coding: utf-8 -*-
from django.db import migrations, models
import django.contrib.postgres.fields
import django.contrib.postgres.fields.hstore
class Migration(migrations.Migration):
    # Auto-generated migration introducing the referenced-time-series
    # aggregation: a metadata model plus the logical-file model that owns it
    # through a one-to-one link.

    dependencies = [
        ('hs_file_types', '0004_geofeaturefilemetadata_geofeaturelogicalfile'),
    ]

    operations = [
        migrations.CreateModel(
            name='RefTimeseriesFileMetaData',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('extra_metadata', django.contrib.postgres.fields.hstore.HStoreField(default={})),
                ('keywords', django.contrib.postgres.fields.ArrayField(default=[], base_field=models.CharField(max_length=100, null=True, blank=True), size=None)),
                ('is_dirty', models.BooleanField(default=False)),
                ('json_file_content', models.TextField()),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='RefTimeseriesLogicalFile',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('dataset_name', models.CharField(max_length=255, null=True, blank=True)),
                ('metadata', models.OneToOneField(related_name='logical_file', on_delete=models.CASCADE, to='hs_file_types.RefTimeseriesFileMetaData')),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| bsd-3-clause | 0716fcc130f630b42f8fe27ceb87684e | 38.5 | 163 | 0.590506 | 4.364641 | false | false | false | false |
hydroshare/hydroshare | hs_access_control/migrations/0003_auto_20150824_2215.py | 1 | 3190 | # -*- coding: utf-8 -*-
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated migration: re-declares the foreign keys on the three
    # privilege tables (grantor/group/user/resource) with related_name,
    # editable=False and explicit CASCADE deletion; no columns are added
    # or removed.

    dependencies = [
        ('hs_access_control', '0002_auto_20150817_1150'),
    ]

    operations = [
        migrations.AlterField(
            model_name='groupresourceprivilege',
            name='grantor',
            field=models.ForeignKey(related_name='x2grp', editable=False, on_delete=models.CASCADE, to='hs_access_control.UserAccess', help_text='grantor of privilege'),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='groupresourceprivilege',
            name='group',
            field=models.ForeignKey(related_name='g2grp', editable=False, on_delete=models.CASCADE, to='hs_access_control.GroupAccess', help_text='group to be granted privilege'),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='groupresourceprivilege',
            name='resource',
            field=models.ForeignKey(related_name='r2grp', editable=False, on_delete=models.CASCADE, to='hs_access_control.ResourceAccess', help_text='resource to which privilege applies'),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='usergroupprivilege',
            name='grantor',
            field=models.ForeignKey(related_name='x2ugp', editable=False, on_delete=models.CASCADE, to='hs_access_control.UserAccess', help_text='grantor of privilege'),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='usergroupprivilege',
            name='group',
            field=models.ForeignKey(related_name='g2ugp', editable=False, on_delete=models.CASCADE, to='hs_access_control.GroupAccess', help_text='group to which privilege applies'),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='usergroupprivilege',
            name='user',
            field=models.ForeignKey(related_name='u2ugp', editable=False, on_delete=models.CASCADE, to='hs_access_control.UserAccess', help_text='user to be granted privilege'),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='userresourceprivilege',
            name='grantor',
            field=models.ForeignKey(related_name='x2urp', editable=False, on_delete=models.CASCADE, to='hs_access_control.UserAccess', help_text='grantor of privilege'),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='userresourceprivilege',
            name='resource',
            field=models.ForeignKey(related_name='r2urp', editable=False, on_delete=models.CASCADE, to='hs_access_control.ResourceAccess', help_text='resource to which privilege applies'),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='userresourceprivilege',
            name='user',
            field=models.ForeignKey(related_name='u2urp', editable=False, on_delete=models.CASCADE, to='hs_access_control.UserAccess', help_text='user to be granted privilege'),
            preserve_default=True,
        ),
    ]
| bsd-3-clause | 777da93ea5749263cc359c40cd39e45e | 45.911765 | 188 | 0.633542 | 4.153646 | false | false | false | false |
hydroshare/hydroshare | hs_access_control/tests/test_units.py | 1 | 24922 | from django.test import TestCase
from django.contrib.auth.models import Group
from hs_access_control.models import PrivilegeCodes
from hs_core import hydroshare
from hs_core.testing import MockIRODSTestCaseMixin
from hs_access_control.tests.utilities import global_reset, is_equal_to_as_set
class UnitTests(MockIRODSTestCaseMixin, TestCase):
    """
    Unit tests covering the basic behavior of each access-control routine.

    The fixture is deliberately tiny: three regular users (george, alva,
    john), one admin, one resource ('Bikes', owned by george) and one group
    ('Bikers', created and therefore owned by george).
    """

    def setUp(self):
        super(UnitTests, self).setUp()
        global_reset()
        self.group, _ = Group.objects.get_or_create(name='Hydroshare Author')
        self.alva = hydroshare.create_account(
            'alva@gmail.com',
            username='alva',
            first_name='alva',
            last_name='couch',
            superuser=False,
            groups=[]
        )
        self.george = hydroshare.create_account(
            'george@gmail.com',
            username='george',
            first_name='george',
            last_name='miller',
            superuser=False,
            groups=[]
        )
        self.john = hydroshare.create_account(
            'john@gmail.com',
            username='john',
            first_name='john',
            last_name='miller',
            superuser=False,
            groups=[]
        )
        self.admin = hydroshare.create_account(
            'admin@gmail.com',
            username='admin',
            first_name='first_name_admin',
            last_name='last_name_admin',
            superuser=True,
            groups=[]
        )
        # george creates a resource 'bikes'
        self.bikes = hydroshare.create_resource(
            resource_type='GenericResource',
            owner=self.george,
            title='Bikes',
            metadata=[],
        )
        # george creates a group 'bikers'
        self.bikers = self.george.uaccess.create_group(
            title='Bikers', description="We are the bikers")

    # ---- group lifecycle and group-level permission checks ----

    def test_user_create_group(self):
        george = self.george
        bikers = self.bikers
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.view_groups,
                [bikers]))
        foo = george.uaccess.create_group(
            title='Foozball', description="We are the foozball")
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.view_groups, [
                    foo, bikers]))

    def test_user_delete_group(self):
        george = self.george
        bikers = self.bikers
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.view_groups,
                [bikers]))
        george.uaccess.delete_group(bikers)
        self.assertTrue(is_equal_to_as_set(george.uaccess.view_groups, []))

    def test_user_owned_groups(self):
        george = self.george
        bikers = self.bikers
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.owned_groups,
                [bikers]))

    def test_user_owns_group(self):
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(george.uaccess.owns_group(bikers))
        self.assertFalse(alva.uaccess.owns_group(bikers))

    def test_user_can_change_group(self):
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(george.uaccess.can_change_group(bikers))
        self.assertFalse(alva.uaccess.can_change_group(bikers))

    def test_user_can_view_group(self):
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(george.uaccess.can_view_group(bikers))
        bikers.gaccess.public = False
        # NOTE(review): this saves the Group, not the mutated gaccess
        # record — confirm whether bikers.gaccess.save() was intended.
        bikers.save()
        self.assertFalse(alva.uaccess.can_view_group(bikers))

    def test_user_can_view_group_metadata(self):
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(george.uaccess.can_view_group_metadata(bikers))
        bikers.gaccess.public = False
        bikers.gaccess.discoverable = False
        # NOTE(review): as above, gaccess changes may not be persisted by
        # saving the Group itself — confirm.
        bikers.save()
        self.assertFalse(alva.uaccess.can_view_group_metadata(bikers))

    def test_user_can_change_group_flags(self):
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(george.uaccess.can_change_group_flags(bikers))
        self.assertFalse(alva.uaccess.can_change_group_flags(bikers))

    def test_user_can_delete_group(self):
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(george.uaccess.can_delete_group(bikers))
        self.assertFalse(alva.uaccess.can_delete_group(bikers))

    def test_user_can_share_group(self):
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(
            george.uaccess.can_share_group(
                bikers, PrivilegeCodes.VIEW))
        self.assertFalse(
            alva.uaccess.can_share_group(
                bikers, PrivilegeCodes.VIEW))

    def test_user_can_share_group_with_user(self):
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(
            george.uaccess.can_share_group_with_user(
                bikers, alva, PrivilegeCodes.VIEW))
        self.assertFalse(
            alva.uaccess.can_share_group_with_user(
                bikers, george, PrivilegeCodes.VIEW))

    def test_user_share_group_with_user(self):
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(is_equal_to_as_set(bikers.gaccess.members, [george]))
        george.uaccess.share_group_with_user(bikers, alva, PrivilegeCodes.VIEW)
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.members, [
                    george, alva]))

    def test_user_unshare_group_with_user(self):
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(is_equal_to_as_set(bikers.gaccess.members, [george]))
        george.uaccess.share_group_with_user(bikers, alva, PrivilegeCodes.VIEW)
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.members, [
                    george, alva]))
        george.uaccess.unshare_group_with_user(bikers, alva)
        self.assertTrue(is_equal_to_as_set(bikers.gaccess.members, [george]))

    def test_user_can_unshare_group_with_user(self):
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertFalse(
            george.uaccess.can_unshare_group_with_user(
                bikers, alva))
        george.uaccess.share_group_with_user(bikers, alva, PrivilegeCodes.VIEW)
        self.assertTrue(
            george.uaccess.can_unshare_group_with_user(
                bikers, alva))

    def test_user_get_group_unshare_users(self):
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.get_group_unshare_users(bikers),
                []))
        george.uaccess.share_group_with_user(bikers, alva, PrivilegeCodes.VIEW)
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.get_group_unshare_users(bikers),
                [alva]))

    # ---- user-level resource queries ----

    def test_user_view_resources(self):
        george = self.george
        bikes = self.bikes
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.view_resources,
                [bikes]))
        trikes = hydroshare.create_resource(resource_type='GenericResource',
                                            owner=self.george,
                                            title='Trikes',
                                            metadata=[],)
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.view_resources, [
                    bikes, trikes]))

    def test_user_owned_resources(self):
        george = self.george
        bikes = self.bikes
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.owned_resources,
                [bikes]))
        trikes = hydroshare.create_resource(resource_type='GenericResource',
                                            owner=self.george,
                                            title='Trikes',
                                            metadata=[],)
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.owned_resources, [
                    bikes, trikes]))

    def test_user_edit_resources(self):
        george = self.george
        bikes = self.bikes
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.edit_resources,
                [bikes]))
        trikes = hydroshare.create_resource(resource_type='GenericResource',
                                            owner=self.george,
                                            title='Trikes',
                                            metadata=[],)
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.edit_resources, [
                    bikes, trikes]))

    def test_user_get_resources_with_explicit_access(self):
        george = self.george
        bikes = self.bikes
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.get_resources_with_explicit_access(
                    PrivilegeCodes.OWNER),
                [bikes]))
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.get_resources_with_explicit_access(
                    PrivilegeCodes.CHANGE), []))
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.get_resources_with_explicit_access(
                    PrivilegeCodes.VIEW), []))

    def test_user_get_groups_with_explicit_access(self):
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.get_groups_with_explicit_access(
                    PrivilegeCodes.OWNER),
                [bikers]))
        self.assertTrue(
            is_equal_to_as_set(
                alva.uaccess.get_groups_with_explicit_access(
                    PrivilegeCodes.CHANGE), []))
        self.assertTrue(
            is_equal_to_as_set(
                alva.uaccess.get_groups_with_explicit_access(
                    PrivilegeCodes.VIEW), []))

    # ---- user-level resource permission checks ----

    def test_user_owns_resource(self):
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(george.uaccess.owns_resource(bikes))
        self.assertFalse(alva.uaccess.owns_resource(bikes))

    def test_user_can_change_resource(self):
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(george.uaccess.can_change_resource(bikes))
        self.assertFalse(alva.uaccess.can_change_resource(bikes))

    def test_user_can_change_resource_flags(self):
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(george.uaccess.can_change_resource_flags(bikes))
        self.assertFalse(alva.uaccess.can_change_resource_flags(bikes))

    def test_user_can_view_resource(self):
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(george.uaccess.can_view_resource(bikes))
        self.assertFalse(alva.uaccess.can_view_resource(bikes))

    def test_user_can_delete_resource(self):
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(george.uaccess.can_delete_resource(bikes))
        self.assertFalse(alva.uaccess.can_delete_resource(bikes))

    def test_user_can_share_resource(self):
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(
            george.uaccess.can_share_resource(
                bikes, PrivilegeCodes.VIEW))
        self.assertFalse(
            alva.uaccess.can_share_resource(
                bikes, PrivilegeCodes.VIEW))

    def test_user_can_share_resource_with_user(self):
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(
            george.uaccess.can_share_resource_with_user(
                bikes, alva, PrivilegeCodes.VIEW))
        self.assertFalse(
            alva.uaccess.can_share_resource_with_user(
                bikes, george, PrivilegeCodes.VIEW))

    def test_user_can_share_resource_with_group(self):
        george = self.george
        alva = self.alva
        bikes = self.bikes
        bikers = self.bikers
        self.assertTrue(
            george.uaccess.can_share_resource_with_group(
                bikes, bikers, PrivilegeCodes.VIEW))
        self.assertFalse(
            alva.uaccess.can_share_resource_with_group(
                bikes, bikers, PrivilegeCodes.VIEW))

    # ---- sharing and unsharing resources ----

    def test_user_share_resource_with_user(self):
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(is_equal_to_as_set(alva.uaccess.view_resources, []))
        george.uaccess.share_resource_with_user(
            bikes, alva, PrivilegeCodes.VIEW)
        self.assertTrue(
            is_equal_to_as_set(
                alva.uaccess.view_resources,
                [bikes]))

    def test_user_unshare_resource_with_user(self):
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(is_equal_to_as_set(alva.uaccess.view_resources, []))
        george.uaccess.share_resource_with_user(
            bikes, alva, PrivilegeCodes.VIEW)
        self.assertTrue(
            is_equal_to_as_set(
                alva.uaccess.view_resources,
                [bikes]))
        george.uaccess.unshare_resource_with_user(bikes, alva)
        self.assertTrue(is_equal_to_as_set(alva.uaccess.view_resources, []))

    def test_user_can_unshare_resource_with_user(self):
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertFalse(
            george.uaccess.can_unshare_resource_with_user(
                bikes, alva))
        george.uaccess.share_resource_with_user(
            bikes, alva, PrivilegeCodes.VIEW)
        self.assertTrue(
            george.uaccess.can_unshare_resource_with_user(
                bikes, alva))

    def test_user_share_resource_with_group(self):
        george = self.george
        bikes = self.bikes
        bikers = self.bikers
        self.assertTrue(is_equal_to_as_set(bikers.gaccess.view_resources, []))
        george.uaccess.share_resource_with_group(
            bikes, bikers, PrivilegeCodes.VIEW)
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.view_resources,
                [bikes]))

    def test_user_unshare_resource_with_group(self):
        george = self.george
        bikes = self.bikes
        bikers = self.bikers
        self.assertTrue(is_equal_to_as_set(bikers.gaccess.view_resources, []))
        george.uaccess.share_resource_with_group(
            bikes, bikers, PrivilegeCodes.VIEW)
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.view_resources,
                [bikes]))
        george.uaccess.unshare_resource_with_group(bikes, bikers)
        self.assertTrue(is_equal_to_as_set(bikers.gaccess.view_resources, []))

    def test_user_can_unshare_resource_with_group(self):
        george = self.george
        bikes = self.bikes
        bikers = self.bikers
        self.assertFalse(
            george.uaccess.can_unshare_resource_with_group(
                bikes, bikers))
        george.uaccess.share_resource_with_group(
            bikes, bikers, PrivilegeCodes.VIEW)
        self.assertTrue(
            george.uaccess.can_unshare_resource_with_group(
                bikes, bikers))

    def test_user_get_resource_unshare_users(self):
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.get_resource_unshare_users(bikes),
                []))
        george.uaccess.share_resource_with_user(
            bikes, alva, PrivilegeCodes.VIEW)
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.get_resource_unshare_users(bikes),
                [alva]))

    def test_user_get_resource_unshare_groups(self):
        george = self.george
        bikes = self.bikes
        bikers = self.bikers
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.get_resource_unshare_groups(bikes),
                []))
        george.uaccess.share_resource_with_group(
            bikes, bikers, PrivilegeCodes.VIEW)
        self.assertTrue(
            is_equal_to_as_set(
                george.uaccess.get_resource_unshare_groups(bikes),
                [bikers]))

    # ---- group-level access queries ----

    def test_group_members(self):
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(is_equal_to_as_set(bikers.gaccess.members, [george]))
        george.uaccess.share_group_with_user(
            bikers, alva, PrivilegeCodes.CHANGE)
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.members, [
                    george, alva]))

    def test_group_view_resources(self):
        george = self.george
        bikes = self.bikes
        bikers = self.bikers
        self.assertTrue(is_equal_to_as_set(bikers.gaccess.view_resources, []))
        george.uaccess.share_resource_with_group(
            bikes, bikers, PrivilegeCodes.CHANGE)
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.view_resources,
                [bikes]))

    def test_group_edit_resources(self):
        george = self.george
        bikes = self.bikes
        bikers = self.bikers
        self.assertTrue(is_equal_to_as_set(bikers.gaccess.edit_resources, []))
        george.uaccess.share_resource_with_group(
            bikes, bikers, PrivilegeCodes.CHANGE)
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.edit_resources,
                [bikes]))

    def test_group_get_resources_with_explicit_access(self):
        george = self.george
        bikers = self.bikers
        bikes = self.bikes
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.get_resources_with_explicit_access(
                    PrivilegeCodes.VIEW), []))
        george.uaccess.share_resource_with_group(
            bikes, bikers, PrivilegeCodes.CHANGE)
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.get_resources_with_explicit_access(
                    PrivilegeCodes.CHANGE),
                [bikes]))

    def test_group_owners(self):
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(is_equal_to_as_set(bikers.gaccess.owners, [george]))
        george.uaccess.share_group_with_user(
            bikers, alva, PrivilegeCodes.OWNER)
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.owners, [
                    george, alva]))

    def test_group_view_users(self):
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(is_equal_to_as_set(bikers.gaccess.members, [george]))
        george.uaccess.share_group_with_user(
            bikers, alva, PrivilegeCodes.OWNER)
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.members, [
                    george, alva]))

    def test_group_edit_users(self):
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.edit_users,
                [george]))
        george.uaccess.share_group_with_user(
            bikers, alva, PrivilegeCodes.OWNER)
        self.assertTrue(
            is_equal_to_as_set(
                bikers.gaccess.edit_users, [
                    george, alva]))

    def test_group_get_effective_privilege(self):
        george = self.george
        alva = self.alva
        bikers = self.bikers
        self.assertEqual(
            bikers.gaccess.get_effective_privilege(george),
            PrivilegeCodes.OWNER)
        self.assertEqual(
            bikers.gaccess.get_effective_privilege(alva),
            PrivilegeCodes.NONE)

    # ---- resource-level access queries ----

    def test_resource_view_users(self):
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(is_equal_to_as_set(bikes.raccess.view_users, [george]))
        george.uaccess.share_resource_with_user(
            bikes, alva, PrivilegeCodes.OWNER)
        self.assertTrue(
            is_equal_to_as_set(
                bikes.raccess.view_users, [
                    george, alva]))

    def test_resource_edit_users(self):
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(is_equal_to_as_set(bikes.raccess.edit_users, [george]))
        george.uaccess.share_resource_with_user(
            bikes, alva, PrivilegeCodes.OWNER)
        self.assertTrue(
            is_equal_to_as_set(
                bikes.raccess.edit_users, [
                    george, alva]))

    def test_resource_view_groups(self):
        george = self.george
        bikes = self.bikes
        bikers = self.bikers
        self.assertTrue(is_equal_to_as_set(bikes.raccess.view_groups, []))
        george.uaccess.share_resource_with_group(
            bikes, bikers, PrivilegeCodes.CHANGE)
        self.assertTrue(
            is_equal_to_as_set(
                bikes.raccess.view_groups,
                [bikers]))

    def test_resource_edit_groups(self):
        george = self.george
        bikes = self.bikes
        bikers = self.bikers
        self.assertTrue(is_equal_to_as_set(bikes.raccess.edit_groups, []))
        george.uaccess.share_resource_with_group(
            bikes, bikers, PrivilegeCodes.CHANGE)
        self.assertTrue(
            is_equal_to_as_set(
                bikes.raccess.edit_groups,
                [bikers]))

    def test_resource_owners(self):
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertTrue(is_equal_to_as_set(bikes.raccess.owners, [george]))
        george.uaccess.share_resource_with_user(
            bikes, alva, PrivilegeCodes.OWNER)
        self.assertTrue(
            is_equal_to_as_set(
                bikes.raccess.owners, [
                    george, alva]))

    def test_resource_get_effective_user_privilege(self):
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertEqual(
            bikes.raccess.get_effective_user_privilege(george),
            PrivilegeCodes.OWNER)
        self.assertEqual(
            bikes.raccess.get_effective_user_privilege(alva),
            PrivilegeCodes.NONE)

    def test_resource_get_effective_group_privilege(self):
        george = self.george
        alva = self.alva
        bikes = self.bikes
        bikers = self.bikers
        george.uaccess.share_resource_with_group(
            bikes, bikers, PrivilegeCodes.CHANGE)
        self.assertEqual(
            bikes.raccess.get_effective_group_privilege(alva),
            PrivilegeCodes.NONE)
        george.uaccess.share_group_with_user(bikers, alva, PrivilegeCodes.VIEW)
        # alva inherits CHANGE on the resource through biker membership
        self.assertEqual(
            bikes.raccess.get_effective_group_privilege(alva),
            PrivilegeCodes.CHANGE)

    def test_resource_get_effective_privilege(self):
        george = self.george
        alva = self.alva
        bikes = self.bikes
        self.assertEqual(
            bikes.raccess.get_effective_privilege(george),
            PrivilegeCodes.OWNER)
        self.assertEqual(
            bikes.raccess.get_effective_privilege(alva),
            PrivilegeCodes.NONE)

    def test_resource_get_effective_privilege_ignore_super(self):
        george = self.george
        alva = self.alva
        admin = self.admin
        bikes = self.bikes
        # with ignore_superuser=False an admin is treated as an owner;
        # with ignore_superuser=True only explicit privileges count
        self.assertEqual(
            bikes.raccess.get_effective_privilege(admin, ignore_superuser=False),
            PrivilegeCodes.OWNER)
        self.assertEqual(
            bikes.raccess.get_effective_privilege(admin, ignore_superuser=True),
            PrivilegeCodes.NONE)
        self.assertEqual(
            bikes.raccess.get_effective_privilege(george, ignore_superuser=True),
            PrivilegeCodes.OWNER)
        self.assertEqual(
            bikes.raccess.get_effective_privilege(alva, ignore_superuser=True),
            PrivilegeCodes.NONE)
| bsd-3-clause | 8d1e7ea2b484d6b3a908dd0ec68f2872 | 34.910663 | 81 | 0.573389 | 3.501756 | false | true | false | false |
hydroshare/hydroshare | hs_core/management/commands/check_django_metadata.py | 1 | 4006 | # -*- coding: utf-8 -*-
"""
Check Django metadata
This checks that:
1. Every resource has a metadata entry.
2. Every metadata entry has a title.
More tests are left for later.
* By default, prints errors on stdout.
* Optional argument --log instead logs output to system log.
"""
from django.conf import settings
from django.core.management.base import BaseCommand
from hs_core.models import BaseResource
import logging
def check_django_metadata(self, stop_on_error=False,
                          echo_errors=True,
                          log_errors=False,
                          return_errors=False):
    """
    Check that a resource has basic Django metadata: a root path in iRODS,
    a metadata object, and a title.

    :param self: a BaseResource instance (named ``self`` so the function can
        presumably be grafted onto the resource class as a method — TODO
        confirm).
    :param stop_on_error: accepted for interface symmetry with similar
        checkers; currently unused in this function.
    :param echo_errors: if True, print each error message to stdout.
    :param log_errors: if True, write each error message to the system log.
    :param return_errors: if True, accumulate error messages in the
        returned list.
    :return: tuple ``(errors, ecount)`` where ``errors`` is the (possibly
        empty) list of messages and ``ecount`` is the number of errors
        found, including ones not appended to ``errors``.
    """
    # print("check_django_metadata: check {}".format(self.short_id))
    logger = logging.getLogger(__name__)
    istorage = self.get_irods_storage()
    errors = []
    ecount = 0
    # skip federated resources if not configured to handle these
    if self.is_federated and not settings.REMOTE_USE_IRODS:
        msg = "check_django_metadata: skipping check of federated resource {} in unfederated mode"\
            .format(self.short_id)
        if echo_errors:
            print(msg)
        if log_errors:
            logger.info(msg)
    # flag non-existent resources in iRODS
    else:
        if not istorage.exists(self.root_path):
            msg = "root path {} does not exist in iRODS".format(self.root_path)
            ecount += 1
            if echo_errors:
                print(msg)
            if log_errors:
                logger.error(msg)
            if return_errors:
                errors.append(msg)
    # basic check: metadata exists
    if self.metadata is None:
        msg = "metadata for {} does not exist".format(self.short_id)
        ecount += 1
        if echo_errors:
            print(msg)
        if log_errors:
            logger.error(msg)
        if return_errors:
            errors.append(msg)
    elif self.metadata.title is None:
        # metadata exists but has no title
        msg = "{} has no title".format(self.short_id)
        ecount += 1
        if echo_errors:
            print(msg)
        if log_errors:
            logger.error(msg)
        if return_errors:
            errors.append(msg)
    return errors, ecount
class Command(BaseCommand):
    """Management command that validates Django metadata for resources."""
    help = "Check existence of proper Django metadata."

    def add_arguments(self, parser):
        """Register positional resource ids and the optional --log flag."""
        # a list of resource id's, or none to check all resources
        parser.add_argument('resource_ids', nargs='*', type=str)
        parser.add_argument(
            '--log',
            action='store_true',  # True for presence, False for absence
            dest='log',  # value is options['log']
            help='log errors to system log',
        )

    def handle(self, *args, **options):
        """Run the metadata check over the requested (or all) resources."""
        to_log = options['log']
        short_ids = options['resource_ids']
        if short_ids:
            # check only the explicitly requested resources
            for rid in short_ids:
                try:
                    res = BaseResource.objects.get(short_id=rid)
                except BaseResource.DoesNotExist:
                    print("Resource with id {} not found in Django Resources".format(rid))
                    continue
                print("LOOKING FOR METADATA ERRORS FOR RESOURCE {}".format(rid))
                check_django_metadata(res, stop_on_error=False,
                                      echo_errors=not to_log,
                                      log_errors=to_log,
                                      return_errors=False)
        else:
            # no ids given: sweep every resource in the system
            print("LOOKING FOR METADATA ERRORS FOR ALL RESOURCES")
            for res in BaseResource.objects.all():
                check_django_metadata(res, stop_on_error=False,
                                      echo_errors=not to_log,  # Don't both log and echo
                                      log_errors=to_log,
                                      return_errors=False)
| bsd-3-clause | 679fb50a08b760cfeb47ea276838c42f | 32.663866 | 99 | 0.542936 | 4.441242 | false | false | false | false |
mitodl/micromasters | roles/migrations/0001_initial.py | 1 | 1187 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-18 15:55
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial migration for the roles app: creates the ``Role`` model.

    Auto-generated by Django; do not edit by hand — create a new migration
    for any further schema change.
    """
    initial = True
    dependencies = [
        ('courses', '0011_courserun_upgrade_deadline'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Role',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # The role a user holds within a program: 'staff' or 'instructor'.
                ('role', models.CharField(choices=[('staff', 'staff'), ('instructor', 'instructor')], default='staff', max_length=30)),
                ('program', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='courses.Program')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # A user can hold a given role at most once per program.
        migrations.AlterUniqueTogether(
            name='role',
            unique_together=set([('user', 'program', 'role')]),
        ),
    ]
| bsd-3-clause | 85cc9be28687bd61819dd5525da9588c | 34.969697 | 135 | 0.613311 | 4.15035 | false | false | false | false |
mitodl/micromasters | exams/admin.py | 1 | 2875 |
"""
Admin for the grades app
"""
from django.contrib import admin
from exams import models
class ExamRunAdmin(admin.ModelAdmin):
    """Admin configuration for ExamRun records."""
    model = models.ExamRun
    list_display = (
        'id',
        'course',
        'semester',
        'exam_series_code',
        'date_first_schedulable',
        'date_last_schedulable',
        'date_first_eligible',
        'date_last_eligible',
        'authorized',
    )
    list_filter = ('course__title', 'course__program__title', 'semester', )
    ordering = ('-date_first_eligible',)
    readonly_fields = ('authorized',)

    def get_readonly_fields(self, request, obj=None):
        """Conditionally determine readonly fields"""
        if self.is_modifiable(obj):
            return self.readonly_fields
        # exam_series_code cannot be changed due to Pearson requirement
        return self.readonly_fields + ('exam_series_code',)

    def has_delete_permission(self, request, obj=None):
        """Whether record can be deleted or not"""
        return self.is_modifiable(obj)

    def is_modifiable(self, exam_run):
        """
        Determines if an ExamRun can be modified/deleted

        Returns:
            bool: True if the run can be modified/deleted
        """
        # Unsaved (or absent) runs are always modifiable; saved runs are
        # locked down once any authorizations exist against them.
        if exam_run is None or exam_run.id is None:
            return True
        return not exam_run.has_authorizations
class ExamAuthorizationAdmin(admin.ModelAdmin):
    """Admin configuration for ExamAuthorization records."""
    model = models.ExamAuthorization
    list_display = (
        'id',
        'user_email',
        'course_number',
        'exam_run_id',
        'exam_coupon_url',
    )
    list_filter = (
        'exam_run__id',
        'course__course_number',
        'course__title',
    )
    search_fields = (
        'exam_run__id',
        'course__course_number',
    )
    raw_id_fields = ('user',)

    def user_email(self, obj):
        """Getter for the User foreign-key element email"""
        user = obj.user
        return user.email

    def course_number(self, obj):
        """Getter for the Course foreign-key element course_number"""
        course = obj.course
        return course.course_number

    def exam_run_id(self, obj):
        """Getter for the ExamRun foreign-key element id"""
        exam_run = obj.exam_run
        return exam_run.id
class ExamRunCouponAdmin(admin.ModelAdmin):
    """Admin configuration for ExamRunCoupon records."""
    model = models.ExamRunCoupon
    list_display = (
        'id',
        'is_taken',
        'course_title',
        'coupon_url',
        'expiration_date',
    )
    list_filter = (
        'is_taken',
        'course__title',
    )

    def course_title(self, obj):
        """Getter for the Course foreign-key element course_number"""
        course = obj.course
        return course.title
# Register the exam models with the default admin site using the
# configurations defined above.
admin.site.register(models.ExamRun, ExamRunAdmin)
admin.site.register(models.ExamAuthorization, ExamAuthorizationAdmin)
admin.site.register(models.ExamRunCoupon, ExamRunCouponAdmin)
| bsd-3-clause | 2ef1dcb0e31b5c9d89781af5d79281f3 | 26.122642 | 89 | 0.606957 | 3.797886 | false | false | false | false |
mitodl/micromasters | grades/tasks.py | 1 | 7724 | """
Tasks for the grades app
"""
import logging
from celery import group
from celery.result import GroupResult
from django.contrib.auth.models import User
from django.core.cache import caches
from django.db import IntegrityError
from django.db.models import OuterRef, Exists
from django_redis import get_redis_connection
from courses.models import CourseRun, Course
from grades import api
from grades.constants import FinalGradeStatus
from grades.models import (
FinalGrade,
ProctoredExamGrade,
MicromastersCourseCertificate,
CourseRunGradingStatus,
CombinedFinalGrade,
)
from micromasters.celery import app
from micromasters.utils import chunks, now_in_utc
# Cache-key template holding the celery GroupResult id of an in-flight
# grade-freezing run, keyed by the edX course key.
CACHE_ID_BASE_STR = "freeze_grade_{0}"
log = logging.getLogger(__name__)
# Redis-backed Django cache used to pass state between task invocations.
cache_redis = caches['redis']
@app.task
def generate_course_certificates_for_fa_students():
    """
    Creates any missing unique course-user FACourseCertificates

    Only courses in live, financial-aid programs are considered; within
    those, only courses that already have frozen runs are processed.
    """
    courses = Course.objects.filter(
        program__live=True,
        program__financial_aid_availability=True
    )
    for course in courses:
        if not course.has_frozen_runs():
            continue
        # Correlated subquery: does a certificate already exist for this
        # (course, user) pair?
        course_certificates = MicromastersCourseCertificate.objects.filter(
            course=course,
            user=OuterRef('user')
        )
        # Find users that passed the course but don't have a certificate yet
        users_need_cert = FinalGrade.objects.annotate(
            course_certificate=Exists(course_certificates)
        ).filter(
            course_run__course=course,
            status=FinalGradeStatus.COMPLETE,
            passed=True,
            course_certificate=False
        ).values_list('user', flat=True)
        if course.has_exam:
            # need also to pass exam
            users_need_cert = ProctoredExamGrade.objects.filter(
                course=course,
                passed=True,
                exam_run__date_grades_available__lte=now_in_utc(),
                user__in=users_need_cert
            ).values_list('user', flat=True)
        for user in users_need_cert:
            try:
                # get_or_create keeps the task idempotent across re-runs
                MicromastersCourseCertificate.objects.get_or_create(
                    user_id=user,
                    course=course
                )
            except (IntegrityError, MicromastersCourseCertificate.DoesNotExist):
                log.exception(
                    "Unable to fetch or create certificate for user id: %d and course: %s",
                    user,
                    course.title
                )
@app.task
def create_combined_final_grades():
    """
    Creates any missing CombinedFinalGrades

    Looks at live financial-aid programs only; for each course that has
    frozen runs and an exam, every passing exam grade whose user lacks a
    CombinedFinalGrade gets one created/updated.
    """
    fa_courses = Course.objects.filter(
        program__live=True,
        program__financial_aid_availability=True
    )
    for course in fa_courses:
        if not (course.has_frozen_runs() and course.has_exam):
            continue
        passing_exam_grades = ProctoredExamGrade.objects.filter(
            course=course,
            passed=True,
            exam_run__date_grades_available__lte=now_in_utc()
        )
        already_graded = set(
            CombinedFinalGrade.objects.filter(course=course).values_list('user', flat=True)
        )
        for exam_grade in passing_exam_grades:
            if exam_grade.user.id in already_graded:
                continue
            api.update_or_create_combined_final_grade(exam_grade.user, course)
@app.task
def find_course_runs_and_freeze_grades():
    """
    Async task that takes care of finding all the course
    runs that can freeze the final grade to their students.

    Dispatches one `freeze_course_run_final_grades` subtask per
    freezable course run.

    Returns:
        None
    """
    for freezable_run in CourseRun.get_freezable():
        freeze_course_run_final_grades.delay(freezable_run.id)
@app.task
def freeze_course_run_final_grades(course_run_id):
    """
    Async task manager to freeze all the users' final grade in a course run

    This task is re-entrant: each invocation first cleans up the subtask
    group launched by the previous invocation (tracked via a redis cache
    key), then either marks the run complete or dispatches a new group of
    `freeze_users_final_grade_async` subtasks for the remaining users.

    Args:
        course_run_id (int): a course run id

    Returns:
        None
    """
    course_run = CourseRun.objects.get(id=course_run_id)
    # no need to do anything if the course run is not ready
    if not course_run.can_freeze_grades:
        log.info('the grades course "%s" cannot be frozen yet', course_run.edx_course_key)
        return
    # if it has already completed, do not do anything
    if CourseRunGradingStatus.is_complete(course_run):
        log.info('Final Grades freezing for course run "%s" has already been completed', course_run.edx_course_key)
        return
    # cache id string for this task
    cache_id = CACHE_ID_BASE_STR.format(course_run.edx_course_key)
    # try to get the result id from a previous iteration of this task for this course run
    group_results_id = cache_redis.get(cache_id)
    # if the id is not none, it means that this task already run before for this course run
    # so we need to check if its subtasks have finished
    if group_results_id is not None:
        # delete the entry from the cache (if needed it will be added again later)
        cache_redis.delete(cache_id)
        # extract the results from the id
        results = GroupResult.restore(group_results_id, app=app)
        # if the subtasks are not done, revoke them
        results.revoke()
        # delete the results anyway
        results.delete()
    # extract the users to be frozen for this course
    user_ids_qset = api.get_users_without_frozen_final_grade(course_run).values_list('id', flat=True)
    # find number of users for which cache could not be updated
    con = get_redis_connection("redis")
    failed_users_cache_key = api.CACHE_KEY_FAILED_USERS_BASE_STR.format(course_run.edx_course_key)
    failed_users_count = con.llen(failed_users_cache_key)
    # get the list of users that failed authentication last run of the task
    failed_users_list = list(map(int, con.lrange(failed_users_cache_key, 0, failed_users_count)))
    users_need_freeze = list(user_ids_qset)
    users_left = list(set(users_need_freeze) - set(failed_users_list))
    # if there are no more users to be frozen, just complete the task
    if not users_left:
        log.info('Completing grading with %d users getting refresh cache errors', len(failed_users_list))
        CourseRunGradingStatus.set_to_complete(course_run)
        con.delete(failed_users_cache_key)
        return
    # if the task reaches this point, it means there are users still to be processed
    # clear the list for users for whom cache update failed
    con.delete(failed_users_cache_key)
    # create an entry in with pending status ('pending' is the default status)
    CourseRunGradingStatus.create_pending(course_run=course_run)
    # create a group of subtasks to be run in parallel
    job = group(
        freeze_users_final_grade_async.s(list_user_ids, course_run.id) for list_user_ids in chunks(user_ids_qset)
    )
    results = job.apply_async()
    # save the result ID in the celery backend
    results.save()
    # put the results id in the cache to be retrieved and finalized later
    # (timeout=None means the key never expires; it is deleted explicitly above)
    cache_redis.set(cache_id, results.id, None)
@app.task
def freeze_users_final_grade_async(user_ids, course_run_id):
    """
    Async task to freeze the final grade in a course run for a list of users.

    A failure for one user is logged and does not stop processing of the
    remaining users.

    Args:
        user_ids (list): a list of django user ids
        course_run_id (int): a course run id

    Returns:
        None
    """
    course_run = CourseRun.objects.get(id=course_run_id)
    for user in User.objects.filter(id__in=user_ids):
        try:
            api.freeze_user_final_grade(user, course_run)
        except Exception:
            # Catch Exception rather than using a bare `except:` so that
            # control-flow exceptions such as SystemExit (used by celery
            # to revoke tasks) and KeyboardInterrupt are not swallowed.
            log.exception(
                'Impossible to freeze final grade for user "%s" in course %s',
                user.username, course_run.edx_course_key
            )
| bsd-3-clause | 8e7f5c12f3a58c7badf726f5298ce4a5 | 34.109091 | 115 | 0.651217 | 3.865866 | false | false | false | false |
mitodl/micromasters | ecommerce/migrations/0011_user_coupon.py | 1 | 1856 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-01-06 19:31
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Migration adding coupon types and per-user coupon tracking.

    Auto-generated by Django; do not edit by hand — create a new migration
    for any further schema change.
    """
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('ecommerce', '0010_coupon'),
    ]
    operations = [
        migrations.AddField(
            model_name='coupon',
            name='coupon_type',
            field=models.CharField(choices=[('standard', 'standard'), ('discounted-previous-course', 'discounted-previous-course')], help_text='The type of the coupon which describes what circumstances the coupon can be redeemed', max_length=30),
            preserve_default=False,
        ),
        # Join model linking a coupon to an individual user.
        migrations.CreateModel(
            name='UserCoupon',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
        ),
        # Per-coupon redemption counters are replaced by the coupon_type scheme.
        migrations.RemoveField(
            model_name='coupon',
            name='num_coupons_available',
        ),
        migrations.RemoveField(
            model_name='coupon',
            name='num_redemptions_per_user',
        ),
        migrations.AddField(
            model_name='usercoupon',
            name='coupon',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='ecommerce.Coupon'),
        ),
        migrations.AddField(
            model_name='usercoupon',
            name='user',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
        ),
        # Each (user, coupon) pair may appear at most once.
        migrations.AlterUniqueTogether(
            name='usercoupon',
            unique_together=set([('user', 'coupon')]),
        ),
    ]
| bsd-3-clause | f11f9d0d62044208f4bba92678702149 | 34.692308 | 246 | 0.595905 | 4.208617 | false | false | false | false |
mitodl/micromasters | search/indexing_api_test.py | 1 | 42701 | """
Tests for search API functions.
"""
import itertools
from unittest.mock import patch
from ddt import (
data,
ddt,
unpack,
)
from django.conf import settings
from django.db.models.signals import post_save
from django.test import override_settings
from opensearchpy.exceptions import NotFoundError
from factory.django import mute_signals
from dashboard.factories import (
CachedCertificateFactory,
CachedEnrollmentFactory,
CachedCurrentGradeFactory,
ProgramEnrollmentFactory
)
from dashboard.models import ProgramEnrollment
from dashboard.serializers import UserProgramSearchSerializer
from courses.factories import (
ProgramFactory,
CourseFactory,
CourseRunFactory,
)
from profiles.factories import (
EducationFactory,
EmploymentFactory,
ProfileFactory,
)
from profiles.serializers import (
ProfileSerializer
)
from roles.models import Role
from roles.roles import (
Instructor,
Staff
)
from search.base import ESTestCase, reindex_test_es_data
from search.connection import (
ALL_INDEX_TYPES,
get_aliases,
get_default_alias,
make_alias_name,
make_backing_index_name,
GLOBAL_DOC_TYPE,
PERCOLATE_INDEX_TYPE,
PUBLIC_ENROLLMENT_INDEX_TYPE,
PRIVATE_ENROLLMENT_INDEX_TYPE,
)
from search.exceptions import ReindexException
from search.factories import PercolateQueryFactory
from search.indexing_api import (
clear_and_create_index,
delete_indices,
get_conn,
refresh_index,
index_program_enrolled_users,
remove_program_enrolled_user,
serialize_program_enrolled_user,
serialize_public_enrolled_user,
filter_current_work,
index_percolate_queries,
delete_percolate_query, create_backing_indices,
)
from search.models import PercolateQuery
from search.util import traverse_mapping
# Number of documents indexed per program enrollment in each index type.
DOC_TYPES_PER_ENROLLMENT = 1
# pylint: disable=too-many-lines
class ESTestActions:
    """
    Provides helper functions for tests to communicate with ES
    """
    def __init__(self):
        self.conn = get_conn(verify=False)

    def search(self, index_type):
        """Gets full index data from the _search endpoint"""
        alias = get_default_alias(index_type)
        refresh_index(alias)
        response = self.conn.search(index=alias)
        return response['hits']

    def get_percolate_query(self, _id):
        """Get percolate query"""
        percolate_alias = get_default_alias(PERCOLATE_INDEX_TYPE)
        return self.conn.get(id=_id, doc_type=GLOBAL_DOC_TYPE, index=percolate_alias)

    def get_mappings(self, index_type):
        """Gets mapping data"""
        alias = get_default_alias(index_type)
        refresh_index(alias)
        raw_mapping = self.conn.indices.get_mapping(index=alias)
        # only one backing index is aliased; take its mappings
        first_index_data = next(iter(raw_mapping.values()))
        return first_index_data['mappings']

    def get_default_backing_index(self, index_type):
        """Get the default backing index"""
        alias_info = self.conn.indices.get_alias(name=get_default_alias(index_type))
        return next(iter(alias_info.keys()))


es = ESTestActions()
def get_sources(results):
    """
    Get sources from hits, sorted by source id

    Args:
        results (dict): Opensearch results

    Returns:
        list of dict: The list of source dicts
    """
    ordered_hits = sorted(results['hits'], key=lambda hit: hit['_source']['id'])
    return [hit['_source'] for hit in ordered_hits]
def remove_es_keys(hit):
    """
    Removes ES keys from a hit object in-place

    Args:
        hit (dict): Opensearch hit object

    Returns:
        dict: modified Opensearch hit object
    """
    # '_id' must exist; '_type' is optional on newer ES versions
    hit.pop('_id')
    hit.pop('_type', None)
    return hit
def assert_search(results, program_enrollments, *, index_type):
    """
    Assert that search results match program-enrolled users
    """
    expected_total = len(program_enrollments) * DOC_TYPES_PER_ENROLLMENT
    assert results['total']['value'] == expected_total
    sources_advanced = get_sources(results)
    ordered_enrollments = sorted(program_enrollments, key=lambda enrollment: enrollment.id)
    if index_type == PRIVATE_ENROLLMENT_INDEX_TYPE:
        serialized = [
            remove_es_keys(serialize_program_enrolled_user(enrollment))
            for enrollment in ordered_enrollments
        ]
    elif index_type == PUBLIC_ENROLLMENT_INDEX_TYPE:
        serialized = [
            remove_es_keys(serialize_public_enrolled_user(
                serialize_program_enrolled_user(enrollment)
            ))
            for enrollment in ordered_enrollments
        ]
    else:
        raise Exception("Unexpected index type")
    assert serialized == sources_advanced
# pylint: disable=unused-argument
@ddt
@patch('search.signals.transaction.on_commit', side_effect=lambda callback: callback())
class IndexTests(ESTestCase):
    """
    Tests for indexing

    The on_commit patch makes post-save signal handlers run immediately
    instead of waiting for the transaction to commit, so documents appear
    in the index as soon as a model is saved.
    """
    # pylint: disable=too-many-public-methods
    @data(PRIVATE_ENROLLMENT_INDEX_TYPE, PUBLIC_ENROLLMENT_INDEX_TYPE)
    def test_program_enrollment_add(self, index_type, mock_on_commit):
        """
        Test that a newly created ProgramEnrollment is indexed properly
        """
        assert es.search(index_type)['total']['value'] == 0
        program_enrollment = ProgramEnrollmentFactory.create()
        assert_search(es.search(index_type), [program_enrollment], index_type=index_type)
    @data(PRIVATE_ENROLLMENT_INDEX_TYPE, PUBLIC_ENROLLMENT_INDEX_TYPE)
    def test_program_enrollment_delete(self, index_type, mock_on_commit):
        """
        Test that ProgramEnrollment is removed from index after the user is removed
        """
        program_enrollment = ProgramEnrollmentFactory.create()
        assert es.search(index_type)['total']['value'] == DOC_TYPES_PER_ENROLLMENT
        program_enrollment.user.delete()
        assert es.search(index_type)['total']['value'] == 0
    @data(PRIVATE_ENROLLMENT_INDEX_TYPE, PUBLIC_ENROLLMENT_INDEX_TYPE)
    def test_profile_update(self, index_type, mock_on_commit):
        """
        Test that ProgramEnrollment is reindexed after the User's Profile has been updated
        """
        program_enrollment = ProgramEnrollmentFactory.create()
        assert es.search(index_type)['total']['value'] == DOC_TYPES_PER_ENROLLMENT
        profile = program_enrollment.user.profile
        profile.first_name = 'updated'
        profile.save()
        assert_search(es.search(index_type), [program_enrollment], index_type=index_type)
    @data(PRIVATE_ENROLLMENT_INDEX_TYPE, PUBLIC_ENROLLMENT_INDEX_TYPE)
    def test_education_add(self, index_type, mock_on_commit):
        """
        Test that Education is indexed after being added
        """
        program_enrollment = ProgramEnrollmentFactory.create()
        assert es.search(index_type)['total']['value'] == DOC_TYPES_PER_ENROLLMENT
        EducationFactory.create(profile=program_enrollment.user.profile)
        assert_search(es.search(index_type), [program_enrollment], index_type=index_type)
    @data(PRIVATE_ENROLLMENT_INDEX_TYPE, PUBLIC_ENROLLMENT_INDEX_TYPE)
    def test_education_update(self, index_type, mock_on_commit):
        """
        Test that Education is reindexed after being updated
        """
        program_enrollment = ProgramEnrollmentFactory.create()
        assert es.search(index_type)['total']['value'] == DOC_TYPES_PER_ENROLLMENT
        education = EducationFactory.create(profile=program_enrollment.user.profile)
        education.school_city = 'city'
        education.save()
        assert_search(es.search(index_type), [program_enrollment], index_type=index_type)
    @data(PRIVATE_ENROLLMENT_INDEX_TYPE, PUBLIC_ENROLLMENT_INDEX_TYPE)
    def test_education_delete(self, index_type, mock_on_commit):
        """
        Test that Education is removed from index after being deleted
        """
        program_enrollment = ProgramEnrollmentFactory.create()
        education = EducationFactory.create(profile=program_enrollment.user.profile)
        assert_search(es.search(index_type), [program_enrollment], index_type=index_type)
        education.delete()
        assert_search(es.search(index_type), [program_enrollment], index_type=index_type)
    @data(PRIVATE_ENROLLMENT_INDEX_TYPE, PUBLIC_ENROLLMENT_INDEX_TYPE)
    def test_employment_add(self, index_type, mock_on_commit):
        """
        Test that Employment is indexed after being added
        """
        program_enrollment = ProgramEnrollmentFactory.create()
        assert es.search(index_type)['total']['value'] == DOC_TYPES_PER_ENROLLMENT
        EmploymentFactory.create(profile=program_enrollment.user.profile, end_date=None)
        assert_search(es.search(index_type), [program_enrollment], index_type=index_type)
    @data(PRIVATE_ENROLLMENT_INDEX_TYPE, PUBLIC_ENROLLMENT_INDEX_TYPE)
    def test_employment_update(self, index_type, mock_on_commit):
        """
        Test that Employment is reindexed after being updated
        """
        program_enrollment = ProgramEnrollmentFactory.create()
        assert es.search(index_type)['total']['value'] == DOC_TYPES_PER_ENROLLMENT
        employment = EmploymentFactory.create(profile=program_enrollment.user.profile, end_date=None)
        employment.city = 'city'
        employment.save()
        assert_search(es.search(index_type), [program_enrollment], index_type=index_type)
    @data(PRIVATE_ENROLLMENT_INDEX_TYPE, PUBLIC_ENROLLMENT_INDEX_TYPE)
    def test_employment_delete(self, index_type, mock_on_commit):
        """
        Test that Employment is removed from index after being deleted
        """
        program_enrollment = ProgramEnrollmentFactory.create()
        employment = EmploymentFactory.create(profile=program_enrollment.user.profile, end_date=None)
        assert_search(es.search(index_type), [program_enrollment], index_type=index_type)
        employment.delete()
        assert_search(es.search(index_type), [program_enrollment], index_type=index_type)
    @data(PRIVATE_ENROLLMENT_INDEX_TYPE, PUBLIC_ENROLLMENT_INDEX_TYPE)
    def test_past_employment_add(self, index_type, mock_on_commit):
        """
        Test that past work history is not indexed
        """
        program_enrollment = ProgramEnrollmentFactory.create()
        EmploymentFactory.create(profile=program_enrollment.user.profile, end_date=None)
        EmploymentFactory.create(profile=program_enrollment.user.profile)
        # only the current (end_date=None) employment should appear
        search_result = es.search(index_type)['hits'][0]['_source']['profile']['work_history']
        assert len(search_result) == 1
        self.assertFalse(search_result[0]['end_date'])
    @data(PRIVATE_ENROLLMENT_INDEX_TYPE, PUBLIC_ENROLLMENT_INDEX_TYPE)
    def test_remove_program_enrolled_user(self, index_type, mock_on_commit):
        """
        Test that remove_program_enrolled_user removes the user from the index for that program
        """
        program_enrollment = ProgramEnrollmentFactory.create()
        assert_search(es.search(index_type), [program_enrollment], index_type=index_type)
        remove_program_enrolled_user(program_enrollment.id)
        assert_search(es.search(index_type), [], index_type=index_type)
    # pylint: disable=too-many-locals
    def test_index_program_enrolled_users(self, mock_on_commit):
        """
        Test that index_program_enrolled_users indexes an iterable of program-enrolled users
        """
        num_enrollments = 10
        chunk_size = 4
        with mute_signals(post_save):
            program_enrollments = [
                ProgramEnrollmentFactory.create() for _ in range(num_enrollments)
            ]
        for enrollment in program_enrollments:
            ProfileFactory.create(user=enrollment.user)
        private = [serialize_program_enrolled_user(enrollment) for enrollment in program_enrollments]
        private_dicts = {serialized['id']: serialized for serialized in private}
        public = [serialize_public_enrolled_user(serialized) for serialized in private]
        public_dicts = {serialized['id']: serialized for serialized in public}
        with patch(
            'search.indexing_api._index_chunk', autospec=True, return_value=0
        ) as index_chunk, patch(
            'search.indexing_api.serialize_program_enrolled_user', autospec=True,
            side_effect=lambda x: private_dicts[x.id]
        ) as serialize_mock, patch(
            'search.indexing_api.serialize_public_enrolled_user', autospec=True,
            side_effect=lambda x: public_dicts[x['id']]
        ) as serialize_public_mock:
            index_program_enrolled_users(program_enrollments, chunk_size=chunk_size)
            assert index_chunk.call_count == 6 # 10 enrollments divided in chunks of 4, times the number of types (2)
            public_index = make_alias_name(PUBLIC_ENROLLMENT_INDEX_TYPE, is_reindexing=False)
            private_index = make_alias_name(PRIVATE_ENROLLMENT_INDEX_TYPE, is_reindexing=False)
            for offset in range(0, num_enrollments, chunk_size):
                # each enrollment should get yielded twice to account for each doctype
                index_chunk.assert_any_call(
                    public[offset:offset+4], # ordered dicts FTW
                    index=public_index
                )
                index_chunk.assert_any_call(
                    private[offset:offset+4],
                    index=private_index
                )
            assert serialize_mock.call_count == len(program_enrollments)
            assert serialize_public_mock.call_count == len(program_enrollments)
            for enrollment in program_enrollments:
                serialize_mock.assert_any_call(enrollment)
                serialize_public_mock.assert_any_call(private_dicts[enrollment.id])
    def test_index_program_enrolled_users_missing_profiles(self, mock_on_commit):
        """
        Test that index_program_enrolled_users doesn't index users missing profiles
        """
        with mute_signals(post_save):
            program_enrollments = [ProgramEnrollmentFactory.build() for _ in range(10)]
        with patch(
            'search.indexing_api._index_chunk', autospec=True, return_value=0
        ) as index_chunk, patch(
            'search.indexing_api.serialize_program_enrolled_user',
            autospec=True,
            side_effect=lambda x: None  # simulate a missing profile
        ) as serialize_mock, patch(
            'search.indexing_api.serialize_public_enrolled_user', autospec=True, side_effect=lambda x: x
        ) as serialize_public_mock:
            index_program_enrolled_users(program_enrollments)
            assert index_chunk.call_count == 0
            assert serialize_public_mock.call_count == 0
        assert serialize_mock.call_count == len(program_enrollments)
    @data(PRIVATE_ENROLLMENT_INDEX_TYPE, PUBLIC_ENROLLMENT_INDEX_TYPE)
    def test_add_edx_record(self, index_type, mock_on_commit):
        """
        Test that cached edX records are indexed after being added
        """
        program_enrollment = ProgramEnrollmentFactory.create()
        for edx_cached_model_factory in [CachedCertificateFactory, CachedEnrollmentFactory, CachedCurrentGradeFactory]:
            assert es.search(index_type)['total']['value'] == DOC_TYPES_PER_ENROLLMENT
            course = CourseFactory.create(program=program_enrollment.program)
            course_run = CourseRunFactory.create(course=course)
            edx_cached_model_factory.create(user=program_enrollment.user, course_run=course_run)
            index_program_enrolled_users([program_enrollment])
            assert_search(es.search(index_type), [program_enrollment], index_type=index_type)
    @data(PRIVATE_ENROLLMENT_INDEX_TYPE, PUBLIC_ENROLLMENT_INDEX_TYPE)
    def test_update_edx_record(self, index_type, mock_on_commit):
        """
        Test that a cached edX record is reindexed after being updated
        """
        program_enrollment = ProgramEnrollmentFactory.create()
        for edx_cached_model_factory in [CachedCertificateFactory, CachedEnrollmentFactory, CachedCurrentGradeFactory]:
            assert es.search(index_type)['total']['value'] == DOC_TYPES_PER_ENROLLMENT
            course = CourseFactory.create(program=program_enrollment.program)
            course_run = CourseRunFactory.create(course=course)
            edx_record = edx_cached_model_factory.create(user=program_enrollment.user, course_run=course_run)
            index_program_enrolled_users([program_enrollment])
            edx_record.data.update({'new': 'data'})
            edx_record.save()
            index_program_enrolled_users([program_enrollment])
            assert_search(es.search(index_type), [program_enrollment], index_type=index_type)
    @data(PRIVATE_ENROLLMENT_INDEX_TYPE, PUBLIC_ENROLLMENT_INDEX_TYPE)
    def test_delete_edx_record(self, index_type, mock_on_commit):
        """
        Test that a cached edX record is removed from index after being deleted
        """
        program_enrollment = ProgramEnrollmentFactory.create()
        for edx_cached_model_factory in [CachedCertificateFactory, CachedEnrollmentFactory, CachedCurrentGradeFactory]:
            course = CourseFactory.create(program=program_enrollment.program)
            course_run = CourseRunFactory.create(course=course)
            edx_record = edx_cached_model_factory.create(user=program_enrollment.user, course_run=course_run)
            index_program_enrolled_users([program_enrollment])
            assert_search(es.search(index_type), [program_enrollment], index_type=index_type)
            edx_record.delete()
            index_program_enrolled_users([program_enrollment])
            assert_search(es.search(index_type), [program_enrollment], index_type=index_type)
    def test_analyzed(self, mock_on_commit):
        """
        Most string fields in the mapping should be 'analyzed' since we don't want to
        tokenize strings arbitrarily when filtering on fields.
        """
        program_enrollment = ProgramEnrollmentFactory.create()
        EducationFactory.create(profile=program_enrollment.user.profile)
        EmploymentFactory.create(profile=program_enrollment.user.profile)
        for index_type in ALL_INDEX_TYPES:
            mapping = es.get_mappings(index_type)
            nodes = list(traverse_mapping(mapping, ""))
            for key, node in nodes:
                if key == "folded":
                    assert node['analyzer'] == "folding"
                elif node.get('type') == 'string':
                    assert node['index'] == 'not_analyzed'
    def test_folded(self, mock_on_commit):
        """
        Check that we have a folded type for first_name, last_name, and preferred_name
        """
        program_enrollment = ProgramEnrollmentFactory.create()
        EducationFactory.create(profile=program_enrollment.user.profile)
        EmploymentFactory.create(profile=program_enrollment.user.profile)
        for index_type in ALL_INDEX_TYPES:
            mapping = es.get_mappings(index_type)
            properties = mapping['properties']
            if index_type == PUBLIC_ENROLLMENT_INDEX_TYPE:
                # Make sure we aren't exposing people's email addresses
                assert 'email' not in properties
            else:
                assert properties['email']['fields']['folded']['analyzer'] == 'folding'
            profile_properties = properties['profile']['properties']
            for key in 'first_name', 'last_name', 'preferred_name', 'full_name', 'username':
                assert profile_properties[key]['fields']['folded']['analyzer'] == 'folding'
    @data(
        *itertools.product(
            [Staff.ROLE_ID, Instructor.ROLE_ID],
            [PRIVATE_ENROLLMENT_INDEX_TYPE, PUBLIC_ENROLLMENT_INDEX_TYPE],
        )
    )
    @unpack
    def test_role_add(self, role, index_type, mock_on_commit):
        """
        Test that `is_learner` status is changed when a role is saved
        """
        program_enrollment = ProgramEnrollmentFactory.create()
        assert es.search(index_type)['total']['value'] == DOC_TYPES_PER_ENROLLMENT
        sources = get_sources(es.search(index_type))
        # user is learner
        assert sources[0]['program']['is_learner'] is True
        Role.objects.create(
            user=program_enrollment.user,
            program=program_enrollment.program,
            role=role
        )
        assert es.search(index_type)['total']['value'] == DOC_TYPES_PER_ENROLLMENT
        # user is not learner
        sources = get_sources(es.search(index_type))
        assert sources[0]['program']['is_learner'] is False
    @data(
        *itertools.product(
            [Staff.ROLE_ID, Instructor.ROLE_ID],
            [PRIVATE_ENROLLMENT_INDEX_TYPE, PUBLIC_ENROLLMENT_INDEX_TYPE],
        )
    )
    @unpack
    def test_role_delete(self, role, index_type, mock_on_commit):
        """
        Test that `is_learner` status is restored once a role is removed for a user.
        """
        program_enrollment = ProgramEnrollmentFactory.create()
        assert es.search(index_type)['total']['value'] == DOC_TYPES_PER_ENROLLMENT
        sources = get_sources(es.search(index_type))
        # user is learner
        assert sources[0]['program']['is_learner'] is True
        Role.objects.create(
            user=program_enrollment.user,
            program=program_enrollment.program,
            role=role
        )
        assert es.search(index_type)['total']['value'] == DOC_TYPES_PER_ENROLLMENT
        # user is not learner
        sources = get_sources(es.search(index_type))
        assert sources[0]['program']['is_learner'] is False
        # when staff role is deleted
        Role.objects.filter(
            user=program_enrollment.user,
            program=program_enrollment.program,
            role=role
        ).delete()
        assert es.search(index_type)['total']['value'] == DOC_TYPES_PER_ENROLLMENT
        sources = get_sources(es.search(index_type))
        # user is learner
        assert sources[0]['program']['is_learner'] is True
class SerializerTests(ESTestCase):
    """
    Tests for document serializers

    Builds one profile with education/employment history and one program
    enrollment with cached edX data, shared by all tests in the class.
    """
    @classmethod
    def setUpTestData(cls):
        with mute_signals(post_save):
            cls.profile = ProfileFactory.create()
        EducationFactory.create(profile=cls.profile)
        EmploymentFactory.create(profile=cls.profile)
        # one current employment (end_date=None) in addition to the past one
        EmploymentFactory.create(profile=cls.profile, end_date=None)
        program = ProgramFactory.create()
        course = CourseFactory.create(program=program)
        course_runs = [CourseRunFactory.create(course=course) for _ in range(2)]
        for course_run in course_runs:
            CachedCertificateFactory.create(user=cls.profile.user, course_run=course_run)
            CachedEnrollmentFactory.create(user=cls.profile.user, course_run=course_run)
        cls.program_enrollment = ProgramEnrollment.objects.create(user=cls.profile.user, program=program)
    def test_program_enrolled_user_serializer(self):
        """
        Asserts the output of the serializer for program-enrolled users (ProgramEnrollments)
        """
        profile = self.profile
        program_enrollment = self.program_enrollment
        assert serialize_program_enrolled_user(program_enrollment) == {
            '_id': program_enrollment.id,
            'id': program_enrollment.id,
            'user_id': profile.user.id,
            'email': profile.user.email,
            'profile': filter_current_work(ProfileSerializer(profile).data),
            'program': UserProgramSearchSerializer.serialize(program_enrollment)
        }
    def test_public_enrolled_user_serializer(self):
        """
        Asserts the output of the public serializer for program-enrolled users (ProgramEnrollments)
        """
        profile = self.profile
        program_enrollment = self.program_enrollment
        serialized = serialize_program_enrolled_user(program_enrollment)
        # the public document must not leak the email field (see test_folded)
        assert serialize_public_enrolled_user(serialized) == {
            '_id': program_enrollment.id,
            'id': program_enrollment.id,
            'user_id': profile.user.id,
            'profile': {
                'first_name': profile.first_name,
                'last_name': profile.last_name,
                'full_name': profile.full_name,
                'preferred_name': profile.preferred_name,
                'romanized_first_name': profile.romanized_first_name,
                'romanized_last_name': profile.romanized_last_name,
                'image': '/media/{}'.format(profile.image),
                'image_small': '/media/{}'.format(profile.image_small),
                'image_medium': '/media/{}'.format(profile.image_medium),
                'username': profile.user.username,
                'filled_out': profile.filled_out,
                'account_privacy': profile.account_privacy,
                'country': profile.country,
                'state_or_territory': profile.state_or_territory,
                'city': profile.city,
                'birth_country': profile.birth_country,
                'work_history': serialized['profile']['work_history'],
            },
            'program': {
                'id': program_enrollment.program.id,
                'enrollments': [{
                    'course_title': enrollment['course_title'],
                    'semester': enrollment['semester']
                } for enrollment in serialized['program']['enrollments']],
                'courses': [{
                    'course_title': enrollment['course_title']
                } for enrollment in serialized['program']['courses']],
                'course_runs': [{
                    'semester': semester_enrolled['semester']
                } for semester_enrolled in serialized['program']['course_runs']],
                'is_learner': True,
                'total_courses': 1,
            }
        }
@ddt
class GetConnTests(ESTestCase):
    """
    Tests for get_conn
    """
    def setUp(self):
        """
        Start without any index
        """
        super().setUp()
        conn = get_conn(verify=False)
        # Delete only the indices belonging to this test settings prefix
        for index in conn.indices.get_alias().keys():
            if index.startswith(settings.OPENSEARCH_INDEX):
                conn.indices.delete(index)
        # Clear globals
        from search import indexing_api
        indexing_api._CONN = None  # pylint: disable=protected-access
        indexing_api._CONN_VERIFIED = False  # pylint: disable=protected-access
    def test_no_index(self):
        """
        Test that an error is raised if we don't have an index
        """
        with self.assertRaises(ReindexException) as ex:
            get_conn()
        assert "Unable to find index" in str(ex.exception)
    def test_no_index_not_default(self):
        """
        Test that an error is raised if we don't have an index
        """
        # Reset default index so it does not cause an error
        reindex_test_es_data()
        other_index = "other"
        delete_indices()
        with self.assertRaises(ReindexException) as ex:
            get_conn(verify_indices=[other_index])
        assert str(ex.exception) == "Unable to find index {}".format(other_index)
    # Each case: (is_reindex, index type, aliases expected back from get_aliases)
    @data(
        [False, PRIVATE_ENROLLMENT_INDEX_TYPE, ('testindex_private_enrollment_default',)],
        [False, PUBLIC_ENROLLMENT_INDEX_TYPE, ('testindex_public_enrollment_default',)],
        [False, PERCOLATE_INDEX_TYPE, ('testindex_percolate_default',)],
        [True, PRIVATE_ENROLLMENT_INDEX_TYPE,
         ('testindex_private_enrollment_default', 'testindex_private_enrollment_reindexing')],
        [True, PUBLIC_ENROLLMENT_INDEX_TYPE,
         ('testindex_public_enrollment_default', 'testindex_public_enrollment_reindexing')],
        [True, PERCOLATE_INDEX_TYPE,
         ('testindex_percolate_default', 'testindex_percolate_reindexing')],
    )
    @override_settings(OPENSEARCH_INDEX='testindex')
    @unpack
    # pylint: disable=too-many-arguments
    def test_get_aliases(self, is_reindex, index_type, expected_indices):
        """
        We should choose the correct alias and doc type given the circumstances
        """
        conn = get_conn(verify=False)
        alias = make_alias_name(index_type, is_reindexing=False)
        backing_index = make_backing_index_name()
        # Skip the mapping because it's invalid for 2.x schema, and we don't need it here
        clear_and_create_index(backing_index, index_type=index_type, skip_mapping=True)
        conn.indices.put_alias(index=backing_index, name=alias)
        if is_reindex:
            conn.indices.put_alias(index=backing_index, name=make_alias_name(index_type, is_reindexing=True))
        aliases = get_aliases(index_type)
        assert aliases == list(expected_indices)
        # The default alias is always the first (non-reindexing) one
        assert get_default_alias(index_type) == aliases[0]
@ddt
class RecreateIndexTests(ESTestCase):
    """
    Tests for management commands
    """
    def tearDown(self):
        super().tearDown()
        # Drop every index so each test starts from a clean cluster
        conn = get_conn(verify=False)
        for index in conn.indices.get_mapping().keys():
            conn.indices.delete(index=index)
    @data(PUBLIC_ENROLLMENT_INDEX_TYPE, PRIVATE_ENROLLMENT_INDEX_TYPE)
    def test_create_index(self, index_type):
        """
        Test that recreate_index will create an index and let search successfully
        """
        # The fixture recreated the index; it should be empty and searchable
        assert es.search(index_type)['total']['value'] == 0
    @data(*itertools.product(
        [True, False],
        [PRIVATE_ENROLLMENT_INDEX_TYPE, PUBLIC_ENROLLMENT_INDEX_TYPE],
    ))
    @unpack
    def test_keep_alias(self, existing_temp_alias, index_type):
        """
        Test that recreate_index will point an existing alias at a new backing index
        """
        conn = get_conn(verify=False)
        default_alias = make_alias_name(index_type, is_reindexing=False)
        temp_alias = make_alias_name(index_type, is_reindexing=True)
        assert conn.indices.exists_alias(name=temp_alias) is False
        if existing_temp_alias:
            # Create a temp alias to assert that it doesn't change anything
            backing_index = "{}_backing".format(temp_alias)
            conn.indices.create(backing_index)
            conn.indices.put_alias(name=temp_alias, index=backing_index)
        old_backing_indexes = list(conn.indices.get_alias(name=default_alias).keys())
        assert len(old_backing_indexes) == 1
        reindex_test_es_data()
        new_backing_indexes = list(conn.indices.get_alias(name=default_alias).keys())
        assert len(new_backing_indexes) == 1
        # Backing index should have changed
        assert old_backing_indexes != new_backing_indexes
        # Temp index should have been deleted
        assert conn.indices.exists_alias(name=temp_alias) is False
    @data(PUBLIC_ENROLLMENT_INDEX_TYPE, PRIVATE_ENROLLMENT_INDEX_TYPE)
    def test_update_index(self, index_type):
        """
        Test that recreate_index will clear old data and index all profiles
        """
        # Patch on_commit so the indexing signal fires immediately in the test
        with patch('search.signals.transaction.on_commit', side_effect=lambda callback: callback()):
            program_enrollment = ProgramEnrollmentFactory.create()
        assert_search(es.search(index_type), [program_enrollment], index_type=index_type)
        remove_program_enrolled_user(program_enrollment.id)
        assert_search(es.search(index_type), [], index_type=index_type)
        # recreate_index should index the program-enrolled user
        reindex_test_es_data()
        assert_search(es.search(index_type), [program_enrollment], index_type=index_type)
    def test_update_during_recreate_index(self):
        """
        If an indexing action happens during a recreate_index it should update all active indices
        """
        conn = get_conn(verify=False)
        reindex_test_es_data()
        temp_aliases = {}
        index_types = [PRIVATE_ENROLLMENT_INDEX_TYPE, PUBLIC_ENROLLMENT_INDEX_TYPE]
        for index_type in index_types:
            # create temporary index
            temp_index = make_backing_index_name()
            temp_alias = make_alias_name(index_type=index_type, is_reindexing=True)
            clear_and_create_index(temp_index, index_type=index_type)
            conn.indices.put_alias(index=temp_index, name=temp_alias)
            temp_aliases[index_type] = temp_alias
        with patch('search.signals.transaction.on_commit', side_effect=lambda callback: callback()):
            program_enrollment = ProgramEnrollmentFactory.create()
        for index_type in index_types:
            assert_search(es.search(index_type), [program_enrollment], index_type=index_type)
            # Temp alias should get updated
            temp_alias = temp_aliases[index_type]
            refresh_index(temp_alias)
            temp_hits = conn.search(index=temp_alias)['hits']
            assert_search(temp_hits, [program_enrollment], index_type=index_type)
class PercolateQueryTests(ESTestCase):
    """
    Tests for indexing of percolate queries
    """
    def test_index_percolate_query(self):
        """Test that we index the percolate query"""
        query = {"query": {"match": {"profile.first_name": "here"}}}
        percolate_query = PercolateQueryFactory.create(query=query, original_query="original")
        percolate_query_id = 123
        percolate_query.id = percolate_query_id
        # Don't save since that will trigger a signal which will update the index
        with self.assertRaises(NotFoundError):
            es.get_percolate_query(percolate_query_id)
        index_percolate_queries([percolate_query])
        # The indexed document is keyed by the PercolateQuery's database id
        assert es.get_percolate_query(percolate_query_id) == {
            '_id': str(percolate_query_id),
            '_index': es.get_default_backing_index(PERCOLATE_INDEX_TYPE),
            '_source': query,
            '_seq_no': 0,
            '_primary_term': 1,
            '_type': GLOBAL_DOC_TYPE,
            '_version': 1,
            'found': True,
        }
    def test_delete_percolate_queries(self):
        """Test that we delete the percolate query from the index"""
        query = {"query": {"match": {"profile.first_name": "here"}}}
        # Fire on_commit callbacks immediately so the save indexes synchronously
        with patch('search.signals.transaction', on_commit=lambda callback: callback()):
            percolate_query = PercolateQueryFactory.create(query=query, original_query="original")
            assert es.get_percolate_query(percolate_query.id) == {
                '_id': str(percolate_query.id),
                '_index': es.get_default_backing_index(PERCOLATE_INDEX_TYPE),
                '_source': query,
                '_seq_no': 0,
                '_primary_term': 1,
                '_type': GLOBAL_DOC_TYPE,
                '_version': 1,
                'found': True,
            }
            delete_percolate_query(percolate_query.id)
            with self.assertRaises(NotFoundError):
                es.get_percolate_query(percolate_query.id)
            # If we delete it again there should be no exception
            delete_percolate_query(percolate_query.id)
            with self.assertRaises(NotFoundError):
                es.get_percolate_query(percolate_query.id)
    def test_fix_percolate_query(self):
        """
        Make sure all nested -> filter are replaced with nested -> query
        """
        # Note the legacy "nested"/"filter" clause below; indexing should
        # rewrite it to "nested"/"query" and still index successfully.
        input_query = {
            "query": {
                "bool": {
                    "filter": [
                        {
                            "bool": {
                                "must": [
                                    {
                                        "term": {
                                            "program.is_learner": True
                                        }
                                    }
                                ],
                                "should": [
                                    {
                                        "term": {
                                            "program.id": 34
                                        }
                                    }
                                ],
                                "minimum_should_match": 1
                            }
                        },
                        {
                            "term": {
                                "profile.filled_out": True
                            }
                        },
                        {
                            "bool": {
                                "must": [
                                    {
                                        "nested": {
                                            "path": "program.course_runs",
                                            "filter": {
                                                "term": {
                                                    "program.course_runs.semester": "2015 - Summer"
                                                }
                                            }
                                        }
                                    },
                                    {
                                        "term": {
                                            "program.id": 34
                                        }
                                    }
                                ]
                            }
                        }
                    ]
                }
            }
        }
        query = PercolateQueryFactory.create(query=input_query)
        # Return value is the number of successfully indexed queries
        assert index_percolate_queries([query]) == 1
    def test_fix_field_error(self):
        """recreate_index should not cause any error with this percolate query"""
        # Real-world query shape that previously triggered mapping errors
        query = {
            "query": {
                "bool": {
                    "filter": [
                        {
                            "bool": {
                                "must": [
                                    {
                                        "term": {
                                            "program.is_learner": True
                                        }
                                    },
                                    {
                                        "term": {
                                            "profile.email_optin": True
                                        }
                                    }
                                ],
                                "should": [
                                    {
                                        "term": {
                                            "program.id": 1
                                        }
                                    },
                                    {
                                        "term": {
                                            "program.id": 2
                                        }
                                    },
                                    {
                                        "term": {
                                            "program.id": 13
                                        }
                                    }
                                ],
                                "minimum_should_match": 1
                            }
                        },
                        {
                            "term": {
                                "profile.filled_out": True
                            }
                        },
                        {
                            "bool": {
                                "must": [
                                    {
                                        "nested": {
                                            "path": "program.courses",
                                            "query": {
                                                "bool": {
                                                    "must": [
                                                        {
                                                            "term": {
                                                                "program.courses.course_title":
                                                                    "Supply Chain Fundamentals (SC1x)"
                                                            }
                                                        },
                                                        {
                                                            "term": {
                                                                "program.courses.payment_status": "Auditing"
                                                            }
                                                        }
                                                    ]
                                                }
                                            }
                                        }
                                    },
                                    {
                                        'nested': {
                                            'path': "program.course_runs",
                                            'query': {
                                                'term': {
                                                    'program.course_runs.semester': "2016 - Summer"
                                                }
                                            }
                                        }
                                    },
                                    {
                                        "term": {
                                            "profile.birth_country": "DE"
                                        }
                                    },
                                    {
                                        "term": {
                                            "profile.country": "US"
                                        }
                                    },
                                    {
                                        "term": {
                                            "program.id": 1
                                        }
                                    }
                                ]
                            }
                        }
                    ]
                }
            }
        }
        PercolateQuery.objects.create(query=query, original_query=query)
        # Passing criterion is simply that reindexing does not raise
        reindex_test_es_data()
    def test_create_backing_indices(self):
        """
        Test that create_backing_indices creates the right backing indices for reindexing
        """
        indices = create_backing_indices()
        conn = get_conn(verify=False)
        for index_name, _ in indices:
            assert conn.indices.get_alias(index_name) is not None
| bsd-3-clause | 3576877df3ab349b947a0b92c8c4e277 | 41.829488 | 119 | 0.556146 | 4.475527 | false | true | false | false |
mitodl/micromasters | mail/migrations/0001_initial.py | 1 | 1436 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-09-26 20:28
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial migration for the "mail" app: creates the audit table that
    # records every financial-aid email sent (who sent it, to whom, and
    # the full subject/body), for compliance purposes.
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('financialaid', '0004_modified_financial_aid_audit'),
    ]
    operations = [
        migrations.CreateModel(
            name='FinancialAidEmailAudit',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_on', models.DateTimeField(auto_now_add=True)),
                ('updated_on', models.DateTimeField(auto_now=True)),
                ('to_email', models.CharField(max_length=250)),
                ('from_email', models.CharField(max_length=250)),
                ('email_subject', models.CharField(max_length=250)),
                ('email_body', models.TextField()),
                ('acting_user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                # SET_NULL keeps audit rows alive if the FinancialAid record is deleted
                ('financial_aid', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='financialaid.FinancialAid')),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| bsd-3-clause | b117968abe9e80f0f498e08a15b57fd6 | 37.810811 | 142 | 0.60376 | 4.067989 | false | false | false | false |
mitodl/micromasters | micromasters/sentry.py | 1 | 1710 | """Sentry setup and configuration"""
from celery.exceptions import WorkerLostError
import sentry_sdk
from sentry_sdk.integrations.celery import CeleryIntegration
from sentry_sdk.integrations.django import DjangoIntegration
from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.integrations.logging import LoggingIntegration
# these errors occur when a shutdown is happening (usually caused by a SIGTERM)
SHUTDOWN_ERRORS = (WorkerLostError, SystemExit)
def init_sentry(*, dsn, environment, version, log_level):
    """
    Initializes sentry

    Args:
        dsn (str): the sentry DSN key
        environment (str): the application environment
        version (str): the version of the application
        log_level (str): the sentry log level
    """
    def before_send(event, hint):
        """
        Filter or transform events before they're sent to Sentry

        Args:
            event (dict): event object
            hint (dict): event hint, see https://docs.sentry.io/platforms/python/#hints

        Returns:
            dict or None: returns the modified event or None to filter out the event
        """
        exc_info = hint.get('exc_info')
        # Shutdown errors (e.g. a SIGTERM-triggered WorkerLostError/SystemExit)
        # are expected during deploys; drop them rather than reporting noise.
        if exc_info is not None and isinstance(exc_info[1], SHUTDOWN_ERRORS):
            return None
        return event
    sentry_sdk.init(
        dsn=dsn,
        environment=environment,
        release=version,
        before_send=before_send,
        integrations=[
            DjangoIntegration(),
            CeleryIntegration(),
            RedisIntegration(),
            LoggingIntegration(level=log_level),
        ],
    )
| bsd-3-clause | 9056fccea075b1983aab2b312d116457 | 33.2 | 87 | 0.643275 | 4.511873 | false | false | false | false |
mitodl/micromasters | search/api.py | 1 | 15685 | """
Functions for executing ES searches
"""
import json
import logging
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured
from django.db import transaction
from django.db.models import Q as Query
from opensearchpy.exceptions import NotFoundError
from opensearch_dsl import Search, Q
from jsonpatch import make_patch
from courses.models import Program
from dashboard.models import ProgramEnrollment
from profiles.models import Profile
from roles.api import get_advance_searchable_program_ids
from search.connection import (
get_default_alias,
get_conn,
PRIVATE_ENROLLMENT_INDEX_TYPE,
PUBLIC_ENROLLMENT_INDEX_TYPE,
PERCOLATE_INDEX_TYPE,
)
from search.models import (
PercolateQuery,
PercolateQueryMembership,
)
from search.exceptions import (
NoProgramAccessException,
PercolateException,
)
from search.indexing_api import serialize_program_enrolled_user
DEFAULT_ES_LOOP_PAGE_SIZE = 100
log = logging.getLogger(__name__)
def execute_search(search_obj):
    """
    Executes a search against ES after checking the connection

    Args:
        search_obj (Search): opensearch_dsl Search object

    Returns:
        opensearch_dsl.result.Response: ES response
    """
    # A Search built through create_search_obj always has an index set; a bare
    # Search() does not, and running it would query the wrong indices.
    target_index = search_obj._index  # pylint: disable=protected-access
    if target_index is None:
        raise ImproperlyConfigured("search object is missing an index")
    # verify the connection is alive before issuing the query
    get_conn()
    return search_obj.execute()
def scan_search(search_obj):
    """
    Executes a scan search after checking the connection and return a
    generator that will iterate over all the documents matching the query.

    Args:
        search_obj (Search): opensearch_dsl Search object

    Returns:
        generator of dict:
            A generator that will iterate over all the documents matching the query
    """
    # A Search built through create_search_obj always has an index set; a bare
    # Search() does not, and scanning it would query the wrong indices.
    target_index = search_obj._index  # pylint: disable=protected-access
    if target_index is None:
        raise ImproperlyConfigured("search object is missing an index")
    # verify the connection is alive before starting the scroll
    get_conn()
    return search_obj.scan()
def get_searchable_programs(user, staff_program_ids):
    """
    Determines the programs a user is eligible to search

    Args:
        user (django.contrib.auth.models.User): the user that is searching
        staff_program_ids (list of int): the list of program ids the user is staff for if any

    Returns:
        set of courses.models.Program: set of programs the user can search in
    """
    # filter only to the staff programs or enrolled programs
    # NOTE: this has an accepted limitation that if you are staff on any program,
    # you can't use search on non-staff programs
    if staff_program_ids:
        program_filter = Query(id__in=staff_program_ids)
    else:
        program_filter = Query(programenrollment__user=user)
    return set(Program.objects.filter(program_filter).distinct())
def create_program_limit_query(user, staff_program_ids, filter_on_email_optin=False):
    """
    Constructs and returns a query that limits a user to data for their allowed programs

    Args:
        user (django.contrib.auth.models.User): A user
        staff_program_ids (list of int): the list of program ids the user is staff for if any
        filter_on_email_optin (bool): If true, filter out profiles where email_optin != true

    Returns:
        opensearch_dsl.query.Q: An opensearch query
    """
    allowed_programs = get_searchable_programs(user, staff_program_ids)
    # if the user cannot search any program, raise an exception.
    # in theory this should never happen because `UserCanAdvanceSearchPermission`
    # takes care of doing the same check, but better to keep it to avoid
    # that a theoretical bug exposes all the data in the index
    if not allowed_programs:
        raise NoProgramAccessException()
    must_clauses = [Q('term', **{'program.is_learner': True})]
    if filter_on_email_optin:
        must_clauses.append(Q('term', **{'profile.email_optin': True}))
    program_clauses = [
        Q('term', **{'program.id': program.id}) for program in allowed_programs
    ]
    # no matter what the query is, limit the programs to the allowed ones
    # if this is a superset of what searchkit sends, this will not impact the result
    return Q(
        'bool',
        should=program_clauses,
        # require that at least one program id matches the user's allowed programs
        minimum_should_match=1,
        must=must_clauses,
    )
def create_search_obj(user, search_param_dict=None, filter_on_email_optin=False):
    """
    Creates a search object and prepares it with metadata and query parameters that
    we want to apply for all ES requests

    Args:
        user (User): User object
        search_param_dict (dict): A dict representing the body of an ES query
        filter_on_email_optin (bool): If true, filter out profiles where email_optin != True

    Returns:
        Search: opensearch_dsl Search object
    """
    staff_program_ids = get_advance_searchable_program_ids(user)
    is_advance_search_capable = bool(staff_program_ids)
    # staff users search the private index; plain learners only see public data
    if is_advance_search_capable:
        index_type = PRIVATE_ENROLLMENT_INDEX_TYPE
    else:
        index_type = PUBLIC_ENROLLMENT_INDEX_TYPE
    search_obj = Search(index=get_default_alias(index_type))
    # Update from search params first so our server-side filtering will overwrite it if necessary
    if search_param_dict is not None:
        search_obj.update_from_dict(search_param_dict)
    if not is_advance_search_capable:
        # Learners can't search for other learners with privacy set to private
        search_obj = search_obj.filter(
            ~Q('term', **{'profile.account_privacy': Profile.PRIVATE})  # pylint: disable=invalid-unary-operand-type
        )
    # Limit results to one of the programs the user is staff on
    search_obj = search_obj.filter(create_program_limit_query(
        user,
        staff_program_ids,
        filter_on_email_optin=filter_on_email_optin
    ))
    # Filter so that only filled_out profiles are seen
    search_obj = search_obj.filter(Q('term', **{'profile.filled_out': True}))
    # Force size to be the one we set on the server
    update_dict = {'size': settings.OPENSEARCH_DEFAULT_PAGE_SIZE}
    if search_param_dict is not None and search_param_dict.get('from') is not None:
        update_dict['from'] = search_param_dict['from']
    search_obj.update_from_dict(update_dict)
    return search_obj
def prepare_and_execute_search(user, search_param_dict=None, search_func=execute_search,
                               filter_on_email_optin=False):
    """
    Prepares a Search object and executes the search against ES

    Args:
        user (User): User object
        search_param_dict (dict): A dict representing the body of an ES query
        search_func (callable): The function that executes the search
        filter_on_email_optin (bool): If true, filter out profiles where email_optin != True

    Returns:
        opensearch_dsl.result.Response: ES response
    """
    # build the server-side-filtered Search, then hand it to the chosen executor
    return search_func(
        create_search_obj(
            user,
            search_param_dict=search_param_dict,
            filter_on_email_optin=filter_on_email_optin,
        )
    )
def search_for_field(search_obj, field_name):
    """
    Retrieves all unique instances of a field for documents that match an ES query

    Args:
        search_obj (Search): Search object
        field_name (str): The name of the field for the value to get

    Returns:
        set: Set of unique values
    """
    # Sorting on '_doc' keeps the scan stable even if the index is altered
    # while iterating, and source() limits each hit to the requested field.
    narrowed = search_obj.sort('_doc').source(includes=[field_name])
    return {getattr(hit, field_name) for hit in scan_search(narrowed)}
def get_all_query_matching_emails(search_obj):
    """
    Retrieves all unique emails for documents that match an ES query

    Args:
        search_obj (Search): Search object

    Returns:
        set: Set of unique emails
    """
    return search_for_field(search_obj, "email")
def search_percolate_queries(program_enrollment_id, source_type):
    """
    Find all PercolateQuery objects whose queries match a user document

    Args:
        program_enrollment_id (int): A ProgramEnrollment id
        source_type (str): The type of the percolate query to filter on

    Returns:
        django.db.models.query.QuerySet: A QuerySet of PercolateQuery matching the percolate results
    """
    enrollment = ProgramEnrollment.objects.get(id=program_enrollment_id)
    matching_ids = _search_percolate_queries(enrollment)
    # narrow the percolate hits to live queries of the requested source type
    return PercolateQuery.objects.filter(
        id__in=matching_ids,
        source_type=source_type,
    ).exclude(is_deleted=True)
def _search_percolate_queries(program_enrollment):
    """
    Find all PercolateQuery ids whose queries match a user document

    Args:
        program_enrollment (ProgramEnrollment): A ProgramEnrollment

    Returns:
        list of int: A list of PercolateQuery ids
    """
    conn = get_conn()
    doc = serialize_program_enrolled_user(program_enrollment)
    if not doc:
        return []
    # We don't need this to search for percolator queries and
    # it causes a dynamic mapping failure so we need to remove it
    del doc['_id']
    body = {
        "query": {
            "percolate": {
                "field": "query",
                "document": doc
            }
        }
    }
    result = conn.search(index=get_default_alias(PERCOLATE_INDEX_TYPE), body=body)
    failures = result.get('_shards', {}).get('failures', [])
    if failures:
        raise PercolateException("Failed to percolate: {}".format(failures))
    # each hit's _id is the PercolateQuery's database primary key
    return [int(hit['_id']) for hit in result['hits']['hits']]
def adjust_search_for_percolator(search):
    """
    Returns an updated Search which can be used with percolator.

    Percolated queries can only store the query portion of the search object
    (see https://github.com/elastic/elasticsearch/issues/19680). This will modify the original search query
    to add post_filter arguments to the query part of the search. Then all parts of the Search other than
    query will be removed.

    Args:
        search (Search): A search object

    Returns:
        Search: updated search object
    """
    original_dict = search.to_dict()
    # fold any post_filter into the query itself, since percolator ignores it
    if 'post_filter' in original_dict:
        search = search.filter(original_dict['post_filter'])
    # keep only the query portion; drop aggs, sort, size, etc.
    filtered_dict = search.to_dict()
    query_only = {}
    if 'query' in filtered_dict:
        query_only['query'] = filtered_dict['query']
    updated_search = Search(index=search._index)  # pylint: disable=protected-access
    updated_search.update_from_dict(query_only)
    return updated_search
def document_needs_updating(enrollment):
    """
    Get the document from elasticsearch and see if it matches what's in the database

    Args:
        enrollment (ProgramEnrollment): A program enrollment

    Returns:
        bool: True if the document needs to be updated via reindex
    """
    conn = get_conn()
    index = get_default_alias(PRIVATE_ENROLLMENT_INDEX_TYPE)
    try:
        document = conn.get(index=index, id=enrollment.id)
    except NotFoundError:
        # missing from the index entirely: definitely needs reindexing
        return True
    serialized_enrollment = serialize_program_enrolled_user(enrollment)
    del serialized_enrollment['_id']
    source = document['_source']
    if serialized_enrollment == source:
        return False
    # Convert OrderedDict to dict so the diff is against plain JSON structures
    reserialized_enrollment = json.loads(json.dumps(serialized_enrollment))
    diff = make_patch(source, reserialized_enrollment).patch
    serialized_diff = json.dumps(diff, indent=" ")
    log.info("Difference found for enrollment %s: %s", enrollment, serialized_diff)
    return True
def update_percolate_memberships(user, source_type):
    """
    Updates membership in a PercolateQuery

    Args:
        user (User): A User to check for membership changes
        source_type (str): The type of the percolate query to filter on
    """
    # ensure we have a membership for each of the queries so we can acquire a lock on them
    live_queries = list(
        PercolateQuery.objects.filter(source_type=source_type).exclude(is_deleted=True)
    )
    membership_ids = _ensure_memberships_for_queries(live_queries, user)
    # if there are no percolate queries or memberships then there's nothing to do
    if membership_ids:
        _update_memberships([query.id for query in live_queries], membership_ids, user)
def _ensure_memberships_for_queries(percolate_queries, user):
    """
    Ensures PercolateQueryMemberships exist for the user on the designated PercolateQueries

    Args:
        percolate_queries (list of PercolateQuery): A list of PercolateQuerys to add PercolateQueryMemberships for
        user (User): The user to ensure memberships for

    Returns:
        list of int: ids of the (created or preexisting) memberships, one per query,
            in the same order as ``percolate_queries``
    """
    # get_or_create returns (instance, created); only the instance id matters here
    return [
        PercolateQueryMembership.objects.get_or_create(query=query, user=user)[0].id
        for query in percolate_queries
    ]
def _update_memberships(percolate_query_ids, membership_ids, user, force_save=False):
    """
    Atomically determine and update memberships

    Args:
        percolate_query_ids (set of int): a set of PercolateQuery.id
        membership_ids (list of int): A list of ids for PercolateQueryMemberships to update
        user (User): A User to check for membership changes
        force_save (bool): True if membership saves should be force even if no change
    """
    with transaction.atomic():
        # select_for_update row-locks the memberships for the duration of the
        # transaction so concurrent updates can't interleave
        memberships = PercolateQueryMembership.objects.filter(id__in=membership_ids).select_for_update()
        # limit the query_ids to the queries we are trying to update
        query_ids = set()
        # union of percolate matches across all of the user's enrollments
        for enrollment in user.programenrollment_set.all():
            query_ids.update(set(_search_percolate_queries(enrollment)))
        query_ids.intersection_update(percolate_query_ids)
        for membership in memberships:
            # only update if there's a delta in membership status
            is_member = membership.query_id in query_ids
            if force_save or (membership.is_member is not is_member):
                membership.is_member = is_member
                # flag for downstream consumers that this row changed
                membership.needs_update = True
                membership.save()
def populate_query_memberships(percolate_query_id):
    """
    Populates PercolateQueryMemberships for the given query and enrollments

    Args:
        percolate_query_id (int): Database id for the PercolateQuery to populate
    """
    query = PercolateQuery.objects.get(id=percolate_query_id)
    # iterator() streams users instead of loading them all into memory;
    # _ensure_memberships_for_queries expects a list, even for one query
    for user in User.objects.filter(is_active=True).iterator():
        membership_ids = _ensure_memberships_for_queries([query], user)
        _update_memberships({query.id}, membership_ids, user, force_save=True)
| bsd-3-clause | 214296d8bfb12ee26ac7753aa8f04467 | 34.486425 | 116 | 0.684221 | 4.017674 | false | false | false | false |
mitodl/micromasters | search/migrations/0004_add_percolate_query_membership.py | 1 | 1420 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-10-24 13:55
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Adds the PercolateQueryMembership join table, tracking for each
    # (user, query) pair whether the user currently matches the query.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('search', '0003_percolatequery_source_type'),
    ]
    operations = [
        migrations.CreateModel(
            name='PercolateQueryMembership',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_on', models.DateTimeField(auto_now_add=True)),
                ('updated_on', models.DateTimeField(auto_now=True)),
                ('is_member', models.BooleanField(default=False)),
                ('needs_update', models.BooleanField(default=False)),
                ('query', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='percolate_memberships', to='search.PercolateQuery')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='percolate_memberships', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # one membership row per (user, query) pair
        migrations.AlterUniqueTogether(
            name='percolatequerymembership',
            unique_together=set([('user', 'query')]),
        ),
    ]
| bsd-3-clause | b2f4c1234550881db7fd9955ea174047 | 40.764706 | 156 | 0.632394 | 4.127907 | false | false | false | false |
mitodl/micromasters | backends/pipeline_api.py | 1 | 5802 | """
APIs for extending the python social auth pipeline
"""
import logging
from django.shortcuts import redirect
from django_redis import get_redis_connection
from rolepermissions.checkers import has_role
from social_core.exceptions import AuthException, AuthFailed
from backends.base import BaseEdxOAuth2
from backends.utils import update_email
from dashboard.api import CACHE_KEY_FAILURE_NUMS_BY_USER, FIELD_USER_ID_BASE_STR, CACHE_KEY_FAILED_USERS_NOT_TO_UPDATE
from micromasters.utils import now_in_utc
from profiles.models import Profile
from profiles.util import split_name
from roles.models import (
Instructor,
Staff,
)
log = logging.getLogger(__name__)
def update_profile_from_edx(backend, user, response, is_new, *args, **kwargs):
    # pylint: disable=unused-argument
    """
    Gets profile information from EDX and saves them in the user profile

    Args:
        backend (social.backends.oauth.BaseOAuth2): the python social auth backend
        user (User): user object
        response (dict): dictionary of the user information coming
            from previous functions in the pipeline
        is_new (bool): whether the authenticated user created a new local instance

    Returns:
        None
    """
    # this function is completely skipped if the backend is not edx or
    # the user has not created now
    if not isinstance(backend, BaseEdxOAuth2):
        return
    # staff/instructors land on the learners search page; everyone else on the dashboard
    if has_role(user, [Staff.ROLE_ID, Instructor.ROLE_ID]):
        next_relative_url = "/learners"
    else:
        next_relative_url = "/dashboard"
    # honor an explicit 'next' stored in the session, falling back to the role default
    next_url = backend.strategy.session.load().get('next') or backend.strategy.session.get('next')
    if not next_url:
        next_url = next_relative_url
    backend.strategy.session_set('next', next_url)
    # 'edx_profile' is placed in kwargs by check_edx_verified_email earlier in the pipeline
    user_profile_edx = kwargs.get('edx_profile')
    update_email(user_profile_edx, user)
    # only first-time logins copy the full edX profile into the local Profile
    if not is_new:
        return
    try:
        user_profile = Profile.objects.get(user=user)
    except Profile.DoesNotExist:
        # this should never happen, since the profile is created with a signal
        # right after the user is created
        log.error('No profile found for the user %s', user.username)
        return
    name = user_profile_edx.get('name', "")
    user_profile.edx_name = name
    user_profile.first_name, user_profile.last_name = split_name(name)
    user_profile.preferred_name = name
    user_profile.edx_bio = user_profile_edx.get('bio')
    user_profile.country = user_profile_edx.get('country')
    user_profile.edx_requires_parental_consent = user_profile_edx.get('requires_parental_consent')
    user_profile.edx_level_of_education = user_profile_edx.get('level_of_education')
    user_profile.edx_goals = user_profile_edx.get('goals')
    user_profile.edx_language_proficiencies = user_profile_edx.get('language_proficiencies')
    try:
        # best-effort: take the first proficiency's code as the preferred language
        user_profile.preferred_language = user_profile.edx_language_proficiencies[0]['code']
    except (IndexError, ValueError, KeyError, TypeError):
        pass
    user_profile.gender = user_profile_edx.get('gender')
    user_profile.edx_mailing_address = user_profile_edx.get('mailing_address')
    # logging in through edX implies acceptance of the terms of service
    user_profile.agreed_to_terms_of_service = True
    user_profile.save()
    log.debug(
        'Profile for user "%s" updated with values from EDX %s',
        user.username,
        user_profile_edx
    )
def check_edx_verified_email(backend, response, details, *args, **kwargs):  # pylint: disable=unused-argument
    """Fetch the learner's edX account record and check that it is active (email verified)"""
    if not isinstance(backend, BaseEdxOAuth2):
        # this pipeline step only applies to edX-based backends
        return {}
    edx_username = details.get('username')
    token = response.get('access_token')
    if not token:
        # this should never happen for the edx oauth provider, but just in case...
        raise AuthException('Missing access token for the edX user {0}'.format(edx_username))
    account_url = backend.get_url('/api/user/v1/accounts/{0}'.format(edx_username))
    auth_headers = {
        "Authorization": "Bearer {}".format(token),
    }
    edx_account = backend.get_json(account_url, headers=auth_headers)
    if edx_account.get('is_active'):
        return {'edx_profile': edx_account}
    # inactive account: short-circuit the pipeline by returning a redirect response
    return redirect('verify-email')
def set_last_update(details, *args, **kwargs):  # pylint: disable=unused-argument
    """
    Pipeline function to add extra information about when the social auth
    profile has been updated.

    Args:
        details (dict): dictionary of informations about the user

    Returns:
        dict: updated details dictionary
    """
    # stamp the details with the current UTC time as a unix timestamp
    refreshed_at = now_in_utc().timestamp()
    details['updated_at'] = refreshed_at
    return details
def flush_redis_cache(*, user, **kwargs):  # pylint: disable=unused-argument
    """
    flush the redis cache on a new login

    Args:
        user (User): user object
    """
    if not user:
        return
    # a successful login resets the invalid-credential bookkeeping for this user
    redis_con = get_redis_connection("redis")
    failure_field = FIELD_USER_ID_BASE_STR.format(user.id)
    redis_con.hdel(CACHE_KEY_FAILURE_NUMS_BY_USER, failure_field)
    redis_con.srem(CACHE_KEY_FAILED_USERS_NOT_TO_UPDATE, user.id)
def limit_one_auth_per_backend(
        *, backend, user, strategy, uid, **kwargs  # pylint: disable=unused-argument
):
    """Limit the user to one social auth account per backend"""
    if not user:
        return {}
    user_storage = strategy.storage.user
    linked_auths = user_storage.get_social_auth_for_user(user, backend.name)
    # a stored auth whose uid differs from the incoming one means a second
    # account is (or is being) linked to the same local user for this backend
    for linked_auth in linked_auths:
        if linked_auth.uid != uid:
            raise AuthFailed(backend.name, "Another edX account is already linked to your MicroMasters account.")
    return {}
| bsd-3-clause | 0483f791ff2e70e09dcee06a583a9468 | 33.742515 | 118 | 0.684936 | 3.807087 | false | false | false | false |
mitodl/micromasters | roles/signals_test.py | 1 | 3476 | """
Tests for signals
"""
from django.db.models.signals import post_save
from factory.django import mute_signals
from rolepermissions.checkers import (
has_role,
has_permission,
has_object_permission,
)
from courses.factories import ProgramFactory
from roles.models import Role
from micromasters.factories import UserFactory
from search.base import MockedESTestCase
class SignalsTest(MockedESTestCase):
    """
    Tests for signals triggered by the role assignment
    """
    def setUp(self):
        # every test starts with one user and one program, and no role assigned yet
        super().setUp()
        self.user = UserFactory.create()
        self.program = ProgramFactory.create()
    def assert_standard_role_permissions(self, expected_bool, program=None):
        """
        Helper function to assert role and permissions assignment

        Checks that the 'staff' role, the global 'can_advance_search' permission,
        and the per-program object permission (on `program`, defaulting to
        self.program) all equal `expected_bool`.
        """
        assert isinstance(expected_bool, bool)
        assert has_role(self.user, 'staff') is expected_bool
        assert has_permission(self.user, 'can_advance_search') is expected_bool
        assert has_object_permission('can_advance_search', self.user, program or self.program) is expected_bool
    def test_assign_role(self):
        """
        Assigning the role with the model triggers a signal
        to assign the correspondent role to the user.
        """
        self.assert_standard_role_permissions(False)
        Role.objects.create(
            program=self.program,
            user=self.user,
            role='staff',
        )
        self.assert_standard_role_permissions(True)
    def test_presave_removes_current_role(self):
        """
        Updating the role in the model triggers a pre_save
        signal that removes the previous role from the user.
        """
        mm_role = Role.objects.create(
            program=self.program,
            user=self.user,
            role='staff',
        )
        self.assert_standard_role_permissions(True)
        # muting the post_save signal to avoid the reassignment of the roles and related permissions
        # in this way only the pre_save will run and the effect will be only to remove the old role
        with mute_signals(post_save):
            mm_role.role = 'instructor'
            mm_role.save()
        self.assert_standard_role_permissions(False)
    def test_postdelete_removes_role(self):
        """
        If role assignment is removed, a signal takes
        care of removing the role from the user.
        """
        mm_role = Role.objects.create(
            program=self.program,
            user=self.user,
            role='staff',
        )
        self.assert_standard_role_permissions(True)
        mm_role.delete()
        self.assert_standard_role_permissions(False)
    def test_postdelete_multiple_programs_role(self):
        """
        If role assignment is removed for one program but the
        same role is assigned to another program, the user keeps the role.
        """
        existing_program = self.program
        new_program = ProgramFactory.create()
        mm_role_1 = Role.objects.create(
            program=existing_program,
            user=self.user,
            role='staff',
        )
        Role.objects.create(
            program=new_program,
            user=self.user,
            role='staff',
        )
        self.assert_standard_role_permissions(True, existing_program)
        self.assert_standard_role_permissions(True, new_program)
        # deleting one assignment must not strip the role still granted via new_program
        mm_role_1.delete()
        self.assert_standard_role_permissions(True, new_program)
| bsd-3-clause | 4dde3d37bb68d86b2f68834c73a3f253 | 32.104762 | 111 | 0.630035 | 4.291358 | false | true | false | false |
mitodl/micromasters | exams/management/commands/import_edx_exam_grades.py | 1 | 4367 | """
Import proctored exam grades from edx
"""
import csv
import argparse
from django.contrib.auth.models import User
from django.core.management import BaseCommand, CommandError
from courses.models import Course
from exams.models import ExamRun, ExamAuthorization
from exams.constants import EXAM_GRADE_PASS, BACKEND_MITX_ONLINE
from grades.models import ProctoredExamGrade
from micromasters.utils import now_in_utc
from social_django.models import UserSocialAuth
class Command(BaseCommand):
    """Parses a csv with exam grades creating or updating ProctoredExamGrade"""
    help = "Parses a csv with exam grades and creates ProctoredExamGrade"
    def add_arguments(self, parser):
        """Register the positional CSV file argument (opened for reading)."""
        parser.add_argument('csvfile', type=argparse.FileType('r'), help='')
    def handle(self, *args, **kwargs):  # pylint: disable=unused-argument,too-many-locals
        """
        Process each CSV row: resolve the user by MITx Online social-auth uid,
        find the most recent schedulable exam run for the row's course, then
        either flag a no-show or create/update a ProctoredExamGrade.

        Rows with an unknown user, missing authorization, or an unparseable
        score are reported to stdout and skipped; an unknown course or a
        course with no eligible exam run aborts the command via CommandError.
        Prints a summary of created vs. already-existing grades at the end.
        """
        csvfile = kwargs.get('csvfile')
        reader = csv.DictReader(csvfile)
        grade_count = 0
        existing_grades = 0
        for row in reader:
            try:
                user_social_auth = UserSocialAuth.objects.get(uid=row['username'], provider=BACKEND_MITX_ONLINE)
            except UserSocialAuth.DoesNotExist:
                self.stdout.write(
                    self.style.ERROR('Could not find social auth for user for username {}'.format(row['username']))
                )
                continue
            user = user_social_auth.user
            course_id = row['course_id']
            try:
                course = Course.objects.get(id=course_id)
            except Course.DoesNotExist:
                raise CommandError(
                    'Could not find a course with number "{}"'.format(course_id)
                )
            # should pick the latest past exam run
            now = now_in_utc()
            exam_run = ExamRun.objects.filter(
                course=course,
                date_first_schedulable__lte=now
            ).order_by('-date_last_schedulable').first()
            if exam_run is None:
                raise CommandError(
                    'There are no eligible exam runs for course "{}"'.format(course.title)
                )
            try:
                exam_authorization = ExamAuthorization.objects.get(user=user, exam_run=exam_run)
            except ExamAuthorization.DoesNotExist:
                self.stdout.write(
                    self.style.ERROR('Could not find authorization for user {} and exam run {}'.format(
                        user.username,
                        exam_run.id
                    ))
                )
                continue
            if int(row['no_show']):
                # learner did not show up: flag the authorization; no grade row is written
                exam_authorization.exam_taken = True
                exam_authorization.exam_no_show = True
                exam_authorization.save()
            else:
                try:
                    score = float(row['score'])
                except ValueError:
                    self.stdout.write(
                        self.style.ERROR('Failed to create grade: empty score for user {} and exam run {}'.format(
                            user.username,
                            exam_run.id
                        ))
                    )
                    continue
                defaults = {
                    'passing_score': exam_run.passing_score,
                    'score': score,
                    'grade': row['grade'],
                    # score arrives as 0-100; stored percentage is 0-1
                    'percentage_grade': score / 100.0 if score else 0,
                    'passed': row['grade'].lower() == EXAM_GRADE_PASS,
                    'row_data': row,
                    'exam_date': now_in_utc()
                }
                _, created = ProctoredExamGrade.objects.update_or_create(
                    user=user,
                    course=course,
                    exam_run=exam_run,
                    defaults=defaults
                )
                if created:
                    grade_count += 1
                    # only a newly imported grade marks the exam as taken here
                    exam_authorization.exam_taken = True
                    exam_authorization.save()
                else:
                    existing_grades += 1
        result_messages = [
            'Total exam grades created: {}'.format(grade_count),
            'Total number of modified grades: {}'.format(existing_grades)
        ]
        self.stdout.write(self.style.SUCCESS('\n'.join(result_messages)))
| bsd-3-clause | 081e64cd404c0d13e2c088cb39fb6d1f | 37.646018 | 115 | 0.524387 | 4.710895 | false | false | false | false |
mitodl/micromasters | dashboard/api.py | 1 | 31974 | """
Apis for the dashboard
"""
import datetime
import logging
from urllib.parse import urljoin
import pytz
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured, ObjectDoesNotExist
from django.db import transaction
from django.db.models import Prefetch
from django.urls import reverse
from django_redis import get_redis_connection
from edx_api.client import EdxApi
from backends.constants import COURSEWARE_BACKEND_URL, BACKEND_EDX_ORG, BACKEND_MITX_ONLINE
from backends.exceptions import InvalidCredentialStored
from backends import utils
from courses.models import Program, ElectiveCourse, CourseRun
from courses.utils import format_season_year_for_course_run
from dashboard.api_edx_cache import CachedEdxDataApi
from dashboard.constants import DEDP_PROGRAM_TITLE
from dashboard.models import ProgramEnrollment
from dashboard.utils import get_mmtrack
from financialaid.serializers import FinancialAidDashboardSerializer
from grades import api
from grades.models import FinalGrade
from grades.serializers import ProctoredExamGradeSerializer
from exams.models import ExamAuthorization, ExamRun
from micromasters.utils import now_in_utc
# key that stores user_key and number of failures in a hash
from profiles.api import get_social_auth
CACHE_KEY_FAILURE_NUMS_BY_USER = "update_cache_401_failure_numbers"
# key that stores user ids to exclude from cache update
CACHE_KEY_FAILED_USERS_NOT_TO_UPDATE = "failed_cache_update_users_not_to_update"
FIELD_USER_ID_BASE_STR = "user_{0}"
log = logging.getLogger(__name__)
# pylint: disable=too-many-branches
class CourseStatus:
    """
    Possible statuses for a course for a user. These are the course run statuses used in the dashboard API.
    """
    PASSED = 'passed'
    NOT_PASSED = 'not-passed'
    CURRENTLY_ENROLLED = 'currently-enrolled'
    WILL_ATTEND = 'will-attend'
    CAN_UPGRADE = 'can-upgrade'
    MISSED_DEADLINE = 'missed-deadline'
    OFFERED = 'offered'
    PAID_BUT_NOT_ENROLLED = 'paid-but-not-enrolled'

    @classmethod
    def all_statuses(cls):
        """Helper to get all the statuses"""
        return [
            cls.PASSED,
            cls.NOT_PASSED,
            cls.CURRENTLY_ENROLLED,
            cls.PAID_BUT_NOT_ENROLLED,
            cls.CAN_UPGRADE,
            cls.OFFERED,
            cls.WILL_ATTEND,
            cls.MISSED_DEADLINE,
        ]
class CourseRunStatus:
    """
    Possible statuses for a course run for a user. These are used internally.

    Unlike CourseStatus these values never leave the dashboard API module;
    get_status_for_courserun produces them and get_info_for_course maps them
    to CourseStatus values for display.
    """
    NOT_ENROLLED = 'not-enrolled'
    CURRENTLY_ENROLLED = 'currently-enrolled'
    # final grades are frozen; the outcome still has to be inspected
    CHECK_IF_PASSED = 'check-if-passed'
    WILL_ATTEND = 'will-attend'
    CAN_UPGRADE = 'can-upgrade'
    MISSED_DEADLINE = 'missed-deadline'
    NOT_PASSED = 'not-passed'
    PAID_BUT_NOT_ENROLLED = 'paid-but-not-enrolled'
class CourseFormatConditionalFields:
    """
    The formatting of a course run is dependent
    on the CourseStatus status passed on the function.
    There are some fields that are common and others
    that depend on the status. Also the name of the fields changes.
    This class contains the association between the CourseStatus status
    that need specific fields, the field associated correspondent
    to a course run and the new name they need to have.
    """
    ASSOCIATED_FIELDS = {
        CourseStatus.OFFERED: [
            {
                'course_run_field': 'enrollment_start',
                'format_field': 'enrollment_start_date'
            },
            {
                'course_run_field': 'fuzzy_enrollment_start_date',
                'format_field': 'fuzzy_enrollment_start_date'
            },
        ],
        CourseStatus.PAID_BUT_NOT_ENROLLED: [
            {
                'course_run_field': 'enrollment_start',
                'format_field': 'enrollment_start_date'
            },
        ]
    }

    @classmethod
    def get_assoc_field(cls, course_status):
        """
        Method to get from the ASSOCIATED_FIELDS dict

        Returns an empty list for any valid status without extra fields;
        raises ImproperlyConfigured for an unknown status.
        """
        if course_status in CourseStatus.all_statuses():
            return cls.ASSOCIATED_FIELDS.get(course_status, [])
        log.error('%s not defined in Courses.api.CourseStatus', course_status)
        raise ImproperlyConfigured(
            '{} not defined in Courses.api.CourseStatus'.format(course_status))
class CourseRunUserStatus:
    """
    Representation of a course run status for a specific user
    """
    def __init__(self, status, course_run=None):
        self.status = status
        self.course_run = course_run

    def __repr__(self):
        # show a quoted "None" placeholder when no run is attached
        run_title = self.course_run.title if self.course_run is not None else '"None"'
        return "<CourseRunUserStatus for course {course} status {status} at {address}>".format(
            status=self.status,
            course=run_title,
            address=hex(id(self)),
        )
def get_user_program_info(user, *, update_cache=True):
    """
    Provides a detailed serialization all of a User's enrolled Programs with enrollment/grade info

    Args:
        user (User): A User
        update_cache (bool): if True, update the cache for the backends

    Returns:
        dict: with keys "programs" (list of serialized enrolled programs) and
            "is_edx_data_fresh" (bool)
    """
    if update_cache:
        # refresh the cached edX data for both courseware backends before serializing
        update_cache_for_backend(user, BACKEND_EDX_ORG)
        update_cache_for_backend(user, BACKEND_MITX_ONLINE)
    response_data = {
        "programs": [],
        "is_edx_data_fresh": CachedEdxDataApi.are_all_caches_fresh(user)
    }
    # prefetch course runs (excluding discontinued ones) to avoid per-course queries
    all_programs = (
        Program.objects.filter(live=True, programenrollment__user=user).prefetch_related(
            Prefetch(
                'course_set__courserun_set',
                queryset=CourseRun.objects.not_discontinued()
            )
        )
    )
    for program in all_programs:
        mmtrack_info = get_mmtrack(user, program)
        response_data['programs'].append(get_info_for_program(mmtrack_info))
    return response_data
def get_info_for_program(mmtrack):
    """
    Helper function that formats a program with all the courses and runs

    Args:
        mmtrack (dashboard.utils.MMTrack): a instance of all user information about a program

    Returns:
        dict: a dictionary containing information about the program
    """
    # basic data for the program
    data = {
        "id": mmtrack.program.pk,
        "description": mmtrack.program.description,
        "title": mmtrack.program.title,
        "financial_aid_availability": mmtrack.financial_aid_available,
        "has_exams": mmtrack.has_exams,
        "courses": [],
        "exam_card_status": mmtrack.get_exam_card_status(),
        "grade_average": mmtrack.calculate_final_grade_average(),
        "certificate": mmtrack.get_program_certificate_url(),
        # programs with electives require num_required_courses; otherwise every course counts
        "number_courses_required": (
            mmtrack.program.num_required_courses
            if mmtrack.program.electives_set.exists()
            else mmtrack.program.course_set.count()
        ),
        "number_courses_passed": mmtrack.get_number_of_passed_courses_for_completion(),
        "has_mitxonline_courses": mmtrack.program.has_mitxonline_courses
    }
    # the following keys are only present when the corresponding feature applies
    if mmtrack.financial_aid_available:
        data["financial_aid_user_info"] = FinancialAidDashboardSerializer.serialize(mmtrack.user, mmtrack.program)
    if mmtrack.has_exams:
        data["grade_records_url"] = reverse('grade_records', args=[mmtrack.get_program_enrollment().id])
    program_letter_url = mmtrack.get_program_letter_url()
    if program_letter_url:
        data["program_letter_url"] = program_letter_url
    for course in mmtrack.program.course_set.all():
        data['courses'].append(
            get_info_for_course(course, mmtrack)
        )
    return data
def get_info_for_course(course, mmtrack):
    """
    Checks the status of a course given the status of all its runs

    Builds the serialized course dict for the dashboard: static course fields,
    exam-related info, and a "runs" list where each run carries a CourseStatus
    value derived from the internal CourseRunStatus computed per run.

    Args:
        course (Course): a course object
        mmtrack (dashboard.utils.MMTrack): a instance of all user information about a program

    Returns:
        dict: dictionary representing the course status for the user
    """
    # pylint: disable=too-many-statements
    # data about the course to be returned anyway
    course_data = {
        "id": course.pk,
        "title": course.title,
        "position_in_program": course.position_in_program,
        "description": course.description,
        "prerequisites": course.prerequisites,
        "has_contact_email": bool(course.contact_email),
        "can_schedule_exam": is_exam_schedulable(mmtrack.user, course),
        "exam_register_end_date": get_exam_register_end_date(course),
        "exam_course_key": get_edx_exam_course_key(mmtrack.user, course),
        "exams_schedulable_in_future": get_future_exam_runs(course),
        "exam_date_next_semester": get_exam_date_next_semester(course),
        "current_exam_dates": get_current_exam_run_dates(course),
        "has_to_pay": has_to_pay_for_exam(mmtrack, course),
        "runs": [],
        "proctorate_exams_grades": ProctoredExamGradeSerializer(
            mmtrack.get_course_proctorate_exam_results(course), many=True
        ).data,
        "is_elective": ElectiveCourse.objects.filter(course=course).exists(),
        "has_exam": course.has_exam,
        "certificate_url": get_certificate_url(mmtrack, course),
        "overall_grade": mmtrack.get_overall_final_grade_for_course(course)
    }
    def _add_run(run, mmtrack_, status):
        """Helper function to add a course run to the status dictionary"""
        formatted_run = format_courserun_for_dashboard(
            run,
            status,
            mmtrack=mmtrack_,
            position=len(course_data['runs']) + 1
        )
        if run.is_current and mmtrack_.is_enrolled_mmtrack(formatted_run['course_id']):
            # Prepend current run on the top because user can pay and enroll for current run as well as
            # future run and the dashboard UI picks first run to display. User should be able to
            # see current run progress on dashboard UI.
            course_data['runs'] = [formatted_run] + course_data['runs']
        else:
            course_data['runs'].append(formatted_run)
    with transaction.atomic():
        runs_qs = course.courserun_set.not_discontinued()
        if not runs_qs.count():
            return course_data
        # get all the run statuses
        run_statuses = [get_status_for_courserun(course_run, mmtrack)
                        for course_run in runs_qs]
        # sort them by end date; runs with no end date sort as far-future (MAXYEAR)
        run_statuses.sort(key=lambda x: x.course_run.end_date or
                          datetime.datetime(datetime.MAXYEAR, 1, 1, tzinfo=pytz.utc), reverse=True)
        # pick the first `not enrolled` or the first
        for run_status in run_statuses:
            if run_status.status not in [CourseRunStatus.NOT_ENROLLED, CourseRunStatus.PAID_BUT_NOT_ENROLLED]:
                break
        else:
            run_status = run_statuses[0]
        # remove the picked run_status from the list
        run_statuses.remove(run_status)
        if run_status.status == CourseRunStatus.NOT_ENROLLED:
            next_run = course.first_unexpired_run()
            if next_run is not None:
                _add_run(next_run, mmtrack, CourseStatus.OFFERED)
        elif run_status.status == CourseRunStatus.NOT_PASSED:
            # NOTE: CourseRunStatus.NOT_PASSED shares the same string value as
            # CourseStatus.NOT_PASSED, so the formatted run is equivalent either way
            _add_run(run_status.course_run, mmtrack, CourseRunStatus.NOT_PASSED)
            next_run = course.first_unexpired_run()
            if next_run is not None:
                _add_run(next_run, mmtrack, CourseStatus.OFFERED)
        elif run_status.status == CourseRunStatus.MISSED_DEADLINE:
            _add_run(run_status.course_run, mmtrack, CourseStatus.MISSED_DEADLINE)
            next_run = course.first_unexpired_run()
            if next_run is not None:
                _add_run(next_run, mmtrack, CourseStatus.OFFERED)
        elif run_status.status == CourseRunStatus.CURRENTLY_ENROLLED:
            _add_run(run_status.course_run, mmtrack, CourseStatus.CURRENTLY_ENROLLED)
        # check if we need to check the certificate
        elif run_status.status == CourseRunStatus.CHECK_IF_PASSED:
            if not mmtrack.has_passed_course(run_status.course_run.edx_course_key):
                _add_run(run_status.course_run, mmtrack, CourseRunStatus.NOT_PASSED)
            else:
                _add_run(run_status.course_run, mmtrack, CourseStatus.PASSED)
            # we want to return the next run, so the user can re-enroll regardless
            # of whether they passed or failed
            next_run = course.first_unexpired_run()
            if next_run is not None:
                _add_run(next_run, mmtrack, CourseStatus.OFFERED)
        elif run_status.status == CourseRunStatus.WILL_ATTEND:
            _add_run(run_status.course_run, mmtrack, CourseStatus.WILL_ATTEND)
        elif run_status.status == CourseRunStatus.CAN_UPGRADE:
            _add_run(run_status.course_run, mmtrack, CourseStatus.CAN_UPGRADE)
        elif run_status.status == CourseRunStatus.PAID_BUT_NOT_ENROLLED:
            if course.program.financial_aid_availability:
                next_run = course.first_unexpired_run()
                if next_run is not None:
                    _add_run(next_run, mmtrack, CourseStatus.PAID_BUT_NOT_ENROLLED)
            else:
                _add_run(run_status.course_run, mmtrack, CourseStatus.PAID_BUT_NOT_ENROLLED)
        # add all the other runs with status != NOT_ENROLLED
        # the first one (or two in some cases) has been added with the logic before
        for run_status in run_statuses:
            if run_status.status == CourseRunStatus.CHECK_IF_PASSED:
                if mmtrack.has_passed_course(run_status.course_run.edx_course_key):
                    # in this case the user might have passed the course also in the past
                    _add_run(run_status.course_run, mmtrack, CourseStatus.PASSED)
                else:
                    # any other status means that the student never passed the course run
                    _add_run(run_status.course_run, mmtrack, CourseStatus.NOT_PASSED)
            elif run_status.status == CourseRunStatus.MISSED_DEADLINE:
                _add_run(run_status.course_run, mmtrack, CourseStatus.MISSED_DEADLINE)
            elif run_status.status == CourseRunStatus.CAN_UPGRADE:
                _add_run(run_status.course_run, mmtrack, CourseStatus.CAN_UPGRADE)
            elif run_status.status == CourseRunStatus.CURRENTLY_ENROLLED:
                _add_run(run_status.course_run, mmtrack, CourseStatus.CURRENTLY_ENROLLED)
    return course_data
def get_final_grade(mmtrack, course_run):
    """
    returns final grade if available otherwise freezes the grade.

    Args:
        mmtrack (dashboard.utils.MMTrack): a instance of all user information about a program
        course_run (CourseRun): a course run

    Returns:
        final_grade: an object representing the FinalGrade

    Note:
        freeze_user_final_grade is called with raise_on_exception=True, so any
        error while freezing propagates to the caller.
    """
    try:
        final_grade = mmtrack.get_required_final_grade(course_run.edx_course_key)
    except FinalGrade.DoesNotExist:
        # this is a very special case that happens if the user has logged in
        # for the first time after we have already frozen the final grades
        log.warning(
            'The user "%s" doesn\'t have a final grade for the course run "%s" '
            'but the course run has already been frozen. Trying to freeze the user now.',
            mmtrack.user.username,
            course_run.edx_course_key,
        )
        final_grade = api.freeze_user_final_grade(mmtrack.user, course_run, raise_on_exception=True)
    return final_grade
def get_status_for_courserun(course_run, mmtrack):  # pylint: disable=too-many-return-statements
    """
    Checks the status of a course run for a user given her enrollments

    The branches are ordered: paid final grades first, then unpaid final
    grades, then no enrollment at all; only after that are live enrollments
    examined (verified vs. audit track).

    Args:
        course_run (CourseRun): a course run
        mmtrack (dashboard.utils.MMTrack): a instance of all user information about a program

    Returns:
        CourseRunUserStatus: an object representing the run status for the user
    """
    # a paid final grade is conclusive: only the pass/fail outcome remains to check
    if mmtrack.has_paid_final_grade(course_run.edx_course_key):
        return CourseRunUserStatus(CourseRunStatus.CHECK_IF_PASSED, course_run)
    elif mmtrack.has_final_grade(course_run.edx_course_key):
        # unpaid final grade: upgrade is only offered for passed, still-upgradable runs
        if course_run.is_upgradable:
            final_grade = get_final_grade(mmtrack, course_run)
            if final_grade.passed:
                return CourseRunUserStatus(CourseRunStatus.CAN_UPGRADE, course_run)
            else:
                return CourseRunUserStatus(CourseRunStatus.NOT_PASSED, course_run)
        else:
            return CourseRunUserStatus(CourseRunStatus.MISSED_DEADLINE, course_run)
    elif not mmtrack.is_enrolled(course_run.edx_course_key):
        if mmtrack.has_paid(course_run.edx_course_key):
            return CourseRunUserStatus(CourseRunStatus.PAID_BUT_NOT_ENROLLED, course_run)
        return CourseRunUserStatus(CourseRunStatus.NOT_ENROLLED, course_run)
    status = None
    if mmtrack.is_enrolled_mmtrack(course_run.edx_course_key):
        # enrolled in the paid/verified track
        if course_run.is_current:
            status = CourseRunStatus.CURRENTLY_ENROLLED
        elif course_run.is_future or course_run.is_promised:
            status = CourseRunStatus.WILL_ATTEND
        # the following statement needs to happen only with the new version of the algorithm
        elif course_run.has_frozen_grades:
            # be sure that the user has a final grade or freeze now
            if not mmtrack.has_final_grade(course_run.edx_course_key):
                api.freeze_user_final_grade(mmtrack.user, course_run, raise_on_exception=True)
            status = CourseRunStatus.CHECK_IF_PASSED
        # this last check needs to be done as last one
        elif course_run.is_past:
            # At this point the course has no frozen final grades yet
            status = CourseRunStatus.CURRENTLY_ENROLLED
        else:
            raise ImproperlyConfigured(
                'The course {0} results are not either current, past, or future at the same time'.format(
                    course_run.edx_course_key
                )
            )
    else:
        # enrolled but not in the paid track (audit)
        if not course_run.is_past:
            if course_run.is_upgradable:
                status = CourseRunStatus.CAN_UPGRADE
            else:
                status = CourseRunStatus.MISSED_DEADLINE
        else:
            if not course_run.is_upgradable:
                status = CourseRunStatus.MISSED_DEADLINE
            else:
                if not course_run.has_frozen_grades:
                    status = CourseRunStatus.CAN_UPGRADE
                else:
                    # frozen grades: upgrading only makes sense for a passed run
                    final_grade = get_final_grade(mmtrack, course_run)
                    if final_grade.passed:
                        status = CourseRunStatus.CAN_UPGRADE
                    else:
                        status = CourseRunStatus.NOT_PASSED
    return CourseRunUserStatus(
        status=status,
        course_run=course_run
    )
def format_courserun_for_dashboard(course_run, status_for_user, mmtrack, position=1):
    """
    Helper function that formats a course run adding informations to the fields coming from the DB

    Args:
        course_run (CourseRun): a course run
        status_for_user (str): a string representing the status of a course for the user
        mmtrack (dashboard.utils.MMTrack): a instance of all user information about a program
        position (int): The position of the course run within the list

    Returns:
        dict: a dictionary containing information about the course, or None if
            course_run is None
    """
    if course_run is None:
        return None
    formatted_run = {
        'id': course_run.id,
        'course_id': course_run.edx_course_key,
        'title': course_run.title,
        'status': status_for_user,
        'has_paid': mmtrack.has_paid(course_run.edx_course_key),
        'position': position,
        'course_start_date': course_run.start_date,
        'course_end_date': course_run.end_date,
        'course_upgrade_deadline': course_run.upgrade_deadline,
        'fuzzy_start_date': course_run.fuzzy_start_date,
        'enrollment_url': course_run.enrollment_url,
        'courseware_backend': course_run.courseware_backend,
        'year_season': format_season_year_for_course_run(course_run),
    }
    # check if there are extra fields to pull in
    extra_fields = CourseFormatConditionalFields.get_assoc_field(status_for_user)
    for extra_field in extra_fields:
        formatted_run[extra_field['format_field']] = getattr(course_run, extra_field['course_run_field'])
    if status_for_user in (CourseStatus.PASSED, CourseStatus.NOT_PASSED):
        formatted_run['final_grade'] = mmtrack.get_final_grade_percent(course_run.edx_course_key)
    # if the course is can-upgrade, we need to show the current grade if it is in progress
    # or the final grade if it is final
    elif status_for_user == CourseStatus.CAN_UPGRADE:
        if mmtrack.has_final_grade(course_run.edx_course_key):
            formatted_run['final_grade'] = mmtrack.get_final_grade_percent(course_run.edx_course_key)
        elif course_run.course.should_display_progress:
            formatted_run['current_grade'] = mmtrack.get_current_grade(course_run.edx_course_key)
    # any other status but "offered" should have the current grade
    elif status_for_user != CourseStatus.OFFERED:
        if mmtrack.has_final_grade(course_run.edx_course_key):
            formatted_run['final_grade'] = mmtrack.get_final_grade_percent(course_run.edx_course_key)
        elif course_run.course.should_display_progress:
            formatted_run['current_grade'] = mmtrack.get_current_grade(course_run.edx_course_key)
    return formatted_run
def is_exam_schedulable(user, course):
    """
    Check if a course is ready to schedule an exam or not
    """
    today = now_in_utc().date()
    # exam runs whose eligibility window has not yet closed
    eligible_runs = ExamRun.objects.filter(
        course=course, date_last_eligible__gte=today
    )
    authorizations = ExamAuthorization.objects.filter(
        user=user,
        status=ExamAuthorization.STATUS_SUCCESS,
        exam_run__in=eligible_runs,
    ).exclude(operation=ExamAuthorization.OPERATION_DELETE)
    return authorizations.exists()
def get_exam_register_end_date(course):
    """
    Return the last moment an exam can be scheduled for the current run,
    formatted for display (e.g. "March 5, 04:00 PM UTC"); empty string if
    there is no currently schedulable exam run.

    NOTE: strftime's "%-d" / zero-stripping flags are platform-specific
    (glibc); this will raise on platforms that don't support them.
    """
    schedulable_exam_run = ExamRun.get_currently_schedulable(course).first()
    if schedulable_exam_run is not None:
        return schedulable_exam_run.date_last_schedulable.strftime("%B %-d, %I:%M %p %Z")
    return ""
def get_edx_exam_course_key(user, course):
    """
    Find a successful exam authorization and return the edx course key for the exam

    Args:
        user (User): a user
        course (courses.models.Course): A course

    Returns:
        str: a course key to the exam or empty string
    """
    authorization = ExamAuthorization.objects.filter(
        user=user,
        course=course,
        status=ExamAuthorization.STATUS_SUCCESS,
        exam_run__in=ExamRun.get_currently_schedulable(course),
    ).first()
    # no authorization, or an authorization whose run has no key, yields ''
    if authorization is None:
        return ""
    return authorization.exam_run.edx_exam_course_key or ''
def get_future_exam_runs(course):
    """
    Return a list of first dates when exams can be scheduled

    Args:
        course (courses.models.Course): A course

    Returns:
        list(str): a list of dates when future exams become schedulable
    """
    future_runs = ExamRun.get_schedulable_in_future(course).order_by('date_first_schedulable')
    return future_runs.values_list('date_first_schedulable', flat=True)
def get_exam_date_next_semester(course):
    """
    Return a start date of an exam next semester.

    Looking for the latest course run that has just finished or is currently running,
    and uses its upgrade deadline as an even in time relative to which we can find an
    exam run this semester or next semester.

    Args:
        course (courses.models.Course): A course

    Returns:
        str: a string representation exam start date, example: Apr 5, 2021
            (empty string if no qualifying exam run exists)
    """
    current_course_run = (
        CourseRun.objects.not_discontinued()
        .filter(start_date__lte=now_in_utc(), course=course)
        .order_by('-start_date').first()
    )
    # 12 weeks approximates "one semester later"
    three_months = datetime.timedelta(weeks=12)
    if current_course_run is None or current_course_run.upgrade_deadline is None:
        # no reference point available: fall back to "three months from now"
        next_date = now_in_utc() + three_months
    else:
        next_date = current_course_run.upgrade_deadline + three_months
    exam_run = ExamRun.get_schedulable_in_future(course).filter(
        date_first_schedulable__gte=next_date
    ).order_by('date_first_schedulable').first()
    return exam_run.date_last_eligible.strftime('%b %-d, %Y') if exam_run else ""
def get_current_exam_run_dates(course):
    """
    Return eligibility dates for an exam this term, example: 'Mar 7 and Mar 17, 2018',
    i.e. the dates during which a learner can take the exam

    Args:
        course (courses.models.Course): A course

    Returns:
        str: a string representation of scheduling window for current exam run
            (empty string if there is no currently schedulable run)
    """
    current_run = ExamRun.get_currently_schedulable(course).first()
    if current_run is None:
        return ''
    window_start = current_run.date_first_eligible.strftime('%b %-d')
    window_end = current_run.date_last_eligible.strftime('%b %-d, %Y')
    return '{} and {}'.format(window_start, window_end)
def has_to_pay_for_exam(mmtrack, course):
    """
    Determine if payment is required for another exam attempt

    Args:
        mmtrack (dashboard.utils.MMTrack): a instance of all user information about a program
        course (courses.models.Course): A course

    Returns:
        bool: if the user has to pay for another exam attempt
    """
    attempts_left = mmtrack.get_number_of_attempts_left(course)
    return attempts_left < 1
def get_certificate_url(mmtrack, course):
    """
    Find certificate associated with highest passing grade for the course

    Args:
        mmtrack (dashboard.utils.MMTrack): a instance of all user information about a program
        course (courses.models.Course): A course

    Returns:
        str: url to view the certificate, or empty string when none applies
    """
    best_grade = mmtrack.get_best_final_grade_for_course(course)
    if not best_grade:
        return ""
    course_key = best_grade.course_run.edx_course_key
    if mmtrack.has_exams:
        # exam-based programs use the local MicroMasters certificate page
        certificate = mmtrack.get_course_certificate(course)
        if certificate:
            return reverse('certificate', args=[certificate.hash])
        return ""
    if mmtrack.has_passing_certificate(course_key):
        # otherwise link straight to the edX-hosted certificate
        download_url = mmtrack.certificates.get_verified_cert(course_key).download_url
        if download_url:
            return urljoin(settings.EDXORG_BASE_URL, download_url)
    return ""
def calculate_users_to_refresh_in_bulk():
    """
    Calculate the set of user ids which would be updated when running a bulk update. This uses a 6 hour delta
    because this is a bulk operation. For individual updates see CachedEdxDataApi.is_cache_fresh.

    Returns:
        list of int: A list of user ids which need to be updated
    """
    refresh_time_limit = now_in_utc() - datetime.timedelta(hours=6)
    # active, non-fake users who have at least one social auth
    all_users = User.objects.filter(is_active=True, profile__fake_user=False).exclude(social_auth=None)
    con = get_redis_connection("redis")
    # users whose credentials repeatedly failed are parked in this redis set and skipped
    user_ids_invalid_credentials = con.smembers(CACHE_KEY_FAILED_USERS_NOT_TO_UPDATE)
    # If one of these fields is null in the database the gte expression will be false, so we will refresh those users
    users_not_expired = all_users.filter(
        usercacherefreshtime__enrollment__gte=refresh_time_limit,
        usercacherefreshtime__certificate__gte=refresh_time_limit,
        usercacherefreshtime__current_grade__gte=refresh_time_limit
    )
    only_dedp_users = []
    if not settings.UPDATE_EDX_DATA_FOR_DEDP_PROGRAM_USERS:
        # exclude users enrolled ONLY in the DEDP program; users also enrolled in
        # another program are kept in the refresh set
        dedp_users = ProgramEnrollment.objects.filter(
            program__title=DEDP_PROGRAM_TITLE
        ).values_list('user__id', flat=True)
        non_dedp_users = ProgramEnrollment.objects.exclude(
            program__title=DEDP_PROGRAM_TITLE
        ).filter(
            user__id__in=dedp_users
        ).values_list('user__id', flat=True).distinct()
        only_dedp_users = list(set(dedp_users) - set(non_dedp_users))
    return list(
        all_users
        .exclude(id__in=users_not_expired.values_list("id", flat=True))
        .exclude(id__in=user_ids_invalid_credentials)
        .exclude(id__in=only_dedp_users)
        .values_list("id", flat=True)
    )
def refresh_user_data(user_id, provider=BACKEND_EDX_ORG):
    """
    Refresh the edx cache data for a user.

    Note that this function will not raise an exception on error; instead the
    errors are logged and the function returns early.

    Args:
        user_id (int): The user id
        provider (str): name of the courseware backend
    """
    # Use `except Exception` (not a bare `except:`) so that BaseException
    # subclasses like KeyboardInterrupt/SystemExit can still propagate and
    # cancel the task cleanly.
    try:
        user = User.objects.get(pk=user_id)
    except Exception:
        log.exception('edX data refresh task: unable to get user "%s"', user_id)
        return
    # get the credentials for the current user for edX
    try:
        user_social = get_social_auth(user, provider)
    except Exception:
        log.exception('user "%s" does not have edX credentials', user.username)
        return
    # refresh the OAuth token so the API client can authenticate
    try:
        utils.refresh_user_token(user_social)
    except Exception:
        save_cache_update_failure(user_id)
        log.exception("Unable to refresh token for student %s", user.username)
        return
    try:
        edx_client = EdxApi(user_social.extra_data, COURSEWARE_BACKEND_URL[provider])
    except Exception:
        log.exception("Unable to create an edX client object for student %s", user.username)
        return
    # update each cache type independently so one failure doesn't block the rest
    for cache_type in CachedEdxDataApi.CACHE_TYPES_BACKEND[provider]:
        try:
            CachedEdxDataApi.update_cache_if_expired(user, edx_client, cache_type, provider)
        except Exception:
            save_cache_update_failure(user_id)
            log.exception("Unable to refresh cache %s for student %s", cache_type, user.username)
def save_cache_update_failure(user_id):
    """
    Store the number of times the cache update has failed for a user.

    After the third failure the user is added to the redis set of users
    that should be skipped by bulk updates.

    Args:
        user_id (int): The user id
    """
    redis_conn = get_redis_connection("redis")
    failure_count = redis_conn.hincrby(
        CACHE_KEY_FAILURE_NUMS_BY_USER,
        FIELD_USER_ID_BASE_STR.format(user_id),
        1,
    )
    if int(failure_count) >= 3:
        redis_conn.sadd(CACHE_KEY_FAILED_USERS_NOT_TO_UPDATE, user_id)
def update_cache_for_backend(user, provider):
    """
    Update a learner's cached edX data for the given courseware backend.

    Args:
        user (django.contrib.auth.models.User): A user
        provider (str): name of the courseware backend

    Raises:
        InvalidCredentialStored: if the stored edX credentials are invalid;
            propagated deliberately so the caller can force a re-login
    """
    try:
        user_social = get_social_auth(user, provider)
    except ObjectDoesNotExist:
        log.info('No social auth for %s for user %s', provider, user.username)
        return
    if user_social is not None:
        try:
            utils.refresh_user_token(user_social)
        except utils.InvalidCredentialStored:
            # this needs to raise in order to force the user re-login
            raise
        except Exception:
            # `except Exception` instead of a bare `except:` so that
            # KeyboardInterrupt/SystemExit are not swallowed here
            log.exception('Impossible to refresh user credentials in dashboard view')
        # create an instance of the client to query edX
        edx_client = EdxApi(user_social.extra_data, COURSEWARE_BACKEND_URL[provider])
        try:
            for cache_type in CachedEdxDataApi.CACHE_TYPES_BACKEND[provider]:
                CachedEdxDataApi.update_cache_if_expired(user, edx_client, cache_type, provider)
        except InvalidCredentialStored:
            # this needs to raise in order to force the user re-login
            raise
        except Exception:
            log.exception('Impossible to refresh edX cache')
def is_user_enrolled_in_exam_course(edx_client, exam_run):
    """
    Query edX to check if user is already enrolled in exam course

    Args:
        edx_client: edx api client
        exam_run: exam run instance to check enrollment for

    Returns:
        bool: True if the user is enrolled in this exam course
    """
    enrollments = edx_client.enrollments.get_student_enrollments()
    enrolled_course_ids = enrollments.get_enrolled_course_ids()
    # return the membership test directly instead of an if/return-True/return-False chain
    return exam_run.edx_exam_course_key in enrolled_course_ids
| bsd-3-clause | a5e752326cda2b12861dbe049c16143a | 38.668734 | 117 | 0.659494 | 3.755344 | false | false | false | false |
mitodl/micromasters | courses/admin.py | 1 | 2797 | """
Admin views for Courses & Programs
"""
from django.contrib import admin
from courses.models import Course, CourseRun, Program, ElectivesSet, ElectiveCourse, Topic
class CourseInline(admin.StackedInline):
"""Admin Inline for Course objects"""
model = Course
extra = 1
show_change_link = True
class CourseRunInline(admin.StackedInline):
"""Admin Inline for CourseRun objects"""
model = CourseRun
extra = 1
show_change_link = True
class ProgramAdmin(admin.ModelAdmin):
"""ModelAdmin for Programs"""
list_display = ('title', 'live',)
list_filter = ('live', 'topics')
inlines = [CourseInline]
class CourseAdmin(admin.ModelAdmin):
"""ModelAdmin for Courses"""
list_display = ('title', 'course_number', 'program_title', 'position_in_program',)
list_filter = ('program__live', 'program',)
inlines = [CourseRunInline]
ordering = ('program__title', 'position_in_program',)
def program_title(self, course):
"""Getter for the foreign key element"""
return course.program.title
class CourseRunAdmin(admin.ModelAdmin):
"""ModelAdmin for Courses"""
list_display = ('title', 'course_number', 'edx_course_key', 'enrollment_start', 'start_date', 'enrollment_end',
'end_date', 'upgrade_deadline', 'freeze_grade_date', )
list_filter = ('course__program__live', 'course__program', 'course', 'course__course_number', 'courseware_backend', 'is_discontinued')
list_editable = ('enrollment_start', 'start_date', 'enrollment_end', 'end_date', 'upgrade_deadline',
'freeze_grade_date', )
search_fields = ('edx_course_key',)
ordering = ('course__title', 'course__program__title', 'course__position_in_program', )
def program(self, run):
"""method to show program for list display."""
return run.course.program.title
def course(self, run):
"""Getter for course foreign key"""
return run.course.title
def course_number(self, run):
"""Getter for course's course_number"""
return run.course.course_number
class ElectivesSetAdmin(admin.ModelAdmin):
"""ModelAdmin for ElectivesSet"""
list_display = ('program', 'required_number', 'title',)
list_filter = ('program',)
class ElectiveCourseAdmin(admin.ModelAdmin):
"""ModelAdmin for ElectiveCourse"""
list_display = ('course', 'electives_set',)
class TopicAdmin(admin.ModelAdmin):
"""ModelAdmin for Programs"""
list_display = ('name',)
admin.site.register(CourseRun, CourseRunAdmin)
admin.site.register(Course, CourseAdmin)
admin.site.register(Program, ProgramAdmin)
admin.site.register(ElectivesSet, ElectivesSetAdmin)
admin.site.register(ElectiveCourse, ElectiveCourseAdmin)
admin.site.register(Topic, TopicAdmin)
| bsd-3-clause | 7459a414f4edaf21b99ee31d0dd795c4 | 31.149425 | 138 | 0.673221 | 3.831507 | false | false | false | false |
mitodl/micromasters | ui/urls.py | 1 | 1996 | """
URLs for ui
"""
from django.conf.urls import url
from django.contrib.auth import views as auth_views
from certificates.views import (
CourseCertificateView,
GradeRecordView,
ProgramCertificateView,
ProgramLetterView,
SharedGradeRecordView,
)
from profiles.constants import USERNAME_RE_PARTIAL
from ui.url_utils import (
DASHBOARD_URLS,
)
from ui.views import (
DashboardView,
UsersView,
SignInView,
page_404,
page_500,
BackgroundImagesCSSView,
need_verified_email,
oauth_maintenance)
dashboard_urlpatterns = [
url(r'^{}$'.format(dashboard_url.lstrip("/")), DashboardView.as_view(), name='ui-dashboard')
for dashboard_url in DASHBOARD_URLS
]
urlpatterns = [
url(r'^logout/$', auth_views.LogoutView.as_view(), name='logout'),
url(r'^signin/$', SignInView.as_view(), name='signin'),
url(r'^404/$', page_404, name='ui-404'),
url(r'^500/$', page_500, name='ui-500'),
url(r'^verify-email/$', need_verified_email, name='verify-email'),
url(r'^oauth_maintenance/(?P<backend>[^/]+)/$', oauth_maintenance, name='oauth_maintenance'),
url(fr'^learner/(?P<user>{USERNAME_RE_PARTIAL})?/?', UsersView.as_view(), name='ui-users'),
url(r'^certificate/course/(?P<certificate_hash>[-\w.]+)?/?', CourseCertificateView.as_view(), name='certificate'),
url(r'^certificate/program/(?P<certificate_hash>[-\w.]+)?/?', ProgramCertificateView.as_view(),
name='program-certificate'),
url(r'^letter/program/(?P<letter_uuid>[-\w.]+)?/?', ProgramLetterView.as_view(),
name='program_letter'),
url(r'^records/programs/(?P<enrollment_id>[\d]+)/shared/(?P<record_share_hash>[-\w.]+)?/?', SharedGradeRecordView.as_view(),
name='shared_grade_records'),
url(r'^records/programs/(?P<enrollment_id>[\d]+)', GradeRecordView.as_view(),
name='grade_records'),
url(r'^background-images\.css$', BackgroundImagesCSSView.as_view(), name='background-images-css'),
] + dashboard_urlpatterns
| bsd-3-clause | 3c045be79976e1e64273339a2430e866 | 38.137255 | 128 | 0.66483 | 3.354622 | false | false | false | false |
mitodl/micromasters | seed_data/management/commands/seed_db.py | 1 | 12439 | """
Generates a set of realistic users/programs to help us test search functionality
"""
from decimal import Decimal
from django.core.management import BaseCommand
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from factory.django import mute_signals
from backends.edxorg import EdxOrgOAuth2
from courses.models import Program, Course, CourseRun, ElectivesSet, ElectiveCourse
from dashboard.models import ProgramEnrollment
from grades.models import FinalGrade, FinalGradeStatus
from micromasters.utils import (
get_field_names,
load_json_from_file,
first_matching_item,
)
from profiles.api import get_social_username
from profiles.models import Employment, Education, Profile
from roles.models import Role
from roles.roles import Staff
from search.tasks import start_recreate_index
from seed_data.utils import filter_dict_by_key_set
from seed_data.lib import (
CachedEnrollmentHandler,
fake_programs_query,
ensure_cached_data_freshness,
add_paid_order_for_course
)
from seed_data.management.commands import ( # pylint: disable=import-error
USER_DATA_PATH, PROGRAM_DATA_PATH,
FAKE_USER_USERNAME_PREFIX, FAKE_PROGRAM_DESC_PREFIX,
PASSING_GRADE
)
from seed_data.management.commands.create_tiers import create_tiers
MODEL_DEFAULTS = {
User: {
'is_active': True,
'is_staff': False,
'is_superuser': False
},
Profile: {
'account_privacy': 'private',
'edx_requires_parental_consent': False,
'email_optin': True,
'filled_out': True,
},
Education: {
'online_degree': False
}
}
def compile_model_data(model_cls, data, **additional_data):
"""
Compiles a dictionary of data that will be set on a model object
"""
model_data = {}
field_names = get_field_names(model_cls)
# If default values have been specified, set them on the model data dict
if model_cls in MODEL_DEFAULTS:
model_data.update(MODEL_DEFAULTS[model_cls])
# For all keys that match valid model fields, update the model data dict
model_data.update(filter_dict_by_key_set(data, field_names))
# For any other data that has been specifically passed in, update the model data dict
if additional_data:
model_data.update(additional_data)
return model_data
def deserialize_model_data(model_cls, data, **additional_data):
"""
Creates a new instance of a model class and fills in field values using some supplied data
"""
model_data = compile_model_data(model_cls, data, **additional_data)
return model_cls.objects.create(**model_data)
# User data deserialization
def deserialize_user_data(user_data, programs):
"""
Deserializes a dict of mixed User/Profile data and returns the newly-inserted User
"""
username = FAKE_USER_USERNAME_PREFIX + user_data['email'].split('@')[0]
user = deserialize_model_data(User, user_data, username=username)
# Create social username
user.social_auth.create(
provider=EdxOrgOAuth2.name,
uid=user.username,
)
# The user data is generated in this script with mute_signals(post_save)
# so we need to create the profile explicitly.
profile = deserialize_model_data(Profile, user_data, user=user, fake_user=True)
deserialize_profile_detail_data(profile, Employment, user_data['work_history'])
deserialize_profile_detail_data(profile, Education, user_data['education'])
deserialize_dashboard_data(user, user_data, programs)
ensure_cached_data_freshness(user)
return user
def deserialize_dashboard_data(user, user_data, programs):
"""
Deserializes enrollment/grade data for a user
"""
fake_course_runs = CourseRun.objects.filter(
course__program__in=programs
).select_related('course__program').all()
social_username = get_social_username(user)
enrollment_list = user_data.get('_enrollments', [])
grade_list = user_data.get('_grades', [])
deserialize_enrollment_data(user, social_username, fake_course_runs, enrollment_list)
deserialize_grade_data(user, fake_course_runs, grade_list)
def deserialize_enrollment_data(user, social_username, course_runs, enrollment_data_list):
"""
Deserializes enrollment data for a user
"""
enrollment_handler = CachedEnrollmentHandler(user, social_username=social_username)
enrolled_programs = set()
edx_course_key = None
for enrollment_data in enrollment_data_list:
edx_course_key = enrollment_data['edx_course_key']
course_run = first_matching_item(
course_runs,
lambda cr: cr.edx_course_key == edx_course_key
)
enrollment_handler.set_or_create(course_run)
enrolled_programs.add(course_run.course.program)
if course_run.course.program.financial_aid_availability:
add_paid_order_for_course(user, course_run)
# Add ProgramEnrollments for any Program that has an associated CachedEnrollment
for enrolled_program in enrolled_programs:
ProgramEnrollment.objects.get_or_create(user=user, program=enrolled_program)
def deserialize_grade_data(user, course_runs, grade_data_list):
"""
Deserializes grade data for a user
"""
edx_course_key = None
for grade_data in grade_data_list:
edx_course_key = grade_data['edx_course_key']
course_run = first_matching_item(
course_runs,
lambda cr: cr.edx_course_key == edx_course_key
)
grade = Decimal(grade_data['grade'])
FinalGrade.objects.update_or_create(
user=user,
course_run=course_run,
grade=grade,
passed=grade >= PASSING_GRADE,
status=FinalGradeStatus.COMPLETE,
course_run_paid_on_edx=True
)
def deserialize_profile_detail_data(profile, model_cls, profile_detail_data):
"""
Deserializes a list of data for a model with a many-to-one relationship with Profile (eg: Education)
"""
for profile_detail in profile_detail_data:
deserialize_model_data(model_cls, profile_detail, profile=profile)
def deserialize_user_data_list(user_data_list, programs):
"""
Deserializes a list of user data and returns the count of new Users created
"""
new_user_count = 0
for user_data in user_data_list:
deserialize_user_data(user_data, programs)
new_user_count += 1
return new_user_count
# Program data deserialization
def deserialize_course_run_data(course, course_run_data):
"""Deserializes a CourseRun object"""
course_run = deserialize_model_data(
CourseRun, course_run_data, course=course
)
return course_run
def deserialize_course_data(program, course_data):
"""Deserializes a Course object"""
course = deserialize_model_data(Course, course_data, program=program)
for course_run_data in course_data['course_runs']:
deserialize_course_run_data(course, course_run_data)
return course
def deserialize_elective_data(program, elective_data):
"""Deserializes an Elective set object"""
elective_set = deserialize_model_data(ElectivesSet, elective_data, program=program)
elective_courses_data = elective_data.get('courses')
if elective_courses_data:
for elective_course_data in elective_courses_data:
course = deserialize_course_data(program, elective_course_data)
deserialize_elective_course_data(elective_set, course)
return elective_set
def deserialize_elective_course_data(elective_set, course):
"""Deserializes an elective course object"""
elective_course = deserialize_model_data(ElectiveCourse, electives_set=elective_set, course=course, data={})
return elective_course
def deserialize_program_data(program_data):
"""Deserializes a Program object"""
program = deserialize_model_data(Program, program_data)
for course_data in program_data['courses']:
deserialize_course_data(program, course_data)
elective_data_list = program_data.get('elective_sets')
if elective_data_list:
[deserialize_elective_data(program, elective_data) for elective_data in elective_data_list]
return program
def deserialize_program_data_list(program_data_list):
"""Deserializes a list of Program data"""
programs = []
for program_data in program_data_list:
# Set the description to make this Program easily identifiable as a 'fake'
program_data['description'] = FAKE_PROGRAM_DESC_PREFIX + program_data['description']
program_data['live'] = True
program = deserialize_program_data(program_data)
programs.append(program)
return programs
class Command(BaseCommand):
"""
Seed the database with a set of realistic data, for development purposes.
"""
help = "Seed the database with a set of realistic data, for development purposes."
def add_arguments(self, parser):
parser.add_argument(
"--tiers",
dest="tiers",
default=4,
help="Number of TierPrograms to generate per Program.",
type=int
)
parser.add_argument(
'--staff-user',
action='store',
dest='staff_user',
help=(
"Username for a user to assign the 'staff' role "
"for the programs created by this script."
)
)
@staticmethod
def assign_staff_user_to_programs(username, programs):
"""
Assigns the 'staff' role to all given programs for a user with a given username
"""
staff_user = User.objects.get(username=username)
for program in programs:
ProgramEnrollment.objects.create(user=staff_user, program=program)
Role.objects.create(user=staff_user, program=program, role=Staff.ROLE_ID)
def handle(self, *args, **options): # pylint: disable=too-many-locals
program_data_list = load_json_from_file(PROGRAM_DATA_PATH)
user_data_list = load_json_from_file(USER_DATA_PATH)
existing_fake_user_count = User.objects.filter(username__startswith=FAKE_USER_USERNAME_PREFIX).count()
existing_fake_program_count = fake_programs_query().count()
if len(user_data_list) == existing_fake_user_count and len(program_data_list) == existing_fake_program_count:
fake_programs = fake_programs_query().all()
self.stdout.write("Seed data appears to already exist.")
else:
start_recreate_index.delay().get()
# Mute post_save to prevent updates to Opensearch on a per program or user basis.
# recreate_index() is run afterwards to do this indexing in bulk.
with mute_signals(post_save):
fake_programs = deserialize_program_data_list(program_data_list)
fake_user_count = deserialize_user_data_list(user_data_list, fake_programs)
# Handle FA programs
fake_financial_aid_programs = filter(lambda program: program.financial_aid_availability, fake_programs)
tiered_program_count, tiers_created = (
create_tiers(fake_financial_aid_programs, int(options["tiers"]))
)
start_recreate_index.delay().get()
program_msg = (
"Created {num} new programs from '{path}'."
).format(
num=len(fake_programs),
path=PROGRAM_DATA_PATH
)
if tiers_created:
program_msg = "{}\nCreated {} tiers for {} FA-enabled programs".format(
program_msg,
tiers_created,
tiered_program_count
)
user_msg = (
"Created {num} new users from '{path}'."
).format(
num=fake_user_count,
path=USER_DATA_PATH,
)
self.stdout.write(program_msg)
self.stdout.write(user_msg)
if fake_programs and options.get('staff_user'):
self.assign_staff_user_to_programs(options['staff_user'], fake_programs)
msg = (
"Added enrollment and 'staff' role for user '{user}' to {num} programs"
).format(
user=options['staff_user'],
num=len(fake_programs),
)
self.stdout.write(msg)
| bsd-3-clause | 9abbe0477dfe473986b409105e900e34 | 37.156442 | 117 | 0.659297 | 3.905495 | false | false | false | false |
mitodl/micromasters | ecommerce/factories.py | 1 | 2779 | """
Factories for ecommerce models
"""
from factory import (
LazyAttribute,
SelfAttribute,
SubFactory,
Trait,
)
from factory.django import DjangoModelFactory
from factory.fuzzy import (
FuzzyChoice,
FuzzyDecimal,
FuzzyText,
)
import faker
from courses.factories import (
CourseFactory,
ProgramFactory,
)
from ecommerce.api import (
make_reference_id,
generate_cybersource_sa_signature,
)
from ecommerce.models import (
Coupon,
Line,
Order,
Receipt,
)
from micromasters.factories import UserFactory
FAKE = faker.Factory.create()
class OrderFactory(DjangoModelFactory):
"""Factory for Order"""
user = SubFactory(UserFactory)
status = FuzzyChoice(
Order.STATUSES
)
total_price_paid = FuzzyDecimal(low=0, high=12345)
class Meta:
model = Order
class Params:
fulfilled = Trait(
status=Order.FULFILLED
)
class LineFactory(DjangoModelFactory):
"""Factory for Line"""
order = SubFactory(OrderFactory)
price = SelfAttribute('order.total_price_paid')
description = FuzzyText(prefix="Line ")
course_key = FuzzyText()
class Meta:
model = Line
def gen_fake_receipt_data(order=None):
"""
Helper function to generate a fake signed piece of data
"""
data = {}
for _ in range(10):
data[FAKE.text()] = FAKE.text()
keys = sorted(data.keys())
data['signed_field_names'] = ",".join(keys)
data['unsigned_field_names'] = ''
data['req_reference_number'] = make_reference_id(order) if order else ''
data['signature'] = generate_cybersource_sa_signature(data)
return data
class ReceiptFactory(DjangoModelFactory):
"""Factory for Receipt"""
order = SubFactory(OrderFactory)
data = LazyAttribute(lambda receipt: gen_fake_receipt_data(receipt.order))
class Meta:
model = Receipt
class CouponFactory(DjangoModelFactory):
"""Factory for Coupon"""
coupon_code = FuzzyText()
coupon_type = Coupon.STANDARD
amount_type = Coupon.PERCENT_DISCOUNT
amount = FuzzyDecimal(0, 1)
class Meta:
model = Coupon
content_object = SubFactory(ProgramFactory, financial_aid_availability=True)
class Params: # pylint: disable=missing-docstring
percent = Trait(
amount_type='percent-discount',
amount=FuzzyDecimal(0, 1),
)
fixed = Trait(
amount_type='fixed-discount',
amount=FuzzyDecimal(50, 1000),
)
program = Trait(
content_object=SubFactory(ProgramFactory, financial_aid_availability=True)
)
course = Trait(
content_object=SubFactory(CourseFactory, program__financial_aid_availability=True)
)
| bsd-3-clause | 15eeeeacb5d65864df4d0664087ed53b | 22.956897 | 94 | 0.649154 | 3.84903 | false | false | false | false |
mitodl/micromasters | micromasters/serializers.py | 1 | 2139 | """
Serializers for Django contrib models
"""
import logging
from rest_framework import serializers
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
log = logging.getLogger(__name__)
class UserSerializer(serializers.ModelSerializer):
"""
Serializer for User objects. Note that this will only work with
logged-in users, not anonymous users.
"""
username = serializers.SerializerMethodField()
first_name = serializers.SerializerMethodField()
last_name = serializers.SerializerMethodField()
preferred_name = serializers.SerializerMethodField()
social_auth_providers = serializers.SerializerMethodField()
class Meta:
model = User
fields = (
"username", "email",
"first_name", "last_name", "preferred_name",
"social_auth_providers",
)
def get_username(self, obj):
"""
Look up the user's username.
"""
return obj.username
def get_first_name(self, obj):
"""
Get first_name from user profile, if profile exists
"""
try:
return obj.profile.first_name
except ObjectDoesNotExist:
return None
def get_last_name(self, obj):
"""
Get last_name from user profile, if profile exists
"""
try:
return obj.profile.last_name
except ObjectDoesNotExist:
return None
def get_preferred_name(self, obj):
"""
Get preferred_name from user profile, if profile exists
"""
try:
return obj.profile.preferred_name
except ObjectDoesNotExist:
return None
def get_social_auth_providers(self, obj):
"""
Get the list of social auth providers
"""
return list(obj.social_auth.values_list("provider", flat=True).distinct())
def serialize_maybe_user(user):
"""
Serialize a logged-in user to Python primitives, or an anonymous user
to `None`.
"""
if user.is_anonymous:
return None
return UserSerializer(user).data
| bsd-3-clause | f486fd55ec825c69803e16e0635dd424 | 26.423077 | 82 | 0.620851 | 4.639913 | false | false | false | false |
mitodl/micromasters | search/signals.py | 1 | 3359 | """
Signals used for indexing
"""
import logging
from django.db import transaction
from django.db.models.signals import post_delete, post_save
from django.dispatch import receiver
from profiles.models import (
Education,
Employment,
Profile,
)
from search.models import PercolateQuery
from search.tasks import (
index_users,
index_percolate_queries,
delete_percolate_query,
)
from roles.models import Role
log = logging.getLogger(__name__)
# all the following signal handlers do basically the same.
# The reason why there is one function per sender is
# because each signal handler needs to be hooked to a single sender
# otherwise it would run for any `post_save`/`post_delete` coming from any model
# pylint: disable=unused-argument
@receiver(post_save, sender=Profile, dispatch_uid="profile_post_save_index")
def handle_update_profile(sender, instance, **kwargs):
"""Update index when Profile model is updated."""
transaction.on_commit(lambda: index_users.delay([instance.user.id], check_if_changed=True))
@receiver(post_save, sender=Education, dispatch_uid="education_post_save_index")
def handle_update_education(sender, instance, **kwargs):
"""Update index when Education model is updated."""
transaction.on_commit(lambda: index_users.delay([instance.profile.user.id], check_if_changed=True))
@receiver(post_save, sender=Employment, dispatch_uid="employment_post_save_index")
def handle_update_employment(sender, instance, **kwargs):
"""Update index when Employment model is updated."""
transaction.on_commit(lambda: index_users.delay([instance.profile.user.id], check_if_changed=True))
@receiver(post_delete, sender=Education, dispatch_uid="education_post_delete_index")
def handle_delete_education(sender, instance, **kwargs):
"""Update index when Education model instance is deleted."""
transaction.on_commit(lambda: index_users.delay([instance.profile.user.id]))
@receiver(post_delete, sender=Employment, dispatch_uid="employment_post_delete_index")
def handle_delete_employment(sender, instance, **kwargs):
"""Update index when Employment model instance is deleted."""
transaction.on_commit(lambda: index_users.delay([instance.profile.user.id]))
@receiver(post_save, sender=PercolateQuery, dispatch_uid="percolate_query_save")
def handle_update_percolate(sender, instance, **kwargs):
"""When a new query is created or a query is updated, update Opensearch too"""
transaction.on_commit(lambda: index_percolate_queries.delay([instance.id]))
@receiver(post_delete, sender=PercolateQuery, dispatch_uid="percolate_query_delete")
def handle_delete_percolate(sender, instance, **kwargs):
"""When a query is deleted, make sure we also delete it on Opensearch"""
transaction.on_commit(lambda: delete_percolate_query.delay(instance.id))
@receiver(post_save, sender=Role, dispatch_uid="role_post_create_index")
def handle_create_role(sender, instance, **kwargs):
"""Update index when Role model instance is created."""
transaction.on_commit(lambda: index_users.delay([instance.user.id]))
@receiver(post_delete, sender=Role, dispatch_uid="role_post_remove_index")
def handle_remove_role(sender, instance, **kwargs):
"""Update index when Role model instance is deleted."""
transaction.on_commit(lambda: index_users.delay([instance.user.id]))
| bsd-3-clause | 105db2f4d60b09acd516e1cf4ef5e80c | 38.988095 | 103 | 0.751116 | 3.607948 | false | false | false | false |
mitodl/micromasters | cms/migrations/0015_add_home_page_and_email_to_program_page.py | 1 | 2360 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-09-14 19:32
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import wagtail.core.fields
class Migration(migrations.Migration):
dependencies = [
('cms', '0014_html_in_faq_answers'),
]
operations = [
migrations.AddField(
model_name='programpage',
name='program_contact_email',
field=models.EmailField(blank=True, help_text='A contact email for the program.', max_length=254, null=True),
),
migrations.AddField(
model_name='programpage',
name='program_home_page_url',
field=models.URLField(blank=True, help_text='A url for an external homepage. There will be a link to this url from the program page.', null=True),
),
migrations.AlterField(
model_name='programpage',
name='background_image',
field=models.ForeignKey(blank=True, help_text='The hero image on the program page', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AlterField(
model_name='programpage',
name='description',
field=wagtail.core.fields.RichTextField(blank=True, help_text='The description shown on the program page'),
),
migrations.AlterField(
model_name='programpage',
name='external_program_page_url',
field=models.URLField(blank=True, help_text='If this field is set the program page link on the home page will go to this URL.', null=True),
),
migrations.AlterField(
model_name='programpage',
name='program',
field=models.OneToOneField(help_text='The program for this page', null=True, on_delete=django.db.models.deletion.SET_NULL, to='courses.Program'),
),
migrations.AlterField(
model_name='programpage',
name='thumbnail_image',
field=models.ForeignKey(blank=True, help_text='Thumbnail size must be at least 690x530 pixels. Thumbnails are cropped down to this size, preserving aspect ratio.', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
]
| bsd-3-clause | fdacae78287818f94565a2b8168b75b5 | 44.384615 | 277 | 0.636017 | 3.979764 | false | false | false | false |
mitodl/micromasters | seed_data/management/commands/unseed_db.py | 1 | 3499 | """
Deletes a set of realistic users/programs that were added to help us test search functionality
"""
from contextlib import contextmanager
from factory.django import mute_signals
from django.core.management import BaseCommand
from django.db import connection
from django.db.models import Q
from django.db.models.signals import post_delete
from django.contrib.auth.models import User
from courses.models import Program
from dashboard.models import CachedEnrollment, CachedCertificate, CachedCurrentGrade
from financialaid.models import FinancialAid, FinancialAidAudit, Tier, TierProgram
from grades.models import FinalGrade
from mail.models import FinancialAidEmailAudit
from search.tasks import start_recreate_index
from seed_data.management.commands import ( # pylint: disable=import-error
FAKE_USER_USERNAME_PREFIX,
FAKE_PROGRAM_DESC_PREFIX,
)
@contextmanager
def remove_delete_protection(*models):
"""
Temporarily removes delete protection on any number of models
Args:
*models: One or more models whose tables will have delete protection temporarily removed
"""
table_names = [model._meta.db_table for model in models]
with connection.cursor() as cursor:
for table_name in table_names:
cursor.execute("DROP RULE delete_protect ON {}".format(table_name))
try:
yield
finally:
for table_name in reversed(table_names):
cursor.execute("CREATE RULE delete_protect AS ON DELETE TO {} DO INSTEAD NOTHING".format(table_name))
def unseed_db():
"""
Deletes all seed data from the database
"""
fake_program_ids = (
Program.objects
.filter(description__startswith=FAKE_PROGRAM_DESC_PREFIX)
.values_list('id', flat=True)
)
fake_user_ids = (
User.objects
.filter(username__startswith=FAKE_USER_USERNAME_PREFIX)
.values_list('id', flat=True)
)
fake_tier_ids = (
TierProgram.objects
.filter(program__id__in=fake_program_ids)
.values_list('tier__id', flat=True)
)
fake_final_grade_ids = (
FinalGrade.objects
.filter(course_run__course__program__id__in=fake_program_ids)
.values_list('id', flat=True)
)
financial_aid_ids = (
FinancialAid.objects
.filter(Q(user_id__in=fake_user_ids) | Q(tier_program__program__id__in=fake_program_ids))
.values_list('id', flat=True)
)
fin_aid_audit_models = [FinancialAidAudit, FinancialAidEmailAudit]
with mute_signals(post_delete):
with remove_delete_protection(*fin_aid_audit_models):
for audit_model in fin_aid_audit_models:
audit_model.objects.filter(financial_aid__id__in=financial_aid_ids).delete()
for model_cls in [CachedEnrollment, CachedCertificate, CachedCurrentGrade]:
model_cls.objects.filter(course_run__course__program__id__in=fake_program_ids).delete()
Tier.objects.filter(id__in=fake_tier_ids).delete()
FinalGrade.objects.filter(id__in=fake_final_grade_ids).delete()
Program.objects.filter(id__in=fake_program_ids).delete()
User.objects.filter(id__in=fake_user_ids).delete()
class Command(BaseCommand):
"""
Delete seeded data from the database, for development purposes.
"""
help = "Delete seeded data from the database, for development purposes."
def handle(self, *args, **options):
unseed_db()
start_recreate_index.delay().get()
| bsd-3-clause | fb2e38d46ef30afac280477237a7bd50 | 36.623656 | 117 | 0.685053 | 3.770474 | false | false | false | false |
mitodl/micromasters | profiles/migrations/0008_rename_edx_fields.py | 1 | 1688 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-04-13 15:28
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):

    dependencies = [
        ('profiles', '0007_city_and_birthplace'),
    ]

    # Profile fields that get an "edx_" prefix to mark them as sourced from edX.
    _RENAMED_FIELDS = (
        'bio',
        'employer',
        'goals',
        'job_title',
        'language_proficiencies',
        'level_of_education',
        'mailing_address',
        'name',
        'requires_parental_consent',
    )

    operations = [
        migrations.RenameField(
            model_name='profile',
            old_name=field,
            new_name='edx_{}'.format(field),
        )
        for field in _RENAMED_FIELDS
    ]
| bsd-3-clause | 6acfbcde30006e15ecc6748378aa31ff | 27.133333 | 53 | 0.51955 | 4.188586 | false | false | false | false |
mitodl/micromasters | ecommerce/permissions_test.py | 1 | 1437 | """
Tests for ecommerce permissions
"""
from unittest.mock import MagicMock
from django.test import (
override_settings,
TestCase,
)
from ecommerce.api import generate_cybersource_sa_signature
from ecommerce.permissions import IsSignedByCyberSource
@override_settings(CYBERSOURCE_SECURITY_KEY="fake")
class PermissionsTests(TestCase):
    """
    Tests for ecommerce permissions
    """

    @staticmethod
    def _signed_payload(signature=None):
        """Build a payload with signed field names and the given (or a valid) signature"""
        payload = {
            'a': 'b',
            'c': 'd',
            'e': 'f',
        }
        payload['signed_field_names'] = ','.join(sorted(payload.keys()))
        payload['signature'] = signature or generate_cybersource_sa_signature(payload)
        return payload

    def test_has_signature(self):
        """
        If the payload has a valid signature, it should pass the permissions test
        """
        request = MagicMock(data=self._signed_payload())
        assert IsSignedByCyberSource().has_permission(request, MagicMock()) is True

    def test_has_wrong_signature(self):
        """
        If the payload has an invalid signature, it should fail the permissions test
        """
        request = MagicMock(data=self._signed_payload(signature='signed'))
        assert IsSignedByCyberSource().has_permission(request, MagicMock()) is False
| bsd-3-clause | 23806274ee11af8b934bc3ac855501bc | 27.176471 | 84 | 0.600557 | 4.165217 | false | true | false | false |
mitodl/micromasters | grades/management/commands/adjust_exam_grades_from_csv.py | 1 | 4974 | """
Freezes final grades for a course
"""
import csv
import argparse
from collections import namedtuple
from django.core.management import BaseCommand, CommandError
from grades.models import ProctoredExamGrade
class ParsingError(CommandError):
    """Raised when a row of the grade-adjustment CSV is missing or malformed.

    (The redundant ``pass`` was removed: a docstring already makes the class
    body non-empty.)
    """
class GradeRowParser:
    """Parser for rows of grade adjustment information in a CSV"""

    # Parsed representation of one CSV row
    RowProps = namedtuple('RowProps', ['exam_grade_id', 'score'])

    # Default CSV column name for each RowProps field
    default_col_names = dict(
        exam_grade_id='proctoredexam_id',
        score='score',
    )

    def __init__(self, col_names=None):
        """
        Args:
            col_names (dict): Mapping of RowProps property name to the name of the column in the CSV
        """
        col_names = col_names or {}
        self.col_names = self.RowProps(**{**self.default_col_names, **col_names})

    def parse_and_validate_row(self, row):
        """Parses a row of grade adjustment info and makes sure it doesn't contain bad data

        Args:
            row (dict): a single row as produced by csv.DictReader

        Returns:
            RowProps: the parsed exam grade id (int) and score (float)

        Raises:
            ParsingError: if a required column is missing, a value cannot be
                coerced, or the score is not within [0, 100]
        """
        try:
            parsed_row = self.RowProps(
                exam_grade_id=int(row[self.col_names.exam_grade_id]),
                score=float(row[self.col_names.score]),
            )
        except KeyError as e:
            raise ParsingError('Row is missing a required column: {}'.format(str(e)))
        except ValueError as e:
            raise ParsingError('Row has an invalid value: {}'.format(str(e)))
        # Chained comparison also rejects NaN (a CSV value of "nan" parses as a
        # valid float but would slip through `score < 0 or score > 100` since
        # both comparisons are False for NaN).
        if not 0.0 <= parsed_row.score <= 100.0:
            row_identifier = '{}: {}'.format(self.col_names.exam_grade_id, parsed_row.exam_grade_id)
            raise ParsingError('[{}] "score" value must be between 0 and 100'.format(row_identifier))
        return parsed_row

    def parse_exam_grade_adjustments(self, csv_reader):
        """
        Parses all rows of grade adjustment info from a CSV and yields each ProctoredExamGrade object
        with its associated grade adjustment row from the CSV

        Args:
            csv_reader (csv.DictReader): A DictReader instance

        Returns:
            tuple(ProctoredExamGrade, RowProps):
                A tuple containing a ProctoredExamGrade and its associated parsed CSV row

        Raises:
            ParsingError: if any row is invalid or references an unknown grade id
        """
        parsed_row_dict = {}
        for row in csv_reader:
            parsed_row = self.parse_and_validate_row(row)
            parsed_row_dict[parsed_row.exam_grade_id] = parsed_row
        exam_grade_query = ProctoredExamGrade.objects.filter(id__in=parsed_row_dict.keys())
        # every id in the CSV must correspond to an existing grade record
        if exam_grade_query.count() < len(parsed_row_dict):
            bad_exam_grade_ids = set(parsed_row_dict.keys()) - set(exam_grade_query.values_list('id', flat=True))
            raise ParsingError(
                'Some exam grade IDs do not match any ProctoredExamGrade records: {}'.format(bad_exam_grade_ids)
            )
        for exam_grade in exam_grade_query.all():
            yield exam_grade, parsed_row_dict[exam_grade.id]
class Command(BaseCommand):
    """Parses a csv with exam grade adjustment information and changes the appropriate grades"""
    help = "Parses a csv with exam grade adjustment information and changes the appropriate grades"

    def add_arguments(self, parser):
        """Register the CSV file argument and optional column-name overrides"""
        parser.add_argument('csvfile', type=argparse.FileType('r'), help='')
        parser.add_argument(
            '--grade-id-col-name',
            default=GradeRowParser.default_col_names['exam_grade_id'],
            help='Name of the column that contains the proctored exam grade id')
        parser.add_argument(
            '--score-col-name',
            default=GradeRowParser.default_col_names['score'],
            help='Name of the column that contains the score value'
        )

    def handle(self, *args, **kwargs):  # pylint: disable=unused-argument
        """Apply each score adjustment from the CSV and report a summary"""
        column_overrides = dict(
            exam_grade_id=kwargs.get('grade_id_col_name'),
            score=kwargs.get('score_col_name'),
        )
        csv_file = kwargs.get('csvfile')
        reader = csv.DictReader(csv_file.read().splitlines())
        row_parser = GradeRowParser(col_names=column_overrides)
        total_rows = changed = unchanged = 0
        for exam_grade, adjustment in row_parser.parse_exam_grade_adjustments(reader):
            total_rows += 1
            if exam_grade.score == adjustment.score:
                unchanged += 1
            else:
                exam_grade.set_score(adjustment.score)
                exam_grade.save_and_log(None)
                changed += 1
        summary = ['Total rows: {}'.format(total_rows)]
        if changed:
            summary.append('Grades changed: {}'.format(changed))
        if unchanged:
            summary.append('Grades found with no change in score: {}'.format(unchanged))
        self.stdout.write(self.style.SUCCESS('\n'.join(summary)))
| bsd-3-clause | 1c302ed0c0462900c821a5288a1f0eaf | 40.798319 | 113 | 0.625251 | 3.941363 | false | false | false | false |
mitodl/micromasters | mail/utils.py | 1 | 4731 | """
Utils for mail
"""
import logging
from django.core.exceptions import ValidationError
from dashboard.models import ProgramEnrollment
from financialaid.api import get_formatted_course_price
from financialaid.constants import (
FINANCIAL_AID_APPROVAL_MESSAGE,
FINANCIAL_AID_APPROVAL_SUBJECT,
FINANCIAL_AID_DOCUMENTS_RECEIVED_MESSAGE,
FINANCIAL_AID_DOCUMENTS_RESET_MESSAGE,
FINANCIAL_AID_RESET_SUBJECT,
FINANCIAL_AID_DOCUMENTS_RECEIVED_SUBJECT,
FINANCIAL_AID_EMAIL_BODY,
FinancialAidStatus
)
log = logging.getLogger(__name__)

# Maps the bracketed placeholders that may appear in email text (e.g.
# "[PreferredName]") to Mailgun recipient-variable names; consumed by
# filter_recipient_variables() below.
RECIPIENT_VARIABLE_NAMES = {
    'PreferredName': 'preferred_name',
    'Email': 'email',
}
def generate_financial_aid_email(financial_aid):
    """
    Generates the email subject and body for a FinancialAid status update. Accepted statuses are
    FinancialAidStatus.APPROVED, FinancialAidStatus.PENDING_MANUAL_APPROVAL (documents received)
    and FinancialAidStatus.RESET.

    Args:
        financial_aid (FinancialAid): The FinancialAid object in question

    Returns:
        dict: {"subject": (str), "body": (str)}

    Raises:
        ValidationError: if the FinancialAid has any other status
    """
    program_title = financial_aid.tier_program.program.title
    status = financial_aid.status
    if status == FinancialAidStatus.APPROVED:
        program_enrollment = ProgramEnrollment.objects.get(
            user=financial_aid.user,
            program=financial_aid.tier_program.program
        )
        message = FINANCIAL_AID_APPROVAL_MESSAGE.format(
            program_name=program_title,
            price=get_formatted_course_price(program_enrollment)["price"]
        )
        subject = FINANCIAL_AID_APPROVAL_SUBJECT.format(program_name=program_title)
    elif status == FinancialAidStatus.PENDING_MANUAL_APPROVAL:
        message = FINANCIAL_AID_DOCUMENTS_RECEIVED_MESSAGE
        subject = FINANCIAL_AID_DOCUMENTS_RECEIVED_SUBJECT.format(program_name=program_title)
    elif status == FinancialAidStatus.RESET:
        message = FINANCIAL_AID_DOCUMENTS_RESET_MESSAGE
        subject = FINANCIAL_AID_RESET_SUBJECT.format(program_name=program_title)
    else:
        raise ValidationError("Invalid status on FinancialAid for generate_financial_aid_email()")
    body = FINANCIAL_AID_EMAIL_BODY.format(
        first_name=financial_aid.user.profile.first_name,
        message=message,
        program_name=program_title
    )
    return {"subject": subject, "body": body}
def generate_mailgun_response_json(response):
    """
    Return a JSON-serializable dict for a Mailgun response.

    Mailgun sometimes (e.g. on a 401) returns a body that is not valid JSON,
    in which case response.json() raises; fall back to wrapping the HTTP
    reason phrase so callers always receive a dict.

    Args:
        response (requests.Response): response object

    Returns:
        dict
    """
    try:
        return response.json()
    except ValueError:  # JSONDecodeError is a subclass of ValueError
        return {"message": response.reason}
def filter_recipient_variables(text):
    """
    Replace bracketed placeholders such as "[PreferredName]" with the
    corresponding Mailgun recipient variables, e.g. "%recipient.preferred_name%".

    Args:
        text (string): subject or body of the email

    Returns:
        string: text with recipient variables substituted
    """
    for placeholder, variable_name in RECIPIENT_VARIABLE_NAMES.items():
        text = text.replace('[{}]'.format(placeholder), '%recipient.{}%'.format(variable_name))
    return text
def get_email_footer(url):
    """
    Construct the HTML footer appended to MicroMasters emails.

    Args:
        url: link target for the "edit your settings" / "unsubscribe" anchors

    Returns:
        string: the html styled footer
    """
    unsubscribe_text = (
        "You are receiving this e-mail because you signed up for MITx"
        " MicroMasters.<br/> If you don't want to receive these emails in the"
        " future, you can<br/> <a href='{0}'>edit your settings</a>"
        " or <a href='{0}'>unsubscribe</a>."
    ).format(url)
    footer_template = (
        "<div style='margin-top:80px; text-align: center; color: #757575;'>"
        "<div style='margin:auto; max-width:50%;'><p>{0}</p>"
        "<p>MIT Office of Digital Learning<br/>"
        "600 Technology Square, 2nd Floor, Cambridge, MA 02139</p>"
        "</div></div>"
    )
    return footer_template.format(unsubscribe_text)
| bsd-3-clause | 252420a2faffe95a8e2e6eca259795eb | 36.547619 | 119 | 0.682731 | 3.78783 | false | false | false | false |
pywinauto/pywinauto | pywinauto/windows/win32_element_info.py | 1 | 11270 | # GUI Application automation and testing library
# Copyright (C) 2006-2018 Mark Mc Mahon and Contributors
# https://github.com/pywinauto/pywinauto/graphs/contributors
# http://pywinauto.readthedocs.io/en/latest/credits.html
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of pywinauto nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Implementation of the class to deal with a native element (window with a handle)"""
import ctypes
import six
import win32gui
from . import win32functions
from . import win32structures
from .. import handleprops
from ..element_info import ElementInfo
from .remote_memory_block import RemoteMemoryBlock
def _register_win_msg(msg_name):
    """Register a named window message and return its numeric message id"""
    msg_id = win32functions.RegisterWindowMessage(six.text_type(msg_name))
    if not isinstance(msg_id, six.integer_types):
        # win32functions may be mocked (e.g. on ReadTheDocs); return a dummy id
        return -1
    if msg_id <= 0:
        raise Exception("Cannot register {}".format(msg_name))
    return msg_id
class HwndElementInfo(ElementInfo):
    """Wrapper for window handler"""

    # Registered window messages used to ask a control for its name/type via
    # SendMessage (see auto_id / __get_control_type); only answered by apps
    # that implement them (presumably WinForms controls -- TODO confirm)
    wm_get_ctrl_name = _register_win_msg('WM_GETCONTROLNAME')
    wm_get_ctrl_type = _register_win_msg('WM_GETCONTROLTYPE')

    # Search criteria: properties that may be matched by regular expression
    # vs. properties that must be matched exactly
    re_props = ["class_name", "name", "auto_id", "control_type", "full_control_type"]
    exact_only_props = ["handle", "pid", "control_id", "enabled", "visible", "rectangle"]
    search_order = ["handle", "class_name", "pid", "control_id", "visible", "enabled", "name",
                    "auto_id", "control_type", "full_control_type", "rectangle"]
    # sanity check: every supported property must appear in search_order
    assert set(re_props + exact_only_props) == set(search_order)

    # Legacy criterion names mapped to (new_name, value-translation dict or None)
    renamed_props = {
        "title": ("name", None),
        "title_re": ("name_re", None),
        "process": ("pid", None),
        "visible_only": ("visible", {True: True, False: None}),
        "enabled_only": ("enabled", {True: True, False: None}),
        "top_level_only": ("depth", {True: 1, False: None}),
    }

    def __init__(self, handle=None):
        """Create element by handle (default is root element)"""
        self._cache = {}
        if handle is None: # root element
            self._handle = win32functions.GetDesktopWindow()
        else:
            self._handle = handle

    def set_cache_strategy(self, cached):
        """Set a cache strategy for frequently used attributes of the element"""
        pass # TODO: implement a cache strategy for native elements

    @property
    def handle(self):
        """Return the handle of the window"""
        return self._handle

    @property
    def rich_text(self):
        """Return the text of the window"""
        return handleprops.text(self.handle)

    # for a Win32 element the "name" is simply the window text
    name = rich_text

    @property
    def control_id(self):
        """Return the ID of the window"""
        return handleprops.controlid(self.handle)

    @property
    def process_id(self):
        """Return the ID of process that controls this window"""
        return handleprops.processid(self.handle)

    pid = process_id  # alias

    @property
    def class_name(self):
        """Return the class name of the window"""
        return handleprops.classname(self.handle)

    @property
    def enabled(self):
        """Return True if the window is enabled"""
        return handleprops.isenabled(self.handle)

    @property
    def visible(self):
        """Return True if the window is visible"""
        return handleprops.isvisible(self.handle)

    @property
    def parent(self):
        """Return the parent of the window (None when there is no parent handle)"""
        parent_hwnd = handleprops.parent(self.handle)
        if parent_hwnd:
            return HwndElementInfo(parent_hwnd)
        else:
            return None

    def children(self, **kwargs):
        """Return a list of immediate children of the window

        Supported filter kwargs: class_name, name (window text), control_type
        and process (pid); other kwargs are currently ignored here.
        """
        class_name = kwargs.get('class_name', None)
        name = kwargs.get('name', None)
        control_type = kwargs.get('control_type', None)
        process = kwargs.get('process', None)
        # TODO: 'cache_enable' and 'depth' are ignored so far

        # this will be filled in the callback function
        child_elements = []

        # The callback function that will be called for each HWND
        # all we do is append the wrapped handle
        def enum_window_proc(hwnd, lparam):
            """Called for each window - adds wrapped elements to a list"""
            element = HwndElementInfo(hwnd)
            # filters: returning True without appending skips the element
            # but keeps the enumeration going
            if process is not None and process != element.pid:
                return True
            if class_name is not None and class_name != element.class_name:
                return True
            if name is not None and name != element.rich_text:
                return True
            if control_type is not None and control_type != element.control_type:
                return True
            child_elements.append(element)
            return True

        # define the type of the child procedure
        enum_win_proc_t = ctypes.WINFUNCTYPE(ctypes.wintypes.BOOL,
                                             ctypes.wintypes.HWND,
                                             ctypes.wintypes.LPARAM)

        # 'construct' the callback with our function
        proc = enum_win_proc_t(enum_window_proc)

        if self == HwndElementInfo(): # self == root
            # loop over all the top level windows (callback called for each)
            win32functions.EnumWindows(proc, 0)
        else:
            # loop over all the children (callback called for each)
            win32functions.EnumChildWindows(self.handle, proc, 0)

        return child_elements

    def iter_children(self, **kwargs):
        """Return a generator of immediate children of the window"""
        # TODO: Iterate over children using Win32 API
        for child in self.children(**kwargs):
            yield child

    def descendants(self, **kwargs):
        """Return descendants of the window (all children from sub-tree)"""
        if self == HwndElementInfo(): # root
            # for the desktop: gather the top-level windows first, then extend
            # the filtered result with each top-level window's children
            top_elements = self.children()
            child_elements = self.children(**kwargs)
            for child in top_elements:
                child_elements.extend(child.children(**kwargs))
        else:
            child_elements = self.children(**kwargs)
        depth = kwargs.pop('depth', None)
        child_elements = ElementInfo.filter_with_depth(child_elements, self, depth)
        return child_elements

    @property
    def rectangle(self):
        """Return rectangle of the element"""
        return handleprops.rectangle(self.handle)

    def dump_window(self):
        """Dump a window as a set of properties"""
        return handleprops.dumpwindow(self.handle)

    def __hash__(self):
        """Return a unique hash value based on the element's handle"""
        return hash(self.handle)

    def __eq__(self, other):
        """Check if 2 HwndElementInfo objects describe 1 actual element"""
        # also allows comparing directly against a raw handle value
        if not isinstance(other, HwndElementInfo):
            return self.handle == other
        return self.handle == other.handle

    def __ne__(self, other):
        """Check if two HwndElementInfo objects describe different elements"""
        return not (self == other)

    @property
    def auto_id(self):
        """Return AutomationId of the element

        Sends the registered WM_GETCONTROLNAME message; returns '' when the
        window does not answer it.
        """
        textval = ''
        length = 1024
        # buffer the target window writes its answer into; size is in bytes,
        # length is in UTF-16 code units (hence length*2)
        remote_mem = RemoteMemoryBlock(self, size=length*2)
        ret = win32gui.SendMessage(self.handle, self.wm_get_ctrl_name, length, remote_mem.mem_address)
        if ret:
            text = ctypes.create_unicode_buffer(length)
            remote_mem.Read(text)
            textval = text.value
        del remote_mem
        return textval

    def __get_control_type(self, full=False):
        """Internal parameterized method to distinguish control_type and full_control_type properties"""
        textval = ''
        length = 1024
        remote_mem = RemoteMemoryBlock(self, size=length*2)
        ret = win32gui.SendMessage(self.handle, self.wm_get_ctrl_type, length, remote_mem.mem_address)
        if ret:
            text = ctypes.create_unicode_buffer(length)
            remote_mem.Read(text)
            textval = text.value
        del remote_mem
        # simplify control type for WinForms controls
        if (not full) and ("PublicKeyToken" in textval):
            # keep only the type name, dropping the assembly-qualified suffix
            textval = textval.split(", ")[0]
        return textval

    @property
    def control_type(self):
        """Return control type of the element"""
        return self.__get_control_type(full=False)

    @property
    def full_control_type(self):
        """Return full string of control type of the element"""
        return self.__get_control_type(full=True)

    @classmethod
    def from_point(cls, x, y):
        """Return child element at specified point coordinates"""
        current_handle = win32gui.WindowFromPoint((x, y))
        # drill one level down to the child window at the same point (if any)
        child_handle = win32gui.ChildWindowFromPoint(current_handle, (x, y))
        if child_handle:
            return cls(child_handle)
        else:
            return cls(current_handle)

    @classmethod
    def top_from_point(cls, x, y):
        """Return top level element at specified point coordinates"""
        current_elem = cls.from_point(x, y)
        current_parent = current_elem.parent
        # walk up the parent chain until just below the desktop (root) element
        while current_parent is not None and current_parent != cls():
            current_elem = current_parent
            current_parent = current_elem.parent
        return current_elem

    @classmethod
    def get_active(cls):
        """Return current active element"""
        gui_info = win32structures.GUITHREADINFO()
        gui_info.cbSize = ctypes.sizeof(gui_info)

        # get all the active elements (not just the specified process)
        ret = win32functions.GetGUIThreadInfo(0, ctypes.byref(gui_info))
        if not ret:
            raise ctypes.WinError()

        hwndActive = gui_info.hwndActive
        return cls(hwndActive) if hwndActive is not None else None
| bsd-3-clause | ccaf46675da7b77c0fb53e408dd044ee | 35.830065 | 104 | 0.639397 | 4.232069 | false | false | false | false |
pywinauto/pywinauto | pywinauto/findbestmatch.py | 1 | 20676 | # GUI Application automation and testing library
# Copyright (C) 2006-2018 Mark Mc Mahon and Contributors
# https://github.com/pywinauto/pywinauto/graphs/contributors
# http://pywinauto.readthedocs.io/en/latest/credits.html
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of pywinauto nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Module to find the closest match of a string in a list"""
from __future__ import unicode_literals
import re
import difflib
import six
# Remnants of an experiment with Levenshtein distance as the similarity
# metric (difflib.SequenceMatcher is used instead -- see _get_match_ratios):
#import ctypes
#import ldistance
#levenshtein_distance = ctypes.cdll.levenshtein.levenshtein_distance
#levenshtein_distance = ldistance.distance

# Minimum SequenceMatcher ratio for a control name match to be accepted
find_best_control_match_cutoff = .6

#====================================================================
class MatchError(IndexError):
    """Raised when no candidate string matches the search text well enough"""

    def __init__(self, items=None, tofind=''):
        """Remember the search text and candidate list, and build the message"""
        self.tofind = tofind
        self.items = [] if items is None else items
        super(MatchError, self).__init__(
            "Could not find '{0}' in '{1}'".format(tofind, self.items))
# Module-level memo of SequenceMatcher ratios keyed by (text_a, text_b)
# string pairs; shared by the matching helpers below and never pruned
# within this module.
_cache = {}

# given a list of texts return the match score for each
# and the best score and text with best score
#====================================================================
def _get_match_ratios(texts, match_against):
    """Get the match ratio of how each item in texts compared to match_against

    Args:
        texts: iterable of candidate strings
        match_against: the string each candidate is compared with

    Returns:
        tuple: (dict mapping text -> ratio, best ratio, text with the best ratio)
    """
    # now time to figure out the matching
    ratio_calc = difflib.SequenceMatcher()
    ratio_calc.set_seq1(match_against)

    ratios = {}
    best_ratio = 0
    best_text = ''

    for text in texts:
        # NOTE: the dead `if 0: pass` statement that used to sit here has
        # been removed; it had no effect.
        # check the memo first -- the pair may have been stored in either order
        if (text, match_against) in _cache:
            ratios[text] = _cache[(text, match_against)]
        elif (match_against, text) in _cache:
            ratios[text] = _cache[(match_against, text)]
        else:
            # set up the SequenceMatcher with other text
            ratio_calc.set_seq2(text)

            # calculate ratio and store it
            ratios[text] = ratio_calc.ratio()
            _cache[(match_against, text)] = ratios[text]

        # if this is the best so far then update best stats
        if ratios[text] > best_ratio:
            best_ratio = ratios[text]
            best_text = text

    return ratios, best_ratio, best_text
#====================================================================
def find_best_match(search_text, item_texts, items, limit_ratio=.5):
    """Return the item that best matches the search_text

    * **search_text** The text to search for
    * **item_texts** The list of texts to search through
    * **items** The list of items corresponding (1 to 1)
      to the list of texts to search through.
    * **limit_ratio** How well the text has to match the best match.
      If the best match matches lower then this then it is not
      considered a match and a MatchError is raised, (default = .5)
    """
    def _normalize(text):
        # only the first line, up to the first tab, takes part in matching
        return _cut_at_eol(_cut_at_tab(text))

    search_text = _normalize(search_text)

    # map each (unique-ified) text to its corresponding item
    text_item_map = UniqueDict()
    for text, item in zip(item_texts, items):
        text_item_map[_normalize(text)] = item

    _, best_ratio, best_text = _get_match_ratios(text_item_map.keys(), search_text)

    if best_ratio < limit_ratio:
        raise MatchError(items=text_item_map.keys(), tofind=search_text)

    return text_item_map[best_text]
#====================================================================
# precompiled patterns used by the _cut_at_* / _clean_non_chars helpers below
_after_tab = re.compile(r"\t.*", re.UNICODE)
_after_eol = re.compile(r"\n.*", re.UNICODE)
_non_word_chars = re.compile(r"\W", re.UNICODE)
def _cut_at_tab(text):
    """Return the text truncated at the first tab character"""
    # remove anything after the first tab
    return _after_tab.sub("", text)
def _cut_at_eol(text):
    """Return only the first line of the text (drop everything from the first newline)"""
    # remove anything after the first EOL
    return _after_eol.sub("", text)
def _clean_non_chars(text):
    """Remove all non-word (\\W) characters from the text"""
    # should this also remove everything after the first tab?

    # remove non alphanumeric characters
    return _non_word_chars.sub("", text)
def is_above_or_to_left(ref_control, other_ctrl):
    """Return True if other_ctrl lies above or to the left of ref_control"""
    other_rect = other_ctrl.rectangle()
    ref_rect = ref_control.rectangle()

    # other control starts at or beyond the reference's right edge
    if other_rect.left >= ref_rect.right:
        return False

    # other control starts at or below the reference's bottom edge
    if other_rect.top >= ref_rect.bottom:
        return False

    # if its top-left corner is at or past the reference's top-left corner,
    # it is neither above nor to the left
    return not (other_rect.top >= ref_rect.top and other_rect.left >= ref_rect.left)
#====================================================================
# Threshold above which a text control is considered too far away to provide
# a name (distances are sums of absolute coordinate differences -- see
# get_non_text_control_name). NOTE(review): the misspelled name ("cuttoff")
# is part of the module namespace, so it is intentionally left unchanged.
distance_cuttoff = 999
def get_non_text_control_name(ctrl, controls, text_ctrls):
    """
    return the name for this control by finding the closest
    text control above and to its left

    Returns a list of candidate names; note the final entry may be an empty
    string when no text control was close enough -- callers filter empty
    values out (see get_control_names).
    """
    names = []

    # simply look for an instance of the control in the list,
    # we don't use list.index() method as it invokes __eq__
    ctrl_index = 0
    for i, c in enumerate(controls):
        if c is ctrl:
            ctrl_index = i
            break
    ctrl_friendly_class_name = ctrl.friendly_class_name()

    # first candidate: the immediately preceding control in tab order, when it
    # is a visible Static label sitting above or to the left of this control
    if ctrl_index != 0:
        prev_ctrl = controls[ctrl_index-1]
        prev_ctrl_text = prev_ctrl.window_text()
        if prev_ctrl.friendly_class_name() == "Static" and \
            prev_ctrl.is_visible() and prev_ctrl_text and \
            is_above_or_to_left(ctrl, prev_ctrl):
            names.append(
                prev_ctrl_text +
                ctrl_friendly_class_name)

    best_name = ''
    closest = distance_cuttoff
    # now for each of the visible text controls
    for text_ctrl in text_ctrls:

        # get aliases to the control rectangles
        text_r = text_ctrl.rectangle()
        ctrl_r = ctrl.rectangle()

        # skip controls where text win is to the right of ctrl
        if text_r.left >= ctrl_r.right:
            continue

        # skip controls where text win is below ctrl
        if text_r.top >= ctrl_r.bottom:
            continue

        # calculate the distance between the controls
        # at first I just calculated the distance from the top left
        # corner of one control to the top left corner of the other control
        # but this was not best, so as a text control should either be above
        # or to the left of the control I get the distance between
        # the top left of the non text control against the
        #    Top-Right of the text control (text control to the left)
        #    Bottom-Left of the text control (text control above)
        # then I get the min of these two

        # We do not actually need to calculate the difference here as we
        # only need a comparative number. As long as we find the closest one
        # the actual distance is not all that important to us.
        # this reduced the unit tests run on my by about 1 second
        # (from 61 ->60 s)

        # (x^2 + y^2)^.5
        #distance = (
        #    (text_r.left - ctrl_r.left) ** 2 +  # (x^2 + y^2)
        #    (text_r.bottom - ctrl_r.top) ** 2) \
        #    ** .5  # ^.5

        #distance2 = (
        #    (text_r.right - ctrl_r.left) ** 2 +  # (x^2 + y^2)
        #    (text_r.top - ctrl_r.top) ** 2) \
        #    ** .5  # ^.5

        # Manhattan-style distances: label-above vs. label-to-the-left
        distance = abs(text_r.left - ctrl_r.left) + abs(text_r.bottom - ctrl_r.top)
        distance2 = abs(text_r.right - ctrl_r.left) + abs(text_r.top - ctrl_r.top)
        distance = min(distance, distance2)

        # UpDown control should use Static text only because edit box text is often useless
        if ctrl_friendly_class_name == "UpDown" and \
                text_ctrl.friendly_class_name() == "Static" and distance < closest:
            # TODO: use search in all text controls for all non-text ones
            # (like Dijkstra algorithm vs Floyd one)
            closest = distance
            ctrl_text = text_ctrl.window_text()
            if ctrl_text is None:
                # the control probably doesn't exist so skip it
                continue
            best_name = ctrl_text + ctrl_friendly_class_name
        # if this distance was closer than the last one
        elif distance < closest:
            closest = distance
            #if text_ctrl.window_text() == '':
            #    best_name = ctrl_friendly_class_name + ' '.join(text_ctrl.texts()[1:2])
            #else:
            ctrl_text = text_ctrl.window_text()
            if ctrl_text is None:
                # the control probably doesn't exist so skip it
                continue
            best_name = ctrl_text + ctrl_friendly_class_name

    names.append(best_name)

    return names
#====================================================================
def get_control_names(control, allcontrols, textcontrols):
    """Return the candidate names for this control

    Note: despite the historical phrasing, this returns a *set* of names
    (duplicates and empty values removed), built from the friendly class
    name, the control's own text and/or nearby text controls.
    """
    names = []

    # if it has a reference control - then use that
    #if hasattr(control, 'ref') and control.ref:
    #    control = control.ref

    # Add the control based on it's friendly class name
    friendly_class_name = control.friendly_class_name()
    names.append(friendly_class_name)

    # if it has some character text then add it base on that
    # and based on that with friendly class name appended
    cleaned = control.window_text()
    # Todo - I don't like the hardcoded classnames here!
    if cleaned and control.has_title:
        names.append(cleaned)
        names.append(cleaned + friendly_class_name)
    elif control.has_title and friendly_class_name != 'TreeView':
        try:
            for text in control.texts()[1:]:
                names.append(friendly_class_name + text)
        except Exception:
            #import traceback
            #from .actionlogger import ActionLogger
            pass #ActionLogger().log('Warning! Cannot get control.texts()') #\nTraceback:\n' + traceback.format_exc())

        # so find the text of the nearest text visible control
        non_text_names = get_non_text_control_name(control, allcontrols, textcontrols)
        # and if one was found - add it
        if non_text_names:
            names.extend(non_text_names)
    # it didn't have visible text
    else:
        # NOTE(review): this branch duplicates the nearest-text lookup of the
        # elif branch above; kept as-is to preserve behavior exactly.
        # so find the text of the nearest text visible control
        non_text_names = get_non_text_control_name(control, allcontrols, textcontrols)
        # and if one was found - add it
        if non_text_names:
            names.extend(non_text_names)

    # return the names - and make sure there are no duplicates or empty values
    cleaned_names = set(names) - set([None, ""])
    return cleaned_names
#====================================================================
class UniqueDict(dict):
    """A dictionary subclass that handles making its keys unique

    When a key that already exists is set again, the new item is stored
    under ``key2``, ``key3``, ... while the first item additionally gets
    aliases ``key0`` and ``key1`` (the plain ``key`` entry is kept and
    still maps to the first item).
    """
    def __setitem__(self, text, item):
        """Set an item of the dictionary"""
        # this text is already in the map
        # so we need to make it unique
        if text in self:
            # find next unique text after text1
            # (the counter starts at 2, so the second occurrence of a
            # name becomes "text2")
            unique_text = text
            counter = 2
            while unique_text in self:
                unique_text = text + str(counter)
                counter += 1

            # now we also need to make sure the original item
            # is under text0 and text1 also!
            if text + '0' not in self:
                dict.__setitem__(self, text+'0', self[text])
                dict.__setitem__(self, text+'1', self[text])

            # now that we don't need original 'text' anymore
            # replace it with the uniq text
            text = unique_text

        # add our current item
        dict.__setitem__(self, text, item)

    def find_best_matches(
        self,
        search_text,
        clean = False,
        ignore_case = False):
        """Return the best matches for search_text in the items

        * **search_text** the text to look for
        * **clean** whether to clean non text characters out of the strings
        * **ignore_case** compare strings case insensitively

        Returns a ``(best_ratio, best_texts)`` tuple where ``best_texts``
        is the list of keys that share the highest similarity ratio.

        Relies on module-level helpers: ``_cache`` (memoized ratios),
        ``_clean_non_chars`` and ``find_best_control_match_cutoff``.
        """
        # now time to figure out the matching
        ratio_calc = difflib.SequenceMatcher()

        if ignore_case:
            search_text = search_text.lower()

        ratio_calc.set_seq1(search_text)

        ratios = {}
        best_ratio = 0
        best_texts = []

        # penalize matches obtained through cleaning/case-folding so an
        # exact match is always preferred (each transformation costs 10%)
        ratio_offset = 1
        if clean:
            ratio_offset *= .9
        if ignore_case:
            ratio_offset *= .9

        for text_ in self:
            # make a copy of the text as we need the original later
            text = text_
            if clean:
                text = _clean_non_chars(text)
            if ignore_case:
                text = text.lower()

            # check if this item is in the cache - if yes, then retrieve it
            # (the pair is looked up in both orders)
            if (text, search_text) in _cache:
                ratios[text_] = _cache[(text, search_text)]
            elif(search_text, text) in _cache:
                ratios[text_] = _cache[(search_text, text)]
            # not in the cache - calculate it and add it to the cache
            else:
                # set up the SequenceMatcher with other text
                ratio_calc.set_seq2(text)

                # cheap-to-expensive cascade: only compute the more
                # expensive ratio if the cheaper upper bound clears the
                # cutoff (real_quick_ratio/quick_ratio are upper bounds
                # on ratio)
                ratio = ratio_calc.real_quick_ratio() * ratio_offset

                if ratio >= find_best_control_match_cutoff:
                    ratio = ratio_calc.quick_ratio() * ratio_offset

                    if ratio >= find_best_control_match_cutoff:
                        ratio = ratio_calc.ratio() * ratio_offset

                # save the match we got and store it in the cache
                ratios[text_] = ratio
                _cache[(text, search_text)] = ratio

                # try using the levenshtein distance instead
                #lev_dist = levenshtein_distance(six.text_type(search_text), six.text_type(text))
                #ratio = 1 - lev_dist / 10.0
                #ratios[text_] = ratio

                #print "%5s" %("%0.2f"% ratio), search_text, `text`

            # if this is the best so far then update best stats
            if ratios[text_] > best_ratio and \
                ratios[text_] >= find_best_control_match_cutoff:
                best_ratio = ratios[text_]
                best_texts = [text_]

            elif ratios[text_] == best_ratio:
                best_texts.append(text_)

        #best_ratio *= ratio_offset

        return best_ratio, best_texts
#====================================================================
def build_unique_dict(controls):
    """Build the disambiguated list of controls

    Separated out to a different function so that we can get
    the control identifiers for printing.
    """
    # collect the visible controls with text - they may serve as labels
    # for neighbouring text-less controls
    text_ctrls = [
        candidate for candidate in controls
        if candidate.can_be_label and candidate.is_visible() and candidate.window_text()
    ]

    # map every possible name of every control to that control,
    # letting UniqueDict disambiguate duplicate names as we go
    name_control_map = UniqueDict()
    for ctrl in controls:
        for name in get_control_names(ctrl, controls, text_ctrls):
            name_control_map[name] = ctrl

    return name_control_map
#====================================================================
def find_best_control_matches(search_text, controls):
    """Return the controls that are the best match to search_text

    This is slightly different from find_best_match in that it builds
    up the list of text items to search through using information
    from each control. So for example if there is an OK Button
    then the following are all added to the search list:
    "OK", "Button", "OKButton"

    But if there is a ListView (which does not have visible 'text')
    then it will just add "ListView".
    """
    name_control_map = build_unique_dict(controls)

    search_text = six.text_type(search_text)

    # try the exact match first, then progressively fuzzier strategies,
    # keeping the best overall ratio and its matching names
    best_ratio, best_texts = name_control_map.find_best_matches(search_text)
    for options in (
            dict(ignore_case=True),
            dict(clean=True),
            dict(clean=True, ignore_case=True)):
        ratio, texts = name_control_map.find_best_matches(search_text, **options)
        if ratio > best_ratio:
            best_ratio = ratio
            best_texts = texts

    if best_ratio < find_best_control_match_cutoff:
        raise MatchError(items = name_control_map.keys(), tofind = search_text)

    return [name_control_map[best_text] for best_text in best_texts]
#
#def GetControlMatchRatio(text, ctrl):
# # get the texts for the control
# ctrl_names = get_control_names(ctrl)
#
# #get the best match for these
# matcher = UniqueDict()
# for name in ctrl_names:
# matcher[name] = ctrl
#
# best_ratio, unused = matcher.find_best_matches(text)
#
# return best_ratio
#
#
#
#def get_controls_ratios(search_text, controls):
# name_control_map = UniqueDict()
#
# # collect all the possible names for all controls
# # and build a list of them
# for ctrl in controls:
# ctrl_names = get_control_names(ctrl)
#
# # for each of the names
# for name in ctrl_names:
# name_control_map[name] = ctrl
#
# match_ratios, best_ratio, best_text = \
# _get_match_ratios(name_control_map.keys(), search_text)
#
# return match_ratios, best_ratio, best_text,
| bsd-3-clause | 873b08e89dd8cd5180e63c0c194f9468 | 34.210158 | 118 | 0.581399 | 4.032768 | false | false | false | false |
pywinauto/pywinauto | pywinauto/timings.py | 1 | 15638 | # GUI Application automation and testing library
# Copyright (C) 2006-2018 Mark Mc Mahon and Contributors
# https://github.com/pywinauto/pywinauto/graphs/contributors
# http://pywinauto.readthedocs.io/en/latest/credits.html
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of pywinauto nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Global timing settings for all of pywinauto
This module has one object that should be used for all timing adjustments:
* timings.Timings
There are a couple of predefined settings:
* ``timings.Timings.fast()``
* ``timings.Timings.defaults()``
* ``timings.Timings.slow()``
The Following are the individual timing settings that can be adjusted:
* window_find_timeout (default 5)
* window_find_retry (default .09)
* app_start_timeout (default 10)
* app_start_retry (default .90)
* app_connect_timeout (default 5.)
* app_connect_retry (default .1)
* cpu_usage_interval (default .5)
* cpu_usage_wait_timeout (default 20)
* exists_timeout (default .5)
* exists_retry (default .3)
* after_invoke_wait (default .1)
* after_click_wait (default .09)
* after_clickinput_wait (default .09)
* after_menu_wait (default .1)
* after_sendkeys_key_wait (default .01)
* after_button_click_wait (default 0)
* before_closeclick_wait (default .1)
* closeclick_retry (default .05)
* closeclick_dialog_close_wait (default 2)
* after_closeclick_wait (default .2)
* after_windowclose_timeout (default 2)
* after_windowclose_retry (default .5)
* after_setfocus_wait (default .06)
* setfocus_timeout (default 2)
* setfocus_retry (default .1)
* after_setcursorpos_wait (default .01)
* sendmessagetimeout_timeout (default .01)
* after_tabselect_wait (default .05)
* after_listviewselect_wait (default .01)
* after_listviewcheck_wait default(.001)
* listviewitemcontrol_timeout default(1.5)
* after_treeviewselect_wait default(.1)
* after_toobarpressbutton_wait default(.01)
* after_updownchange_wait default(.1)
* after_movewindow_wait default(0)
* after_buttoncheck_wait default(0)
* after_comboboxselect_wait default(.001)
* after_listboxselect_wait default(0)
* after_listboxfocuschange_wait default(0)
* after_editsetedittext_wait default(0)
* after_editselect_wait default(.02)
* drag_n_drop_move_mouse_wait default(.1)
* before_drag_wait default(.2)
* before_drop_wait default(.1)
* after_drag_n_drop_wait default(.1)
* scroll_step_wait default(.1)
"""
import six
import time
import operator
from functools import wraps
from . import deprecated
#=========================================================================
class TimeConfig(object):
    """Central storage and manipulation of timing values

    Only the keys listed in the default table below are valid timing
    names; reading or writing any other attribute raises AttributeError
    (enforced by __getattribute__/__setattr__).
    """

    # master table of timing names and default values; also serves as
    # the whitelist of valid attribute names
    __default_timing = {
        'window_find_timeout': 5.,
        'window_find_retry': .09,
        'app_start_timeout': 10.,
        'app_start_retry': .90,
        'app_connect_timeout': 5.,
        'app_connect_retry': .1,
        'cpu_usage_interval': .5,
        'cpu_usage_wait_timeout': 20.,
        'exists_timeout': .5,
        'exists_retry': .3,
        'after_invoke_wait': .1,
        'after_click_wait': .09,
        'after_clickinput_wait': .09,
        'after_menu_wait': .1,
        'after_sendkeys_key_wait': .01,
        'after_button_click_wait': 0,
        'before_closeclick_wait': .1,
        'closeclick_retry': .05,
        'closeclick_dialog_close_wait': 2.,
        'after_closeclick_wait': .2,
        'after_windowclose_timeout': 2,
        'after_windowclose_retry': .5,
        'after_setfocus_wait': .06,
        'setfocus_timeout': 2,
        'setfocus_retry': .1,
        'after_setcursorpos_wait': .01,
        'sendmessagetimeout_timeout': .01,
        'after_tabselect_wait': .05,
        'after_listviewselect_wait': .01,
        'after_listviewcheck_wait': .001,
        'listviewitemcontrol_timeout': 1.5,
        'after_treeviewselect_wait': .1,
        'after_toobarpressbutton_wait': .01,
        'after_updownchange_wait': .1,
        'after_movewindow_wait': 0,
        'after_buttoncheck_wait': 0,
        'after_comboboxselect_wait': 0.001,
        'after_listboxselect_wait': 0,
        'after_listboxfocuschange_wait': 0,
        'after_editsetedittext_wait': 0,
        'after_editselect_wait': 0.02,
        'drag_n_drop_move_mouse_wait': 0.1,
        'before_drag_wait': 0.2,
        'before_drop_wait': 0.1,
        'after_drag_n_drop_wait': 0.1,
        'scroll_step_wait': 0.1,
        'app_exit_timeout': 10.,
        'app_exit_retry': .1,
    }

    # sanity check on the defaults
    assert(__default_timing['window_find_timeout'] >=
           __default_timing['window_find_retry'] * 2)

    # the live values; mutated in place by attribute writes and by
    # fast()/slow(), replaced wholesale by defaults()
    _timings = __default_timing.copy()
    # NOTE(review): _cur_speed is not used within this class - possibly
    # referenced elsewhere in the module; confirm before removing
    _cur_speed = 1

    def __getattribute__(self, attr):
        """Get the value for a particular timing"""
        # pass through special attributes untouched
        if attr in ['__dict__', '__members__', '__methods__', '__class__']:
            return object.__getattribute__(self, attr)

        # pass through real class members (methods, class attributes)
        if attr in dir(TimeConfig):
            return object.__getattribute__(self, attr)

        # anything else must be a known timing name
        if attr in self.__default_timing:
            return self._timings[attr]
        else:
            raise AttributeError("Unknown timing setting: {0}".format(attr))

    def __setattr__(self, attr, value):
        """Set a particular timing"""
        # '_timings' itself may be rebound (used by defaults())
        if attr == '_timings':
            object.__setattr__(self, attr, value)
        # only whitelisted timing names may be assigned
        elif attr in self.__default_timing:
            self._timings[attr] = value
        else:
            raise AttributeError("Unknown timing setting: {0}".format(attr))

    def fast(self):
        """Set fast timing values

        Currently this changes the timing in the following ways:

        * timeouts are clamped to at most 1 second
        * waits are halved (each call halves them again, so this is
          not idempotent)
        * retries are set to .001 seconds

        NOTE(review): a name containing both "_timeout" and "_wait"
        (e.g. 'cpu_usage_wait_timeout') matches both branches below and
        is clamped *and* halved - confirm this is intended.
        """
        for setting in self.__default_timing:
            # set timeouts to the min of the current speed or 1 second
            if "_timeout" in setting:
                self._timings[setting] = \
                    min(1, self._timings[setting])

            if "_wait" in setting:
                self._timings[setting] = self._timings[setting] / 2

            elif setting.endswith("_retry"):
                self._timings[setting] = 0.001

        #self._timings['app_start_timeout'] = .5

    def slow(self):
        """Set slow timing values

        Currently this changes the timing in the following ways:
        timeouts = default timeouts * 10
        waits = default waits * 3
        retries = default retries * 3

        (if existing times are slower then keep existing times)

        Every resulting value is additionally floored at .2 seconds.
        """
        for setting in self.__default_timing:
            if "_timeout" in setting:
                self._timings[setting] = max(
                    self.__default_timing[setting] * 10,
                    self._timings[setting])

            # NOTE(review): this is 'if', not 'elif', so a name with both
            # "_timeout" and "_wait" runs both branches (harmless here
            # because max() keeps the larger *10 value) - confirm intended
            if "_wait" in setting:
                self._timings[setting] = max(
                    self.__default_timing[setting] * 3,
                    self._timings[setting])

            elif setting.endswith("_retry"):
                self._timings[setting] = max(
                    self.__default_timing[setting] * 3,
                    self._timings[setting])

            # floor every value at .2 seconds
            if self._timings[setting] < .2:
                self._timings[setting] = .2

    def defaults(self):
        """Set all timings to the default time"""
        self._timings = self.__default_timing.copy()

    # Non PEP-8 aliases
    Fast = deprecated(fast)
    Slow = deprecated(slow)
    Defaults = deprecated(defaults)
# module-level singleton through which all timing values are read and set
Timings = TimeConfig()
#=========================================================================
class TimeoutError(RuntimeError):
    """Raised by wait_until/wait_until_passes when the timeout expires

    NOTE(review): this shadows the Python 3 builtin ``TimeoutError``
    inside this module; kept because external code may catch this class.
    """
    pass
#=========================================================================
# time.clock() was the high-resolution timer on Python 2, but
# time.perf_counter() is its replacement on Python 3
_clock_func = time.perf_counter if six.PY3 else time.clock


def timestamp():
    """Get a precise timestamp"""
    return _clock_func()
#=========================================================================
def always_wait_until(timeout,
                      retry_interval,
                      value=True,
                      op=operator.eq):
    """Decorator to call wait_until(...) every time for a decorated function/method

    Every call of the decorated callable is retried through wait_until
    with the given timeout/retry_interval until ``op(result, value)``
    is True.
    """
    def wait_until_decorator(func):
        """Callable object that must be returned by the @always_wait_until decorator"""
        @wraps(func)
        def wrapper(*args, **kwargs):
            # delegate the whole retry loop to wait_until
            return wait_until(timeout, retry_interval, func, value, op,
                              *args, **kwargs)
        return wrapper
    return wait_until_decorator
#=========================================================================
def wait_until(timeout,
               retry_interval,
               func,
               value=True,
               op=operator.eq,
               *args, **kwargs):
    r"""
    Wait until ``op(func(*args, **kwargs), value)`` is True or until timeout expires

    * **timeout** how long the function will try the function
    * **retry_interval** how long to wait between retries
    * **func** the function that will be executed
    * **value** the value to be compared against (defaults to True)
    * **op** the comparison function (defaults to equality)
    * **args** optional arguments to be passed to func when called
    * **kwargs** optional keyword arguments to be passed to func when called

    Returns the return value of the function

    If the operation times out then the return value of the function
    is in the 'function_value' attribute of the raised exception.

    e.g. ::

        try:
            # wait a maximum of 10.5 seconds for the
            # the objects item_count() method to return 10
            # in increments of .5 of a second
            wait_until(10.5, .5, self.item_count, 10)
        except TimeoutError as e:
            print("timed out")
    """
    start = timestamp()

    while True:
        func_val = func(*args, **kwargs)
        if op(func_val, value):
            # the condition is satisfied - we are done
            return func_val

        # how much time remains before the deadline
        time_left = timeout - (timestamp() - start)
        if time_left <= 0:
            # out of time - attach the last result for the caller
            err = TimeoutError("timed out")
            err.function_value = func_val
            raise err

        # wait either the retry_interval or else the amount of
        # time until the timeout expires (whichever is less)
        time.sleep(min(retry_interval, time_left))


# Non PEP-8 alias
WaitUntil = deprecated(wait_until)
#=========================================================================
def always_wait_until_passes(timeout,
                             retry_interval,
                             exceptions=(Exception,)):
    """Decorator to call wait_until_passes(...) every time for a decorated function/method

    * **timeout** total time to keep retrying the decorated callable
    * **retry_interval** pause between retries
    * **exceptions** exception class or tuple of classes that trigger a retry

    Note: the default used to be written ``(Exception)`` which is just
    ``Exception`` (the parentheses were a no-op); ``(Exception,)`` is the
    tuple that was intended - behaviour is identical for callers.
    """
    def wait_until_passes_decorator(func):
        """Callable object that must be returned by the @always_wait_until_passes decorator"""
        @wraps(func)
        def wrapper(*args, **kwargs):
            # delegate the whole retry loop to wait_until_passes
            return wait_until_passes(timeout, retry_interval,
                                     func, exceptions, *args, **kwargs)
        return wrapper
    return wait_until_passes_decorator
#=========================================================================
def wait_until_passes(timeout,
                      retry_interval,
                      func,
                      exceptions=(Exception,),
                      *args, **kwargs):
    """
    Wait until ``func(*args, **kwargs)`` does not raise one of the exceptions

    * **timeout** how long the function will try the function
    * **retry_interval** how long to wait between retries
    * **func** the function that will be executed
    * **exceptions** exception class or tuple of classes to retry on
      (default: Exception)
    * **args** optional arguments to be passed to func when called
    * **kwargs** optional keyword arguments to be passed to func when called

    Returns the return value of the function

    If the operation times out then the original exception raised is in
    the 'original_exception' attribute of the raised exception.

    e.g. ::

        try:
            # wait a maximum of 10.5 seconds for the
            # window to be found in increments of .5 of a second.
            # Print a message and re-raise the original exception if never found.
            wait_until_passes(10.5, .5, self.Exists, (ElementNotFoundError))
        except TimeoutError as e:
            print("timed out")
            raise e
    """
    start = timestamp()

    # keep trying until the timeout is passed
    while True:
        try:
            # Call the function with any arguments
            func_val = func(*args, **kwargs)

            # no exception was raised - we are finished
            break

        # an exception was raised - so wait and try again
        except exceptions as e:

            # find out how much of the time is left
            time_left = timeout - (timestamp() - start)

            if time_left > 0:
                # wait either the retry_interval or else the amount of
                # time until the timeout expires (whichever is less)
                time.sleep(min(retry_interval, time_left))

            else:
                # give up: wrap the last exception so the caller can
                # inspect what actually failed
                err = TimeoutError()
                err.original_exception = e
                raise err

    # return the function value
    return func_val


# Non PEP-8 alias
WaitUntilPasses = deprecated(wait_until_passes)
| bsd-3-clause | 2d01403ec0262e73497b75aaa60a9b15 | 31.55794 | 94 | 0.576992 | 4.194742 | false | false | false | false |
pywinauto/pywinauto | pywinauto/controlproperties.py | 1 | 9337 | # GUI Application automation and testing library
# Copyright (C) 2006-2018 Mark Mc Mahon and Contributors
# https://github.com/pywinauto/pywinauto/graphs/contributors
# http://pywinauto.readthedocs.io/en/latest/credits.html
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of pywinauto nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Wrap"""
from .windows.win32structures import RECT, LOGFONTW
from . import deprecated
#====================================================================
class FuncWrapper(object):
    """Make a plain value look like a zero-argument accessor method

    Instances are callable; the call accepts (and ignores) any
    arguments and always returns the wrapped value.
    """

    def __init__(self, value):
        # remember the value to hand back on every call
        self.value = value

    def __call__(self, *args, **kwargs):
        """Return the saved value"""
        return self.value
#====================================================================
class ControlProps(dict):
    """Wrap controls read from a file to resemble hwnd controls"""

    def __init__(self, *args, **kwargs):
        dict.__init__(self, *args, **kwargs)
        # optional reference control, set externally
        # (e.g. by SetReferenceControls)
        self.ref = None

    def __getattr__(self, attr):
        """Expose dictionary keys as zero-argument accessor callables

        A singular name falls back to the first element of the plural
        key (e.g. ``font`` -> ``fonts[0]``). Note that a completely
        unknown key raises KeyError (from the dict lookup), not
        AttributeError.
        """
        # if the key is not in the dictionary but the plural is
        if attr not in self and attr + "s" in self:
            # return the first element of the possible list item
            return FuncWrapper(self[attr + "s"][0])

        return FuncWrapper(self[attr])

    def window_text(self):
        """Return the first of the control's texts"""
        return self['texts'][0]
    # Non PEP-8 alias
    WindowText = deprecated(window_text)

    def has_style(self, style):
        """Return True if all bits of ``style`` are set in the style flags"""
        return self['style'] & style == style
    # Non PEP-8 alias
    HasStyle = deprecated(has_style)

    def has_exstyle(self, exstyle):
        """Return True if all bits of ``exstyle`` are set in the extended style"""
        return self['exstyle'] & exstyle == exstyle
    # Non PEP-8 alias
    HasExStyle = deprecated(has_exstyle, deprecated_name="HasExStyle")
#====================================================================
def GetMenuBlocks(ctrls):
    """Collect the separate menu blocks of every control that has menu items"""
    all_menu_blocks = []
    for ctrl in ctrls:
        if 'menu_items' in ctrl.keys():
            # split this control's menu into its separate blocks and
            # add them to the overall list
            all_menu_blocks.extend(MenuBlockAsControls(ctrl.menu_items()))

    return all_menu_blocks
#====================================================================
def MenuBlockAsControls(menuItems, parentage = None):
    """Convert a menu description into flat blocks of pseudo-controls

    Each menu level becomes one block; sub menu blocks are emitted
    (depth first) before the block that contains them.
    """
    if parentage is None:
        parentage = []

    blocks = []
    current_block = []
    for item in menuItems:
        # convert the menu item into a control-like property dict first
        current_block.append(MenuItemAsControl(item))

        # build the 'path' to this particular item
        # TODO: CHECK - as item_path is currently unused!
        if parentage:
            item_path = "->".join(parentage) + "->" + item['text']
        else:
            item_path = item['text']

        # recurse into the sub menu if this item has one
        if 'menu_items' in item.keys():
            # add the current item to the path
            parentage.append(item['text'])

            # collect the sub menu's blocks before our own block
            blocks.extend(
                MenuBlockAsControls(
                    item['menu_items']['menu_items'], parentage))

            # done with that sub menu - remove the current item
            # from the path again
            parentage.pop()

    # the current level's block goes after all of its sub menu blocks
    blocks.append(current_block)

    return blocks
#====================================================================
def MenuItemAsControl(menuItem):
    """Make a menu item look like a control for tests"""
    item_ctrl = ControlProps()

    # real properties taken from the menu item itself
    item_ctrl["texts"] = [menuItem['text'], ]
    item_ctrl["control_id"] = menuItem['id']
    item_ctrl["type"] = menuItem['type']
    item_ctrl["state"] = menuItem['state']
    item_ctrl["class_name"] = "MenuItem"
    item_ctrl["friendly_class_name"] = "MenuItem"

    # as most of these don't matter - just fill them with default stuff
    item_ctrl["rectangle"] = RECT(0, 0, 999, 999)
    item_ctrl["fonts"] = [LOGFONTW(), ]
    item_ctrl["client_rects"] = [RECT(0, 0, 999, 999), ]
    item_ctrl["context_help_id"] = 0
    item_ctrl["user_data"] = 0
    item_ctrl["style"] = 0
    item_ctrl["exstyle"] = 0
    item_ctrl["is_visible"] = 1

    return item_ctrl
#====================================================================
def SetReferenceControls(controls, refControls):
    """Set the reference controls for the controls passed in

    This does some minor checking as following:

    * test that there are the same number of reference controls as
      controls - fails with an exception if there are not
    * test if all the ID's are the same or not

    Returns a bit field: bit 0 (value 1) is always set, bit 1 (value 2)
    means all control IDs match, bit 2 (value 4) means all class names
    match.
    """
    # numbers of controls must be the same (though in future I could imagine
    # relaxing this constraint)
    if len(controls) != len(refControls):
        raise RuntimeError(
            "Numbers of controls on ref. dialog does not match Loc. dialog")

    # pair each control with its reference counterpart positionally
    for ctrl, ref_ctrl in zip(controls, refControls):
        ctrl.ref = ref_ctrl

    ALL_IDS_SAME_FLAG = 2
    ALL_CLASSES_SAME_FLAG = 4

    result = 1
    # find if all the control ids match
    if all(ctrl.control_id() == ref_ctrl.control_id()
           for ctrl, ref_ctrl in zip(controls, refControls)):
        result += ALL_IDS_SAME_FLAG

    # check if the control classes match
    if all(ctrl.class_name() == ref_ctrl.class_name()
           for ctrl, ref_ctrl in zip(controls, refControls)):
        result += ALL_CLASSES_SAME_FLAG

    return result
##====================================================================
#class ControlProps(dict):
# #----------------------------------------------------------------
# def __init__(self, props = {}):
# # default to having menuItems for all things
# self.menu_items = []
#
# self.update(props)
# #for x in props:
# #self[x] = props[x]
#
# if hasattr(props, "handle"):
# self.__dict__['handle'] = props.handle
# else:
# self.__dict__['handle'] = None
#
# self.__dict__['ref'] = None
#
# #----------------------------------------------------------------
# # handles attribute access for dictionary items and
# # for plurals (e.g. if self.fonts = [4, 2] then self.font = 4)
# def __getattr__(self, key):
#
# # if the key is not in the dictionary but the plural is
# if key not in self and key + "s" in self:
#
# # try to get the first element of the possible list item
# try:
# return self[key + "s"][0]
# except TypeError as e:
# pass
#
# if key in self:
# return self[key]
#
# return self.__dict__[key]
#
# #----------------------------------------------------------------
# def __setattr__(self, key, value):
# if key in self.__dict__:
# self.__dict__[key] = value
# else:
# self[key] = value
#
# #----------------------------------------------------------------
# def has_style(self, flag):
# return self.style & flag == flag
#
# #----------------------------------------------------------------
# def has_exstyle(self, flag):
# return self.exstyle & flag == flag
#
#
| bsd-3-clause | 7a3cb08173418cea2f41bfa85ecf97d3 | 32.453875 | 80 | 0.554996 | 4.179499 | false | false | false | false |
pywinauto/pywinauto | pywinauto/windows/remote_memory_block.py | 1 | 12193 | # GUI Application automation and testing library
# Copyright (C) 2006-2018 Mark Mc Mahon and Contributors
# https://github.com/pywinauto/pywinauto/graphs/contributors
# http://pywinauto.readthedocs.io/en/latest/credits.html
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of pywinauto nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Module containing wrapper around VirtualAllocEx/VirtualFreeEx
Win32 API functions to perform custom marshalling
"""
from __future__ import print_function
import sys
from ctypes import wintypes
from ctypes import c_void_p
from ctypes import pointer
from ctypes import sizeof
from ctypes import byref
from ctypes import c_size_t
from ctypes import WinError
import win32api
from ..windows import win32functions
from ..windows import win32defines
from ..windows import win32structures
from ..actionlogger import ActionLogger
class AccessDenied(RuntimeError):
    """Raised when we cannot allocate memory in the control's process

    Raised by RemoteMemoryBlock.__init__ when the window's process ID
    cannot be resolved or OpenProcess fails.
    """
    pass
# ====================================================================
class RemoteMemoryBlock(object):
"""Class that enables reading and writing memory in a different process"""
#----------------------------------------------------------------
def __init__(self, ctrl, size=4096):
"""Allocate the memory"""
self.mem_address = 0
self.size = size
self.process = 0
self.handle = ctrl.handle
if self.handle == 0xffffffff80000000:
raise Exception('Incorrect handle: ' + str(self.handle))
self._as_parameter_ = self.mem_address
pid = wintypes.DWORD()
win32functions.GetWindowThreadProcessId(self.handle, byref(pid))
process_id = pid.value
if not process_id:
raise AccessDenied(
str(WinError()) + " Cannot get process ID from handle.")
self.process = win32functions.OpenProcess(
win32defines.PROCESS_VM_OPERATION |
win32defines.PROCESS_VM_READ |
win32defines.PROCESS_VM_WRITE,
0,
process_id
)
if not self.process:
raise AccessDenied(
str(WinError()) + "process: %d",
process_id)
self.mem_address = win32functions.VirtualAllocEx(
c_void_p(self.process), # remote process
c_void_p(0), # let Valloc decide where
win32structures.ULONG_PTR(self.size + 4), # how much to allocate
win32defines.MEM_RESERVE | \
win32defines.MEM_COMMIT, # allocation type
win32defines.PAGE_READWRITE # protection
)
if hasattr(self.mem_address, 'value'):
self.mem_address = self.mem_address.value
if self.mem_address == 0:
raise WinError()
if hex(self.mem_address) == '0xffffffff80000000' or hex(self.mem_address).upper() == '0xFFFFFFFF00000000':
raise Exception('Incorrect allocation: ' + hex(self.mem_address))
self._as_parameter_ = self.mem_address
# write guard signature at the end of memory block
signature = wintypes.LONG(0x66666666)
ret = win32functions.WriteProcessMemory(
c_void_p(self.process),
c_void_p(self.mem_address + self.size),
pointer(signature),
win32structures.ULONG_PTR(4),
win32structures.ULONG_PTR(0)
)
if ret == 0:
ActionLogger().log('================== Error: Failed to write guard signature: address = ' +
hex(self.mem_address) + ', size = ' + str(self.size))
last_error = win32api.GetLastError()
ActionLogger().log('LastError = ' + str(last_error) + ': ' + win32api.FormatMessage(last_error).rstrip())
    def _CloseHandle(self):
        """Close the handle to the process.

        A failure is only logged, not raised - the raise is commented
        out below, so cleanup paths (e.g. __del__) are never interrupted
        by a failing CloseHandle.
        """
        ret = win32functions.CloseHandle(self.process)
        #win32api.CloseHandle(self.process)

        if ret == 0:
            ActionLogger().log('Warning: cannot close process handle!')
            #raise WinError()
#----------------------------------------------------------------
    def CleanUp(self):
        """Free Memory and the process handle"""
        # Only attempt the free when we actually hold a process handle and a
        # remote allocation; otherwise this is a no-op.
        if self.process != 0 and self.mem_address != 0:
            # free up the memory we allocated
            #win32api.SetLastError(0)
            # Verify the guard signature first so buffer overruns are caught
            # before the memory disappears.
            self.CheckGuardSignature()
            # With MEM_RELEASE the size argument must be 0 (whole region).
            ret = win32functions.VirtualFreeEx(
                c_void_p(self.process),
                c_void_p(self.mem_address),
                win32structures.ULONG_PTR(0),
                wintypes.DWORD(win32defines.MEM_RELEASE))
            if ret == 0:
                print('Error: CleanUp: VirtualFreeEx() returned zero for address ', hex(self.mem_address))
                last_error = win32api.GetLastError()
                print('LastError = ', last_error, ': ', win32api.FormatMessage(last_error).rstrip())
                sys.stdout.flush()
                # Still close the process handle before raising, so the
                # handle is not leaked on the failure path.
                self._CloseHandle()
                raise WinError()
            # Zero the address so a second CleanUp() call is a no-op.
            self.mem_address = 0
            self._CloseHandle()
        else:
            pass # ActionLogger().log('\nWARNING: Cannot call VirtualFreeEx! process_id == 0.')
#----------------------------------------------------------------
def __del__(self):
"""Ensure that the memory is Freed"""
# Free the memory in the remote process's address space
self.CleanUp()
#----------------------------------------------------------------
def Address(self):
"""Return the address of the memory block"""
return self.mem_address
#----------------------------------------------------------------
    def Write(self, data, address=None, size=None):
        """Write data into the memory block"""
        # write the data from this process into the memory allocated
        # from the other process
        # Default to the block's own base address; also accept ctypes
        # integers (anything exposing a .value attribute).
        if not address:
            address = self.mem_address
        if hasattr(address, 'value'):
            address = address.value
        # Size defaults to sizeof(data) when not given explicitly.
        if size:
            nSize = win32structures.ULONG_PTR(size)
        else:
            nSize = win32structures.ULONG_PTR(sizeof(data))
        # Refuse writes larger than the allocated remote block.
        if self.size < nSize.value:
            raise Exception(('Write: RemoteMemoryBlock is too small ({0} bytes),' +
                             ' {1} is required.').format(self.size, nSize.value))
        # Addresses starting with 0xffffff... are treated as invalid
        # (same sanity check as at allocation time).
        if hex(address).lower().startswith('0xffffff'):
            raise Exception('Write: RemoteMemoryBlock has incorrect address = ' + hex(address))
        ret = win32functions.WriteProcessMemory(
            c_void_p(self.process),
            c_void_p(address),
            pointer(data),
            nSize,
            win32structures.ULONG_PTR(0)
        )
        if ret == 0:
            ActionLogger().log('Error: Write failed: address = ' + str(address))
            last_error = win32api.GetLastError()
            ActionLogger().log('Error: LastError = ' + str(last_error) + ': ' +
                               win32api.FormatMessage(last_error).rstrip())
            raise WinError()
        # Ensure the write did not clobber the guard signature placed just
        # past the usable region.
        self.CheckGuardSignature()
#----------------------------------------------------------------
    def Read(self, data, address=None, size=None):
        """Read data from the memory block"""
        # Default to the block's own base address; also accept ctypes
        # integers (anything exposing a .value attribute).
        if not address:
            address = self.mem_address
        if hasattr(address, 'value'):
            address = address.value
        # Size defaults to sizeof(data) when not given explicitly.
        if size:
            nSize = win32structures.ULONG_PTR(size)
        else:
            nSize = win32structures.ULONG_PTR(sizeof(data))
        # Refuse reads larger than the allocated remote block.
        if self.size < nSize.value:
            raise Exception(('Read: RemoteMemoryBlock is too small ({0} bytes),' +
                             ' {1} is required.').format(self.size, nSize.value))
        # Addresses starting with 0xffffff... are treated as invalid
        # (same sanity check as at allocation time).
        if hex(address).lower().startswith('0xffffff'):
            raise Exception('Read: RemoteMemoryBlock has incorrect address =' + hex(address))
        lpNumberOfBytesRead = c_size_t(0)
        ret = win32functions.ReadProcessMemory(
            c_void_p(self.process),
            c_void_p(address),
            byref(data),
            nSize,
            byref(lpNumberOfBytesRead)
        )
        # disabled as it often returns an error - but
        # seems to work fine anyway!!
        if ret == 0:
            # ReadProcessMemory is flaky here, so retry exactly once before
            # deciding it really failed.
            ret = win32functions.ReadProcessMemory(
                c_void_p(self.process),
                c_void_p(address),
                byref(data),
                nSize,
                byref(lpNumberOfBytesRead)
            )
            if ret == 0:
                last_error = win32api.GetLastError()
                # ERROR_PARTIAL_COPY gets its own, shorter log message; any
                # other error logs the full diagnostic context.
                if last_error != win32defines.ERROR_PARTIAL_COPY:
                    ActionLogger().log('Read: WARNING! self.mem_address =' +
                                       hex(self.mem_address) + ' data address =' + str(byref(data)))
                    ActionLogger().log('LastError = ' + str(last_error) +
                                       ': ' + win32api.FormatMessage(last_error).rstrip())
                else:
                    ActionLogger().log('Error: ERROR_PARTIAL_COPY')
                    ActionLogger().log('\nRead: WARNING! self.mem_address =' +
                                       hex(self.mem_address) + ' data address =' + str(byref(data)))
                ActionLogger().log('lpNumberOfBytesRead =' +
                                   str(lpNumberOfBytesRead) + ' nSize =' + str(nSize))
                raise WinError()
            else:
                ActionLogger().log('Warning! Read OK: 2nd attempt!')
        #else:
        #    print 'Read OK: lpNumberOfBytesRead =', lpNumberOfBytesRead, ' nSize =', nSize
        # Make sure the guard signature past the usable region is intact.
        self.CheckGuardSignature()
        return data
#----------------------------------------------------------------
    def CheckGuardSignature(self):
        """read guard signature at the end of memory block"""
        # The allocator wrote the LONG 0x66666666 just past the usable
        # region (at self.mem_address + self.size); re-read it here to
        # detect buffer overruns in the remote process.
        signature = win32structures.LONG(0)
        lpNumberOfBytesRead = c_size_t(0)
        ret = win32functions.ReadProcessMemory(
            c_void_p(self.process),
            c_void_p(self.mem_address + self.size),
            pointer(signature), # 0x66666666
            win32structures.ULONG_PTR(4),
            byref(lpNumberOfBytesRead))
        if ret == 0:
            ActionLogger().log('Error: Failed to read guard signature: address = ' +
                               hex(self.mem_address) + ', size = ' + str(self.size) +
                               ', lpNumberOfBytesRead = ' + str(lpNumberOfBytesRead))
            raise WinError()
        else:
            # Any value other than the sentinel means the block was overrun.
            if hex(signature.value) != '0x66666666':
                raise Exception('---------------------------------------- ' +
                                'Error: read incorrect guard signature = ' + hex(signature.value))
| bsd-3-clause | cccd8bb69a58d9302e66b216b4fd781a | 39.916107 | 117 | 0.565816 | 4.509246 | false | false | false | false |
pywinauto/pywinauto | pywinauto/unittests/test_xml_helpers.py | 4 | 5040 | # GUI Application automation and testing library
# Copyright (C) 2006-2018 Mark Mc Mahon and Contributors
# https://github.com/pywinauto/pywinauto/graphs/contributors
# http://pywinauto.readthedocs.io/en/latest/credits.html
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of pywinauto nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Module containing tests for xml_helpers Module"""
import os
import sys
import unittest
import six
sys.path.append(".")
from pywinauto.xml_helpers import WriteDialogToFile
from pywinauto.xml_helpers import ReadPropertiesFromFile
from pywinauto.xml_helpers import LOGFONTW
from pywinauto.xml_helpers import RECT
class XMLHelperTestCases(unittest.TestCase):
    """Round-trip tests for the xml_helpers read/write functions"""

    def tearDown(self):
        """Remove the scratch XML file produced by each test"""
        os.unlink("__unittests.xml")

    def assertReadWriteSame(self, props):
        """Write ``props`` to disk, read them back, and compare for equality"""
        WriteDialogToFile("__unittests.xml", props)
        round_tripped = ReadPropertiesFromFile("__unittests.xml")
        self.assertEqual(props, round_tripped)

    def testOneUnicode(self):
        """Round-trip a unicode string"""
        self.assertReadWriteSame([dict(test=u"hiya")])

    def testOneString(self):
        """Round-trip a plain string"""
        self.assertReadWriteSame([dict(test="hiya")])

    def testSomeEscapes(self):
        """Round-trip a string containing every code point below 50000"""
        payload = "".join(six.unichr(i) for i in range(0, 50000))
        self.assertReadWriteSame([dict(test=payload)])

    def testOneBool(self):
        """Round-trip a boolean"""
        self.assertReadWriteSame([dict(test=True)])

    def testOneList(self):
        """Round-trip a list of integers"""
        self.assertReadWriteSame([dict(test=[1, 2, 3, 4, 5, 6])])

    def testOneDict(self):
        """Round-trip a nested dictionary"""
        self.assertReadWriteSame([dict(test_value=dict(test=1))])

    def testOneLong(self):
        """Round-trip a single integer"""
        self.assertReadWriteSame([dict(test=1)])

    def testLOGFONTW(self):
        """Round-trip a LOGFONTW structure"""
        font = LOGFONTW()
        font.lfWeight = 23
        font.lfFaceName = u"wowow"
        self.assertReadWriteSame([dict(test=font)])

    def testRECT(self):
        """Round-trip a RECT structure"""
        self.assertReadWriteSame([dict(test=RECT(1, 2, 3, 4))])

    def testTwoLong(self):
        """Round-trip two integers held in separate dicts"""
        self.assertReadWriteSame([dict(test=1), dict(test_blah=2)])

    def testEmptyList(self):
        """Round-trip an empty list"""
        self.assertReadWriteSame([dict(test=[])])

    def testEmptyDict(self):
        """Round-trip an empty dict"""
        self.assertReadWriteSame([dict(test={})])
#====================================================================
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()
| bsd-3-clause | 88ebc078f5db1346bd20f67880df2edf | 34.521739 | 80 | 0.653571 | 4.151565 | false | true | false | false |
pywinauto/pywinauto | examples/notepad_slow.py | 1 | 9097 | # GUI Application automation and testing library
# Copyright (C) 2006-2018 Mark Mc Mahon and Contributors
# https://github.com/pywinauto/pywinauto/graphs/contributors
# http://pywinauto.readthedocs.io/en/latest/credits.html
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of pywinauto nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Run some automations to test things"""
from __future__ import unicode_literals
from __future__ import print_function
import os.path
import sys
import time
try:
from pywinauto import application
except ImportError:
pywinauto_path = os.path.abspath(__file__)
pywinauto_path = os.path.split(os.path.split(pywinauto_path)[0])[0]
sys.path.append(pywinauto_path)
from pywinauto import application
from pywinauto import tests
from pywinauto.findbestmatch import MatchError
from pywinauto.timings import Timings
# Switch pywinauto's global timing profile to the slow preset so waits are
# generous enough for slow machines/applications.
print("Setting timings to slow settings, may be necessary for")
print("slow applications or slow machines.")
Timings.slow()
#application.set_timing(3, .5, 10, .5, .4, .2, .2, .1, .2, .5)
def run_notepad():
    """Run notepad and do some small stuff with it.

    Demonstrates menu selection, combobox/checkbox/radio interaction,
    tab controls, typing (including non-ASCII text) and saving a file.
    Steps are order-dependent: each dialog must be opened by the previous
    action before it can be addressed.
    """
    start = time.time()
    app = application.Application()

    ## for distribution we don't want to connect to anybodies application
    ## because we may mess up something they are working on!
    #try:
    #    app.connect_(path = r"c:\windows\system32\notepad.exe")
    #except application.ProcessNotFoundError:
    #    app.start_(r"c:\windows\system32\notepad.exe")
    app.start(r"notepad.exe")

    app.Notepad.menu_select("File->PageSetup")

    # ----- Page Setup Dialog ----
    # Select the 4th combobox item
    app.PageSetupDlg.SizeComboBox.select(4)

    # Select the 'Letter' combobox item or the Letter
    # (localized variants include the paper dimensions in the label)
    try:
        app.PageSetupDlg.SizeComboBox.select("Letter")
    except ValueError:
        app.PageSetupDlg.SizeComboBox.select('Letter (8.5" x 11")')

    app.PageSetupDlg.SizeComboBox.select(2)

    # run some tests on the Dialog. List of available tests:
    #    "AllControls",
    #    "AsianHotkey",
    #    "ComboBoxDroppedHeight",
    #    "CompareToRefFont",
    #    "LeadTrailSpaces",
    #    "MiscValues",
    #    "Missalignment",
    #    "MissingExtraString",
    #    "Overlapping",
    #    "RepeatedHotkey",
    #    "Translation",
    #    "Truncation",
    bugs = app.PageSetupDlg.run_tests('RepeatedHotkey Truncation')

    # if there are any bugs they will be printed to the console
    # and the controls will be highlighted
    tests.print_bugs(bugs)

    # ----- Next Page Setup Dialog ----
    app.PageSetupDlg.Printer.click()

    # do some radio button clicks
    # Open the Connect to printer dialog so we can
    # try out checking/unchecking a checkbox
    app.PageSetupDlg.Network.click()

    # ----- Connect To Printer Dialog ----
    # Select a checkbox
    app.ConnectToPrinter.ExpandByDefault.check()
    app.ConnectToPrinter.ExpandByDefault.uncheck()

    # try doing the same by using click
    app.ConnectToPrinter.ExpandByDefault.click()
    app.ConnectToPrinter.ExpandByDefault.click()

    # close the dialog
    app.ConnectToPrinter.Cancel.close_click()

    # ----- 2nd Page Setup Dialog again ----
    app.PageSetupDlg.Properties.click()

    doc_props = app.window(name_re = ".*Properties$")
    # printer driver dialogs can be slow to appear, hence the long timeout
    doc_props.wait('exists', timeout=40)

    # ----- Document Properties Dialog ----
    # some tab control selections
    # Two ways of selecting tabs with indices...
    doc_props.TabCtrl.select(0)
    doc_props.TabCtrl.select(1)
    try:
        doc_props.TabCtrl.select(2)
    except IndexError:
        # not all users have 3 tabs in this dialog
        print('Skip 3rd tab selection...')

    # or with text...
    doc_props.TabCtrl.select("PaperQuality")
    try:
        doc_props.TabCtrl.select("JobRetention")
    except MatchError:
        # some people do not have the "Job Retention" tab
        print('Skip "Job Retention" tab...')

    # (further tab/radio/Advanced-dialog interaction intentionally disabled)
    #    doc_props.TabCtrl.select("Layout")
    #
    #    # do some radio button clicks
    #    doc_props.RotatedLandscape.click()
    #    doc_props.BackToFront.click()
    #    doc_props.FlipOnShortEdge.click()
    #
    #    doc_props.Portrait.click()
    #    doc_props._None.click()
    #    doc_props.FrontToBack.click()
    #
    #    # open the Advanced options dialog in two steps
    #    advbutton = doc_props.Advanced
    #    advbutton.click()
    #
    #    # close the 4 windows
    #
    #    # ----- Advanced Options Dialog ----
    #    app.window(name_re = ".* Advanced Options").Ok.click()

    # ----- Document Properties Dialog again ----
    doc_props.Cancel.close_click()

    # for some reason my current printer driver
    # window does not close cleanly :(
    if doc_props.Cancel.exists():
        doc_props.OK.close_click()

    # ----- 2nd Page Setup Dialog again ----
    app.PageSetupDlg.OK.close_click()
    # ----- Page Setup Dialog ----
    app.PageSetupDlg.Ok.close_click()

    # type some text - note that extended characters ARE allowed
    app.Notepad.Edit.set_edit_text("I am typing s\xe4me text to Notepad\r\n\r\n"
                                   "And then I am going to quit")

    app.Notepad.Edit.right_click()
    app.Popup.menu_item("Right To Left Reading Order").click()

    #app.PopupMenu.menu_select("Paste", app.Notepad.ctrl_())
    #app.Notepad.Edit.right_click()
    #app.PopupMenu.menu_select("Right To Left Reading Order", app.Notepad.ctrl_())
    #app.PopupMenu.menu_select("Show unicode control characters", app.Notepad.ctrl_())
    #time.sleep(1)
    #app.Notepad.Edit.right_click()
    #app.PopupMenu.menu_select("Right To Left Reading Order", app.Notepad.ctrl_())
    #time.sleep(1)

    #app.Notepad.Edit.right_click()
    #app.PopupMenu.menu_select("Insert Unicode control character -> IAFS", app.Notepad.ctrl_())
    #time.sleep(1)

    #app.Notepad.Edit.type_keys("{ESC}")

    # the following shows that Sendtext does not accept
    # accented characters - but does allow 'control' characters
    app.Notepad.Edit.type_keys("{END}{ENTER}SendText d\xf6\xe9s "
                               u"s\xfcpp\xf4rt \xe0cce\xf1ted characters!!!", with_spaces = True)

    # Try and save
    app.Notepad.menu_select("File->SaveAs")
    app.SaveAs.EncodingComboBox.select("UTF-8")
    app.SaveAs.FileNameEdit.set_edit_text("Example-utf8.txt")
    app.SaveAs.Save.close_click()

    # my machine has a weird problem - when connected to the network
    # the SaveAs Dialog appears - but doing anything with it can
    # cause a LONG delay - the easiest thing is to just wait
    # until the dialog is no longer active

    # - Dialog might just be gone - because click worked
    # - dialog might be waiting to disappear
    #   so can't wait for next dialog or for it to be disabled
    # - dialog might be waiting to display message box so can't wait
    #   for it to be gone or for the main dialog to be enabled.

    # while the dialog exists wait upto 30 seconds (and yes it can
    # take that long on my computer sometimes :-( )
    app.SaveAsDialog2.Cancel.wait_not('enabled')

    # If file exists - it asks you if you want to overwrite
    try:
        app.SaveAs.Yes.wait('exists').close_click()
    except MatchError:
        print('Skip overwriting...')

    # exit notepad
    app.Notepad.menu_select("File->Exit")

    #if not run_with_appdata:
    #    app.WriteAppData(os.path.join(scriptdir, "Notepad_fast.pkl"))

    print("That took %.3f to run"% (time.time() - start))
# Entry point: drive the Notepad automation demo.
if __name__ == "__main__":
    run_notepad()
| bsd-3-clause | 4700fc7cc66efaa16529437bc0c3f262 | 34.388 | 95 | 0.665714 | 3.713061 | false | false | false | false |
mozilla/addons-server | src/olympia/activity/admin.py | 3 | 2240 | from django.contrib import admin
from .models import ActivityLog, ReviewActionReasonLog
from olympia.reviewers.models import ReviewActionReason
class ActivityLogAdmin(admin.ModelAdmin):
    """Read-only admin for ActivityLog entries.

    Entries are created by application code only: adding and deleting via
    the admin are both disabled, and all displayed fields are read-only.
    """

    list_display = (
        'created',
        'user',
        '__str__',
    )
    readonly_fields = (
        'created',
        'user',
        '__str__',
    )
    date_hierarchy = 'created'
    fields = (
        'user',
        'created',
        '__str__',
    )
    # Defined once: the original class assigned this attribute twice with
    # the same value, which was redundant.
    raw_id_fields = ('user',)
    view_on_site = False

    def lookup_allowed(self, lookup, value):
        """Additionally allow filtering the changelist by related add-on."""
        if lookup == 'addonlog__addon':
            return True
        return super().lookup_allowed(lookup, value)

    def has_add_permission(self, request):
        """Activity logs are generated by the app, never created manually."""
        return False

    def has_delete_permission(self, request, obj=None):
        """Keep the audit trail immutable."""
        return False
class ReviewActionReasonLogAdmin(admin.ModelAdmin):
    """Admin for ReviewActionReasonLog entries, which link an activity log
    line to a review action reason."""

    date_hierarchy = 'created'
    fields = (
        'created',
        'activity_log',
        'activity_log__user__email',
        'reason',
    )
    list_display = (
        'created',
        'activity_log',
        'reason',
        'activity_log__user__email',
    )
    list_filter = ('reason',)
    list_select_related = ('activity_log__user',)
    readonly_fields = (
        'created',
        'activity_log',
        'activity_log__user__email',
    )
    search_fields = ('activity_log__user__email',)
    view_on_site = False

    def activity_log__user__email(self, obj):
        """Expose the acting user's email as a list/detail column."""
        return obj.activity_log.user.email

    def has_add_permission(self, request):
        """Entries are created by application code, not via the admin."""
        return False

    def get_form(self, request, obj=None, **kwargs):
        """Restrict the 'reason' field to a fixed, labelled choice list
        with no add/change shortcuts and no empty choice."""
        form = super().get_form(request, obj, **kwargs)
        reason_field = form.base_fields['reason']
        reason_field.widget.can_add_related = False
        reason_field.widget.can_change_related = False
        reason_field.empty_label = None
        reason_field.choices = [
            (reason.id, reason.labelled_name())
            for reason in ReviewActionReason.objects.all()
        ]
        return form
# Register both model admins with the default admin site.
admin.site.register(ActivityLog, ActivityLogAdmin)
admin.site.register(ReviewActionReasonLog, ReviewActionReasonLogAdmin)
| bsd-3-clause | 5942c8487a2880d485e7c2fa154719c0 | 25.987952 | 87 | 0.597321 | 3.916084 | false | false | false | false |
mozilla/addons-server | src/olympia/constants/scanners.py | 3 | 1784 | from django.utils.translation import gettext_lazy as _
# Scanner identifiers (stored in the database, do not renumber).
CUSTOMS = 1
# We do not use the WAT scanner anymore but we keep this constant for the model
# definition. We shouldn't use this constant, though.
# See: https://github.com/mozilla/addons-server/issues/19152
_WAT = 2
YARA = 3
MAD = 4

SCANNERS = {CUSTOMS: 'customs', _WAT: 'wat', YARA: 'yara', MAD: 'mad'}

# Action IDs are also used for severity (the higher, the more severe).
# The field is a PositiveSmallIntegerField, it should go up to 65535.
NO_ACTION = 1
FLAG_FOR_HUMAN_REVIEW = 20
DELAY_AUTO_APPROVAL = 100
DELAY_AUTO_APPROVAL_INDEFINITELY = 200
DELAY_AUTO_APPROVAL_INDEFINITELY_AND_RESTRICT = 300
DELAY_AUTO_APPROVAL_INDEFINITELY_AND_RESTRICT_FUTURE_APPROVALS = 400

# Human-readable (translatable) labels for the action IDs above.
ACTIONS = {
    NO_ACTION: _('No action'),
    FLAG_FOR_HUMAN_REVIEW: _('Flag for human review'),
    DELAY_AUTO_APPROVAL: _('Delay auto-approval'),
    DELAY_AUTO_APPROVAL_INDEFINITELY: _('Delay auto-approval indefinitely'),
    DELAY_AUTO_APPROVAL_INDEFINITELY_AND_RESTRICT: _(
        'Delay auto-approval indefinitely and add restrictions'
    ),
    DELAY_AUTO_APPROVAL_INDEFINITELY_AND_RESTRICT_FUTURE_APPROVALS: _(
        'Delay auto-approval indefinitely and add restrictions to future approvals'
    ),
}

# Result classification states.
UNKNOWN = None
TRUE_POSITIVE = 1
FALSE_POSITIVE = 2
INCONCLUSIVE = 3

RESULT_STATES = {
    UNKNOWN: _('Unknown'),
    TRUE_POSITIVE: _('True positive'),
    FALSE_POSITIVE: _('False positive'),
    INCONCLUSIVE: _('Inconclusive'),
}

# Run states for scanner query rules.
NEW = 1
RUNNING = 2
ABORTED = 3
COMPLETED = 4
ABORTING = 5
SCHEDULED = 6

QUERY_RULE_STATES = {
    NEW: _('New'),
    RUNNING: _('Running'),
    ABORTED: _('Aborted'),
    ABORTING: _('Aborting'),
    COMPLETED: _('Completed'),
    SCHEDULED: _('Scheduled'),
}

# String labels ('bad'/'good').
LABEL_BAD = 'bad'
LABEL_GOOD = 'good'
| bsd-3-clause | b30c0dda7d1ee9d3ed80af3e066a3e46 | 26.446154 | 83 | 0.689462 | 2.939044 | false | false | false | false |
mozilla/addons-server | src/olympia/addons/management/commands/fix_langpacks_with_max_version_star.py | 5 | 2510 | from django.core.management.base import BaseCommand
from olympia import amo
from olympia.core.logger import getLogger
from olympia.versions.compare import version_dict
from olympia.versions.models import AppVersion, Version
log = getLogger('z.fix_langpacks_with_max_version_star')
class Command(BaseCommand):
    """One-off fixup command: language packs must not use '*' as their max
    app version; rewrite such max versions to '<major of min>.*'."""

    help = 'Fix language packs that have a max version compatibility set to *'

    def find_affected_langpacks(self):
        """Return distinct Versions of language packs whose max appversion
        is the wildcard '*' (unfiltered manager: includes everything)."""
        qs = Version.unfiltered.filter(
            addon__type=amo.ADDON_LPAPP, apps__max__version='*'
        ).distinct()
        return qs

    def fix_max_appversion_for_version(self, version):
        """Replace a '*' max appversion on this version's Firefox/Android
        compatibility rows with '<major>.*', where <major> is taken from
        the corresponding min appversion.

        NOTE(review): AppVersion.objects.get() assumes the '<major>.*' row
        already exists - it will raise DoesNotExist otherwise.
        """
        for app in (amo.FIREFOX, amo.ANDROID):
            # Skip apps this version has no compatibility row for.
            if app not in version.compatible_apps:
                log.info(
                    'Version %s for addon %s min version is not compatible '
                    'with %s, skipping this version for that app.',
                    version,
                    version.addon,
                    app.pretty,
                )
                continue
            # Only rows whose max is the wildcard need fixing.
            if version.compatible_apps[app].max.version != '*':
                log.info(
                    'Version %s for addon %s max version is not "*" for %s '
                    'app, skipping this version for that app.',
                    version,
                    version.addon,
                    app.pretty,
                )
                continue
            min_appversion_str = version.compatible_apps[app].min.version
            max_appversion_str = '%d.*' % version_dict(min_appversion_str)['major']
            log.warning(
                'Version %s for addon %s min version is %s for %s app, '
                'max will be changed to %s instead of *',
                version,
                version.addon,
                min_appversion_str,
                app.pretty,
                max_appversion_str,
            )
            max_appversion = AppVersion.objects.get(
                application=app.id, version=max_appversion_str
            )
            version.compatible_apps[app].max = max_appversion
            version.compatible_apps[app].save()

    def handle(self, *args, **options):
        """Entry point: find all affected versions and fix each in turn."""
        versions = self.find_affected_langpacks()
        log.info(
            'Found %d langpack versions with an incorrect max version', versions.count()
        )
        for version in versions:
            log.info('Fixing version %s for addon %s', version, version.addon)
            self.fix_max_appversion_for_version(version)
| bsd-3-clause | fc371c7722a4c12c4bf9404c0d6f0e21 | 38.21875 | 88 | 0.553386 | 4.342561 | false | false | false | false |
mozilla/addons-server | src/olympia/addons/indexers.py | 3 | 29191 | from django.conf import settings
from olympia.constants.promoted import RECOMMENDED
import olympia.core.logger
from olympia import amo
from olympia.amo.utils import attach_trans_dict
from olympia.amo.celery import create_chunked_tasks_signatures
from olympia.amo.utils import to_language
from olympia.constants.search import SEARCH_LANGUAGE_TO_ANALYZER
from olympia.search.utils import create_index
from olympia.versions.compare import version_int
log = olympia.core.logger.getLogger('z.es')
class AddonIndexer:
"""
Base Indexer class for add-ons.
"""
@classmethod
def attach_translation_mappings(cls, mapping, field_names):
"""
For each field in field_names, attach a dict to the ES mapping
properties making "<field_name>_translations" an object containing
"string" and "lang" as non-indexed strings.
Used to store non-indexed, non-analyzed translations in ES that will be
sent back by the API for each item. It does not take care of the
indexed content for search, it's there only to store and return
raw translations.
"""
for field_name in field_names:
# _translations is the suffix in TranslationSerializer.
mapping['properties'][
'%s_translations' % field_name
] = cls.get_translations_definition()
@classmethod
def get_translations_definition(cls):
"""
Return the mapping to use for raw translations (to be returned directly
by the API, not used for analysis).
See attach_translation_mappings() for more information.
"""
return {
'type': 'object',
'properties': {
'lang': {'type': 'text', 'index': False},
'string': {'type': 'text', 'index': False},
},
}
@classmethod
def get_raw_field_definition(cls):
"""
Return the mapping to use for the "raw" version of a field. Meant to be
used as part of a 'fields': {'raw': ... } definition in the mapping of
an existing field.
Used for exact matches and sorting
"""
# It needs to be a keyword to turnoff all analysis ; that means we
# don't get the lowercase filter applied by the standard &
# language-specific analyzers, so we need to do that ourselves through
# a custom normalizer for exact matches to work in a case-insensitive
# way.
return {
'type': 'keyword',
'normalizer': 'lowercase_keyword_normalizer',
}
@classmethod
def attach_language_specific_analyzers(cls, mapping, field_names):
"""
For each field in field_names, attach language-specific mappings that
will use specific analyzers for these fields in every language that we
support.
These mappings are used by the search filtering code if they exist.
"""
for lang, analyzer in SEARCH_LANGUAGE_TO_ANALYZER.items():
for field in field_names:
property_name = '%s_l10n_%s' % (field, lang)
mapping['properties'][property_name] = {
'type': 'text',
'analyzer': analyzer,
}
@classmethod
def attach_language_specific_analyzers_with_raw_variant(cls, mapping, field_names):
"""
Like attach_language_specific_analyzers() but with an extra field to
storethe "raw" variant of the value, for exact matches.
"""
for lang, analyzer in SEARCH_LANGUAGE_TO_ANALYZER.items():
for field in field_names:
property_name = '%s_l10n_%s' % (field, lang)
mapping['properties'][property_name] = {
'type': 'text',
'analyzer': analyzer,
'fields': {
'raw': cls.get_raw_field_definition(),
},
}
@classmethod
def extract_field_api_translations(cls, obj, field, db_field=None):
"""
Returns a dict containing translations that we need to store for
the API. Empty translations are skipped entirely.
"""
if db_field is None:
db_field = '%s_id' % field
extend_with_me = {
'%s_translations'
% field: [
{'lang': to_language(lang), 'string': str(string)}
for lang, string in obj.translations[getattr(obj, db_field)]
if string
]
}
return extend_with_me
@classmethod
def extract_field_search_translation(cls, obj, field, default_locale):
"""
Returns the translation for this field in the object's default locale,
in the form a dict with one entry (the field being the key and the
translation being the value, or an empty string if none was found).
That field will be analyzed and indexed by ES *without*
language-specific analyzers.
"""
translations = dict(obj.translations[getattr(obj, '%s_id' % field)])
default_locale = default_locale.lower() if default_locale else None
value = translations.get(default_locale, getattr(obj, field))
return {field: str(value) if value else ''}
@classmethod
def extract_field_analyzed_translations(cls, obj, field, db_field=None):
"""
Returns a dict containing translations for each language that we have
an analyzer for, for the given field.
When no translation exist for a given language+field combo, the value
returned is an empty string, to avoid storing the word "None" as the
field does not understand null values.
"""
if db_field is None:
db_field = '%s_id' % field
translations = dict(obj.translations[getattr(obj, db_field)])
return {
'%s_l10n_%s' % (field, lang): translations.get(lang) or ''
for lang in SEARCH_LANGUAGE_TO_ANALYZER
}
# Fields we don't need to expose in the results, only used for filtering
# or sorting.
hidden_fields = (
'*.raw',
'boost',
'colors',
'hotness',
# Translated content that is used for filtering purposes is stored
# under 3 different fields:
# - One field with all translations (e.g., "name").
# - One field for each language, using corresponding analyzer
# (e.g., "name_l10n_en-us", "name_l10n_fr", etc.)
# - One field with all translations in separate objects for the API
# (e.g. "name_translations")
# Only that last one with all translations needs to be returned.
'name',
'description',
'name_l10n_*',
'description_l10n_*',
'summary',
'summary_l10n_*',
)
index_settings = {
'analysis': {
'analyzer': {
'standard_with_word_split': {
# This analyzer tries to split the text into words by using
# various methods. It also lowercases them and make sure
# each token is only returned once.
# Only use for short things with extremely meaningful
# content like add-on name - it makes too many
# modifications to be useful for things like descriptions,
# for instance.
'tokenizer': 'standard',
'filter': [
'custom_word_delimiter',
'lowercase',
'stop',
'custom_dictionary_decompounder',
'unique',
],
},
'trigram': {
# Analyzer that splits the text into trigrams.
'tokenizer': 'ngram_tokenizer',
'filter': [
'lowercase',
],
},
},
'tokenizer': {
'ngram_tokenizer': {
'type': 'ngram',
'min_gram': 3,
'max_gram': 3,
'token_chars': ['letter', 'digit'],
}
},
'normalizer': {
'lowercase_keyword_normalizer': {
# By default keywords are indexed 'as-is', but for exact
# name matches we need to lowercase them before indexing,
# so this normalizer does that for us.
'type': 'custom',
'filter': ['lowercase'],
},
},
'filter': {
'custom_word_delimiter': {
# This filter is useful for add-on names that have multiple
# words sticked together in a way that is easy to
# recognize, like FooBar, which should be indexed as FooBar
# and Foo Bar. (preserve_original: True makes us index both
# the original and the split version.)
'type': 'word_delimiter',
'preserve_original': True,
},
'custom_dictionary_decompounder': {
# This filter is also useful for add-on names that have
# multiple words sticked together, but without a pattern
# that we can automatically recognize. To deal with those,
# we use a small dictionary of common words. It allows us
# to index 'awesometabpassword' as 'awesome tab password',
# helping users looking for 'tab password' find that addon.
'type': 'dictionary_decompounder',
'word_list': [
'all',
'auto',
'ball',
'bar',
'block',
'blog',
'bookmark',
'browser',
'bug',
'button',
'cat',
'chat',
'click',
'clip',
'close',
'color',
'context',
'cookie',
'cool',
'css',
'delete',
'dictionary',
'down',
'download',
'easy',
'edit',
'fill',
'fire',
'firefox',
'fix',
'flag',
'flash',
'fly',
'forecast',
'fox',
'foxy',
'google',
'grab',
'grease',
'html',
'http',
'image',
'input',
'inspect',
'inspector',
'iris',
'js',
'key',
'keys',
'lang',
'link',
'mail',
'manager',
'map',
'mega',
'menu',
'menus',
'monkey',
'name',
'net',
'new',
'open',
'password',
'persona',
'privacy',
'query',
'screen',
'scroll',
'search',
'secure',
'select',
'smart',
'spring',
'status',
'style',
'super',
'sync',
'tab',
'text',
'think',
'this',
'time',
'title',
'translate',
'tree',
'undo',
'upload',
'url',
'user',
'video',
'window',
'with',
'word',
'zilla',
],
},
},
}
}
    @classmethod
    def get_model(cls):
        """Return the Django model this indexer is responsible for (Addon)."""
        # Imported lazily to avoid circular imports at module load time.
        from olympia.addons.models import Addon
        return Addon
    @classmethod
    def get_index_alias(cls):
        """Return the index alias name."""
        # The alias comes from settings; the concrete index behind it is
        # managed by the reindexing machinery (see create_new_index()).
        return settings.ES_INDEXES.get('default')
    @classmethod
    def get_mapping(cls):
        """Return the Elasticsearch mapping dict for the addons index.

        Fields with 'index': False are stored for API responses only and
        cannot be queried; everything else participates in search.
        """
        appver_mapping = {
            'properties': {
                'max': {'type': 'long'},
                'min': {'type': 'long'},
                # *_human variants are the display strings, stored but not
                # queryable.
                'max_human': {'type': 'keyword', 'index': False},
                'min_human': {'type': 'keyword', 'index': False},
            }
        }
        version_mapping = {
            'type': 'object',
            'properties': {
                'compatible_apps': {
                    'properties': {app.id: appver_mapping for app in amo.APP_USAGE}
                },
                # Keep '<version>.id' indexed to be able to run exists queries
                # on it.
                'id': {'type': 'long'},
                'reviewed': {'type': 'date', 'index': False},
                'files': {
                    'type': 'object',
                    'properties': {
                        'id': {'type': 'long', 'index': False},
                        'created': {'type': 'date', 'index': False},
                        'hash': {'type': 'keyword', 'index': False},
                        'filename': {'type': 'keyword', 'index': False},
                        'is_mozilla_signed_extension': {'type': 'boolean'},
                        'size': {'type': 'long', 'index': False},
                        'strict_compatibility': {'type': 'boolean', 'index': False},
                        'status': {'type': 'byte'},
                        'permissions': {'type': 'keyword', 'index': False},
                        'optional_permissions': {'type': 'keyword', 'index': False},
                    },
                },
                'license': {
                    'type': 'object',
                    'properties': {
                        'id': {'type': 'long', 'index': False},
                        'builtin': {'type': 'short', 'index': False},
                        'name_translations': cls.get_translations_definition(),
                        'url': {'type': 'text', 'index': False},
                    },
                },
                'release_notes_translations': cls.get_translations_definition(),
                'version': {'type': 'keyword', 'index': False},
            },
        }
        mapping = {
            'properties': {
                'id': {'type': 'long'},
                'app': {'type': 'byte'},
                'average_daily_users': {'type': 'long'},
                'bayesian_rating': {'type': 'double'},
                'boost': {'type': 'float', 'null_value': 1.0},
                'category': {'type': 'integer'},
                'colors': {
                    # nested so that each color (h/s/l/ratio) can be queried
                    # as a unit.
                    'type': 'nested',
                    'properties': {
                        'h': {'type': 'integer'},
                        's': {'type': 'integer'},
                        'l': {'type': 'integer'},
                        'ratio': {'type': 'double'},
                    },
                },
                'contributions': {'type': 'text'},
                'created': {'type': 'date'},
                'current_version': version_mapping,
                'default_locale': {'type': 'keyword', 'index': False},
                'description': {'type': 'text', 'analyzer': 'snowball'},
                'guid': {'type': 'keyword'},
                'has_eula': {'type': 'boolean', 'index': False},
                'has_privacy_policy': {'type': 'boolean', 'index': False},
                'hotness': {'type': 'double'},
                'icon_hash': {'type': 'keyword', 'index': False},
                'icon_type': {'type': 'keyword', 'index': False},
                'is_disabled': {'type': 'boolean'},
                'is_experimental': {'type': 'boolean'},
                'is_recommended': {'type': 'boolean'},
                'last_updated': {'type': 'date'},
                'listed_authors': {
                    'type': 'object',
                    'properties': {
                        'id': {'type': 'long'},
                        'name': {'type': 'text'},
                        'username': {'type': 'keyword'},
                        'is_public': {'type': 'boolean', 'index': False},
                    },
                },
                'modified': {'type': 'date', 'index': False},
                'name': {
                    'type': 'text',
                    # Adding word-delimiter to split on camelcase, known
                    # words like 'tab', and punctuation, and eliminate
                    # duplicates.
                    'analyzer': 'standard_with_word_split',
                    'fields': {
                        # Raw field for exact matches and sorting.
                        'raw': cls.get_raw_field_definition(),
                        # Trigrams for partial matches.
                        'trigrams': {
                            'type': 'text',
                            'analyzer': 'trigram',
                        },
                    },
                },
                'previews': {
                    'type': 'object',
                    'properties': {
                        'id': {'type': 'long', 'index': False},
                        'caption_translations': cls.get_translations_definition(),
                        'modified': {'type': 'date', 'index': False},
                        'position': {'type': 'long', 'index': False},
                        'sizes': {
                            'type': 'object',
                            'properties': {
                                'thumbnail': {'type': 'short', 'index': False},
                                'image': {'type': 'short', 'index': False},
                            },
                        },
                    },
                },
                'promoted': {
                    'type': 'object',
                    'properties': {
                        'group_id': {'type': 'byte'},
                        'approved_for_apps': {'type': 'byte'},
                    },
                },
                'ratings': {
                    'type': 'object',
                    'properties': {
                        'count': {'type': 'short', 'index': False},
                        'average': {'type': 'float'},
                    },
                },
                'slug': {'type': 'keyword'},
                'requires_payment': {'type': 'boolean', 'index': False},
                'status': {'type': 'byte'},
                'summary': {'type': 'text', 'analyzer': 'snowball'},
                'tags': {'type': 'keyword'},
                'type': {'type': 'byte'},
                'weekly_downloads': {'type': 'long'},
            },
        }
        # Add fields that we expect to return all translations without being
        # analyzed/indexed.
        cls.attach_translation_mappings(
            mapping,
            (
                'description',
                'developer_comments',
                'homepage',
                'name',
                'summary',
                'support_email',
                'support_url',
            ),
        )
        # Add language-specific analyzers for localized fields that are
        # analyzed/indexed.
        cls.attach_language_specific_analyzers(mapping, ('description', 'summary'))
        cls.attach_language_specific_analyzers_with_raw_variant(mapping, ('name',))
        return mapping
    @classmethod
    def extract_version(cls, obj, version_obj):
        """Return the indexable data for `version_obj` (a Version belonging
        to add-on `obj`) as a dict, or None if `version_obj` is None."""
        from olympia.versions.models import License, Version
        data = (
            {
                'id': version_obj.pk,
                'compatible_apps': cls.extract_compatibility_info(obj, version_obj),
                'files': [
                    {
                        'id': version_obj.file.id,
                        'created': version_obj.file.created,
                        'filename': version_obj.file.file.name,
                        'hash': version_obj.file.hash,
                        'is_mozilla_signed_extension': (
                            version_obj.file.is_mozilla_signed_extension
                        ),
                        'size': version_obj.file.size,
                        'status': version_obj.file.status,
                        'strict_compatibility': version_obj.file.strict_compatibility,
                        'permissions': version_obj.file.permissions,
                        'optional_permissions': version_obj.file.optional_permissions,
                    }
                ],
                'reviewed': version_obj.reviewed,
                'version': version_obj.version,
            }
            if version_obj
            else None
        )
        if data and version_obj:
            # Attach translations in bulk before extracting them below.
            attach_trans_dict(Version, [version_obj])
            data.update(
                cls.extract_field_api_translations(
                    version_obj, 'release_notes', db_field='release_notes_id'
                )
            )
            if version_obj.license:
                data['license'] = {
                    'id': version_obj.license.id,
                    'builtin': version_obj.license.builtin,
                    'url': version_obj.license.url,
                }
                attach_trans_dict(License, [version_obj.license])
                data['license'].update(
                    cls.extract_field_api_translations(version_obj.license, 'name')
                )
        return data
@classmethod
def extract_compatibility_info(cls, obj, version_obj):
"""Return compatibility info for the specified version_obj, as will be
indexed in ES."""
compatible_apps = {}
for app, appver in version_obj.compatible_apps.items():
min_, max_ = appver.min.version_int, appver.max.version_int
min_human, max_human = appver.min.version, appver.max.version
if not version_obj.file.strict_compatibility:
# The files attached to this version are not using strict
# compatibility, so the max version essentially needs to be
# ignored - let's fake a super high one. We leave max_human
# alone to leave the API representation intact.
max_ = version_int('*')
compatible_apps[app.id] = {
'min': min_,
'min_human': min_human,
'max': max_,
'max_human': max_human,
}
return compatible_apps
    @classmethod
    def extract_document(cls, obj):
        """Extract indexable attributes from an add-on.

        Returns a dict whose keys mirror the mapping from get_mapping().
        """
        from olympia.addons.models import Preview
        # Scalar attributes copied over verbatim.
        attrs = (
            'id',
            'average_daily_users',
            'bayesian_rating',
            'contributions',
            'created',
            'default_locale',
            'guid',
            'hotness',
            'icon_hash',
            'icon_type',
            'is_disabled',
            'is_experimental',
            'last_updated',
            'modified',
            'requires_payment',
            'slug',
            'status',
            'type',
            'weekly_downloads',
        )
        data = {attr: getattr(obj, attr) for attr in attrs}
        data['colors'] = None
        # Extract dominant colors from static themes.
        if obj.type == amo.ADDON_STATICTHEME:
            if obj.current_previews:
                data['colors'] = obj.current_previews[0].colors
        data['app'] = [app.id for app in obj.compatible_apps.keys()]
        # Boost by the number of users on a logarithmic scale.
        data['boost'] = float(data['average_daily_users'] ** 0.2)
        # Quadruple the boost if the add-on is public.
        if (
            obj.status == amo.STATUS_APPROVED
            and not obj.is_experimental
            and 'boost' in data
        ):
            data['boost'] = float(max(data['boost'], 1) * 4)
        # We can use all_categories because the indexing code goes through the
        # transformer that sets it.
        data['category'] = [cat.id for cat in obj.all_categories]
        data['current_version'] = cls.extract_version(obj, obj.current_version)
        data['listed_authors'] = [
            {
                'name': a.name,
                'id': a.id,
                'username': a.username,
                'is_public': a.is_public,
            }
            for a in obj.listed_authors
        ]
        data['has_eula'] = bool(obj.eula)
        data['has_privacy_policy'] = bool(obj.privacy_policy)
        data['is_recommended'] = bool(
            obj.promoted and obj.promoted.group == RECOMMENDED
        )
        data['previews'] = [
            {
                'id': preview.id,
                'modified': preview.modified,
                'sizes': preview.sizes,
                'position': preview.position,
            }
            for preview in obj.current_previews
        ]
        data['promoted'] = (
            {
                'group_id': obj.promoted.group_id,
                # store the app approvals because .approved_applications needs it.
                'approved_for_apps': [
                    app.id for app in obj.promoted.approved_applications
                ],
            }
            if obj.promoted
            else None
        )
        data['ratings'] = {
            'average': obj.average_rating,
            'count': obj.total_ratings,
            'text_count': obj.text_ratings_count,
        }
        # We can use tag_list because the indexing code goes through the
        # transformer that sets it (attach_tags).
        data['tags'] = getattr(obj, 'tag_list', [])
        # Handle localized fields.
        # First, deal with the 3 fields that need everything:
        for field in ('description', 'name', 'summary'):
            data.update(cls.extract_field_api_translations(obj, field))
            data.update(
                cls.extract_field_search_translation(obj, field, obj.default_locale)
            )
            data.update(cls.extract_field_analyzed_translations(obj, field))
        # Then add fields that only need to be returned to the API without
        # contributing to search relevancy.
        for field in ('developer_comments', 'homepage', 'support_email', 'support_url'):
            data.update(cls.extract_field_api_translations(obj, field))
        if obj.type != amo.ADDON_STATICTHEME:
            # Also do that for preview captions, which are set on each preview
            # object.
            attach_trans_dict(Preview, obj.current_previews)
            for i, preview in enumerate(obj.current_previews):
                data['previews'][i].update(
                    cls.extract_field_api_translations(preview, 'caption')
                )
        return data
    @classmethod
    def create_new_index(cls, index_name):
        """
        Create a new index for addons in ES.

        Intended to be used by reindexation (and tests), generally a bad idea
        to call manually.
        """
        create_index(
            index=index_name,
            mappings=cls.get_mapping(),
            index_settings={
                # create_index() will add its own index settings like number of
                # shards and replicas.
                'index': cls.index_settings
            },
        )
    @classmethod
    def reindex_tasks_group(cls, index_name):
        """
        Return the group of tasks to execute for a full reindex of addons on
        the index called `index_name` (which is not an alias but the real
        index name).
        """
        from olympia.addons.tasks import index_addons
        # Use the unfiltered manager so every add-on id is picked up, chunked
        # into tasks of 150 ids each.
        ids = cls.get_model().unfiltered.values_list('id', flat=True).order_by('id')
        chunk_size = 150
        return create_chunked_tasks_signatures(
            index_addons, list(ids), chunk_size, task_kwargs={'index': index_name}
        )
| bsd-3-clause | ca02831da9e668106e3512a374a35343 | 37.817819 | 88 | 0.450104 | 4.883071 | false | false | false | false |
mozilla/addons-server | src/olympia/versions/migrations/0025_auto_20220614_1653.py | 3 | 2146 | # Generated by Django 3.2.13 on 2022-06-14 16:53
from django.db import migrations
from olympia import amo
from olympia.addons.tasks import index_addons
from olympia.versions import compare
def update_dictionary_compat(apps, schema_editor):
    """Data migration: force every non-deleted dictionary version onto the
    default Firefox compatibility range (DEFAULT_WEBEXT_DICT_MIN_VERSION_FIREFOX
    to DEFAULT_WEBEXT_MAX_VERSION), then reindex all public dictionaries."""
    Version = apps.get_model('versions', 'Version')
    ApplicationsVersions = apps.get_model('versions', 'ApplicationsVersions')
    AppVersion = apps.get_model('applications', 'AppVersion')
    Addon = apps.get_model('addons', 'Addon')
    # Find the versions without correct compatibility
    version_ids = list(
        Version.unfiltered.filter(addon__type=amo.ADDON_DICT, deleted=False)
        .exclude(addon__status=amo.STATUS_DELETED)
        .exclude(
            apps__min__version=amo.DEFAULT_WEBEXT_DICT_MIN_VERSION_FIREFOX,
            apps__max__version=amo.DEFAULT_WEBEXT_MAX_VERSION,
        )
        .values_list('id', flat=True)
    )
    if version_ids:
        # Make sure the min/max AppVersion rows exist before pointing
        # ApplicationsVersions at them.
        webext_dict_min, _ = AppVersion.objects.get_or_create(
            application=amo.FIREFOX.id,
            version=amo.DEFAULT_WEBEXT_DICT_MIN_VERSION_FIREFOX,
            version_int=compare.version_int(amo.DEFAULT_WEBEXT_DICT_MIN_VERSION_FIREFOX),
        )
        webext_max, _ = AppVersion.objects.get_or_create(
            application=amo.FIREFOX.id,
            version=amo.DEFAULT_WEBEXT_MAX_VERSION,
            version_int=compare.version_int(amo.DEFAULT_WEBEXT_MAX_VERSION),
        )
        for version_id in version_ids:
            ApplicationsVersions.objects.update_or_create(
                version_id=version_id,
                application=amo.FIREFOX.id,
                defaults={'min': webext_dict_min, 'max': webext_max},
            )
    # reindex all public dictionaries, as they'll all have fake compat data in the index
    addons_ids = list(Addon.unfiltered.filter(
        type=amo.ADDON_DICT, status=amo.STATUS_APPROVED).values_list('id', flat=True))
    if addons_ids:
        index_addons.delay(addons_ids)
class Migration(migrations.Migration):
    # Pure data migration, no schema changes.
    # NOTE(review): RunPython is given no reverse_code, so this migration is
    # irreversible — confirm that's intended.

    dependencies = [
        ('versions', '0024_auto_20220530_1639'),
    ]
    operations = [
        migrations.RunPython(update_dictionary_compat)
    ]
| bsd-3-clause | 3602104fddae3e0fdc3876e6fd619ceb | 36 | 89 | 0.660298 | 3.547107 | false | false | false | false |
mozilla/addons-server | src/olympia/bandwagon/serializers.py | 3 | 6264 | from django.utils.translation import gettext, gettext_lazy as _
from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator
from olympia.accounts.serializers import BaseUserSerializer
from olympia.addons.models import Addon
from olympia.addons.serializers import AddonSerializer
from olympia.amo.templatetags.jinja_helpers import absolutify
from olympia.amo.utils import clean_nl, has_links, slug_validator
from olympia.api.fields import (
SlugOrPrimaryKeyRelatedField,
SplitField,
TranslationSerializerField,
)
from olympia.api.utils import is_gate_active
from olympia.bandwagon.models import Collection, CollectionAddon
from olympia.users.models import DeniedName
class CollectionSerializer(serializers.ModelSerializer):
    """Serializer used to display, create and edit Collection objects."""

    name = TranslationSerializerField()
    description = TranslationSerializerField(allow_blank=True, required=False)
    url = serializers.SerializerMethodField()
    # DRF's default=serializers.CurrentUserDefault() is necessary to pass
    # validation but we also need the custom create() below for the author to
    # be added to the created instance.
    author = BaseUserSerializer(
        read_only=True, default=serializers.CurrentUserDefault()
    )
    # Exposed as 'public' in the API, stored as 'listed' on the model.
    public = serializers.BooleanField(source='listed', default=True)
    uuid = serializers.UUIDField(format='hex', required=False, read_only=True)

    class Meta:
        model = Collection
        fields = (
            'id',
            'uuid',
            'url',
            'addon_count',
            'author',
            'description',
            'modified',
            'name',
            'slug',
            'public',
            'default_locale',
        )
        writeable_fields = (
            'description',
            'name',
            'slug',
            'public',
            'default_locale',
        )
        read_only_fields = tuple(set(fields) - set(writeable_fields))
        validators = [
            UniqueTogetherValidator(
                queryset=Collection.objects.all(),
                message=_(
                    'This custom URL is already in use by another one '
                    'of your collections.'
                ),
                fields=('slug', 'author'),
            ),
        ]

    def get_url(self, obj):
        """Return the absolute URL to the collection on the site."""
        return absolutify(obj.get_url_path())

    def validate_name(self, value):
        # if we have a localised dict of values validate them all.
        if isinstance(value, dict):
            return {
                locale: self.validate_name(sub_value)
                for locale, sub_value in value.items()
            }
        if DeniedName.blocked(value):
            raise serializers.ValidationError(gettext('This name cannot be used.'))
        return value

    def validate_description(self, value):
        if has_links(clean_nl(str(value))):
            # There's some links, we don't want them.
            raise serializers.ValidationError(gettext('No links are allowed.'))
        return value

    def validate_slug(self, value):
        slug_validator(
            value,
            message=gettext(
                'The custom URL must consist of letters, '
                'numbers, underscores or hyphens.'
            ),
        )
        if DeniedName.blocked(value):
            raise serializers.ValidationError(
                gettext('This custom URL cannot be used.')
            )
        return value

    def create(self, validated_data):
        # Always attribute the new collection to the request user; `author`
        # is read-only so it can never come from submitted data.
        validated_data['author'] = self.context['request'].user
        return super().create(validated_data)
class ThisCollectionDefault:
    """Serializer field default resolving to the collection handled by the
    current viewset (requires_context makes DRF pass the field instance)."""

    requires_context = True

    def __call__(self, serializer_field):
        # The viewset is expected to expose get_collection().
        viewset = serializer_field.context['view']
        return viewset.get_collection()
class CollectionAddonSerializer(serializers.ModelSerializer):
    """Serializer for the add-ons belonging to a collection
    (CollectionAddon rows)."""

    addon = SplitField(
        # Only used for writes (this is input field), so there are no perf
        # concerns and we don't use any special caching.
        SlugOrPrimaryKeyRelatedField(queryset=Addon.objects.public()),
        AddonSerializer(),
    )
    notes = TranslationSerializerField(
        source='comments', required=False, allow_blank=True
    )
    collection = serializers.HiddenField(default=ThisCollectionDefault())

    class Meta:
        model = CollectionAddon
        fields = ('addon', 'notes', 'collection')
        validators = [
            UniqueTogetherValidator(
                queryset=CollectionAddon.objects.all(),
                message=_('This add-on already belongs to the collection'),
                fields=('addon', 'collection'),
            ),
        ]
        writeable_fields = (
            # addon is technically writeable but we ignore updates in
            # validate() below.
            'addon',
            # collection is technically writeable but we should be ignoring any
            # incoming data to always use the collection from the viewset,
            # through HiddenField(default=ThisCollectionDefault()).
            'collection',
            'notes',
        )
        read_only_fields = tuple(set(fields) - set(writeable_fields))

    def validate(self, data):
        if self.partial:
            # addon is read_only but SplitField messes with the initialization.
            # DRF normally ignores updates to read_only fields, so do the same.
            data.pop('addon', None)
        return super().validate(data)

    def to_representation(self, instance):
        # Backwards-compatibility shim: expose a fake 'downloads' counter
        # when the corresponding API gate is active for this request.
        request = self.context.get('request')
        out = super().to_representation(instance)
        if request and is_gate_active(request, 'collections-downloads-shim'):
            out['downloads'] = 0
        return out
class CollectionWithAddonsSerializer(CollectionSerializer):
    """CollectionSerializer variant that also inlines the collection's
    add-ons (read-only)."""

    addons = serializers.SerializerMethodField()

    class Meta(CollectionSerializer.Meta):
        fields = CollectionSerializer.Meta.fields + ('addons',)
        read_only_fields = tuple(
            set(fields) - set(CollectionSerializer.Meta.writeable_fields)
        )

    def get_addons(self, obj):
        # The queryset comes from the viewset, so filtering/ordering rules
        # live there rather than in the serializer.
        addons_qs = self.context['view'].get_addons_queryset()
        return CollectionAddonSerializer(
            addons_qs, context=self.context, many=True
        ).data
| bsd-3-clause | 22e6058d464df49103001b6919356c92 | 34.191011 | 83 | 0.620211 | 4.605882 | false | false | false | false |
mozilla/addons-server | src/olympia/lib/akismet/migrations/0001_initial.py | 3 | 2810 | # Generated by Django 2.2.5 on 2019-09-12 15:18
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import olympia.amo.models
class Migration(migrations.Migration):
    # Auto-generated initial migration for the akismet app: creates the
    # `akismet_reports` table, with nullable SET_NULL FKs to the various
    # content objects a report can be attached to.

    initial = True

    dependencies = [
        ('bandwagon', '0001_initial'),
        ('addons', '0002_addon_fk'),
        ('ratings', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('files', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='AkismetReport',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(blank=True, default=django.utils.timezone.now, editable=False)),
                ('modified', models.DateTimeField(auto_now=True)),
                ('comment_type', models.CharField(max_length=255)),
                ('user_ip', models.CharField(max_length=255)),
                ('user_agent', models.CharField(max_length=255)),
                ('referrer', models.CharField(max_length=255)),
                ('user_name', models.CharField(max_length=255)),
                ('user_email', models.CharField(max_length=255)),
                ('user_homepage', models.CharField(max_length=255)),
                ('comment', models.TextField()),
                ('comment_modified', models.DateTimeField()),
                ('content_link', models.CharField(max_length=255, null=True)),
                ('content_modified', models.DateTimeField(null=True)),
                ('result', models.PositiveSmallIntegerField(choices=[(3, 'Unknown'), (0, 'Ham'), (1, 'Definite Spam'), (2, 'Maybe Spam')], null=True)),
                ('reported', models.BooleanField(default=False)),
                ('addon_instance', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='addons.Addon')),
                ('collection_instance', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='bandwagon.Collection')),
                ('rating_instance', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='ratings.Rating')),
                ('upload_instance', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='files.FileUpload')),
                ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'db_table': 'akismet_reports',
            },
            bases=(olympia.amo.models.SaveUpdateMixin, models.Model),
        ),
    ]
| bsd-3-clause | 32de5fdbf7652b774f7d9e0e381da4e6 | 52.018868 | 161 | 0.60427 | 4.002849 | false | false | false | false |
mozilla/addons-server | src/olympia/access/acl.py | 3 | 6844 | from olympia import amo
def match_rules(rules, app, action):
    """Return whether any rule in the comma-separated `rules` string grants
    `action` on `app`.

    Each rule has the form 'app:action'; '*' is a wildcard on either side,
    and an `action` of '%' matches any rule action for a matching app.
    """
    for entry in rules.split(','):
        rule_app, rule_action = entry.split(':')
        app_matches = rule_app in ('*', app)
        action_matches = rule_action in ('*', action) or action == '%'
        if app_matches and action_matches:
            return True
    return False
def action_allowed_for(user, permission):
    """Return whether `user` is allowed to perform `permission`.

    `permission` must be one of the tuple constants declared in
    constants.permissions.

    Note: relies on user.groups_list, which is cached on the user instance
    the first time it's accessed.
    """
    if user is None or not user.is_authenticated:
        return False
    assert permission in amo.permissions.PERMISSIONS_LIST  # constants only.
    for group in user.groups_list:
        if match_rules(group.rules, permission.app, permission.action):
            return True
    return False
def experiments_submission_allowed(user, parsed_addon_data):
    """Experiments can only be submitted by the people with the right
    permission.

    See bug 1220097.
    """
    if parsed_addon_data.get('is_experiment', False):
        return action_allowed_for(user, amo.permissions.EXPERIMENTS_SUBMIT)
    return True
def langpack_submission_allowed(user, parsed_addon_data):
    """Language packs can only be submitted by people with the right
    permission.

    See https://github.com/mozilla/addons-server/issues/11788 and
    https://github.com/mozilla/addons-server/issues/11793
    """
    if parsed_addon_data.get('type') == amo.ADDON_LPAPP:
        return action_allowed_for(user, amo.permissions.LANGPACK_SUBMIT)
    return True
def reserved_guid_addon_submission_allowed(user, parsed_addon_data):
    """Add-ons with a guid ending with reserved suffixes can only be submitted
    by people with the right permission.
    """
    guid = (parsed_addon_data.get('guid') or '').lower()
    if guid.endswith(amo.RESERVED_ADDON_GUIDS):
        return action_allowed_for(user, amo.permissions.SYSTEM_ADDON_SUBMIT)
    return True
def mozilla_signed_extension_submission_allowed(user, parsed_addon_data):
    """Add-ons already signed with mozilla internal certificate can only be
    submitted by people with the right permission.
    """
    if parsed_addon_data.get('is_mozilla_signed_extension'):
        return action_allowed_for(user, amo.permissions.SYSTEM_ADDON_SUBMIT)
    return True
def check_addon_ownership(
    user,
    addon,
    allow_developer=False,
    allow_addons_edit_permission=True,
    allow_mozilla_disabled_addon=False,
):
    """Return whether `user` may act as an owner of `addon`.

    Always False for anonymous users and deleted add-ons. By default:
    - users holding the Addons:Edit permission are accepted regardless of
      authorship (disable with allow_addons_edit_permission=False); this
      check has precedence over everything below.
    - mozilla-disabled add-ons are rejected (bypass with
      allow_mozilla_disabled_addon=True).
    - authors with only the developer role are rejected (bypass with
      allow_developer=True).
    """
    if not user.is_authenticated:
        return False
    # Deleted addons can't be edited at all.
    if addon.is_deleted:
        return False
    # Users with 'Addons:Edit' can do anything.
    if allow_addons_edit_permission and action_allowed_for(
        user, amo.permissions.ADDONS_EDIT
    ):
        return True
    # Only admins (handled above) can edit admin-disabled addons.
    if addon.status == amo.STATUS_DISABLED and not allow_mozilla_disabled_addon:
        return False
    # Addon owners (and optionally developers) can do everything else.
    wanted_roles = (
        (amo.AUTHOR_ROLE_OWNER, amo.AUTHOR_ROLE_DEV)
        if allow_developer
        else (amo.AUTHOR_ROLE_OWNER,)
    )
    return addon.addonuser_set.filter(user=user, role__in=wanted_roles).exists()
def is_listed_addons_reviewer(user, allow_content_reviewers=True):
    """Return whether `user` holds any listed add-ons review permission.

    Content review permission counts unless allow_content_reviewers is False.
    """
    relevant_permissions = [
        amo.permissions.ADDONS_REVIEW,
        amo.permissions.ADDONS_RECOMMENDED_REVIEW,
    ]
    if allow_content_reviewers:
        relevant_permissions.append(amo.permissions.ADDONS_CONTENT_REVIEW)
    for permission in relevant_permissions:
        if action_allowed_for(user, permission):
            return True
    return False
def is_listed_addons_viewer_or_reviewer(user, allow_content_reviewers=True):
    """Like is_listed_addons_reviewer(), but also accepts users who merely
    have view access to the reviewer tools."""
    if action_allowed_for(user, amo.permissions.REVIEWER_TOOLS_VIEW):
        return True
    return is_listed_addons_reviewer(user, allow_content_reviewers)
def is_unlisted_addons_reviewer(user):
    """Return whether `user` has the permission to review unlisted add-ons."""
    return action_allowed_for(user, amo.permissions.ADDONS_REVIEW_UNLISTED)
def is_unlisted_addons_viewer_or_reviewer(user):
    """Like is_unlisted_addons_reviewer(), but also accepts users who merely
    have view access to the unlisted reviewer tools."""
    if action_allowed_for(user, amo.permissions.REVIEWER_TOOLS_UNLISTED_VIEW):
        return True
    return is_unlisted_addons_reviewer(user)
def is_static_theme_reviewer(user):
    """Return whether `user` has the permission to review static themes."""
    return action_allowed_for(user, amo.permissions.STATIC_THEMES_REVIEW)
def is_reviewer(user, addon, allow_content_reviewers=True):
    """Return True if the user is an addons reviewer, or a theme reviewer
    and the addon is a theme.

    If allow_content_reviewers is passed and False (defaults to True), then
    having content review permission is not enough to be considered an addons
    reviewer.
    """
    # Static themes have their own review permission; everything else uses
    # the listed add-ons reviewer permissions.
    if addon.type == amo.ADDON_STATICTHEME:
        return is_static_theme_reviewer(user)
    return is_listed_addons_reviewer(
        user, allow_content_reviewers=allow_content_reviewers
    )
def is_user_any_kind_of_reviewer(user, allow_viewers=False):
    """Lax reviewer check: True when `user` holds any reviewer permission at
    all (listed, unlisted, content, recommended or static themes).

    With allow_viewers=True, plain reviewer-tools view access (listed or
    unlisted) also counts.

    Don't use on anything that would alter add-on data: this backs
    any_reviewer_required() and AllowAnyKindOfReviewer, which guard views
    that don't change the add-on but still need to be reviewer-only.
    """
    candidates = [
        amo.permissions.ADDONS_REVIEW,
        amo.permissions.ADDONS_REVIEW_UNLISTED,
        amo.permissions.ADDONS_CONTENT_REVIEW,
        amo.permissions.ADDONS_RECOMMENDED_REVIEW,
        amo.permissions.STATIC_THEMES_REVIEW,
    ]
    if allow_viewers:
        candidates += [
            amo.permissions.REVIEWER_TOOLS_VIEW,
            amo.permissions.REVIEWER_TOOLS_UNLISTED_VIEW,
        ]
    return any(action_allowed_for(user, permission) for permission in candidates)
def author_or_unlisted_viewer_or_reviewer(user, addon):
    """Return True for unlisted viewers/reviewers, and for authors of
    `addon` (owners or developers, without the Addons:Edit shortcut)."""
    if is_unlisted_addons_viewer_or_reviewer(user):
        return True
    return check_addon_ownership(
        user,
        addon,
        allow_addons_edit_permission=False,
        allow_developer=True,
    )
| bsd-3-clause | 4e212b0f930b1150148e239eba372af8 | 33.049751 | 86 | 0.691262 | 3.81069 | false | false | false | false |
mozilla/addons-server | src/olympia/devhub/forms.py | 2 | 52093 | import os
import tarfile
import zipfile
from urllib.parse import urlsplit
from django import forms
from django.conf import settings
from django.core.validators import MinLengthValidator
from django.db.models import Q
from django.forms.formsets import BaseFormSet, formset_factory
from django.forms.models import BaseModelFormSet, modelformset_factory
from django.forms.widgets import RadioSelect
from django.urls import reverse
from django.utils.functional import keep_lazy_text
from django.utils.html import escape, format_html
from django.utils.safestring import mark_safe
from django.utils.translation import gettext, gettext_lazy as _, ngettext
import waffle
from django_statsd.clients import statsd
from rest_framework.exceptions import Throttled
from olympia import amo
from olympia.access import acl
from olympia.activity.models import ActivityLog
from olympia.addons import tasks as addons_tasks
from olympia.addons.models import (
Addon,
AddonApprovalsCounter,
AddonCategory,
AddonUser,
AddonUserPendingConfirmation,
DeniedSlug,
Preview,
)
from olympia.addons.utils import (
fetch_translations_from_addon,
RestrictionChecker,
validate_version_number_is_gt_latest_signed_listed_version,
verify_mozilla_trademark,
)
from olympia.amo.fields import HttpHttpsOnlyURLField, ReCaptchaField
from olympia.amo.forms import AMOModelForm
from olympia.amo.messages import DoubleSafe
from olympia.amo.utils import remove_icons, slug_validator
from olympia.amo.validators import OneOrMoreLetterOrNumberCharacterValidator
from olympia.applications.models import AppVersion
from olympia.blocklist.models import Block
from olympia.constants.categories import CATEGORIES, CATEGORIES_BY_ID, CATEGORIES_NO_APP
from olympia.devhub.widgets import CategoriesSelectMultiple, IconTypeSelect
from olympia.files.models import FileUpload
from olympia.files.utils import SafeTar, SafeZip, parse_addon
from olympia.tags.models import Tag
from olympia.translations import LOCALES
from olympia.translations.fields import LocaleErrorMessage, TransField, TransTextarea
from olympia.translations.forms import TranslationFormMixin
from olympia.translations.models import Translation, delete_translation
from olympia.translations.widgets import TranslationTextarea, TranslationTextInput
from olympia.users.models import (
EmailUserRestriction,
RESTRICTION_TYPES,
UserEmailField,
UserProfile,
)
from olympia.versions.models import (
VALID_SOURCE_EXTENSIONS,
ApplicationsVersions,
License,
Version,
)
format_html_lazy = keep_lazy_text(format_html)
def clean_addon_slug(slug, instance):
    """Validate `slug` as the new slug for add-on `instance` and return it.

    Raises forms.ValidationError when the slug is already used by another
    add-on or is on the deny list.
    """
    slug_validator(slug)
    if slug == instance.slug:
        # Unchanged slug: no uniqueness/deny-list checks needed.
        return slug
    if Addon.objects.filter(slug=slug).exists():
        raise forms.ValidationError(
            gettext('This slug is already in use. Please choose another.')
        )
    if DeniedSlug.blocked(slug):
        msg = gettext('The slug cannot be "%(slug)s". Please choose another.')
        raise forms.ValidationError(msg % {'slug': slug})
    return slug
class AddonFormBase(TranslationFormMixin, forms.ModelForm):
    """Base class for devhub forms editing an Addon.

    Pops `request` (required) and `version` (optional) from the kwargs, and
    resets content review when reviewed metadata fields change on save().
    """

    # Localized fields whose modification triggers a new content review on
    # add-ons with listed versions (see save()).
    fields_to_trigger_content_review = ('name', 'summary')

    def __init__(self, *args, **kw):
        self.request = kw.pop('request')
        self.version = kw.pop('version', None)
        super().__init__(*args, **kw)
        # On top of field-level validation, name and summary must contain at
        # least one letter or number.
        for field in ('name', 'summary'):
            if field in self.fields:
                self.fields[field].validators.append(
                    OneOrMoreLetterOrNumberCharacterValidator()
                )

    class Meta:
        # NOTE(review): 'models' is not a valid ModelForm Meta option (the
        # option is 'model'), so as written this Meta binds no model and
        # concrete subclasses must declare their own Meta. Confirm before
        # fixing: a real `model = Addon` here changes class-creation-time
        # field generation.
        models = Addon
        fields = ('name', 'summary')

    def clean_slug(self):
        return clean_addon_slug(self.cleaned_data['slug'], self.instance)

    def clean_name(self):
        # Enforce the Mozilla trademark policy on the name (see
        # verify_mozilla_trademark(); errors get attached to this form).
        user = getattr(self.request, 'user', None)
        name = verify_mozilla_trademark(self.cleaned_data['name'], user, form=self)
        return name

    def save(self, *args, **kwargs):
        """Save the form; if a content-reviewed field changed on an add-on
        with listed versions, reset its content approval counter."""
        metadata_content_review = self.instance and self.instance.has_listed_versions()
        # Snapshot the reviewed fields before saving so we can detect changes.
        existing_data = (
            fetch_translations_from_addon(
                self.instance, self.fields_to_trigger_content_review
            )
            if metadata_content_review
            else {}
        )
        obj = super().save(*args, **kwargs)
        if not metadata_content_review:
            return obj
        new_data = fetch_translations_from_addon(
            obj, self.fields_to_trigger_content_review
        )
        if existing_data != new_data:
            # flag for content review
            statsd.incr('devhub.metadata_content_review_triggered')
            AddonApprovalsCounter.reset_content_for_addon(addon=obj)
        return obj
class CategoryForm(forms.Form):
    """Form editing the categories of an add-on for a single application."""

    application = forms.TypedChoiceField(
        choices=amo.APPS_CHOICES, coerce=int, widget=forms.HiddenInput, required=True
    )
    categories = forms.MultipleChoiceField(choices=(), widget=CategoriesSelectMultiple)

    def save(self, addon):
        """Sync the add-on's categories for this application with the form
        data, then trigger a reindex of the add-on."""
        application = self.cleaned_data.get('application')
        categories_new = [int(c) for c in self.cleaned_data['categories']]
        categories_old = [
            c.id for c in addon.app_categories.get(amo.APP_IDS[application].short, [])
        ]
        # Add new categories.
        for c_id in set(categories_new) - set(categories_old):
            AddonCategory(addon=addon, category_id=c_id).save()
        # Remove old categories.
        for c_id in set(categories_old) - set(categories_new):
            AddonCategory.objects.filter(addon=addon, category_id=c_id).delete()
        # Remove old, outdated categories cache on the model.
        del addon.all_categories
        # Make sure the add-on is properly re-indexed
        addons_tasks.index_addons.delay([addon.id])

    def clean_categories(self):
        """Cap the number of categories and forbid combining 'misc' with any
        other category."""
        categories = self.cleaned_data['categories']
        total = len(categories)
        max_cat = amo.MAX_CATEGORIES
        if total > max_cat:
            # L10n: {0} is the number of categories.
            raise forms.ValidationError(
                ngettext(
                    'You can have only {0} category.',
                    'You can have only {0} categories.',
                    max_cat,
                ).format(max_cat)
            )
        has_misc = list(filter(lambda x: CATEGORIES_BY_ID.get(int(x)).misc, categories))
        if has_misc and total > 1:
            raise forms.ValidationError(
                gettext(
                    'The miscellaneous category cannot be combined with '
                    'additional categories.'
                )
            )
        return categories
class BaseCategoryFormSet(BaseFormSet):
    """Formset building one CategoryForm per app the add-on supports."""
    def __init__(self, *args, **kw):
        self.addon = kw.pop('addon')
        self.request = kw.pop('request', None)
        super().__init__(*args, **kw)
        self.initial = []
        apps = sorted(self.addon.compatible_apps.keys(), key=lambda x: x.id)
        # Drop any apps that don't have appropriate categories.
        for app in list(apps):
            if app and not CATEGORIES.get(app.id, {}).get(self.addon.type):
                apps.remove(app)
        if not CATEGORIES_NO_APP.get(self.addon.type):
            apps = []
        for app in apps:
            cats = self.addon.app_categories.get(app.short, [])
            self.initial.append({'categories': [c.id for c in cats]})
        # apps and self.forms are expected to line up index-for-index here:
        # initial data above was appended in the same sorted-app order.
        for app, form in zip(apps, self.forms):
            key = app.id if app else None
            form.request = self.request
            form.initial['application'] = key
            form.app = app
            cats = sorted(
                CATEGORIES.get(key, {}).get(self.addon.type, {}).values(),
                key=lambda x: x.name,
            )
            form.fields['categories'].choices = [(c.id, c.name) for c in cats]
    def save(self):
        """Persist every per-app category form against the add-on."""
        for f in self.forms:
            f.save(self.addon)
# Formset presenting one CategoryForm per compatible application.
CategoryFormSet = formset_factory(
    form=CategoryForm, formset=BaseCategoryFormSet, extra=0
)
# (mimetype, label) choices for the add-on icon type selector; '' = default.
ICON_TYPES = [('', 'default'), ('image/jpeg', 'jpeg'), ('image/png', 'png')]
class AddonFormMedia(AddonFormBase):
    """Form handling the add-on icon (type selection + uploaded image)."""
    icon_type = forms.CharField(
        widget=IconTypeSelect(choices=ICON_TYPES), required=False
    )
    # Hash identifying a previously-uploaded temp icon file (see save()).
    icon_upload_hash = forms.CharField(required=False)
    class Meta:
        model = Addon
        fields = ('icon_upload_hash', 'icon_type')
    def save(self, addon, commit=True):
        """Save the form; if an icon was uploaded, queue the resize task."""
        if self.cleaned_data['icon_upload_hash']:
            upload_hash = self.cleaned_data['icon_upload_hash']
            upload_path = os.path.join(settings.TMP_PATH, 'icon', upload_hash)
            dirname = addon.get_icon_dir()
            destination = os.path.join(dirname, '%s' % addon.id)
            # Drop the previous icons before generating the new sizes.
            remove_icons(destination)
            addons_tasks.resize_icon.delay(
                upload_path,
                destination,
                amo.ADDON_ICON_SIZES,
                set_modified_on=addon.serializable_reference(),
            )
        return super().save(commit)
class AdditionalDetailsForm(AddonFormBase):
    """Edit form for secondary add-on metadata: locale, homepage, tags and
    contribution URL."""
    default_locale = forms.TypedChoiceField(choices=LOCALES)
    homepage = TransField.adapt(HttpHttpsOnlyURLField)(required=False)
    tags = forms.MultipleChoiceField(
        choices=(), widget=forms.CheckboxSelectMultiple, required=False
    )
    contributions = HttpHttpsOnlyURLField(required=False, max_length=255)
    class Meta:
        model = Addon
        fields = ('default_locale', 'homepage', 'tags', 'contributions')
    def __init__(self, *args, **kw):
        super().__init__(*args, **kw)
        # Tag choices come from the full Tag table; initial selection is
        # whatever tags the instance currently has.
        if tags_field := self.fields.get('tags'):
            self.all_tags = {t.tag_text: t for t in Tag.objects.all()}
            tags_field.choices = ((t, t) for t in self.all_tags)
            tags_field.initial = list(
                self.instance.tags.all().values_list('tag_text', flat=True)
            )
    def clean_contributions(self):
        """Restrict contribution URLs to approved https domains (with a
        special path requirement for GitHub Sponsors)."""
        if self.cleaned_data['contributions']:
            parsed_url = urlsplit(self.cleaned_data['contributions'])
            hostname = parsed_url.hostname
            path = parsed_url.path
            if hostname not in amo.VALID_CONTRIBUTION_DOMAINS:
                raise forms.ValidationError(
                    gettext('URL domain must be one of [%s].')
                    % ', '.join(amo.VALID_CONTRIBUTION_DOMAINS)
                )
            elif hostname == 'github.com' and not path.startswith('/sponsors/'):
                # Issue 15497, validate path for GitHub Sponsors
                raise forms.ValidationError(
                    gettext('URL path for GitHub Sponsors must contain /sponsors/.')
                )
            elif parsed_url.scheme != 'https':
                raise forms.ValidationError(gettext('URLs must start with https://.'))
        return self.cleaned_data['contributions']
    def clean_tags(self):
        """Enforce the maximum number of tags."""
        tags = self.cleaned_data['tags']
        if (over := len(tags) - amo.MAX_TAGS) > 0:
            msg = ngettext(
                'You have {0} too many tags.', 'You have {0} too many tags.', over
            ).format(over)
            raise forms.ValidationError(msg)
        return tags
    def clean(self):
        # Make sure we have the required translations in the new locale.
        required = 'name', 'summary', 'description'
        if not self.errors and 'default_locale' in self.changed_data:
            fields = {k: getattr(self.instance, k + '_id') for k in required}
            locale = self.cleaned_data['default_locale']
            ids = filter(None, fields.values())
            qs = Translation.objects.filter(
                locale=locale, id__in=ids, localized_string__isnull=False
            ).values_list('id', flat=True)
            missing = [k for k, v in fields.items() if v not in qs]
            if missing:
                raise forms.ValidationError(
                    gettext(
                        'Before changing your default locale you must have a '
                        'name, summary, and description in that locale. '
                        'You are missing %s.'
                    )
                    % ', '.join(map(repr, missing))
                )
        return super().clean()
    def save(self, addon, commit=False):
        """Save the form and sync the add-on's tags with the selection.
        `commit` is intentionally ignored (forced to False) — see comment.
        """
        if self.fields.get('tags'):
            tags_new = self.cleaned_data['tags']
            tags_old = self.fields['tags'].initial
            # Add new tags.
            for t in set(tags_new) - set(tags_old):
                self.all_tags[t].add_tag(addon)
            # Remove old tags.
            for t in set(tags_old) - set(tags_new):
                self.all_tags[t].remove_tag(addon)
        # We ignore `commit`, since we need it to be `False` so we can save
        # the ManyToMany fields on our own.
        addonform = super().save(commit=False)
        addonform.save()
        return addonform
class AdditionalDetailsFormUnlisted(AdditionalDetailsForm):
    """Unlisted-channel variant of AdditionalDetailsForm (identical fields)."""
    # We want the same fields as the listed version. In particular,
    # default_locale is referenced in the template and needs to exist.
    pass
class AddonFormTechnical(AddonFormBase):
    """Technical-details form (developer comments) for listed add-ons."""
    developer_comments = TransField(widget=TransTextarea, required=False)
    class Meta:
        model = Addon
        fields = ('developer_comments',)
class AddonFormTechnicalUnlisted(AddonFormBase):
    """Unlisted variant of the technical form: no editable fields."""
    class Meta:
        model = Addon
        fields = ()
class AuthorForm(forms.ModelForm):
    """Row form for an add-on author (user + role), used inside formsets."""
    user = UserEmailField(required=True, queryset=UserProfile.objects.all())
    role = forms.TypedChoiceField(
        required=True,
        choices=amo.AUTHOR_CHOICES,
        initial=amo.AUTHOR_ROLE_OWNER,
        coerce=int,
    )
    class Meta:
        model = AddonUser
        exclude = ('addon',)
    def __init__(self, *args, **kwargs):
        # addon should be passed through form_kwargs={'addon': addon} when
        # initializing the formset.
        self.addon = kwargs.pop('addon')
        super().__init__(*args, **kwargs)
        instance = getattr(self, 'instance', None)
        if instance and instance.pk:
            # Clients are not allowed to change existing authors. If they want
            # to do that, they need to remove the existing author and add a new
            # one. This makes the confirmation system easier to manage.
            self.fields['user'].disabled = True
            # Set the email to be displayed in the form instead of the pk.
            self.initial['user'] = instance.user.email
    def clean(self):
        """Reject direct creation of AddonUser rows (new authors must go
        through the pending-confirmation flow)."""
        rval = super().clean()
        if self._meta.model == AddonUser and (
            self.instance is None or not self.instance.pk
        ):
            # This should never happen, the client is trying to add a user
            # directly to AddonUser through the formset, they should have
            # been added to AuthorWaitingConfirmation instead.
            raise forms.ValidationError(gettext('Users can not be added directly'))
        return rval
class AuthorWaitingConfirmationForm(AuthorForm):
    """AuthorForm variant targeting the pending-confirmation model."""
    class Meta(AuthorForm.Meta):
        model = AddonUserPendingConfirmation
    def clean_user(self):
        """Validate the invited user: not email-restricted, not already an
        author, and having a valid display name."""
        user = self.cleaned_data.get('user')
        if user:
            if not EmailUserRestriction.allow_email(
                user.email, restriction_type=RESTRICTION_TYPES.SUBMISSION
            ):
                raise forms.ValidationError(EmailUserRestriction.error_message)
            if self.addon.authors.filter(pk=user.pk).exists():
                raise forms.ValidationError(
                    gettext('An author can only be present once.')
                )
            # Run the display-name field validators by hand so a missing or
            # invalid name surfaces as a single friendly error below.
            name_validators = user._meta.get_field('display_name').validators
            try:
                if user.display_name is None:
                    raise forms.ValidationError('')  # Caught below.
                for validator in name_validators:
                    validator(user.display_name)
            except forms.ValidationError:
                raise forms.ValidationError(
                    gettext(
                        'The account needs a display name before it can be added '
                        'as an author.'
                    )
                )
        return user
class BaseModelFormSet(BaseModelFormSet):
    """
    Override the parent's is_valid to prevent deleting all forms.
    Note: this deliberately shadows django's BaseModelFormSet name in this
    module; later formsets in this file inherit from this subclass.
    """
    def is_valid(self):
        # clean() won't get called in is_valid() if all the rows are getting
        # deleted. We can't allow deleting everything.
        rv = super().is_valid()
        return rv and not any(self.errors) and not bool(self.non_form_errors())
class BaseAuthorFormSet(BaseModelFormSet):
    """Author formset: requires at least one owner and one listed author."""
    def clean(self):
        if any(self.errors):
            return
        # cleaned_data could be None if it's the empty extra form.
        data = list(
            filter(
                None,
                [
                    f.cleaned_data
                    for f in self.forms
                    if not f.cleaned_data.get('DELETE', False)
                ],
            )
        )
        if not any(d['role'] == amo.AUTHOR_ROLE_OWNER for d in data):
            raise forms.ValidationError(gettext('Must have at least one owner.'))
        if not any(d['listed'] for d in data):
            raise forms.ValidationError(gettext('At least one author must be listed.'))
class BaseAuthorWaitingConfirmationFormSet(BaseModelFormSet):
    """Pending-author formset: the same user may only appear once."""
    def clean(self):
        if any(self.errors):
            return
        # cleaned_data could be None if it's the empty extra form.
        surviving = list(
            filter(
                None,
                [
                    form.cleaned_data
                    for form in self.forms
                    if not form.cleaned_data.get('DELETE', False)
                ],
            )
        )
        user_ids = [entry['user'].id for entry in surviving]
        if len(set(user_ids)) != len(user_ids):
            raise forms.ValidationError(gettext('An author can only be present once.'))
# Formset for confirmed add-on authors.
AuthorFormSet = modelformset_factory(
    AddonUser, formset=BaseAuthorFormSet, form=AuthorForm, can_delete=True, extra=0
)
# Formset for authors that have been invited but not yet confirmed.
AuthorWaitingConfirmationFormSet = modelformset_factory(
    AddonUserPendingConfirmation,
    formset=BaseAuthorWaitingConfirmationFormSet,
    form=AuthorWaitingConfirmationForm,
    can_delete=True,
    extra=0,
)
class DeleteForm(forms.Form):
    """Confirmation form for deleting an add-on.

    The user must type the add-on's slug to confirm the deletion; an
    optional free-text reason may be supplied. The target add-on is passed
    via the ``addon`` kwarg.
    """
    slug = forms.CharField()
    reason = forms.CharField(required=False)
    def __init__(self, *args, **kwargs):
        self.addon = kwargs.pop('addon')
        super().__init__(*args, **kwargs)
    def clean_slug(self):
        """Require the typed slug to match the add-on's slug exactly."""
        slug = self.cleaned_data['slug']
        # Use != instead of the original `not x == y`, and return the value:
        # clean_<field> must return the cleaned value, otherwise Django sets
        # cleaned_data['slug'] to None on success.
        if slug != self.addon.slug:
            raise forms.ValidationError(gettext('Slug incorrect.'))
        return slug
class LicenseRadioSelect(forms.RadioSelect):
    """RadioSelect that decorates each license option with a 'Details' link
    and Creative Commons icon metadata."""
    def get_context(self, name, value, attrs):
        context = super().get_context(name, value, attrs)
        # Make sure the `class` is only set on the radio fields and
        # not on the `ul`. This avoids style issues among other things.
        # See https://github.com/mozilla/addons-server/issues/8902
        # and https://github.com/mozilla/addons-server/issues/8920
        del context['widget']['attrs']['class']
        return context
    def create_option(
        self, name, value, label, selected, index, subindex=None, attrs=None
    ):
        context = super().create_option(
            name=name,
            value=value,
            label=label,
            selected=selected,
            index=index,
            subindex=subindex,
            attrs=attrs,
        )
        link = (
            '<a class="xx extra" href="%s" target="_blank" '
            'rel="noopener noreferrer">%s</a>'
        )
        # The choice label here is a License instance (see LicenseForm).
        license = self.choices[index][1]
        if hasattr(license, 'url') and license.url:
            details = link % (license.url, gettext('Details'))
            context['label'] = mark_safe(str(context['label']) + ' ' + details)
        if hasattr(license, 'icons'):
            context['attrs']['data-cc'] = license.icons
        context['attrs']['data-name'] = str(license)
        return context
class LicenseForm(AMOModelForm):
    """Form to pick a builtin license or define a custom one for a version."""
    # Hack to restore behavior from pre Django 1.10 times.
    # Django 1.10 enabled `required` rendering for required widgets. That
    # wasn't the case before, this should be fixed properly but simplifies
    # the actual Django 1.11 deployment for now.
    # See https://github.com/mozilla/addons-server/issues/8912 for proper fix.
    use_required_attribute = False
    builtin = forms.TypedChoiceField(
        choices=[], coerce=int, widget=LicenseRadioSelect(attrs={'class': 'license'})
    )
    name = forms.CharField(
        widget=TranslationTextInput(),
        label=_("What is your license's name?"),
        required=False,
        initial=_('Custom License'),
    )
    text = forms.CharField(
        widget=TranslationTextarea(),
        required=False,
        label=_('Provide the text of your license.'),
    )
    def __init__(self, *args, **kwargs):
        self.version = kwargs.pop('version', None)
        if self.version:
            kwargs['instance'], kwargs['initial'] = self.version.license, None
            # Clear out initial data if it's a builtin license.
            if getattr(kwargs['instance'], 'builtin', None):
                kwargs['initial'] = {'builtin': kwargs['instance'].builtin}
                kwargs['instance'] = None
            # Static themes get the Creative Commons license set.
            self.cc_licenses = kwargs.pop(
                'cc', self.version.addon.type == amo.ADDON_STATICTHEME
            )
        else:
            self.cc_licenses = kwargs.pop('cc', False)
        super().__init__(*args, **kwargs)
        licenses = License.objects.builtins(cc=self.cc_licenses, on_form=True)
        cs = [(x.builtin, x) for x in licenses]
        if not self.cc_licenses:
            # creative commons licenses don't have an 'other' option.
            cs.append((License.OTHER, gettext('Other')))
        self.fields['builtin'].choices = cs
        if self.version and self.version.channel == amo.CHANNEL_UNLISTED:
            self.fields['builtin'].required = False
    class Meta:
        model = License
        fields = ('builtin', 'name', 'text')
    def clean_name(self):
        """Fall back to the default custom-license name when blank."""
        name = self.cleaned_data['name']
        return name.strip() or gettext('Custom License')
    def clean(self):
        """Require license text when the 'Other' (custom) option is chosen."""
        data = self.cleaned_data
        if self.errors:
            return data
        elif data['builtin'] == License.OTHER and not data['text']:
            raise forms.ValidationError(
                gettext('License text is required when choosing Other.')
            )
        return data
    def get_context(self):
        """Returns a view context dict having keys version, license_form,
        and license_other_val.
        """
        return {
            'version': self.version,
            'license_form': self.version and self,
            'license_other_val': License.OTHER,
        }
    def save(self, *args, **kw):
        """Save all form data.
        This will only create a new license if it's not one of the builtin
        ones.
        Keyword arguments
        **log=True**
            Set to False if you do not want to log this action for display
            on the developer dashboard.
        """
        log = kw.pop('log', True)
        changed = self.changed_data
        builtin = self.cleaned_data['builtin']
        if builtin == '':  # No license chosen, it must be an unlisted add-on.
            return
        is_other = builtin == License.OTHER
        if not is_other:
            # We're dealing with a builtin license, there is no modifications
            # allowed to it, just return it.
            license = License.objects.get(builtin=builtin)
        else:
            # We're not dealing with a builtin license, so save it to the
            # database.
            license = super().save(*args, **kw)
        if self.version:
            if (changed and is_other) or license != self.version.license:
                self.version.update(license=license)
                if log:
                    ActivityLog.create(
                        amo.LOG.CHANGE_LICENSE, license, self.version.addon
                    )
        return license
class PolicyForm(TranslationFormMixin, AMOModelForm):
    """Form for editing the add-ons EULA and privacy policy."""
    has_eula = forms.BooleanField(
        required=False, label=_('This add-on has an End-User License Agreement')
    )
    eula = TransField(
        widget=TransTextarea(),
        required=False,
        label=_("Please specify your add-on's " 'End-User License Agreement:'),
    )
    has_priv = forms.BooleanField(
        required=False, label=_('This add-on has a Privacy Policy'), label_suffix=''
    )
    privacy_policy = TransField(
        widget=TransTextarea(),
        required=False,
        label=_("Please specify your add-on's Privacy Policy:"),
    )
    def __init__(self, *args, **kw):
        self.addon = kw.pop('addon', None)
        if not self.addon:
            raise ValueError('addon keyword arg cannot be None')
        kw['instance'] = self.addon
        # Pre-check the has_* booleans based on existing translations.
        kw['initial'] = dict(
            has_priv=self._has_field('privacy_policy'), has_eula=self._has_field('eula')
        )
        super().__init__(*args, **kw)
    def _has_field(self, name):
        # If there's a eula in any language, this addon has a eula.
        n = getattr(self.addon, '%s_id' % name)
        return any(map(bool, Translation.objects.filter(id=n)))
    class Meta:
        model = Addon
        fields = ('eula', 'privacy_policy')
    def save(self, commit=True):
        """Save the policies; unchecking a has_* box deletes that text."""
        ob = super().save(commit)
        for k, field in (('has_eula', 'eula'), ('has_priv', 'privacy_policy')):
            if not self.cleaned_data[k]:
                delete_translation(self.instance, field)
        if 'privacy_policy' in self.changed_data:
            ActivityLog.create(amo.LOG.CHANGE_POLICY, self.addon, self.instance)
        return ob
class WithSourceMixin:
    """Mixin validating an uploaded source-code archive (zip or tarball)."""
    def get_invalid_source_file_type_message(self):
        valid_extensions_string = '(%s)' % ', '.join(VALID_SOURCE_EXTENSIONS)
        return gettext(
            'Unsupported file type, please upload an archive '
            'file {extensions}.'.format(extensions=valid_extensions_string)
        )
    def clean_source(self):
        """Check extension, then open the archive to verify its integrity."""
        source = self.cleaned_data.get('source')
        if source:
            # Ensure the file type is one we support.
            if not source.name.endswith(VALID_SOURCE_EXTENSIONS):
                raise forms.ValidationError(self.get_invalid_source_file_type_message())
            # Check inside to see if the file extension matches the content.
            try:
                if source.name.endswith('.zip'):
                    # For zip files, opening them though SafeZip() checks that
                    # we can accept the file and testzip() on top of that
                    # returns None if there are no broken CRCs.
                    zip_file = SafeZip(source)
                    if zip_file.zip_file.testzip() is not None:
                        raise zipfile.BadZipFile()
                elif source.name.endswith(('.tar.gz', '.tar.bz2', '.tgz')):
                    # For tar files, opening them through SafeTar.open() checks
                    # that we can accept it.
                    mode = 'r:bz2' if source.name.endswith('bz2') else 'r:gz'
                    with SafeTar.open(mode=mode, fileobj=source):
                        pass
                else:
                    raise forms.ValidationError(
                        self.get_invalid_source_file_type_message()
                    )
            except (zipfile.BadZipFile, tarfile.ReadError, OSError, EOFError):
                raise forms.ValidationError(gettext('Invalid or broken archive.'))
        return source
class SourceFileInput(forms.widgets.ClearableFileInput):
    """
    ClearableFileInput variant with a custom link URL and text for the
    initial data. A custom template is used because django's default is not
    flexible enough for our needs.
    """
    initial_text = _('View current')
    template_name = 'devhub/addons/includes/source_file_input.html'
    def get_context(self, name, value, attrs):
        # Expose a download link when an already-saved source file is shown.
        ctx = super().get_context(name, value, attrs)
        if value and hasattr(value, 'instance'):
            ctx['download_url'] = reverse('downloads.source', args=(value.instance.pk,))
        return ctx
class VersionForm(WithSourceMixin, forms.ModelForm):
    """Edit form for an existing version's notes and source archive."""
    release_notes = TransField(widget=TransTextarea(), required=False)
    approval_notes = forms.CharField(
        widget=forms.Textarea(attrs={'rows': 4}), required=False
    )
    source = forms.FileField(required=False, widget=SourceFileInput)
    class Meta:
        model = Version
        fields = (
            'release_notes',
            'approval_notes',
            'source',
        )
    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        # Once a version has been human-reviewed (and isn't pending
        # rejection), its source archive can no longer be replaced.
        if (
            self.instance.has_been_human_reviewed
            and not self.instance.pending_rejection
        ):
            self.fields['source'].disabled = True
class AppVersionChoiceField(forms.ModelChoiceField):
    """ModelChoiceField labelling each AppVersion by its version string."""
    def label_from_instance(self, obj):
        return obj.version
class CompatForm(forms.ModelForm):
    """Per-application compatibility range (min/max AppVersion) form."""
    application = forms.TypedChoiceField(
        choices=amo.APPS_CHOICES, coerce=int, widget=forms.HiddenInput
    )
    # Querysets are narrowed to the bound application in __init__.
    min = AppVersionChoiceField(AppVersion.objects.none())
    max = AppVersionChoiceField(AppVersion.objects.none())
    class Meta:
        model = ApplicationsVersions
        fields = ('application', 'min', 'max')
    def __init__(self, *args, **kwargs):
        # 'version' should always be passed as a kwarg to this form. If it's
        # absent, it probably means form_kwargs={'version': version} is missing
        # from the instantiation of the formset.
        super().__init__(*args, **kwargs)
        if self.initial:
            app = self.initial['application']
        else:
            app = self.data[self.add_prefix('application')]
        self.app = amo.APPS_ALL[int(app)]
        qs = AppVersion.objects.filter(application=app).order_by('version_int')
        # 'min' can't be a wildcard ('*') version.
        self.fields['min'].queryset = qs.filter(~Q(version__contains='*'))
        self.fields['max'].queryset = qs.all()
    def clean(self):
        """Ensure min <= max for the selected range."""
        min_ = self.cleaned_data.get('min')
        max_ = self.cleaned_data.get('max')
        if not (min_ and max_ and min_.version_int <= max_.version_int):
            raise forms.ValidationError(gettext('Invalid version range.'))
        return self.cleaned_data
class BaseCompatFormSet(BaseModelFormSet):
    """Formset showing one CompatForm per supported application, forcing
    empty extra forms for apps the add-on doesn't cover yet."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # We always want a form for each app, so force extras for apps
        # the add-on does not already have.
        version = self.form_kwargs.pop('version')
        static_theme = version and version.addon.type == amo.ADDON_STATICTHEME
        available_apps = amo.APP_USAGE
        self.can_delete = not static_theme  # No tinkering with apps please.
        # Only display the apps we care about, if somehow obsolete apps were
        # recorded before.
        self.queryset = self.queryset.filter(
            application__in=[a.id for a in available_apps]
        )
        initial_apps = self.queryset.values_list('application', flat=True)
        # Existing compatibility rows first, then blank entries for the
        # remaining apps.
        self.initial = [
            {
                'application': appver.application,
                'min': appver.min.pk,
                'max': appver.max.pk,
            }
            for appver in self.queryset
        ] + [
            {'application': app.id}
            for app in available_apps
            if app.id not in initial_apps
        ]
        self.extra = (
            max(len(available_apps) - len(self.forms), 0) if not static_theme else 0
        )
        # After these changes, the forms need to be rebuilt. `forms`
        # is a cached property, so we delete the existing cache and
        # ask for a new one to be built.
        if hasattr(self, 'forms'):
            del self.forms
        self.forms  # noqa: B018 — touch the property to rebuild the cache.
    def clean(self):
        """Require at least one surviving (non-deleted) application row."""
        if any(self.errors):
            return
        apps = list(
            filter(
                None,
                [
                    f.cleaned_data
                    for f in self.forms
                    if not f.cleaned_data.get('DELETE', False)
                ],
            )
        )
        if not apps:
            # At this point, we're raising a global error and re-displaying the
            # applications that were present before. We don't want to keep the
            # hidden delete fields in the data attribute, cause that's used to
            # populate initial data for all forms, and would therefore make
            # those delete fields active again.
            self.data = {
                k: v for k, v in self.data.items() if not k.endswith('-DELETE')
            }
            for form in self.forms:
                form.data = self.data
            raise forms.ValidationError(
                gettext('Need at least one compatible application.')
            )
# Formset of per-application compatibility ranges for a version.
CompatFormSet = modelformset_factory(
    ApplicationsVersions,
    formset=BaseCompatFormSet,
    form=CompatForm,
    can_delete=True,
    extra=0,
)
class CompatAppSelectWidget(forms.CheckboxSelectMultiple):
    """Checkbox widget whose options carry the short name of their app."""
    option_template_name = 'devhub/forms/widgets/compat_app_input_option.html'
    def create_option(
        self, name, value, label, selected, index, subindex=None, attrs=None
    ):
        option = super().create_option(
            name=name,
            value=value,
            label=label,
            selected=selected,
            index=index,
            subindex=subindex,
            attrs=attrs,
        )
        # Inject the short application name for easier styling.
        app_id = int(option['value'])
        option['compat_app_short'] = amo.APPS_ALL[app_id].short
        return option
class CheckThrottlesMixin:
    """Mixin re-using the signing API's upload throttling inside forms."""
    def check_throttles(self, request):
        """
        Check if request should be throttled by calling the signing API
        throttling method.
        Raises ValidationError if the request is throttled.
        """
        from olympia.signing.views import VersionView  # circular import
        view = VersionView()
        try:
            view.check_throttles(request)
        except Throttled:
            # Translate the DRF throttle into a form error.
            raise forms.ValidationError(
                _(
                    'You have submitted too many uploads recently. '
                    'Please try again after some time.'
                )
            )
class NewUploadForm(CheckThrottlesMixin, forms.Form):
    """Form validating a new add-on/version file upload: ownership,
    validation results, blocklist, duplicate versions and app choices."""
    upload = forms.ModelChoiceField(
        widget=forms.HiddenInput,
        queryset=FileUpload.objects,
        to_field_name='uuid',
        error_messages={
            'invalid_choice': _(
                'There was an error with your upload. Please try again.'
            )
        },
    )
    admin_override_validation = forms.BooleanField(
        required=False, label=_('Override failed validation')
    )
    compatible_apps = forms.TypedMultipleChoiceField(
        choices=amo.APPS_CHOICES,
        # Pre-select only Desktop Firefox, most of the times developers
        # don't develop their WebExtensions for Android.
        # See this GitHub comment: https://bit.ly/2QaMicU
        initial=[amo.FIREFOX.id],
        coerce=int,
        widget=CompatAppSelectWidget(),
        error_messages={'required': _('Need to select at least one application.')},
    )
    def __init__(self, *args, **kw):
        self.request = kw.pop('request')
        self.addon = kw.pop('addon', None)
        super().__init__(*args, **kw)
        # Preselect compatible apps based on the current version
        if self.addon and self.addon.current_version:
            # Fetch list of applications freshly from the database to not
            # rely on potentially outdated data since `addon.compatible_apps`
            # is a cached property
            compat_apps = list(
                self.addon.current_version.apps.values_list('application', flat=True)
            )
            self.fields['compatible_apps'].initial = compat_apps
    def _clean_upload(self):
        """Reject uploads that aren't the user's own, failed validation or
        timed out — unless a reviews admin overrides the failure."""
        own_upload = self.cleaned_data['upload'].user == self.request.user
        if (
            not own_upload
            or not self.cleaned_data['upload'].valid
            or self.cleaned_data['upload'].validation_timeout
        ) and not (
            self.cleaned_data['admin_override_validation']
            and acl.action_allowed_for(self.request.user, amo.permissions.REVIEWS_ADMIN)
        ):
            raise forms.ValidationError(
                gettext('There was an error with your upload. Please try again.')
            )
    def check_blocklist(self, guid, version_string):
        # check the guid/version isn't in the addon blocklist
        block = Block.objects.filter(guid=guid).first()
        if block and block.is_version_blocked(version_string):
            msg = escape(
                gettext(
                    'Version {version} matches {block_link} for this add-on. '
                    'You can contact {amo_admins} for additional information.'
                )
            )
            formatted_msg = DoubleSafe(
                msg.format(
                    version=version_string,
                    block_link=format_html(
                        '<a href="{}">{}</a>',
                        reverse('blocklist.block', args=[guid]),
                        gettext('a blocklist entry'),
                    ),
                    amo_admins=(
                        '<a href="mailto:amo-admins@mozilla.com">AMO Admins</a>'
                    ),
                )
            )
            raise forms.ValidationError(formatted_msg)
    def check_for_existing_versions(self, version_string):
        # Make sure we don't already have this version.
        existing_versions = Version.unfiltered.filter(
            addon=self.addon, version=version_string
        )
        if existing_versions.exists():
            version = existing_versions[0]
            if version.deleted:
                msg = gettext('Version {version} was uploaded before and deleted.')
            elif version.file.status == amo.STATUS_AWAITING_REVIEW:
                # Offer a link to continue with the pending upload instead.
                next_url = reverse(
                    'devhub.submit.version.details',
                    args=[self.addon.slug, version.pk],
                )
                msg = DoubleSafe(
                    '%s <a href="%s">%s</a>'
                    % (
                        gettext('Version {version} already exists.'),
                        next_url,
                        gettext('Continue with existing upload instead?'),
                    )
                )
            else:
                msg = gettext('Version {version} already exists.')
            raise forms.ValidationError(msg.format(version=version))
    def clean(self):
        """Run the whole validation pipeline: throttle, upload ownership,
        manifest parsing, blocklist and version-number checks."""
        self.check_throttles(self.request)
        if not self.errors:
            self._clean_upload()
            parsed_data = parse_addon(
                self.cleaned_data['upload'], self.addon, user=self.request.user
            )
            self.check_blocklist(
                self.addon.guid if self.addon else parsed_data.get('guid'),
                parsed_data.get('version'),
            )
            if self.addon:
                self.check_for_existing_versions(parsed_data.get('version'))
                if self.cleaned_data['upload'].channel == amo.CHANNEL_LISTED:
                    # Listed uploads must be newer than the latest signed
                    # listed version.
                    if error_message := (
                        validate_version_number_is_gt_latest_signed_listed_version(
                            self.addon, parsed_data.get('version')
                        )
                    ):
                        raise forms.ValidationError(error_message)
            self.cleaned_data['parsed_data'] = parsed_data
        return self.cleaned_data
class SourceForm(WithSourceMixin, forms.ModelForm):
    """Submission-flow form asking whether source code is needed and
    validating the uploaded archive if so."""
    source = forms.FileField(required=False, widget=SourceFileInput)
    has_source = forms.ChoiceField(
        choices=(('yes', _('Yes')), ('no', _('No'))), required=True, widget=RadioSelect
    )
    class Meta:
        model = Version
        fields = ('source',)
    def __init__(self, *args, **kwargs):
        self.request = kwargs.pop('request')
        super().__init__(*args, **kwargs)
    def clean_source(self):
        """Cross-check the has_source answer against the uploaded file."""
        source = self.cleaned_data.get('source')
        has_source = self.data.get('has_source')  # Not cleaned yet.
        if has_source == 'yes' and not source:
            raise forms.ValidationError(gettext('You have not uploaded a source file.'))
        elif has_source == 'no' and source:
            raise forms.ValidationError(
                gettext('Source file uploaded but you indicated no source was needed.')
            )
        # At this point we know we can proceed with the actual archive
        # validation.
        return super().clean_source()
class DescribeForm(AddonFormBase):
    """Listing-details form (name, slug, summary, description, support)."""
    name = TransField(max_length=50)
    slug = forms.CharField(max_length=30)
    summary = TransField(widget=TransTextarea(attrs={'rows': 4}), max_length=250)
    description = TransField(widget=TransTextarea(attrs={'rows': 6}), min_length=10)
    is_experimental = forms.BooleanField(required=False)
    requires_payment = forms.BooleanField(required=False)
    support_url = TransField.adapt(HttpHttpsOnlyURLField)(required=False)
    support_email = TransField.adapt(forms.EmailField)(required=False)
    class Meta:
        model = Addon
        fields = (
            'name',
            'slug',
            'summary',
            'description',
            'is_experimental',
            'support_url',
            'support_email',
            'requires_payment',
        )
    def __init__(self, *args, **kw):
        super().__init__(*args, **kw)
        # Without the content-optimization waffle (or for non-extensions),
        # the description is optional and its min-length is dropped.
        content_waffle = waffle.switch_is_active('content-optimization')
        if not content_waffle or self.instance.type != amo.ADDON_EXTENSION:
            description = self.fields['description']
            description.min_length = None
            description.widget.attrs.pop('minlength', None)
            description.validators = [
                validator
                for validator in description.validators
                if not isinstance(validator, MinLengthValidator)
            ]
            description.required = False
class CombinedNameSummaryCleanMixin:
    """Mixin enforcing that name + summary together stay under MAX_LENGTH
    characters in every locale, optionally auto-cropping non-default
    locales instead of erroring."""
    MAX_LENGTH = 70
    def __init__(self, *args, **kw):
        self.should_auto_crop = kw.pop('should_auto_crop', False)
        super().__init__(*args, **kw)
        # We need the values for the template but not the MaxLengthValidators
        self.fields['name'].max_length = (
            self.MAX_LENGTH - self.fields['summary'].min_length
        )
        self.fields['summary'].max_length = (
            self.MAX_LENGTH - self.fields['name'].min_length
        )
    def clean(self):
        """Check the combined length per locale; error in the default
        locale, crop (when enabled) in the others."""
        message = _(
            'Ensure name and summary combined are at most '
            '{limit_value} characters (they have {show_value}).'
        )
        super().clean()
        # Union of all locales that have either a name or a summary.
        name_summary_locales = set(
            list(self.cleaned_data.get('name', {}).keys())
            + list(self.cleaned_data.get('summary', {}).keys())
        )
        default_locale = self.instance.default_locale.lower()
        name_values = self.cleaned_data.get('name') or {}
        name_default = name_values.get(default_locale) or ''
        summary_values = self.cleaned_data.get('summary') or {}
        summary_default = summary_values.get(default_locale) or ''
        for locale in name_summary_locales:
            # Missing translations fall back to the default locale's value.
            val_len = len(
                name_values.get(locale, name_default)
                + summary_values.get(locale, summary_default)
            )
            if val_len > self.MAX_LENGTH:
                if locale == default_locale:
                    # only error in default locale.
                    formatted_message = message.format(
                        limit_value=self.MAX_LENGTH, show_value=val_len
                    )
                    self.add_error(
                        'name',
                        LocaleErrorMessage(message=formatted_message, locale=locale),
                    )
                elif self.should_auto_crop:
                    # otherwise we need to shorten the summary (and or name?)
                    if locale in name_values:
                        # if only default summary need to shorten name instead.
                        max_name_length = (
                            self.fields['name'].max_length
                            if locale in summary_values
                            else self.MAX_LENGTH - len(summary_default)
                        )
                        name = name_values[locale][:max_name_length]
                        name_length = len(name)
                        self.cleaned_data.setdefault('name', {})[locale] = name
                    else:
                        name_length = len(name_default)
                    if locale in summary_values:
                        max_summary_length = self.MAX_LENGTH - name_length
                        self.cleaned_data.setdefault('summary', {})[
                            locale
                        ] = summary_values[locale][:max_summary_length]
        return self.cleaned_data
class DescribeFormContentOptimization(CombinedNameSummaryCleanMixin, DescribeForm):
    """DescribeForm with the combined name+summary length constraint."""
    name = TransField(min_length=2)
    summary = TransField(min_length=2)
class DescribeFormUnlisted(AddonFormBase):
    """Listing-details form for unlisted add-ons (no support fields)."""
    name = TransField(max_length=50)
    slug = forms.CharField(max_length=30)
    summary = TransField(widget=TransTextarea(attrs={'rows': 4}), max_length=250)
    description = TransField(widget=TransTextarea(attrs={'rows': 4}), required=False)
    class Meta:
        model = Addon
        fields = ('name', 'slug', 'summary', 'description')
class DescribeFormUnlistedContentOptimization(
    CombinedNameSummaryCleanMixin, DescribeFormUnlisted
):
    """Unlisted describe form with the combined name+summary constraint."""
    name = TransField(max_length=68, min_length=2)
    summary = TransField(max_length=68, min_length=2)
class PreviewForm(forms.ModelForm):
    """Row form for a single add-on preview image (caption + upload)."""
    caption = TransField(widget=TransTextarea, required=False)
    file_upload = forms.FileField(required=False)
    # Hash identifying a previously-uploaded temp preview file.
    upload_hash = forms.CharField(required=False)
    def save(self, addon, commit=True):
        """Save (or delete) the preview; queue resizing for new uploads."""
        if self.cleaned_data:
            self.instance.addon = addon
            if self.cleaned_data.get('DELETE'):
                # Existing preview.
                if self.instance.id:
                    self.instance.delete()
                # User has no desire to save this preview.
                return
            super().save(commit=commit)
            if self.cleaned_data['upload_hash']:
                upload_hash = self.cleaned_data['upload_hash']
                upload_path = os.path.join(settings.TMP_PATH, 'preview', upload_hash)
                addons_tasks.resize_preview.delay(
                    upload_path,
                    self.instance.pk,
                    set_modified_on=self.instance.serializable_reference(),
                )
    class Meta:
        model = Preview
        fields = ('caption', 'file_upload', 'upload_hash', 'id', 'position')
class BasePreviewFormSet(BaseModelFormSet):
    """Preview formset; clean() currently only short-circuits on errors
    (no additional cross-form validation)."""
    def clean(self):
        if any(self.errors):
            return
# Formset for preview images; one extra blank row for new uploads.
PreviewFormSet = modelformset_factory(
    Preview, formset=BasePreviewFormSet, form=PreviewForm, can_delete=True, extra=1
)
class DistributionChoiceForm(forms.Form):
    """Form asking developers whether a version is listed on the site or
    self-distributed.

    The radio labels contain rich HTML and are built lazily so translation
    happens at render time rather than at import time.
    """

    # Gotta keep the format_html call lazy, otherwise these would be evaluated
    # to a string right away and never translated.
    LISTED_LABEL = format_html_lazy(
        _(
            'On this site. <span class="helptext">'
            'Your submission is publicly listed on {site_domain}.</span>'
        ),
        site_domain=settings.DOMAIN,
    )
    UNLISTED_LABEL = format_html_lazy(
        _(
            'On your own. <span class="helptext">'
            'After your submission is signed by Mozilla, you can download the .xpi '
            'file from the Developer Hub and distribute it to your audience. Please '
            'make sure the add-on manifest’s <a {a_attrs}>update_url</a> is provided, '
            'as this is the URL where Firefox finds updates for automatic deployment '
            'to your users.</span>'
        ),
        a_attrs=mark_safe(
            # The trailing space after "noreferrer" is required: without it the
            # two concatenated fragments fuse into `noreferrer"href="...`,
            # yielding a malformed anchor attribute list.
            'target="_blank" rel="noopener noreferrer" '
            f'href="{settings.EXTENSION_WORKSHOP_URL}'
            '/documentation/manage/updating-your-extension/'
            '?utm_source=addons.mozilla.org&utm_medium=referral&utm_content=submission"'
        ),
    )
    channel = forms.ChoiceField(
        choices=[],
        initial='listed',
        widget=forms.RadioSelect(attrs={'class': 'channel'}),
    )

    def __init__(self, *args, **kwargs):
        # Optional `addon` kwarg restricts the available channels.
        self.addon = kwargs.pop('addon', None)
        super().__init__(*args, **kwargs)
        choices = [
            ('listed', mark_safe(self.LISTED_LABEL)),
            ('unlisted', mark_safe(self.UNLISTED_LABEL)),
        ]
        if self.addon and self.addon.disabled_by_user:
            # If the add-on is disabled, 'listed' is not a valid choice,
            # "invisible" add-ons can not upload new listed versions.
            choices.pop(0)
        self.fields['channel'].choices = choices
class AgreementForm(forms.Form):
    """Developer agreement form shown before a user's first submission."""
    distribution_agreement = forms.BooleanField()
    review_policy = forms.BooleanField()
    display_name = forms.CharField(label=_('Display Name'))
    recaptcha = ReCaptchaField(label='')
    def __init__(self, *args, **kwargs):
        # NOTE(review): `request` defaults to None but is dereferenced
        # unconditionally below (self.request.user) — confirm all callers
        # pass a request.
        self.request = kwargs.pop('request', None)
        super().__init__(*args, **kwargs)
        # The captcha field only exists while the waffle switch is on.
        if not waffle.switch_is_active('developer-agreement-captcha'):
            del self.fields['recaptcha']
        if self.request.user.is_authenticated and self.request.user.display_name:
            # Don't bother asking for a display name if there is one already.
            del self.fields['display_name']
        else:
            # If there isn't one... we want to make sure to use the same
            # validators as the model.
            self.fields['display_name'].validators += UserProfile._meta.get_field(
                'display_name'
            ).validators
    def clean(self):
        # Check if user ip or email is not supposed to be allowed to submit.
        checker = RestrictionChecker(request=self.request)
        if not checker.is_submission_allowed(check_dev_agreement=False):
            raise forms.ValidationError(checker.get_error_message())
        return self.cleaned_data
class SingleCategoryForm(forms.Form):
    """Form that lets a developer pick exactly one category for an add-on."""
    category = forms.ChoiceField(widget=forms.RadioSelect)
    def __init__(self, *args, **kw):
        self.addon = kw.pop('addon')
        self.request = kw.pop('request', None)
        # Pre-select the add-on's current (first) category, if any.
        if len(self.addon.all_categories) > 0:
            kw['initial'] = {'category': self.addon.all_categories[0].slug}
        super().__init__(*args, **kw)
        # Sort alphabetically by slug for a stable radio ordering.
        sorted_cats = sorted(
            CATEGORIES_NO_APP[self.addon.type].items(), key=lambda slug_cat: slug_cat[0]
        )
        self.fields['category'].choices = [(slug, c.name) for slug, c in sorted_cats]
    def save(self):
        """Replaces the add-on's categories with the single selected one."""
        category_slug = self.cleaned_data['category']
        # Clear any old categor[y|ies]
        AddonCategory.objects.filter(addon=self.addon).delete()
        # Add new categor[y|ies]
        for app in CATEGORIES.keys():
            category = CATEGORIES[app].get(self.addon.type, {}).get(category_slug, None)
            if category:
                AddonCategory(addon=self.addon, category_id=category.id).save()
        # Remove old, outdated categories cache on the model.
        del self.addon.all_categories
| bsd-3-clause | 6d788ad5034837970082c4100ccc7226 | 35.709655 | 88 | 0.586358 | 4.277468 | false | false | false | false |
opencivicdata/pupa | pupa/ext/ansistrm.py | 5 | 4990 | # flake8: NOQA
#
# Copyright (C) 2010-2012 Vinay Sajip. All rights reserved.
# Licensed under the new BSD license.
#
import ctypes
import logging
import os
class ColorizingStreamHandler(logging.StreamHandler):
    """A StreamHandler that colors log output by record severity.

    On POSIX terminals messages are wrapped in ANSI escape sequences; on
    Windows (``os.name == 'nt'``) the sequences are translated into
    ``SetConsoleTextAttribute`` calls through ctypes. Non-tty streams receive
    the plain, uncolored message.
    """

    # color names to indices
    color_map = {
        'black': 0,
        'red': 1,
        'green': 2,
        'yellow': 3,
        'blue': 4,
        'magenta': 5,
        'cyan': 6,
        'white': 7,
    }
    # levels to (background, foreground, bold/intense)
    if os.name == 'nt':
        level_map = {
            logging.DEBUG: (None, 'blue', True),
            logging.INFO: (None, 'white', False),
            logging.WARNING: (None, 'yellow', True),
            logging.ERROR: (None, 'red', True),
            logging.CRITICAL: ('red', 'white', True),
        }
    else:
        level_map = {
            logging.DEBUG: (None, 'blue', False),
            logging.INFO: (None, 'white', False),
            logging.WARNING: (None, 'yellow', False),
            logging.ERROR: (None, 'red', False),
            logging.CRITICAL: ('red', 'white', True),
        }
    csi = '\x1b['  # ANSI Control Sequence Introducer.
    reset = '\x1b[0m'  # Restores default terminal attributes.

    @property
    def is_tty(self):
        """True if the stream looks like a terminal (always True on Jenkins)."""
        # bluff for Jenkins
        if os.environ.get('JENKINS_URL'):
            return True
        isatty = getattr(self.stream, 'isatty', None)
        return isatty and isatty()

    def emit(self, record):
        """Formats and writes record, colorized only when the stream is a tty."""
        try:
            message = self.format(record)
            stream = self.stream
            if not self.is_tty:
                stream.write(message)
            else:
                self.output_colorized(message)
            stream.write(getattr(self, 'terminator', '\n'))
            self.flush()
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            # Narrowed from a bare `except:` so that other BaseExceptions
            # (e.g. GeneratorExit) propagate instead of being swallowed.
            self.handleError(record)

    if os.name != 'nt':

        def output_colorized(self, message):
            # POSIX terminals interpret ANSI escapes natively; just write.
            self.stream.write(message)
    else:
        import re
        ansi_esc = re.compile(r'\x1b\[((?:\d+)(?:;(?:\d+))*)m')
        # ANSI color index -> Windows console attribute bits.
        nt_color_map = {
            0: 0x00,  # black
            1: 0x04,  # red
            2: 0x02,  # green
            3: 0x06,  # yellow
            4: 0x01,  # blue
            5: 0x05,  # magenta
            6: 0x03,  # cyan
            7: 0x07,  # white
        }

        def output_colorized(self, message):
            # Split the message on ANSI sequences and replay each sequence as
            # a console attribute change on the stdout/stderr handle.
            parts = self.ansi_esc.split(message)
            write = self.stream.write
            h = None
            fd = getattr(self.stream, 'fileno', None)
            if fd is not None:
                fd = fd()
                if fd in (1, 2):  # stdout or stderr
                    h = ctypes.windll.kernel32.GetStdHandle(-10 - fd)
            while parts:
                text = parts.pop(0)
                if text:
                    write(text)
                if parts:
                    params = parts.pop(0)
                    if h is not None:
                        params = [int(p) for p in params.split(';')]
                        color = 0
                        for p in params:
                            if 40 <= p <= 47:
                                color |= self.nt_color_map[p - 40] << 4
                            elif 30 <= p <= 37:
                                color |= self.nt_color_map[p - 30]
                            elif p == 1:
                                color |= 0x08  # foreground intensity on
                            elif p == 0:  # reset to default color
                                color = 0x07
                            else:
                                pass  # error condition ignored
                        ctypes.windll.kernel32.SetConsoleTextAttribute(h,
                                                                       color)

    def colorize(self, message, record):
        """Returns message wrapped in ANSI codes for record's level.

        Messages whose level has no entry in level_map are returned unchanged.
        """
        if record.levelno in self.level_map:
            bg, fg, bold = self.level_map[record.levelno]
            params = []
            if bg in self.color_map:
                params.append(str(self.color_map[bg] + 40))
            if fg in self.color_map:
                params.append(str(self.color_map[fg] + 30))
            if bold:
                params.append('1')
            if params:
                message = ''.join((self.csi, ';'.join(params),
                                   'm', message, self.reset))
        return message

    def format(self, record):
        """Formats record, colorizing only the first line (not tracebacks)."""
        message = logging.StreamHandler.format(self, record)
        if self.is_tty:
            # Don't colorize any traceback
            parts = message.split('\n', 1)
            parts[0] = self.colorize(parts[0], record)
            message = '\n'.join(parts)
        return message
def main():
    """Demo entry point: colorize one record at each standard level."""
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.DEBUG)
    root_logger.addHandler(ColorizingStreamHandler())
    for level_name in ('debug', 'info', 'warning', 'error', 'critical'):
        getattr(logging, level_name)(level_name.upper())
if __name__ == '__main__':
    main()
| bsd-3-clause | f73b67347d7062683a8bf6092341068d | 31.614379 | 77 | 0.456914 | 4.168755 | false | false | false | false |
opencivicdata/pupa | pupa/migrations/0002_auto_20150906_1458.py | 2 | 1290 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.dev20150906080247 on 2015-09-06 14:58
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django: makes on_delete=CASCADE explicit on four
    # ForeignKeys (required from Django 2.0 onward). Do not edit by hand.
    dependencies = [
        ('pupa', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='importobjects',
            name='report',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='imported_objects', to='pupa.RunPlan'),
        ),
        migrations.AlterField(
            model_name='runplan',
            name='jurisdiction',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='runs', to='core.Jurisdiction'),
        ),
        migrations.AlterField(
            model_name='scrapeobjects',
            name='report',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='scraped_objects', to='pupa.ScrapeReport'),
        ),
        migrations.AlterField(
            model_name='scrapereport',
            name='plan',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='scrapers', to='pupa.RunPlan'),
        ),
    ]
| bsd-3-clause | b4eef90a7eab2f4e4f01f837da236bfa | 34.833333 | 137 | 0.625581 | 3.932927 | false | false | false | false |
google/deepvariant | deepvariant/tf_utils.py | 1 | 14992 | # Copyright 2017 Google LLC.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Utility functions for DeepVariant.
Started with a collection of utilities for working with the TF models. Now this
file includes broader utilities we use in DeepVariant.
"""
import enum
import numpy as np
import six
import tensorflow as tf
from deepvariant.protos import deepvariant_pb2
from third_party.nucleus.io import sharded_file_utils
from third_party.nucleus.io import tfrecord
from third_party.nucleus.protos import variants_pb2
from third_party.nucleus.util import ranges
from third_party.nucleus.util import variant_utils
from tensorflow.core.example import example_pb2
from typing import Optional
# Convert strings up to this length, then clip. We picked a number that
# was less than 1K, with a bit of extra space for the length element,
# to give enough space without overflowing to a larger multiple of 128.
STRING_TO_INT_MAX_CONTENTS_LEN = 1020
# This is the length of the resulting buffer (including the length entry).
STRING_TO_INT_BUFFER_LENGTH = STRING_TO_INT_MAX_CONTENTS_LEN + 1
class EncodedVariantType(enum.Enum):
  """Integer codes used to store a variant's type inside a tf.Example.

  Fixed-length int64 features are far friendlier to TPUs than encoded Variant
  protos or free-form strings, so each example carries its variant type as one
  of these raw int64 values. This enum maps those values back to readable
  names.
  """

  UNKNOWN = 0  # A variant of unknown type.
  SNP = 1  # The variant is a SNP.
  INDEL = 2  # The variant is an indel.
def encoded_variant_type(variant):
  """Classifies variant as SNP, INDEL, or UNKNOWN.

  Examines the reference/alternate alleles of variant and returns the
  EncodedVariantType that best describes it. For example, a variant with
  `reference_bases = "A"` and `alternate_bases = ["C"]` is a SNP.

  Args:
    variant: nucleus.Variant proto. The variant whose EncodedVariantType we
      want to get.

  Returns:
    EncodedVariantType enum value.
  """
  if variant_utils.is_snp(variant):
    return EncodedVariantType.SNP
  if variant_utils.is_indel(variant):
    return EncodedVariantType.INDEL
  return EncodedVariantType.UNKNOWN
def example_variant_type(example):
  """Gets the variant_type field from example as an int64.

  The original docstring incorrectly described this accessor as returning the
  locus string; it actually returns the EncodedVariantType value stored in the
  'variant_type' feature.
  """
  return example.features.feature['variant_type'].int64_list.value[0]
def example_locus(example):
  """Returns the locus feature of example as a bytes literal."""
  locus_feature = example.features.feature['locus']
  return locus_feature.bytes_list.value[0]
def example_alt_alleles_indices(example):
  """Gets an iterable of the alt allele indices in example."""
  raw = example.features.feature['alt_allele_indices/encoded'].bytes_list.value[0]
  decoded = deepvariant_pb2.CallVariantsOutput.AltAlleleIndices.FromString(raw)
  return decoded.indices
def example_alt_alleles(example, variant=None):
  """Returns the list of alt allele strings encoded in example.

  Args:
    example: a tf.Example proto.
    variant: optional pre-decoded Variant proto; decoded from example when
      not supplied.
  """
  if not variant:
    variant = example_variant(example)
  indices = example_alt_alleles_indices(example)
  return [variant.alternate_bases[i] for i in indices]
def example_encoded_image(example):
  """Returns the encoded image feature of example as bytes."""
  image_feature = example.features.feature['image/encoded']
  return image_feature.bytes_list.value[0]
def example_variant(example):
  """Decodes and returns the Variant proto stored in example."""
  raw = example.features.feature['variant/encoded'].bytes_list.value[0]
  return variants_pb2.Variant.FromString(raw)
def example_label(example):
  """Gets the label field from example as an int.

  The original docstring incorrectly said "as a string"; the value is
  explicitly converted to int.
  """
  return int(example.features.feature['label'].int64_list.value[0])
def example_image_shape(example):
  """Returns the image shape of example as three int64 values.

  Raises:
    ValueError: if the image/shape feature does not have exactly 3 entries.
  """
  shape = example.features.feature['image/shape'].int64_list.value
  if len(shape) != 3:
    raise ValueError('Invalid image/shape: we expect to find an image/shape '
                     'field with length 3.')
  return shape[0:3]
def example_key(example):
  """Builds a human-readable key (chrom:pos:ref->alt/alt) for example."""
  variant = example_variant(example)
  joined_alts = '/'.join(example_alt_alleles(example))
  return '{}:{}:{}->{}'.format(variant.reference_name, variant.start + 1,
                               variant.reference_bases, joined_alts)
def example_set_label(example, numeric_label):
  """Overwrites the label feature of example with numeric_label.

  Args:
    example: a tf.Example proto.
    numeric_label: A numeric (int64 compatible) label for example.
  """
  label_feature = example.features.feature['label']
  label_feature.int64_list.value[:] = [numeric_label]
def example_set_variant(example, variant):
  """Stores variant (serialized) into example's variant/encoded feature.

  Args:
    example: a tf.Example proto.
    variant: third_party.nucleus.protos.Variant protobuf containing information
      about a candidate variant call.
  """
  serialized = variant.SerializeToString()
  example.features.feature['variant/encoded'].bytes_list.value[:] = [serialized]
def example_sequencing_type(example):
  """Returns the sequencing_type feature of example as an int64."""
  sequencing_feature = example.features.feature['sequencing_type']
  return sequencing_feature.int64_list.value[0]
def get_one_example_from_examples_path(source, proto=None):
  """Get the first record from `source`.

  Args:
    source: str. A pattern or a comma-separated list of patterns that represent
      file names.
    proto: A proto class. proto.FromString() will be called on each serialized
      record in path to parse it.

  Returns:
    The first record, or None.

  Raises:
    ValueError: if no files match `source`.
  """
  files = sharded_file_utils.glob_list_sharded_file_patterns(source)
  if not files:
    raise ValueError(
        'Cannot find matching files with the pattern "{}"'.format(source))
  # Some shards may be empty; keep trying files until one yields a record.
  for f in files:
    try:
      return next(tfrecord.read_tfrecords(f, proto=proto))
    except StopIteration:
      # Getting a StopIteration from one next() means source_path is empty.
      # Move on to the next one to try to get one example.
      pass
  return None
def get_shape_from_examples_path(source):
  """Reads one record from source to determine the tensor shape for all."""
  example = get_one_example_from_examples_path(source)
  return example_image_shape(example) if example else None
def _simplify_variant(variant):
  """Returns a new Variant with only the basic fields of variant."""

  def _simplify_variant_call(call):
    """Returns a new VariantCall with the basic fields of call."""
    return variants_pb2.VariantCall(
        call_set_name=call.call_set_name,
        genotype=call.genotype,
        info=dict(call.info))  # dict() is necessary to actually set info.

  return variants_pb2.Variant(
      reference_name=variant.reference_name,
      start=variant.start,
      end=variant.end,
      reference_bases=variant.reference_bases,
      alternate_bases=variant.alternate_bases,
      filter=variant.filter,
      quality=variant.quality,
      # Each call is rebuilt so extra per-call fields are dropped too.
      calls=[_simplify_variant_call(call) for call in variant.calls])
def make_example(variant,
                 alt_alleles,
                 encoded_image,
                 shape,
                 second_image=None,
                 sequencing_type=0):
  """Creates a new tf.Example suitable for use with DeepVariant.

  Args:
    variant: third_party.nucleus.protos.Variant protobuf containing information
      about a candidate variant call.
    alt_alleles: A set of strings. Indicates the alternate alleles used as "alt"
      when constructing the image.
    encoded_image: a Tensor of type tf.string. Should contain an image encoding
      the reference and read data supporting variant. The encoding should be
      consistent with the image_format argument.
    shape: a list of (width, height, channel).
    second_image: a Tensor of type tf.string or None. Contains second image that
      encodes read data from another DNA sample. Must satisfy the same
      requirements as encoded_image.
    sequencing_type: int. The sequencing type of the input image.

  Returns:
    A tf.Example proto containing the standard DeepVariant features.
  """
  example = example_pb2.Example()
  features = example.features
  # Locus is stored as a literal "chrom:start-end" string (bytes).
  features.feature['locus'].bytes_list.value.append(
      six.b(
          ranges.to_literal(
              ranges.make_range(variant.reference_name, variant.start,
                                variant.end))))
  example_set_variant(example, variant)
  # Variant type is stored as a raw int64 (see EncodedVariantType).
  variant_type = encoded_variant_type(variant).value
  features.feature['variant_type'].int64_list.value.append(variant_type)
  all_alts = list(variant.alternate_bases)
  # Indices are sorted so the example encodes a canonical ordering.
  alt_indices = sorted(all_alts.index(alt) for alt in alt_alleles)
  features.feature['alt_allele_indices/encoded'].bytes_list.value.append(
      deepvariant_pb2.CallVariantsOutput.AltAlleleIndices(
          indices=alt_indices).SerializeToString())
  features.feature['image/encoded'].bytes_list.value.append(encoded_image)
  features.feature['image/shape'].int64_list.value.extend(shape)
  if second_image is not None:
    features.feature['second_image/encoded'].bytes_list.value.append(
        six.b(second_image))
    features.feature['second_image/shape'].int64_list.value.extend(shape)
  features.feature['sequencing_type'].int64_list.value.append(sequencing_type)
  return example
def model_shapes(checkpoint_path, variables_to_get=None):
  """Returns the shape of each tensor in the model at checkpoint_path.

  Args:
    checkpoint_path: string. The path to a tensorflow checkpoint containing a
      model whose tensor shapes we want to get.
    variables_to_get: options, list of strings. If provided, only returns the
      shapes of tensors in variables whose name is present in this list. If
      None, the default, gets all of the tensors. A KeyError will be raised if
      any variable name in variables_to_get isn't present in the checkpointed
      model.

  Returns:
    A dictionary mapping variable names [string] to tensor shapes [tuple].
  """
  reader = tf.compat.v1.train.NewCheckpointReader(checkpoint_path)
  var_to_shape_map = reader.get_variable_to_shape_map()
  keys = variables_to_get if variables_to_get else var_to_shape_map.keys()
  # The dict lookup below is what raises KeyError for unknown variable names.
  return {key: tuple(var_to_shape_map[key]) for key in keys}
def model_num_classes(checkpoint_path, n_classes_model_variable):
  """Returns the number of classes in the checkpoint, or None.

  Args:
    checkpoint_path: path to a TensorFlow checkpoint; falsy values yield None.
    n_classes_model_variable: name of the variable whose last dimension is the
      class count.
  """
  if not checkpoint_path:
    return None
  # Figure out how many classes this inception model was trained to predict.
  try:
    shapes = model_shapes(checkpoint_path, [n_classes_model_variable])
  except KeyError:
    return None
  shape = shapes.get(n_classes_model_variable)
  return shape[-1] if shape is not None else None
def string_to_int_tensor(x):
  """Graph operations decode a string into a fixed-size tensor of ints.

  The output layout is [length, byte0, byte1, ..., pad...] with total size
  STRING_TO_INT_BUFFER_LENGTH; int_tensor_to_string is the inverse.
  """
  decoded = tf.compat.v1.decode_raw(x, tf.uint8)
  clipped = decoded[:STRING_TO_INT_MAX_CONTENTS_LEN]  # clip to allowed max_len
  shape = tf.shape(input=clipped)
  slen = shape[0]
  # pad to desired max_len
  padded = tf.pad(
      tensor=clipped, paddings=[[0, STRING_TO_INT_MAX_CONTENTS_LEN - slen]])
  casted = tf.cast(padded, tf.int32)
  casted.set_shape([STRING_TO_INT_MAX_CONTENTS_LEN])
  # Prepend the original (clipped) length so decoding can strip the padding.
  return tf.concat([[slen], casted], 0)
def int_tensor_to_string(x):
  """Python operations to encode a tensor of ints into string of bytes.

  Inverse of string_to_int_tensor: x[0] holds the number of meaningful bytes
  and x[1:slen + 1] holds the byte values themselves.

  Args:
    x: an integer sequence laid out as produced by string_to_int_tensor.

  Returns:
    The decoded bytes object.
  """
  slen = x[0]
  v = x[1:slen + 1]
  # ndarray.tostring() was deprecated in NumPy 1.19 and removed in NumPy 2.0;
  # tobytes() is the exact, long-available replacement.
  return np.array(v, dtype=np.uint8).tobytes()
def compression_type_of_files(files):
  """Return GZIP or None for the compression type of the files."""
  # Note: all() over an empty list is True, so no files also yields 'GZIP'.
  every_file_gzipped = all(f.endswith('.gz') for f in files)
  return 'GZIP' if every_file_gzipped else None
def tpu_available(sess=None):
  """Return true if a TPU device is available to the default session."""
  if sess is None:
    # No session supplied: create a throwaway one just to list devices.
    init_op = tf.group(tf.compat.v1.global_variables_initializer(),
                       tf.compat.v1.local_variables_initializer())
    with tf.compat.v1.Session() as sess:
      sess.run(init_op)
      devices = sess.list_devices()
  else:
    devices = sess.list_devices()
  return any(dev.device_type == 'TPU' for dev in devices)
def resolve_master(master, tpu_name, tpu_zone, gcp_project):
  """Resolve the master's URL given standard flags.

  Args:
    master: explicit master URL; returned unchanged when not None.
    tpu_name: name of a Cloud TPU to resolve via the cluster resolver.
    tpu_zone: zone passed through to the cluster resolver.
    gcp_project: project passed through to the cluster resolver.

  Returns:
    The master URL for TensorFlow to connect to.
  """
  if master is not None:
    return master
  elif tpu_name is not None:
    return tf.distribute.cluster_resolver.TPUClusterResolver(
        tpu=[tpu_name], zone=tpu_zone, project=gcp_project).get_master()
  else:
    # For k8s TPU we do not have/need tpu_name. See
    # https://cloud.google.com/tpu/docs/kubernetes-engine-setup#tensorflow-code
    return tf.distribute.cluster_resolver.TPUClusterResolver().get_master()
def get_example_info_json_filename(examples_filename: str,
                                   task_id: Optional[int]) -> str:
  """Returns corresponding example_info.json filename for examples_filename."""
  if sharded_file_utils.is_sharded_file_spec(examples_filename):
    # An @shards spec resolves to its first shard: only that shard carries
    # the .example_info.json sidecar.
    assert task_id is not None
    prefix = sharded_file_utils.sharded_filename(examples_filename, task_id)
  else:
    # Non-sharded files, and sharded names already spelled -ddddd-of-ddddd,
    # are used verbatim.
    prefix = examples_filename
  return prefix + '.example_info.json'
| bsd-3-clause | 8116aa151278d53edf0fb0138dc4e9e9 | 37.539846 | 80 | 0.722319 | 3.82449 | false | false | false | false |
google/deepvariant | deepvariant/call_variants_keras.py | 1 | 15917 | # Copyright 2017 Google LLC.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Experimental code for calling variants with a trained DeepVariant TF2/Keras model.
Added in v1.4.0 but not officially supported.
TODO: Write a unit test suite like call_variants_test.py.
"""
import json
import os
from absl import flags
from absl import logging
import numpy as np
import tensorflow as tf
from deepvariant import logging_level
from deepvariant import modeling
from deepvariant import tf_utils
from deepvariant.protos import deepvariant_pb2
from third_party.nucleus.io import sharded_file_utils
from third_party.nucleus.io import tfrecord
from third_party.nucleus.protos import variants_pb2
from third_party.nucleus.util import errors
from third_party.nucleus.util import proto_utils
from third_party.nucleus.util import variant_utils
# This is necessary but not sure why, it might be because we are using the
# same dependies as the original call_variants.py
tf.compat.v2.enable_v2_behavior()
_ALLOW_EXECUTION_HARDWARE = [
'auto', # Default, no validation.
'cpu', # Don't use accelerators, even if available.
'accelerator', # Must be hardware acceleration or an error will be raised.
]
# The number of digits past the decimal point that genotype likelihoods are
# rounded to, for numerical stability.
_GL_PRECISION = 10
# This number is estimated by the following logic:
# For a sample with 10,000,000 examples, if we log every 50,000 examples,
# there will be 200 lines per sample.
_LOG_EVERY_N = 50000
_DEFAULT_INPUT_READ_THREADS = 32
_DEFAULT_PREFETCH_BUFFER_BYTES = 16 * 1000 * 1000
_CLASSES = 3
FLAGS = flags.FLAGS
flags.DEFINE_string(
'examples', None,
'Required. tf.Example protos containing DeepVariant candidate variants in '
'TFRecord format, as emitted by make_examples. Can be a comma-separated '
'list of files, and the file names can contain wildcard characters.')
flags.DEFINE_string(
'outfile', None,
'Required. Destination path where we will write output candidate variants '
'with additional likelihood information in TFRecord format of '
'CallVariantsOutput protos.')
flags.DEFINE_string(
'checkpoint', None,
'Required. Path to the TensorFlow model checkpoint to use to evaluate '
'candidate variant calls.')
flags.DEFINE_integer(
'batch_size', 512,
'Number of candidate variant tensors to batch together during inference. '
'Larger batches use more memory but are more computational efficient.')
flags.DEFINE_integer('max_batches', None,
'Max. batches to evaluate. Defaults to all.')
flags.DEFINE_integer('num_readers', 8,
'Number of parallel readers to create for examples.')
flags.DEFINE_string('model_name', 'inception_v3',
'The name of the model architecture of --checkpoint.')
flags.DEFINE_boolean('include_debug_info', False,
'If true, include extra debug info in the output.')
flags.DEFINE_boolean(
'debugging_true_label_mode', False,
'If true, read the true labels from examples and add to '
'output. Note that the program will crash if the input '
'examples do not have the label field. '
'When true, this will also fill everything when '
'--include_debug_info is set to true.')
flags.DEFINE_string(
'execution_hardware', 'auto',
'When in cpu mode, call_variants will not place any ops on the GPU, even '
'if one is available. In accelerator mode call_variants validates that at '
'least some hardware accelerator (GPU/TPU) was available for us. This '
'option is primarily for QA purposes to allow users to validate their '
'accelerator environment is correctly configured. In auto mode, the '
'default, op placement is entirely left up to TensorFlow. In tpu mode, '
'use and require TPU.')
flags.DEFINE_string(
'config_string', None,
'String representation of a tf.ConfigProto message, with comma-separated '
'key: value pairs, such as "allow_soft_placement: True". The value can '
'itself be another message, such as '
'"gpu_options: {per_process_gpu_memory_fraction: 0.5}".')
flags.DEFINE_string(
'kmp_blocktime', '0',
'Value to set the KMP_BLOCKTIME environment variable to for efficient MKL '
'inference. See https://www.tensorflow.org/performance/performance_guide '
'for more information. The default value is 0, which provides the best '
'performance in our tests. Set this flag to "" to not set the variable.')
class ExecutionHardwareError(Exception):
  """Raised when the --execution_hardware constraint cannot be satisfied."""
  pass
def round_gls(gls, precision=None):
  """Returns genotype likelihoods rounded to the desired precision level.

  After rounding every entry, the entry holding the (first) smallest
  likelihood absorbs the rounding error so the result still sums to one,
  clamped at zero for numerical stability.

  Args:
    gls: A list of floats. The input genotype likelihoods at any precision.
    precision: Positive int. The number of places past the decimal point to
      round to. If None, no rounding is performed.

  Returns:
    A list of floats rounded to the desired precision.

  Raises:
    ValueError: The input gls do not sum to nearly 1.
  """
  total = sum(gls)
  if abs(total - 1) > 1e-6:
    raise ValueError(
        'Invalid genotype likelihoods do not sum to one: sum({}) = {}'.format(
            gls, total))
  if precision is None:
    return gls

  # Index of the first smallest likelihood; it absorbs the rounding error.
  min_ix = min(range(len(gls)), key=lambda ix: gls[ix])
  rounded_gls = [round(gl, precision) for gl in gls]
  other_mass = sum(rounded_gls[:min_ix] + rounded_gls[min_ix + 1:])
  rounded_gls[min_ix] = max(0.0, round(1 - other_mass, precision))
  return rounded_gls
def write_variant_call(writer, prediction):
  """Write the variant call based on prediction.

  Args:
    writer: A object with a write() function that will be called for each
      encoded_variant and genotype likelihoods.
    prediction: A dict with 'variant', 'alt_allele_indices' and
      'probabilities' keys ([3] floats: p00, p0x, pxx for some alt allele x),
      plus optional 'label', 'logits' and 'prelogits' debug entries.

  Returns:
    The return status from writer.
  """
  encoded_variant = prediction['variant']
  encoded_alt_allele_indices = prediction['alt_allele_indices']
  # GLs are rounded to _GL_PRECISION digits for numerical stability.
  rounded_gls = round_gls(prediction['probabilities'], precision=_GL_PRECISION)
  # Write it out.
  true_labels = prediction['label'] if FLAGS.debugging_true_label_mode else None
  cvo = _create_cvo_proto(
      encoded_variant,
      rounded_gls,
      encoded_alt_allele_indices,
      true_labels,
      logits=prediction.get('logits'),
      prelogits=prediction.get('prelogits'))
  return writer.write(cvo)
def write_call_variants(predictions: np.ndarray,
                        variant_alt_allele_dataset: tf.data.Dataset,
                        output_file: str):
  """Pairs each prediction with its variant/alt-alleles and writes CVO records.

  Walks variant_alt_allele_dataset batch by batch, lining element i of batch
  idx up with predictions[idx * FLAGS.batch_size + i], and writes one
  CallVariantsOutput per pair to output_file until predictions run out.
  """
  writer = tfrecord.Writer(output_file)
  with writer:
    prediction_idx = 0
    for idx, batch in variant_alt_allele_dataset.enumerate():
      # NOTE(review): on later iterations this checks the index left over
      # from the previous batch's inner loop — confirm that is intended.
      if prediction_idx >= len(predictions):
        break
      curr_batch_variant, curr_batch_alt_allele_indices = batch[0], batch[1]
      for i in range(len(curr_batch_variant)):
        variant = curr_batch_variant[i]
        alt_allele_indices = curr_batch_alt_allele_indices[i]
        # Calculate the position of the corresponding prediction
        prediction_idx = (idx * FLAGS.batch_size) + i
        # NOTE(review): this logs every single prediction; consider
        # log_every_n to avoid huge logs on large callsets.
        logging.info('batch idx: %d, position in batch: %d, prediction_idx: %d',
                     idx, i, prediction_idx)
        if prediction_idx >= len(predictions):
          logging.info(
              'Prediction idx %d > num predictions %d. '
              'Probably because #steps was capped.', prediction_idx,
              len(predictions))
          break
        probabilities = predictions[prediction_idx]
        pred = {
            'probabilities': probabilities,
            'variant': variant.numpy(),
            'alt_allele_indices': alt_allele_indices.numpy()
        }
        logging.info(pred)
        write_variant_call(writer, pred)
def _create_cvo_proto(encoded_variant,
                      gls,
                      encoded_alt_allele_indices,
                      true_labels=None,
                      logits=None,
                      prelogits=None):
  """Returns a CallVariantsOutput proto from the relevant input information.

  Args:
    encoded_variant: serialized nucleus Variant proto (bytes).
    gls: genotype likelihoods to store in the output proto.
    encoded_alt_allele_indices: serialized AltAlleleIndices proto (bytes).
    true_labels: optional true label for debug output.
    logits: optional raw logits for debug output.
    prelogits: optional pre-logit activations; expected shape (1, 1, 2048).
  """
  variant = variants_pb2.Variant.FromString(encoded_variant)
  alt_allele_indices = (
      deepvariant_pb2.CallVariantsOutput.AltAlleleIndices.FromString(
          encoded_alt_allele_indices))
  debug_info = None
  # Debug info is populated only when one of the debug flags is on.
  if FLAGS.include_debug_info or FLAGS.debugging_true_label_mode:
    if prelogits is not None:
      assert prelogits.shape == (1, 1, 2048)
      prelogits = prelogits[0][0]
    debug_info = deepvariant_pb2.CallVariantsOutput.DebugInfo(
        has_insertion=variant_utils.has_insertion(variant),
        has_deletion=variant_utils.has_deletion(variant),
        is_snp=variant_utils.is_snp(variant),
        predicted_label=np.argmax(gls),
        true_label=true_labels,
        logits=logits,
        prelogits=prelogits)
  call_variants_output = deepvariant_pb2.CallVariantsOutput(
      variant=variant,
      alt_allele_indices=alt_allele_indices,
      genotype_probabilities=gls,
      debug_info=debug_info)
  return call_variants_output
def get_shape_and_channels_from_json(example_info_json):
  """Reads example shape and channel enums from the sidecar JSON file.

  Args:
    example_info_json: path to the example_info.json written alongside the
      examples TFRecords.

  Returns:
    A (shape, channels) pair, or (None, None) when the file is missing
    (older pipeline versions did not write it).
  """
  if not tf.io.gfile.exists(example_info_json):
    logging.warning(
        'Starting from v1.4.0, we expect %s to '
        'include information for shape and channels.', example_info_json)
    return None, None
  with tf.io.gfile.GFile(example_info_json) as json_file:
    info = json.load(json_file)
  shape = info['shape']
  channels_enum = info['channels']
  logging.info(
      'From %s: '
      'Shape of input examples: %s, '
      'Channels of input examples: %s.', example_info_json, str(shape),
      str(channels_enum))
  return shape, channels_enum
def get_dataset(path, example_shape):
  """Parse TFRecords, do image preprocessing, and return the image dataset for inference and the variant/alt-allele dataset for writing the variant calls.

  Both returned datasets are built from the same TFRecord stream (read
  deterministically: shuffle=False, fixed interleave order), so element i
  of the image dataset corresponds to element i of the variant dataset.
  """
  # Feature spec shared by both parse functions below.
  proto_features = {
      'image/encoded': tf.io.FixedLenFeature((), tf.string),
      'variant/encoded': tf.io.FixedLenFeature((), tf.string),
      'alt_allele_indices/encoded': tf.io.FixedLenFeature((), tf.string)
  }

  def _parse_example_variant_alt_allele(example):
    """Parses a serialized tf.Example."""
    # Keeps the variant and alt-allele protos as opaque bytes; they are
    # only decoded later when writing CallVariantsOutput records.
    parsed_features = tf.io.parse_single_example(
        serialized=example, features=proto_features)
    variant = parsed_features['variant/encoded']
    alt_allele_indices = parsed_features['alt_allele_indices/encoded']
    return variant, alt_allele_indices

  def _parse_example_image(example):
    """Parses a serialized tf.Example."""
    parsed_features = tf.io.parse_single_example(
        serialized=example, features=proto_features)
    image = tf.io.decode_raw(parsed_features['image/encoded'], tf.uint8)
    image = tf.reshape(image, example_shape)
    image = tf.cast(image, tf.float32)
    # Scale pixels the same way the InceptionV3 backbone was trained
    # (maps [0, 255] to [-1, 1]).
    image = tf.keras.applications.inception_v3.preprocess_input(image)
    return image

  # Expand a sharded path spec (e.g. "foo@4" / "foo-?????-of-?????") into
  # the concrete file list, keeping a deterministic order.
  ds = tf.data.TFRecordDataset.list_files(
      sharded_file_utils.normalize_to_sharded_file_pattern(path), shuffle=False)

  def load_dataset(filename):
    # One reader per shard; examples are gzip-compressed TFRecords.
    dataset = tf.data.TFRecordDataset(
        filename,
        buffer_size=_DEFAULT_PREFETCH_BUFFER_BYTES,
        compression_type='GZIP')
    return dataset

  ds = ds.interleave(
      load_dataset,
      cycle_length=_DEFAULT_INPUT_READ_THREADS,
      num_parallel_calls=tf.data.AUTOTUNE)

  # Map before batch so each parse function sees a single serialized
  # example; batching order must match between the two datasets.
  image_ds = ds.map(map_func=_parse_example_image)
  variant_alt_allele_ds = ds.map(map_func=_parse_example_variant_alt_allele)
  image_ds = image_ds.batch(batch_size=FLAGS.batch_size)
  variant_alt_allele_ds = variant_alt_allele_ds.batch(
      batch_size=FLAGS.batch_size)
  return image_ds, variant_alt_allele_ds
def call_variants(examples_filename, checkpoint_path, model, output_file):
  """Main driver of call_variants.

  Args:
    examples_filename: path (possibly sharded) to the input examples.
    checkpoint_path: if not None, a fresh InceptionV3 model is built and
      these weights are loaded into it, replacing `model`.
    model: Keras model used for prediction when checkpoint_path is None.
    output_file: path of the TFRecord file to write CallVariantsOutput to.
  """
  if FLAGS.kmp_blocktime:
    # OpenMP/MKL thread-spin tuning knob; must be set before TF does
    # threaded work.
    os.environ['KMP_BLOCKTIME'] = FLAGS.kmp_blocktime
    logging.vlog(3,
                 'Set KMP_BLOCKTIME to {}'.format(os.environ['KMP_BLOCKTIME']))

  # Read a single TFExample to make sure we're not loading an older version.
  first_example = tf_utils.get_one_example_from_examples_path(examples_filename)
  if first_example is None:
    # Empty input is not an error: emit an empty, well-formed output file.
    logging.warning(
        'Unable to read any records from %s. Output will contain '
        'zero records.', examples_filename)
    tfrecord.write_tfrecords([], output_file)
    return

  # TODO: Check example shape and format and throw a readable
  # error if incorrect
  example_info_json = tf_utils.get_example_info_json_filename(
      examples_filename, 0)
  # Only the shape is needed here; channels info is discarded.  NOTE: may
  # be None when the sidecar JSON is missing (pre-v1.4.0 examples).
  example_shape = get_shape_and_channels_from_json(example_info_json)[0]
  logging.info('Shape of input examples: %s', str(example_shape))

  if checkpoint_path is not None:
    # NOTE(review): the `model` argument is intentionally replaced by a
    # freshly-built InceptionV3 when a checkpoint is given.
    input_tensor = tf.keras.Input(shape=example_shape)
    model = tf.keras.applications.inception_v3.InceptionV3(
        weights=None,
        include_top=True,
        input_tensor=input_tensor,
        classes=_CLASSES,
        classifier_activation='softmax')
    model.load_weights(checkpoint_path)

  examples_dataset, variant_alt_allele_dataset = get_dataset(
      examples_filename, example_shape)
  # TODO: Do we need the equivalent of session_predict_hooks?
  predictions = model.predict(examples_dataset)
  write_call_variants(predictions, variant_alt_allele_dataset, output_file)
def main(argv=()):
  """Validates the command line, then runs the call_variants pipeline."""
  with errors.clean_commandline_error_exit():
    if len(argv) > 1:
      message = (
          'Command line parsing failure: call_variants does not accept '
          'positional arguments but some are present on the command line: '
          '"{}".'.format(str(argv)))
      errors.log_and_raise(message, errors.CommandLineError)
    del argv  # Unused beyond the validation above.

    proto_utils.uses_fast_cpp_protos_or_die()
    logging_level.set_from_flag()

    call_variants(
        examples_filename=FLAGS.examples,
        checkpoint_path=FLAGS.checkpoint,
        model=modeling.get_model(FLAGS.model_name),
        output_file=FLAGS.outfile)
if __name__ == '__main__':
  # These flags have no usable defaults; fail fast before running main().
  flags.mark_flags_as_required([
      'examples',
      'outfile',
      'checkpoint',
  ])
  # Parses flags and calls main(), doesn't seem to have
  # an equivalent function outside of compat
  tf.compat.v1.app.run()
| bsd-3-clause | 5ee9871f24f6e7ba786f892ebfcf6ddb | 37.44686 | 157 | 0.694289 | 3.787958 | false | false | false | false |
google/deepvariant | third_party/nucleus/util/sequence_utils.py | 1 | 4197 | # Copyright 2018 Google LLC.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Utility functions for manipulating DNA sequences."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
class Error(Exception):
  """Base class for exceptions raised by this module."""
def _add_lowercase(d):
"""Returns a dictionary with the lowercase keys and values entered."""
retval = d.copy()
retval.update({k.lower(): v.lower() for k, v in d.items()})
return retval
# Complement lookup tables.  The *_UPPER tables map uppercase bases only;
# the unsuffixed tables (built below with _add_lowercase) also accept
# lowercase input and preserve its case in the output.
STRICT_DNA_COMPLEMENT_UPPER = {'A': 'T', 'T': 'A', 'C': 'G', 'G': 'C'}
DNA_COMPLEMENT_UPPER = {'A': 'T', 'T': 'A', 'C': 'G', 'G': 'C', 'N': 'N'}
# Full IUPAC ambiguity-code complements.
IUPAC_DNA_COMPLEMENT_UPPER = {
    'A': 'T',
    'T': 'A',
    'C': 'G',
    'G': 'C',
    'R': 'Y',  # R is A/G
    'Y': 'R',  # Y is C/T
    'S': 'S',  # S is C/G
    'W': 'W',  # W is A/T
    'K': 'M',  # K is G/T
    'M': 'K',  # M is A/C
    'B': 'V',  # B is C/G/T
    'V': 'B',  # V is A/C/G
    'D': 'H',  # D is A/G/T
    'H': 'D',  # H is A/C/T
    'N': 'N',  # N is any base
}

# Expansion of each IUPAC code into the canonical bases it can represent.
IUPAC_TO_CANONICAL_BASES_UPPER = {
    'A': ['A'],
    'T': ['T'],
    'C': ['C'],
    'G': ['G'],
    'R': ['A', 'G'],
    'Y': ['C', 'T'],
    'S': ['C', 'G'],
    'W': ['A', 'T'],
    'K': ['G', 'T'],
    'M': ['A', 'C'],
    'B': ['C', 'G', 'T'],
    'V': ['A', 'C', 'G'],
    'D': ['A', 'G', 'T'],
    'H': ['A', 'C', 'T'],
    'N': ['A', 'C', 'G', 'T'],
}

# Case-insensitive versions of the complement tables above.
STRICT_DNA_COMPLEMENT = _add_lowercase(STRICT_DNA_COMPLEMENT_UPPER)
DNA_COMPLEMENT = _add_lowercase(DNA_COMPLEMENT_UPPER)
IUPAC_DNA_COMPLEMENT = _add_lowercase(IUPAC_DNA_COMPLEMENT_UPPER)

# Alphabets for validating sequences; "strict" excludes N.
STRICT_DNA_BASES_UPPER = frozenset(['A', 'C', 'G', 'T'])
STRICT_DNA_BASES = frozenset(['a', 'c', 'g', 't', 'A', 'C', 'G', 'T'])
DNA_BASES_UPPER = frozenset(['A', 'C', 'G', 'T', 'N'])
DNA_BASES = frozenset(['a', 'c', 'g', 't', 'n', 'A', 'C', 'G', 'T', 'N'])
def reverse_complement(sequence, complement_dict=None):
  """Returns the reverse complement of a DNA sequence.

  By default only sequences made of uppercase A, C, G, and T can be
  reverse complemented; pass one of the module's more permissive tables
  (e.g. IUPAC_DNA_COMPLEMENT) to accept other letters.

  Args:
    sequence: str. The input sequence to reverse complement.
    complement_dict: dict[str, str]. The lookup dictionary holding the
      complement base pairs; defaults to STRICT_DNA_COMPLEMENT_UPPER.

  Returns:
    The reverse complement DNA sequence.

  Raises:
    Error: The sequence contains letters not present in complement_dict.
  """
  if complement_dict is None:
    complement_dict = STRICT_DNA_COMPLEMENT_UPPER
  try:
    complemented = [complement_dict[base] for base in sequence]
  except KeyError:
    raise Error('Unknown base in {}, cannot reverse complement using {}'.format(
        sequence, str(complement_dict)))
  complemented.reverse()
  return ''.join(complemented)
| bsd-3-clause | a1094809beccdd7b4472235faedf363c | 33.68595 | 80 | 0.635692 | 3.235929 | false | false | false | false |
nipy/nireg | nireg/externals/transforms3d/taitbryan.py | 5 | 12510 | ''' Module implementing Euler angle rotations and their conversions
See:
* http://en.wikipedia.org/wiki/Rotation_matrix
* http://en.wikipedia.org/wiki/Euler_angles
* http://mathworld.wolfram.com/EulerAngles.html
See also: *Representing Attitude with Euler Angles and Quaternions: A
Reference* (2006) by James Diebel. A cached PDF link last found here:
http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.110.5134
Euler's rotation theorem tells us that any rotation in 3D can be
described by 3 angles. Let's call the 3 angles the *Euler angle vector*
and call the angles in the vector :math:`alpha`, :math:`beta` and
:math:`gamma`. The vector is [ :math:`alpha`,
:math:`beta`. :math:`gamma` ] and, in this description, the order of the
parameters specifies the order in which the rotations occur (so the
rotation corresponding to :math:`alpha` is applied first).
In order to specify the meaning of an *Euler angle vector* we need to
specify the axes around which each of the rotations corresponding to
:math:`alpha`, :math:`beta` and :math:`gamma` will occur.
There are therefore three axes for the rotations :math:`alpha`,
:math:`beta` and :math:`gamma`; let's call them :math:`i` :math:`j`,
:math:`k`.
Let us express the rotation :math:`alpha` around axis `i` as a 3 by 3
rotation matrix `A`. Similarly :math:`beta` around `j` becomes 3 x 3
matrix `B` and :math:`gamma` around `k` becomes matrix `G`. Then the
whole rotation expressed by the Euler angle vector [ :math:`alpha`,
:math:`beta`. :math:`gamma` ], `R` is given by::
R = np.dot(G, np.dot(B, A))
See http://mathworld.wolfram.com/EulerAngles.html
The order :math:`G B A` expresses the fact that the rotations are
performed in the order of the vector (:math:`alpha` around axis `i` =
`A` first).
To convert a given Euler angle vector to a meaningful rotation, and a
rotation matrix, we need to define:
* the axes `i`, `j`, `k`
* whether a rotation matrix should be applied on the left of a vector to
be transformed (vectors are column vectors) or on the right (vectors
are row vectors).
* whether the rotations move the axes as they are applied (intrinsic
rotations) - compared the situation where the axes stay fixed and the
vectors move within the axis frame (extrinsic)
* the handedness of the coordinate system
See: http://en.wikipedia.org/wiki/Rotation_matrix#Ambiguities
We are using the following conventions:
* axes `i`, `j`, `k` are the `z`, `y`, and `x` axes respectively. Thus
an Euler angle vector [ :math:`alpha`, :math:`beta`. :math:`gamma` ]
in our convention implies a :math:`alpha` radian rotation around the
`z` axis, followed by a :math:`beta` rotation around the `y` axis,
followed by a :math:`gamma` rotation around the `x` axis.
* the rotation matrix applies on the left, to column vectors on the
right, so if `R` is the rotation matrix, and `v` is a 3 x N matrix
with N column vectors, the transformed vector set `vdash` is given by
``vdash = np.dot(R, v)``.
* extrinsic rotations - the axes are fixed, and do not move with the
rotations.
* a right-handed coordinate system
The convention of rotation around ``z``, followed by rotation around
``y``, followed by rotation around ``x``, is known (confusingly) as
"xyz", pitch-roll-yaw, Cardan angles, or Tait-Bryan angles.
'''
from __future__ import absolute_import
import math
from functools import reduce
import numpy as np
# Fallback tolerance (4 ulp at 1.0) used by mat2euler when the matrix
# dtype has no finfo (e.g. integer input).
_FLOAT_EPS_4 = np.finfo(float).eps * 4.0
def euler2mat(z=0, y=0, x=0):
    ''' Return matrix for rotations around z, y and x axes

    Builds the rotation for the module's z-then-y-then-x (Tait-Bryan)
    convention: extrinsic, right-handed, counter-clockwise rotations,
    applied to column vectors on the right (``vdash = np.dot(M, v)``).

    Parameters
    ----------
    z : scalar
       Rotation angle in radians around z-axis (performed first)
    y : scalar
       Rotation angle in radians around y-axis
    x : scalar
       Rotation angle in radians around x-axis (performed last)

    Returns
    -------
    M : array shape (3,3)
       Rotation matrix giving same rotation as for given angles

    Examples
    --------
    >>> M = euler2mat(1.3, -0.1, 0.2)
    >>> M.shape
    (3, 3)
    '''
    partials = []
    if z:
        cz, sz = math.cos(z), math.sin(z)
        partials.append(np.array(
            [[cz, -sz, 0],
             [sz, cz, 0],
             [0, 0, 1]]))
    if y:
        cy, sy = math.cos(y), math.sin(y)
        partials.append(np.array(
            [[cy, 0, sy],
             [0, 1, 0],
             [-sy, 0, cy]]))
    if x:
        cx, sx = math.cos(x), math.sin(x)
        partials.append(np.array(
            [[1, 0, 0],
             [0, cx, -sx],
             [0, sx, cx]]))
    # Compose in application order: each later rotation multiplies on the
    # left, so for angles applied A (z) first this yields G * B * A.
    result = np.eye(3)
    for rotation in partials:
        result = np.dot(rotation, result)
    return result
def mat2euler(M, cy_thresh=None):
    ''' Discover Euler angle vector from 3x3 matrix

    Inverts euler2mat: recovers the z, y, x angles (module convention)
    from a rotation matrix.

    Parameters
    ----------
    M : array-like, shape (3,3)
    cy_thresh : None or scalar, optional
       threshold below which to give up on straightforward arctan for
       estimating x rotation.  If None (default), estimate from
       precision of input.

    Returns
    -------
    z : scalar
    y : scalar
    x : scalar
       Rotations in radians around z, y, x axes, respectively

    Notes
    -----
    When cos(y) approaches zero both z and x become numerically
    unstable; in that regime x is fixed to 0 and z absorbs the whole
    in-plane rotation (fix from Ken Shoemake's EulerAngles.c, Graphics
    Gems IV, http://www.graphicsgems.org/).
    '''
    mat = np.asarray(M)
    if cy_thresh is None:
        try:
            cy_thresh = np.finfo(mat.dtype).eps * 4
        except ValueError:
            # Non-float dtype (e.g. int matrix): fall back to float64 eps.
            cy_thresh = _FLOAT_EPS_4
    r11, r12, r13, r21, r22, r23, r31, r32, r33 = mat.flat
    # cy == sqrt((cos(y)*cos(z))**2 + (cos(x)*cos(y))**2) == |cos(y)|
    cy = math.sqrt(r33 * r33 + r23 * r23)
    if cy > cy_thresh:
        # Standard, well-conditioned decomposition.
        return (math.atan2(-r12, r11),   # z
                math.atan2(r13, cy),     # y
                math.atan2(-r23, r33))   # x
    # Degenerate case: cos(y) ~ 0, so set x = 0 and fold everything
    # in-plane into z (then r21 -> sin(z), r22 -> cos(z)).
    return (math.atan2(r21, r22),
            math.atan2(r13, cy),
            0.0)
def euler2quat(z=0, y=0, x=0):
    ''' Return quaternion corresponding to these Euler angles

    Uses the module's z, then y, then x rotation convention.

    Parameters
    ----------
    z : scalar
       Rotation angle in radians around z-axis (performed first)
    y : scalar
       Rotation angle in radians around y-axis
    x : scalar
       Rotation angle in radians around x-axis (performed last)

    Returns
    -------
    quat : array shape (4,)
       Quaternion in w, x, y z (real, then vector) format

    Notes
    -----
    Closed form derived symbolically (see ``eulerangles.py`` in the
    ``derivations`` subdirectory).
    '''
    # Quaternion components are built from the half-angles.
    half_z, half_y, half_x = z / 2.0, y / 2.0, x / 2.0
    cz, sz = math.cos(half_z), math.sin(half_z)
    cy, sy = math.cos(half_y), math.sin(half_y)
    cx, sx = math.cos(half_x), math.sin(half_x)
    w = cx * cy * cz - sx * sy * sz
    qx = cx * sy * sz + cy * cz * sx
    qy = cx * cz * sy - sx * cy * sz
    qz = cx * cy * sz + sx * cz * sy
    return np.array([w, qx, qy, qz])
def quat2euler(q):
    ''' Return Euler angles corresponding to quaternion `q`

    Parameters
    ----------
    q : 4 element sequence
       w, x, y, z of quaternion

    Returns
    -------
    z : scalar
       Rotation angle in radians around z-axis (performed first)
    y : scalar
       Rotation angle in radians around y-axis
    x : scalar
       Rotation angle in radians around x-axis (performed last)

    Notes
    -----
    Implemented as quaternion -> matrix -> Euler; fusing the two steps
    would save little computation at the cost of duplicated code.
    '''
    # Imported here to avoid a circular import at module load time.
    from . import quaternions as nq
    rotation_matrix = nq.quat2mat(q)
    return mat2euler(rotation_matrix)
def euler2axangle(z=0, y=0, x=0):
    ''' Return axis, angle corresponding to these Euler angles

    Uses the module's z, then y, then x rotation convention.

    Parameters
    ----------
    z : scalar
       Rotation angle in radians around z-axis (performed first)
    y : scalar
       Rotation angle in radians around y-axis
    x : scalar
       Rotation angle in radians around x-axis (performed last)

    Returns
    -------
    vector : array shape (3,)
       axis around which rotation occurs
    theta : scalar
       angle of rotation

    Examples
    --------
    >>> vec, theta = euler2axangle(0, 1.5, 0)
    >>> np.allclose(vec, [0, 1, 0])
    True
    >>> print(theta)
    1.5
    '''
    # Imported here to avoid a circular import at module load time.
    from . import quaternions as nq
    quat = euler2quat(z, y, x)
    return nq.quat2axangle(quat)
def axangle2euler(vector, theta):
    ''' Convert axis, angle pair to Euler angles

    Parameters
    ----------
    vector : 3 element sequence
       vector specifying axis for rotation.
    theta : scalar
       angle of rotation

    Returns
    -------
    z : scalar
    y : scalar
    x : scalar
       Rotations in radians around z, y, x axes, respectively

    Examples
    --------
    >>> z, y, x = axangle2euler([1, 0, 0], 0)
    >>> np.allclose((z, y, x), 0)
    True

    Notes
    -----
    Implemented as axis/angle -> matrix -> Euler; fusing the steps would
    save little computation at the cost of duplicated code.
    '''
    # Imported here to avoid a circular import at module load time.
    from . import quaternions as nq
    rotation = nq.axangle2rmat(vector, theta)
    return mat2euler(rotation)
| bsd-3-clause | ea6ae5973f1e566e079726493a5a849a | 30.432161 | 99 | 0.610232 | 3.321827 | false | false | false | false |
mozilla/inventory | core/keyvalue/base_option.py | 2 | 6961 | from django.db import models
from django.core.exceptions import ValidationError
from mozdns.validation import validate_name
from core.keyvalue.models import KeyValue
import ipaddr
class DHCPKeyValue(KeyValue):
    """Abstract key/value base for DHCP configuration entries.

    The flags below record how a key renders in dhcpd.conf and whether a
    validator method exists for it; they are set by the validator methods
    themselves (see CommonOption).
    """
    # True when the pair renders as an "option ..." line.
    is_option = models.BooleanField(default=False)
    # True when the pair renders as a bare statement.
    is_statement = models.BooleanField(default=False)
    # True when a validator method has vetted this pair.
    has_validator = models.BooleanField(default=False)

    class Meta:
        abstract = True

    def _get_value(self):
        """Return self.value stripped of surrounding quotes, spaces, and a
        trailing semicolon."""
        value = self.value.strip('\'" ')
        value = value.strip(';')
        value = value.strip()
        return value
class CommonOption(object):
    """Mixin of validators for common ISC dhcpd options and statements.

    Each validator method checks ``self.value`` for one dhcpd.conf
    construct and records (via ``is_option``/``is_statement``/
    ``has_validator``) how the key renders.  The host class must provide
    ``key``, ``value``, those flags, and ``_get_value()`` (see
    DHCPKeyValue).  All validators raise ValidationError on bad input.
    """

    class Meta:
        abstract = True

    def _aa_deny(self):
        """Validate the parameters of a ``deny`` statement.

        See _aa_allow for the full list of accepted parameters.
        """
        choices = ["unknown-clients", "bootp", "booting", "duplicates",
                   "declines", "client-updates", "dynamic bootp clients"]
        self.is_statement = True
        self.is_option = False
        self.has_validator = True
        value = self._get_value()
        values = value.split(',')
        for value in values:
            # strip() each element so comma-space separated lists
            # validate, matching _aa_allow below (previously unstripped
            # elements like " bootp" were rejected).
            if value.strip() in choices:
                continue
            else:
                raise ValidationError("Invalid option ({0}) parameter "
                                      "({1})'".format(self.key, self.value))

    def _aa_allow(self):
        """
        The following usages of allow and deny will work in any scope, although
        it is not recommended that they be used in pool declarations.

            allow unknown-clients;
            deny unknown-clients;
            ignore unknown-clients;

            allow bootp;
            deny bootp;
            ignore bootp;

            allow booting;
            deny booting;
            ignore booting;

            allow duplicates;
            deny duplicates;

            allow declines;
            deny declines;
            ignore declines;

            allow client-updates;
            deny client-updates;

            allow dynamic bootp clients;
            deny dynamic bootp clients;
        """
        choices = ["unknown-clients", "bootp", "booting", "duplicates",
                   "declines", "client-updates", "dynamic bootp clients"]
        self.is_statement = True
        self.is_option = False
        self.has_validator = True
        value = self._get_value()
        values = value.split(',')
        for value in values:
            if value.strip() in choices:
                continue
            else:
                raise ValidationError(
                    "Invalid parameter '{0}' for the option "
                    "'{1}'".format(self.value, self.key))

    def _routers(self, ip_type):
        """
        option routers ip-address [, ip-address... ];

        The routers option specifies a list of IP addresses for routers on
        the client's subnet.  Routers should be listed in order of
        preference.
        """
        self.is_option = True
        self.is_statement = False
        self.has_validator = True
        self._ip_list(ip_type)

    def _ntp_servers(self, ip_type):
        """
        option ntp-servers ip-address [, ip-address... ];

        This option specifies a list of IP addresses indicating NTP (RFC
        1035) servers available to the client.  Servers should be listed in
        order of preference.
        """
        self.is_option = True
        self.is_statement = False
        self.has_validator = True
        self._ip_list(ip_type)

    def _aa_domain_name_servers(self):
        """
        option domain-name-servers ip-address [, ip-address... ];

        The domain-name-servers option specifies a list of Domain Name
        System (STD 13, RFC 1035) name servers available to the client.
        Servers should be listed in order of preference.
        """
        self.is_option = True
        self.is_statement = False
        self.has_validator = True
        # ip_type comes from the object this key/value pair is attached to.
        self._ip_list(self.obj.ip_type)

    def _aa_domain_name(self):
        """
        option domain-name text;

        The 'text' should be a space seperated domain names. I.E.:
        phx.mozilla.com phx1.mozilla.com  This option specifies the domain
        name that client should use when resolving hostnames via the Domain
        Name System.
        """
        self.is_option = True
        self.is_statement = False
        self.has_validator = True
        # The whole value must be one double-quoted string.
        if (len(self.value) < 2 or not (
                self.value.startswith('"') and self.value.endswith('"'))):
            raise ValidationError(
                "Make sure the domain(s) name have \" \" around them"
            )
        for name in self.value.strip('"').split(' '):
            validate_name(name)

    def _aa_domain_search(self):
        """
        The domain-search option specifies a 'search list' of Domain Names to
        be used by the client to locate not-fully-qualified domain names. The
        difference between this option and historic use of the domain-name
        option for the same ends is that this option is encoded in RFC1035
        compressed labels on the wire. For example:

            option domain-search "example.com", "sales.example.com";
        """
        self.is_option = True
        self.is_statement = False
        self.has_validator = True
        value = self.value.strip(';')
        value = value.strip(' ')
        for name in value.split(','):
            # Bug here. Ex: "asf, "'asdf"'
            name = name.strip(' ')
            if not name:
                raise ValidationError("Each name needs to be a non empty "
                                      "domain name surrounded by \"\"")
            # Both the leading AND trailing quote are required; the
            # original 'and' only rejected names missing both quotes.
            if name[0] != '"' or name[len(name) - 1] != '"':
                raise ValidationError("Each name needs to be a non empty "
                                      "domain name surrounded by \"\"")
            validate_name(name.strip('"'))

    def _ip_list(self, ip_type):
        """
        Use this if the value is supposed to be a list of ip addresses.

        Raises ValidationError when any element is not a valid address of
        the requested family; only IPv4 is implemented.
        """
        # NOTE(review): other validators set is_option; confirm whether
        # ip_option is intentional or a legacy typo for is_option.
        self.ip_option = True
        self.has_validator = True
        ips = self._get_value()
        ips = ips.split(',')
        for router in ips:
            router = router.strip()
            try:
                if ip_type == '4':
                    ipaddr.IPv4Address(router)
                else:
                    # Fix: NotImplemented is not callable; raising it
                    # produced an accidental TypeError instead of the
                    # intended NotImplementedError.
                    raise NotImplementedError()
            except ipaddr.AddressValueError:
                raise ValidationError("Invalid option ({0}) parameter "
                                      "({1})'".format(self.key, router))

    def _single_ip(self, ip_type):
        """Validate that the value is a single IP address of the requested
        family; only IPv4 is implemented."""
        ip = self._get_value()
        try:
            if ip_type == '4':
                ipaddr.IPv4Address(ip)
            else:
                # Fix: was `raise NotImplemented()` (a TypeError in
                # disguise); see _ip_list.
                raise NotImplementedError()
        except ipaddr.AddressValueError:
            raise ValidationError("Invalid option ({0}) parameter "
                                  "({1})'".format(self.key, ip))
| bsd-3-clause | 5fa37b04d4095234f03eac846257df89 | 32.791262 | 79 | 0.540152 | 4.570584 | false | false | false | false |
mozilla/inventory | vendor-local/src/django-tastypie/tests/gis/tests/views.py | 11 | 5062 | from django.http import HttpRequest
from django.test import TestCase
from django.utils import simplejson as json
class ViewsTestCase(TestCase):
    """HTTP-level tests for the geonotes API: reads, writes, errors, OPTIONS.

    Note: the original test_posts/test_puts/test_api_field_error built an
    unused ``HttpRequest`` and set the deprecated ``_raw_post_data`` on it;
    the test client calls below are what actually deliver the body, so the
    dead objects were removed.
    """

    def test_gets(self):
        """GET the API root, list, detail, and 'set' endpoints."""
        resp = self.client.get('/api/v1/', data={'format': 'json'})
        self.assertEqual(resp.status_code, 200)
        deserialized = json.loads(resp.content)
        self.assertEqual(len(deserialized), 2)
        self.assertEqual(deserialized['geonotes'], {'list_endpoint': '/api/v1/geonotes/', 'schema': '/api/v1/geonotes/schema/'})

        resp = self.client.get('/api/v1/geonotes/', data={'format': 'json'})
        self.assertEqual(resp.status_code, 200)
        deserialized = json.loads(resp.content)
        self.assertEqual(len(deserialized), 2)
        self.assertEqual(deserialized['meta']['limit'], 20)
        self.assertEqual(len(deserialized['objects']), 3)
        self.assertEqual([obj['title'] for obj in deserialized['objects']], [u'Points inside Golden Gate Park note', u'Golden Gate Park', u'Line inside Golden Gate Park'])

        resp = self.client.get('/api/v1/geonotes/1/', data={'format': 'json'})
        self.assertEqual(resp.status_code, 200)
        deserialized = json.loads(resp.content)
        self.assertEqual(len(deserialized), 12)
        self.assertEqual(deserialized['title'], u'Points inside Golden Gate Park note')

        resp = self.client.get('/api/v1/geonotes/set/2;1/', data={'format': 'json'})
        self.assertEqual(resp.status_code, 200)
        deserialized = json.loads(resp.content)
        self.assertEqual(len(deserialized), 1)
        self.assertEqual(len(deserialized['objects']), 2)
        self.assertEqual([obj['title'] for obj in deserialized['objects']], [u'Golden Gate Park', u'Points inside Golden Gate Park note'])

    def test_posts(self):
        """POST a new geonote, then GET it back and verify the fields."""
        post_data = '{"content": "A new post.", "is_active": true, "title": "New Title", "slug": "new-title", "user": "/api/v1/users/1/"}'
        resp = self.client.post('/api/v1/geonotes/', data=post_data, content_type='application/json')
        self.assertEqual(resp.status_code, 201)
        self.assertEqual(resp['location'], 'http://testserver/api/v1/geonotes/4/')

        # make sure posted object exists
        resp = self.client.get('/api/v1/geonotes/4/', data={'format': 'json'})
        self.assertEqual(resp.status_code, 200)
        obj = json.loads(resp.content)
        self.assertEqual(obj['content'], 'A new post.')
        self.assertEqual(obj['is_active'], True)
        self.assertEqual(obj['user'], '/api/v1/users/1/')

    def test_puts(self):
        """PUT over an existing geonote, then verify the update took."""
        post_data = '{"content": "Another new post.", "is_active": true, "title": "Another New Title", "slug": "new-title", "user": "/api/v1/users/1/", "lines": null, "points": null, "polys": null}'
        resp = self.client.put('/api/v1/geonotes/1/', data=post_data, content_type='application/json')
        self.assertEqual(resp.status_code, 204)

        # make sure posted object exists
        resp = self.client.get('/api/v1/geonotes/1/', data={'format': 'json'})
        self.assertEqual(resp.status_code, 200)
        obj = json.loads(resp.content)
        self.assertEqual(obj['content'], 'Another new post.')
        self.assertEqual(obj['is_active'], True)
        self.assertEqual(obj['user'], '/api/v1/users/1/')

    def test_api_field_error(self):
        # When a field error is encountered, we should be presenting the message
        # back to the user.
        post_data = '{"content": "More internet memes.", "is_active": true, "title": "IT\'S OVER 9000!", "slug": "its-over", "user": "/api/v1/users/9001/"}'
        resp = self.client.post('/api/v1/geonotes/', data=post_data, content_type='application/json')
        self.assertEqual(resp.status_code, 400)
        self.assertEqual(resp.content, "Could not find the provided object via resource URI '/api/v1/users/9001/'.")

    def test_options(self):
        """OPTIONS reports the allowed methods per endpoint."""
        resp = self.client.options('/api/v1/geonotes/')
        self.assertEqual(resp.status_code, 200)
        allows = 'GET,POST,PUT,DELETE,PATCH'
        self.assertEqual(resp['Allow'], allows)
        self.assertEqual(resp.content, allows)

        resp = self.client.options('/api/v1/geonotes/1/')
        self.assertEqual(resp.status_code, 200)
        allows = 'GET,POST,PUT,DELETE,PATCH'
        self.assertEqual(resp['Allow'], allows)
        self.assertEqual(resp.content, allows)

        resp = self.client.options('/api/v1/geonotes/schema/')
        self.assertEqual(resp.status_code, 200)
        allows = 'GET'
        self.assertEqual(resp['Allow'], allows)
        self.assertEqual(resp.content, allows)

        resp = self.client.options('/api/v1/geonotes/set/2;1/')
        self.assertEqual(resp.status_code, 200)
        allows = 'GET'
        self.assertEqual(resp['Allow'], allows)
        self.assertEqual(resp.content, allows)
| bsd-3-clause | b17b35cec94b44c51e3e835c272c3660 | 48.627451 | 198 | 0.634927 | 3.547302 | false | true | false | false |
mozilla/inventory | mozdns/ptr/models.py | 2 | 5435 | from django.db import models
from django.core.exceptions import ValidationError
from mozdns.domain.models import name_to_domain
from mozdns.ip.utils import ip_to_domain_name, nibbilize
from mozdns.domain.models import Domain
from mozdns.ip.models import Ip
from mozdns.cname.models import CNAME
from mozdns.ip.utils import ip_to_dns_form
from mozdns.validation import validate_name, validate_ttl
from mozdns.mixins import ObjectUrlMixin, DisplayMixin
from mozdns.models import ViewMixin, TTLRRMixin
import reversion
from gettext import gettext as _
class BasePTR(object):
    """Shared reverse-DNS behavior for records that generate PTRs.

    Mixed into PTR below and (via clean_reverse) used by StaticReg.
    Host classes must provide ip_str/ip_type/name and the Ip mixin's
    validation helpers.
    """

    def clean_reverse(self, update_reverse_domain=True):
        # This indirection is so StaticReg can call this function
        if update_reverse_domain:
            self.update_reverse_domain()
        self.check_for_illegal_rr_ttl(field_name='ip_str')
        self.check_no_ns_soa_condition(self.reverse_domain)
        self.reverse_validate_no_cname()

    def reverse_validate_no_cname(self):
        """
        Considering existing CNAMES must be done when editing and
        creating new :class:`PTR` objects.

            "PTR records must point back to a valid A record, not a
            alias defined by a CNAME."

            -- `RFC 1912 <http://tools.ietf.org/html/rfc1912>`__

        An example of something that is not allowed::

            FOO.BAR.COM     CNAME       BEE.BAR.COM

            BEE.BAR.COM     A           128.193.1.1

            1.1.193.128     PTR         FOO.BAR.COM
                            ^-- PTR's shouldn't point to CNAMES
        """
        # There are preexisting records that break this rule. We can't support
        # this requirement until those records are fixed
        return
        # Intentionally unreachable until the data cleanup above happens.
        if CNAME.objects.filter(fqdn=self.name).exists():
            raise ValidationError(
                "PTR records must point back to a valid A record, not a "
                "alias defined by a CNAME. -- RFC 1034"
            )

    def update_reverse_domain(self):
        """Recompute reverse_domain from ip_str; assumes clean_ip ran."""
        # We are assuming that self.clean_ip has been called already
        # IPv6 addresses must be expanded to nibble form before building
        # the ip6.arpa name.
        rvname = nibbilize(self.ip_str) if self.ip_type == '6' else self.ip_str
        rvname = ip_to_domain_name(rvname, ip_type=self.ip_type)
        self.reverse_domain = name_to_domain(rvname)
        # A bare arpa root match means no real reverse zone exists for
        # this address.
        if (self.reverse_domain is None or self.reverse_domain.name in
                ('arpa', 'in-addr.arpa', 'ip6.arpa')):
            raise ValidationError(
                "No reverse Domain found for {0} ".format(self.ip_str)
            )

    def rebuild_reverse(self):
        # Only zones with an SOA are actually built; schedule a rebuild
        # so the change reaches the zone files.
        if self.reverse_domain and self.reverse_domain.soa:
            self.reverse_domain.soa.schedule_rebuild()

    def dns_name(self):
        """
        Return the cononical name of this ptr that can be placed in a
        reverse zone file.
        """
        return ip_to_dns_form(self.ip_str)
class PTR(BasePTR, Ip, ViewMixin, ObjectUrlMixin, DisplayMixin, TTLRRMixin):
"""
A PTR is used to map an IP to a domain name.
>>> PTR(ip_str=ip_str, name=fqdn, ip_type=ip_type)
"""
id = models.AutoField(primary_key=True)
reverse_domain = models.ForeignKey(Domain, null=False, blank=True)
name = models.CharField(
max_length=255, validators=[validate_name], help_text="The name that "
"this record points to."
)
ttl = models.PositiveIntegerField(
default=3600, blank=True, null=True, validators=[validate_ttl]
)
description = models.CharField(max_length=1000, null=True, blank=True)
template = _("{bind_name:$lhs_just} {ttl_} {rdclass:$rdclass_just} "
"{rdtype:$rdtype_just} {name:1}.")
search_fields = ('ip_str', 'name')
class Meta:
db_table = 'ptr'
unique_together = ('ip_str', 'ip_type', 'name')
def __str__(self):
return "{0} {1} {2}".format(str(self.ip_str), 'PTR', self.name)
def __repr__(self):
return "<{0}>".format(str(self))
@classmethod
def get_api_fields(cls):
return ['ip_str', 'ip_type', 'name', 'ttl', 'description']
@property
def rdtype(self):
return 'PTR'
def save(self, *args, **kwargs):
urd = kwargs.pop('update_reverse_domain', True)
self.clean(update_reverse_domain=urd)
super(PTR, self).save(*args, **kwargs)
self.rebuild_reverse()
def delete(self, *args, **kwargs):
if self.reverse_domain.soa:
self.reverse_domain.soa.schedule_rebuild()
super(PTR, self).delete(*args, **kwargs)
def clean(self, update_reverse_domain=True):
self.clean_ip()
# We need to check if there is a registration using our ip and name
# because that registration will generate a ptr record.
from core.registration.static.models import StaticReg
if (StaticReg.objects.filter(
fqdn=self.name, ip_upper=self.ip_upper,
ip_lower=self.ip_lower).exists()):
raise ValidationError(
"An registration has already used this IP and Name."
)
self.clean_reverse(update_reverse_domain=update_reverse_domain)
def details(self):
return (
('Ip', str(self.ip_str)),
('Record Type', 'PTR'),
('Name', self.name),
)
def bind_render_record(self, pk=False, **kwargs):
self.fqdn = self.dns_name().strip('.')
return super(PTR, self).bind_render_record(pk=pk, **kwargs)
reversion.register(PTR)
| bsd-3-clause | f5820f2556245239f06e2d8b551c5e6b | 34.064516 | 79 | 0.616007 | 3.67478 | false | false | false | false |
mozilla/inventory | vendor-local/src/django-tastypie/tests/complex/api/resources.py | 30 | 1179 | from django.contrib.auth.models import User, Group
from django.contrib.comments.models import Comment
from tastypie.fields import CharField, ForeignKey, ManyToManyField, OneToOneField, OneToManyField
from tastypie.resources import ModelResource
from complex.models import Post, Profile
class ProfileResource(ModelResource):
class Meta:
queryset = Profile.objects.all()
resource_name = 'profiles'
class CommentResource(ModelResource):
class Meta:
queryset = Comment.objects.all()
resource_name = 'comments'
class GroupResource(ModelResource):
class Meta:
queryset = Group.objects.all()
resource_name = 'groups'
class UserResource(ModelResource):
groups = ManyToManyField(GroupResource, 'groups', full=True)
profile = OneToOneField(ProfileResource, 'profile', full=True)
class Meta:
queryset = User.objects.all()
resource_name = 'users'
class PostResource(ModelResource):
user = ForeignKey(UserResource, 'user')
comments = OneToManyField(CommentResource, 'comments', full=False)
class Meta:
queryset = Post.objects.all()
resource_name = 'posts'
| bsd-3-clause | 52e8ae8018287986bc51b74747bb5872 | 27.756098 | 97 | 0.710772 | 4.366667 | false | false | false | false |
mozilla/inventory | migrate_dns/build_nics.py | 2 | 8517 | import re
from systems.models import System
from migrate_dns.utils import *
import pprint
pp = pprint.PrettyPrinter(indent=2)
nic_nums = re.compile("^nic\.(\d+)\..*\.(\d+)$")
is_mac_key = re.compile("^nic\.\d+\.mac_address\.\d+$")
is_hostname_key = re.compile("^nic\.\d+\.hostname\.\d+$")
is_ip_key = re.compile("^nic\.\d+\.ipv4_address\.\d+$")
is_dns_auto_build_key = re.compile("^nic\.\d+\.dns_auto_build\.\d+$")
is_dns_auto_hostname_key = re.compile("^nic\.\d+\.dns_auto_hostname\.\d+$")
is_dns_has_conflict_key = re.compile("^nic\.\d+\.dns_has_conflict\.\d+$")
is_some_key = re.compile("^nic\.\d+\.(.*)\.\d+$")
def build_nic(sub_nic):
intr = Interface(sub_nic[0].system, sub_nic)
for nic_data in sub_nic:
if is_mac_key.match(nic_data.key):
if intr.mac is not None:
log("!" * 20, WARNING)
log("nic with more than one mac in system "
"{0} (https://inventory.mozilla.org/en-US/systems/edit/{1}/)"
.format(intr.system, intr.system.pk), WARNING)
log(pp.pformat(sub_nic), WARNING)
intr.mac = nic_data.value
intr.keys.append('mac')
continue
if is_hostname_key.match(nic_data.key):
if intr.hostname is not None:
log("!" * 20, WARNING)
log("nic with more than one hostname in system "
"{0} (https://inventory.mozilla.org/en-US/systems/edit/{1}/)"
.format(intr.system, intr.system.pk), WARNING)
log(pp.pformat(sub_nic), WARNING)
intr.hostname = nic_data.value
intr.keys.append('hostname')
continue
if is_ip_key.match(nic_data.key):
intr.ips.append(nic_data.value)
continue
if is_dns_auto_build_key.match(nic_data.key):
if nic_data.value == 'False':
intr.dns_auto_build = False
continue
if is_dns_auto_hostname_key.match(nic_data.key):
if nic_data.value == 'False':
intr.dns_auto_hostname = False
continue
if is_dns_has_conflict_key.match(nic_data.key):
if nic_data.value == 'True':
intr.dns_has_conflict = True
else:
intr.dns_has_conflict = False
continue
tmp = is_some_key.match(nic_data.key)
if tmp:
if hasattr(intr, tmp.group(1)):
setattr(intr, tmp.group(1), [nic_data.value,
getattr(intr, tmp.group(1))])
intr.keys.append(tmp.group(s))
else:
setattr(intr, tmp.group(1), nic_data.value)
if intr.hostname is None:
log("System {0} and nic {1}/{2} hast no hostname key, using hostname "
"found on the system.".format(print_system(intr.system), intr.primary,
intr.alias), ERROR)
intr.hostname = intr.system.hostname
return intr
def get_nic_objs():
"""
Use this function to return all data that *could* be included in a DNS
build.
:return: list of tuples. See :function:`get_nick_data`
"""
systems = System.objects.all()
formated_nics = []
for system in systems:
raw_nics = system.keyvalue_set.all()
if not raw_nics:
continue
formated_nics.append(transform_nics(raw_nics))
interfaces = []
for system_nics in formated_nics:
for primary_nic_number, primary_nic in system_nics.items():
for sub_nic_number, sub_nic in primary_nic['sub_nics'].items():
interface = build_nic(sub_nic)
if not interface:
continue
interfaces.append(interface)
return interfaces
def build_nics_from_system(system):
"""
Pass a :class:`System` instance to this function and it will return a list
of :class:`Interface` objects. Use the interface objects as a proxy for the
KV store. I.E:
>>> system = <System: try-mac-slave07>
>>> nics = build_nics(
>>> nics
[<Interface: nic.0.0 IP: [u'10.2.90.239'] Hostname: try-mac-slave07>]
>>> nics[0].alias
u'0'
>>> nics[0].primary
u'0'
>>> nics[0].mac
u'00:16:cb:a7:36:4a'
>>> nics[0].ips
[u'10.2.90.239']
>>> nics[0].name
u'nic0'
>>> nics[0].hostname
u'try-mac-slave07'
"""
raw_nics = system.keyvalue_set.all()
formated_nics = transform_nics(raw_nics)
interfaces = []
for primary_nic_number, primary_nic in formated_nics.items():
for sub_nic_number, sub_nic in primary_nic['sub_nics'].items():
interface = build_nic(sub_nic)
if not interface:
continue
interfaces.append(interface)
return interfaces
def get_dns_data():
dns_data, nic_objs = _get_dns_data()
return dns_data
get_nic_primary_number = re.compile("^nic\.(\d+).*$")
def transform_nics(nics):
"""
Since KV systems have no structure, storing structured data in a KV makes
extracting data very hard. This function applies a transform to all nics
linked to a system. It builds a structure that contains all nics in a
useful format.
The transform should return something like this::
{'0': {'sub_nics': {'0': ['nic.0.ipv4_address.0': '192.168.1.1',
'nic.0.mac_address.0': 'DE:AD:BE:EF:00:00'
'nic.0.hostname.0': 'foobar']
'1': ['nic.0.ipv4_address.1': '192.168.1.1',
'nic.0.mac_address.1': 'DE:AD:BE:EF:00:00'
'nic.0.hostname.1': 'foobar']}}
'1': {'sub_nics': {'0': ['nic.1.ipv4_address.0': '192.168.1.1',
'nic.1.mac_address.0': 'DE:AD:BE:EF:00:00'
'nic.1.hostname.0': 'foobar']
'1': ['nic.1.ipv4_address.1': '192.168.1.2',
'nic.1.mac_address.1': '11:22:33:44:55:66'
'nic.1.hostname.1': 'bazbar']}}}
"""
formated_nics = _build_primary_nics(nics)
for nic_number, nics in formated_nics.items():
formated_nics[nic_number]['sub_nics'] = _build_sub_nics(nics)
formated_nics[nic_number].pop('nics') # We don't need nics anymore.
return formated_nics
def _build_primary_nics(all_nics):
"""
Aggregate all nics into their primary groups.
I.E. All nic\.X.\.*\.Y nics go into a list where all X's are the same.
:param all_nics: All nics to consider.
:type all_nics: list
"""
primary_nics = {}
for nic in all_nics:
if not isinstance(nic.key, basestring):
log("=" * 15, DEBUG)
log("System {0} and NIC {1} not in valid format. Value is not "
"type basestring Skipping.".format(nic.system, nic), DEBUG)
log(print_system(nic.system), DEBUG)
continue
possible_primary_nic = get_nic_primary_number.match(nic.key)
if not possible_primary_nic:
log("=" * 15, DEBUG)
log("System {0} and NIC {1} not in valid format. "
"Skipping.".format(nic.system, nic), DEBUG)
log(print_system(nic.system), DEBUG)
continue
primary_nic_number = possible_primary_nic.group(1)
if primary_nic_number in primary_nics:
primary_nics[primary_nic_number]['nics'].append(nic)
else:
primary_nics[primary_nic_number] = {'nics': [nic]}
return primary_nics
get_nic_sub_number = re.compile("^nic\.\d+.*\.(\d+)$")
def _build_sub_nics(all_nics):
"""
Aggregate all sub nics into their sub groups.
I.E. All nic\.X.\.*\.Y nics go into a list where all Y's are the same.
:param all_nics: All nics to consider.
:type all_nics: list
"""
sub_nics = {}
for nic in all_nics['nics']:
possible_sub_nic = get_nic_sub_number.match(nic.key)
if not possible_sub_nic:
log("System {0} and NIC {1} not in valid format. "
"Skipping.".format(nic.system, nic.key), DEBUG)
continue
sub_nic_number = possible_sub_nic.group(1)
if sub_nic_number in sub_nics:
sub_nics[sub_nic_number].append(nic)
else:
sub_nics[sub_nic_number] = [nic]
return sub_nics
| bsd-3-clause | 68a51004658ed7f1cd13d1c5c15c1262 | 36.519824 | 85 | 0.544441 | 3.480588 | false | false | false | false |
mozilla/inventory | vendor-local/src/django-extensions/build/lib/django_extensions/jobs/daily/cache_cleanup.py | 16 | 1625 | """
Daily cleanup job.
Can be run as a cronjob to clean out old data from the database (only expired
sessions at the moment).
"""
from django_extensions.management.jobs import DailyJob
class Job(DailyJob):
help = "Cache (db) cleanup Job"
def execute(self):
from django.conf import settings
from django.db import transaction
import os
if hasattr(settings, 'CACHES'):
from django.core.cache import get_cache
from django.db import router, connections
from django.utils import timezone
for cache_name, cache_options in settings.CACHES.iteritems():
if cache_options['BACKEND'].endswith("DatabaseCache"):
cache = get_cache(cache_name)
db = router.db_for_write(cache.cache_model_class)
cursor = connections[db].cursor()
now = timezone.now()
cache._cull(db, cursor, now)
transaction.commit_unless_managed(using=db)
elif hasattr(settings, 'CACHE_BACKEND'):
if settings.CACHE_BACKEND.startswith('db://'):
from django.db import connection
os.environ['TZ'] = settings.TIME_ZONE
table_name = settings.CACHE_BACKEND[5:]
cursor = connection.cursor()
cursor.execute("DELETE FROM %s WHERE %s < current_timestamp;" % \
(connection.ops.quote_name(table_name),
connection.ops.quote_name('expires')))
transaction.commit_unless_managed()
| bsd-3-clause | 4bbb752e52313e480eaa16ee64f936f2 | 39.625 | 81 | 0.574769 | 4.642857 | false | false | false | false |
mozilla/inventory | decorators/printqueries.py | 3 | 2156 | """
Print SQL Decorator found at http://pushingkarma.com/notebookdjango-decorator-print-sql-queries/
Usage:
@print_queries('metric')
Where 'metric' is a search filter in the query itself
"""
import os, time
COLORS = {'blue':34, 'cyan':36, 'green':32, 'grey':30, 'magenta':35, 'red':31, 'white':37, 'yellow':33}
RESET = '\033[0m'
def print_queries(filter=None):
""" Print all queries executed in this funnction. """
def wrapper1(func):
def wrapper2(*args, **kwargs):
from django.db import connection
sqltime, longest, numshown = 0.0, 0.0, 0
initqueries = len(connection.queries)
starttime = time.time()
result = func(*args, **kwargs)
for query in connection.queries[initqueries:]:
sqltime += float(query['time'].strip('[]s'))
longest = max(longest, float(query['time'].strip('[]s')))
if not filter or filter in query['sql']:
numshown += 1
querystr = colored('\n[%ss] ' % query['time'], 'yellow')
querystr += colored(query['sql'], 'blue')
print querystr
numqueries = len(connection.queries) - initqueries
numhidden = numqueries - numshown
runtime = round(time.time() - starttime, 3)
proctime = round(runtime - sqltime, 3)
print colored("------", 'blue')
print colored('Total Time: %ss' % runtime, 'yellow')
print colored('Proc Time: %ss' % proctime, 'yellow')
print colored('Query Time: %ss (longest: %ss)' % (sqltime, longest), 'yellow')
print colored('Num Queries: %s (%s hidden)\n' % (numqueries, numhidden), 'yellow')
return result
return wrapper2
return wrapper1
def colored(text, color=None):
""" Colorize text {red, green, yellow, blue, magenta, cyan, white}. """
if os.getenv('ANSI_COLORS_DISABLED') is None and 1 == 2:
fmt_str = '\033[%dm%s'
if color is not None:
text = fmt_str % (COLORS[color], text)
text += RESET
return text
| bsd-3-clause | 1667cb17c5daeec59930fbfa5d32f45a | 43 | 103 | 0.558905 | 3.884685 | false | false | false | false |
mozilla/inventory | mozdns/models.py | 2 | 9439 | from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.db import models
import mozdns
from mozdns.domain.models import Domain
from mozdns.view.models import View
from mozdns.mixins import ObjectUrlMixin, DisplayMixin
from mozdns.validation import validate_first_label, validate_name
from mozdns.validation import validate_ttl
class DomainMixin(models.Model):
domain = models.ForeignKey(Domain, null=False, help_text="FQDN of the "
"domain after the short hostname. "
"(Ex: <i>Vlan</i>.<i>DC</i>.mozilla.com)")
class Meta:
abstract = True
class LabelMixin(models.Model):
# "The length of any one label is limited to between 1 and 63 octets."
# -- RFC218
label = models.CharField(max_length=63, blank=True, null=True,
validators=[validate_first_label],
help_text="Short name of the fqdn")
class Meta:
abstract = True
class FQDNMixin(models.Model):
fqdn = models.CharField(max_length=255, blank=True, null=True,
validators=[validate_name], db_index=True)
class Meta:
abstract = True
class LabelDomainMixin(LabelMixin, DomainMixin, FQDNMixin):
"""
This class provides common functionality that many DNS record
classes share. This includes a foreign key to the ``domain`` table
and a ``label`` CharField.
If you plan on using the ``unique_together`` constraint on a Model
that inherits from ``LabelDomainMixin``, you must include ``domain`` and
``label`` explicitly.
All common records have a ``fqdn`` field. This field is updated
every time the object is saved::
fqdn = name + domain.name
or if name == ''
fqdn = domain.name
``fqdn`` makes searching for records much easier. Instead of
looking at ``obj.label`` together with ``obj.domain.name``, you can
just search the ``obj.fqdn`` field.
"the total number of octets that represent a name (i.e., the sum of
all label octets and label lengths) is limited to 255" - RFC 4471
"""
class Meta:
abstract = True
class ViewMixin(models.Model):
def validate_views(instance, views):
for view in views:
instance.clean_views(views)
views = models.ManyToManyField(
View, blank=True, validators=[validate_views]
)
class Meta:
abstract = True
def clean_views(self, views):
"""cleaned_data is the data that is going to be called with for
updating an existing or creating a new object. Classes should implement
this function according to their specific needs.
"""
for view in views:
if hasattr(self, 'domain') and self.domain:
self.check_no_ns_soa_condition(self.domain, view=view)
if hasattr(self, 'reverse_domain') and self.reverse_domain:
self.check_no_ns_soa_condition(self.reverse_domain, view=view)
def check_no_ns_soa_condition(self, domain, view=None):
if domain.soa:
fail = False
root_domain = domain.soa.root_domain
if root_domain and not root_domain.nameserver_set.exists():
fail = True
elif (view and
not root_domain.nameserver_set.filter(views=view).exists()):
fail = True
if fail:
raise ValidationError(
"The zone you are trying to assign this record into does "
"not have an NS record, thus cannnot support other "
"records.")
class TTLRRMixin(object):
def check_for_illegal_rr_ttl(self, field_name='fqdn', rr_value=None):
"""
"You have different records in the same RRset <name,type,class>
with different TTLs. This is not allowed and is being
corrected."
-- Mark Andrews
BUG 892531
A new record's ttl will override any old record's TTL if those records
belong to a set or rr (Round Robin) records.
"""
if not rr_value:
rr_value = getattr(self, field_name)
for record in self.__class__.objects.filter(**{field_name: rr_value}):
if self.pk and record.pk == self.pk:
continue
if self.ttl != record.ttl:
# This sucks because I'm bypassing the records' save/clean
# call.
self.__class__.objects.filter(pk=record.pk).update(
ttl=self.ttl
)
class MozdnsRecord(ViewMixin, TTLRRMixin, DisplayMixin, ObjectUrlMixin):
ttl = models.PositiveIntegerField(default=None, blank=True, null=True,
validators=[validate_ttl],
help_text="Time to Live of this record")
description = models.CharField(max_length=1000, blank=True, null=True,
help_text="A description of this record.")
def __str__(self):
self.set_fqdn()
return self.bind_render_record()
def __repr__(self):
return "<{0} '{1}'>".format(self.rdtype, str(self))
class Meta:
abstract = True
@classmethod
def get_api_fields(cls):
"""
The purpose of this is to help the API decide which fields to expose
to the user when they are creating and updateing an Object. This
function should be implemented in inheriting models and overriden to
provide additional fields. Tastypie ignores any relational fields on
the model. See the ModelResource definitions for view and domain
fields.
"""
return ['fqdn', 'ttl', 'description', 'views']
def clean(self):
# The Nameserver and subclasses of BaseAddressRecord do not call this
# function
self.set_fqdn()
self.check_TLD_condition()
self.check_for_illegal_rr_ttl()
self.check_no_ns_soa_condition(self.domain)
self.check_for_delegation()
if self.rdtype != 'CNAME':
self.check_for_cname()
def delete(self, *args, **kwargs):
if self.domain.soa:
self.domain.soa.schedule_rebuild()
from mozdns.utils import prune_tree
call_prune_tree = kwargs.pop('call_prune_tree', True)
objs_domain = self.domain
super(MozdnsRecord, self).delete(*args, **kwargs)
if call_prune_tree:
prune_tree(objs_domain)
def save(self, *args, **kwargs):
self.full_clean()
if self.pk:
# We need to get the domain from the db. If it's not our current
# domain, call prune_tree on the domain in the db later.
db_domain = self.__class__.objects.get(pk=self.pk).domain
if self.domain == db_domain:
db_domain = None
else:
db_domain = None
no_build = kwargs.pop("no_build", False)
super(MozdnsRecord, self).save(*args, **kwargs)
if no_build:
pass
else:
# Mark the soa
if self.domain.soa:
self.domain.soa.schedule_rebuild()
if db_domain:
from mozdns.utils import prune_tree
prune_tree(db_domain)
def set_fqdn(self):
try:
if self.label == '':
self.fqdn = self.domain.name
else:
self.fqdn = "{0}.{1}".format(self.label, self.domain.name)
except ObjectDoesNotExist:
return
def check_for_cname(self):
"""
"If a CNAME RR is preent at a node, no other data should be
present; this ensures that the data for a canonical name and its
aliases cannot be different."
-- `RFC 1034 <http://tools.ietf.org/html/rfc1034>`_
Call this function in models that can't overlap with an existing
CNAME.
"""
CNAME = mozdns.cname.models.CNAME
if hasattr(self, 'label'):
if CNAME.objects.filter(domain=self.domain,
label=self.label).exists():
raise ValidationError("A CNAME with this name already exists.")
else:
if CNAME.objects.filter(label='', domain=self.domain).exists():
raise ValidationError("A CNAME with this name already exists.")
def check_for_delegation(self):
"""
If an object's domain is delegated it should not be able to
be changed. Delegated domains cannot have objects created in
them.
"""
if not (self.domain and self.domain.delegated):
return
if self.domain.nameserver_set.filter(server=self.fqdn).exists():
return
else:
raise ValidationError(
"You can only create a record in a delegated domain if "
"there is an NS record pointing to the record's fqdn."
)
def check_TLD_condition(self):
domain = Domain.objects.filter(name=self.fqdn)
if not domain:
return
if self.label == '' and domain[0] == self.domain:
return # This is allowed
else:
raise ValidationError("You cannot create an record that points "
"to the top level of another domain.")
| bsd-3-clause | 7c8950c41e6df5ed507541bb518b9478 | 34.220149 | 79 | 0.587562 | 4.30415 | false | false | false | false |
mozilla/inventory | core/site/migrations/0001_initial.py | 3 | 2859 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Site'
db.create_table('site', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=255)),
('parent', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['site.Site'], null=True, blank=True)),
))
db.send_create_signal('site', ['Site'])
# Adding unique constraint on 'Site', fields ['name', 'parent']
db.create_unique('site', ['name', 'parent_id'])
# Adding model 'SiteKeyValue'
db.create_table('site_key_value', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('key', self.gf('django.db.models.fields.CharField')(max_length=255)),
('value', self.gf('django.db.models.fields.CharField')(max_length=255)),
('site', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['site.Site'])),
))
db.send_create_signal('site', ['SiteKeyValue'])
# Adding unique constraint on 'SiteKeyValue', fields ['key', 'value']
db.create_unique('site_key_value', ['key', 'value'])
def backwards(self, orm):
# Removing unique constraint on 'SiteKeyValue', fields ['key', 'value']
db.delete_unique('site_key_value', ['key', 'value'])
# Removing unique constraint on 'Site', fields ['name', 'parent']
db.delete_unique('site', ['name', 'parent_id'])
# Deleting model 'Site'
db.delete_table('site')
# Deleting model 'SiteKeyValue'
db.delete_table('site_key_value')
models = {
'site.site': {
'Meta': {'unique_together': "(('name', 'parent'),)", 'object_name': 'Site', 'db_table': "'site'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['site.Site']", 'null': 'True', 'blank': 'True'})
},
'site.sitekeyvalue': {
'Meta': {'unique_together': "(('key', 'value'),)", 'object_name': 'SiteKeyValue', 'db_table': "'site_key_value'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['site.Site']"}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
}
}
complete_apps = ['site'] | bsd-3-clause | f385960e59551eedce9cfb50bb5367a5 | 43 | 133 | 0.566981 | 3.684278 | false | false | false | false |
mozilla/inventory | systems/views.py | 2 | 39831 | from django.views.decorators.csrf import csrf_exempt
import csv
from django.core.exceptions import ValidationError
from django.db import transaction, IntegrityError
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from django.db.models import Q
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import redirect, get_object_or_404, render
from django.template import RequestContext
from django.template.loader import render_to_string
from django.utils import translation
from django.forms.formsets import formset_factory
import _mysql_exceptions
import models
from middleware.restrict_to_remote import allow_anyone,sysadmin_only, LdapGroupRequired
import re
from django.test.client import RequestFactory
from jinja2.filters import contextfilter
import models
from systems.models import System, SystemStatus
from libs.jinja import render_to_response as render_to_response
from middleware.restrict_to_remote import allow_anyone,sysadmin_only, LdapGroupRequired
from Rack import Rack
from MozInvAuthorization.KeyValueACL import KeyValueACL
import simplejson as json
from mozdns.utils import ensure_label_domain, prune_tree
from mozdns.view.models import View
from forms import SystemForm
from core.group.models import Group
from core.registration.static.models import StaticReg
from core.registration.static.forms import StaticRegAutoForm
from core.hwadapter.forms import HWAdapterForm
from core.range.utils import ip_to_range
from core.site.models import Site
from slurpee.constants import P_EXTRA
from settings.scrape import config as external_config
# Import resources
from api_v2.dhcp_handler import DHCPHandler
from api_v2.keyvalue_handler import KeyValueHandler
# Use this object to generate request objects for calling tastypie views
factory = RequestFactory()
# Source: http://nedbatchelder.com/blog/200712/human_sorting.html
# Author: Ned Batchelder
def tryint(s):
    """Return ``s`` converted to an int when possible, otherwise unchanged.

    Used by alphanum_key() so that numeric chunks of a string compare
    numerically instead of lexicographically.
    """
    try:
        return int(s)
    except (ValueError, TypeError):
        # Non-numeric (or non-string) input passes through untouched.
        # Narrowed from a bare ``except:`` which also swallowed
        # SystemExit/KeyboardInterrupt.
        return s
def alphanum_key(s):
    """Turn a string into a list of string and number chunks.

    "z23a" -> ["z", 23, "a"], so that lists of such keys sort in the
    human ("natural") order.
    """
    chunks = re.split('([0-9]+)', s)
    return [tryint(chunk) for chunk in chunks]
def sort_nicely(l):
    """Sort the given list in place, in the way that humans expect.

    Numeric runs inside the strings compare numerically ("a2" < "a10").
    """
    l[:] = sorted(l, key=alphanum_key)
def parse_title_num(title):
    """Return the integer after the final '#' in *title*, or 0 when absent/invalid."""
    tail = title.rsplit('#')[-1]
    try:
        return int(tail)
    except ValueError:
        # No trailing '#<number>' suffix -- treat as zero.
        return 0
def check_dupe_nic(request, system_id, adapter_number):
    """AJAX endpoint: does system *system_id* already have adapter *adapter_number*?

    Returns an HttpResponse whose body is the check result.

    Fix: ``found`` is now initialized before the try block, so a failed
    lookup (missing system, bad id) returns False instead of raising
    UnboundLocalError at the return statement.
    """
    found = False
    try:
        system = models.System.objects.get(id=system_id)
        found = system.check_for_adapter(adapter_number)
    except Exception:
        # Best-effort by design: any lookup failure reports "not found".
        pass
    return HttpResponse(found)
def check_dupe_nic_name(request, system_id, adapter_name):
    """AJAX endpoint: does system *system_id* already have an adapter named *adapter_name*?

    Returns an HttpResponse whose body is the check result.

    Fixes: the first parameter was misspelled ``requessdft`` (Django passes
    the request positionally, so renaming it is safe), and ``found`` is now
    initialized so a failed lookup returns False instead of raising
    UnboundLocalError.
    """
    found = False
    try:
        system = models.System.objects.get(id=system_id)
        found = system.check_for_adapter_name(adapter_name)
    except Exception:
        # Best-effort by design: any lookup failure reports "not found".
        pass
    return HttpResponse(found)
@allow_anyone
def system_rack_elevation(request, rack_id):
    """Render the rack-elevation page for *rack_id*.

    The rack's RU count, patch panels, and systems are serialized to JSON
    and handed to the template for client-side rendering.
    """
    rack = Rack(rack_id)
    elevation = {
        'rack_ru': rack.ru,
        'ethernet_patch_panels_24': rack.ethernet_patch_panel_24,
        'ethernet_patch_panels_48': rack.ethernet_patch_panel_48,
        'systems': rack.systems,
    }
    return render_to_response(
        'systems/rack_elevation.html',
        {'data': json.dumps(elevation)},
        RequestContext(request))
@allow_anyone
def system_auto_complete_ajax(request):
    """Return JSON hostname suggestions for the autocomplete widget.

    The payload echoes the query and provides parallel lists of matching
    hostnames ('suggestions') and their system ids ('data').
    """
    query = request.GET['query']
    matches = models.System.objects.filter(hostname__icontains=query)
    payload = {
        'query': query,
        'suggestions': [match.hostname for match in matches],
        'data': [match.id for match in matches],
    }
    return HttpResponse(json.dumps(payload))
@allow_anyone
def list_all_systems_ajax(request):
    """DataTables server-side endpoint: return one JSON page of systems.

    Speaks the classic DataTables 1.x request protocol (sEcho, iSortCol_0,
    sSortDir_0, sSearch, iDisplayStart, iDisplayLength) and delegates row
    serialization to build_json().
    """
    #iSortCol_0 = which column is sorted
    #sSortDir_0 = which direction
    # Column order must match the table definition on the client side.
    cols = ['hostname','serial','asset_tag','server_model','system_rack', 'oob_ip', 'system_status']
    sort_col = cols[0]
    if 'iSortCol_0' in request.GET:
        sort_col = cols[int(request.GET['iSortCol_0'])]
    sort_dir = 'asc'
    if 'sSortDir_0' in request.GET:
        sort_dir = request.GET['sSortDir_0']
    if 'sEcho' in request.GET:
        # sEcho is echoed back verbatim so the client can match responses.
        # NOTE(review): sEcho stays unbound when the parameter is absent,
        # which would raise NameError below -- presumably DataTables always
        # sends it; confirm.
        sEcho = request.GET['sEcho']
    # The "> ''" comparisons treat an empty parameter the same as a missing one.
    if 'sSearch' in request.GET and request.GET['sSearch'] > '':
        search_term = request.GET['sSearch']
    else:
        search_term = None
    if 'iDisplayLength' in request.GET and request.GET['iDisplayLength'] > '':
        iDisplayLength = request.GET['iDisplayLength']
    else:
        iDisplayLength = 100
    if 'iDisplayStart' in request.GET and request.GET['iDisplayStart'] > '':
        iDisplayStart = request.GET['iDisplayStart']
    else:
        iDisplayStart = 0
    if search_term is None:
        # No filter: page straight through the whole table.
        end_display = int(iDisplayStart) + int(iDisplayLength)
        system_count = models.System.objects.all().count()
        systems = models.System.objects.all()[iDisplayStart:end_display]
        the_data = build_json(request, systems, sEcho, system_count, iDisplayLength, sort_col, sort_dir)
    if search_term is not None and len(search_term) > 0:
        # A leading '/' lets the user supply a regex for the hostname match.
        if search_term.startswith('/') and len(search_term) > 1:
            try:
                search_term = search_term[1:]
                search_q = Q(hostname__regex=search_term)
            except:
                # Fall back to a plain substring match on a bad pattern.
                search_q = Q(hostname__icontains=search_term)
        else:
            search_q = Q(hostname__icontains=search_term)
        search_q |= Q(serial__icontains=search_term)
        search_q |= Q(notes__icontains=search_term)
        search_q |= Q(asset_tag=search_term)
        search_q |= Q(oob_ip__icontains=search_term)
        search_q |= Q(keyvalue__value__icontains=search_term)
        try:
            # distinct() guards against duplicate rows from the keyvalue join.
            total_count = models.System.with_related.filter(search_q).values('hostname').distinct().count()
        except:
            total_count = 0
        end_display = int(iDisplayStart) + int(iDisplayLength)
        try:
            systems = models.System.objects.filter(
                pk__in=models.System.with_related.filter(search_q).values_list('id', flat=True).distinct()
            )[iDisplayStart:end_display]
            the_data = build_json(request, systems, sEcho, total_count, iDisplayLength, sort_col, sort_dir)
        except:
            # On any query failure, return an empty but well-formed payload.
            the_data = '{"sEcho": %s, "iTotalRecords":0, "iTotalDisplayRecords":0, "aaData":[]}' % (sEcho)
    return HttpResponse(the_data)
def build_json(request, systems, sEcho, total_records, display_count, sort_col, sort_dir):
    """Serialize *systems* into a DataTables 1.x JSON payload.

    The JSON string is assembled by hand rather than via json.dumps.

    :param systems: iterable of System model rows to render.
    :param sEcho: DataTables request token, echoed back verbatim.
    :param total_records: total row count reported to the client.
    :param display_count: maximum number of rows to include in this page.
    :param sort_col: dict key of the column to sort by (e.g. 'hostname').
    :param sort_dir: 'asc' or 'desc'.
    """
    system_list = []
    for system in systems:
        # Normalize every nullable field to a display-ready string.
        if system.serial is not None:
            serial = system.serial.strip()
        else:
            serial = ''
        if system.server_model is not None:
            server_model = str(system.server_model)
        else:
            server_model = ''
        if system.system_rack is not None:
            system_rack = "%s - %s" % (str(system.system_rack), system.rack_order)
            system_rack_id = str(system.system_rack.id)
        else:
            system_rack = ''
            system_rack_id = ''
        if system.system_status is not None:
            system_status = str(system.system_status)
        else:
            system_status = ''
        if system.asset_tag is not None:
            asset_tag = system.asset_tag.strip()
        else:
            asset_tag = ''
        if system.oob_ip is not None:
            oob_ip = system.oob_ip.strip()
        else:
            oob_ip = ''
        ro = getattr(request, 'read_only', False)
        if ro:
            # Read-only sessions get id 0 so the client hides edit links.
            system_id = 0
        else:
            system_id = system.id
        system_list.append({'hostname': system.hostname.strip(), 'oob_ip': oob_ip, 'serial': serial, 'asset_tag': asset_tag, 'server_model': server_model,
            'system_rack':system_rack, 'system_status':system_status, 'id':system_id, 'system_rack_id': system_rack_id})
    # Default: empty but well-formed response (returned when no rows matched).
    the_data = '{"sEcho": %s, "iTotalRecords":0, "iTotalDisplayRecords":0, "aaData":[]}' % (sEcho)
    #try:
    if len(system_list) > 0:
        # Natural sort on the requested column ("a2" before "a10").
        system_list.sort(key=lambda x: alphanum_key(x[sort_col]))
        if sort_dir == 'desc':
            #system_list = system_list.reverse()
            system_list.reverse()
        #the_data = '{"sEcho": %s, "iTotalRecords":%i, "iTotalDisplayRecords":%s, "aaData":[' % (sEcho, total_records, display_count)
        the_data = '{"sEcho": %s, "iTotalRecords":%i, "iTotalDisplayRecords":%i, "aaData":[' % (sEcho, total_records, total_records)
        #sort_nicely(system_list)
        counter = 0
        for system in system_list:
            # NOTE(review): display_count can arrive as a str (from
            # request.GET); under Python 2 ``int < str`` is always True,
            # so the page-size cap may never kick in here -- confirm.
            if counter < display_count:
                # Each row is a JSON array; hostname cell is "<id>,<hostname>".
                the_data += '["%i,%s","%s","%s","%s","%s,%s", "%s", "%s", "%i"],' % (system['id'],system['hostname'], system['serial'],system['asset_tag'],system['server_model'],system['system_rack_id'], system['system_rack'], system['oob_ip'], system['system_status'], system['id'])
                counter += 1
            else:
                counter = display_count
        # Drop the trailing comma and close the JSON structure.
        the_data = the_data[:-1]
        the_data += ']}'
    #except:
        pass
    return the_data
#@ldap_group_required('build')
#@LdapGroupRequired('build_team', exclusive=False)
@allow_anyone
def home(request):
    """Render the systems landing page."""
    context = {
        'read_only': getattr(request, 'read_only', False),
    }
    return render_to_response('systems/index.html', context)
@allow_anyone
def system_quicksearch_ajax(request):
    """Search systems by hostname/serial/notes/asset tag.

    POSTs carrying ``is_test`` receive a JSON serialisation; everything
    else gets the rendered quicksearch partial.
    """
    term = request.POST['quicksearch']
    query = (
        Q(hostname__icontains=term) |
        Q(serial__contains=term) |
        Q(notes__contains=term) |
        Q(asset_tag=term)
    )
    systems = models.System.with_related.filter(query).order_by('hostname')
    if 'is_test' in request.POST:
        from django.core import serializers
        return HttpResponse(serializers.serialize("json", systems))
    return render_to_response(
        'systems/quicksearch.html',
        {
            'systems': systems,
            'read_only': getattr(request, 'read_only', False),
        },
        RequestContext(request))
def get_key_value_store(request, id):
    """Render the key/value table for one system."""
    target = models.System.objects.get(id=id)
    pairs = models.KeyValue.objects.filter(obj=target)
    return render_to_response(
        'systems/key_value_store.html',
        {'key_value_store': pairs},
        RequestContext(request))
def delete_key_value(request, id, system_id):
    """Delete KeyValue *id* and re-render system *system_id*'s key/value table.

    If the deleted key belongs to a network adapter (``nic.N.*``) and
    that adapter has a DHCP scope, a DHCP rebuild task is scheduled so
    the generated config drops the stale entry.
    """
    kv = models.KeyValue.objects.get(id=id)
    # Raw string avoids the invalid '\d' escape in a plain literal.
    matches = re.search(r'^nic\.(\d+)', str(kv.key))
    if matches:
        try:
            existing_dhcp_scope = models.KeyValue.objects.filter(
                obj=kv.system).filter(
                key='nic.%s.dhcp_scope.0' % matches.group(1))[0].value
            models.ScheduledTask(task=existing_dhcp_scope, type='dhcp').save()
        except Exception:
            # Best effort: missing dhcp_scope key (IndexError) or a
            # duplicate ScheduledTask means nothing to schedule.
            pass
    kv.delete()
    system = models.System.objects.get(id=system_id)
    key_value_store = models.KeyValue.objects.filter(obj=system)
    return render_to_response(
        'systems/key_value_store.html',
        {'key_value_store': key_value_store},
        RequestContext(request))
@csrf_exempt
def save_key_value(request, id):
    """Update KeyValue *id* from POSTed ``key``/``value``; return JSON status.

    For ``nic.N.*`` keys, DHCP / reverse-DNS rebuild tasks are scheduled
    for both the old and the new key so generated zone files pick up the
    change.  Responds with ``{'success': bool, 'errorMessage': str}``.
    """
    system_id = None
    validated = True
    resp = {'success': True, 'errorMessage' : ''}
    post_key = request.POST.get('key').strip()
    post_value = request.POST.get('value').strip()
    """
    Create the key value acl object.
    We can use it to validate based on criteria below
    """
    try:
        tmp = models.KeyValue.objects.get(id=id)
        system = tmp.system
    except Exception, e:
        # Best effort: if the row is missing, `system` stays unset and
        # the ACL check below would fail -- NOTE(review): confirm callers
        # always pass a valid id.
        print e
        pass
    acl = KeyValueACL(request)
    if post_key == 'shouldfailvalidation':
        # Magic key that forces the failure path -- presumably used by
        # tests; confirm.
        resp['success'] = False
        resp['errorMessage'] = 'Validation Failed'
        validated = False
    kv = models.KeyValue.objects.get(id=id)
    if kv is not None and validated:
        ##Here we want to check if the existing key is a network adapter. If so we want to find out if it has a dhcp scope. If so then we want to add it to ScheduledTasks so that the dhcp file gets regenerated
        matches = re.search('^nic\.(\d+)', str(kv.key).strip() )
        """
        Check to see if we have a network adapter
        If so we need to flag the dhcp zone file to be regenerated
        """
        if matches and matches.group(1):
            """
            Check to see if it's an ipv4_address key
            run KeyValueACL.check_ip_not_exist_other_system
            """
            if re.search('^nic\.(\d+)\.ipv4_address', str(post_key).strip() ):
                try:
                    acl.check_ip_not_exist_other_system(system, post_value)
                except Exception, e:
                    # IP already registered on another system: reject.
                    resp['success'] = False
                    resp['errorMessage'] = str(e)
                    return HttpResponse(json.dumps(resp))
            try:
                existing_dhcp_scope = models.KeyValue.objects.filter(obj=kv.system).filter(key='nic.%s.dhcp_scope.0' % matches.group(1))[0].value
                if existing_dhcp_scope is not None:
                    models.ScheduledTask(task=existing_dhcp_scope, type='dhcp').save()
            except Exception, e:
                # No dhcp_scope key for this adapter (IndexError) -- skip.
                pass
            try:
                existing_reverse_dns_zone = models.KeyValue.objects.filter(obj=kv.system).filter(key='nic.%s.reverse_dns_zone.0' % matches.group(1))[0].value
                if existing_reverse_dns_zone is not None:
                    models.ScheduledTask(task=existing_reverse_dns_zone, type='reverse_dns_zone').save()
            except Exception, e:
                pass
        try:
            kv.key = request.POST.get('key').strip()
            kv.value = request.POST.get('value').strip()
            system_id = str(kv.system_id)
            kv.save()
        except:
            # Save failed; clear the fields so the rebuild block below
            # is skipped.
            kv.key = None
            kv.value = None
        ##Here we want to check if the new key is a network adapter. If so we want to find out if it has a dhcp scope. If so then we want to add it to ScheduledTasks so that the dhcp file gets regenerated
        if kv.key is not None:
            matches = re.search('nic\.(\d+)', kv.key)
            if matches and matches.group(1):
                new_dhcp_scope = None
                new_reverse_dns_zone = None
                try:
                    new_dhcp_scope = models.KeyValue.objects.filter(obj=kv.system).filter(key='nic.%s.dhcp_scope.0' % matches.group(1))[0].value
                except Exception, e:
                    pass
                try:
                    new_reverse_dns_zone = models.KeyValue.objects.filter(obj=kv.system).filter(key='nic.%s.reverse_dns_zone.0' % matches.group(1))[0].value
                except Exception, e:
                    pass
                if new_dhcp_scope is not None:
                    try:
                        models.ScheduledTask(task=new_dhcp_scope, type='dhcp').save()
                    except Exception, e:
                        print e
                        ##This is due to the key already existing in the db
                        pass
                if new_reverse_dns_zone is not None:
                    try:
                        models.ScheduledTask(task=new_reverse_dns_zone, type='reverse_dns_zone').save()
                    except Exception ,e:
                        print e
                        ##This is due to the key already existing in the db
                        pass
    return HttpResponse(json.dumps(resp));
    #return HttpResponseRedirect('/en-US/systems/get_key_value_store/' + system_id + '/')
@csrf_exempt
def create_key_value(request, id):
    """Create a KeyValue row for system *id* from POSTed ``key``/``value``.

    Missing fields default to the literal string 'None'.  If the new key
    names a network adapter with a DHCP scope, a DHCP rebuild task is
    scheduled.  Re-renders the system's key/value table.
    """
    system = models.System.objects.get(id=id)
    key = 'None'
    value = 'None'
    if 'key' in request.POST:
        key = request.POST['key'].strip()
    if 'value' in request.POST:
        value = request.POST['value'].strip()
    kv = models.KeyValue(obj=system,key=key,value=value)
    print "Key is %s: Value is %s." % (key, value)
    kv.save();
    matches = re.search('^nic\.(\d+)', str(kv.key) )
    if matches:
        try:
            existing_dhcp_scope = models.KeyValue.objects.filter(obj=kv.system).filter(key='nic.%s.dhcp_scope.0' % matches.group(1))[0].value
            models.ScheduledTask(task=existing_dhcp_scope, type='dhcp').save()
        except:
            # Best effort: no dhcp_scope key, or the task already exists.
            pass
    key_value_store = models.KeyValue.objects.filter(obj=system)
    return render_to_response('systems/key_value_store.html', {
        'key_value_store': key_value_store,
        },
        RequestContext(request))
def get_network_adapters(request, id):
    """Render the network-adapter table for system *id*."""
    adapters = models.NetworkAdapter.objects.filter(system_id=id)
    context = {
        'adapters': adapters,
        'switches': models.System.objects.filter(is_switch=1),
        'dhcp_scopes': models.DHCP.objects.all()
    }
    return render_to_response('systems/network_adapters.html', context,
                              RequestContext(request))
def delete_network_adapter(request, id, system_id):
    """Delete one NetworkAdapter and re-render the system's adapter table."""
    models.NetworkAdapter.objects.get(id=id).delete()
    remaining = models.NetworkAdapter.objects.filter(system_id=system_id)
    return render_to_response(
        'systems/network_adapters.html',
        {
            'adapters': remaining,
            'dhcp_scopes': models.DHCP.objects.all(),
            'switches': models.System.objects.filter(is_switch=1)
        },
        RequestContext(request))
def create_network_adapter(request, id):
    """Attach a blank NetworkAdapter to system *id* and re-render the table."""
    models.NetworkAdapter(system_id=id).save()
    adapters = models.NetworkAdapter.objects.filter(system_id=id)
    return render_to_response(
        'systems/network_adapters.html',
        {
            'adapters': adapters,
            'dhcp_scopes': models.DHCP.objects.all(),
            'switches': models.System.objects.filter(is_switch=1)
        },
        RequestContext(request))
def save_network_adapter(request, id):
    """Persist POSTed edits to NetworkAdapter *id* and redirect back.

    The MAC address is normalised: ':', ' ' and '.' separators are
    stripped, then colons re-inserted every two hex digits.  (Removed an
    unused local ``import re`` from the original.)
    """
    nic = models.NetworkAdapter.objects.get(id=id)
    if nic is not None:
        raw_mac = request.POST['mac_address']
        for sep in (':', ' ', '.'):
            raw_mac = raw_mac.replace(sep, '')
        # aabbccddeeff -> aa:bb:cc:dd:ee:ff (always six groups, matching
        # the original slice-by-slice rebuild).
        nic.mac_address = ':'.join(raw_mac[i:i + 2] for i in range(0, 12, 2))
        nic.dhcp_scope_id = request.POST['dhcp_scope_id']
        nic.ip_address = request.POST['ip_address']
        nic.filename = request.POST['filename']
        nic.option_host_name = request.POST['option_host_name']
        nic.option_domain_name = request.POST['option_domain_name']
        nic.adapter_name = request.POST['adapter_name']
        # An empty switch selection clears the association.
        if request.POST['switch_id']:
            nic.switch_id = request.POST['switch_id']
        else:
            nic.switch_id = None
        nic.switch_port = request.POST['switch_port']
        nic.save()
    return HttpResponseRedirect('/systems/get_network_adapters/' + id)
def sync_external_data_ajax(request):
    """Copy one externally-sourced attribute value onto a system.

    Expects POST fields ``attr``, ``source`` and ``system_pk``; returns
    JSON with the new value and whether a conflict had been recorded,
    or a 400 JSON error for bad input.
    """
    attr = request.POST.get('attr', None)
    source = request.POST.get('source', None)
    system_pk = request.POST.get('system_pk', None)
    if not (attr and source and system_pk):
        return HttpResponse(json.dumps({
            'error': "attr, source, and system_pk are required"
        }), status=400)
    system = get_object_or_404(models.System, pk=system_pk)
    if not hasattr(system, attr):
        return HttpResponse(json.dumps({
            'error': "System has no attribute {0}".format(attr)
        }), status=400)
    try:
        external_row = system.externaldata_set.get(source=source, name=attr)
    except system.externaldata_set.model.DoesNotExist:
        return HttpResponse(json.dumps({
            'error': "System {0} has no external attribute '{1}' for source "
            "'{2}'".format(system.hostname, attr, source)
        }), status=400)
    conflict_seen = system.external_data_conflict(attr)
    current = getattr(system, attr)
    # Preserve the 'ssh ' prefix convention for out-of-band addresses.
    if attr == 'oob_ip' and current.strip().startswith('ssh'):
        new_value = 'ssh ' + external_row.data
    else:
        new_value = external_row.data
    setattr(system, attr, new_value)
    system.save(request=request)
    return HttpResponse(json.dumps({
        'conflict-seen': conflict_seen,
        'new-value': new_value
    }))
@allow_anyone
def system_show(request, id):
    """Render the full detail page for system *id*.

    Collects network adapters (via the key/value API), key/value rows,
    static registrations, hardware-adapter formsets and an invtool
    search expression covering the system and its registrations.

    Fixes from review: the view's ``request`` was previously clobbered
    by a RequestFactory request (so ``read_only`` was always False and
    ``render`` received a synthetic request), and the release check used
    ``is 'release'`` which can never match an Allocation instance.
    """
    system = get_object_or_404(models.System, pk=id)
    if system.notes:
        # Notes are plain text; convert newlines for HTML display.
        system.notes = system.notes.replace("\n", "<br />")
    show_nics_in_key_value = False
    try:
        # Use a dedicated synthetic request for the key/value API call
        # instead of rebinding `request`.
        kv_request = factory.get(
            '/api/v2/keyvalue/3/',
            {'key_type': 'adapters_by_system', 'system': system.hostname}
        )
        h = KeyValueHandler()
        adapters = h.read(kv_request, key_value_id='3')
    except Exception:
        adapters = []
    # NOTE(review): comparing by str() assumes Allocation's string form
    # is the allocation name -- confirm.
    is_release = str(system.allocation) == 'release'
    if (system.serial and
            system.server_model and
            system.server_model.part_number and
            system.server_model.vendor == "HP"):
        # HP gear links straight to the vendor warranty lookup.
        system.warranty_link = "http://www11.itrc.hp.com/service/ewarranty/warrantyResults.do?productNumber=%s&serialNumber1=%s&country=US" % (system.server_model.part_number, system.serial)  # noqa
    if show_nics_in_key_value:
        key_values = system.keyvalue_set.all()
    else:
        # nic.* keys are rendered by the adapters table, not the KV table.
        key_values = system.keyvalue_set.exclude(key__istartswith='nic.')
    sregs = StaticReg.objects.filter(system=system)
    groups = Group.objects.all()
    sreg_form = StaticRegAutoForm(prefix='sreg', initial={
        'system': system,
        'fqdn': system.hostname
    })
    blank_hw_form = HWAdapterForm(prefix='add-hw')  # noqa Used for ui dialog for creation
    HWAdapterFormset = formset_factory(HWAdapterForm)
    hw_formset = HWAdapterFormset(prefix='hwadapters')
    # invtool search expression matching this system plus all of its
    # non-decommissioned static registrations (by fqdn and ip).
    object_search_str = "(/^{0}$".format(system)
    for sreg in filter(lambda sreg: not sreg.decommissioned, sregs):
        object_search_str += " OR /^{0}$".format(sreg.fqdn)
        object_search_str += " OR /^{0}$".format(sreg.ip_str)
    object_search_str += " ) AND !type=:sreg AND !type=:sys"
    return render(request, 'systems/system_show.html', {
        'system': system,
        'object_search_str': object_search_str,
        'sregs': sregs,
        'sreg_form': sreg_form,
        'hw_formset': hw_formset,
        'blank_hw_form': blank_hw_form,
        'groups': groups,
        'ip_to_range': ip_to_range,
        'adapters': adapters,
        'key_values': key_values,
        'is_release': is_release,
        'extra_externaldata': system.externaldata_set.filter(policy=P_EXTRA),
        'external_config': external_config,
        'read_only': getattr(request, 'read_only', False),
    })
@allow_anyone
def system_show_by_asset_tag(request, id):
    """Render the system detail page looked up by asset tag.

    Fixes from review: the release check used ``is 'release'`` (identity
    against a string literal, never true for an Allocation instance) and
    the computed flag was then ignored in favour of a hard-coded True.
    """
    system = get_object_or_404(models.System, asset_tag=id)
    # NOTE(review): comparing by str() assumes Allocation's string form
    # is the allocation name -- confirm.  Previously is_release was
    # effectively always True for this view.
    is_release = str(system.allocation) == 'release'
    if (system.serial and
            system.server_model and
            system.server_model.part_number and
            system.server_model.vendor == "HP"):
        # HP gear links straight to the vendor warranty lookup.
        system.warranty_link = "http://www11.itrc.hp.com/service/ewarranty/warrantyResults.do?productNumber=%s&serialNumber1=%s&country=US" % (system.server_model.part_number, system.serial)
    return render_to_response('systems/system_show.html', {
        'system': system,
        'is_release': is_release,
        'read_only': getattr(request, 'read_only', False),
        },
        RequestContext(request))
def system_view(request, template, data, instance=None):
    """Shared create/edit handler backing system_new and system_edit.

    Valid POSTs save the system (recording the acting request) and
    redirect to its detail page; anything else renders *template* with
    the (possibly error-bound) form added to *data*.
    """
    if request.method == 'POST':
        form = SystemForm(request.POST, instance=instance)
        if form.is_valid():
            saved = form.save(commit=False)
            saved.save(request=request)
            return redirect(system_show, saved.pk)
    else:
        form = SystemForm(instance=instance)
    data['form'] = form
    return render_to_response(template, data, request)
@csrf_exempt
def system_new(request):
    """Create a new system via the shared system_view handler."""
    return system_view(request, 'systems/system_new.html', {})
@csrf_exempt
def system_edit(request, id):
    """Edit system *id* via the shared system_view handler.

    Also fetches DHCP scope names for the edit form; failures are
    tolerated and leave dhcp_scopes as None.
    """
    system = get_object_or_404(models.System, pk=id)
    dhcp_scopes = None
    try:
        h = DHCPHandler()
        # NOTE(review): 'phx-vlan73' looks like a placeholder argument for
        # the get_scopes_with_names action -- confirm it is intentional.
        dhcp_scopes = h.read(request, dhcp_scope='phx-vlan73', dhcp_action='get_scopes_with_names')
    except Exception, e:
        print e
        pass
    return system_view(request, 'systems/system_edit.html', {
        'system': system,
        'dhcp_scopes':dhcp_scopes,
        'revision_history':models.SystemChangeLog.objects.filter(system=system).order_by('-id')
        },
        system
    )
def system_delete(request, id):
    """Delete system *id*, refusing while key/value rows still exist.

    Renders a generic output page describing the failure, or redirects
    to the home page on success.
    """
    system = get_object_or_404(models.System, pk=id)
    try:
        kv_length = len(system.keyvalue_set.all())
    except AttributeError:
        # System has no keyvalue_set relation; treat as no entries.
        kv_length = 0
    if kv_length == 0:
        try:
            system.delete()
        except IntegrityError, e:
            # NOTE(review): e_str is assigned but never used; the message
            # shown to the user comes from the exception itself.
            e_str = "Key/Value store exists"
            content = "Unable to Delete system: {message}".format(message=e)
            return render_to_response(
                'systems/generic_output.html',
                {
                    'system': system,
                    'content': content,
                },
                RequestContext(request))
    elif kv_length > 0:
        # Point the user at the KV admin page so they can clear entries.
        link = '/core/keyvalue/keyvalue/{id}'.format(id=system.id)
        content = """Unable to Delete system. <br />
        Please <a href="{link}">Delete Key/Value Entries</a>
        """.format(link=link)
        return render_to_response(
            'systems/generic_output.html',
            {
                'system': system,
                'content': content,
            },
            RequestContext(request))
    return redirect(home)
def system_csv(request):
    """Stream every system as a CSV attachment."""
    response = HttpResponse(mimetype='text/csv')
    response['Content-Disposition'] = 'attachment; filename=systems.csv'
    writer = csv.writer(response)
    writer.writerow(['Host Name', 'Serial', 'Asset Tag', 'Model', 'Allocation', 'Rack', 'Switch Ports', 'OOB IP'])
    for s in models.System.objects.all().order_by('hostname'):
        try:
            writer.writerow([s.hostname, s.serial, s.asset_tag, s.server_model, s.allocation, s.system_rack, s.switch_ports, s.oob_ip])
        except:
            # Fall back to a blank allocation if the row cannot be written.
            writer.writerow([s.hostname, s.serial, s.asset_tag, s.server_model, '', s.system_rack, s.switch_ports, s.oob_ip])
    return response
def system_releng_csv(request):
    """Export releng systems (allocation id 2) as a CSV attachment."""
    response = HttpResponse(mimetype='text/csv')
    response['Content-Disposition'] = 'attachment; filename=systems.csv'
    writer = csv.writer(response)
    writer.writerow(['id','hostname', 'switch_ports', 'oob_ip', 'system_rack', 'asset_tag', 'operating_system', 'rack_order'])
    for box in models.System.objects.filter(allocation=2).order_by('hostname'):
        writer.writerow([box.id, box.hostname, box.switch_ports, box.oob_ip,
                         box.system_rack, box.asset_tag, box.operating_system,
                         box.rack_order])
    return response
def get_expanded_key_value_store(request, system_id):
    """Return the key/value API payload for a system as crude HTML.

    Any failure yields the literal body 'This failed'.
    """
    try:
        system = models.System.objects.get(id=system_id)
        # NOTE(review): this first synthetic request is immediately
        # overwritten by the one below and never used -- confirm it can
        # be removed.
        request = factory.get('/api/v2/keyvalue/3/',
            {'key_type':'adapters_by_system','system':system.hostname})
        h = KeyValueHandler()
        request = factory.get('/api/keyvalue/?keystore=%s' % (system.hostname), follow=True)
        resp = json.dumps(h.read(request, key_value_id='3'))
        # Insert a line break after each comma for readability.
        return_obj = resp.replace(",",",<br />")
    except:
        return_obj = 'This failed'
    return HttpResponse(return_obj)
def new_rack_system_ajax(request, rack_id):
    """Create a system inside rack *rack_id* via AJAX.

    Returns JSON ``{'success': bool, 'payload': html}``.  On a valid
    POST the payload is the rendered row for the new system; otherwise
    it is the (possibly error-annotated) form partial.
    """
    from forms import RackSystemForm
    rack = get_object_or_404(models.SystemRack, pk=rack_id)
    context = {}
    reply = {}
    template = 'systems/rack_form_partial.html'
    if request.method == 'POST':
        rack_form = RackSystemForm(request.POST)
        if rack_form.is_valid():
            new_system = rack_form.save(commit=False)
            new_system.system_rack = rack
            new_system.save()
            context['system'] = new_system
            reply['success'] = True
            template = 'systems/rack_row_partial.html'
        else:
            reply['success'] = False
    else:
        rack_form = RackSystemForm()
    context['form'] = rack_form
    context['rack'] = rack
    reply['payload'] = render_to_string(template, context, RequestContext(request)).strip(' ')
    return HttpResponse(json.dumps(reply), mimetype="application/json")
@allow_anyone
def racks_by_site(request, site_pk=0):
    """Return rack names/ids as JSON, optionally limited to one site."""
    if int(site_pk) > 0:
        site = Site.objects.get(id=site_pk)
        rack_qs = models.SystemRack.objects.select_related('site').filter(site=site).order_by('name')
    else:
        rack_qs = models.SystemRack.objects.select_related('site').order_by('site', 'name')
    payload = [
        {'name': '%s %s' % (rack.site.full_name if rack.site else '', rack.name),
         'id': rack.id}
        for rack in rack_qs
    ]
    return HttpResponse(json.dumps(payload))
@allow_anyone
def racks(request):
    """Rack overview page with site/rack/allocation/status filtering.

    Racks are only listed once at least one filter has been applied
    (has_query); each rack is paired with its filtered, rack-ordered
    system list.
    """
    from forms import RackFilterForm
    filter_form = RackFilterForm(request.GET)
    racks = models.SystemRack.objects.select_related('site')
    system_query = Q()
    if 'site' in request.GET:
        site_id = request.GET['site']
        has_query = True
        if len(site_id) > 0 and int(site_id) > 0:
            site = Site.objects.get(id=site_id)
            # Restrict the rack dropdown to racks in the selected site.
            filter_form.fields['rack'].choices = [('','ALL')] + [
                (m.id, m.site.full_name + ' ' + m.name)
                for m in models.SystemRack.objects.filter(site=site).order_by('name')
            ]
    else:
        has_query = False
    if filter_form.is_valid():
        if filter_form.cleaned_data['rack']:
            racks = racks.filter(id=filter_form.cleaned_data['rack'])
            has_query = True
        if filter_form.cleaned_data['site'] and int(filter_form.cleaned_data['site']) > 0:
            racks = racks.filter(site=filter_form.cleaned_data['site'])
            has_query = True
        if filter_form.cleaned_data['allocation']:
            system_query = system_query & Q(allocation=filter_form.cleaned_data['allocation'])
            has_query = True
        filter_status = filter_form.cleaned_data['status']
        if filter_status:
            system_query &= Q(system_status=filter_form.cleaned_data['status'])
            has_query = True
        if not filter_form.cleaned_data['show_decommissioned']:
            # Hide decommissioned systems unless explicitly requested.
            decommissioned = models.SystemStatus.objects.get(status='decommissioned')
            system_query = system_query & ~Q(system_status=decommissioned)
    ##Here we create an object to hold decommissioned systems for the following filter
    if not has_query:
        racks = []
    else:
        # Pair each rack with its filtered, rack-ordered system list.
        racks = [(k, list(k.system_set.select_related(
            'server_model',
            'allocation',
            'system_status',
        ).filter(system_query).order_by('rack_order'))) for k in racks]
    return render_to_response('systems/racks.html', {
        'racks': racks,
        'filter_form': filter_form,
        'read_only': getattr(request, 'read_only', False),
        },
        RequestContext(request))
def rack_delete(request, object_id):
    """Confirm (GET) and perform (POST) deletion of a SystemRack."""
    from models import SystemRack
    rack = get_object_or_404(SystemRack, pk=object_id)
    if request.method != "POST":
        return render_to_response(
            'systems/rack_confirm_delete.html',
            {'rack': rack},
            RequestContext(request))
    rack.delete()
    return HttpResponseRedirect('/systems/racks/')
def rack_edit(request, object_id):
    """Edit SystemRack *object_id* with SystemRackForm.

    Valid POSTs save and redirect to the rack list; otherwise the
    (possibly error-bound) form is re-rendered.  Removed an unused
    ``initial = {}`` from the original.
    """
    rack = get_object_or_404(models.SystemRack, pk=object_id)
    from forms import SystemRackForm
    if request.method == 'POST':
        form = SystemRackForm(request.POST, instance=rack)
        if form.is_valid():
            form.save()
            return HttpResponseRedirect('/systems/racks/')
    else:
        form = SystemRackForm(instance=rack)
    return render_to_response(
        'systems/generic_form.html',
        {
            'form': form,
        },
        RequestContext(request))
def rack_new(request):
    """Create a new SystemRack; valid POSTs redirect to the rack list."""
    from forms import SystemRackForm
    seed = {}
    if request.method != 'POST':
        form = SystemRackForm(initial=seed)
    else:
        form = SystemRackForm(request.POST, initial=seed)
        if form.is_valid():
            form.save()
            return HttpResponseRedirect('/systems/racks/')
    return render_to_response(
        'generic_form.html',
        {'form': form},
        RequestContext(request))
def ajax_racks_by_site(request, site_pk):
    """Render the per-site rack partial, hiding decommissioned systems."""
    site = get_object_or_404(Site, pk=site_pk)
    decom_status = SystemStatus.objects.get(status='decommissioned')
    def filter_decom(system_Q):
        # Template helper: drop decommissioned systems from a queryset.
        return system_Q.exclude(system_status=decom_status)
    context = {
        'racks': site.systemrack_set.all(),
        'site': site,
        'systems': System.objects,
        'filter_decom': filter_decom
    }
    return render(request, 'systems/rack_ajax_by_site.html', context)
def server_model_edit(request, object_id):
    """Edit ServerModel *object_id* with ServerModelForm.

    Valid POSTs save and redirect to the server-model list.  Removed an
    unused ``initial = {}`` from the original.
    """
    server_model = get_object_or_404(models.ServerModel, pk=object_id)
    from forms import ServerModelForm
    if request.method == 'POST':
        form = ServerModelForm(request.POST, instance=server_model)
        if form.is_valid():
            form.save()
            return HttpResponseRedirect('/systems/server_models/')
    else:
        form = ServerModelForm(instance=server_model)
    return render_to_response(
        'generic_form.html',
        {
            'form': form,
        },
        RequestContext(request))
@csrf_exempt
def operating_system_create_ajax(request):
    """Create an OperatingSystem from POSTed name/version, return JSON list.

    Non-POST requests get the literal body "OK" (unchanged behaviour).
    Bug fixed: a POST missing ``name``/``version`` previously fell
    through and returned None, which Django turns into a server error.
    """
    if request.method != "POST":
        return HttpResponse("OK")
    if 'name' in request.POST and 'version' in request.POST:
        models.OperatingSystem(
            name=request.POST['name'],
            version=request.POST['version'],
        ).save()
    # Always respond with the current list, even when nothing was created.
    return operating_system_list_ajax(request)
@csrf_exempt
def server_model_create_ajax(request):
    """Create a ServerModel from POSTed model/vendor, return JSON list.

    Non-POST requests get the literal body "OK" (unchanged behaviour).
    Bug fixed: a POST missing ``model``/``vendor`` previously fell
    through and returned None, which Django turns into a server error.
    """
    if request.method != "POST":
        return HttpResponse("OK")
    if 'model' in request.POST and 'vendor' in request.POST:
        models.ServerModel(
            vendor=request.POST['vendor'],
            model=request.POST['model'],
        ).save()
    # Always respond with the current list, even when nothing was created.
    return server_model_list_ajax(request)
def operating_system_list_ajax(request):
    """Return every operating system as JSON [{'id', 'name'}]."""
    payload = [
        {'id': os_row.id, 'name': "%s - %s" % (os_row.name, os_row.version)}
        for os_row in models.OperatingSystem.objects.all()
    ]
    return HttpResponse(json.dumps(payload))
def server_model_list_ajax(request):
    """Return every server model as JSON [{'id', 'name'}]."""
    payload = [
        {'id': sm.id, 'name': "%s - %s" % (sm.vendor, sm.model)}
        for sm in models.ServerModel.objects.all()
    ]
    return HttpResponse(json.dumps(payload))
def server_model_show(request, object_id):
    """Show the detail page for one ServerModel.

    Renamed the local from ``object`` (shadowed the builtin); the
    template context key stays 'object' for compatibility.
    """
    server_model = get_object_or_404(models.ServerModel, pk=object_id)
    return render_to_response(
        'systems/servermodel_detail.html',
        {
            'object': server_model,
        },
        RequestContext(request))
def server_model_list(request):
    """List every server model."""
    return render_to_response(
        'systems/servermodel_list.html',
        {'object_list': models.ServerModel.objects.all()},
        RequestContext(request))
def allocation_show(request, object_id):
    """Show the detail page for one Allocation.

    Renamed the local from ``object`` (shadowed the builtin); the
    template context key stays 'object' for compatibility.
    """
    allocation = get_object_or_404(models.Allocation, pk=object_id)
    return render_to_response(
        'systems/allocation_detail.html',
        {
            'object': allocation,
        },
        RequestContext(request))
def allocation_edit(request, object_id):
    """Edit Allocation *object_id* with AllocationForm.

    Valid POSTs save and redirect to the allocation list.  Removed an
    unused ``initial = {}`` from the original.
    """
    allocation = get_object_or_404(models.Allocation, pk=object_id)
    from forms import AllocationForm
    if request.method == 'POST':
        form = AllocationForm(request.POST, instance=allocation)
        if form.is_valid():
            form.save()
            return HttpResponseRedirect('/systems/allocations/')
    else:
        form = AllocationForm(instance=allocation)
    return render_to_response(
        'generic_form.html',
        {
            'form': form,
        },
        RequestContext(request))
def allocation_list(request):
    """List every allocation."""
    return render_to_response(
        'systems/allocation_list.html',
        {'object_list': models.Allocation.objects.all()},
        RequestContext(request))
def allocation_new(request):
    """Create a new Allocation; valid POSTs redirect to the list page."""
    from forms import AllocationForm
    seed = {}
    if request.method != 'POST':
        form = AllocationForm(initial=seed)
    else:
        form = AllocationForm(request.POST, initial=seed)
        if form.is_valid():
            form.save()
            return HttpResponseRedirect('/systems/allocations/')
    return render_to_response(
        'generic_form.html',
        {'form': form},
        RequestContext(request))
def csv_import(request):
    """Bulk-import systems from an uploaded CSV file.

    The first CSV row names the columns; only columns present in
    ALLOWED_COLUMNS are honoured, each value passing through a getter
    that resolves related objects (allocation, status, rack, server
    model) by name or id.  Rows failing model validation are printed
    and skipped rather than imported.
    """
    from forms import CSVImportForm
    def generic_getter(field):
        # Pass the raw CSV cell through unchanged.
        return field
    def uppercase_getter(field):
        return field.upper()
    def allocation_getter(field):
        try:
            return models.Allocation.objects.get(name=field)
        except models.Allocation.DoesNotExist:
            return None
    def system_status_getter(field):
        try:
            return models.SystemStatus.objects.get(status=field)
        except models.SystemStatus.DoesNotExist:
            return
    def server_model_getter(field):
        # Server models are referenced by primary key, not name.
        try:
            return models.ServerModel.objects.get(id=field)
        except models.ServerModel.DoesNotExist:
            return
    def rack_getter(field):
        try:
            return models.SystemRack.objects.get(name=field)
        except models.SystemRack.DoesNotExist:
            return None
    # Maps importable CSV column name -> value converter.
    ALLOWED_COLUMNS = {
        'hostname': generic_getter,
        'asset_tag': generic_getter,
        'serial': uppercase_getter,
        'notes': generic_getter,
        'oob_ip': generic_getter,
        'system_status': system_status_getter,
        'allocation': allocation_getter,
        'system_rack': rack_getter,
        'rack_order': generic_getter,
        'server_model': server_model_getter,
        'purchase_price': generic_getter,
    }
    new_systems = 0
    if request.method == 'POST':
        form = CSVImportForm(request.POST, request.FILES)
        if form.is_valid():
            csv_reader = csv.reader(form.cleaned_data['csv'])
            headers = csv_reader.next()
            for line in csv_reader:
                cur_data = dict(zip(headers, line))
                # Build kwargs for every allowed column; columns absent
                # from the CSV pass None through their getter.
                system_data = dict(
                    (a, getter(cur_data.get(a, None)))
                    for a, getter in ALLOWED_COLUMNS.iteritems())
                s = models.System(**system_data)
                try:
                    s.full_clean()
                except ValidationError, e:
                    print e
                else:
                    s.save()
                    new_systems += 1
            form = None
    else:
        form = CSVImportForm()
    return render_to_response(
        'systems/csv_import.html',
        {
            'form': form,
            'allowed_columns': ALLOWED_COLUMNS,
            'new_systems': new_systems,
        },
        RequestContext(request))
| bsd-3-clause | 658324273573a4f8c423160f0441fb88 | 34.217507 | 283 | 0.603475 | 3.722523 | false | false | false | false |
mozilla/inventory | mozdns/mozbind/views.py | 3 | 1245 | from gettext import gettext as _
from django.http import HttpResponse
from django.shortcuts import get_object_or_404, render
from mozdns.soa.models import SOA
from mozdns.view.models import View
from mozdns.mozbind.zone_builder import build_zone_data
import simplejson as json
def build_debug_soa(request, soa_pk):
    """Render a sample BIND zone build (private + public views) for an SOA.

    Builds zone data for the 'private' and 'public' DNS views,
    substitutes the SOA serial into each, and shows both in the
    sample_build template.  Any failure returns a JSON error blob.
    """
    soa = get_object_or_404(SOA, pk=soa_pk)
    # DEBUG_BUILD_STRING = build_zone(soa, root_domain)
    # Figure out what sort of domains are in this zone.
    try:
        private_view = View.objects.get(name='private')
        public_view = View.objects.get(name='public')
        private_data = build_zone_data(private_view, soa.root_domain, soa)
        # The builder leaves a {serial} placeholder for us to fill in.
        private_data = private_data.format(serial=soa.serial)
        public_data = build_zone_data(public_view, soa.root_domain, soa)
        public_data = public_data.format(serial=soa.serial)
        output = _(
            """
            ;======= Private Data =======
            {0}
            ;======= Public Data =======
            {1}
            """.format(private_data, public_data))
    except Exception:
        return HttpResponse(json.dumps(
            {"error": "HOLY SHIT SOMETHING WENT WRONG!!!"}))
    return render(request, 'mozbind/sample_build.html',
                  {'data': output, 'soa': soa})
| bsd-3-clause | 34f84f52bde659005cf21264f2486023 | 31.763158 | 74 | 0.646586 | 3.355795 | false | false | false | false |
mozilla/inventory | mozdns/nameserver/migrations/0001_initial.py | 2 | 17586 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Nameserver'
db.create_table('static_interface', (
('domain', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['domain.Domain'])),
('label', self.gf('django.db.models.fields.CharField')(max_length=63, null=True, blank=True)),
('fqdn', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=255, null=True, blank=True)),
('ttl', self.gf('django.db.models.fields.PositiveIntegerField')(default=3600, null=True, blank=True)),
('description', self.gf('django.db.models.fields.CharField')(max_length=1000, null=True, blank=True)),
('ip_str', self.gf('django.db.models.fields.CharField')(max_length=39)),
('ip_upper', self.gf('django.db.models.fields.BigIntegerField')(null=True, blank=True)),
('ip_lower', self.gf('django.db.models.fields.BigIntegerField')(null=True, blank=True)),
('ip_type', self.gf('django.db.models.fields.CharField')(max_length=1)),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('mac', self.gf('django.db.models.fields.CharField')(max_length=17)),
('reverse_domain', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='staticintrdomain_set', null=True, to=orm['domain.Domain'])),
('system', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['systems.System'], null=True, blank=True)),
('dhcp_enabled', self.gf('django.db.models.fields.BooleanField')(default=True)),
('dns_enabled', self.gf('django.db.models.fields.BooleanField')(default=True)),
))
db.create_table('nameserver', (
('ttl', self.gf('django.db.models.fields.PositiveIntegerField')(default=3600, null=True, blank=True)),
('description', self.gf('django.db.models.fields.CharField')(max_length=1000, null=True, blank=True)),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('domain', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['domain.Domain'])),
('server', self.gf('django.db.models.fields.CharField')(max_length=255)),
('addr_glue', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='nameserver_set', null=True, to=orm['address_record.AddressRecord'])),
('intr_glue', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='intrnameserver_set', null=True, to=orm['static_intr.StaticReg'])),
))
db.send_create_signal('nameserver', ['Nameserver'])
# Adding unique constraint on 'Nameserver', fields ['domain', 'server']
db.create_unique('nameserver', ['domain_id', 'server'])
# Adding M2M table for field views on 'Nameserver'
db.create_table('nameserver_views', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('nameserver', models.ForeignKey(orm['nameserver.nameserver'], null=False)),
('view', models.ForeignKey(orm['view.view'], null=False))
))
db.create_unique('nameserver_views', ['nameserver_id', 'view_id'])
def backwards(self, orm):
# Removing unique constraint on 'Nameserver', fields ['domain', 'server']
db.delete_unique('nameserver', ['domain_id', 'server'])
# Deleting model 'Nameserver'
db.delete_table('nameserver')
# Removing M2M table for field views on 'Nameserver'
db.delete_table('nameserver_views')
models = {
'address_record.addressrecord': {
'Meta': {'unique_together': "(('label', 'domain', 'fqdn', 'ip_upper', 'ip_lower', 'ip_type'),)", 'object_name': 'AddressRecord', 'db_table': "'address_record'"},
'description': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'domain': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['domain.Domain']"}),
'fqdn': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_lower': ('django.db.models.fields.BigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'ip_str': ('django.db.models.fields.CharField', [], {'max_length': '39'}),
'ip_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'ip_upper': ('django.db.models.fields.BigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '63', 'null': 'True', 'blank': 'True'}),
'ttl': ('django.db.models.fields.PositiveIntegerField', [], {'default': '3600', 'null': 'True', 'blank': 'True'}),
'views': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['view.View']", 'symmetrical': 'False', 'blank': 'True'})
},
'domain.domain': {
'Meta': {'object_name': 'Domain', 'db_table': "'domain'"},
'delegated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'dirty': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_reverse': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'master_domain': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['domain.Domain']", 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'purgeable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'soa': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['soa.SOA']", 'null': 'True', 'blank': 'True'})
},
'nameserver.nameserver': {
'Meta': {'unique_together': "(('domain', 'server'),)", 'object_name': 'Nameserver', 'db_table': "'nameserver'"},
'addr_glue': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'nameserver_set'", 'null': 'True', 'to': "orm['address_record.AddressRecord']"}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'domain': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['domain.Domain']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'intr_glue': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'intrnameserver_set'", 'null': 'True', 'to': "orm['static_intr.StaticReg']"}),
'server': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'ttl': ('django.db.models.fields.PositiveIntegerField', [], {'default': '3600', 'null': 'True', 'blank': 'True'}),
'views': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['view.View']", 'symmetrical': 'False', 'blank': 'True'})
},
'soa.soa': {
'Meta': {'unique_together': "(('primary', 'contact', 'description'),)", 'object_name': 'SOA', 'db_table': "'soa'"},
'contact': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'dirty': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'expire': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1209600'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'minimum': ('django.db.models.fields.PositiveIntegerField', [], {'default': '180'}),
'primary': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'refresh': ('django.db.models.fields.PositiveIntegerField', [], {'default': '180'}),
'retry': ('django.db.models.fields.PositiveIntegerField', [], {'default': '86400'}),
'serial': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1360538201'}),
'ttl': ('django.db.models.fields.PositiveIntegerField', [], {'default': '3600', 'null': 'True', 'blank': 'True'})
},
'static_intr.staticinterface': {
'Meta': {'unique_together': "(('ip_upper', 'ip_lower', 'label', 'domain', 'mac'),)", 'object_name': 'StaticReg', 'db_table': "'static_interface'"},
'description': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'dhcp_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'dns_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'domain': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['domain.Domain']"}),
'fqdn': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_lower': ('django.db.models.fields.BigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'ip_str': ('django.db.models.fields.CharField', [], {'max_length': '39'}),
'ip_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'ip_upper': ('django.db.models.fields.BigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '63', 'null': 'True', 'blank': 'True'}),
'mac': ('django.db.models.fields.CharField', [], {'max_length': '17'}),
'reverse_domain': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'staticintrdomain_set'", 'null': 'True', 'to': "orm['domain.Domain']"}),
'system': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['systems.System']", 'null': 'True', 'blank': 'True'}),
'ttl': ('django.db.models.fields.PositiveIntegerField', [], {'default': '3600', 'null': 'True', 'blank': 'True'}),
'views': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['view.View']", 'symmetrical': 'False', 'blank': 'True'})
},
'systems.allocation': {
'Meta': {'ordering': "['name']", 'object_name': 'Allocation', 'db_table': "u'allocations'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'systems.location': {
'Meta': {'ordering': "['name']", 'object_name': 'Location', 'db_table': "u'locations'"},
'address': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'blank': 'True'}),
'note': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'systems.operatingsystem': {
'Meta': {'ordering': "['name', 'version']", 'object_name': 'OperatingSystem', 'db_table': "u'operating_systems'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
'systems.servermodel': {
'Meta': {'ordering': "['vendor', 'model']", 'object_name': 'ServerModel', 'db_table': "u'server_models'"},
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'part_number': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'vendor': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
'systems.system': {
'Meta': {'object_name': 'System', 'db_table': "u'systems'"},
'allocation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['systems.Allocation']", 'null': 'True', 'blank': 'True'}),
'asset_tag': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'change_password': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_dhcp_server': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'is_dns_server': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'is_nagios_server': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'is_puppet_server': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'is_switch': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'licenses': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'oob_ip': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'oob_switch_port': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'operating_system': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['systems.OperatingSystem']", 'null': 'True', 'blank': 'True'}),
'patch_panel_port': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'purchase_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'purchase_price': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'rack_order': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2', 'blank': 'True'}),
'ram': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'serial': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'server_model': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['systems.ServerModel']", 'null': 'True', 'blank': 'True'}),
'switch_ports': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'system_rack': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['systems.SystemRack']", 'null': 'True', 'blank': 'True'}),
'system_status': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['systems.SystemStatus']", 'null': 'True', 'blank': 'True'}),
'updated_on': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'systems.systemrack': {
'Meta': {'ordering': "['name']", 'object_name': 'SystemRack', 'db_table': "u'system_racks'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['systems.Location']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
'systems.systemstatus': {
'Meta': {'ordering': "['status']", 'object_name': 'SystemStatus', 'db_table': "u'system_statuses'"},
'color': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'color_code': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
'view.view': {
'Meta': {'unique_together': "(('name',),)", 'object_name': 'View', 'db_table': "'view'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
}
}
complete_apps = ['nameserver']
| bsd-3-clause | 6896d1d418b9127e3cd70733da3b77b2 | 83.143541 | 192 | 0.562663 | 3.580941 | false | false | false | false |
mozilla/inventory | core/search/views.py | 1 | 5906 | from django.shortcuts import render
from django.http import HttpResponse, Http404
from django.db.utils import DatabaseError
from django.core.exceptions import ValidationError
from mozdns.utils import get_zones
from core.search.compiler.django_compile import compile_to_django
from core.search.compiler.django_compile import search_type
from core.search.compiler.invschema import SEARCH_SCHEMA, HELP_SEARCH_SCHEMA
import simplejson as json
from gettext import gettext as _
from jinja2 import Environment, PackageLoader, ChoiceLoader
env = Environment(loader=ChoiceLoader(
[PackageLoader('mozdns.record', 'templates'),
PackageLoader('core.search', 'templates')]
))
MAX_NUM_OBJECTS = 5000
def request_to_search(request):
search = request.GET.get("search", None)
adv_search = request.GET.get("advanced_search", "")
if adv_search:
if search:
search += " AND " + adv_search
else:
search = adv_search
return search
def handle_shady_search(search):
if not search:
return HttpResponse("What do you want?!?")
dos_terms = ["10", "com", "mozilla.com", "mozilla", "network:10/8",
"network:10.0.0.0/8"]
if search in dos_terms:
return HttpResponse("Denial of Service attack prevented. The search "
"term '{0}' is too general".format(search))
return None
def search_ajax(request):
template = env.get_template('search/core_search_results.html')
error_template = env.get_template('search/core_search_error.html')
def html_response(**kwargs):
if 'error_messages' in kwargs:
return error_template.render(**kwargs)
overflow_results = {}
for type_, count in kwargs['meta']['counts'].items():
if count > MAX_NUM_OBJECTS:
overflow_results[type_] = count
new_objs = kwargs['objects'][type_][:MAX_NUM_OBJECTS]
kwargs['objects'][type_] = new_objs
kwargs['MAX_NUM_OBJECTS'] = MAX_NUM_OBJECTS
kwargs['overflow_results'] = json.dumps(overflow_results)
return template.render(**kwargs)
return _search(request, html_response)
def search_dns_text(request):
def render_rdtype(rdtype_set, **kwargs):
response_str = ""
for obj in rdtype_set:
response_str += _("{0:<6}".format(obj.pk) +
obj.bind_render_record(show_ttl=True, **kwargs) +
"\n")
return response_str
def text_response(**kwargs):
if 'error_messages' in kwargs:
return json.dumps({'text_response': kwargs['error_messages']})
response_str = ""
for type_ in [
'SOA', 'NS', 'MX', 'SRV', 'CNAME', 'SSHFP', 'TXT', 'A',
'PTR', 'NET', 'SITE', 'VLAN', 'SYS', 'SREG', 'HWADAPTER']:
response_str += render_rdtype(kwargs['objects'][type_])
response_str += render_rdtype(kwargs['objects']['SREG'], reverse=True)
return json.dumps({'text_response': response_str})
return _search(request, text_response)
def _search(request, response):
search = request_to_search(request)
errors = handle_shady_search(search)
if errors:
return errors
try:
obj_map, error_resp = compile_to_django(search)
except ValidationError as why:
return HttpResponse(response(**{'error_messages': str(why)}))
if not obj_map:
return HttpResponse(response(**{'error_messages': error_resp}))
obj_counts = {}
total_objects = 0
try: # We might have to catch shitty regular expressions
# or other things MySQL doesn't like
for type_, q in obj_map.iteritems():
obj_counts[type_] = q.count() if q else 0
total_objects += obj_counts[type_]
except DatabaseError as why:
return HttpResponse(response(**{'error_messages': str(why)}))
except Warning as why:
return HttpResponse(response(**{'error_messages': str(why)}))
format = request.GET.get('format', '')
results = {
'format': format,
'meta': {
'counts': obj_counts,
'total_objects': total_objects,
'search': search,
},
'objects': obj_map
}
return HttpResponse(response(**results))
def search(request):
"""Search page"""
search = request.GET.get('search', '')
return render(request, "search/core_search.html", {
"search": search,
"zones": sorted([z.name for z in get_zones()], reverse=True)
})
def ajax_type_search(request):
query = request.GET.get('query', '')
record_type = request.GET.get('record_type', '')
if not record_type:
raise Http404
records, error = search_type(query, record_type)
if not query:
return HttpResponse(json.dumps({record_type: []}))
if error:
records = []
else:
try:
records = records[:50]
except DatabaseError, e:
if "Got error " in str(e) and " from regexp" in str(e):
# This is nasty. If the user is using an invalid regex patter,
# the db might shit a brick
records = []
else:
raise
return HttpResponse(json.dumps({
record_type: [{'label': str(r), 'pk': r.pk} for r in records]
}))
def get_zones_json(request):
return HttpResponse(json.dumps([z.name for z in get_zones()]))
def search_schema_ajax(request):
dclass = request.GET.get('class', None)
if not dclass:
# return all schemas in HELP_SEARCH_SCHEMA
return HttpResponse(json.dumps(HELP_SEARCH_SCHEMA))
elif dclass.upper() in SEARCH_SCHEMA:
# return one schema
return HttpResponse(
json.dumps({'schema': SEARCH_SCHEMA[dclass.upper()]})
)
else:
return HttpResponse("{}")
| bsd-3-clause | c43afd262fb2a4d3a408bd6aac3060a8 | 31.994413 | 79 | 0.601422 | 3.921647 | false | false | false | false |
mozilla/inventory | dhcp/models.py | 3 | 2217 | from django.db import models
class DHCPOverride(models.Model):
dhcp_scope = models.CharField(max_length=32)
override_text = models.TextField(blank=True, null=True)
class Meta:
db_table = u'dhcp_overrides'
class DHCPFile(models.Model):
dhcp_scope = models.CharField(max_length=32)
file_text = models.TextField(blank=True, null=True)
class Meta:
db_table = u'dhcp_file'
class DHCP(models.Model):
SUBNET_CHOICES = (
('255.255.254.0', '255.255.254.0'),
('255.255.255.0', '255.255.255.0'),
('255.255.255.128', '255.255.255.128'),
('255.255.255.192', '255.255.255.192'),
('255.255.255.224', '255.255.255.224'),
('255.255.255.240', '255.255.255.240'),
('255.255.255.248', '255.255.255.248'),
('255.255.255.252', '255.255.255.252'),
('255.255.255.254', '255.255.255.254')
)
YES_NO_CHOICES = (
(0, 'No'),
(1, 'Yes'),
)
scope_name = models.CharField(max_length=64)
scope_start = models.CharField(max_length=16, blank=True, null=True)
scope_netmask = models.CharField(max_length=32, choices=SUBNET_CHOICES)
scope_notes = models.TextField(max_length=512, blank=True, null=True)
filename = models.CharField(max_length=32, blank=True, null=True)
pool_range_start = models.CharField(max_length=16, blank=True, null=True)
pool_range_end = models.CharField(max_length=16, blank=True, null=True)
pool_deny_dynamic_bootp_agents = models.IntegerField(max_length=32, choices=YES_NO_CHOICES)
allow_booting = models.IntegerField(max_length=32, choices=YES_NO_CHOICES)
allow_bootp = models.IntegerField(max_length=32, choices=YES_NO_CHOICES)
option_ntp_servers = models.CharField(max_length=32, blank=True, null=True)
option_subnet_mask = models.CharField(max_length=16, choices=SUBNET_CHOICES)
option_domain_name_servers = models.CharField(max_length=48, blank=True, null=True)
option_domain_name = models.CharField(max_length=64, blank=True, null=True)
option_routers = models.CharField(max_length=16, blank=True, null=True)
def __unicode__(self):
return self.scope_name
class Meta:
db_table = u'dhcp_scopes'
| bsd-3-clause | 533e37d65e2818452bb469c86db77b1c | 38.589286 | 95 | 0.65945 | 3.06639 | false | false | false | false |
mozilla/inventory | api_v2/relengdistro_handler.py | 6 | 1741 | from piston.handler import BaseHandler, rc
from systems.models import System, RelengDistro, SystemRack,SystemStatus,NetworkAdapter,KeyValue
from truth.models import Truth, KeyValue as TruthKeyValue
from dhcp.DHCP import DHCP as DHCPInterface
from dhcp.models import DHCP
from MacroExpansion import MacroExpansion
from KeyValueTree import KeyValueTree
import re
try:
import json
except:
from django.utils import simplejson as json
from django.test.client import Client
from settings import API_ACCESS
class RelengDistroHandler(BaseHandler):
allowed_methods = API_ACCESS
model = RelengDistro
fields = ('id','distro_name')
def create(self, request, releng_distro_id=None):
rd = RelengDistro()
rd.save()
resp = rc.CREATED
resp.write('Record Created')
return resp
def read(self, request, releng_distro_id=None):
base = RelengDistro.objects
if releng_distro_id:
return base.get(pk=releng_distro_id)
else:
return base.all()
def update(self, request, releng_distro_id=None):
model = RelengDistro
if request.method == 'PUT':
try:
rd = model.objects.get(pk=releng_distro_id)
rd.distro_name = request.POST['releng_distro_name']
rd.save()
resp = rc.ALL_OK
except:
resp = rc.NOT_FOUND
return resp
def delete(self, request, releng_distro_id=None):
try:
rd = RelengDistro.objects.get(pk=releng_distro_id)
rd.delete()
resp = rc.DELETED
resp.write('Record Deleted')
except:
resp = rc.NOT_FOUND
return resp
| bsd-3-clause | 4dae0033f5d10690af6be31840baf1d3 | 29.54386 | 96 | 0.627226 | 3.809628 | false | false | false | false |
mozilla/make.mozilla.org | vendor-local/lib/python/kombu/tests/mocks.py | 13 | 3944 | from __future__ import absolute_import
from itertools import count
import anyjson
from kombu.transport import base
class Message(base.Message):
def __init__(self, *args, **kwargs):
self.throw_decode_error = kwargs.get("throw_decode_error", False)
super(Message, self).__init__(*args, **kwargs)
def decode(self):
if self.throw_decode_error:
raise ValueError("can't decode message")
return super(Message, self).decode()
class Channel(base.StdChannel):
open = True
throw_decode_error = False
def __init__(self, connection):
self.connection = connection
self.called = []
self.deliveries = count(1).next
self.to_deliver = []
self.events = {"basic_return": []}
def _called(self, name):
self.called.append(name)
def __contains__(self, key):
return key in self.called
def exchange_declare(self, *args, **kwargs):
self._called("exchange_declare")
def prepare_message(self, message_data, properties={}, priority=0,
content_type=None, content_encoding=None, headers=None):
self._called("prepare_message")
return dict(body=message_data,
headers=headers,
properties=properties,
priority=priority,
content_type=content_type,
content_encoding=content_encoding)
def basic_publish(self, message, exchange="", routing_key="",
mandatory=False, immediate=False, **kwargs):
self._called("basic_publish")
return message, exchange, routing_key
def exchange_delete(self, *args, **kwargs):
self._called("exchange_delete")
def queue_declare(self, *args, **kwargs):
self._called("queue_declare")
def queue_bind(self, *args, **kwargs):
self._called("queue_bind")
def queue_unbind(self, *args, **kwargs):
self._called("queue_unbind")
def queue_delete(self, queue, if_unused=False, if_empty=False, **kwargs):
self._called("queue_delete")
def basic_get(self, *args, **kwargs):
self._called("basic_get")
try:
return self.to_deliver.pop()
except IndexError:
pass
def queue_purge(self, *args, **kwargs):
self._called("queue_purge")
def basic_consume(self, *args, **kwargs):
self._called("basic_consume")
def basic_cancel(self, *args, **kwargs):
self._called("basic_cancel")
def basic_ack(self, *args, **kwargs):
self._called("basic_ack")
def basic_recover(self, requeue=False):
self._called("basic_recover")
def close(self):
self._called("close")
def message_to_python(self, message, *args, **kwargs):
self._called("message_to_python")
return Message(self, body=anyjson.dumps(message),
delivery_tag=self.deliveries(),
throw_decode_error=self.throw_decode_error,
content_type="application/json", content_encoding="utf-8")
def flow(self, active):
self._called("flow")
def basic_reject(self, delivery_tag, requeue=False):
if requeue:
return self._called("basic_reject:requeue")
return self._called("basic_reject")
def basic_qos(self, prefetch_size=0, prefetch_count=0,
apply_global=False):
self._called("basic_qos")
class Connection(object):
connected = True
def __init__(self, client):
self.client = client
def channel(self):
return Channel(self)
class Transport(base.Transport):
def establish_connection(self):
return Connection(self.client)
def create_channel(self, connection):
return connection.channel()
def drain_events(self, connection, **kwargs):
return "event"
def close_connection(self, connection):
connection.connected = False
| bsd-3-clause | 8dcba278e8b45044aa986f7a15b97560 | 27.374101 | 77 | 0.604716 | 4.03272 | false | false | false | false |
mozilla/make.mozilla.org | vendor-local/lib/python/celery/decorators.py | 14 | 1117 | # -*- coding: utf-8 -*-
"""
celery.decorators✞
==================
Deprecated decorators, use `celery.task.task`,
and `celery.task.periodic_task` instead.
The new decorators does not support magic keyword arguments.
:copyright: (c) 2009 - 2012 by Ask Solem.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import warnings
from . import task as _task
from .exceptions import CDeprecationWarning
warnings.warn(CDeprecationWarning("""
The `celery.decorators` module along with the magic keyword arguments,
are deprecated, and will be removed in version 3.0.
Please use the `celery.task` module instead of `celery.decorators`,
and the `task.request` should be used instead of the magic keyword arguments:
from celery.task import task
See http://bit.ly/celery22major for more information.
"""))
def task(*args, **kwargs): # ✞
kwargs.setdefault("accept_magic_kwargs", True)
return _task.task(*args, **kwargs)
def periodic_task(*args, **kwargs): # ✞
kwargs.setdefault("accept_magic_kwargs", True)
return _task.periodic_task(*args, **kwargs)
| bsd-3-clause | 41a7f931f0d081a67a90abf1f3825977 | 24.25 | 77 | 0.714671 | 3.654605 | false | false | false | false |
mozilla/make.mozilla.org | vendor-local/lib/python/south/db/mysql.py | 2 | 11330 | # MySQL-specific implementations for south
# Original author: Andrew Godwin
# Patches by: F. Gabriel Gosselin <gabrielNOSPAM@evidens.ca>
from django.db import connection
from django.conf import settings
from south.db import generic
from south.db.generic import DryRunError, INVALID
from south.logger import get_logger
def delete_column_constraints(func):
"""
Decorates column operation functions for MySQL.
Deletes the constraints from the database and clears local cache.
"""
def _column_rm(self, table_name, column_name, *args, **opts):
# Delete foreign key constraints
try:
self.delete_foreign_key(table_name, column_name)
except ValueError:
pass # If no foreign key on column, OK because it checks first
# Delete constraints referring to this column
try:
reverse = self._lookup_reverse_constraint(table_name, column_name)
for cname, rtable, rcolumn in reverse:
self.delete_foreign_key(rtable, rcolumn)
except DryRunError:
pass
return func(self, table_name, column_name, *args, **opts)
return _column_rm
def copy_column_constraints(func):
"""
Decorates column operation functions for MySQL.
Determines existing constraints and copies them to a new column
"""
def _column_cp(self, table_name, column_old, column_new, *args, **opts):
# Copy foreign key constraint
try:
constraint = self._find_foreign_constraints(table_name, column_old)[0]
(ftable, fcolumn) = self._lookup_constraint_references(table_name, constraint)
if ftable and fcolumn:
fk_sql = self.foreign_key_sql(
table_name, column_new, ftable, fcolumn)
get_logger().debug("Foreign key SQL: " + fk_sql)
self.add_deferred_sql(fk_sql)
except IndexError:
pass # No constraint exists so ignore
except DryRunError:
pass
# Copy constraints referring to this column
try:
reverse = self._lookup_reverse_constraint(table_name, column_old)
for cname, rtable, rcolumn in reverse:
fk_sql = self.foreign_key_sql(
rtable, rcolumn, table_name, column_new)
self.add_deferred_sql(fk_sql)
except DryRunError:
pass
return func(self, table_name, column_old, column_new, *args, **opts)
return _column_cp
def invalidate_table_constraints(func):
"""
For MySQL we grab all table constraints simultaneously, so this is
effective.
It further solves the issues of invalidating referred table constraints.
"""
def _cache_clear(self, table, *args, **opts):
db_name = self._get_setting('NAME')
if db_name in self._constraint_cache:
del self._constraint_cache[db_name]
if db_name in self._reverse_cache:
del self._reverse_cache[db_name]
if db_name in self._constraint_references:
del self._constraint_references[db_name]
return func(self, table, *args, **opts)
return _cache_clear
class DatabaseOperations(generic.DatabaseOperations):
"""
MySQL implementation of database operations.
MySQL has no DDL transaction support This can confuse people when they ask
how to roll back - hence the dry runs, etc., found in the migration code.
"""
backend_name = "mysql"
alter_string_set_type = ''
alter_string_set_null = 'MODIFY %(column)s %(type)s NULL;'
alter_string_drop_null = 'MODIFY %(column)s %(type)s NOT NULL;'
drop_index_string = 'DROP INDEX %(index_name)s ON %(table_name)s'
delete_primary_key_sql = "ALTER TABLE %(table)s DROP PRIMARY KEY"
delete_foreign_key_sql = "ALTER TABLE %(table)s DROP FOREIGN KEY %(constraint)s"
delete_unique_sql = "ALTER TABLE %s DROP INDEX %s"
rename_table_sql = "RENAME TABLE %s TO %s;"
allows_combined_alters = False
has_check_constraints = False
geom_types = ['geometry', 'point', 'linestring', 'polygon']
text_types = ['text', 'blob',]
def __init__(self, db_alias):
self._constraint_references = {}
self._reverse_cache = {}
super(DatabaseOperations, self).__init__(db_alias)
def _is_valid_cache(self, db_name, table_name):
cache = self._constraint_cache
# we cache the whole db so if there are any tables table_name is valid
return db_name in cache and cache[db_name].get(table_name, None) is not INVALID
def _fill_constraint_cache(self, db_name, table_name):
# for MySQL grab all constraints for this database. It's just as cheap as a single column.
self._constraint_cache[db_name] = {}
self._constraint_cache[db_name][table_name] = {}
self._reverse_cache[db_name] = {}
self._constraint_references[db_name] = {}
name_query = """
SELECT kc.`constraint_name`, kc.`column_name`, kc.`table_name`,
kc.`referenced_table_name`, kc.`referenced_column_name`
FROM information_schema.key_column_usage AS kc
WHERE
kc.table_schema = %s
"""
rows = self.execute(name_query, [db_name])
if not rows:
return
cnames = {}
for constraint, column, table, ref_table, ref_column in rows:
key = (table, constraint)
cnames.setdefault(key, set())
cnames[key].add((column, ref_table, ref_column))
type_query = """
SELECT c.constraint_name, c.table_name, c.constraint_type
FROM information_schema.table_constraints AS c
WHERE
c.table_schema = %s
"""
rows = self.execute(type_query, [db_name])
for constraint, table, kind in rows:
key = (table, constraint)
self._constraint_cache[db_name].setdefault(table, {})
try:
cols = cnames[key]
except KeyError:
cols = set()
for column_set in cols:
(column, ref_table, ref_column) = column_set
self._constraint_cache[db_name][table].setdefault(column, set())
if kind == 'FOREIGN KEY':
self._constraint_cache[db_name][table][column].add((kind,
constraint))
# Create constraint lookup, see constraint_references
self._constraint_references[db_name][(table,
constraint)] = (ref_table, ref_column)
# Create reverse table lookup, reverse_lookup
self._reverse_cache[db_name].setdefault(ref_table, {})
self._reverse_cache[db_name][ref_table].setdefault(ref_column,
set())
self._reverse_cache[db_name][ref_table][ref_column].add(
(constraint, table, column))
else:
self._constraint_cache[db_name][table][column].add((kind,
constraint))
def connection_init(self):
    """
    Run before any SQL to let database-specific config be sent as a command,
    e.g. which storage engine (MySQL) or transaction serialisability level.
    """
    cursor = self._get_connection().cursor()
    # Only issue the command when a STORAGE_ENGINE setting exists and is
    # non-empty.
    # NOTE(review): the ``storage_engine`` session variable was removed in
    # MySQL 5.7.5 in favour of ``default_storage_engine`` -- confirm target
    # server version before relying on this.
    if self._has_setting('STORAGE_ENGINE') and self._get_setting('STORAGE_ENGINE'):
        cursor.execute("SET storage_engine=%s;" % self._get_setting('STORAGE_ENGINE'))
def start_transaction(self):
    """Start a migration transaction with foreign-key checks disabled.

    Disabling FK checks lets schema changes be applied in any order
    without tripping referential-integrity errors mid-migration.
    """
    super(DatabaseOperations, self).start_transaction()
    self.execute("SET FOREIGN_KEY_CHECKS=0;")
@copy_column_constraints
@delete_column_constraints
@invalidate_table_constraints
def rename_column(self, table_name, old, new):
    """Rename column ``old`` to ``new`` on ``table_name``.

    MySQL's ``CHANGE COLUMN`` requires restating the full column
    definition, so the current definition is read back via ``DESCRIBE``
    first.  No-op (returns ``[]``) when the names match or in dry-run mode.

    Raises:
        ValueError: if ``old`` is not a column of ``table_name``.
    """
    if old == new or self.dry_run:
        return []

    # DESCRIBE rows are (Field, Type, Null, Key, Default, Extra); keep only
    # the row for the column being renamed.
    rows = [x for x in self.execute('DESCRIBE %s' % (self.quote_name(table_name),)) if x[0] == old]

    if not rows:
        raise ValueError("No column '%s' in '%s'." % (old, table_name))

    params = (
        self.quote_name(table_name),
        self.quote_name(old),
        self.quote_name(new),
        rows[0][1],                                    # column type
        rows[0][2] == "YES" and "NULL" or "NOT NULL",  # nullability
        rows[0][4] and "DEFAULT " or "",               # DEFAULT keyword, if any
        rows[0][4] and "%s" or "",                     # placeholder for the default value
        rows[0][5] or "",                              # Extra (e.g. auto_increment)
    )
    sql = 'ALTER TABLE %s CHANGE COLUMN %s %s %s %s %s %s %s;' % params

    # The default value is bound as a query parameter rather than inlined.
    if rows[0][4]:
        self.execute(sql, (rows[0][4],))
    else:
        self.execute(sql)
@delete_column_constraints
def delete_column(self, table_name, name):
    """Drop column ``name`` from ``table_name``; also evicts its cached
    constraints (via the decorator)."""
    super(DatabaseOperations, self).delete_column(table_name, name)
@invalidate_table_constraints
def rename_table(self, old_table_name, table_name):
    """Rename a table; the decorator invalidates its cached constraints."""
    super(DatabaseOperations, self).rename_table(old_table_name,
        table_name)
@invalidate_table_constraints
def delete_table(self, table_name):
    """Drop a table; the decorator invalidates its cached constraints."""
    super(DatabaseOperations, self).delete_table(table_name)
def _lookup_constraint_references(self, table_name, cname):
"""
Provided an existing table and constraint, returns tuple of (foreign
table, column)
"""
db_name = self._get_setting('NAME')
try:
return self._constraint_references[db_name][(table_name, cname)]
except KeyError:
return None
def _lookup_reverse_constraint(self, table_name, column_name=None):
"""Look for the column referenced by a foreign constraint"""
db_name = self._get_setting('NAME')
if self.dry_run:
raise DryRunError("Cannot get constraints for columns.")
if not self._is_valid_cache(db_name, table_name):
# Piggy-back on lookup_constraint, ensures cache exists
self.lookup_constraint(db_name, table_name)
try:
table = self._reverse_cache[db_name][table_name]
if column_name == None:
return [(y, tuple(y)) for x, y in table.items()]
else:
return tuple(table[column_name])
except KeyError, e:
return []
def _field_sanity(self, field):
"""
This particular override stops us sending DEFAULTs for BLOB/TEXT columns.
"""
# MySQL does not support defaults for geometry columns also
type = self._db_type_for_alter_column(field).lower()
is_geom = True in [ type.find(t) > -1 for t in self.geom_types ]
is_text = True in [ type.find(t) > -1 for t in self.text_types ]
if is_geom or is_text:
field._suppress_default = True
return field
def _alter_set_defaults(self, field, name, params, sqls):
"""
MySQL does not support defaults on text or blob columns.
"""
type = params['type']
# MySQL does not support defaults for geometry columns also
is_geom = True in [ type.find(t) > -1 for t in self.geom_types ]
is_text = True in [ type.find(t) > -1 for t in self.text_types ]
if not is_geom and not is_text:
super(DatabaseOperations, self)._alter_set_defaults(field, name, params, sqls)
| bsd-3-clause | 450621ee070914d46a78abcff1d0c461 | 39.464286 | 103 | 0.59188 | 3.989437 | false | false | false | false |
mozilla/make.mozilla.org | vendor-local/lib/python/celery/concurrency/processes/_win.py | 14 | 3055 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
# psutil is painfully slow in win32. So to avoid adding big
# dependencies like pywin32 a ctypes based solution is preferred
# Code based on the winappdbg project http://winappdbg.sourceforge.net/
# (BSD License)
from ctypes import byref, sizeof, windll, Structure, WinError, POINTER
from ctypes.wintypes import DWORD, c_size_t, LONG, c_char, c_void_p
# Win32 GetLastError() code raised by Process32First/Next when the
# snapshot has been fully iterated.
ERROR_NO_MORE_FILES = 18
# Failure sentinel returned by CreateToolhelp32Snapshot: (HANDLE)-1.
INVALID_HANDLE_VALUE = c_void_p(-1).value
class PROCESSENTRY32(Structure):
    # ctypes mirror of the Win32 ``PROCESSENTRY32`` struct used by the
    # Tool Help snapshot API (Process32First / Process32Next).
    _fields_ = [
        ('dwSize', DWORD),               # must be sizeof(PROCESSENTRY32) before use
        ('cntUsage', DWORD),
        ('th32ProcessID', DWORD),        # this process' pid
        ('th32DefaultHeapID', c_size_t),
        ('th32ModuleID', DWORD),
        ('cntThreads', DWORD),
        ('th32ParentProcessID', DWORD),  # pid of the parent process
        ('pcPriClassBase', LONG),
        ('dwFlags', DWORD),
        ('szExeFile', c_char * 260),     # executable file name (MAX_PATH)
    ]

# Pointer type for passing entries to the Win32 API by reference.
LPPROCESSENTRY32 = POINTER(PROCESSENTRY32)
def CreateToolhelp32Snapshot(dwFlags=2, th32ProcessID=0):
    """Take a Tool Help snapshot and return its handle.

    The default ``dwFlags=2`` (TH32CS_SNAPPROCESS) snapshots all processes
    in the system.  Raises :exc:`WindowsError` on failure.
    """
    handle = windll.kernel32.CreateToolhelp32Snapshot(dwFlags,
                                                      th32ProcessID)
    if handle != INVALID_HANDLE_VALUE:
        return handle
    raise WinError()
def Process32First(hSnapshot):
    """Return the first PROCESSENTRY32 in the snapshot, or None if empty.

    Raises :exc:`WindowsError` for any failure other than running out of
    entries.
    """
    entry = PROCESSENTRY32()
    entry.dwSize = sizeof(PROCESSENTRY32)
    if windll.kernel32.Process32First(hSnapshot, byref(entry)):
        return entry
    if windll.kernel32.GetLastError() != ERROR_NO_MORE_FILES:
        raise WinError()
    return None
def Process32Next(hSnapshot, pe=None):
    """Return the next PROCESSENTRY32, or None when the snapshot is
    exhausted.

    ``pe`` may be a previously-returned entry to reuse as the output
    buffer; a fresh one is allocated otherwise.  Raises
    :exc:`WindowsError` for any failure other than exhaustion.
    """
    if pe is None:
        pe = PROCESSENTRY32()
    pe.dwSize = sizeof(PROCESSENTRY32)
    if windll.kernel32.Process32Next(hSnapshot, byref(pe)):
        return pe
    if windll.kernel32.GetLastError() != ERROR_NO_MORE_FILES:
        raise WinError()
    return None
def get_all_processes_pids():
    """Return a dictionary with all processes pids as keys and their
    parents as value. Ignore processes with no parents.
    """
    snapshot = CreateToolhelp32Snapshot()
    parent_by_pid = {}
    entry = Process32First(snapshot)
    while entry is not None:
        # A zero parent pid means "no parent" -- skip those entries.
        if entry.th32ParentProcessID:
            parent_by_pid[entry.th32ProcessID] = entry.th32ParentProcessID
        entry = Process32Next(snapshot, entry)
    return parent_by_pid
def get_processtree_pids(pid, include_parent=True):
    """Return a list with all the pids of a process tree"""
    parent_by_pid = get_all_processes_pids()
    pids = set([pid])
    # Grow the set to a fixpoint: keep pulling in every process whose
    # parent is already in the tree until nothing new is found.
    while True:
        children = set(child for child, parent in parent_by_pid.items()
                           if parent in pids)
        if children <= pids:
            break
        pids |= children
    if not include_parent:
        pids.remove(pid)
    return list(pids)
def kill_processtree(pid, signum):
    """Kill a process and all its descendants"""
    for member_pid in get_processtree_pids(pid):
        os.kill(member_pid, signum)
| bsd-3-clause | 06aa6104bfff8227d292e1f421826406 | 27.287037 | 71 | 0.614403 | 3.66307 | false | false | false | false |
mozilla/make.mozilla.org | make_mozilla/events/migrations/0009_auto__add_field_eventkind_additional.py | 1 | 4446 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: add the nullable ``additional`` text field
    to ``EventKind``."""

    def forwards(self, orm):
        # Adding field 'EventKind.additional'
        db.add_column('events_eventkind', 'additional',
                      self.gf('django.db.models.fields.TextField')(null=True, blank=True),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'EventKind.additional'
        db.delete_column('events_eventkind', 'additional')

    # Frozen ORM snapshot South uses to build fake model classes for this
    # migration.  Auto-generated -- do not edit by hand.
    models = {
        'events.campaign': {
            'Meta': {'object_name': 'Campaign'},
            'description': ('django.db.models.fields.TextField', [], {}),
            'end': ('django.db.models.fields.DateField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
            'start': ('django.db.models.fields.DateField', [], {})
        },
        'events.event': {
            'Meta': {'object_name': 'Event'},
            'campaign': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['events.Campaign']", 'null': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {}),
            'end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'event_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'kind': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['events.EventKind']", 'null': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'official': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'organiser_email': ('django.db.models.fields.EmailField', [], {'max_length': '255'}),
            'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'source_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'venue': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['events.Venue']"}),
            'verified': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
        },
        'events.eventkind': {
            'Meta': {'object_name': 'EventKind'},
            'additional': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'})
        },
        'events.partner': {
            'Meta': {'object_name': 'Partner'},
            'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'for_campaign': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['events.Campaign']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'website': ('django.db.models.fields.URLField', [], {'max_length': '200'})
        },
        'events.venue': {
            'Meta': {'object_name': 'Venue'},
            'country': ('django_countries.fields.CountryField', [], {'max_length': '2'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'location': ('django.contrib.gis.db.models.fields.PointField', [], {'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'street_address': ('django.db.models.fields.TextField', [], {})
        }
    }

    complete_apps = ['events']
mozilla/make.mozilla.org | vendor-local/lib/python/celery/routes.py | 14 | 3338 | # -*- coding: utf-8 -*-
"""
celery.routes
~~~~~~~~~~~~~
Contains utilities for working with task routes
(:setting:`CELERY_ROUTES`).
:copyright: (c) 2009 - 2012 by Ask Solem.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from .exceptions import QueueNotFound
from .utils import firstmethod, instantiate, lpmerge, mpromise
# Calls ``route_for_task`` on each router in turn and returns the first
# non-None reply.
_first_route = firstmethod("route_for_task")
class MapRoute(object):
    """Creates a router out of a :class:`dict`."""

    def __init__(self, map):
        self.map = map

    def route_for_task(self, task, *args, **kwargs):
        # Hand back a copy so callers can't mutate the route table;
        # unknown (or empty) routes yield None.
        options = self.map.get(task)
        if not options:
            return None
        return dict(options)
class Router(object):
    """Resolves the final delivery options for a task message.

    Merges explicit caller options, replies from the user-configured
    routers (``routes``) and the declared ``queues`` mapping.
    """

    def __init__(self, routes=None, queues=None, create_missing=False,
            app=None):
        from .app import app_or_default
        self.app = app_or_default(app)
        self.queues = {} if queues is None else queues
        self.routes = [] if routes is None else routes
        self.create_missing = create_missing

    def route(self, options, task, args=(), kwargs={}):
        # NOTE(review): ``kwargs={}`` is a mutable default argument; safe
        # only while no router mutates the dict it receives -- confirm
        # before reusing this pattern.
        options = self.expand_destination(options)  # expands 'queue'
        if self.routes:
            route = self.lookup_route(task, args, kwargs)
            if route:  # expands 'queue' in route.
                # Router-provided options win over the caller's options.
                return lpmerge(self.expand_destination(route), options)
        if "queue" not in options:
            # No queue decided anywhere: fall back on the default queue.
            options = lpmerge(self.expand_destination(
                            self.app.conf.CELERY_DEFAULT_QUEUE), options)
        return options

    def expand_destination(self, route):
        # Route can be a queue name: convenient for direct exchanges.
        if isinstance(route, basestring):
            queue, route = route, {}
        else:
            # can use defaults from configured queue, but override specific
            # things (like the routing_key): great for topic exchanges.
            queue = route.pop("queue", None)

        if queue:  # expand config from configured queue.
            try:
                dest = dict(self.queues[queue])
            except KeyError:
                if not self.create_missing:
                    raise QueueNotFound(
                        "Queue %r is not defined in CELERY_QUEUES" % queue)
                # Auto-create the queue, defaulting exchange/routing_key
                # to the queue name unless given explicitly.
                for key in "exchange", "routing_key":
                    if route.get(key) is None:
                        route[key] = queue
                dest = dict(self.app.amqp.queues.add(queue, **route))
            # needs to be declared by publisher
            dest["queue"] = queue
            # routing_key and binding_key are synonyms.
            dest.setdefault("routing_key", dest.get("binding_key"))
            return lpmerge(dest, route)
        return route

    def lookup_route(self, task, args=None, kwargs=None):
        # First configured router with an opinion wins.
        return _first_route(self.routes, task, args, kwargs)
def prepare(routes):
    """Expands the :setting:`CELERY_ROUTES` setting."""
    if routes is None:
        return ()
    # Normalise a single route into a one-element tuple.
    if not isinstance(routes, (list, tuple)):
        routes = (routes, )

    def expand_route(route):
        # A dict becomes a map router; a string is a dotted path to a
        # router class, instantiated lazily.
        if isinstance(route, dict):
            return MapRoute(route)
        if isinstance(route, basestring):
            return mpromise(instantiate, route)
        return route

    return [expand_route(entry) for entry in routes]
| bsd-3-clause | db940e0562ad48cdaead4f447a3fc4e0 | 32.717172 | 77 | 0.583283 | 4.198742 | false | false | false | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.