| index (int64) | repo_name (string) | branch_name (string) | path (string) | content (string) | import_graph (string) |
|---|---|---|---|---|---|
28,189,868 | matthewwithanm/django-imagekit | refs/heads/develop | /imagekit/__init__.py |
from . import conf, generatorlibrary
from .pkgmeta import *
from .registry import register, unregister
from .specs import ImageSpec
__all__ = [
'ImageSpec', 'conf', 'generatorlibrary', 'register', 'unregister',
'__title__', '__author__', '__version__', '__license__'
]
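# A minimal registration sketch, following django-imagekit's documented
# usage (the spec class and generator id below are illustrative, not part
# of this file):
#
#     from imagekit import ImageSpec, register
#     from imagekit.processors import ResizeToFill
#
#     class Thumbnail(ImageSpec):
#         processors = [ResizeToFill(100, 50)]
#         format = 'JPEG'
#         options = {'quality': 60}
#
#     register.generator('myapp:thumbnail', Thumbnail)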
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,869 | matthewwithanm/django-imagekit | refs/heads/develop | /tests/test_optimistic_strategy.py |
from unittest.mock import Mock
from django.core.files.storage import FileSystemStorage
from imagekit.cachefiles import ImageCacheFile
from imagekit.cachefiles.backends import Simple as SimpleCFBackend
from imagekit.cachefiles.strategies import Optimistic as OptimisticStrategy
from .utils import create_image
class ImageGenerator:
def generate(self):
return create_image()
def get_hash(self):
return 'abc123'
def get_image_cache_file():
storage = Mock(FileSystemStorage)
backend = SimpleCFBackend()
strategy = OptimisticStrategy()
generator = ImageGenerator()
return ImageCacheFile(generator, storage=storage,
cachefile_backend=backend,
cachefile_strategy=strategy)
def test_no_io_on_bool():
"""
When checking the truthiness of an ImageCacheFile, the storage shouldn't
perform IO operations.
"""
file = get_image_cache_file()
bool(file)
assert not file.storage.exists.called
assert not file.storage.open.called
def test_no_io_on_url():
"""
When getting the URL of an ImageCacheFile, the storage shouldn't be
checked.
"""
file = get_image_cache_file()
file.url
assert not file.storage.exists.called
assert not file.storage.open.called
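# The Optimistic strategy exercised above can also be made the project-wide
# default. A sketch via settings, assuming the
# IMAGEKIT_DEFAULT_CACHEFILE_STRATEGY setting name from django-imagekit's
# conf module:
#
#     IMAGEKIT_DEFAULT_CACHEFILE_STRATEGY = \
#         'imagekit.cachefiles.strategies.Optimistic'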
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,870 | matthewwithanm/django-imagekit | refs/heads/develop | /tests/test_fields.py |
import pytest
from django import forms
from django.core.files.base import File
from django.core.files.uploadedfile import SimpleUploadedFile
from imagekit import forms as ikforms
from imagekit.processors import SmartCrop
from . import imagegenerators # noqa
from .models import (ImageModel, ProcessedImageFieldModel,
ProcessedImageFieldWithSpecModel)
from .utils import get_image_file
@pytest.mark.django_db(transaction=True)
def test_model_processedimagefield():
instance = ProcessedImageFieldModel()
with File(get_image_file()) as file:
instance.processed.save('whatever.jpeg', file)
instance.save()
assert instance.processed.width == 50
assert instance.processed.height == 50
@pytest.mark.django_db(transaction=True)
def test_model_processedimagefield_with_spec():
instance = ProcessedImageFieldWithSpecModel()
with File(get_image_file()) as file:
instance.processed.save('whatever.jpeg', file)
instance.save()
assert instance.processed.width == 100
assert instance.processed.height == 60
@pytest.mark.django_db(transaction=True)
def test_form_processedimagefield():
class TestForm(forms.ModelForm):
image = ikforms.ProcessedImageField(spec_id='tests:testform_image',
processors=[SmartCrop(50, 50)],
format='JPEG')
class Meta:
model = ImageModel
fields = 'image',
with get_image_file() as upload_file:
files = {
'image': SimpleUploadedFile('abc.jpg', upload_file.read())
}
form = TestForm({}, files)
instance = form.save()
assert instance.image.width == 50
assert instance.image.height == 50
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,871 | matthewwithanm/django-imagekit | refs/heads/develop | /tests/test_serialization.py |
"""
Make sure that the various IK classes can be successfully serialized and
deserialized. This is important when using IK with Celery.
"""
import pytest
from imagekit.cachefiles import ImageCacheFile
from .imagegenerators import TestSpec
from .utils import (clear_imagekit_cache, create_photo, get_unique_image_file,
pickleback)
@pytest.mark.django_db(transaction=True)
def test_imagespecfield():
clear_imagekit_cache()
instance = create_photo('pickletest2.jpg')
thumbnail = pickleback(instance.thumbnail)
thumbnail.generate()
@pytest.mark.django_db(transaction=True)
def test_circular_ref():
"""
A model instance with a spec field in its dict shouldn't raise a KeyError.
This corresponds to #234
"""
clear_imagekit_cache()
instance = create_photo('pickletest3.jpg')
instance.thumbnail # Cause thumbnail to be added to instance's __dict__
pickleback(instance)
def test_cachefiles():
clear_imagekit_cache()
spec = TestSpec(source=get_unique_image_file())
file = ImageCacheFile(spec)
file.url
# Remove the link to the file from the spec's source generator in order
# to exercise ImageCacheFile.__getstate__.
file.generator.source = None
restored_file = pickleback(file)
assert file is not restored_file
# Assertion for #437 and #451
assert file.storage is restored_file.storage
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,872 | matthewwithanm/django-imagekit | refs/heads/develop | /imagekit/processors/__init__.py |
from pilkit.processors import *
__all__ = [
# Base
'ProcessorPipeline', 'Adjust', 'Reflection', 'Transpose',
'Anchor', 'MakeOpaque',
# Crop
'TrimBorderColor', 'Crop', 'SmartCrop',
# Resize
'Resize', 'ResizeToCover', 'ResizeToFill', 'SmartResize',
'ResizeCanvas', 'AddBorder', 'ResizeToFit', 'Thumbnail'
]
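# A minimal pipeline sketch (assumes Pillow is installed; the file name is
# illustrative). ProcessorPipeline, re-exported from pilkit above, applies
# its processors in order:
#
#     from PIL import Image
#     from imagekit.processors import Adjust, ProcessorPipeline, ResizeToFill
#
#     img = Image.open('source.jpg')
#     thumb = ProcessorPipeline([ResizeToFill(100, 100),
#                                Adjust(contrast=1.2)]).process(img)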
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,873 | matthewwithanm/django-imagekit | refs/heads/develop | /tests/test_cachefiles.py |
from hashlib import md5
from unittest import mock
import pytest
from django.conf import settings
from imagekit.cachefiles import ImageCacheFile, LazyImageCacheFile
from imagekit.cachefiles.backends import Simple
from .imagegenerators import TestSpec
from .utils import (DummyAsyncCacheFileBackend, assert_file_is_falsy,
assert_file_is_truthy, get_image_file,
get_unique_image_file)
def test_no_source_falsiness():
"""
Ensure cache files generated from sourceless specs are falsy.
"""
spec = TestSpec(source=None)
file = ImageCacheFile(spec)
assert_file_is_falsy(file)
def test_sync_backend_truthiness():
"""
Ensure that a cachefile with a synchronous cache file backend (the default)
is truthy.
"""
spec = TestSpec(source=get_unique_image_file())
file = ImageCacheFile(spec)
assert_file_is_truthy(file)
def test_async_backend_falsiness():
"""
Ensure that a cachefile with an asynchronous cache file backend is falsy.
"""
spec = TestSpec(source=get_unique_image_file())
file = ImageCacheFile(spec, cachefile_backend=DummyAsyncCacheFileBackend())
assert_file_is_falsy(file)
def test_no_source_error():
spec = TestSpec(source=None)
file = ImageCacheFile(spec)
with pytest.raises(TestSpec.MissingSource):
file.generate()
def test_repr_does_not_send_existence_required():
"""
Ensure that `__repr__` method does not send `existance_required` signal
Cachefile strategy may be configured to generate file on
`existance_required`.
To generate images, backend passes `ImageCacheFile` instance to worker.
Both celery and RQ calls `__repr__` method for each argument to enque call.
And if `__repr__` of object will send this signal, we will get endless
recursion
"""
with mock.patch('imagekit.cachefiles.existence_required') as signal:
# import here to apply mock
from imagekit.cachefiles import ImageCacheFile
spec = TestSpec(source=get_unique_image_file())
file = ImageCacheFile(
spec,
cachefile_backend=DummyAsyncCacheFileBackend()
)
file.__repr__()
assert signal.send.called is False
def test_memcached_cache_key():
"""
Ensure the default cachefile backend is sanitizing its cache key for
memcached by default.
"""
class MockFile:
def __init__(self, name):
self.name = name
backend = Simple()
extra_char_count = len('state-') + len(settings.IMAGEKIT_CACHE_PREFIX)
length = 199 - extra_char_count
filename = '1' * length
file = MockFile(filename)
assert backend.get_key(file) == '%s%s-state' % (settings.IMAGEKIT_CACHE_PREFIX, file.name)
length = 200 - extra_char_count
filename = '1' * length
file = MockFile(filename)
assert backend.get_key(file) == '%s%s:%s' % (
settings.IMAGEKIT_CACHE_PREFIX,
'1' * (200 - len(':') - 32 - len(settings.IMAGEKIT_CACHE_PREFIX)),
md5(('%s%s-state' % (settings.IMAGEKIT_CACHE_PREFIX, filename)).encode('utf-8')).hexdigest())
def test_lazyfile_stringification():
file = LazyImageCacheFile('testspec', source=None)
assert str(file) == ''
assert repr(file) == '<ImageCacheFile: None>'
with get_image_file() as source_file:
file = LazyImageCacheFile('testspec', source=source_file)
file.name = 'a.jpg'
assert str(file) == 'a.jpg'
assert repr(file) == '<ImageCacheFile: a.jpg>'
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,874 | matthewwithanm/django-imagekit | refs/heads/develop | /imagekit/templatetags/imagekit.py |
from django import template
from django.template.library import parse_bits
from django.utils.encoding import force_str
from django.utils.html import escape
from django.utils.safestring import mark_safe
from ..cachefiles import ImageCacheFile
from ..registry import generator_registry
register = template.Library()
ASSIGNMENT_DELIMETER = 'as'
HTML_ATTRS_DELIMITER = '--'
DEFAULT_THUMBNAIL_GENERATOR = 'imagekit:thumbnail'
def get_cachefile(context, generator_id, generator_kwargs, source=None):
generator_id = generator_id.resolve(context)
kwargs = {k: v.resolve(context) for k, v in generator_kwargs.items()}
generator = generator_registry.get(generator_id, **kwargs)
return ImageCacheFile(generator)
def parse_dimensions(dimensions):
"""
Parse the width and height values from a dimension string. Valid values are
'1x1', '1x', and 'x1'. If one of the dimensions is omitted, the parse result
will be None for that value.
"""
width, height = [d.strip() and int(d) or None for d in dimensions.split('x')]
return {'width': width, 'height': height}
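# For example (illustrative values):
#
#     parse_dimensions('100x50')  # -> {'width': 100, 'height': 50}
#     parse_dimensions('100x')    # -> {'width': 100, 'height': None}
#     parse_dimensions('x50')     # -> {'width': None, 'height': 50}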
class GenerateImageAssignmentNode(template.Node):
def __init__(self, variable_name, generator_id, generator_kwargs):
self._generator_id = generator_id
self._generator_kwargs = generator_kwargs
self._variable_name = variable_name
def get_variable_name(self, context):
return force_str(self._variable_name)
def render(self, context):
variable_name = self.get_variable_name(context)
context[variable_name] = get_cachefile(context, self._generator_id,
self._generator_kwargs)
return ''
class GenerateImageTagNode(template.Node):
def __init__(self, generator_id, generator_kwargs, html_attrs):
self._generator_id = generator_id
self._generator_kwargs = generator_kwargs
self._html_attrs = html_attrs
def render(self, context):
file = get_cachefile(context, self._generator_id,
self._generator_kwargs)
attrs = {k: v.resolve(context) for k, v in self._html_attrs.items()}
# Only add width and height if neither is specified (to allow for
# proportional in-browser scaling).
if 'width' not in attrs and 'height' not in attrs:
attrs.update(width=file.width, height=file.height)
attrs['src'] = file.url
attr_str = ' '.join('%s="%s"' % (escape(k), escape(v)) for k, v in
attrs.items())
return mark_safe('<img %s />' % attr_str)
class ThumbnailAssignmentNode(template.Node):
def __init__(self, variable_name, generator_id, dimensions, source, generator_kwargs):
self._variable_name = variable_name
self._generator_id = generator_id
self._dimensions = dimensions
self._source = source
self._generator_kwargs = generator_kwargs
def get_variable_name(self, context):
return force_str(self._variable_name)
def render(self, context):
variable_name = self.get_variable_name(context)
generator_id = self._generator_id.resolve(context) if self._generator_id else DEFAULT_THUMBNAIL_GENERATOR
kwargs = {k: v.resolve(context) for k, v in self._generator_kwargs.items()}
kwargs['source'] = self._source.resolve(context)
kwargs.update(parse_dimensions(self._dimensions.resolve(context)))
if kwargs.get('anchor'):
# ImageKit uses pickle at protocol 0, which throws infinite
# recursion errors when anchor is set to a SafeString instance.
# This converts the SafeString into a str instance.
kwargs['anchor'] = kwargs['anchor'][:]
generator = generator_registry.get(generator_id, **kwargs)
context[variable_name] = ImageCacheFile(generator)
return ''
class ThumbnailImageTagNode(template.Node):
def __init__(self, generator_id, dimensions, source, generator_kwargs, html_attrs):
self._generator_id = generator_id
self._dimensions = dimensions
self._source = source
self._generator_kwargs = generator_kwargs
self._html_attrs = html_attrs
def render(self, context):
generator_id = self._generator_id.resolve(context) if self._generator_id else DEFAULT_THUMBNAIL_GENERATOR
dimensions = parse_dimensions(self._dimensions.resolve(context))
kwargs = {k: v.resolve(context) for k, v in self._generator_kwargs.items()}
kwargs['source'] = self._source.resolve(context)
kwargs.update(dimensions)
if kwargs.get('anchor'):
# ImageKit uses pickle at protocol 0, which throws infinite
# recursion errors when anchor is set to a SafeString instance.
# This converts the SafeString into a str instance.
kwargs['anchor'] = kwargs['anchor'][:]
generator = generator_registry.get(generator_id, **kwargs)
file = ImageCacheFile(generator)
attrs = {k: v.resolve(context) for k, v in self._html_attrs.items()}
# Only add width and height if neither is specified (to allow for
# proportional in-browser scaling).
if 'width' not in attrs and 'height' not in attrs:
attrs.update(width=file.width, height=file.height)
attrs['src'] = file.url
attr_str = ' '.join('%s="%s"' % (escape(k), escape(v)) for k, v in
attrs.items())
return mark_safe('<img %s />' % attr_str)
def parse_ik_tag_bits(parser, bits):
"""
Parses the tag name, html attributes and variable name (for assignment tags)
from the provided bits. The preceding bits may vary and are left to be
parsed by specific tags.
"""
varname = None
html_attrs = {}
tag_name = bits.pop(0)
if len(bits) >= 2 and bits[-2] == ASSIGNMENT_DELIMETER:
varname = bits[-1]
bits = bits[:-2]
if HTML_ATTRS_DELIMITER in bits:
if varname:
raise template.TemplateSyntaxError('Do not specify html attributes'
' (using "%s") when using the "%s" tag as an assignment'
' tag.' % (HTML_ATTRS_DELIMITER, tag_name))
index = bits.index(HTML_ATTRS_DELIMITER)
html_bits = bits[index + 1:]
bits = bits[:index]
if not html_bits:
raise template.TemplateSyntaxError('Don\'t use "%s" unless you\'re'
' setting html attributes.' % HTML_ATTRS_DELIMITER)
args, html_attrs = parse_bits(parser, html_bits, [], 'args',
'kwargs', None, [], None, False, tag_name)
if len(args):
raise template.TemplateSyntaxError('All "%s" tag arguments after'
' the "%s" token must be named.' % (tag_name,
HTML_ATTRS_DELIMITER))
return (tag_name, bits, html_attrs, varname)
@register.tag
def generateimage(parser, token):
"""
Creates an image based on the provided arguments.
By default::
{% generateimage 'myapp:thumbnail' source=mymodel.profile_image %}
generates an ``<img>`` tag::
<img src="/path/to/34d944f200dd794bf1e6a7f37849f72b.jpg" width="100" height="100" />
You can add additional attributes to the tag using "--". For example,
this::
{% generateimage 'myapp:thumbnail' source=mymodel.profile_image -- alt="Hello!" %}
will result in the following markup::
<img src="/path/to/34d944f200dd794bf1e6a7f37849f72b.jpg" width="100" height="100" alt="Hello!" />
For more flexibility, ``generateimage`` also works as an assignment tag::
{% generateimage 'myapp:thumbnail' source=mymodel.profile_image as th %}
<img src="{{ th.url }}" width="{{ th.width }}" height="{{ th.height }}" />
"""
bits = token.split_contents()
tag_name, bits, html_attrs, varname = parse_ik_tag_bits(parser, bits)
args, kwargs = parse_bits(parser, bits, ['generator_id'], 'args', 'kwargs',
None, [], None, False, tag_name)
if len(args) != 1:
raise template.TemplateSyntaxError('The "%s" tag requires exactly one'
' unnamed argument (the generator id).' % tag_name)
generator_id = args[0]
if varname:
return GenerateImageAssignmentNode(varname, generator_id, kwargs)
else:
return GenerateImageTagNode(generator_id, kwargs, html_attrs)
@register.tag
def thumbnail(parser, token):
"""
A convenient shortcut syntax for generating a thumbnail. The following::
{% thumbnail '100x100' mymodel.profile_image %}
is equivalent to::
{% generateimage 'imagekit:thumbnail' source=mymodel.profile_image width=100 height=100 %}
The thumbnail tag supports the "--" and "as" bits for adding html
attributes and assigning to a variable, respectively. It also accepts the
kwargs "anchor", and "crop".
To use "smart cropping" (the ``SmartResize`` processor)::
{% thumbnail '100x100' mymodel.profile_image %}
To crop, anchoring the image to the top right (the ``ResizeToFill``
processor)::
{% thumbnail '100x100' mymodel.profile_image anchor='tr' %}
To resize without cropping (using the ``ResizeToFit`` processor)::
{% thumbnail '100x100' mymodel.profile_image crop=0 %}
"""
bits = token.split_contents()
tag_name, bits, html_attrs, varname = parse_ik_tag_bits(parser, bits)
args, kwargs = parse_bits(parser, bits, [], 'args', 'kwargs',
None, [], None, False, tag_name)
if len(args) < 2:
raise template.TemplateSyntaxError('The "%s" tag requires at least two'
' unnamed arguments: the dimensions and the source image.'
% tag_name)
elif len(args) > 3:
raise template.TemplateSyntaxError('The "%s" tag accepts at most three'
' unnamed arguments: a generator id, the dimensions, and the'
' source image.' % tag_name)
dimensions, source = args[-2:]
generator_id = args[0] if len(args) > 2 else None
if varname:
return ThumbnailAssignmentNode(varname, generator_id, dimensions,
source, kwargs)
else:
return ThumbnailImageTagNode(generator_id, dimensions, source, kwargs,
html_attrs)
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,875 | matthewwithanm/django-imagekit | refs/heads/develop | /tests/conftest.py |
import pytest
from .utils import clear_imagekit_test_files
@pytest.fixture(scope='session', autouse=True)
def imagekit_test_files_teardown(request):
request.addfinalizer(clear_imagekit_test_files)
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,876 | matthewwithanm/django-imagekit | refs/heads/develop | /imagekit/cachefiles/namers.py |
"""
Functions responsible for returning filenames for the given image generator.
Users are free to define their own functions; these are just some sensible
choices.
"""
import os
from django.conf import settings
from ..utils import format_to_extension, suggest_extension
def source_name_as_path(generator):
"""
A namer that, given the following source file name::
photos/thumbnails/bulldog.jpg
will generate a name like this::
/path/to/generated/images/photos/thumbnails/bulldog/5ff3233527c5ac3e4b596343b440ff67.jpg
where "/path/to/generated/images/" is the value specified by the
``IMAGEKIT_CACHEFILE_DIR`` setting.
"""
source_filename = getattr(generator.source, 'name', None)
if source_filename is None or os.path.isabs(source_filename):
# Generally, we put the file right in the cache file directory.
dir = settings.IMAGEKIT_CACHEFILE_DIR
else:
# For source files with relative names (like Django media files),
# use the source's name to create the new filename.
dir = os.path.join(settings.IMAGEKIT_CACHEFILE_DIR,
os.path.splitext(source_filename)[0])
ext = suggest_extension(source_filename or '', generator.format)
return os.path.normpath(os.path.join(dir,
'%s%s' % (generator.get_hash(), ext)))
def source_name_dot_hash(generator):
"""
A namer that, given the following source file name::
photos/thumbnails/bulldog.jpg
will generate a name like this::
/path/to/generated/images/photos/thumbnails/bulldog.5ff3233527c5.jpg
where "/path/to/generated/images/" is the value specified by the
``IMAGEKIT_CACHEFILE_DIR`` setting.
"""
source_filename = getattr(generator.source, 'name', None)
if source_filename is None or os.path.isabs(source_filename):
# Generally, we put the file right in the cache file directory.
dir = settings.IMAGEKIT_CACHEFILE_DIR
else:
# For source files with relative names (like Django media files),
# use the source's name to create the new filename.
dir = os.path.join(settings.IMAGEKIT_CACHEFILE_DIR,
os.path.dirname(source_filename))
ext = suggest_extension(source_filename or '', generator.format)
basename = os.path.basename(source_filename)
return os.path.normpath(os.path.join(dir, '%s.%s%s' % (
os.path.splitext(basename)[0], generator.get_hash()[:12], ext)))
def hash(generator):
"""
A namer that, given the following source file name::
photos/thumbnails/bulldog.jpg
will generate a name like this::
/path/to/generated/images/5ff3233527c5ac3e4b596343b440ff67.jpg
where "/path/to/generated/images/" is the value specified by the
``IMAGEKIT_CACHEFILE_DIR`` setting.
"""
format = getattr(generator, 'format', None)
ext = format_to_extension(format) if format else ''
return os.path.normpath(os.path.join(settings.IMAGEKIT_CACHEFILE_DIR,
'%s%s' % (generator.get_hash(), ext)))
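# A namer is selected via settings. A sketch, using the setting name from
# django-imagekit's conf module (verify against your installed version):
#
#     IMAGEKIT_SPEC_CACHEFILE_NAMER = \
#         'imagekit.cachefiles.namers.source_name_dot_hash'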
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,877 | matthewwithanm/django-imagekit | refs/heads/develop | /imagekit/forms/fields.py |
from django.forms import ImageField
from ..specs import SpecHost
from ..utils import generate
class ProcessedImageField(ImageField, SpecHost):
def __init__(self, processors=None, format=None, options=None,
autoconvert=True, spec_id=None, spec=None, *args, **kwargs):
if spec_id is None:
# Unlike model fields, form fields are never told their field name.
# (Model fields learn theirs via `contribute_to_class()`.) Therefore
# we can't generate a good spec id automatically.
raise TypeError('You must provide a spec_id')
SpecHost.__init__(self, processors=processors, format=format,
options=options, autoconvert=autoconvert, spec=spec,
spec_id=spec_id)
super().__init__(*args, **kwargs)
def clean(self, data, initial=None):
data = super().clean(data, initial)
if data and data != initial:
spec = self.get_spec(source=data)
f = generate(spec)
# Name is required in Django 1.4. Once we drop support for it, we can
# return the result of `generate(spec)` directly.
f.name = data.name
return f
return data
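# A usage sketch: because form fields never learn their field name, spec_id
# must be passed explicitly (form and field names here are illustrative):
#
#     from django import forms
#     from imagekit.forms import ProcessedImageField
#     from imagekit.processors import ResizeToFill
#
#     class AvatarForm(forms.Form):
#         avatar = ProcessedImageField(spec_id='myapp:avatarform_avatar',
#                                      processors=[ResizeToFill(100, 100)],
#                                      format='JPEG')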
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,878 | matthewwithanm/django-imagekit | refs/heads/develop | /imagekit/models/fields/__init__.py |
from django.db import models
from django.db.models.signals import class_prepared
from ...registry import register
from ...specs import SpecHost
from ...specs.sourcegroups import ImageFieldSourceGroup
from .files import ProcessedImageFieldFile
from .utils import ImageSpecFileDescriptor
class SpecHostField(SpecHost):
def _set_spec_id(self, cls, name):
spec_id = getattr(self, 'spec_id', None)
# Generate a spec_id to register the spec with. The default spec id is
# "<app>:<model>_<field>"
if not spec_id:
spec_id = ('%s:%s:%s' % (cls._meta.app_label,
cls._meta.object_name, name)).lower()
# Register the spec with the id. This allows specs to be overridden
# later, from outside of the model definition.
super().set_spec_id(spec_id)
class ImageSpecField(SpecHostField):
"""
The heart and soul of the ImageKit library, ImageSpecField allows you to add
variants of uploaded images to your models.
"""
def __init__(self, processors=None, format=None, options=None,
source=None, cachefile_storage=None, autoconvert=None,
cachefile_backend=None, cachefile_strategy=None, spec=None,
id=None):
SpecHost.__init__(self, processors=processors, format=format,
options=options, cachefile_storage=cachefile_storage,
autoconvert=autoconvert,
cachefile_backend=cachefile_backend,
cachefile_strategy=cachefile_strategy, spec=spec,
spec_id=id)
# TODO: Allow callable for source. See https://github.com/matthewwithanm/django-imagekit/issues/158#issuecomment-10921664
self.source = source
def contribute_to_class(self, cls, name):
# If the source field name isn't defined, figure it out.
def register_source_group(source):
setattr(cls, name, ImageSpecFileDescriptor(self, name, source))
self._set_spec_id(cls, name)
# Add the model and field as a source for this spec id
register.source_group(self.spec_id, ImageFieldSourceGroup(cls, source))
if self.source:
register_source_group(self.source)
else:
# No source argument was given, so check whether the model defines
# exactly one ImageField. This has to wait until the model is fully
# prepared.
def handle_model_preparation(sender, **kwargs):
image_fields = [f.attname for f in cls._meta.fields if
isinstance(f, models.ImageField)]
if len(image_fields) == 0:
raise Exception(
'%s does not define any ImageFields, so your %s'
' ImageSpecField has no image to act on.' %
(cls.__name__, name))
elif len(image_fields) > 1:
raise Exception(
'%s defines multiple ImageFields, but you have not'
' specified a source for your %s ImageSpecField.' %
(cls.__name__, name))
register_source_group(image_fields[0])
class_prepared.connect(handle_model_preparation, sender=cls, weak=False)
class ProcessedImageField(models.ImageField, SpecHostField):
"""
ProcessedImageField is an ImageField that runs processors on the uploaded
image *before* saving it to storage. This is in contrast to specs, which
maintain the original. Useful for coercing fileformats or keeping images
within a reasonable size.
"""
attr_class = ProcessedImageFieldFile
def __init__(self, processors=None, format=None, options=None,
verbose_name=None, name=None, width_field=None, height_field=None,
autoconvert=None, spec=None, spec_id=None, **kwargs):
"""
The ProcessedImageField constructor accepts all of the arguments that
the :class:`django.db.models.ImageField` constructor accepts, as well
as the ``processors``, ``format``, and ``options`` arguments of
:class:`imagekit.models.ImageSpecField`.
"""
# if spec is not provided then autoconvert will be True by default
if spec is None and autoconvert is None:
autoconvert = True
SpecHost.__init__(self, processors=processors, format=format,
options=options, autoconvert=autoconvert, spec=spec,
spec_id=spec_id)
models.ImageField.__init__(self, verbose_name, name, width_field,
height_field, **kwargs)
def contribute_to_class(self, cls, name):
self._set_spec_id(cls, name)
return super().contribute_to_class(cls, name)
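# A model-level sketch based on django-imagekit's documented usage (model
# and field names are illustrative):
#
#     from django.db import models
#     from imagekit.models import ImageSpecField
#     from imagekit.processors import ResizeToFill
#
#     class Profile(models.Model):
#         avatar = models.ImageField(upload_to='avatars')
#         avatar_thumbnail = ImageSpecField(source='avatar',
#                                           processors=[ResizeToFill(100, 50)],
#                                           format='JPEG',
#                                           options={'quality': 60})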
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,879
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/imagekit/specs/sourcegroups.py
|
"""
Source groups are the means by which image spec sources are identified. They
have two responsibilities:
1. To dispatch ``source_saved`` signals. (These will be relayed to the
corresponding specs' cache file strategies.)
2. To provide the source files that they represent, via a generator method named
``files()``. (This is used by the generateimages management command for
"pre-caching" image files.)
"""
import inspect
from django.db.models.signals import post_init, post_save
from django.utils.functional import wraps
from ..cachefiles import LazyImageCacheFile
from ..signals import source_saved
from ..utils import get_nonabstract_descendants
def ik_model_receiver(fn):
"""
A method decorator that filters out signals coming from models that don't
have fields that function as ImageFieldSourceGroup sources.
"""
@wraps(fn)
def receiver(self, sender, **kwargs):
if not inspect.isclass(sender):
return
for src in self._source_groups:
if issubclass(sender, src.model_class):
fn(self, sender=sender, **kwargs)
# If we find a match, return. We don't want to handle the signal
# more than once.
return
return receiver
class ModelSignalRouter:
"""
Normally, ``ImageFieldSourceGroup`` would be directly responsible for
watching for changes on the model field it represents. However, Django does
not dispatch events for abstract base classes. Therefore, we must listen for
the signals on all models and filter out those that aren't represented by
``ImageFieldSourceGroup``s. This class encapsulates that functionality.
Related:
https://github.com/matthewwithanm/django-imagekit/issues/126
https://code.djangoproject.com/ticket/9318
"""
def __init__(self):
self._source_groups = []
uid = 'ik_spec_field_receivers'
post_init.connect(self.post_init_receiver, dispatch_uid=uid)
post_save.connect(self.post_save_receiver, dispatch_uid=uid)
def add(self, source_group):
self._source_groups.append(source_group)
def init_instance(self, instance):
instance._ik = getattr(instance, '_ik', {})
def update_source_hashes(self, instance):
"""
Stores hashes of the source image files so that they can be compared
later to see whether the source image has changed (and therefore whether
the spec file needs to be regenerated).
"""
self.init_instance(instance)
instance._ik['source_hashes'] = {
attname: hash(getattr(instance, attname))
for attname in self.get_source_fields(instance)}
return instance._ik['source_hashes']
def get_source_fields(self, instance):
"""
Returns a list of the source fields for the given instance.
"""
return {
src.image_field
for src in self._source_groups
if isinstance(instance, src.model_class)}
@ik_model_receiver
def post_save_receiver(self, sender, instance=None, created=False, update_fields=None, raw=False, **kwargs):
if not raw:
self.init_instance(instance)
old_hashes = instance._ik.get('source_hashes', {}).copy()
new_hashes = self.update_source_hashes(instance)
for attname in self.get_source_fields(instance):
if update_fields and attname not in update_fields:
continue
file = getattr(instance, attname)
if file and old_hashes.get(attname) != new_hashes[attname]:
self.dispatch_signal(source_saved, file, sender, instance,
attname)
@ik_model_receiver
def post_init_receiver(self, sender, instance=None, **kwargs):
self.init_instance(instance)
source_fields = self.get_source_fields(instance)
local_fields = {
field.name: field
for field in instance._meta.local_fields
if field.name in source_fields}
instance._ik['source_hashes'] = {
attname: hash(file_field)
for attname, file_field in local_fields.items()}
def dispatch_signal(self, signal, file, model_class, instance, attname):
"""
Dispatch the signal for each of the matching source groups. Note that
more than one source can have the same model and image_field; it's
important that we dispatch the signal for each.
"""
for source_group in self._source_groups:
if issubclass(model_class, source_group.model_class) and source_group.image_field == attname:
signal.send(sender=source_group, source=file)
class ImageFieldSourceGroup:
"""
A source group that represents a particular field across all instances of a
model and its subclasses.
"""
def __init__(self, model_class, image_field):
self.model_class = model_class
self.image_field = image_field
signal_router.add(self)
def files(self):
"""
A generator that returns the source files that this source group
represents; in this case, a particular field of every instance of a
particular model and its subclasses.
"""
for model in get_nonabstract_descendants(self.model_class):
for instance in model.objects.all().iterator():
yield getattr(instance, self.image_field)
class SourceGroupFilesGenerator:
"""
    A callable that, when invoked, returns a generator of cache file objects
    for the files of a source group.
"""
def __init__(self, source_group, generator_id):
self.source_group = source_group
self.generator_id = generator_id
def __eq__(self, other):
return (isinstance(other, self.__class__)
and self.__dict__ == other.__dict__)
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash((self.source_group, self.generator_id))
def __call__(self):
for source_file in self.source_group.files():
yield LazyImageCacheFile(self.generator_id,
source=source_file)
signal_router = ModelSignalRouter()
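
# --- Hedged usage sketch (assumptions noted; not part of the original module) ---
# How the pieces above fit together: a spec field registers an
# ImageFieldSourceGroup with the module-level ``signal_router``, and the
# ``generateimages`` management command drives a SourceGroupFilesGenerator to
# "pre-cache" renditions. The ``Photo`` model and 'myapp:thumbnail' generator
# id below are hypothetical.
#
#   group = ImageFieldSourceGroup(Photo, 'original_image')
#   files_generator = SourceGroupFilesGenerator(group, 'myapp:thumbnail')
#   for cache_file in files_generator():  # yields LazyImageCacheFile objects
#       cache_file.generate()             # create the cached rendition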
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,880
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/tests/test_no_extra_queries.py
|
from unittest.mock import Mock, PropertyMock, patch
from .models import Photo
def test_dont_access_source():
"""
Touching the source may trigger an unneeded query.
See <https://github.com/matthewwithanm/django-imagekit/issues/295>
"""
pmock = PropertyMock()
pmock.__get__ = Mock()
with patch.object(Photo, 'original_image', pmock):
photo = Photo() # noqa
assert not pmock.__get__.called
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,881
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/imagekit/models/fields/utils.py
|
from ...cachefiles import ImageCacheFile
class ImageSpecFileDescriptor:
def __init__(self, field, attname, source_field_name):
self.attname = attname
self.field = field
self.source_field_name = source_field_name
def __get__(self, instance, owner):
if instance is None:
return self.field
else:
source = getattr(instance, self.source_field_name)
spec = self.field.get_spec(source=source)
file = ImageCacheFile(spec)
instance.__dict__[self.attname] = file
return file
def __set__(self, instance, value):
instance.__dict__[self.attname] = value
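
# --- Hedged sketch of the descriptor's behavior (assumptions noted) ---
# Accessing the attribute on an instance builds an ImageCacheFile from the
# configured source field; accessing it on the class returns the field itself.
# The ``Photo`` model and attribute names are hypothetical.
#
#   photo = Photo.objects.get(pk=1)
#   photo.thumbnail  # __get__ -> ImageCacheFile built from photo.original_image
#   Photo.thumbnail  # __get__ with instance=None -> the field object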
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,882
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/imagekit/admin.py
|
from django.template.loader import render_to_string
from django.utils.translation import gettext_lazy as _
class AdminThumbnail:
"""
A convenience utility for adding thumbnails to Django's admin change list.
"""
short_description = _('Thumbnail')
allow_tags = True
def __init__(self, image_field, template=None):
"""
:param image_field: The name of the ImageField or ImageSpecField on the
model to use for the thumbnail.
        :param template: The template with which to render the thumbnail.
"""
self.image_field = image_field
self.template = template
def __call__(self, obj):
if callable(self.image_field):
thumbnail = self.image_field(obj)
else:
try:
thumbnail = getattr(obj, self.image_field)
except AttributeError:
raise Exception('The property %s is not defined on %s.' %
(self.image_field, obj.__class__.__name__))
original_image = getattr(thumbnail, 'source', None) or thumbnail
template = self.template or 'imagekit/admin/thumbnail.html'
return render_to_string(template, {
'model': obj,
'thumbnail': thumbnail,
'original_image': original_image,
})
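
# --- Hedged usage sketch (assumptions noted; not part of the original module) ---
# Typical wiring in an app's admin module. ``Photo``, ``PhotoAdmin`` and the
# 'thumbnail' field name are hypothetical.
#
#   from django.contrib import admin
#   from imagekit.admin import AdminThumbnail
#
#   @admin.register(Photo)
#   class PhotoAdmin(admin.ModelAdmin):
#       list_display = ('__str__', 'admin_thumbnail')
#       admin_thumbnail = AdminThumbnail(image_field='thumbnail')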
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,883
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/imagekit/cachefiles/backends.py
|
import warnings
from copy import copy
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from ..utils import get_cache, get_singleton, sanitize_cache_key
class CacheFileState:
EXISTS = 'exists'
GENERATING = 'generating'
DOES_NOT_EXIST = 'does_not_exist'
def get_default_cachefile_backend():
"""
Get the default file backend.
"""
from django.conf import settings
return get_singleton(settings.IMAGEKIT_DEFAULT_CACHEFILE_BACKEND,
'file backend')
class InvalidFileBackendError(ImproperlyConfigured):
pass
class AbstractCacheFileBackend:
"""
An abstract cache file backend. This isn't used by any internal classes and
is included simply to illustrate the minimum interface of a cache file
backend for users who wish to implement their own.
"""
def generate(self, file, force=False):
raise NotImplementedError
def exists(self, file):
raise NotImplementedError
class CachedFileBackend:
existence_check_timeout = 5
"""
The number of seconds to wait before rechecking to see if the file exists.
If the image is found to exist, that information will be cached using the
timeout specified in your CACHES setting (which should be very high).
However, when the file does not exist, you probably want to check again
in a relatively short amount of time. This attribute allows you to do that.
"""
@property
def cache(self):
if not getattr(self, '_cache', None):
self._cache = get_cache()
return self._cache
def get_key(self, file):
from django.conf import settings
return sanitize_cache_key('%s%s-state' %
(settings.IMAGEKIT_CACHE_PREFIX, file.name))
def get_state(self, file, check_if_unknown=True):
key = self.get_key(file)
state = self.cache.get(key)
if state is None and check_if_unknown:
exists = self._exists(file)
state = CacheFileState.EXISTS if exists else CacheFileState.DOES_NOT_EXIST
self.set_state(file, state)
return state
def set_state(self, file, state):
key = self.get_key(file)
if state == CacheFileState.DOES_NOT_EXIST:
self.cache.set(key, state, self.existence_check_timeout)
else:
self.cache.set(key, state, settings.IMAGEKIT_CACHE_TIMEOUT)
def __getstate__(self):
state = copy(self.__dict__)
# Don't include the cache when pickling. It'll be reconstituted based
# on the settings.
state.pop('_cache', None)
return state
def exists(self, file):
return self.get_state(file) == CacheFileState.EXISTS
def generate(self, file, force=False):
raise NotImplementedError
def generate_now(self, file, force=False):
if force or self.get_state(file) not in (CacheFileState.GENERATING, CacheFileState.EXISTS):
self.set_state(file, CacheFileState.GENERATING)
file._generate()
self.set_state(file, CacheFileState.EXISTS)
file.close()
class Simple(CachedFileBackend):
"""
The most basic file backend. The storage is consulted to see if the file
exists. Files are generated synchronously.
"""
def generate(self, file, force=False):
self.generate_now(file, force=force)
def _exists(self, file):
return bool(getattr(file, '_file', None)
or (file.name and file.storage.exists(file.name)))
def _generate_file(backend, file, force=False):
backend.generate_now(file, force=force)
class BaseAsync(Simple):
"""
Base class for cache file backends that generate files asynchronously.
"""
is_async = True
def generate(self, file, force=False):
# Schedule the file for generation, unless we know for sure we don't
# need to. If an already-generated file sneaks through, that's okay;
# ``generate_now`` will catch it. We just want to make sure we don't
# schedule anything we know is unnecessary--but we also don't want to
# force a costly existence check.
state = self.get_state(file, check_if_unknown=False)
if state not in (CacheFileState.GENERATING, CacheFileState.EXISTS):
self.schedule_generation(file, force=force)
def schedule_generation(self, file, force=False):
        # Override this to have the file generated in the background,
        # e.g. in a worker queue.
raise NotImplementedError
try:
from celery import shared_task as task
except ImportError:
pass
else:
_celery_task = task(ignore_result=True, serializer='pickle')(_generate_file)
class Celery(BaseAsync):
"""
A backend that uses Celery to generate the images.
"""
def __init__(self, *args, **kwargs):
try:
import celery # noqa
except ImportError:
raise ImproperlyConfigured('You must install celery to use'
' imagekit.cachefiles.backends.Celery.')
super().__init__(*args, **kwargs)
def schedule_generation(self, file, force=False):
_celery_task.delay(self, file, force=force)
# Stub class to preserve backwards compatibility and issue a warning
class Async(Celery):
def __init__(self, *args, **kwargs):
message = '{path}.Async is deprecated. Use {path}.Celery instead.'
warnings.warn(message.format(path=__name__), DeprecationWarning)
super().__init__(*args, **kwargs)
try:
from django_rq import job
except ImportError:
pass
else:
_rq_job = job('default', result_ttl=0)(_generate_file)
class RQ(BaseAsync):
"""
A backend that uses RQ to generate the images.
"""
def __init__(self, *args, **kwargs):
try:
import django_rq # noqa
except ImportError:
raise ImproperlyConfigured('You must install django-rq to use'
' imagekit.cachefiles.backends.RQ.')
super().__init__(*args, **kwargs)
def schedule_generation(self, file, force=False):
_rq_job.delay(self, file, force=force)
try:
from dramatiq import actor
except ImportError:
pass
else:
_dramatiq_actor = actor()(_generate_file)
class Dramatiq(BaseAsync):
"""
A backend that uses Dramatiq to generate the images.
"""
def __init__(self, *args, **kwargs):
try:
import dramatiq # noqa
except ImportError:
raise ImproperlyConfigured('You must install django-dramatiq to use'
' imagekit.cachefiles.backends.Dramatiq.')
super().__init__(*args, **kwargs)
def schedule_generation(self, file, force=False):
_dramatiq_actor.send(self, file, force=force)
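
# --- Hedged sketch of a custom async backend (assumptions noted) ---
# ``BaseAsync`` only requires ``schedule_generation()``, so any task queue can
# be plugged in. ``my_queue.enqueue`` below is a hypothetical stand-in for a
# real scheduler; the worker just needs to run ``_generate_file``.
#
#   class MyQueueBackend(BaseAsync):
#       def schedule_generation(self, file, force=False):
#           # Hand the (picklable) backend and file to a worker, which calls
#           # _generate_file(backend, file, force=force).
#           my_queue.enqueue(_generate_file, self, file, force=force)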
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,884
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/tests/test_generateimage_tag.py
|
import pytest
from django.template import TemplateSyntaxError
from . import imagegenerators # noqa
from .utils import clear_imagekit_cache, get_html_attrs, render_tag
def test_img_tag():
ttag = r"""{% generateimage 'testspec' source=img %}"""
clear_imagekit_cache()
attrs = get_html_attrs(ttag)
expected_attrs = {'src', 'width', 'height'}
assert set(attrs.keys()) == expected_attrs
for k in expected_attrs:
assert attrs[k].strip() != ''
def test_img_tag_attrs():
ttag = r"""{% generateimage 'testspec' source=img -- alt="Hello" %}"""
clear_imagekit_cache()
attrs = get_html_attrs(ttag)
assert attrs.get('alt') == 'Hello'
def test_dangling_html_attrs_delimiter():
ttag = r"""{% generateimage 'testspec' source=img -- %}"""
with pytest.raises(TemplateSyntaxError):
render_tag(ttag)
def test_html_attrs_assignment():
"""
You can either use generateimage as an assignment tag or specify html attrs,
but not both.
"""
ttag = r"""{% generateimage 'testspec' source=img -- alt="Hello" as th %}"""
with pytest.raises(TemplateSyntaxError):
render_tag(ttag)
def test_single_dimension_attr():
"""
If you only provide one of width or height, the other should not be added.
"""
ttag = r"""{% generateimage 'testspec' source=img -- width="50" %}"""
clear_imagekit_cache()
attrs = get_html_attrs(ttag)
assert 'height' not in attrs
def test_assignment_tag():
ttag = r"""{% generateimage 'testspec' source=img as th %}{{ th.url }}{{ th.height }}{{ th.width }}"""
clear_imagekit_cache()
html = render_tag(ttag)
assert html.strip() != ''
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,885
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/imagekit/cachefiles/__init__.py
|
import os.path
from copy import copy
from django.conf import settings
from django.core.files import File
from django.core.files.images import ImageFile
from django.utils.encoding import smart_str
from django.utils.functional import SimpleLazyObject
from ..files import BaseIKFile
from ..registry import generator_registry
from ..signals import content_required, existence_required
from ..utils import (
generate, get_by_qname, get_logger, get_singleton, get_storage
)
class ImageCacheFile(BaseIKFile, ImageFile):
"""
A file that represents the result of a generator. Creating an instance of
this class is not enough to trigger the generation of the file. In fact,
one of the main points of this class is to allow the creation of the file
to be deferred until the time that the cache file strategy requires it.
"""
def __init__(self, generator, name=None, storage=None, cachefile_backend=None, cachefile_strategy=None):
"""
:param generator: The object responsible for generating a new image.
:param name: The filename
:param storage: A Django storage object, or a callable which returns a
storage object that will be used to save the file.
:param cachefile_backend: The object responsible for managing the
state of the file.
:param cachefile_strategy: The object responsible for handling events
for this file.
"""
self.generator = generator
if not name:
try:
name = generator.cachefile_name
except AttributeError:
fn = get_by_qname(settings.IMAGEKIT_CACHEFILE_NAMER, 'namer')
name = fn(generator)
self.name = name
storage = (callable(storage) and storage()) or storage or \
getattr(generator, 'cachefile_storage', None) or get_storage()
self.cachefile_backend = (
cachefile_backend
or getattr(generator, 'cachefile_backend', None)
or get_singleton(settings.IMAGEKIT_DEFAULT_CACHEFILE_BACKEND,
'cache file backend'))
self.cachefile_strategy = (
cachefile_strategy
or getattr(generator, 'cachefile_strategy', None)
or get_singleton(settings.IMAGEKIT_DEFAULT_CACHEFILE_STRATEGY,
'cache file strategy')
)
super().__init__(storage=storage)
def _require_file(self):
if getattr(self, '_file', None) is None:
content_required.send(sender=self, file=self)
self._file = self.storage.open(self.name, 'rb')
# The ``path`` and ``url`` properties are overridden so as to not call
# ``_require_file``, which is only meant to be called when the file object
# will be directly interacted with (e.g. when using ``read()``). These only
# require the file to exist; they do not need its contents to work. This
# distinction gives the user the flexibility to create a cache file
# strategy that assumes the existence of a file, but can still make the file
# available when its contents are required.
def _storage_attr(self, attr):
if getattr(self, '_file', None) is None:
existence_required.send(sender=self, file=self)
fn = getattr(self.storage, attr)
return fn(self.name)
@property
def path(self):
return self._storage_attr('path')
@property
def url(self):
return self._storage_attr('url')
def generate(self, force=False):
"""
Generate the file. If ``force`` is ``True``, the file will be generated
whether the file already exists or not.
"""
if force or getattr(self, '_file', None) is None:
self.cachefile_backend.generate(self, force)
def _generate(self):
# Generate the file
content = generate(self.generator)
actual_name = self.storage.save(self.name, content)
# We're going to reuse the generated file, so we need to reset the pointer.
if not hasattr(content, "seekable") or content.seekable():
content.seek(0)
# Store the generated file. If we don't do this, the next time the
# "file" attribute is accessed, it will result in a call to the storage
# backend (in ``BaseIKFile._get_file``). Since we already have the
# contents of the file, what would the point of that be?
self.file = File(content)
        # ``actual_name`` holds the output of ``self.storage.save()``, which
        # by default returns filenames with forward slashes, even on Windows.
        # ``self.name``, on the other hand, may hold an OS-specific path
        # produced by path normalizers like ``os.path.normpath()`` in the
        # ``namer``. So the filenames must be normalized before comparing
        # them for equality.
if os.path.normpath(actual_name) != os.path.normpath(self.name):
get_logger().warning(
'The storage backend %s did not save the file with the'
' requested name ("%s") and instead used "%s". This may be'
' because a file already existed with the requested name. If'
' so, you may have meant to call generate() instead of'
' generate(force=True), or there may be a race condition in the'
' file backend %s. The saved file will not be used.' % (
self.storage,
self.name, actual_name,
self.cachefile_backend
)
)
def __bool__(self):
if not self.name:
return False
# Dispatch the existence_required signal before checking to see if the
# file exists. This gives the strategy a chance to create the file.
existence_required.send(sender=self, file=self)
try:
check = self.cachefile_strategy.should_verify_existence(self)
except AttributeError:
# All synchronous backends should have created the file as part of
# `existence_required` if they wanted to.
check = getattr(self.cachefile_backend, 'is_async', False)
return self.cachefile_backend.exists(self) if check else True
def __getstate__(self):
state = copy(self.__dict__)
        # ``_file`` is the hidden attribute backing the ``file`` property.
state.pop('_file', None)
        # Remove the storage from the state, as some non-FileSystemStorage
        # backends can't be pickled.
settings_storage = get_storage()
if state['storage'] == settings_storage:
state.pop('storage')
return state
def __setstate__(self, state):
if 'storage' not in state:
state['storage'] = get_storage()
self.__dict__.update(state)
def __repr__(self):
return smart_str("<%s: %s>" % (
self.__class__.__name__, self if self.name else "None")
)
class LazyImageCacheFile(SimpleLazyObject):
def __init__(self, generator_id, *args, **kwargs):
def setup():
generator = generator_registry.get(generator_id, *args, **kwargs)
return ImageCacheFile(generator)
super().__init__(setup)
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, str(self) or 'None')
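
# --- Hedged usage sketch (assumptions noted; not part of the original module) ---
# 'myapp:thumbnail' and ``photo.original_image`` are hypothetical. The pattern
# mirrors how other parts of ImageKit (e.g. the template tags) consume the
# classes above.
#
#   file = LazyImageCacheFile('myapp:thumbnail', source=photo.original_image)
#   file.url         # sends existence_required; the strategy may generate here
#   file.generate()  # force=False: delegate to the cache file backend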
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,886
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/tests/test_thumbnail_tag.py
|
import pytest
from django.template import TemplateSyntaxError
from . import imagegenerators # noqa
from .utils import clear_imagekit_cache, get_html_attrs, render_tag
def test_img_tag():
ttag = r"""{% thumbnail '100x100' img %}"""
clear_imagekit_cache()
attrs = get_html_attrs(ttag)
expected_attrs = {'src', 'width', 'height'}
assert set(attrs.keys()) == expected_attrs
for k in expected_attrs:
assert attrs[k].strip() != ''
def test_img_tag_anchor():
ttag = r"""{% thumbnail '100x100' img anchor='c' %}"""
clear_imagekit_cache()
attrs = get_html_attrs(ttag)
expected_attrs = {'src', 'width', 'height'}
assert set(attrs.keys()) == expected_attrs
for k in expected_attrs:
assert attrs[k].strip() != ''
def test_img_tag_attrs():
ttag = r"""{% thumbnail '100x100' img -- alt="Hello" %}"""
clear_imagekit_cache()
attrs = get_html_attrs(ttag)
assert attrs.get('alt') == 'Hello'
def test_dangling_html_attrs_delimiter():
ttag = r"""{% thumbnail '100x100' img -- %}"""
with pytest.raises(TemplateSyntaxError):
render_tag(ttag)
def test_not_enough_args():
ttag = r"""{% thumbnail '100x100' %}"""
with pytest.raises(TemplateSyntaxError):
render_tag(ttag)
def test_too_many_args():
ttag = r"""{% thumbnail 'generator_id' '100x100' img 'extra' %}"""
with pytest.raises(TemplateSyntaxError):
render_tag(ttag)
def test_html_attrs_assignment():
"""
You can either use thumbnail as an assignment tag or specify html attrs,
but not both.
"""
ttag = r"""{% thumbnail '100x100' img -- alt="Hello" as th %}"""
with pytest.raises(TemplateSyntaxError):
render_tag(ttag)
def test_assignment_tag():
ttag = r"""{% thumbnail '100x100' img as th %}{{ th.url }}"""
clear_imagekit_cache()
html = render_tag(ttag)
assert html != ''
def test_assignment_tag_anchor():
ttag = r"""{% thumbnail '100x100' img anchor='c' as th %}{{ th.url }}"""
clear_imagekit_cache()
html = render_tag(ttag)
assert html != ''
def test_single_dimension():
ttag = r"""{% thumbnail '100x' img as th %}{{ th.width }}"""
clear_imagekit_cache()
html = render_tag(ttag)
assert html == '100'
def test_alternate_generator():
ttag = r"""{% thumbnail '1pxsq' '100x' img as th %}{{ th.width }}"""
clear_imagekit_cache()
html = render_tag(ttag)
assert html == '1'
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,887
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/imagekit/models/fields/files.py
|
import os
from django.db.models.fields.files import ImageFieldFile
from ...utils import generate, suggest_extension
class ProcessedImageFieldFile(ImageFieldFile):
def save(self, name, content, save=True):
filename, ext = os.path.splitext(name)
spec = self.field.get_spec(source=content)
ext = suggest_extension(name, spec.format)
new_name = '%s%s' % (filename, ext)
content = generate(spec)
return super().save(new_name, content, save)
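
# --- Hedged usage sketch (assumptions noted) ---
# Saving through a ProcessedImageField runs the spec and may rewrite the
# extension to match the spec's format (e.g. 'photo.png' -> 'photo.jpg' when
# format='JPEG'). The ``profile.avatar`` field below is hypothetical.
#
#   profile.avatar.save('photo.png', uploaded_file)  # stored with a .jpg name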
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,888
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/tests/test_settings.py
|
import django
from django.test import override_settings
import pytest
from imagekit.conf import ImageKitConf, settings
from imagekit.utils import get_storage
@pytest.mark.skipif(
django.VERSION < (4, 2),
reason="STORAGES was introduced in Django 4.2",
)
def test_custom_storages():
with override_settings(
STORAGES={
"default": {
"BACKEND": "tests.utils.CustomStorage",
}
},
):
conf = ImageKitConf()
assert conf.configure_default_file_storage(None) == "default"
@pytest.mark.skipif(
django.VERSION >= (5, 1),
reason="DEFAULT_FILE_STORAGE is removed in Django 5.1.",
)
def test_custom_default_file_storage():
with override_settings(DEFAULT_FILE_STORAGE="tests.utils.CustomStorage"):
# If we don’t remove this, Django 4.2 will keep the old value.
del settings.STORAGES
conf = ImageKitConf()
if django.VERSION >= (4, 2):
assert conf.configure_default_file_storage(None) == "default"
else:
assert (
conf.configure_default_file_storage(None) == "tests.utils.CustomStorage"
)
def test_get_storage_default():
from django.core.files.storage import FileSystemStorage
assert isinstance(get_storage(), FileSystemStorage)
@pytest.mark.skipif(
django.VERSION >= (5, 1),
reason="DEFAULT_FILE_STORAGE is removed in Django 5.1.",
)
def test_get_storage_custom_path():
from tests.utils import CustomStorage
with override_settings(IMAGEKIT_DEFAULT_FILE_STORAGE="tests.utils.CustomStorage"):
assert isinstance(get_storage(), CustomStorage)
@pytest.mark.skipif(
django.VERSION < (4, 2),
reason="STORAGES was introduced in Django 4.2",
)
def test_get_storage_custom_key():
from tests.utils import CustomStorage
with override_settings(
STORAGES={
"custom": {
"BACKEND": "tests.utils.CustomStorage",
}
},
IMAGEKIT_DEFAULT_FILE_STORAGE="custom",
):
assert isinstance(get_storage(), CustomStorage)
|
28,189,889
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/imagekit/files.py
|
import os
from django.core.files.base import ContentFile, File
from .utils import extension_to_mimetype, format_to_mimetype
class BaseIKFile(File):
"""
This class contains all of the methods we need from
django.db.models.fields.files.FieldFile, but with the model stuff ripped
out. It's only extended by one class, but we keep it separate for
organizational reasons.
"""
def __init__(self, storage):
self.storage = storage
    def _require_file(self):
        if not self:
            raise ValueError('There is no file associated with this object.')
def _get_file(self):
self._require_file()
if not hasattr(self, '_file') or self._file is None:
self._file = self.storage.open(self.name, 'rb')
return self._file
def _set_file(self, file):
self._file = file
def _del_file(self):
del self._file
file = property(_get_file, _set_file, _del_file)
def _get_path(self):
self._require_file()
return self.storage.path(self.name)
path = property(_get_path)
def _get_url(self):
self._require_file()
return self.storage.url(self.name)
url = property(_get_url)
def _get_size(self):
self._require_file()
if not getattr(self, '_committed', False):
return self.file.size
return self.storage.size(self.name)
size = property(_get_size)
def open(self, mode='rb'):
self._require_file()
try:
self.file.open(mode)
except ValueError:
            # If the underlying file can't be reopened, fall back to the
            # storage backend to open it again.
            if self.file.closed:
                # Clear the cached file instance.
                del self.file
                # Because ``file`` is a property, accessing it after the
                # deletion re-opens the file from storage.
return self.file.open(mode)
raise
def _get_closed(self):
file = getattr(self, '_file', None)
return file is None or file.closed
closed = property(_get_closed)
def close(self):
file = getattr(self, '_file', None)
if file is not None:
file.close()
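# A minimal usage sketch (illustrative, not part of the library): a subclass
# is expected to provide a ``name`` attribute, after which ``file``, ``url``
# and friends resolve lazily through the storage backend. ``CachedFile`` and
# the storage instance below are hypothetical.
#
#   class CachedFile(BaseIKFile):
#       def __init__(self, storage, name):
#           super().__init__(storage)
#           self.name = name
#
#   f = CachedFile(FileSystemStorage(), 'cache/thumb.jpg')
#   f.url   # delegates to storage.url('cache/thumb.jpg')
#   f.file  # lazily opened via storage.open('cache/thumb.jpg', 'rb')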
class IKContentFile(ContentFile):
"""
Wraps a ContentFile in a file-like object with a filename and a
    content_type. A PIL image format can optionally be provided as a content
    type hint.
"""
def __init__(self, filename, content, format=None):
self.file = ContentFile(content)
self.file.name = filename
mimetype = getattr(self.file, 'content_type', None)
if format and not mimetype:
mimetype = format_to_mimetype(format)
if not mimetype:
ext = os.path.splitext(filename or '')[1]
mimetype = extension_to_mimetype(ext)
self.file.content_type = mimetype
@property
def name(self):
return self.file.name
def __str__(self):
return str(self.file.name or '')
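# Illustrative usage (filename and bytes are made up): the content type comes
# from the PIL format hint when one is given, and is otherwise guessed from
# the filename's extension.
#
#   f = IKContentFile('thumbs/pic.png', b'<image bytes>')
#   f.name               # 'thumbs/pic.png'
#   f.file.content_type  # 'image/png', guessed from the .png extension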
|
28,189,890
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/tests/test_closing_fieldfiles.py
|
import pytest
from .models import Thumbnail
from .utils import create_photo
@pytest.mark.django_db(transaction=True)
def test_do_not_leak_open_files():
instance = create_photo('leak-test.jpg')
source_file = instance.original_image
# Ensure the FieldFile is closed before generation
source_file.close()
image_generator = Thumbnail(source=source_file)
image_generator.generate()
assert source_file.closed
@pytest.mark.django_db(transaction=True)
def test_do_not_close_open_files_after_generate():
instance = create_photo('do-not-close-test.jpg')
source_file = instance.original_image
# Ensure the FieldFile is opened before generation
source_file.open()
image_generator = Thumbnail(source=source_file)
image_generator.generate()
assert not source_file.closed
source_file.close()
|
28,189,891
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/imagekit/signals.py
|
from django.dispatch import Signal
# Generated file signals
content_required = Signal()
existence_required = Signal()
# Source group signals
source_saved = Signal()
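# Illustrative only: connecting a (hypothetical) receiver to source_saved.
# Source groups send this signal with a ``source`` keyword argument.
#
#   from imagekit.signals import source_saved
#
#   def on_source_saved(sender, source, **kwargs):
#       print('source saved:', source)
#
#   source_saved.connect(on_source_saved)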
|
28,189,892
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/imagekit/cachefiles/strategies.py
|
from ..utils import get_singleton
class JustInTime:
"""
A strategy that ensures the file exists right before it's needed.
"""
def on_existence_required(self, file):
file.generate()
def on_content_required(self, file):
file.generate()
class Optimistic:
"""
A strategy that acts immediately when the source file changes and assumes
that the cache files will not be removed (i.e. it doesn't ensure the
cache file exists when it's accessed).
"""
def on_source_saved(self, file):
file.generate()
def should_verify_existence(self, file):
return False
class DictStrategy:
def __init__(self, callbacks):
for k, v in callbacks.items():
setattr(self, k, v)
def load_strategy(strategy):
if isinstance(strategy, str):
strategy = get_singleton(strategy, 'cache file strategy')
elif isinstance(strategy, dict):
strategy = DictStrategy(strategy)
elif callable(strategy):
strategy = strategy()
return strategy
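# A sketch of the three forms load_strategy accepts (the dotted path and the
# dict callback below are illustrative):
#
#   load_strategy('imagekit.cachefiles.strategies.JustInTime')  # dotted path
#   load_strategy({'on_content_required': lambda file: file.generate()})
#   load_strategy(Optimistic)  # any callable that returns a strategy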
|
28,189,893
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/imagekit/registry.py
|
from .exceptions import AlreadyRegistered, NotRegistered
from .signals import content_required, existence_required, source_saved
from .utils import autodiscover, call_strategy_method
class GeneratorRegistry:
"""
An object for registering generators. This registry provides
a convenient way for a distributable app to define default generators
without locking the users of the app into it.
"""
def __init__(self):
self._generators = {}
content_required.connect(self.content_required_receiver)
existence_required.connect(self.existence_required_receiver)
def register(self, id, generator):
registered_generator = self._generators.get(id)
if registered_generator and generator != self._generators[id]:
raise AlreadyRegistered('The generator with id %s is'
' already registered' % id)
self._generators[id] = generator
def unregister(self, id):
try:
del self._generators[id]
except KeyError:
raise NotRegistered('The generator with id %s is not'
' registered' % id)
def get(self, id, **kwargs):
autodiscover()
try:
generator = self._generators[id]
except KeyError:
raise NotRegistered('The generator with id %s is not'
' registered' % id)
if callable(generator):
return generator(**kwargs)
else:
return generator
def get_ids(self):
autodiscover()
return self._generators.keys()
def content_required_receiver(self, sender, file, **kwargs):
self._receive(file, 'on_content_required')
def existence_required_receiver(self, sender, file, **kwargs):
self._receive(file, 'on_existence_required')
def _receive(self, file, callback):
generator = file.generator
# FIXME: I guess this means you can't register functions?
if generator.__class__ in self._generators.values():
# Only invoke the strategy method for registered generators.
call_strategy_method(file, callback)
class SourceGroupRegistry:
"""
    The source group registry is responsible for listening to source_* signals
    on source groups and relaying them to the cache file strategies of the
    appropriate generators.
    In addition, registering a new source group also registers its generated
    files with the cache file registry.
"""
_signals = {
source_saved: 'on_source_saved',
}
def __init__(self):
self._source_groups = {}
for signal in self._signals.keys():
signal.connect(self.source_group_receiver)
def register(self, generator_id, source_group):
from .specs.sourcegroups import SourceGroupFilesGenerator
generator_ids = self._source_groups.setdefault(source_group, set())
generator_ids.add(generator_id)
cachefile_registry.register(generator_id,
SourceGroupFilesGenerator(source_group, generator_id))
def unregister(self, generator_id, source_group):
from .specs.sourcegroups import SourceGroupFilesGenerator
generator_ids = self._source_groups.setdefault(source_group, set())
if generator_id in generator_ids:
generator_ids.remove(generator_id)
cachefile_registry.unregister(generator_id,
SourceGroupFilesGenerator(source_group, generator_id))
def source_group_receiver(self, sender, source, signal, **kwargs):
"""
Relay source group signals to the appropriate spec strategy.
"""
from .cachefiles import ImageCacheFile
source_group = sender
# Ignore signals from unregistered groups.
if source_group not in self._source_groups:
return
specs = [generator_registry.get(id, source=source) for id in
self._source_groups[source_group]]
callback_name = self._signals[signal]
for spec in specs:
file = ImageCacheFile(spec)
call_strategy_method(file, callback_name)
class CacheFileRegistry:
"""
An object for registering generated files with image generators. The two are
associated with each other via a string id. We do this (as opposed to
associating them directly by, for example, putting a ``cachefiles``
attribute on image generators) so that image generators can be overridden
without losing the associated files. That way, a distributable app can
define its own generators without locking the users of the app into it.
"""
def __init__(self):
self._cachefiles = {}
def register(self, generator_id, cachefiles):
"""
Associates generated files with a generator id
"""
if cachefiles not in self._cachefiles:
self._cachefiles[cachefiles] = set()
self._cachefiles[cachefiles].add(generator_id)
def unregister(self, generator_id, cachefiles):
"""
        Disassociates generated files from a generator id
"""
try:
self._cachefiles[cachefiles].remove(generator_id)
except KeyError:
pass
def get(self, generator_id):
for k, v in self._cachefiles.items():
if generator_id in v:
yield from k()
class Register:
"""
Register generators and generated files.
"""
def generator(self, id, generator=None):
if generator is None:
# Return a decorator
def decorator(cls):
self.generator(id, cls)
return cls
return decorator
generator_registry.register(id, generator)
    def cachefiles(self, generator_id, cachefiles):
        # ``cachefiles``: an iterable that yields kwargs, or a callable that
        # returns an iterable of kwargs.
        cachefile_registry.register(generator_id, cachefiles)
def source_group(self, generator_id, source_group):
source_group_registry.register(generator_id, source_group)
class Unregister:
"""
Unregister generators and generated files.
"""
def generator(self, id):
generator_registry.unregister(id)
def cachefiles(self, generator_id, cachefiles):
cachefile_registry.unregister(generator_id, cachefiles)
def source_group(self, generator_id, source_group):
source_group_registry.unregister(generator_id, source_group)
generator_registry = GeneratorRegistry()
cachefile_registry = CacheFileRegistry()
source_group_registry = SourceGroupRegistry()
register = Register()
unregister = Unregister()
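# Illustrative: registering a generator under an id (decorator form) and
# fetching it back. The id, spec and ``some_file`` are hypothetical, and
# ``get()`` triggers autodiscovery, so a configured Django project is assumed.
#
#   from imagekit import ImageSpec, register
#   from imagekit.registry import generator_registry
#
#   @register.generator('myapp:profile:avatar')
#   class AvatarSpec(ImageSpec):
#       format = 'JPEG'
#
#   spec = generator_registry.get('myapp:profile:avatar', source=some_file)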
|
28,189,894
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/imagekit/processors/base.py
|
import warnings
from pilkit.processors.base import *
warnings.warn('imagekit.processors.base is deprecated; use imagekit.processors instead', DeprecationWarning)
__all__ = ['ProcessorPipeline', 'Adjust', 'Reflection', 'Transpose', 'Anchor', 'MakeOpaque']
|
28,189,895
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/imagekit/generatorlibrary.py
|
from .processors import Thumbnail as ThumbnailProcessor
from .registry import register
from .specs import ImageSpec
class Thumbnail(ImageSpec):
def __init__(self, width=None, height=None, anchor=None, crop=None, upscale=None, **kwargs):
self.processors = [ThumbnailProcessor(width, height, anchor=anchor,
crop=crop, upscale=upscale)]
super().__init__(**kwargs)
register.generator('imagekit:thumbnail', Thumbnail)
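# Illustrative: because the spec is registered as 'imagekit:thumbnail', it
# backs the ``thumbnail`` template tag and can also be fetched from the
# registry directly (``img`` is a hypothetical source file):
#
#   {% thumbnail '100x50' img %}
#
#   from imagekit.registry import generator_registry
#   spec = generator_registry.get('imagekit:thumbnail',
#                                 width=100, height=50, source=img)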
|
28,189,896
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/imagekit/utils.py
|
import logging
import re
from hashlib import md5
from importlib import import_module
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.files import File
from pilkit.utils import *
bad_memcached_key_chars = re.compile('[\u0000-\u001f\\s]+')
_autodiscovered = False
def get_nonabstract_descendants(model):
""" Returns all non-abstract descendants of the model. """
if not model._meta.abstract:
yield model
for s in model.__subclasses__():
yield from get_nonabstract_descendants(s)
def get_by_qname(path, desc):
try:
dot = path.rindex('.')
except ValueError:
raise ImproperlyConfigured("%s isn't a %s module." % (path, desc))
module, objname = path[:dot], path[dot + 1:]
try:
mod = import_module(module)
except ImportError as e:
raise ImproperlyConfigured('Error importing %s module %s: "%s"' %
(desc, module, e))
try:
obj = getattr(mod, objname)
return obj
except AttributeError:
raise ImproperlyConfigured('%s module "%s" does not define "%s"'
% (desc[0].upper() + desc[1:], module, objname))
_singletons = {}
def get_singleton(class_path, desc):
global _singletons
cls = get_by_qname(class_path, desc)
instance = _singletons.get(cls)
if not instance:
instance = _singletons[cls] = cls()
return instance
def autodiscover():
"""
    Auto-discover INSTALLED_APPS imagegenerators.py modules and fail silently
    when not present. This forces an import on them to register any image
    generators they may define.
    Adapted from django.contrib.admin.
"""
global _autodiscovered
if _autodiscovered:
return
from django.utils.module_loading import autodiscover_modules
autodiscover_modules('imagegenerators')
_autodiscovered = True
def get_logger(logger_name='imagekit', add_null_handler=True):
logger = logging.getLogger(logger_name)
if add_null_handler:
logger.addHandler(logging.NullHandler())
return logger
def get_field_info(field_file):
"""
A utility for easily extracting information about the host model from a
Django FileField (or subclass). This is especially useful for when you want
to alter processors based on a property of the source model. For example::
class MySpec(ImageSpec):
def __init__(self, source):
instance, attname = get_field_info(source)
self.processors = [SmartResize(instance.thumbnail_width,
instance.thumbnail_height)]
"""
return (
getattr(field_file, 'instance', None),
getattr(getattr(field_file, 'field', None), 'attname', None),
)
def generate(generator):
"""
Calls the ``generate()`` method of a generator instance, and then wraps the
result in a Django File object so Django knows how to save it.
"""
content = generator.generate()
f = File(content)
# The size of the File must be known or Django will try to open a file
# without a name and raise an Exception.
f.size = len(content.read())
# After getting the size reset the file pointer for future reads.
content.seek(0)
return f
def call_strategy_method(file, method_name):
strategy = getattr(file, 'cachefile_strategy', None)
fn = getattr(strategy, method_name, None)
if fn is not None:
fn(file)
def get_cache():
from django.core.cache import caches
return caches[settings.IMAGEKIT_CACHE_BACKEND]
def get_storage():
try:
from django.core.files.storage import storages, InvalidStorageError
except ImportError: # Django < 4.2
return get_singleton(
settings.IMAGEKIT_DEFAULT_FILE_STORAGE, 'file storage backend'
)
else:
try:
return storages[settings.IMAGEKIT_DEFAULT_FILE_STORAGE]
except InvalidStorageError:
return get_singleton(
settings.IMAGEKIT_DEFAULT_FILE_STORAGE, 'file storage backend'
)
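# Example settings for the two supported forms of IMAGEKIT_DEFAULT_FILE_STORAGE
# (values are illustrative; compare tests/test_settings.py):
#
#   # Django >= 4.2: the name of an alias defined in the STORAGES setting
#   STORAGES = {'custom': {'BACKEND': 'tests.utils.CustomStorage'}}
#   IMAGEKIT_DEFAULT_FILE_STORAGE = 'custom'
#
#   # Any Django version: a dotted path to a storage class
#   IMAGEKIT_DEFAULT_FILE_STORAGE = 'tests.utils.CustomStorage'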
def sanitize_cache_key(key):
if settings.IMAGEKIT_USE_MEMCACHED_SAFE_CACHE_KEY:
# Memcached keys can't contain whitespace or control characters.
new_key = bad_memcached_key_chars.sub('', key)
        # They also can't be more than 250 chars long. Since we don't know
        # what the user's cache ``KEY_FUNCTION`` setting is like, we'll
        # limit it to 200.
if len(new_key) >= 200:
new_key = '%s:%s' % (new_key[:200 - 33], md5(key.encode('utf-8')).hexdigest())
key = new_key
return key
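# Illustrative effect when IMAGEKIT_USE_MEMCACHED_SAFE_CACHE_KEY is True:
#
#   sanitize_cache_key('state:my spec\x00key')  # -> 'state:myspeckey'
#
# Keys still longer than 200 characters are truncated to 167 characters and
# suffixed with ':' plus the 32-char md5 hexdigest of the original key.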
|
28,189,897
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/imagekit/processors/resize.py
|
import warnings
from pilkit.processors.resize import *
warnings.warn('imagekit.processors.resize is deprecated; use imagekit.processors instead', DeprecationWarning)
__all__ = ['Resize', 'ResizeToCover', 'ResizeToFill', 'SmartResize', 'ResizeCanvas', 'AddBorder', 'ResizeToFit', 'Thumbnail']
|
28,189,898
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/tests/test_sourcegroups.py
|
import pytest
from django.core.files import File
from imagekit.signals import source_saved
from imagekit.specs.sourcegroups import ImageFieldSourceGroup
from .models import AbstractImageModel, ConcreteImageModel, ImageModel
from .utils import get_image_file
def make_counting_receiver(source_group):
def receiver(sender, *args, **kwargs):
if sender is source_group:
receiver.count += 1
receiver.count = 0
return receiver
@pytest.mark.django_db(transaction=True)
def test_source_saved_signal():
"""
Creating a new instance with an image causes the source_saved signal to be
dispatched.
"""
source_group = ImageFieldSourceGroup(ImageModel, 'image')
receiver = make_counting_receiver(source_group)
source_saved.connect(receiver)
with File(get_image_file(), name='reference.png') as image:
ImageModel.objects.create(image=image)
assert receiver.count == 1
@pytest.mark.django_db(transaction=True)
def test_no_source_saved_signal():
"""
Creating a new instance without an image shouldn't cause the source_saved
signal to be dispatched.
https://github.com/matthewwithanm/django-imagekit/issues/214
"""
source_group = ImageFieldSourceGroup(ImageModel, 'image')
receiver = make_counting_receiver(source_group)
source_saved.connect(receiver)
ImageModel.objects.create()
assert receiver.count == 0
@pytest.mark.django_db(transaction=True)
def test_abstract_model_signals():
"""
Source groups created for abstract models must cause signals to be
dispatched on their concrete subclasses.
"""
source_group = ImageFieldSourceGroup(AbstractImageModel, 'original_image')
receiver = make_counting_receiver(source_group)
source_saved.connect(receiver)
with File(get_image_file(), name='reference.png') as image:
ConcreteImageModel.objects.create(original_image=image)
assert receiver.count == 1
|
28,189,899
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/tests/utils.py
|
import os
import pickle
import shutil
from io import BytesIO
from tempfile import NamedTemporaryFile
from bs4 import BeautifulSoup
from django.core.files import File
from django.core.files.storage import FileSystemStorage
from django.template import Context, Template
from PIL import Image
from imagekit.cachefiles.backends import Simple
from imagekit.conf import settings
from imagekit.utils import get_cache
from .models import Photo
def get_image_file():
"""
See also:
http://en.wikipedia.org/wiki/Lenna
http://sipi.usc.edu/database/database.php?volume=misc&image=12
https://lintian.debian.org/tags/license-problem-non-free-img-lenna.html
https://github.com/libav/libav/commit/8895bf7b78650c0c21c88cec0484e138ec511a4b
"""
path = os.path.join(settings.MEDIA_ROOT, 'reference.png')
return open(path, 'r+b')
def get_unique_image_file():
file = NamedTemporaryFile()
with get_image_file() as image:
file.write(image.read())
return file
def create_image():
return Image.open(get_image_file())
def create_instance(model_class, image_name):
instance = model_class()
img = File(get_image_file())
instance.original_image.save(image_name, img)
instance.save()
img.close()
return instance
def create_photo(name):
return create_instance(Photo, name)
def pickleback(obj):
pickled = BytesIO()
pickle.dump(obj, pickled)
pickled.seek(0)
return pickle.load(pickled)
def render_tag(ttag):
with get_image_file() as img:
template = Template('{%% load imagekit %%}%s' % ttag)
context = Context({'img': img})
return template.render(context)
def get_html_attrs(ttag):
return BeautifulSoup(render_tag(ttag), features="html.parser").img.attrs
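# Illustrative: render_tag/get_html_attrs exercise the template tags against
# the reference image, e.g. (attribute values are made up):
#
#   get_html_attrs('{% thumbnail "100x50" img %}')
#   # -> {'src': '/media/CACHE/...', 'width': '100', 'height': '50'}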
def assert_file_is_falsy(file):
assert not bool(file), 'File is not falsy'
def assert_file_is_truthy(file):
assert bool(file), 'File is not truthy'
class CustomStorage(FileSystemStorage):
pass
class DummyAsyncCacheFileBackend(Simple):
"""
A cache file backend meant to simulate async generation.
"""
is_async = True
def generate(self, file, force=False):
pass
def clear_imagekit_cache():
cache = get_cache()
cache.clear()
# Clear IMAGEKIT_CACHEFILE_DIR
cache_dir = os.path.join(settings.MEDIA_ROOT, settings.IMAGEKIT_CACHEFILE_DIR)
if os.path.exists(cache_dir):
shutil.rmtree(cache_dir)
def clear_imagekit_test_files():
clear_imagekit_cache()
for fname in os.listdir(settings.MEDIA_ROOT):
if fname != 'reference.png':
path = os.path.join(settings.MEDIA_ROOT, fname)
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
|
28,189,900
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/tests/test_abstract_models.py
|
from imagekit.utils import get_nonabstract_descendants
from .models import (AbstractImageModel, ConcreteImageModel,
ConcreteImageModelSubclass)
def test_nonabstract_descendants_generator():
descendants = list(get_nonabstract_descendants(AbstractImageModel))
assert descendants == [ConcreteImageModel, ConcreteImageModelSubclass]
|
28,189,901
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/imagekit/management/commands/generateimages.py
|
import re
from django.core.management.base import BaseCommand
from ...exceptions import MissingSource
from ...registry import cachefile_registry, generator_registry
class Command(BaseCommand):
help = ("""Generate files for the specified image generators (or all of them if
none was provided). Simple, glob-like wildcards are allowed, with *
matching all characters within a segment, and ** matching across
segments. (Segments are separated with colons.) So, for example,
"a:*:c" will match "a:b:c", but not "a:b:x:c", whereas "a:**:c" will
match both. Subsegments are always matched, so "a" will match "a" as
well as "a:b" and "a:b:c".""")
args = '[generator_ids]'
def add_arguments(self, parser):
parser.add_argument('generator_id', nargs='*', help='<app_name>:<model>:<field> for model specs')
def handle(self, *args, **options):
generators = generator_registry.get_ids()
generator_ids = options['generator_id'] if 'generator_id' in options else args
if generator_ids:
patterns = self.compile_patterns(generator_ids)
generators = (id for id in generators if any(p.match(id) for p in patterns))
for generator_id in generators:
self.stdout.write('Validating generator: %s\n' % generator_id)
for image_file in cachefile_registry.get(generator_id):
if image_file.name:
self.stdout.write(' %s\n' % image_file.name)
try:
image_file.generate()
                except MissingSource:
                    self.stdout.write('\tNo source file associated with this generator\n')
                except Exception as err:
                    self.stdout.write('\tFailed %s\n' % err)
def compile_patterns(self, generator_ids):
return [self.compile_pattern(id) for id in generator_ids]
def compile_pattern(self, generator_id):
parts = re.split(r'(\*{1,2})', generator_id)
pattern = ''
for part in parts:
if part == '*':
pattern += '[^:]*'
elif part == '**':
pattern += '.*'
else:
pattern += re.escape(part)
return re.compile('^%s(:.*)?$' % pattern)
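# Illustrative invocations (the generator ids are hypothetical), following the
# wildcard rules described in the help text above:
#   ./manage.py generateimages                     # all registered generators
#   ./manage.py generateimages 'myapp:*:thumb'     # matches 'myapp:profile:thumb',
#                                                  # but not 'myapp:a:b:thumb'
#   ./manage.py generateimages 'myapp:**:thumb'    # matches both of the above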
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,902
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/tests/settings.py
|
import os
ADMINS = (
    ('TEST-R', 'test@example.com'),
)
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
MEDIA_ROOT = os.path.normpath(os.path.join(BASE_PATH, 'media'))
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'imagekit.db',
},
}
SECRET_KEY = '_uobce43e5osp8xgzle*yag2_16%y$sf*5(12vfg25hpnxik_*'
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'imagekit',
'tests',
]
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
    },
}
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.debug',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
],
},
},
]
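# These settings exist only for the test suite; a typical invocation (assuming
# pytest with pytest-django, which the suite's conftest.py implies) would be:
#   DJANGO_SETTINGS_MODULE=tests.settings pytest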
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,903
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/setup.py
|
#!/usr/bin/env python
import codecs
import os
import sys
from setuptools import find_packages, setup
if 'publish' in sys.argv:
    # Build and upload with twine; ``setup.py upload`` is deprecated.
    os.system('python setup.py sdist bdist_wheel')
    os.system('twine upload dist/*')
    sys.exit()
def read(filepath):
with codecs.open(filepath, 'r', 'utf-8') as f:
return f.read()
def exec_file(filepath, globalz=None, localz=None):
exec(read(filepath), globalz, localz)
# Load package meta from the pkgmeta module without loading imagekit.
pkgmeta = {}
exec_file(os.path.join(os.path.dirname(__file__),
'imagekit', 'pkgmeta.py'), pkgmeta)
setup(
name='django-imagekit',
version=pkgmeta['__version__'],
description='Automated image processing for Django models.',
long_description=read(os.path.join(os.path.dirname(__file__), 'README.rst')),
author='Matthew Tretter',
author_email='m@tthewwithanm.com',
maintainer='Bryan Veloso',
maintainer_email='bryan@revyver.com',
license='BSD',
url='http://github.com/matthewwithanm/django-imagekit/',
packages=find_packages(exclude=['*.tests', '*.tests.*', 'tests.*', 'tests']),
zip_safe=False,
include_package_data=True,
install_requires=[
'django-appconf',
'pilkit',
],
extras_require={
'async': ['django-celery>=3.0'],
'async_rq': ['django-rq>=0.6.0'],
'async_dramatiq': ['django-dramatiq>=0.4.0'],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Topic :: Utilities'
],
)
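# The extras_require entries above map to optional install targets, e.g.
# (hypothetical shell usage):
#   pip install "django-imagekit[async_rq]"   # pulls in django-rq>=0.6.0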
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,904
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/tests/imagegenerators.py
|
from imagekit import ImageSpec, register
from imagekit.processors import ResizeToFill
class TestSpec(ImageSpec):
__test__ = False
class ResizeTo1PixelSquare(ImageSpec):
def __init__(self, width=None, height=None, anchor=None, crop=None, **kwargs):
self.processors = [ResizeToFill(1, 1)]
super().__init__(**kwargs)
register.generator('testspec', TestSpec)
register.generator('1pxsq', ResizeTo1PixelSquare)
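# Once registered, a generator can be looked up by id; a minimal sketch
# (the source file variable is illustrative):
# from imagekit.registry import generator_registry
# spec = generator_registry.get('testspec', source=some_image_file)
# image = spec.generate()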
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,905
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/imagekit/specs/__init__.py
|
from copy import copy
from django.conf import settings
from django.db.models.fields.files import ImageFieldFile
from .. import hashers
from ..cachefiles.backends import get_default_cachefile_backend
from ..cachefiles.strategies import load_strategy
from ..exceptions import AlreadyRegistered, MissingSource
from ..registry import generator_registry, register
from ..utils import get_by_qname, open_image, process_image
class BaseImageSpec:
"""
An object that defines how a new image should be generated from a source
image.
"""
cachefile_storage = None
"""A Django storage system to use to save a cache file."""
cachefile_backend = None
"""
An object responsible for managing the state of cache files. Defaults to
an instance of ``IMAGEKIT_DEFAULT_CACHEFILE_BACKEND``.
"""
cachefile_strategy = settings.IMAGEKIT_DEFAULT_CACHEFILE_STRATEGY
"""
A dictionary containing callbacks that allow you to customize how and when
the image file is created. Defaults to
``IMAGEKIT_DEFAULT_CACHEFILE_STRATEGY``.
"""
def __init__(self):
self.cachefile_backend = self.cachefile_backend or get_default_cachefile_backend()
self.cachefile_strategy = load_strategy(self.cachefile_strategy)
def generate(self):
raise NotImplementedError
MissingSource = MissingSource
"""
Raised when an operation requiring a source is attempted on a spec that has
no source.
"""
class ImageSpec(BaseImageSpec):
"""
An object that defines how to generate a new image from a source file using
PIL-based processors. (See :mod:`imagekit.processors`)
"""
processors = []
"""A list of processors to run on the original image."""
format = None
"""
The format of the output file. If not provided, ImageSpecField will try to
guess the appropriate format based on the extension of the filename and the
format of the input image.
"""
options = None
"""
A dictionary that will be passed to PIL's ``Image.save()`` method as keyword
arguments. Valid options vary between formats, but some examples include
``quality``, ``optimize``, and ``progressive`` for JPEGs. See the PIL
documentation for others.
"""
autoconvert = True
"""
Specifies whether automatic conversion using ``prepare_image()`` should be
performed prior to saving.
"""
def __init__(self, source):
self.source = source
super().__init__()
@property
def cachefile_name(self):
if not self.source:
return None
fn = get_by_qname(settings.IMAGEKIT_SPEC_CACHEFILE_NAMER, 'namer')
return fn(self)
@property
def source(self):
src = getattr(self, '_source', None)
if not src:
field_data = getattr(self, '_field_data', None)
if field_data:
src = self._source = getattr(field_data['instance'], field_data['attname'])
del self._field_data
return src
@source.setter
def source(self, value):
self._source = value
def __getstate__(self):
state = copy(self.__dict__)
# Unpickled ImageFieldFiles won't work (they're missing a storage
# object). Since they're such a common use case, we special case them.
# Unfortunately, this also requires us to add the source getter to
# lazily retrieve the source on the reconstructed object; simply trying
# to look up the source in ``__setstate__`` would require us to get the
# model instance but, if ``__setstate__`` was called as part of
# deserializing that model, the model wouldn't be fully reconstructed
# yet, preventing us from accessing the source field.
# (This is issue #234.)
if isinstance(self.source, ImageFieldFile):
field = self.source.field
state['_field_data'] = {
'instance': getattr(self.source, 'instance', None),
'attname': getattr(field, 'name', None),
}
state.pop('_source', None)
return state
def get_hash(self):
return hashers.pickle([
self.source.name,
self.processors,
self.format,
self.options,
self.autoconvert,
])
def generate(self):
if not self.source:
raise MissingSource("The spec '%s' has no source file associated"
" with it." % self)
# TODO: Move into a generator base class
# TODO: Factor out a generate_image function so you can create a generator and only override the PIL.Image creating part.
# (The tricky part is how to deal with original_format since generator base class won't have one.)
closed = self.source.closed
if closed:
# Django file object should know how to reopen itself if it was closed
# https://code.djangoproject.com/ticket/13750
self.source.open()
try:
img = open_image(self.source)
new_image = process_image(img,
processors=self.processors,
format=self.format,
autoconvert=self.autoconvert,
options=self.options)
finally:
if closed:
# We need to close the file if it was opened by us
self.source.close()
return new_image
def create_spec_class(class_attrs):
class DynamicSpecBase(ImageSpec):
def __reduce__(self):
try:
getstate = self.__getstate__
except AttributeError:
state = self.__dict__
else:
state = getstate()
return (create_spec, (class_attrs, state))
return type('DynamicSpec', (DynamicSpecBase,), class_attrs)
def create_spec(class_attrs, state):
cls = create_spec_class(class_attrs)
instance = cls.__new__(cls) # Create an instance without calling the __init__ (which may have required args).
try:
setstate = instance.__setstate__
except AttributeError:
instance.__dict__ = state
else:
setstate(state)
return instance
class SpecHost:
"""
An object that ostensibly has a spec attribute but really delegates to the
spec registry.
"""
def __init__(self, spec=None, spec_id=None, **kwargs):
spec_attrs = {k: v for k, v in kwargs.items() if v is not None}
if spec_attrs:
if spec:
raise TypeError('You can provide either an image spec or'
' arguments for the ImageSpec constructor, but not both.')
else:
spec = create_spec_class(spec_attrs)
self._original_spec = spec
if spec_id:
self.set_spec_id(spec_id)
def set_spec_id(self, id):
"""
Sets the spec id for this object. Useful for when the id isn't
known when the instance is constructed (e.g. for ImageSpecFields whose
generated `spec_id`s are only known when they are contributed to a
class). If the object was initialized with a spec, it will be registered
under the provided id.
"""
self.spec_id = id
if self._original_spec:
try:
register.generator(id, self._original_spec)
except AlreadyRegistered:
# Fields should not cause AlreadyRegistered exceptions. If a
# spec is already registered, that should be used. It is
# especially important that an error is not thrown here because
# of South, which will create duplicate models as part of its
# "fake orm," therefore re-registering specs.
pass
def get_spec(self, source):
"""
Look up the spec by the spec id. We do this (instead of storing the
spec as an attribute) so that users can override apps' specs--without
having to edit model definitions--simply by registering another spec
with the same id.
"""
if not getattr(self, 'spec_id', None):
raise Exception('Object %s has no spec id.' % self)
return generator_registry.get(self.spec_id, source=source)
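# A minimal sketch of how the pieces above fit together when declaring and
# registering a spec (the processor and id are illustrative):
#
# from imagekit.processors import ResizeToFill
#
# class Thumbnail(ImageSpec):
#     processors = [ResizeToFill(100, 50)]
#     format = 'JPEG'
#     options = {'quality': 60}
#
# register.generator('myapp:thumbnail', Thumbnail)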
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,906
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/imagekit/processors/utils.py
|
import warnings
from pilkit.processors.utils import *
warnings.warn('imagekit.processors.utils is deprecated; use pilkit.processors.utils instead', DeprecationWarning)
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,189,907
|
matthewwithanm/django-imagekit
|
refs/heads/develop
|
/imagekit/hashers.py
|
from copy import copy
from hashlib import md5
from io import BytesIO
from pickle import DICT, MARK, _Pickler
class CanonicalizingPickler(_Pickler):
dispatch = copy(_Pickler.dispatch)
def save_set(self, obj):
rv = obj.__reduce_ex__(0)
rv = (rv[0], (sorted(rv[1][0]),), rv[2])
self.save_reduce(obj=obj, *rv)
dispatch[set] = save_set
def save_dict(self, obj):
write = self.write
write(MARK + DICT)
self.memoize(obj)
self._batch_setitems(sorted(obj.items()))
dispatch[dict] = save_dict
def pickle(obj):
file = BytesIO()
CanonicalizingPickler(file, 0).dump(obj)
return md5(file.getvalue()).hexdigest()
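# A quick sketch of the canonicalization guarantee: logically equal containers
# hash identically regardless of insertion order.
# >>> pickle({'a': 1, 'b': 2}) == pickle({'b': 2, 'a': 1})
# True
# >>> pickle({3, 1, 2}) == pickle({1, 2, 3})
# True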
|
{"/imagekit/models/fields/files.py": ["/imagekit/utils.py"], "/imagekit/utils.py": ["/imagekit/lib.py"], "/imagekit/__init__.py": ["/imagekit/specs/__init__.py", "/imagekit/pkgmeta.py", "/imagekit/registry.py"], "/tests/models.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/tests/test_optimistic_strategy.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/tests/utils.py"], "/tests/test_fields.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py", "/tests/models.py", "/tests/utils.py"], "/tests/test_serialization.py": ["/imagekit/cachefiles/__init__.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/tests/test_cachefiles.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/cachefiles/backends.py", "/tests/imagegenerators.py", "/tests/utils.py"], "/imagekit/templatetags/imagekit.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/registry.py"], "/tests/conftest.py": ["/tests/utils.py"], "/imagekit/cachefiles/namers.py": ["/imagekit/utils.py"], "/imagekit/forms/fields.py": ["/imagekit/specs/__init__.py", "/imagekit/utils.py"], "/imagekit/models/fields/__init__.py": ["/imagekit/registry.py", "/imagekit/specs/__init__.py", "/imagekit/specs/sourcegroups.py", "/imagekit/models/fields/files.py", "/imagekit/models/fields/utils.py"], "/imagekit/specs/sourcegroups.py": ["/imagekit/cachefiles/__init__.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_no_extra_queries.py": ["/tests/models.py"], "/imagekit/models/fields/utils.py": ["/imagekit/cachefiles/__init__.py"], "/imagekit/cachefiles/backends.py": ["/imagekit/utils.py"], "/tests/test_generateimage_tag.py": ["/tests/utils.py"], "/imagekit/cachefiles/__init__.py": ["/imagekit/files.py", "/imagekit/registry.py", "/imagekit/signals.py", "/imagekit/utils.py"], "/tests/test_thumbnail_tag.py": ["/tests/utils.py"], "/tests/test_settings.py": ["/imagekit/conf.py", "/imagekit/utils.py", "/tests/utils.py"], "/imagekit/files.py": ["/imagekit/utils.py"], "/tests/test_closing_fieldfiles.py": ["/tests/models.py", "/tests/utils.py"], "/imagekit/cachefiles/strategies.py": ["/imagekit/utils.py"], "/imagekit/registry.py": ["/imagekit/signals.py", "/imagekit/utils.py", "/imagekit/specs/sourcegroups.py", "/imagekit/cachefiles/__init__.py"], "/imagekit/generatorlibrary.py": ["/imagekit/processors/__init__.py", "/imagekit/registry.py", "/imagekit/specs/__init__.py"], "/tests/test_sourcegroups.py": ["/imagekit/signals.py", "/imagekit/specs/sourcegroups.py", "/tests/models.py", "/tests/utils.py"], "/tests/utils.py": ["/imagekit/cachefiles/backends.py", "/imagekit/conf.py", "/imagekit/utils.py", "/tests/models.py"], "/tests/test_abstract_models.py": ["/imagekit/utils.py", "/tests/models.py"], "/imagekit/management/commands/generateimages.py": ["/imagekit/registry.py"], "/tests/imagegenerators.py": ["/imagekit/__init__.py", "/imagekit/processors/__init__.py"], "/imagekit/specs/__init__.py": ["/imagekit/__init__.py", "/imagekit/cachefiles/backends.py", "/imagekit/cachefiles/strategies.py", "/imagekit/registry.py", "/imagekit/utils.py"]}
|
28,200,812
|
mandalbiswadip/django_sql
|
refs/heads/master
|
/contact/filters.py
|
from django_filters import FilterSet, CharFilter, ModelChoiceFilter
from django.db.models import Q
from .models import Contact
class ContactFilter(FilterSet):
q = CharFilter(method='combined_filter', label='Search')
class Meta:
model = Contact
fields = ['q']
    # Custom filter method: match the search term against any of the name,
    # address, or phone fields. Filter the queryset that was passed in
    # (rather than Contact.objects) so this composes with other filters.
    def combined_filter(self, queryset, name, value):
        return queryset.filter(
            Q(fname__exact=value) | Q(mname__exact=value) |
            Q(lname__exact=value) | Q(address__state__exact=value) |
            Q(address__address__exact=value) |
            Q(address__city__exact=value) |
            Q(address__zip__exact=value) |
            Q(phone__area_code__exact=value) |
            Q(phone__number__exact=value)
        ).distinct()
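# Usage sketch (the query term is illustrative):
# f = ContactFilter({'q': 'Springfield'}, queryset=Contact.objects.all())
# matches = f.qs  # distinct contacts matching any of the OR'd fields above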
|
{"/contact/views.py": ["/contact/filters.py", "/contact/forms.py", "/contact/models.py", "/contact/tables.py", "/contact/validations.py"], "/contact/migrations/0004_auto_20210723_1955.py": ["/contact/models.py"], "/contact/forms.py": ["/contact/models.py", "/contact/validations.py"], "/parse_csv.py": ["/contact/validations.py"], "/contact/filters.py": ["/contact/models.py"], "/contact/tables.py": ["/contact/models.py"], "/contact/migrations/0001_initial.py": ["/contact/models.py"]}
|
28,260,389
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/crossref_fundref_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Aniek Roelofs, Richard Hosking, James Diprose
from __future__ import annotations
import gzip
import io
import json
import logging
import os
import random
import shutil
import subprocess
import xml.etree.ElementTree as ET
from typing import Dict, List, Tuple
import jsonlines
import pendulum
import requests
from academic_observatory_workflows.config import schema_folder as default_schema_folder
from airflow.api.common.experimental.pool import create_pool
from airflow.exceptions import AirflowException
from airflow.models.taskinstance import TaskInstance
from airflow.models.variable import Variable
from observatory.platform.utils.airflow_utils import AirflowVars
from observatory.platform.utils.gc_utils import (
bigquery_sharded_table_id,
bigquery_table_exists,
)
from observatory.platform.utils.proc_utils import wait_for_process
from observatory.platform.utils.url_utils import retry_session
from observatory.platform.workflows.snapshot_telescope import (
SnapshotRelease,
SnapshotTelescope,
)
class CrossrefFundrefRelease(SnapshotRelease):
    def __init__(self, dag_id: str, release_date: pendulum.DateTime, url: str):
"""Construct a CrossrefFundrefRelease
:param dag_id: The DAG id.
:param release_date: The release date.
:param url: The url corresponding with this release date.
"""
self.url = url
download_files_regex = "crossref_fundref.tar.gz"
extract_files_regex = "crossref_fundref.rdf"
transform_files_regex = "crossref_fundref.jsonl.gz"
super().__init__(dag_id, release_date, download_files_regex, extract_files_regex, transform_files_regex)
@property
def download_path(self) -> str:
"""Get the path to the downloaded file.
:return: the file path.
"""
return os.path.join(self.download_folder, "crossref_fundref.tar.gz")
@property
def extract_path(self) -> str:
"""Get the path to the extracted file.
:return: the file path.
"""
return os.path.join(self.extract_folder, "crossref_fundref.rdf")
@property
def transform_path(self) -> str:
"""Get the path to the transformed file.
:return: the file path.
"""
return os.path.join(self.transform_folder, "crossref_fundref.jsonl.gz")
def download(self):
"""Downloads release tar.gz file from url."""
logging.info(f"Downloading file: {self.download_path}, url: {self.url}")
# A selection of headers to prevent 403/forbidden error.
headers_list = [
{
"authority": "gitlab.com",
"upgrade-insecure-requests": "1",
"user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/83.0.4103.116 Safari/537.36",
"accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,"
"*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
"sec-fetch-site": "none",
"sec-fetch-mode": "navigate",
"sec-fetch-dest": "document",
"accept-language": "en-GB,en-US;q=0.9,en;q=0.8",
},
{
"User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:78.0) Gecko/20100101 Firefox/78.0",
"Accept": "*/*",
"Accept-Language": "en-US,en;q=0.5",
"Referer": "https://gitlab.com/",
},
]
# Download release
with requests.get(self.url, headers=random.choice(headers_list), stream=True) as response:
with open(self.download_path, "wb") as file:
shutil.copyfileobj(response.raw, file)
def extract(self):
"""Extract release from gzipped tar file."""
logging.info(f"Extracting file: {self.download_path}")
# Tar file contains both README.md and registry.rdf, use tar -ztf to get path of 'registry.rdf'
# Use this path to extract only registry.rdf to a new file.
cmd = (
f"registry_path=$(tar -ztf {self.download_path} | grep -m1 '/registry.rdf'); "
f"tar -xOzf {self.download_path} $registry_path > {self.extract_path}"
)
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, executable="/bin/bash")
stdout, stderr = wait_for_process(p)
if stdout:
logging.info(stdout)
if stderr:
raise AirflowException(f"bash command failed for {self.url}: {stderr}")
logging.info(f"File extracted to: {self.extract_path}")
def transform(self):
"""Transforms release by storing file content in gzipped json format. Relationships between funders are added.
:return: None
"""
# Strip leading whitespace from first line if present.
strip_whitespace(self.extract_path)
# Parse RDF funders data
funders, funders_by_key = parse_fundref_registry_rdf(self.extract_path)
funders = add_funders_relationships(funders, funders_by_key)
# Transform FundRef release into JSON Lines format saving in memory buffer
# Save in memory buffer to gzipped file
with io.BytesIO() as bytes_io:
with gzip.GzipFile(fileobj=bytes_io, mode="w") as gzip_file:
with jsonlines.Writer(gzip_file) as writer:
writer.write_all(funders)
with open(self.transform_path, "wb") as jsonl_gzip_file:
jsonl_gzip_file.write(bytes_io.getvalue())
logging.info(f"Success transforming release: {self.url}")
class CrossrefFundrefTelescope(SnapshotTelescope):
"""Crossref Fundref Telescope."""
DAG_ID = "crossref_fundref"
DATASET_ID = "crossref"
RELEASES_URL = "https://gitlab.com/api/v4/projects/crossref%2Fopen_funder_registry/releases"
def __init__(
self,
dag_id: str = DAG_ID,
start_date: pendulum.DateTime = pendulum.datetime(2014, 3, 1),
schedule_interval: str = "@weekly",
dataset_id: str = DATASET_ID,
schema_folder: str = default_schema_folder(),
load_bigquery_table_kwargs: Dict = None,
table_descriptions: Dict = None,
catchup: bool = True,
airflow_vars: List = None,
):
"""Construct a CrossrefFundrefTelescope instance.
:param dag_id: the id of the DAG.
:param start_date: the start date of the DAG.
:param schedule_interval: the schedule interval of the DAG.
:param dataset_id: the BigQuery dataset id.
:param schema_folder: the SQL schema path.
:param load_bigquery_table_kwargs: the customisation parameters for loading data into a BigQuery table.
:param table_descriptions: a dictionary with table ids and corresponding table descriptions.
:param catchup: whether to catchup the DAG or not.
:param airflow_vars: list of airflow variable keys, for each variable it is checked if it exists in airflow.
"""
if table_descriptions is None:
table_descriptions = {
dag_id: "The Funder Registry dataset: " "https://www.crossref.org/services/funder-registry/"
}
if airflow_vars is None:
airflow_vars = [
AirflowVars.DATA_PATH,
AirflowVars.PROJECT_ID,
AirflowVars.DATA_LOCATION,
AirflowVars.DOWNLOAD_BUCKET,
AirflowVars.TRANSFORM_BUCKET,
]
if load_bigquery_table_kwargs is None:
load_bigquery_table_kwargs = {"ignore_unknown_values": True}
super().__init__(
dag_id,
start_date,
schedule_interval,
dataset_id,
schema_folder,
load_bigquery_table_kwargs=load_bigquery_table_kwargs,
table_descriptions=table_descriptions,
catchup=catchup,
airflow_vars=airflow_vars,
)
# Create Gitlab pool to limit the number of connections to Gitlab, which is very quick to block requests if
# there are too many at once.
pool_name = "gitlab_pool"
num_slots = 2
description = "A pool to limit the connections to Gitlab."
create_pool(pool_name, num_slots, description)
self.add_setup_task(self.check_dependencies)
self.add_setup_task(self.get_release_info, pool=pool_name)
self.add_task(self.download, pool=pool_name)
self.add_task(self.upload_downloaded)
self.add_task(self.extract)
self.add_task(self.transform)
self.add_task(self.upload_transformed)
self.add_task(self.bq_load)
self.add_task(self.cleanup)
def make_release(self, **kwargs) -> List[CrossrefFundrefRelease]:
"""Make release instances. The release is passed as an argument to the function (TelescopeFunction) that is
called in 'task_callable'.
:param kwargs: the context passed from the PythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html for a list of the keyword arguments that are
passed to this argument.
        :return: a list of CrossrefFundrefRelease instances.
"""
ti: TaskInstance = kwargs["ti"]
release_info = ti.xcom_pull(
key=CrossrefFundrefTelescope.RELEASE_INFO,
task_ids=self.get_release_info.__name__,
include_prior_dates=False,
)
releases = []
for release in release_info:
release_date = pendulum.parse(release["date"])
releases.append(CrossrefFundrefRelease(self.dag_id, release_date, release["url"]))
return releases
    def get_release_info(self, **kwargs) -> bool:
        """Check which releases were published between the previous and current
        execution dates of the DAG and, for each of those, whether a BigQuery
        table already exists for the release. The releases that pass both checks
        are passed on to the next tasks via XCom; if none pass, the workflow stops.
        :param kwargs: the context passed from the PythonOperator. See
        https://airflow.apache.org/docs/stable/macros-ref.html
        for a list of the keyword arguments that are passed to this argument.
        :return: True if there are releases to process, False otherwise.
        """
# Get variables
project_id = Variable.get(AirflowVars.PROJECT_ID)
# List releases between a start date and an end date
prev_execution_date = pendulum.instance(kwargs["prev_execution_date"])
execution_date = pendulum.instance(kwargs["execution_date"])
releases_list = list_releases(prev_execution_date, execution_date)
logging.info(f"Releases between prev ({prev_execution_date}) and current ({execution_date}) execution date:")
logging.info(releases_list)
# Check if the BigQuery table for each release already exists and only process release if the table
# doesn't exist
releases_list_out = []
for release in releases_list:
release_date = pendulum.parse(release["date"])
table_id = bigquery_sharded_table_id(CrossrefFundrefTelescope.DAG_ID, release_date)
logging.info("Checking if bigquery table already exists:")
if bigquery_table_exists(project_id, self.dataset_id, table_id):
logging.info(
f"Skipping as table exists for {release['url']}: " f"{project_id}.{self.dataset_id}.{table_id}"
)
else:
logging.info(f"Table does not exist yet, processing {release['url']} in this workflow")
releases_list_out.append(release)
# If releases_list_out contains items then the DAG will continue (return True) otherwise it will
# stop (return False)
continue_dag = len(releases_list_out) > 0
if continue_dag:
ti: TaskInstance = kwargs["ti"]
ti.xcom_push(CrossrefFundrefTelescope.RELEASE_INFO, releases_list_out, execution_date)
return continue_dag
def download(self, releases: List[CrossrefFundrefRelease], **kwargs):
"""Task to download the releases.
:param releases: a list with Crossref Fundref releases.
:return: None.
"""
for release in releases:
release.download()
def extract(self, releases: List[CrossrefFundrefRelease], **kwargs):
"""Task to extract the releases.
:param releases: a list with Crossref Fundref releases.
:return: None.
"""
for release in releases:
release.extract()
def transform(self, releases: List[CrossrefFundrefRelease], **kwargs):
"""Task to transform the releases.
:param releases: a list with Crossref Fundref releases.
:return: None.
"""
for release in releases:
release.transform()
def list_releases(start_date: pendulum.DateTime, end_date: pendulum.DateTime) -> List[dict]:
"""List all available CrossrefFundref releases between the start and end date
:param start_date: The start date of the period to look for releases
:param end_date: The end date of the period to look for releases
:return: list with dictionaries of release info (url and release date)
"""
# A selection of headers to prevent 403/forbidden error.
headers_list = [
{
"authority": "gitlab.com",
"upgrade-insecure-requests": "1",
"user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/84.0.4147.89 Safari/537.36",
"accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,"
"*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
"sec-fetch-site": "none",
"sec-fetch-mode": "navigate",
"sec-fetch-dest": "document",
"accept-language": "en-GB,en-US;q=0.9,en;q=0.8",
},
{
"User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:78.0) Gecko/20100101 Firefox/78.0",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
"Accept-Language": "en-US,en;q=0.5",
"DNT": "1",
"Connection": "keep-alive",
"Upgrade-Insecure-Requests": "1",
},
]
release_info = []
headers = random.choice(headers_list)
current_page = 1
while True:
# Fetch page
url = f"{CrossrefFundrefTelescope.RELEASES_URL}?per_page=100&page={current_page}"
response = retry_session().get(url, headers=headers)
# Check if correct response code
if response is not None and response.status_code == 200:
# Parse json
num_pages = int(response.headers["X-Total-Pages"])
json_response = json.loads(response.text)
# Parse release information
for release in json_response:
version = float(release["tag_name"].strip("v"))
for source in release["assets"]["sources"]:
if source["format"] == "tar.gz":
# Parse release date
if version == 0.1:
release_date = pendulum.datetime(year=2014, month=3, day=1)
elif version < 1.0:
date_string = release["description"].split("\n")[0]
release_date = pendulum.from_format("01 " + date_string, "DD MMMM YYYY")
else:
release_date = pendulum.parse(release["released_at"])
# Only include release if it is within start and end dates
if start_date <= release_date < end_date:
release_info.append({"url": source["url"], "date": release_date.format("YYYYMMDD")})
# Check if we should exit or get the next page
if num_pages <= current_page:
break
current_page += 1
else:
raise AirflowException(f"Error retrieving response from: {url}")
return release_info
def strip_whitespace(file_path: str):
"""Strip leading white space from the first line of the file.
    This is present in fundref release 2019-06-01. If not removed it will cause an XML ParseError.
:param file_path: Path to file from which to trim leading white space.
:return: None.
"""
with open(file_path, "r") as f_in, open(file_path + ".tmp", "w") as f_out:
first_line = True
for line in f_in:
if first_line and not line.startswith(" "):
os.remove(file_path + ".tmp")
return
elif first_line and line.startswith(" "):
line = line.lstrip()
f_out.write(line)
first_line = False
os.rename(file_path + ".tmp", file_path)
def new_funder_template():
"""Helper Function for creating a new Funder.
:return: a blank funder object.
"""
return {
"funder": None,
"pre_label": None,
"alt_label": [],
"narrower": [],
"broader": [],
"modified": None,
"created": None,
"funding_body_type": None,
"funding_body_sub_type": None,
"region": None,
"country": None,
"country_code": None,
"state": None,
"tax_id": None,
"continuation_of": [],
"renamed_as": [],
"replaces": [],
"affil_with": [],
"merged_with": [],
"incorporated_into": [],
"is_replaced_by": [],
"incorporates": [],
"split_into": [],
"status": None,
"merger_of": [],
"split_from": None,
"formly_known_as": None,
"notation": None,
}
def parse_fundref_registry_rdf(registry_file_path: str) -> Tuple[List, Dict]:
"""Helper function to parse a fundref registry rdf file and to return a python list containing each funder.
:param registry_file_path: the filename of the registry.rdf file to be parsed.
:return: funders list containing all the funders parsed from the input rdf and dictionary of funders with their
id as key.
"""
funders = []
funders_by_key = {}
tree = ET.parse(registry_file_path)
root = tree.getroot()
tag_prefix = root.tag.split("}")[0] + "}"
for record in root:
tag = record.tag.split("}")[-1]
if tag == "ConceptScheme":
for nested in record:
tag = nested.tag.split("}")[-1]
if tag == "hasTopConcept":
funder_id = nested.attrib[tag_prefix + "resource"]
funders_by_key[funder_id] = new_funder_template()
if tag == "Concept":
funder_id = record.attrib[tag_prefix + "about"]
funder = funders_by_key[funder_id]
funder["funder"] = funder_id
for nested in record:
tag = nested.tag.split("}")[-1]
if tag == "inScheme":
continue
elif tag == "prefLabel":
funder["pre_label"] = nested[0][0].text
elif tag == "altLabel":
alt_label = nested[0][0].text
if alt_label is not None:
funder["alt_label"].append(alt_label)
elif tag == "narrower":
funder["narrower"].append(nested.attrib[tag_prefix + "resource"])
elif tag == "broader":
funder["broader"].append(nested.attrib[tag_prefix + "resource"])
elif tag == "modified":
funder["modified"] = nested.text
elif tag == "created":
funder["created"] = nested.text
elif tag == "fundingBodySubType":
funder["funding_body_type"] = nested.text
elif tag == "fundingBodyType":
funder["funding_body_sub_type"] = nested.text
elif tag == "region":
funder["region"] = nested.text
elif tag == "country":
funder["country"] = nested.text
elif tag == "state":
funder["state"] = nested.text
elif tag == "address":
funder["country_code"] = nested[0][0].text
elif tag == "taxId":
funder["tax_id"] = nested.text
elif tag == "continuationOf":
funder["continuation_of"].append(nested.attrib[tag_prefix + "resource"])
elif tag == "renamedAs":
funder["renamed_as"].append(nested.attrib[tag_prefix + "resource"])
elif tag == "replaces":
funder["replaces"].append(nested.attrib[tag_prefix + "resource"])
elif tag == "affilWith":
funder["affil_with"].append(nested.attrib[tag_prefix + "resource"])
elif tag == "mergedWith":
funder["merged_with"].append(nested.attrib[tag_prefix + "resource"])
elif tag == "incorporatedInto":
funder["incorporated_into"].append(nested.attrib[tag_prefix + "resource"])
elif tag == "isReplacedBy":
funder["is_replaced_by"].append(nested.attrib[tag_prefix + "resource"])
elif tag == "incorporates":
funder["incorporates"].append(nested.attrib[tag_prefix + "resource"])
elif tag == "splitInto":
funder["split_into"].append(nested.attrib[tag_prefix + "resource"])
elif tag == "status":
funder["status"] = nested.attrib[tag_prefix + "resource"]
elif tag == "mergerOf":
funder["merger_of"].append(nested.attrib[tag_prefix + "resource"])
elif tag == "splitFrom":
funder["split_from"] = nested.attrib[tag_prefix + "resource"]
elif tag == "formerlyKnownAs":
funder["formly_known_as"] = nested.attrib[tag_prefix + "resource"]
elif tag == "notation":
funder["notation"] = nested.text
else:
logging.info(f"Unrecognized tag for element: {nested}")
funders.append(funder)
return funders, funders_by_key
def add_funders_relationships(funders: List, funders_by_key: Dict) -> List:
"""Adds any children/parent relationships to funder instances in the funders list.
:param funders: List of funders
:param funders_by_key: Dictionary of funders with their id as key.
:return: funders with added relationships.
"""
for funder in funders:
children, returned_depth = recursive_funders(funders_by_key, funder, 0, "narrower", [])
funder["children"] = children
funder["bottom"] = len(children) > 0
parent, returned_depth = recursive_funders(funders_by_key, funder, 0, "broader", [])
funder["parents"] = parent
funder["top"] = len(parent) > 0
return funders
def recursive_funders(
funders_by_key: Dict, funder: Dict, depth: int, direction: str, sub_funders: List
) -> Tuple[List, int]:
"""Recursively goes through a funder/sub_funder dict. The funder properties can be looked up with the
funders_by_key dictionary that stores the properties per funder id. Any children/parents for the funder are
already given in the xml element with the 'narrower' and 'broader' tags. For each funder in the list,
it will recursively add any children/parents for those funders in 'narrower'/'broader' and their funder properties.
:param funders_by_key: dictionary with id as key and funders object as value
:param funder: dictionary of a given funder containing 'narrower' and 'broader' info
:param depth: keeping track of nested depth
:param direction: either 'narrower' or 'broader' to get 'children' or 'parents'
:param sub_funders: list to keep track of which funder ids are parents
:return: list of children and current depth
"""
starting_depth = depth
children = []
# Loop through funder_ids in 'narrower' or 'broader' info
for funder_id in funder[direction]:
if funder_id in sub_funders:
            # Stop the recursion if the funder is its own parent or child
            logging.info(f"Funder {funder_id} is its own parent/child, skipping..")
name = "NA"
returned = []
returned_depth = depth
sub_funders.append(funder_id)
else:
try:
sub_funder = funders_by_key[funder_id]
# Add funder id of sub_funder to list to keep track of 'higher' sub_funders in the recursion
sub_funders.append(sub_funder["funder"])
# Store name to pass on to child object
name = sub_funder["pre_label"]
# Get children/parents of sub_funder
returned, returned_depth = recursive_funders(
funders_by_key, sub_funder, starting_depth + 1, direction, sub_funders
)
except KeyError:
logging.info(f"Could not find funder by id: {funder_id}, skipping..")
name = "NA"
returned = []
returned_depth = depth
sub_funders.append(funder_id)
# Add child/parent (containing nested children/parents) to list
if direction == "narrower":
child = {"funder": funder_id, "name": name, "children": returned}
else:
child = {"funder": funder_id, "name": name, "parent": returned}
children.append(child)
sub_funders.pop(-1)
if returned_depth > depth:
depth = returned_depth
return children, depth
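# A minimal usage sketch (illustrative data only, not part of the telescope):
# one parent funder lists one child under 'narrower', with the reciprocal
# 'broader' reference. funders_by_key maps funder id -> funder dict, as
# returned by the parsing function above.
def _recursive_funders_demo():
    parent_funder = {"funder": "f1", "pre_label": "Parent", "narrower": ["f2"], "broader": []}
    child_funder = {"funder": "f2", "pre_label": "Child", "narrower": [], "broader": ["f1"]}
    funders_by_key = {"f1": parent_funder, "f2": child_funder}
    children, depth = recursive_funders(funders_by_key, parent_funder, 0, "narrower", [])
    assert children == [{"funder": "f2", "name": "Child", "children": []}]
    assert depth == 1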
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,390
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Aniek Roelofs, James Diprose
# The keywords airflow and DAG are required to load the DAGs from this file, see bullet 2 in the Apache Airflow FAQ:
# https://airflow.apache.org/docs/stable/faq.html
""" A DAG that harvests the Unpaywall database: https://unpaywall.org/
Saved to the BigQuery table: <project_id>.our_research.unpaywallYYYYMMDD
Has been tested with the following Unpaywall releases:
* 2020-04-27, 2020-02-25, 2019-11-22, 2019-08-16, 2019-04-19, 2019-02-21, 2018-09-27, 2018-09-24
Does not work with the following releases:
* 2018-03-29, 2018-04-28, 2018-06-21, 2018-09-02, 2018-09-06
"""
from academic_observatory_workflows.workflows.unpaywall_snapshot_telescope import (
UnpaywallSnapshotTelescope,
)
telescope = UnpaywallSnapshotTelescope()
globals()[telescope.dag_id] = telescope.make_dag()
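# Why the globals() assignment above works, in brief: Airflow's DagBag imports
# this module and collects any module-level objects that are DAG instances, so
# the line is equivalent to the hypothetical hard-coded form below (the dag_id
# value is an assumption here):
#
#   unpaywall_snapshot = telescope.make_dag()
#
# Assigning via globals() simply avoids hard-coding the dag_id.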
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,391
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/tests/test_ror_telescope.py
|
# Copyright 2021 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Aniek Roelofs
import json
import jsonlines
import os
import httpretty
import pendulum
from unittest.mock import patch
from click.testing import CliRunner
from airflow.exceptions import AirflowException
from academic_observatory_workflows.config import test_fixtures_folder
from academic_observatory_workflows.workflows.ror_telescope import (
RorRelease,
RorTelescope,
list_ror_records,
)
from observatory.platform.utils.gc_utils import bigquery_sharded_table_id
from observatory.platform.utils.test_utils import (
ObservatoryEnvironment,
ObservatoryTestCase,
module_file_path,
)
from observatory.platform.utils.workflow_utils import (
blob_name,
)
class TestRorTelescope(ObservatoryTestCase):
"""Tests for the ROR telescope"""
def __init__(self, *args, **kwargs):
"""Constructor which sets up variables used by tests.
:param args: arguments.
:param kwargs: keyword arguments.
"""
super(TestRorTelescope, self).__init__(*args, **kwargs)
self.project_id = os.getenv("TEST_GCP_PROJECT_ID")
self.data_location = os.getenv("TEST_GCP_DATA_LOCATION")
# Get list of dictionaries with expected BQ table content
table_content = []
with jsonlines.open(test_fixtures_folder("ror", "table_content.jsonl"), "r") as reader:
for row in reader:
table_content.append(row)
self.releases = {
"https://zenodo.org/api/files/6b2024bb-b37f-4a01-a78a-6d90f9d0cb90/2021-09-20-ror-data.zip": {
"path": test_fixtures_folder("ror", "2021-09-20-ror-data.zip"),
"download_hash": "60620675937e6513104275931331f68f",
"extract_hash": "17931b9f766387d10778f121725c0fa1",
"transform_hash": "2e6c12a9",
"table_content": table_content,
},
"https://zenodo.org/api/files/ee5e3ae8-81a1-4f49-88ea-6feb09d4d0ac/2021-09-23-ror-data.zip": {
"path": test_fixtures_folder("ror", "2021-09-23-ror-data.zip"),
"download_hash": "0cac8705fba6df755648472356b7cb83",
"extract_hash": "17931b9f766387d10778f121725c0fa1",
"transform_hash": "2e6c12a9",
"table_content": table_content,
},
}
self.release = RorRelease("ror", pendulum.datetime(2021, 1, 1), "https://myurl")
def test_dag_structure(self):
"""Test that the ROR DAG has the correct structure.
:return: None
"""
dag = RorTelescope().make_dag()
self.assert_dag_structure(
{
"check_dependencies": ["list_releases"],
"list_releases": ["download"],
"download": ["upload_downloaded"],
"upload_downloaded": ["extract"],
"extract": ["transform"],
"transform": ["upload_transformed"],
"upload_transformed": ["bq_load"],
"bq_load": ["cleanup"],
"cleanup": [],
},
dag,
)
def test_dag_load(self):
"""Test that the ROR DAG can be loaded from a DAG bag.
:return: None
"""
with ObservatoryEnvironment().create():
dag_file = os.path.join(module_file_path("academic_observatory_workflows.dags"), "ror_telescope.py")
self.assert_dag_load("ror", dag_file)
def test_telescope(self):
"""Test the ROR telescope end to end.
:return: None.
"""
# Setup Observatory environment
env = ObservatoryEnvironment(self.project_id, self.data_location)
dataset_id = env.add_dataset()
# Setup Telescope
execution_date = pendulum.datetime(year=2021, month=9, day=19)
telescope = RorTelescope(dataset_id=dataset_id)
dag = telescope.make_dag()
# Create the Observatory environment and run tests
with env.create():
with env.create_dag_run(dag, execution_date):
# Test that all dependencies are specified: no error should be thrown
env.run_task(telescope.check_dependencies.__name__)
# Test list releases task with files available
with httpretty.enabled():
records_path = test_fixtures_folder("ror", "zenodo_records.json")
self.setup_mock_file_download(telescope.ROR_DATASET_URL, records_path)
ti = env.run_task(telescope.list_releases.__name__)
records = ti.xcom_pull(
key=RorTelescope.RELEASE_INFO,
task_ids=telescope.list_releases.__name__,
include_prior_dates=False,
)
self.assertListEqual(
[
{
"release_date": "20210923",
"url": "https://zenodo.org/api/files/ee5e3ae8-81a1-4f49-88ea-6feb09d4d0ac/2021-09-23-ror-data.zip",
},
{
"release_date": "20210920",
"url": "https://zenodo.org/api/files/6b2024bb-b37f-4a01-a78a-6d90f9d0cb90/2021-09"
"-20-ror-data.zip",
},
],
records,
)
# Use release info for other tasks
releases = []
for record in records:
release_date = record["release_date"]
url = record["url"]
releases.append(RorRelease(telescope.dag_id, pendulum.parse(release_date), url))
# Test download task
with httpretty.enabled():
for release in releases:
download_path = self.releases[release.url]["path"]
self.setup_mock_file_download(release.url, download_path)
env.run_task(telescope.download.__name__)
for release in releases:
self.assertEqual(1, len(release.download_files))
download_hash = self.releases[release.url]["download_hash"]
self.assert_file_integrity(release.download_path, download_hash, "md5")
# Test that file uploaded
env.run_task(telescope.upload_downloaded.__name__)
for release in releases:
self.assert_blob_integrity(
env.download_bucket, blob_name(release.download_path), release.download_path
)
# Test that file extracted
env.run_task(telescope.extract.__name__)
for release in releases:
self.assertEqual(1, len(release.extract_files))
extract_hash = self.releases[release.url]["extract_hash"]
self.assert_file_integrity(release.extract_files[0], extract_hash, "md5")
# Test that file transformed
env.run_task(telescope.transform.__name__)
for release in releases:
self.assertEqual(1, len(release.transform_files))
transform_hash = self.releases[release.url]["transform_hash"]
self.assert_file_integrity(release.transform_path, transform_hash, "gzip_crc")
# Test that transformed file uploaded
env.run_task(telescope.upload_transformed.__name__)
for release in releases:
self.assert_blob_integrity(
env.transform_bucket, blob_name(release.transform_path), release.transform_path
)
# Test that data loaded into BigQuery
env.run_task(telescope.bq_load.__name__)
for release in releases:
table_id = (
f"{self.project_id}.{dataset_id}."
f"{bigquery_sharded_table_id(telescope.dag_id, release.release_date)}"
)
expected_content = self.releases[release.url]["table_content"]
self.assert_table_content(table_id, expected_content)
# Test that all telescope data deleted
download_folders, extract_folders, transform_folders = (
[releases[0].download_folder, releases[1].download_folder],
[releases[0].extract_folder, releases[1].extract_folder],
[releases[0].transform_folder, releases[1].transform_folder],
)
env.run_task(telescope.cleanup.__name__)
for i, release in enumerate(releases):
self.assert_cleanup(download_folders[i], extract_folders[i], transform_folders[i])
@patch("academic_observatory_workflows.workflows.ror_telescope.list_ror_records")
def test_list_releases(self, mock_list_records):
"""Test the list_releases method of the ROR telescope when there are no records
:return: None
"""
mock_list_records.return_value = []
execution_date = pendulum.datetime(2020, 1, 1)
next_execution_date = pendulum.datetime(2020, 2, 1)
telescope = RorTelescope()
continue_dag = telescope.list_releases(execution_date=execution_date, next_execution_date=next_execution_date)
self.assertFalse(continue_dag)
@patch("airflow.models.variable.Variable.get")
def test_release_extract(self, mock_variable_get):
"""Test exceptions are raised for the extract method of the ROR release
:return: None
"""
mock_variable_get.return_value = "data_path"
with CliRunner().isolated_filesystem():
# Create file at download path that is not a zip file
with open(self.release.download_path, "w") as f:
f.write("test")
# Test that exception is raised
with self.assertRaises(AirflowException):
self.release.extract()
@patch("airflow.models.variable.Variable.get")
def test_release_transform(self, mock_variable_get):
"""Test exceptions are raised for the transform method of the ROR release
:return: None
"""
mock_variable_get.return_value = "data_path"
with CliRunner().isolated_filesystem():
# Test exception is raised when there is more than one file
file_path1 = os.path.join(self.release.extract_folder, "2020-01-01-ror-data.json")
file_path2 = os.path.join(self.release.extract_folder, "2021-01-01-ror-data.json")
for file in [file_path1, file_path2]:
with open(file, "w") as f:
f.write("test")
with self.assertRaises(AirflowException):
self.release.transform()
with CliRunner().isolated_filesystem():
# Test exception is raised when no extracted file matches the expected name pattern
file_path1 = os.path.join(self.release.extract_folder, "ror-data.json")
with open(file_path1, "w") as f:
f.write("test")
with self.assertRaises(AirflowException):
self.release.transform()
def test_list_ror_records(self):
"""Test the list_ror_records function
:return: None
"""
start_date = pendulum.datetime(2020, 1, 1)
end_date = pendulum.datetime(2020, 2, 1)
# Test list records when there are no hits
with httpretty.enabled():
body = {
"hits": {"hits": [], "total": 2},
"links": {
"self": "https://zenodo.org/api/records/?sort=mostrecent&communities=ror-data&page=1&size=10"
},
}
httpretty.register_uri(httpretty.GET, RorTelescope.ROR_DATASET_URL, body=json.dumps(body))
records = list_ror_records(start_date, end_date)
self.assertEqual([], records)
# Test list records with a response code that is not 200
with httpretty.enabled():
httpretty.register_uri(httpretty.GET, RorTelescope.ROR_DATASET_URL, status=400)
with self.assertRaises(AirflowException):
list_ror_records(start_date, end_date)
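# A minimal, self-contained sketch of the httpretty pattern used above.
# The URL and payload here are made up for illustration only.
import httpretty
import json
import requests

httpretty.enable()
httpretty.register_uri(
    httpretty.GET,
    "https://example.org/api/records",
    body=json.dumps({"hits": {"hits": [], "total": 0}}),
)
response = requests.get("https://example.org/api/records")  # intercepted, no real network call
assert response.json()["hits"]["total"] == 0
httpretty.disable()
httpretty.reset()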
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,392
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Aniek Roelofs, Tuan Chien
import datetime
import logging
import os
import shutil
from typing import List
from unittest.mock import patch
import pendulum
import vcr
from academic_observatory_workflows.config import test_fixtures_folder
from academic_observatory_workflows.workflows.unpaywall_snapshot_telescope import (
UnpaywallSnapshotRelease,
UnpaywallSnapshotTelescope,
)
from airflow.utils.state import State
from click.testing import CliRunner
from observatory.platform.utils.test_utils import (
HttpServer,
ObservatoryEnvironment,
ObservatoryTestCase,
module_file_path,
)
from observatory.platform.utils.workflow_utils import (
bigquery_sharded_table_id,
blob_name,
)
class TestUnpaywallSnapshotRelease(ObservatoryTestCase):
"""Tests for the functions used by the unpaywall telescope"""
def __init__(self, *args, **kwargs):
"""Constructor which sets up variables used by tests.
:param args: arguments.
:param kwargs: keyword arguments.
"""
super().__init__(*args, **kwargs)
# Unpaywall test release
self.unpaywall_test_path = test_fixtures_folder("unpaywall_snapshot", "unpaywall_snapshot.jsonl.gz")
self.unpaywall_test_file = "unpaywall_3000-01-27T153236.jsonl.gz"
self.unpaywall_test_url = "http://localhost/unpaywall_3000-01-27T153236.jsonl.gz"
self.unpaywall_test_date = pendulum.datetime(year=3000, month=1, day=27)
self.unpaywall_test_decompress_hash = "fe4e72ce54c4bb236802ddbb3dbee905"
self.unpaywall_test_transform_hash = "62cbb5af5a78d2e0769a28d976971cba"
# Turn logging to warning because vcr prints too much at info level
logging.basicConfig()
logging.getLogger().setLevel(logging.WARNING)
def test_parse_release_date(self):
"""Test that date obtained from url is string and in correct format.
:return: None.
"""
release_date = UnpaywallSnapshotRelease.parse_release_date(self.unpaywall_test_file)
self.assertEqual(self.unpaywall_test_date, release_date)
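# A hedged sketch of the parsing exercised above. The real implementation is
# UnpaywallSnapshotRelease.parse_release_date; the regex below is an assumption
# inferred from the fixture name "unpaywall_3000-01-27T153236.jsonl.gz".
import re

def parse_release_date_sketch(file_name: str) -> pendulum.DateTime:
    # Pull the YYYY-MM-DD portion out of the file name and parse it
    date_str = re.search(r"\d{4}-\d{2}-\d{2}", file_name).group()
    return pendulum.parse(date_str)

# parse_release_date_sketch("unpaywall_3000-01-27T153236.jsonl.gz")
# == pendulum.datetime(3000, 1, 27)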
@patch("academic_observatory_workflows.workflows.unpaywall_snapshot_telescope.Variable.get")
def test_extract_release(self, mock_variable_get):
"""Test that the release is decompressed as expected.
:return: None.
"""
# Create data path and mock getting data path
data_path = "data"
mock_variable_get.return_value = data_path
with CliRunner().isolated_filesystem():
release = UnpaywallSnapshotRelease(
dag_id="test", release_date=self.unpaywall_test_date, file_name=self.unpaywall_test_file
)
# 'download' release
shutil.copyfile(self.unpaywall_test_path, release.download_path)
release.extract()
self.assertEqual(len(release.extract_files), 1)
self.assert_file_integrity(release.extract_path, self.unpaywall_test_decompress_hash, "md5")
@patch("academic_observatory_workflows.workflows.unpaywall_snapshot_telescope.get_airflow_connection_url")
@patch("observatory.platform.utils.workflow_utils.Variable.get")
def test_transform_release(self, mock_variable_get, m_get_conn):
"""Test that the release is transformed as expected.
:return: None.
"""
m_get_conn.return_value = "http://localhost/"
# Create data path and mock getting data path
data_path = "data"
mock_variable_get.return_value = data_path
with CliRunner().isolated_filesystem():
release = UnpaywallSnapshotRelease(
dag_id="test", release_date=self.unpaywall_test_date, file_name=self.unpaywall_test_file
)
shutil.copyfile(self.unpaywall_test_path, release.download_path)
release.extract()
release.transform()
self.assertEqual(len(release.transform_files), 1)
self.assert_file_integrity(release.transform_path, self.unpaywall_test_transform_hash, "md5")
@patch("academic_observatory_workflows.workflows.unpaywall_snapshot_telescope.get_airflow_connection_url")
@patch("academic_observatory_workflows.workflows.unpaywall_snapshot_telescope.Variable.get")
@patch("academic_observatory_workflows.workflows.unpaywall_snapshot_telescope.download_file")
def test_download(self, m_download_files, m_varget, m_get_conn):
release = UnpaywallSnapshotRelease(
dag_id="test", release_date=self.unpaywall_test_date, file_name=self.unpaywall_test_file
)
# Setup mocks
data_path = "data"
m_varget.return_value = data_path
m_get_conn.return_value = "http://localhost/"
release.download()
_, call_args = m_download_files.call_args
self.assertEqual(
call_args["url"],
"http://localhost/unpaywall_3000-01-27T153236.jsonl.gz",
)
self.assertEqual(
call_args["filename"], "data/telescopes/download/test/test_3000_01_27/unpaywall_snapshot.jsonl.gz"
)
@patch("academic_observatory_workflows.workflows.unpaywall_snapshot_telescope.get_airflow_connection_url")
@patch("academic_observatory_workflows.workflows.unpaywall_snapshot_telescope.Variable.get")
def test_extract_outputs(self, m_variable_get, m_get_conn):
# Create data path and mock getting data path
data_path = "data"
m_variable_get.return_value = data_path
m_get_conn.return_value = "http://localhost/"
with CliRunner().isolated_filesystem():
release = UnpaywallSnapshotRelease(
dag_id="test", release_date=self.unpaywall_test_date, file_name=self.unpaywall_test_file
)
shutil.copyfile(self.unpaywall_test_path, release.download_path)
release.extract()
self.assertEqual(len(release.extract_files), 1)
@patch("academic_observatory_workflows.workflows.unpaywall_snapshot_telescope.get_airflow_connection_url")
@patch("academic_observatory_workflows.workflows.unpaywall_snapshot_telescope.Variable.get")
def test_transform_outputs(self, m_variable_get, m_get_conn):
# Create data path and mock getting data path
data_path = "data"
m_variable_get.return_value = data_path
m_get_conn.return_value = "http://localhost/"
with CliRunner().isolated_filesystem():
release = UnpaywallSnapshotRelease(
dag_id="test", release_date=self.unpaywall_test_date, file_name=self.unpaywall_test_file
)
shutil.copyfile(self.unpaywall_test_path, release.download_path)
release.extract()
release.transform()
self.assertEqual(len(release.transform_files), 1)
class TestUnpaywallSnapshotTelescope(ObservatoryTestCase):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Unpaywall releases list
self.list_unpaywall_releases_path = test_fixtures_folder("unpaywall_snapshot", "list_unpaywall_releases.yaml")
self.list_unpaywall_releases_hash = "78d1a129cb0aba072ca49e2599f60c10"
self.start_date = pendulum.datetime(year=2018, month=3, day=29)
self.end_date = pendulum.datetime(year=2020, month=4, day=29)
self.project_id = os.getenv("TEST_GCP_PROJECT_ID")
self.data_location = os.getenv("TEST_GCP_DATA_LOCATION")
self.unpaywall_test_path = test_fixtures_folder("unpaywall_snapshot", "unpaywall_snapshot.jsonl.gz")
def test_ctor(self):
# set table description
telescope = UnpaywallSnapshotTelescope(table_descriptions="something")
self.assertEqual(telescope.table_descriptions, "something")
# the constructor ensures "transform_bucket" is present in airflow_vars
telescope = UnpaywallSnapshotTelescope(airflow_vars=[])
self.assertEqual(telescope.airflow_vars, ["transform_bucket"])
@patch("academic_observatory_workflows.workflows.unpaywall_snapshot_telescope.get_airflow_connection_url")
@patch("observatory.platform.utils.workflow_utils.Variable.get")
def test_list_releases(self, mock_variable_get, m_get_conn):
"""Test that list releases returns a list of string with urls.
:return: None.
"""
data_path = "data"
mock_variable_get.return_value = data_path
m_get_conn.return_value = "http://localhost/"
telescope = UnpaywallSnapshotTelescope()
with CliRunner().isolated_filesystem():
with vcr.use_cassette(self.list_unpaywall_releases_path):
releases = UnpaywallSnapshotTelescope.list_releases(self.start_date, self.end_date)
self.assertIsInstance(releases, List)
for release in releases:
self.assertIsInstance(release, dict)
self.assertEqual(13, len(releases))
@patch("academic_observatory_workflows.workflows.unpaywall_snapshot_telescope.get_http_response_xml_to_dict")
@patch("academic_observatory_workflows.workflows.unpaywall_snapshot_telescope.get_airflow_connection_url")
@patch("observatory.platform.utils.workflow_utils.Variable.get")
def test_list_releases_fail(self, m_get, m_get_conn, m_get_xml_dict):
data_path = "data"
m_get.return_value = data_path
m_get_conn.return_value = "http://localhost/"
m_get_xml_dict.side_effect = ConnectionError("Test")
telescope = UnpaywallSnapshotTelescope()
# Fetch error
self.assertRaises(ConnectionError, UnpaywallSnapshotTelescope.list_releases, self.start_date, self.end_date)
@patch("academic_observatory_workflows.workflows.unpaywall_snapshot_telescope.get_http_response_xml_to_dict")
@patch("academic_observatory_workflows.workflows.unpaywall_snapshot_telescope.get_airflow_connection_url")
@patch("observatory.platform.utils.workflow_utils.Variable.get")
def test_list_releases_date_out_of_range(self, m_get, m_get_conn, m_get_xmldict):
data_path = "data"
m_get.return_value = data_path
m_get_conn.return_value = "http://localhost/"
telescope = UnpaywallSnapshotTelescope()
m_get_xmldict.return_value = {
"ListBucketResult": {
"Contents": [
# LastModified is outside the start_date/end_date range
{"Key": "unpaywall_2018-03-29T113154.jsonl.gz", "LastModified": "2000-04-28T17:28:55.000Z"}
]
}
}
releases = UnpaywallSnapshotTelescope.list_releases(self.start_date, self.end_date)
self.assertEqual(len(releases), 0)
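# Minimal stand-in for an Airflow TaskInstance: in the tests below,
# get_release_info only calls ti.xcom_push, so a no-op method is enough.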
class MockTI:
def xcom_push(self, *args):
pass
@patch("academic_observatory_workflows.workflows.unpaywall_snapshot_telescope.bigquery_table_exists")
@patch(
"academic_observatory_workflows.workflows.unpaywall_snapshot_telescope.UnpaywallSnapshotTelescope.list_releases"
)
@patch("observatory.platform.utils.workflow_utils.Variable.get")
def test_get_release_info(self, m_get, m_releases, m_bq_table_exist):
m_get.return_value = "projectid"
# No release
m_releases.return_value = []
m_bq_table_exist.return_value = True
telescope = UnpaywallSnapshotTelescope()
continue_dag = telescope.get_release_info(
**{
"ti": TestUnpaywallSnapshotTelescope.MockTI(),
"execution_date": datetime.datetime(2021, 1, 1),
"next_execution_date": datetime.datetime(2021, 2, 1),
}
)
self.assertEqual(continue_dag, False)
# Single release exists
m_releases.return_value = [{"date": "20210101", "file_name": "some file"}]
m_bq_table_exist.return_value = True
continue_dag = telescope.get_release_info(
**{
"ti": TestUnpaywallSnapshotTelescope.MockTI(),
"execution_date": datetime.datetime(2021, 1, 1),
"next_execution_date": datetime.datetime(2021, 2, 1),
}
)
self.assertEqual(continue_dag, False)
# Single release, not exist
m_releases.return_value = [{"date": "20210101", "file_name": "some file"}]
m_bq_table_exist.return_value = False
continue_dag = telescope.get_release_info(
**{
"ti": TestUnpaywallSnapshotTelescope.MockTI(),
"execution_date": datetime.datetime(2021, 1, 1),
"next_execution_date": datetime.datetime(2021, 2, 1),
}
)
self.assertEqual(continue_dag, True)
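# Summary of the three cases above: continue_dag is True only when at least
# one release exists that has not already been loaded into BigQuery.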
def test_dag_structure(self):
"""Test that the Crossref Events DAG has the correct structure."""
dag = UnpaywallSnapshotTelescope().make_dag()
self.assert_dag_structure(
{
"check_dependencies": ["get_release_info"],
"get_release_info": ["download"],
"download": ["upload_downloaded"],
"upload_downloaded": ["extract"],
"extract": ["transform"],
"transform": ["upload_transformed"],
"upload_transformed": ["bq_load"],
"bq_load": ["cleanup"],
"cleanup": [],
},
dag,
)
def test_dag_load(self):
"""Test that the DAG can be loaded from a DAG bag."""
with ObservatoryEnvironment().create():
dag_file = os.path.join(
module_file_path("academic_observatory_workflows.dags"), "unpaywall_snapshot_telescope.py"
)
self.assert_dag_load("unpaywall_snapshot", dag_file)
def setup_observatory_env(self):
env = ObservatoryEnvironment(self.project_id, self.data_location)
self.dataset_id = env.add_dataset()
return env
@patch("airflow.hooks.base.BaseHook.get_connection")
def test_telescope(self, m_base_get_con):
"""Test the Telescope end to end."""
m_base_get_con.return_value = "http://localhost"
# Setup http server to serve files
httpserver = HttpServer(directory=test_fixtures_folder("unpaywall_snapshot"))
with httpserver.create():
with patch(
"academic_observatory_workflows.workflows.unpaywall_snapshot_telescope.get_airflow_connection_url"
) as m_get_conns:
# Mock out unpaywall connection url
mock_url = f"http://{httpserver.host}:{httpserver.port}/"
m_get_conns.return_value = mock_url
env = self.setup_observatory_env()
execution_date = pendulum.datetime(2021, 6, 1)
release_date_str = "20210101"
release_date = pendulum.parse(release_date_str)
file_name = "unpaywall_snapshot.jsonl.gz"
with env.create():
telescope = UnpaywallSnapshotTelescope(dataset_id=self.dataset_id)
dag = telescope.make_dag()
release = UnpaywallSnapshotRelease(
dag_id=dag.dag_id, release_date=release_date, file_name=file_name
)
with env.create_dag_run(dag, execution_date):
# check dependencies
ti = env.run_task(telescope.check_dependencies.__name__)
self.assertEqual(ti.state, State.SUCCESS)
# get release info
with patch(
"academic_observatory_workflows.workflows.unpaywall_snapshot_telescope.UnpaywallSnapshotTelescope.list_releases"
) as m_list_releases:
m_list_releases.return_value = [
{
"date": release_date_str,
"file_name": file_name,
}
]
ti = env.run_task(telescope.get_release_info.__name__)
self.assertEqual(ti.state, State.SUCCESS)
# download
ti = env.run_task(telescope.download.__name__)
self.assertEqual(ti.state, State.SUCCESS)
# Check file was downloaded
self.assertEqual(len(release.download_files), 1)
# upload_downloaded
ti = env.run_task(telescope.upload_downloaded.__name__)
self.assertEqual(ti.state, State.SUCCESS)
self.assert_blob_integrity(
env.download_bucket, blob_name(release.download_path), release.download_path
)
# extract
ti = env.run_task(telescope.extract.__name__)
self.assertEqual(ti.state, State.SUCCESS)
# transform
ti = env.run_task(telescope.transform.__name__)
self.assertEqual(ti.state, State.SUCCESS)
# upload_transformed
ti = env.run_task(telescope.upload_transformed.__name__)
self.assertEqual(ti.state, State.SUCCESS)
self.assert_blob_integrity(
env.transform_bucket, blob_name(release.transform_path), release.transform_path
)
# bq_load
ti = env.run_task(telescope.bq_load.__name__)
self.assertEqual(ti.state, State.SUCCESS)
table_id = (
f"{self.project_id}.{self.dataset_id}."
f"{bigquery_sharded_table_id(telescope.dag_id, release.release_date)}"
)
expected_rows = 100
self.assert_table_integrity(table_id, expected_rows)
# cleanup
download_folder, extract_folder, transform_folder = (
release.download_folder,
release.extract_folder,
release.transform_folder,
)
ti = env.run_task(telescope.cleanup.__name__)
self.assertEqual(ti.state, State.SUCCESS)
self.assert_cleanup(download_folder, extract_folder, transform_folder)
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,393
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Tuan Chien
import os
import shutil
import unittest
from datetime import timedelta
from unittest.mock import patch
import pendulum
from academic_observatory_workflows.config import test_fixtures_folder
from academic_observatory_workflows.workflows.unpaywall_telescope import (
UnpaywallRelease,
UnpaywallTelescope,
)
from airflow.exceptions import AirflowException
from airflow.models.connection import Connection
from airflow.utils.state import State
from click.testing import CliRunner
from google.cloud import bigquery
from observatory.platform.utils.file_utils import validate_file_hash
from observatory.platform.utils.jinja2_utils import render_template
from observatory.platform.utils.test_utils import (
HttpServer,
ObservatoryEnvironment,
ObservatoryTestCase,
module_file_path,
)
from observatory.platform.utils.workflow_utils import blob_name, create_date_table_id
class TestUnpaywallRelease(unittest.TestCase):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fixture_dir = test_fixtures_folder("unpaywall")
self.snapshot_file = "unpaywall_2021-07-02T151134.jsonl.gz"
self.snapshot_path = os.path.join(self.fixture_dir, self.snapshot_file)
self.snapshot_hash = "0f1ac32355c4582d82ae4bc76db17c26" # md5
@patch("academic_observatory_workflows.workflows.unpaywall_telescope.get_airflow_connection_password")
def test_api_key(self, m_pass):
m_pass.return_value = "testpass"
release = UnpaywallRelease(
dag_id="dag",
start_date=pendulum.now(),
end_date=pendulum.now(),
first_release=True,
)
self.assertEqual(release.api_key, "testpass")
@patch("academic_observatory_workflows.workflows.unpaywall_telescope.get_airflow_connection_password")
def test_snapshot_url(self, m_pass):
m_pass.return_value = "testpass"
release = UnpaywallRelease(
dag_id="dag",
start_date=pendulum.now(),
end_date=pendulum.now(),
first_release=True,
)
url = "https://api.unpaywall.org/feed/snapshot?api_key=testpass"
self.assertEqual(release.snapshot_url, url)
@patch("academic_observatory_workflows.workflows.unpaywall_telescope.get_airflow_connection_password")
def test_data_feed_url(self, m_pass):
m_pass.return_value = "testpass"
release = UnpaywallRelease(
dag_id="dag",
start_date=pendulum.now(),
end_date=pendulum.now(),
first_release=True,
)
url = "https://api.unpaywall.org/feed/changefiles?interval=day&api_key=testpass"
self.assertEqual(release.data_feed_url, url)
@patch("academic_observatory_workflows.workflows.unpaywall_telescope.get_airflow_connection_password")
@patch("academic_observatory_workflows.workflows.unpaywall_telescope.download_file")
@patch("academic_observatory_workflows.workflows.unpaywall_telescope.get_observatory_http_header")
@patch("academic_observatory_workflows.workflows.unpaywall_telescope.get_http_response_json")
@patch("airflow.models.variable.Variable.get")
def test_download_data_feed(self, m_get, m_get_response, m_header, m_download, m_pass):
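        # @patch decorators apply bottom-up: the innermost patch (Variable.get) maps to the
        # first mock argument (m_get), and the outermost (get_airflow_connection_password) to the last (m_pass).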
m_get.return_value = "data"
m_pass.return_value = "testpass"
m_header.return_value = {"User-Agent": "custom"}
# Day
m_get_response.return_value = {
"list": [
{
"url": "http://url1",
"filename": "changed_dois_with_versions_2021-07-02T080001.jsonl.gz",
},
{
"url": "http://url2",
"filename": "changed_dois_with_versions_2021-07-02T080001.jsonl.gz",
},
]
}
release = UnpaywallRelease(
dag_id="dag",
start_date=pendulum.datetime(2021, 7, 4),
end_date=pendulum.datetime(2021, 7, 4),
first_release=False,
)
release.download()
_, call_args = m_download.call_args
self.assertEqual(call_args["url"], "http://url1")
self.assertEqual(
call_args["filename"],
"data/telescopes/download/dag/2021_07_04-2021_07_04/changed_dois_with_versions_2021-07-02T080001.jsonl.gz",
)
@patch("academic_observatory_workflows.workflows.unpaywall_telescope.get_observatory_http_header")
@patch("academic_observatory_workflows.workflows.unpaywall_telescope.get_airflow_connection_password")
@patch("academic_observatory_workflows.workflows.unpaywall_telescope.download_file")
@patch("airflow.models.variable.Variable.get")
def test_download_snapshot(self, m_get, m_download, m_pass, m_header):
m_get.return_value = "data"
m_pass.return_value = "testpass"
m_header.return_value = {"User-Agent": "custom"}
fixture_dir = test_fixtures_folder("unpaywall")
with CliRunner().isolated_filesystem():
release = UnpaywallRelease(
dag_id="dag",
start_date=pendulum.datetime(2021, 7, 2),
end_date=pendulum.datetime(2021, 7, 3),
first_release=True,
)
src = self.snapshot_path
dst = os.path.join(release.download_folder, self.snapshot_file)
shutil.copyfile(src, dst)
release.download()
        # Bad dates: a start date after the end date should raise an AirflowException
with CliRunner().isolated_filesystem():
release = UnpaywallRelease(
dag_id="dag",
start_date=pendulum.datetime(2021, 9, 22),
end_date=pendulum.datetime(2021, 1, 3),
first_release=True,
)
src = self.snapshot_path
dst = os.path.join(release.download_folder, self.snapshot_file)
shutil.copyfile(src, dst)
self.assertRaises(AirflowException, release.download)
@patch("academic_observatory_workflows.workflows.unpaywall_telescope.get_http_response_json")
def test_get_diff_release(self, m_get_json):
# No release info
m_get_json.return_value = {"list": []}
result = UnpaywallRelease.get_diff_release(feed_url=None, start_date=None)
self.assertEqual(result, (None, None))
m_get_json.return_value = {
"list": [
{"url": "url", "filename": "changed_dois_with_versions_2021-07-02T080001.jsonl.gz"},
{"url": "url", "filename": "changed_dois_with_versions_2021-07-02T080001.jsonl.gz"},
{"url": "url", "filename": "changed_dois_with_versions_2021-07-02T080001.jsonl.gz"},
]
}
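        # With start_date 2021-07-04, the changefile dated 2021-07-02 (two days back) is selected.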
url, filename = UnpaywallRelease.get_diff_release(
feed_url=None,
start_date=pendulum.datetime(2021, 7, 4),
)
self.assertEqual(filename, "changed_dois_with_versions_2021-07-02T080001.jsonl.gz")
@patch("airflow.models.variable.Variable.get")
def test_extract(self, m_get):
m_get.return_value = "data"
fixture_dir = test_fixtures_folder("unpaywall")
with CliRunner().isolated_filesystem():
release = UnpaywallRelease(
dag_id="dag",
start_date=pendulum.datetime(2021, 7, 4),
end_date=pendulum.datetime(2021, 7, 4),
first_release=True,
)
src = self.snapshot_path
dst = os.path.join(release.download_folder, self.snapshot_file)
shutil.copyfile(src, dst)
self.assertEqual(len(release.download_files), 1)
release.extract()
self.assertEqual(len(release.extract_files), 1)
@patch("airflow.models.variable.Variable.get")
def test_transform(self, m_get):
m_get.return_value = "data"
fixture_dir = test_fixtures_folder("unpaywall")
with CliRunner().isolated_filesystem():
release = UnpaywallRelease(
dag_id="dag",
start_date=pendulum.datetime(2021, 7, 4),
end_date=pendulum.datetime(2021, 7, 4),
first_release=True,
)
src = self.snapshot_path
dst = os.path.join(release.download_folder, self.snapshot_file)
shutil.copyfile(src, dst)
release.extract()
release.transform()
self.assertEqual(len(release.transform_files), 1)
json_transformed_hash = "62cbb5af5a78d2e0769a28d976971cba"
json_transformed = os.path.join(release.transform_folder, self.snapshot_file[:-3])
self.assertTrue(validate_file_hash(file_path=json_transformed, expected_hash=json_transformed_hash))
class TestUnpaywallTelescope(ObservatoryTestCase):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.project_id = os.getenv("TEST_GCP_PROJECT_ID")
self.data_location = os.getenv("TEST_GCP_DATA_LOCATION")
self.fixture_dir = test_fixtures_folder("unpaywall")
self.snapshot_file = "unpaywall_2021-07-02T151134.jsonl.gz"
self.snapshot_path = os.path.join(self.fixture_dir, self.snapshot_file)
self.snapshot_hash = "0f1ac32355c4582d82ae4bc76db17c26" # md5
def test_ctor(self):
telescope = UnpaywallTelescope(airflow_vars=[])
self.assertEqual(telescope.airflow_vars, ["transform_bucket"])
self.assertRaises(AirflowException, UnpaywallTelescope, schedule_interval="@monthly")
def test_schedule_days_apart(self):
start_date = pendulum.datetime(2021, 1, 9)
schedule_interval = timedelta(days=2)
days_apart_gen = UnpaywallTelescope._schedule_days_apart(
start_date=start_date, schedule_interval=schedule_interval
)
diff = next(days_apart_gen)
self.assertEqual(diff, 2)
diff = next(days_apart_gen)
self.assertEqual(diff, 2)
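        # 2021-01-09 is a Saturday; the first @weekly (Sunday) run follows 1 day later, then runs are 7 days apart.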
schedule_interval = "@weekly"
days_apart_gen = UnpaywallTelescope._schedule_days_apart(
start_date=start_date, schedule_interval=schedule_interval
)
diff = next(days_apart_gen)
self.assertEqual(diff, 1)
diff = next(days_apart_gen)
self.assertEqual(diff, 7)
def test_dag_structure(self):
"""Test that the Crossref Events DAG has the correct structure."""
dag = UnpaywallTelescope().make_dag()
self.assert_dag_structure(
{
"check_dependencies": ["check_releases"],
"check_releases": ["download"],
"download": ["upload_downloaded"],
"upload_downloaded": ["extract"],
"extract": ["transform"],
"transform": ["upload_transformed"],
"upload_transformed": ["bq_load_partition"],
"bq_load_partition": ["bq_delete_old"],
"bq_delete_old": ["bq_append_new"],
"bq_append_new": ["cleanup"],
"cleanup": [],
},
dag,
)
def test_dag_load(self):
"""Test that the DAG can be loaded from a DAG bag."""
with ObservatoryEnvironment().create():
dag_file = os.path.join(module_file_path("academic_observatory_workflows.dags"), "unpaywall_telescope.py")
self.assert_dag_load("unpaywall", dag_file)
def setup_observatory_environment(self):
env = ObservatoryEnvironment(self.project_id, self.data_location)
self.dataset_id = env.add_dataset()
return env
def create_changefiles(self, host, port):
# Daily
template_path = os.path.join(self.fixture_dir, "daily-feed", "changefiles.jinja2")
changefiles = render_template(template_path, host=host, port=port)
dst = os.path.join(self.fixture_dir, "daily-feed", "changefiles")
with open(dst, "w") as f:
f.write(changefiles)
def remove_changefiles(self):
dst = os.path.join(self.fixture_dir, "daily-feed", "changefiles")
os.remove(dst)
    # We want to do 3 DAG runs. The first loads the snapshot.
    # The second looks for a daily diff dated 1 day before the snapshot date (it won't exist, so tasks skip).
    # The third loads the daily diff from the day of the snapshot (which exists).
    # This demonstrates that diff updates look 2 days back.
def test_telescope_day(self):
env = self.setup_observatory_environment()
first_execution_date = pendulum.datetime(2021, 7, 2) # Snapshot
second_execution_date = pendulum.datetime(2021, 7, 3) # No update found
third_execution_date = pendulum.datetime(2021, 7, 4) # Update found
with env.create(task_logging=True):
server = HttpServer(directory=self.fixture_dir)
with server.create():
with patch.object(
UnpaywallRelease, "SNAPSHOT_URL", f"http://{server.host}:{server.port}/{self.snapshot_file}"
):
with patch.object(
UnpaywallRelease,
"CHANGEFILES_URL",
f"http://{server.host}:{server.port}/daily-feed/changefiles",
):
self.create_changefiles(server.host, server.port)
conn = Connection(
conn_id=UnpaywallRelease.AIRFLOW_CONNECTION, uri="http://:YOUR_API_KEY@localhost"
)
env.add_connection(conn)
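                        # The password field of the connection URI (":YOUR_API_KEY") supplies the Unpaywall API key.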
telescope = UnpaywallTelescope(dataset_id=self.dataset_id)
dag = telescope.make_dag()
# First run
with env.create_dag_run(dag, first_execution_date):
release = UnpaywallRelease(
dag_id=UnpaywallTelescope.DAG_ID,
start_date=pendulum.datetime(2021, 7, 2),
end_date=pendulum.datetime(2021, 7, 2),
first_release=True,
)
# Check dependencies are met
ti = env.run_task(telescope.check_dependencies.__name__)
self.assertEqual(ti.state, State.SUCCESS)
# Check releases
ti = env.run_task(telescope.check_releases.__name__)
self.assertEqual(ti.state, State.SUCCESS)
# Download data
ti = env.run_task(telescope.download.__name__)
self.assertEqual(ti.state, State.SUCCESS)
# Upload downloaded data
ti = env.run_task(telescope.upload_downloaded.__name__)
self.assertEqual(ti.state, State.SUCCESS)
for file in release.download_files:
self.assert_blob_integrity(env.download_bucket, blob_name(file), file)
# Extract data
ti = env.run_task(telescope.extract.__name__)
self.assertEqual(ti.state, State.SUCCESS)
# Transform data
ti = env.run_task(telescope.transform.__name__)
self.assertEqual(ti.state, State.SUCCESS)
self.assertEqual(len(release.transform_files), 1)
# Upload transformed data
ti = env.run_task(telescope.upload_transformed.__name__)
self.assertEqual(ti.state, State.SUCCESS)
for file in release.transform_files:
self.assert_blob_integrity(env.transform_bucket, blob_name(file), file)
# Load bq table partitions
ti = env.run_task(telescope.bq_load_partition.__name__)
self.assertEqual(ti.state, State.SKIPPED)
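                            # On the first release the snapshot is appended straight to the main table,
                            # so the partition load and delete steps are skipped.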
# Delete changed data from main table
with patch("observatory.platform.utils.gc_utils.bq_query_bytes_daily_limit_check"):
ti = env.run_task(telescope.bq_delete_old.__name__)
self.assertEqual(ti.state, State.SKIPPED)
# Add new changes
ti = env.run_task(telescope.bq_append_new.__name__)
self.assertEqual(ti.state, State.SUCCESS)
main_table_id, partition_table_id = release.dag_id, f"{release.dag_id}_partitions"
table_id = f"{self.project_id}.{telescope.dataset_id}.{main_table_id}"
expected_rows = 100
self.assert_table_integrity(table_id, expected_rows)
# Cleanup files
download_folder, extract_folder, transform_folder = (
release.download_folder,
release.extract_folder,
release.transform_folder,
)
ti = env.run_task(telescope.cleanup.__name__)
self.assertEqual(ti.state, State.SUCCESS)
self.assert_cleanup(download_folder, extract_folder, transform_folder)
                        # Second run: uses daily diffs; none is found, so the data tasks are skipped
with env.create_dag_run(dag, second_execution_date):
release = UnpaywallRelease(
dag_id=UnpaywallTelescope.DAG_ID,
start_date=pendulum.datetime(2021, 7, 3),
end_date=pendulum.datetime(2021, 7, 3),
first_release=False,
)
# Check dependencies are met
ti = env.run_task(telescope.check_dependencies.__name__)
self.assertEqual(ti.state, State.SUCCESS)
# Check releases
ti = env.run_task(telescope.check_releases.__name__)
self.assertEqual(ti.state, State.SUCCESS)
# Download data
ti = env.run_task(telescope.download.__name__)
self.assertEqual(ti.state, State.SKIPPED)
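                            # No changefile exists for this window (one day before the snapshot), so the
                            # download through bq_load_partition tasks are skipped.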
# Upload downloaded data
ti = env.run_task(telescope.upload_downloaded.__name__)
self.assertEqual(ti.state, State.SKIPPED)
# Extract data
ti = env.run_task(telescope.extract.__name__)
self.assertEqual(ti.state, State.SKIPPED)
# Transform data
ti = env.run_task(telescope.transform.__name__)
self.assertEqual(ti.state, State.SKIPPED)
self.assertEqual(len(release.transform_files), 0)
# Upload transformed data
ti = env.run_task(telescope.upload_transformed.__name__)
self.assertEqual(ti.state, State.SKIPPED)
# Load bq table partitions
ti = env.run_task(telescope.bq_load_partition.__name__)
self.assertEqual(ti.state, State.SKIPPED)
# Delete changed data from main table
with patch("observatory.platform.utils.gc_utils.bq_query_bytes_daily_limit_check"):
ti = env.run_task(telescope.bq_delete_old.__name__)
self.assertEqual(ti.state, State.SUCCESS)
# Add new changes
ti = env.run_task(telescope.bq_append_new.__name__)
self.assertEqual(ti.state, State.SUCCESS)
# Cleanup files
download_folder, extract_folder, transform_folder = (
release.download_folder,
release.extract_folder,
release.transform_folder,
)
ti = env.run_task(telescope.cleanup.__name__)
self.assertEqual(ti.state, State.SUCCESS)
self.assert_cleanup(download_folder, extract_folder, transform_folder)
# Third run (downloads)
with env.create_dag_run(dag, third_execution_date):
release = UnpaywallRelease(
dag_id=UnpaywallTelescope.DAG_ID,
start_date=pendulum.datetime(2021, 7, 4),
end_date=pendulum.datetime(2021, 7, 4),
first_release=True,
)
# Check dependencies are met
ti = env.run_task(telescope.check_dependencies.__name__)
self.assertEqual(ti.state, State.SUCCESS)
# Check releases
ti = env.run_task(telescope.check_releases.__name__)
self.assertEqual(ti.state, State.SUCCESS)
# Download data
ti = env.run_task(telescope.download.__name__)
self.assertEqual(ti.state, State.SUCCESS)
# Upload downloaded data
ti = env.run_task(telescope.upload_downloaded.__name__)
self.assertEqual(ti.state, State.SUCCESS)
for file in release.download_files:
self.assert_blob_integrity(env.download_bucket, blob_name(file), file)
# Extract data
ti = env.run_task(telescope.extract.__name__)
self.assertEqual(ti.state, State.SUCCESS)
# Transform data
ti = env.run_task(telescope.transform.__name__)
self.assertEqual(ti.state, State.SUCCESS)
self.assertEqual(len(release.transform_files), 1)
# Upload transformed data
ti = env.run_task(telescope.upload_transformed.__name__)
self.assertEqual(ti.state, State.SUCCESS)
for file in release.transform_files:
self.assert_blob_integrity(env.transform_bucket, blob_name(file), file)
# Load bq table partitions
ti = env.run_task(telescope.bq_load_partition.__name__)
self.assertEqual(ti.state, State.SUCCESS)
main_table_id, partition_table_id = release.dag_id, f"{release.dag_id}_partitions"
table_id = create_date_table_id(
partition_table_id, release.end_date, bigquery.TimePartitioningType.DAY
)
table_id = f"{self.project_id}.{telescope.dataset_id}.{table_id}"
expected_rows = 2
self.assert_table_integrity(table_id, expected_rows)
# Delete changed data from main table
with patch("observatory.platform.utils.gc_utils.bq_query_bytes_daily_limit_check"):
ti = env.run_task(telescope.bq_delete_old.__name__)
self.assertEqual(ti.state, State.SUCCESS)
table_id = f"{self.project_id}.{telescope.dataset_id}.{main_table_id}"
expected_rows = 99
self.assert_table_integrity(table_id, expected_rows)
# Add new changes
ti = env.run_task(telescope.bq_append_new.__name__)
self.assertEqual(ti.state, State.SUCCESS)
table_id = f"{self.project_id}.{telescope.dataset_id}.{main_table_id}"
expected_rows = 101
self.assert_table_integrity(table_id, expected_rows)
# Cleanup files
download_folder, extract_folder, transform_folder = (
release.download_folder,
release.extract_folder,
release.transform_folder,
)
ti = env.run_task(telescope.cleanup.__name__)
self.assertEqual(ti.state, State.SUCCESS)
self.assert_cleanup(download_folder, extract_folder, transform_folder)
# Clean up template
self.remove_changefiles()
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,394
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Aniek Roelofs, Tuan Chien
import logging
import os
import re
from typing import Dict, List, Union
import pendulum
from airflow.models import Variable
from airflow.models.taskinstance import TaskInstance
from observatory.platform.utils.airflow_utils import (
AirflowVars,
get_airflow_connection_url,
)
from observatory.platform.utils.file_utils import find_replace_file, gunzip_files
from observatory.platform.utils.gc_utils import (
bigquery_sharded_table_id,
bigquery_table_exists,
)
from observatory.platform.utils.http_download import download_file
from observatory.platform.utils.url_utils import (
get_http_response_xml_to_dict,
get_observatory_http_header,
)
from observatory.platform.workflows.snapshot_telescope import (
SnapshotRelease,
SnapshotTelescope,
)
from academic_observatory_workflows.config import schema_folder as default_schema_folder
class UnpaywallSnapshotRelease(SnapshotRelease):
"""Unpaywall Snapshot Release instance."""
AIRFLOW_CONNECTION = "unpaywall_snapshot"
def __init__(
self,
dag_id: str,
release_date: pendulum.DateTime,
file_name: str = None,
):
"""Construct an UnpaywallSnapshotRelease instance.
:param dag_id: The DAG ID.
:param release_date: Release date.
:param file_name: Filename to download.
"""
super().__init__(
dag_id=dag_id,
release_date=release_date,
)
self.file_name = file_name
@property
def url(self):
"""Download url."""
dataset_url = get_airflow_connection_url(UnpaywallSnapshotRelease.AIRFLOW_CONNECTION)
return f"{dataset_url}{self.file_name}"
@property
def download_path(self) -> str:
"""Get the path to the downloaded file.
:return: the file path.
"""
return os.path.join(self.download_folder, "unpaywall_snapshot.jsonl.gz")
@property
def extract_path(self) -> str:
"""Get the path to the extracted file.
:return: the file path.
"""
return os.path.join(self.extract_folder, "unpaywall_snapshot.jsonl")
@property
def transform_path(self) -> str:
"""Get the path to the transformed file.
:return: the file path.
"""
return os.path.join(self.transform_folder, "unpaywall_snapshot.jsonl")
@staticmethod
def parse_release_date(file_name: str) -> pendulum.DateTime:
"""Parses a release date from a file name.
:param file_name: Unpaywall release file name (contains date string).
:return: date.
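        e.g. "unpaywall_2021-07-02T151134.jsonl.gz" -> 2021-07-02.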
"""
date = re.search(r"\d{4}-\d{2}-\d{2}", file_name).group()
return pendulum.parse(date)
def download(self):
"""Download an Unpaywall release. Either from the snapshot or data freed."""
headers = get_observatory_http_header(package_name="academic_observatory_workflows")
download_file(url=self.url, filename=self.download_path, headers=headers)
def extract(self):
"""Extract release from gzipped file."""
gunzip_files(file_list=[self.download_path], output_dir=self.extract_folder)
def transform(self):
"""Transforms release by replacing a specific '-' with '_'."""
pattern = "authenticated-orcid"
replacement = "authenticated_orcid"
find_replace_file(src=self.extract_path, dst=self.transform_path, pattern=pattern, replacement=replacement)
class UnpaywallSnapshotTelescope(SnapshotTelescope):
"""A container for holding the constants and static functions for the Unpaywall telescope."""
DAG_ID = "unpaywall_snapshot"
def __init__(
self,
*,
dag_id: str = DAG_ID,
start_date: pendulum.DateTime = pendulum.datetime(2018, 5, 14),
schedule_interval: str = "@weekly",
dataset_id: str = "our_research",
queue: str = "remote_queue",
schema_folder: str = default_schema_folder(),
load_bigquery_table_kwargs: Dict = None,
table_descriptions: Dict = None,
catchup: bool = True,
airflow_vars: Union[List[AirflowVars], None] = None,
):
"""Initialise the telescope.
:param dag_id: DAG ID.
:param start_date: Airflow start date for running the DAG.
:param schedule_interval: Airflow schedule interval for running the DAG.
:param dataset_id: GCP dataset ID.
:param queue: Airflow worker queue to use.
:param schema_folder: Folder containing the database schemas.
:param load_bigquery_table_kwargs: the customisation parameters for loading data into a BigQuery table.
:param table_descriptions: Descriptions of the tables.
:param catchup: Whether Airflow should catch up past dag runs.
:param airflow_vars: List of Airflow variables to use.
"""
if table_descriptions is None:
table_descriptions = {dag_id: "The Unpaywall database: https://unpaywall.org/"}
if airflow_vars is None:
airflow_vars = [
AirflowVars.DATA_PATH,
AirflowVars.PROJECT_ID,
AirflowVars.DATA_LOCATION,
AirflowVars.DOWNLOAD_BUCKET,
AirflowVars.TRANSFORM_BUCKET,
]
airflow_conns = [UnpaywallSnapshotRelease.AIRFLOW_CONNECTION]
if load_bigquery_table_kwargs is None:
load_bigquery_table_kwargs = {"ignore_unknown_values": True}
super().__init__(
dag_id,
start_date,
schedule_interval,
dataset_id,
schema_folder,
load_bigquery_table_kwargs=load_bigquery_table_kwargs,
table_descriptions=table_descriptions,
catchup=catchup,
airflow_vars=airflow_vars,
airflow_conns=airflow_conns,
queue=queue,
)
self.add_setup_task(self.check_dependencies)
self.add_setup_task(self.get_release_info)
self.add_task(self.download)
self.add_task(self.upload_downloaded)
self.add_task(self.extract)
self.add_task(self.transform)
self.add_task(self.upload_transformed)
self.add_task(self.bq_load)
self.add_task(self.cleanup)
@staticmethod
def list_releases(start_date: pendulum.DateTime, end_date: pendulum.DateTime) -> List[Dict]:
"""Parses xml string retrieved from GET request to create list of urls for
different releases.
:param start_date:
:param end_date:
:return: a list of UnpaywallSnapshotRelease instances.
"""
releases_list = list()
# Request releases page
dataset_url = get_airflow_connection_url(UnpaywallSnapshotRelease.AIRFLOW_CONNECTION)
response = get_http_response_xml_to_dict(dataset_url)
items = response["ListBucketResult"]["Contents"]
for item in items:
# Get filename and parse dates
file_name = item["Key"]
last_modified = pendulum.parse(item["LastModified"])
release_date = UnpaywallSnapshotRelease.parse_release_date(file_name)
            # Only include the release if its last-modified date is within the start and end dates.
            # The last-modified date is used rather than the release date because files can be
            # uploaded after their embedded release date, so filtering on the release date would miss them.
if start_date <= last_modified < end_date:
release = {
"date": release_date.format("YYYYMMDD"),
"file_name": file_name,
}
releases_list.append(release)
return releases_list
def get_release_info(self, **kwargs) -> bool:
"""Based on a list of all releases, checks which ones were released between this and the next execution date
of the DAG. If the release falls within the time period mentioned above, checks if a bigquery table doesn't
exist yet for the release. A list of releases that passed both checks is passed to the next tasks. If the list
is empty the workflow will stop.
:param kwargs: the context passed from the PythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html
for a list of the keyword arguments that are passed to this argument.
:return: Whether the DAG should continue.
"""
# Get variables
project_id = Variable.get(AirflowVars.PROJECT_ID)
# List releases between a start and end date
execution_date = pendulum.instance(kwargs["execution_date"])
next_execution_date = pendulum.instance(kwargs["next_execution_date"])
releases_list = UnpaywallSnapshotTelescope.list_releases(execution_date, next_execution_date)
logging.info(f"Releases between {execution_date} and {next_execution_date}:\n{releases_list}\n")
# Check if the BigQuery table exists for each release to see if the workflow needs to process
releases_list_out = []
for release in releases_list:
table_id = bigquery_sharded_table_id(UnpaywallSnapshotTelescope.DAG_ID, pendulum.parse(release["date"]))
file = release["file_name"]
if bigquery_table_exists(project_id, self.dataset_id, table_id):
logging.info(f"Skipping as table exists for {file}: " f"{project_id}.{self.dataset_id}.{table_id}")
else:
logging.info(f"Table doesn't exist yet, processing {file} in this workflow")
releases_list_out.append(release)
# If releases_list_out contains items then the DAG will continue (return True) otherwise it will
# stop (return False)
continue_dag = len(releases_list_out) > 0
if continue_dag:
ti: TaskInstance = kwargs["ti"]
ti.xcom_push(UnpaywallSnapshotTelescope.RELEASE_INFO, releases_list_out, execution_date)
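            # make_release later pulls this list from XCom to build UnpaywallSnapshotRelease instances.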
return continue_dag
def make_release(self, **kwargs) -> List[UnpaywallSnapshotRelease]:
"""Make a list of UnpaywallSnapshotRelease instances.
:param kwargs: The context passed from the PythonOperator.
        :return: a list of UnpaywallSnapshotRelease instances.
"""
ti: TaskInstance = kwargs["ti"]
release_info = ti.xcom_pull(
key=UnpaywallSnapshotTelescope.RELEASE_INFO,
task_ids=self.get_release_info.__name__,
include_prior_dates=False,
)
releases = list()
        for info in release_info:
            release_date = pendulum.parse(info["date"])
            file_name = info["file_name"]
            release = UnpaywallSnapshotRelease(dag_id=self.dag_id, release_date=release_date, file_name=file_name)
            releases.append(release)
return releases
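# A minimal usage sketch (an assumption based on the DAG files in this repository, e.g.
# academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py), showing how the
# telescope is typically instantiated and turned into an Airflow DAG:
#
#   telescope = UnpaywallSnapshotTelescope()
#   globals()[telescope.dag_id] = telescope.make_dag()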
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,395
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Aniek Roelofs
import os
from typing import List
from unittest.mock import patch
import httpretty
import pendulum
import vcr
from airflow.utils.state import State
from click.testing import CliRunner
from academic_observatory_workflows.config import test_fixtures_folder
from academic_observatory_workflows.workflows.crossref_fundref_telescope import (
CrossrefFundrefRelease,
CrossrefFundrefTelescope,
list_releases,
strip_whitespace,
)
from observatory.platform.utils.file_utils import get_file_hash
from observatory.platform.utils.test_utils import (
ObservatoryEnvironment,
ObservatoryTestCase,
module_file_path,
)
from observatory.platform.utils.workflow_utils import (
bigquery_sharded_table_id,
blob_name,
)
class TestCrossrefFundrefTelescope(ObservatoryTestCase):
"""Tests for the CrossrefFundref telescope"""
def __init__(self, *args, **kwargs):
"""Constructor which sets up variables used by tests.
:param args: arguments.
:param kwargs: keyword arguments.
"""
super(TestCrossrefFundrefTelescope, self).__init__(*args, **kwargs)
self.project_id = os.getenv("TEST_GCP_PROJECT_ID")
self.data_location = os.getenv("TEST_GCP_DATA_LOCATION")
self.download_path = test_fixtures_folder("crossref_fundref", "crossref_fundref_v1.34.tar.gz")
self.download_hash = "0cd65042"
self.extract_hash = "559aa89d41a85ff84d705084c1caeb8d"
self.transform_hash = "632b453a"
def test_dag_structure(self):
"""Test that the CrossrefFundref DAG has the correct structure.
:return: None
"""
        # Mock create_pool to prevent querying a non-existent Airflow DB
with patch("academic_observatory_workflows.workflows.crossref_fundref_telescope.create_pool"):
dag = CrossrefFundrefTelescope().make_dag()
self.assert_dag_structure(
{
"check_dependencies": ["get_release_info"],
"get_release_info": ["download"],
"download": ["upload_downloaded"],
"upload_downloaded": ["extract"],
"extract": ["transform"],
"transform": ["upload_transformed"],
"upload_transformed": ["bq_load"],
"bq_load": ["cleanup"],
"cleanup": [],
},
dag,
)
def test_dag_load(self):
"""Test that the CrossrefFundref DAG can be loaded from a DAG bag.
:return: None
"""
with ObservatoryEnvironment().create():
dag_file = os.path.join(
module_file_path("academic_observatory_workflows.dags"), "crossref_fundref_telescope.py"
)
self.assert_dag_load("crossref_fundref", dag_file)
def test_telescope(self):
"""Test the CrossrefFundref telescope end to end.
:return: None.
"""
# Setup Observatory environment
env = ObservatoryEnvironment(self.project_id, self.data_location)
dataset_id = env.add_dataset()
# Create the Observatory environment and run tests
with env.create():
# Setup Telescope inside env, so pool can be created
execution_date = pendulum.datetime(year=2021, month=6, day=1)
telescope = CrossrefFundrefTelescope(dataset_id=dataset_id)
dag = telescope.make_dag()
with env.create_dag_run(dag, execution_date):
# Test that all dependencies are specified: no error should be thrown
ti = env.run_task(telescope.check_dependencies.__name__)
self.assertEqual(ti.state, State.SUCCESS)
# Test list releases task
release_info = [
{
"url": "https://gitlab.com/crossref/open_funder_registry/-/archive/v1.34/open_funder_registry-v1.34.tar.gz",
"date": "2021-05-19T09:34:09.898000+00:00",
}
]
with patch(
"academic_observatory_workflows.workflows.crossref_fundref_telescope.list_releases"
) as mock_list_releases:
mock_list_releases.return_value = release_info
ti = env.run_task(telescope.get_release_info.__name__)
actual_release_info = ti.xcom_pull(
key=CrossrefFundrefTelescope.RELEASE_INFO,
task_ids=telescope.get_release_info.__name__,
include_prior_dates=False,
)
self.assertEqual(release_info, actual_release_info)
# Create release instance to check results from other tasks
release = CrossrefFundrefRelease(
telescope.dag_id, pendulum.parse(release_info[0]["date"]), release_info[0]["url"]
)
# Test download task
with httpretty.enabled():
self.setup_mock_file_download(release.url, self.download_path)
env.run_task(telescope.download.__name__)
self.assertEqual(1, len(release.download_files))
self.assert_file_integrity(release.download_path, self.download_hash, "gzip_crc")
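                # gzip files store a CRC32 of the uncompressed data in their footer; the "gzip_crc"
                # algorithm validates against that checksum.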
# Test that file uploaded
env.run_task(telescope.upload_downloaded.__name__)
self.assert_blob_integrity(env.download_bucket, blob_name(release.download_path), release.download_path)
# Test that file extracted
env.run_task(telescope.extract.__name__)
self.assertEqual(1, len(release.extract_files))
self.assert_file_integrity(release.extract_path, self.extract_hash, "md5")
# Test that file transformed
env.run_task(telescope.transform.__name__)
self.assertEqual(1, len(release.transform_files))
self.assert_file_integrity(release.transform_path, self.transform_hash, "gzip_crc")
# Test that transformed file uploaded
env.run_task(telescope.upload_transformed.__name__)
self.assert_blob_integrity(
env.transform_bucket, blob_name(release.transform_path), release.transform_path
)
# Test that data loaded into BigQuery
env.run_task(telescope.bq_load.__name__)
table_id = (
f"{self.project_id}.{dataset_id}."
f"{bigquery_sharded_table_id(telescope.dag_id, release.release_date)}"
)
expected_rows = 27949
self.assert_table_integrity(table_id, expected_rows)
# Test that all telescope data deleted
download_folder, extract_folder, transform_folder = (
release.download_folder,
release.extract_folder,
release.transform_folder,
)
env.run_task(telescope.cleanup.__name__)
self.assert_cleanup(download_folder, extract_folder, transform_folder)
def test_list_releases(self):
"""Test that list releases returns a list with dictionaries of release info.
:return: None.
"""
cassette_path = test_fixtures_folder("crossref_fundref", "list_fundref_releases.yaml")
with vcr.use_cassette(cassette_path):
releases = list_releases(pendulum.datetime(2014, 3, 1), pendulum.datetime(2020, 6, 1))
self.assertIsInstance(releases, List)
self.assertEqual(39, len(releases))
for release in releases:
self.assertIsInstance(release, dict)
self.assertIsInstance(release["url"], str)
self.assertIsInstance(pendulum.parse(release["date"]), pendulum.DateTime)
def test_strip_whitespace(self):
with CliRunner().isolated_filesystem():
# Create file with space
file_with_space = "file1.txt"
with open(file_with_space, "w") as f:
f.write(" ")
f.write("test")
# Create file without space and store hash
file_without_space = "file2.txt"
with open(file_without_space, "w") as f:
f.write("test")
expected_hash = get_file_hash(file_path=file_without_space, algorithm="md5")
# Strip whitespace and check that files are now the same
strip_whitespace(file_with_space)
self.assert_file_integrity(file_with_space, expected_hash, "md5")
# Check that file stays the same when first line is not a space
strip_whitespace(file_without_space)
self.assert_file_integrity(file_without_space, expected_hash, "md5")
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,396
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py
|
# Copyright 2021 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: James Diprose
from __future__ import annotations
import json
import logging
import os
from academic_observatory_workflows.config import elastic_mappings_folder
from academic_observatory_workflows.dags.elastic_import_workflow import load_elastic_mappings_ao
from observatory.platform.utils.config_utils import module_file_path
from observatory.platform.utils.file_utils import load_file
from observatory.platform.utils.jinja2_utils import render_template
from observatory.platform.utils.test_utils import ObservatoryEnvironment, ObservatoryTestCase
from observatory.platform.utils.workflow_utils import make_dag_id
class TestElasticImportWorkflow(ObservatoryTestCase):
"""Tests for the Elastic Import Workflow"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.project_id = os.getenv("TEST_GCP_PROJECT_ID")
self.data_location = os.getenv("TEST_GCP_DATA_LOCATION")
def test_load_elastic_mappings_ao(self):
"""Test load_elastic_mappings_ao"""
path = elastic_mappings_folder()
aggregate = "author"
expected = [
("ao_dois", load_file(os.path.join(path, "ao-dois-mappings.json"))),
(
"ao_author_access_types",
render_template(
os.path.join(path, "ao-access-types-mappings.json.jinja2"),
aggregate=aggregate,
facet="access_types",
),
),
(
"ao_author_disciplines",
render_template(
os.path.join(path, "ao-disciplines-mappings.json.jinja2"), aggregate=aggregate, facet="disciplines"
),
),
(
"ao_author_events",
render_template(
os.path.join(path, "ao-events-mappings.json.jinja2"), aggregate=aggregate, facet="events"
),
),
(
"ao_author_metrics",
render_template(
os.path.join(path, "ao-metrics-mappings.json.jinja2"), aggregate=aggregate, facet="metrics"
),
),
(
"ao_author_output_types",
render_template(
os.path.join(path, "ao-output-types-mappings.json.jinja2"),
aggregate=aggregate,
facet="output_types",
),
),
(
"ao_author_unique_list",
render_template(
os.path.join(path, "ao-unique-list-mappings.json.jinja2"), aggregate=aggregate, facet="unique_list"
),
),
(
"ao_author_countries",
render_template(
os.path.join(path, "ao-relations-mappings.json.jinja2"), aggregate=aggregate, facet="countries"
),
),
(
"ao_author_funders",
render_template(
os.path.join(path, "ao-relations-mappings.json.jinja2"), aggregate=aggregate, facet="funders"
),
),
(
"ao_author_groupings",
render_template(
os.path.join(path, "ao-relations-mappings.json.jinja2"), aggregate=aggregate, facet="groupings"
),
),
(
"ao_author_institutions",
render_template(
os.path.join(path, "ao-relations-mappings.json.jinja2"), aggregate=aggregate, facet="institutions"
),
),
(
"ao_author_journals",
render_template(
os.path.join(path, "ao-relations-mappings.json.jinja2"), aggregate=aggregate, facet="journals"
),
),
(
"ao_author_publishers",
render_template(
os.path.join(path, "ao-relations-mappings.json.jinja2"), aggregate=aggregate, facet="publishers"
),
),
]
for table_id, expected_mappings_str in expected:
logging.info(table_id)
expected_mappings = json.loads(expected_mappings_str)
actual_mappings = load_elastic_mappings_ao(path, table_id)
self.assertEqual(expected_mappings, actual_mappings)
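    # The table ids above follow an ao_<aggregate>_<facet> naming convention. A minimal sketch of
    # splitting such an id back into its parts (a hypothetical helper for illustration only; note
    # that the relation facets (countries, funders, groupings, institutions, journals, publishers)
    # all share the single ao-relations template, so the real mapping is not purely name-based):
    #
    #   def split_table_id(table_id: str) -> tuple:
    #       prefix, aggregate, facet = table_id.split("_", 2)  # e.g. ("ao", "author", "access_types")
    #       return prefix, aggregate, facet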
def test_dag_load(self):
"""Test that the DAG can be loaded from a DAG bag.
:return: None
"""
env = ObservatoryEnvironment(self.project_id, self.data_location, enable_api=False)
with env.create():
expected_dag_ids = [make_dag_id("elastic_import", suffix) for suffix in ["observatory"]]
dag_file = os.path.join(
module_file_path("academic_observatory_workflows.dags"), "elastic_import_workflow.py"
)
for dag_id in expected_dag_ids:
self.assert_dag_load(dag_id, dag_file)
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,397
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/dags/doi_workflow.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Richard Hosking, James Diprose
# The keywords airflow and DAG are required to load the DAGs from this file, see bullet 2 in the Apache Airflow FAQ:
# https://airflow.apache.org/docs/stable/faq.html
"""
A DAG that produces the dois table and aggregated tables for the dashboards.
Each release is saved to the following BigQuery tables:
<project_id>.observatory.countryYYYYMMDD
<project_id>.observatory.doiYYYYMMDD
<project_id>.observatory.funderYYYYMMDD
<project_id>.observatory.groupYYYYMMDD
<project_id>.observatory.institutionYYYYMMDD
<project_id>.observatory.journalYYYYMMDD
<project_id>.observatory.publisherYYYYMMDD
<project_id>.observatory.regionYYYYMMDD
<project_id>.observatory.subregionYYYYMMDD
Every week the following tables are overwritten for visualisation in the Data Studio dashboards:
<project_id>.coki_dashboards.country
<project_id>.coki_dashboards.doi
<project_id>.coki_dashboards.funder
<project_id>.coki_dashboards.group
<project_id>.coki_dashboards.institution
<project_id>.coki_dashboards.journal
<project_id>.coki_dashboards.publisher
<project_id>.coki_dashboards.region
<project_id>.coki_dashboards.subregion
"""
from academic_observatory_workflows.workflows.doi_workflow import DoiWorkflow
# Outputs data into the tables listed in the module docstring above.
doi_workflow = DoiWorkflow()
globals()[doi_workflow.dag_id] = doi_workflow.make_dag()
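# The assignment above places the DAG object in this module's global namespace, which is how
# Airflow's DagBag discovers DAGs (hence the airflow/DAG keyword comment at the top of this file).
# The same pattern generalised to several workflows would look like (illustrative sketch only):
#
#   for workflow in [DoiWorkflow()]:
#       globals()[workflow.dag_id] = workflow.make_dag()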
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,398
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/openalex_telescope.py
|
# Copyright 2022 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Aniek Roelofs
from __future__ import annotations
import gzip
import json
import logging
import os
import subprocess
from concurrent.futures import ProcessPoolExecutor, as_completed
from subprocess import Popen
from typing import List, Tuple
import boto3
import jsonlines
import pendulum
from airflow.exceptions import AirflowException, AirflowSkipException
from airflow.hooks.base import BaseHook
from airflow.models.variable import Variable
from google.cloud import storage
from observatory.platform.utils.airflow_utils import AirflowVars
from observatory.platform.utils.gc_utils import aws_to_google_cloud_storage_transfer
from observatory.platform.utils.proc_utils import wait_for_process
from observatory.platform.workflows.stream_telescope import (
StreamRelease,
StreamTelescope,
)
from academic_observatory_workflows.config import schema_folder as default_schema_folder
class OpenAlexRelease(StreamRelease):
def __init__(
self,
dag_id: str,
start_date: pendulum.DateTime,
end_date: pendulum.DateTime,
first_release: bool,
max_processes: int,
):
"""Construct a OpenAlexRelease instance
:param dag_id: the id of the DAG.
:param start_date: the start_date of the release.
:param end_date: the end_date of the release.
:param first_release: whether this is the first release that is processed for this DAG
:param max_processes: max processes for transforming files.
"""
super().__init__(
            dag_id, start_date, end_date, first_release, download_files_regex=r".*\.gz$", transform_files_regex=r".*\.gz$"
)
self.max_processes = max_processes
@property
def transfer_manifest_path_download(self) -> str:
"""Get the path to the file with ids of updated entities that are transferred to the download bucket.
:return: the file path.
"""
return os.path.join(self.download_folder, "transfer_manifest_download.csv")
@property
def transfer_manifest_path_transform(self) -> str:
"""Get the path to the file with ids of updated entities that are transferred to the transform bucket.
:return: the file path.
"""
return os.path.join(self.download_folder, "transfer_manifest_transform.csv")
# TODO uncomment when using transfer manifest
# @property
# def transfer_manifest_blob_download(self):
# return blob_name(self.transfer_manifest_path_download)
# @property
# def transfer_manifest_blob_transform(self):
# return blob_name(self.transfer_manifest_path_transform)
def write_transfer_manifest(self):
"""Write a transfer manifest file with filenames of files changed since the start date of this release.
A separate manifest file is created for the download and transform bucket.
Each filename excludes the s3 bucket name (s3://openalex) and is between double quotes, e.g.:
s3://openalex/data/works/updated_date=2021-12-17/0000_part_00.gz ->
"data/works/updated_date=2021-12-17/0000_part_00.gz"
:return: The number of updated entities.
"""
logging.info(
f"Writing info on updated entities from 'institution', 'concept' and 'work' to"
f" {self.transfer_manifest_path_download}"
)
logging.info(
f"Writing info on updated entities from 'author' and 'venue' to" f" {self.transfer_manifest_path_transform}"
)
aws_access_key_id, aws_secret_access_key = get_aws_conn_info()
s3client = boto3.client("s3", aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key)
updated_entities_count = 0
with open(self.transfer_manifest_path_download, "w") as f_download, open(
self.transfer_manifest_path_transform, "w"
) as f_transform:
for entity in ["authors", "concepts", "institutions", "venues", "works"]:
manifest_obj = s3client.get_object(Bucket=OpenAlexTelescope.AWS_BUCKET, Key=f"data/{entity}/manifest")
content = manifest_obj["Body"].read()
entries = json.loads(content.decode())["entries"]
for entry in entries:
updated_date_str = entry["url"].split("updated_date=")[1].split("/")[0]
updated_date = pendulum.from_format(updated_date_str, "YYYY-MM-DD")
if updated_date >= self.start_date:
object_name = '"' + entry["url"].replace("s3://openalex/", "") + '"\n'
if entity in ["authors", "venues"]:
f_transform.write(object_name)
else:
f_download.write(object_name)
updated_entities_count += 1
if updated_entities_count == 0:
raise AirflowSkipException("No updated entities to process")
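    # A worked example of the manifest rewrite above, using the entry from the docstring: an entry
    # url of "s3://openalex/data/works/updated_date=2021-12-17/0000_part_00.gz" is appended to the
    # download manifest (works are downloaded and transformed) as the line:
    #   "data/works/updated_date=2021-12-17/0000_part_00.gz"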
def transfer(self, max_retries):
"""Transfer files from AWS bucket to Google Cloud bucket
:param max_retries: Number of max retries to try the transfer
:return: None.
"""
aws_access_key_id, aws_secret_access_key = get_aws_conn_info()
gc_project_id = Variable.get(AirflowVars.PROJECT_ID)
# TODO use transfer manifest instead when that is working
download_transfer = {"manifest": self.transfer_manifest_path_download, "bucket": self.download_bucket}
transform_transfer = {"manifest": self.transfer_manifest_path_transform, "bucket": self.transform_bucket}
total_count = 0
for transfer in [download_transfer, transform_transfer]:
success = False
prefixes = []
with open(transfer["manifest"], "r") as f:
for line in f:
prefixes.append(line.strip("\n").strip('"'))
if not prefixes:
continue
for i in range(max_retries):
if success:
break
success, objects_count = aws_to_google_cloud_storage_transfer(
aws_access_key_id,
aws_secret_access_key,
aws_bucket=OpenAlexTelescope.AWS_BUCKET,
include_prefixes=prefixes,
gc_project_id=gc_project_id,
gc_bucket=transfer["bucket"],
gc_bucket_path=f"telescopes/{self.dag_id}/{self.release_id}/",
description=f"Transfer OpenAlex data from Airflow telescope to {transfer['bucket']}",
# transfer_manifest=f"gs://{self.download_bucket}/{self.transfer_manifest_blob}"
)
total_count += objects_count
if not success:
raise AirflowException(f"Google Storage Transfer unsuccessful, status: {success}")
logging.info(f"Total number of objects transferred: {total_count}")
def download_transferred(self):
"""Download the updated entities from the Google Cloud download bucket to a local directory using gsutil.
        Gsutil is used instead of the standard Google Cloud Python library because it is faster at downloading files:
        it supports multi-threading with the '-m' flag and can open multiple simultaneous connections to GCS.
        In future the 'gcloud storage' command might be used instead, which is faster still but currently in preview.
:return: None.
"""
# Authenticate gcloud with service account
args = [
"gcloud",
"auth",
"activate-service-account",
f"--key-file" f"={os.environ['GOOGLE_APPLICATION_CREDENTIALS']}",
]
proc: Popen = subprocess.Popen(
args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=dict(os.environ, CLOUDSDK_PYTHON="python3")
)
run_subprocess_cmd(proc, args)
logging.info(f"Downloading transferred files from Google Cloud bucket: {self.download_bucket}")
log_path = os.path.join(self.download_folder, "cp.log")
# Download all records from bucket using Gsutil
args = [
"gsutil",
"-m",
"-q",
"cp",
"-L",
log_path,
"-r",
f"gs://{self.download_bucket}/telescopes/{self.dag_id}/{self.release_id}/*",
self.download_folder,
]
proc: Popen = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
run_subprocess_cmd(proc, args)
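    # The command assembled above is equivalent to running, e.g. (bucket and release ids illustrative):
    #   gsutil -m -q cp -L <download_folder>/cp.log -r \
    #       gs://<download_bucket>/telescopes/openalex/<release_id>/* <download_folder>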
def transform(self):
"""Transform all files for the Work, Concept and Institution entities.
Transforms one file per process.
:return: None.
"""
logging.info(f"Transforming files, no. workers: {self.max_processes}")
with ProcessPoolExecutor(max_workers=self.max_processes) as executor:
futures = []
for file_path in self.download_files:
file = os.path.relpath(file_path, self.download_folder)
transform_path = os.path.join(self.transform_folder, file)
futures.append(executor.submit(transform_file, file_path, transform_path))
for future in as_completed(futures):
future.result()
class OpenAlexTelescope(StreamTelescope):
"""OpenAlex telescope"""
DAG_ID = "openalex"
AWS_BUCKET = "openalex"
AIRFLOW_CONN_AWS = "openalex"
def __init__(
self,
dag_id: str = DAG_ID,
start_date: pendulum.DateTime = pendulum.datetime(2021, 12, 1),
schedule_interval: str = "@weekly",
dataset_id: str = "openalex",
dataset_description: str = "The OpenAlex dataset: https://docs.openalex.org/about-the-data",
queue: str = "remote_queue",
merge_partition_field: str = "id",
schema_folder: str = os.path.join(default_schema_folder(), "openalex"),
airflow_vars: List = None,
airflow_conns: List = None,
max_processes: int = os.cpu_count(),
):
"""Construct an OpenAlexTelescope instance.
:param dag_id: the id of the DAG.
:param start_date: the start date of the DAG.
:param schedule_interval: the schedule interval of the DAG.
:param dataset_id: the dataset id.
:param dataset_description: the dataset description.
:param queue: the queue that the tasks should run on.
:param merge_partition_field: the BigQuery field used to match partitions for a merge
:param schema_folder: the SQL schema path.
        :param airflow_vars: list of airflow variable keys, for each variable it is checked if it exists in airflow
        :param airflow_conns: list of airflow connection keys, for each connection it is checked if it exists in airflow
        :param max_processes: max processes for transforming files.
"""
if airflow_vars is None:
airflow_vars = [
AirflowVars.DATA_PATH,
AirflowVars.PROJECT_ID,
AirflowVars.DATA_LOCATION,
AirflowVars.DOWNLOAD_BUCKET,
AirflowVars.TRANSFORM_BUCKET,
]
if airflow_conns is None:
airflow_conns = [
self.AIRFLOW_CONN_AWS,
]
super().__init__(
dag_id,
start_date,
schedule_interval,
dataset_id,
merge_partition_field,
schema_folder,
dataset_description=dataset_description,
queue=queue,
airflow_vars=airflow_vars,
airflow_conns=airflow_conns,
load_bigquery_table_kwargs={"ignore_unknown_values": True},
)
self.max_processes = max_processes
self.add_setup_task(self.check_dependencies)
self.add_task(self.write_transfer_manifest)
# self.add_task(self.upload_transfer_manifest) #TODO uncomment when using transfer manifest
self.add_task(self.transfer)
self.add_task(self.download_transferred)
self.add_task(self.transform)
self.add_task(self.upload_transformed)
self.add_task(self.bq_load_partition)
self.add_task_chain([self.bq_delete_old, self.bq_append_new, self.cleanup], trigger_rule="none_failed")
def get_bq_load_info(self, release: OpenAlexRelease) -> List[Tuple[str, str, str]]:
"""Get a list of the transform blob, main table id and partition table id that are used to load data into
BigQuery.
        This method overrides the parent class' method for this telescope, because there are transform files
        inside the transform bucket that were transferred directly, which means that they can't be found with
        the 'release.transform_files' property that is normally used.
:param release: The release object.
:return: List with tuples of transform_blob, main_table_id and partition_table_id
"""
base_transform_blob = os.path.join("telescopes", "openalex", release.release_id, "data")
bq_load_info = []
for entity in ["authors", "concepts", "institutions", "venues", "works"]:
# Check if files exist in folder
client = storage.Client()
exists = list(
client.list_blobs(
release.transform_bucket,
prefix=f"telescopes/{release.dag_id}" f"/{release.release_id}/data/{entity}",
max_results=1,
)
)
if exists:
table_name = entity[:-1].capitalize()
bq_load_info.append((f"{base_transform_blob}/{entity}/*", table_name, f"{table_name}_partitions"))
return bq_load_info
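    # For example, if only the 'works' entity has files in the transform bucket for a release, the
    # method above yields a single tuple (release id illustrative):
    #   ("telescopes/openalex/<release_id>/data/works/*", "Work", "Work_partitions")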
def make_release(self, **kwargs) -> OpenAlexRelease:
"""Make a Release instance
:param kwargs: The context passed from the PythonOperator.
:return: an OpenAlexRelease
"""
start_date, end_date, first_release = self.get_release_info(**kwargs)
release = OpenAlexRelease(self.dag_id, start_date, end_date, first_release, self.max_processes)
return release
def write_transfer_manifest(self, release: OpenAlexRelease, **kwargs):
"""Task to write transfer manifest files used during transfer.
:param release: an OpenAlexRelease instance.
:param kwargs: The context passed from the PythonOperator.
:return: None.
"""
release.write_transfer_manifest()
# TODO uncomment when transfer manifest works
# def upload_transfer_manifest(self, release: OpenAlexRelease, **kwargs):
# upload_file_to_cloud_storage(release.download_bucket, release.transfer_manifest_blob_download,
# release.transfer_manifest_path_download)
# upload_file_to_cloud_storage(release.download_bucket, release.transfer_manifest_blob_transform,
# release.transfer_manifest_path_transform)
def transfer(self, release: OpenAlexRelease, **kwargs):
"""Task to transfer the OpenAlex data
:param release: an OpenAlexRelease instance.
:param kwargs: The context passed from the PythonOperator.
:return: None.
"""
release.transfer(max_retries=self.max_retries)
def download_transferred(self, release: OpenAlexRelease, **kwargs):
"""Task to download the OpenAlexRelease data.
:param release: an OpenAlexRelease instance.
:param kwargs: The context passed from the PythonOperator.
:return: None.
"""
release.download_transferred()
def transform(self, release: OpenAlexRelease, **kwargs):
"""Task to transform the OpenAlexRelease data.
:param release: an OpenAlexRelease instance.
:param kwargs: The context passed from the PythonOperator.
:return: None.
"""
release.transform()
def get_aws_conn_info() -> Tuple[str, str]:
"""Get the AWS access key id and secret access key from the OpenAlex airflow connection.
:return: access key id and secret access key
"""
conn = BaseHook.get_connection(OpenAlexTelescope.AIRFLOW_CONN_AWS)
access_key_id = conn.login
secret_access_key = conn.password
return access_key_id, secret_access_key
def run_subprocess_cmd(proc: Popen, args: list):
"""Execute and wait for subprocess to finish, also handle stdout & stderr from process.
:param proc: subprocess proc
:param args: args list that was passed on to subprocess
:return: None.
"""
logging.info(f"Executing bash command: {subprocess.list2cmdline(args)}")
out, err = wait_for_process(proc)
if out:
logging.info(out)
if err:
logging.info(err)
    if proc.returncode != 0:
        # Don't raise an exception if the only errors are that blobs could not be found in the bucket;
        # gsutil reports these as lines containing "No URLs matched" (assumed gsutil message format)
        err_lines = [line for line in err.split("\n") if line and "No URLs matched" not in line]
        if err_lines:
            raise AirflowException("bash command failed")
logging.info("Finished cmd successfully")
def transform_file(download_path: str, transform_path: str):
"""Transforms a single file.
Each entry/object in the gzip input file is transformed and the transformed object is immediately written out to
a gzip file.
For each entity only one field has to be transformed.
:param download_path: The path to the file with the OpenAlex entries.
:param transform_path: The path where transformed data will be saved
:return: None.
"""
if not os.path.isdir(os.path.dirname(transform_path)):
os.makedirs(os.path.dirname(transform_path))
logging.info(f"Transforming {download_path}")
with gzip.open(download_path, "rb") as f_in, gzip.open(transform_path, "wt", encoding="ascii") as f_out:
reader = jsonlines.Reader(f_in)
for obj in reader:
if "works" in download_path:
transform_object(obj, "abstract_inverted_index")
else:
transform_object(obj, "international")
json.dump(obj, f_out)
f_out.write("\n")
logging.info(f"Finished transform, saved to {transform_path}")
def transform_object(obj: dict, field: str):
"""Transform an entry/object for one of the OpenAlex entities.
For the Work entity only the "abstract_inverted_index" field is transformed.
For the Concept and Institution entities only the "international" field is transformed.
:param obj: Single object with entity information
    :param field: The field of interest that is transformed.
:return: None.
"""
if field == "international":
for nested_field in obj.get(field, {}).keys():
if not isinstance(obj[field][nested_field], dict):
continue
keys = list(obj[field][nested_field].keys())
values = list(obj[field][nested_field].values())
obj[field][nested_field] = {"keys": keys, "values": values}
elif field == "abstract_inverted_index":
if not isinstance(obj.get(field), dict):
return
keys = list(obj[field].keys())
values = [str(value)[1:-1] for value in obj[field].values()]
obj[field] = {"keys": keys, "values": values}
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,399
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/crossref_events_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Aniek Roelofs
from __future__ import annotations
import logging
import os
import re
from concurrent.futures import ThreadPoolExecutor, as_completed, ProcessPoolExecutor
from typing import List, Tuple, Union
import jsonlines
import pendulum
import requests
from airflow.exceptions import AirflowSkipException
from tenacity import RetryError, retry, stop_after_attempt, wait_exponential, wait_fixed
from academic_observatory_workflows.config import schema_folder as default_schema_folder
from observatory.platform.utils.airflow_utils import AirflowVars
from observatory.platform.utils.url_utils import get_user_agent
from observatory.platform.utils.workflow_utils import upload_files_from_list
from observatory.platform.workflows.stream_telescope import (
StreamRelease,
StreamTelescope,
)
class CrossrefEventsRelease(StreamRelease):
def __init__(
self,
dag_id: str,
start_date: pendulum.DateTime,
end_date: pendulum.DateTime,
first_release: bool,
mailto: str,
max_threads: int,
max_processes: int,
):
"""Construct a CrossrefEventsRelease instance
:param dag_id: the id of the DAG.
:param start_date: the start_date of the release.
:param end_date: the end_date of the release.
:param first_release: whether this is the first release that is processed for this DAG
:param mailto: Email address used in the download url
:param max_threads: Max threads used for parallel downloading
:param max_processes: max processes for transforming files.
"""
download_files_regex = r".*.jsonl$"
transform_files_regex = r".*.jsonl$"
super().__init__(
dag_id,
start_date,
end_date,
first_release,
download_files_regex=download_files_regex,
transform_files_regex=transform_files_regex,
)
self.mailto = mailto
self.max_threads = max_threads
self.max_processes = max_processes
@property
def urls(self) -> list:
urls = []
start_date = self.start_date.date()
end_date = self.end_date.date()
period = pendulum.period(start_date, end_date)
for dt in period.range("days"):
date_str = dt.strftime("%Y-%m-%d")
start_date = date_str
end_date = date_str
events_url = (
f"https://api.eventdata.crossref.org/v1/events?mailto={self.mailto}"
f"&from-collected-date={start_date}&until-collected-date={end_date}&rows=1000"
)
edited_url = (
f"https://api.eventdata.crossref.org/v1/events/edited?mailto={self.mailto}"
f"&from-updated-date={start_date}&until-updated-date={end_date}&rows=1000"
)
deleted_url = (
f"https://api.eventdata.crossref.org/v1/events/deleted?mailto={self.mailto}"
f"&from-updated-date={start_date}&until-updated-date={end_date}&rows=1000"
)
urls.append(events_url)
if not self.first_release:
urls.append(edited_url)
urls.append(deleted_url)
return urls
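    # For a release covering e.g. 2021-01-01 the property above yields, per day (mailto illustrative):
    #   https://api.eventdata.crossref.org/v1/events?mailto=me@example.com&from-collected-date=2021-01-01&until-collected-date=2021-01-01&rows=1000
    # plus the /events/edited and /events/deleted variants when this is not the first release.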
def batch_path(self, url, cursor: bool = False) -> str:
"""Gets the appropriate file path for a single batch, either for an events or cursor file.
:param url: The url used for a specific batch
:param cursor: Whether this is a cursor file or file with actual events
:return: Path to the events or cursor file
"""
event_type, date = parse_event_url(url)
if cursor:
return os.path.join(self.download_folder, f"{event_type}_{date}_cursor.txt")
else:
return os.path.join(self.download_folder, f"{event_type}_{date}.jsonl")
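    # e.g. for an events URL with from-collected-date=2021-01-01, batch_path returns (folder illustrative):
    #   <download_folder>/events_2021-01-01.jsonl, or <download_folder>/events_2021-01-01_cursor.txt with cursor=True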
def download(self):
"""Download all events.
:return: None.
"""
logging.info(f"Downloading events, no. workers: {self.max_threads}")
logging.info(f"Downloading using these URLs, but with different start and end dates: {self.urls[0]}")
with ThreadPoolExecutor(max_workers=self.max_threads) as executor:
futures = []
for i, url in enumerate(self.urls):
futures.append(executor.submit(self.download_batch, i, url))
for future in as_completed(futures):
future.result()
if len(self.download_files) == 0:
raise AirflowSkipException("No events found")
def download_batch(self, i: int, url: str):
"""Download one day of events. When the download finished successfully, the generated cursor file is deleted.
If there is a cursor file available at the start, it means that a previous download attempt failed. If there
is an events file available and no cursor file, it means that a previous download attempt was successful,
so these events will not be downloaded again.
:param i: URL counter
:param url: The url from which to download events
:return: None.
"""
events_path = self.batch_path(url)
cursor_path = self.batch_path(url, cursor=True)
event_type, date = parse_event_url(url)
        # If the events file exists but there is no cursor file, a previous request finished successfully
if os.path.isfile(events_path) and not os.path.isfile(cursor_path):
logging.info(f"{i + 1}.{event_type} Skipped, already finished: {date}")
return
logging.info(f"{i + 1}.{event_type} Downloading date: {date}")
headers = {"User-Agent": get_user_agent(package_name="academic_observatory_workflows")}
next_cursor, counts, total_events = download_events(url, headers, events_path, cursor_path)
counter = counts
while next_cursor:
tmp_url = url + f"&cursor={next_cursor}"
next_cursor, counts, _ = download_events(tmp_url, headers, events_path, cursor_path)
counter += counts
if os.path.isfile(cursor_path):
os.remove(cursor_path)
logging.info(
f"{i + 1}.{event_type} successful, date: {date}, total no. events: {total_events}, downloaded "
f"events: {counter}"
)
def transform(self):
"""Transform all events.
:return: None.
"""
logging.info(f"Transforming events, no. workers: {self.max_processes}")
with ProcessPoolExecutor(max_workers=self.max_processes) as executor:
futures = []
for file in self.download_files:
futures.append(executor.submit(transform_batch, file, self.transform_folder))
for future in as_completed(futures):
future.result()
def transform_batch(download_path: str, transform_folder: str):
"""Transform one day of events.
:param download_path: The path to the downloaded file.
:param transform_folder: the transform folder.
:return: None.
"""
file_name = os.path.basename(download_path)
transform_path = os.path.join(transform_folder, file_name)
logging.info(f"Transforming file: {download_path}")
logging.info(f"Saving to: {transform_path}")
with jsonlines.open(download_path, "r") as reader:
with jsonlines.open(transform_path, "w") as writer:
for event in reader:
event = transform_events(event)
writer.write(event)
logging.info(f"Finished: {file_name}")
class CrossrefEventsTelescope(StreamTelescope):
"""Crossref Events telescope"""
DAG_ID = "crossref_events"
def __init__(
self,
dag_id: str = DAG_ID,
start_date: pendulum.DateTime = pendulum.datetime(2018, 5, 14),
schedule_interval: str = "@weekly",
dataset_id: str = "crossref",
dataset_description: str = "The Crossref Events dataset: https://www.eventdata.crossref.org/guide/",
queue: str = "remote_queue",
merge_partition_field: str = "id",
schema_folder: str = default_schema_folder(),
batch_load: bool = True,
airflow_vars: List = None,
mailto: str = "aniek.roelofs@curtin.edu.au",
max_threads: int = min(32, os.cpu_count() + 4),
max_processes: int = os.cpu_count(),
):
"""Construct a CrossrefEventsTelescope instance.
:param dag_id: the id of the DAG.
:param start_date: the start date of the DAG.
:param schedule_interval: the schedule interval of the DAG.
:param dataset_id: the dataset id.
:param dataset_description: the dataset description.
:param queue: the queue that the tasks should run on.
:param merge_partition_field: the BigQuery field used to match partitions for a merge
:param schema_folder: the SQL schema path.
:param batch_load: whether all files in the transform folder are loaded into 1 table at once
:param airflow_vars: list of airflow variable keys, for each variable it is checked if it exists in airflow
:param mailto: Email address used in the download url
        :param max_threads: Max threads used for parallel downloading, defaults to min(32, os.cpu_count() + 4)
:param max_processes: max processes for transforming files.
"""
if airflow_vars is None:
airflow_vars = [
AirflowVars.DATA_PATH,
AirflowVars.PROJECT_ID,
AirflowVars.DATA_LOCATION,
AirflowVars.DOWNLOAD_BUCKET,
AirflowVars.TRANSFORM_BUCKET,
]
super().__init__(
dag_id,
start_date,
schedule_interval,
dataset_id,
merge_partition_field,
schema_folder,
dataset_description=dataset_description,
queue=queue,
batch_load=batch_load,
airflow_vars=airflow_vars,
load_bigquery_table_kwargs={"ignore_unknown_values": True},
)
self.mailto = mailto
self.max_threads = max_threads
self.max_processes = max_processes
self.add_setup_task(self.check_dependencies)
self.add_task_chain(
[self.download, self.upload_downloaded, self.transform, self.upload_transformed, self.bq_load_partition]
)
self.add_task_chain([self.bq_delete_old, self.bq_append_new, self.cleanup], trigger_rule="none_failed")
def make_release(self, **kwargs) -> CrossrefEventsRelease:
"""Make a Release instance
:param kwargs: The context passed from the PythonOperator.
:return: CrossrefEventsRelease
"""
start_date, end_date, first_release = self.get_release_info(**kwargs)
release = CrossrefEventsRelease(
self.dag_id, start_date, end_date, first_release, self.mailto, self.max_threads, self.max_processes
)
return release
def download(self, release: CrossrefEventsRelease, **kwargs):
"""Task to download the CrossrefEventsRelease release.
:param release: a CrossrefEventsRelease instance.
:param kwargs: The context passed from the PythonOperator.
:return: None.
"""
release.download()
def upload_downloaded(self, release: CrossrefEventsRelease, **kwargs):
"""Upload the downloaded files for the given release.
:param release: a CrossrefEventsRelease instance
:param kwargs: The context passed from the PythonOperator.
:return: None.
"""
upload_files_from_list(release.download_files, release.download_bucket)
def transform(self, release: CrossrefEventsRelease, **kwargs):
"""Task to transform the CrossrefEventsRelease release.
:param release: a CrossrefEventsRelease instance.
:param kwargs: The context passed from the PythonOperator.
:return: None.
"""
release.transform()
@retry(
stop=stop_after_attempt(3),
wait=wait_fixed(20) + wait_exponential(multiplier=10, exp_base=3, max=60 * 10),
)
def get_response(url: str, headers: dict):
"""Get response from the url with given headers and retry for certain status codes.
:param url: The url
:param headers: The headers dict
:return: The response
"""
response = requests.get(url, headers=headers)
if response.status_code in [500, 400, 429]:
logging.info(
f'Downloading events from url: {url}, attempt: {get_response.retry.statistics["attempt_number"]}, '
f'idle for: {get_response.retry.statistics["idle_for"]}'
)
raise ConnectionError("Retrying url")
return response
def parse_event_url(url: str) -> Tuple[str, str]:
"""Parse the URL to get the event type and date
:param url: The url
:return: The event type and date
"""
event_type = url.split("?mailto")[0].split("/")[-1]
if event_type == "events":
date = url.split("from-collected-date=")[1].split("&")[0]
else:
date = url.split("from-updated-date=")[1].split("&")[0]
return event_type, date
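# A quick check of parse_event_url on a hypothetical URL (illustrative only):
if __name__ == "__main__":
    example_url = (
        "https://api.eventdata.crossref.org/v1/events?mailto=me@example.com"
        "&from-collected-date=2021-01-01&until-collected-date=2021-01-01&rows=1000"
    )
    assert parse_event_url(example_url) == ("events", "2021-01-01")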
def download_events(url: str, headers: dict, events_path: str, cursor_path: str) -> Tuple[Union[str, None], int, int]:
"""Extract the events from the given url until no new cursor is returned or a RetryError occurs.
The extracted events are appended to a jsonl file and the cursors are written to a text file.
:param url: The url
:param headers: The headers dict
:param events_path: Path to the file in which events are stored.
:param cursor_path: Path to the file where cursors are stored.
:return: next_cursor, counter of events and total number of events according to the response
"""
try:
response = get_response(url, headers)
except RetryError:
# Try again with rows set to 100
url = re.sub("rows=[0-9]*", "rows=100", url)
response = get_response(url, headers)
if response.status_code == 200:
response_json = response.json()
total_events = response_json["message"]["total-results"]
events = response_json["message"]["events"]
next_cursor = response_json["message"]["next-cursor"]
counter = len(events)
# append events and cursor
if events:
with open(events_path, "a") as f:
with jsonlines.Writer(f) as writer:
writer.write_all(events)
if next_cursor:
with open(cursor_path, "a") as f:
f.write(next_cursor + "\n")
return next_cursor, counter, total_events
else:
raise ConnectionError(f"Error requesting url: {url}, response: {response.text}")
def transform_events(event):
"""Transform the dictionary with event data by replacing '-' with '_' in key names, converting all int values to
string except for the 'total' field and parsing datetime columns for a valid datetime.
:param event: The event dictionary
:return: The updated event dictionary
"""
if isinstance(event, (str, int, float)):
return event
if isinstance(event, dict):
new = event.__class__()
for k, v in event.items():
if isinstance(v, int) and k != "total":
v = str(v)
if k in ["timestamp", "occurred_at", "issued", "dateModified", "updated_date"]:
try:
v = str(pendulum.parse(v))
except ValueError:
v = "0001-01-01T00:00:00Z"
# Replace hyphens with underscores for BigQuery compatibility
k = k.replace("-", "_")
# Replace @ symbol in keys left by DataCite between the 15 and 22 March 2019
k = k.replace("@", "")
new[k] = transform_events(v)
return new
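# Illustrative only (field values are made up): given
#   {"occurred_at": "2021-01-01", "total": 3, "license": 40, "subj-id": "https://doi.org/10.1234/abc"}
# transform_events returns
#   {"occurred_at": "2021-01-01T00:00:00+00:00", "total": 3, "license": "40", "subj_id": "https://doi.org/10.1234/abc"}
# i.e. the datetime is normalised, ints other than 'total' become strings, and hyphens in keys become underscores.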
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,400
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/geonames_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Aniek Roelofs, James Diprose
from __future__ import annotations
import gzip
import logging
import os
import shutil
from typing import Dict, List
from zipfile import ZipFile
import pendulum
import requests
from academic_observatory_workflows.config import schema_folder as default_schema_folder
from airflow.models.taskinstance import TaskInstance
from google.cloud.bigquery import SourceFormat
from observatory.platform.utils.airflow_utils import AirflowVars
from observatory.platform.utils.http_download import download_file
from observatory.platform.workflows.snapshot_telescope import (
SnapshotRelease,
SnapshotTelescope,
)
def fetch_release_date() -> pendulum.DateTime:
"""Fetch the Geonames release date.
:return: the release date.
"""
response = requests.head(GeonamesRelease.DOWNLOAD_URL)
date_str = response.headers["Last-Modified"]
date: pendulum.DateTime = pendulum.from_format(date_str, "ddd, DD MMM YYYY HH:mm:ss z")
return date
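# Illustrative only: a Last-Modified header such as "Sun, 05 Sep 2021 01:23:45 GMT"
# parses to the pendulum datetime 2021-09-05 01:23:45 in the GMT timezone.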
def first_sunday_of_month(datetime: pendulum.DateTime) -> pendulum.DateTime:
"""Get the first Sunday of the month based on a given datetime.
:param datetime: the datetime.
:return: the first Sunday of the month.
"""
return datetime.start_of("month").first_of("month", day_of_week=7)
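# Illustrative only: any datetime in September 2020 maps to 2020-09-06, the first Sunday
# of that month (assuming day_of_week=7 denotes Sunday in the installed pendulum version).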
class GeonamesRelease(SnapshotRelease):
DOWNLOAD_URL = "https://download.geonames.org/export/dump/allCountries.zip"
def __init__(self, dag_id: str, release_date: pendulum.DateTime):
"""Create a GeonamesRelease instance.
:param dag_id: the DAG id.
:param release_date: the date of the release.
"""
download_file_name = f"{dag_id}.zip"
extract_file_name = "allCountries.txt"
transform_file_name = f"{dag_id}.csv.gz"
super().__init__(dag_id, release_date, download_file_name, extract_file_name, transform_file_name)
@property
def download_path(self) -> str:
"""Get the path to the downloaded file.
:return: the file path.
"""
return os.path.join(self.download_folder, self.download_files_regex)
@property
def extract_path(self) -> str:
"""Get the path to the extracted file.
:return: the file path.
"""
return os.path.join(self.extract_folder, self.extract_files_regex)
@property
def transform_path(self) -> str:
"""Get the path to the transformed file.
:return: the file path.
"""
return os.path.join(self.transform_folder, self.transform_files_regex)
def download(self):
"""Downloads geonames dump file containing country data. The file is in zip format and will be extracted
after downloading, saving the unzipped content.
:return: None
"""
download_file(url=GeonamesRelease.DOWNLOAD_URL, filename=self.download_path)
logging.info(f"Downloaded file: {self.download_path}")
def extract(self):
"""Extract a downloaded Geonames release.
:return: None
"""
with ZipFile(self.download_path) as zip_file:
zip_file.extractall(self.extract_folder)
def transform(self):
"""Transforms release by storing file content in gzipped csv format.
:return: None
"""
with open(self.extract_path, "rb") as file_in:
with gzip.open(self.transform_path, "wb") as file_out:
shutil.copyfileobj(file_in, file_out)
class GeonamesTelescope(SnapshotTelescope):
"""
A Telescope that harvests the GeoNames geographical database: https://www.geonames.org/
Saved to the BigQuery table: <project_id>.geonames.geonamesYYYYMMDD
"""
DAG_ID = "geonames"
def __init__(
self,
dag_id: str = DAG_ID,
start_date: pendulum.DateTime = pendulum.datetime(2020, 9, 1),
schedule_interval: str = "@weekly",
dataset_id: str = "geonames",
schema_folder: str = default_schema_folder(),
source_format: str = SourceFormat.CSV,
dataset_description: str = "The GeoNames geographical database: https://www.geonames.org/",
load_bigquery_table_kwargs: Dict = None,
table_descriptions: Dict = None,
catchup: bool = False,
airflow_vars: List = None,
):
"""The Geonames telescope.
:param dag_id: the id of the DAG.
:param start_date: the start date of the DAG.
:param schedule_interval: the schedule interval of the DAG.
:param dataset_id: the BigQuery dataset id.
:param schema_folder: the SQL schema path.
:param source_format: the format of the data to load into BigQuery.
:param dataset_description: description for the BigQuery dataset.
:param load_bigquery_table_kwargs: the customisation parameters for loading data into a BigQuery table.
:param table_descriptions: a dictionary with table ids and corresponding table descriptions.
:param catchup: whether to catchup the DAG or not.
:param airflow_vars: list of Airflow variable keys; each variable is checked for existence in Airflow.
"""
if load_bigquery_table_kwargs is None:
load_bigquery_table_kwargs = {
"csv_field_delimiter": "\t",
"csv_quote_character": "",
"ignore_unknown_values": True,
}
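# The GeoNames dump is a tab-separated file with unquoted fields, hence the tab
# delimiter and empty quote character above.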
if table_descriptions is None:
table_descriptions = {dag_id: "The GeoNames table."}
if airflow_vars is None:
airflow_vars = [
AirflowVars.DATA_PATH,
AirflowVars.PROJECT_ID,
AirflowVars.DATA_LOCATION,
AirflowVars.DOWNLOAD_BUCKET,
AirflowVars.TRANSFORM_BUCKET,
]
super().__init__(
dag_id,
start_date,
schedule_interval,
dataset_id,
schema_folder,
source_format=source_format,
load_bigquery_table_kwargs=load_bigquery_table_kwargs,
dataset_description=dataset_description,
table_descriptions=table_descriptions,
catchup=catchup,
airflow_vars=airflow_vars,
)
self.add_setup_task(self.check_dependencies)
self.add_setup_task(self.fetch_release_date)
self.add_task(self.download)
self.add_task(self.upload_downloaded)
self.add_task(self.extract)
self.add_task(self.transform)
self.add_task(self.upload_transformed)
self.add_task(self.bq_load)
self.add_task(self.cleanup)
def make_release(self, **kwargs) -> List[GeonamesRelease]:
"""Make release instances. The release is passed as an argument to the function (TelescopeFunction) that is
called in 'task_callable'.
:param kwargs: the context passed from the PythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html for a list of the keyword arguments that are
passed to this argument.
:return: a list of GeonamesRelease instances.
"""
ti: TaskInstance = kwargs["ti"]
release_date = ti.xcom_pull(
key=GeonamesTelescope.RELEASE_INFO, task_ids=self.fetch_release_date.__name__, include_prior_dates=False
)
return [GeonamesRelease(self.dag_id, pendulum.parse(release_date))]
def fetch_release_date(self, **kwargs):
"""Get the Geonames release for a given month and publishes the release_date as an XCom.
:param kwargs: the context passed from the BranchPythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html
for a list of the keyword arguments that are passed to this argument.
:return: whether to keep executing the DAG.
"""
# Check if first Sunday of month
execution_date = kwargs["execution_date"]
run_date = first_sunday_of_month(execution_date)
logging.info(f"execution_date={execution_date}, run_date={run_date}")
# If first Sunday of month get current release date and push for processing
continue_dag = execution_date == run_date
if continue_dag:
# Fetch release date
release_date = fetch_release_date()
# Push messages
ti: TaskInstance = kwargs["ti"]
ti.xcom_push(GeonamesTelescope.RELEASE_INFO, release_date.format("YYYYMMDD"), execution_date)
return continue_dag
def download(self, releases: List[GeonamesRelease], **kwargs):
"""Task to download the GeonamesRelease release for a given month.
:param releases: the list of GeonamesRelease instances.
:return: None.
"""
# Download each release
for release in releases:
release.download()
def extract(self, releases: List[GeonamesRelease], **kwargs):
"""Task to extract the GeonamesRelease release for a given month.
:param releases: the list of GeonamesRelease instances.
:return: None.
"""
for release in releases:
release.extract()
def transform(self, releases: List[GeonamesRelease], **kwargs):
"""Task to transform the GeonamesRelease release for a given month.
:param releases: the list of GeonamesRelease instances.
:return: None.
"""
for release in releases:
release.transform()
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,401
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py
|
# Copyright 2021 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: James Diprose, Aniek Roelofs
import json
import os
from typing import List
from unittest import TestCase
from unittest.mock import patch
import httpretty
import jsonlines
import nltk
import pandas as pd
import pendulum
import vcr
from airflow.exceptions import AirflowException
from airflow.models.connection import Connection
from airflow.models.variable import Variable
from airflow.utils.state import State
from click.testing import CliRunner
import academic_observatory_workflows.workflows.oa_web_workflow
from academic_observatory_workflows.config import schema_folder, test_fixtures_folder
from academic_observatory_workflows.workflows.oa_web_workflow import (
Description,
OaWebRelease,
OaWebWorkflow,
calc_oa_stats,
clean_ror_id,
clean_url,
get_institution_logo,
get_wiki_descriptions,
make_logo_url,
remove_text_between_brackets,
shorten_text_full_sentences,
split_largest_remainder,
val_empty,
trigger_repository_dispatch,
)
from observatory.platform.utils.file_utils import load_jsonl
from observatory.platform.utils.test_utils import (
ObservatoryEnvironment,
ObservatoryTestCase,
Table,
bq_load_tables,
make_dummy_dag,
module_file_path,
)
academic_observatory_workflows.workflows.oa_web_workflow.INCLUSION_THRESHOLD = 0
class TestFunctions(TestCase):
def test_val_empty(self):
# Empty list
self.assertTrue(val_empty([]))
# Non empty list
self.assertFalse(val_empty([1, 2, 3]))
# None
self.assertTrue(val_empty(None))
# Empty string
self.assertTrue(val_empty(""))
# Non Empty string
self.assertFalse(val_empty("hello"))
def test_clean_ror_id(self):
actual = clean_ror_id("https://ror.org/02n415q13")
expected = "02n415q13"
self.assertEqual(actual, expected)
def test_split_largest_remainder(self):
# Check that if ratios do not sum to 1 an AssertionError is raised
with self.assertRaises(AssertionError):
sample_size = 100
ratios = [0.1, 0.2, 0.4, 100]
split_largest_remainder(sample_size, *ratios)
# Test that correct absolute values are returned
sample_size = 10
ratios = [0.11, 0.21, 0.68]
results = split_largest_remainder(sample_size, *ratios)
self.assertEqual((1, 2, 7), results)
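# Worked example of the expected largest-remainder rounding (assuming split_largest_remainder,
# whose source is not shown here, implements the standard method): 10 * [0.11, 0.21, 0.68]
# = [1.1, 2.1, 6.8]; the floors [1, 2, 6] sum to 9, and the one leftover unit goes to the
# largest fractional remainder (0.8), giving (1, 2, 7).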
def test_clean_url(self):
url = "https://www.auckland.ac.nz/en.html"
expected = "https://www.auckland.ac.nz/"
actual = clean_url(url)
self.assertEqual(expected, actual)
def test_make_logo_url(self):
expected = "/logos/country/s/1234.jpg"
actual = make_logo_url(category="country", entity_id="1234", size="s", fmt="jpg")
self.assertEqual(expected, actual)
def test_calc_oa_stats(self):
n_outputs = 100
n_outputs_open = 33
n_outputs_publisher_open = 24
n_outputs_other_platform_open = 22
n_outputs_other_platform_open_only = 9
n_outputs_publisher_open_only, n_outputs_both, n_outputs_closed = calc_oa_stats(
n_outputs,
n_outputs_open,
n_outputs_publisher_open,
n_outputs_other_platform_open,
n_outputs_other_platform_open_only,
)
self.assertEqual(11, n_outputs_publisher_open_only)
self.assertEqual(13, n_outputs_both)
self.assertEqual(67, n_outputs_closed)
total = n_outputs_publisher_open_only + n_outputs_both + n_outputs_other_platform_open_only + n_outputs_closed
self.assertEqual(100, total)
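# One way to derive the asserted values (assuming calc_oa_stats, whose source is not shown
# here, uses inclusion-exclusion): both = 24 + 22 - 33 = 13; publisher_open_only = 24 - 13 = 11;
# closed = 100 - 33 = 67; and 11 + 13 + 9 + 67 = 100.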
@patch("academic_observatory_workflows.workflows.oa_web_workflow.requests.post")
def test_trigger_repository_dispatch(self, mock_requests_post):
trigger_repository_dispatch(token="my-token", event_type="my-event-type")
mock_requests_post.assert_called_once()
@patch("academic_observatory_workflows.workflows.oa_web_workflow.make_logo_url")
def test_get_institution_logo(self, mock_make_url):
mock_make_url.return_value = "logo_path"
mock_clearbit_ref = "academic_observatory_workflows.workflows.oa_web_workflow.clearbit_download_logo"
def download_logo(company_url, file_path, size, fmt):
if not os.path.isdir(os.path.dirname(file_path)):
os.makedirs(os.path.dirname(file_path))
with open(file_path, "w") as f:
f.write("foo")
ror_id, url, size, width, fmt, build_path = "ror_id", "url.com", "size", 10, "fmt", "build_path"
with CliRunner().isolated_filesystem():
# Test when logo file does not exist yet and logo download fails
with patch(mock_clearbit_ref) as mock_clearbit_download:
actual_ror_id, actual_logo_path = get_institution_logo(ror_id, url, size, width, fmt, build_path)
self.assertEqual(ror_id, actual_ror_id)
self.assertEqual("/unknown.svg", actual_logo_path)
mock_clearbit_download.assert_called_once_with(
company_url=url, file_path="build_path/logos/institution/size/ror_id.fmt", size=width, fmt=fmt
)
mock_make_url.assert_not_called()
mock_make_url.reset_mock()
# Test when logo file does not exist yet and logo is downloaded successfully
with patch(mock_clearbit_ref, wraps=download_logo) as mock_clearbit_download:
actual_ror_id, actual_logo_path = get_institution_logo(ror_id, url, size, width, fmt, build_path)
self.assertEqual(ror_id, actual_ror_id)
self.assertEqual("logo_path", actual_logo_path)
mock_clearbit_download.assert_called_once_with(
company_url=url, file_path="build_path/logos/institution/size/ror_id.fmt", size=width, fmt=fmt
)
mock_make_url.assert_called_once_with(category="institution", entity_id=ror_id, size=size, fmt=fmt)
mock_make_url.reset_mock()
# Test when logo file already exists
with patch(mock_clearbit_ref, wraps=download_logo) as mock_clearbit_download:
actual_ror_id, actual_logo_path = get_institution_logo(ror_id, url, size, width, fmt, build_path)
self.assertEqual(ror_id, actual_ror_id)
self.assertEqual("logo_path", actual_logo_path)
mock_clearbit_download.assert_not_called()
mock_make_url.assert_called_once_with(category="institution", entity_id=ror_id, size=size, fmt=fmt)
def test_remove_text_between_brackets(self):
text_input = (
"Sem Gordius (Nobis: Gestarum) at ea debile quantum si dis subordinatas Civiuni Magna. Ut "
"oratione ut est enim subsolanea—aut Quasi Nemine (Ac (Hac)-y-Enim) hac dis Facer Eventu (Se Necessaria)—mus quod 400 srripta firmare, annuebat p illum quas te 068,721 verbum displicere (803,200 ea in). Cum Memento si lorem 9,200 dispositae (7,200 ut) eget te Ridiculus magnae leo Arduas Nec sed 4,800 rationibus (900 ut) louor in vel integer te Nec Evidenter, Illa, eum Porro. Sem euismod'a crimen praevenire nec neque diabolum saepe, iniunctum vel Cadentes Modi, quo modo si intendis licuit sem vindices laesionem. Quo Quantum'v hitmari sint id Malrimonii, rem sit odio nascetur iste at Sociosqu."
)
text_output = remove_text_between_brackets(text_input)
text_expected = "Sem Gordius at ea debile quantum si dis subordinatas Civiuni Magna. Ut oratione ut est enim subsolanea—aut Quasi Nemine hac dis Facer Eventu—mus quod 400 srripta firmare, annuebat p illum quas te 068,721 verbum displicere. Cum Memento si lorem 9,200 dispositae eget te Ridiculus magnae leo Arduas Nec sed 4,800 rationibus louor in vel integer te Nec Evidenter, Illa, eum Porro. Sem euismod'a crimen praevenire nec neque diabolum saepe, iniunctum vel Cadentes Modi, quo modo si intendis licuit sem vindices laesionem. Quo Quantum'v hitmari sint id Malrimonii, rem sit odio nascetur iste at Sociosqu."
self.assertEqual(text_expected, text_output)
def test_shorten_text_full_sentences(self):
nltk.download("punkt")
text_input = "Sem Gordius at ea debile quantum si dis subordinatas Civiuni Magna. Ut oratione ut est enim subsolanea—aut Quasi Nemine hac dis Facer Eventu—mus quod 400 srripta firmare, annuebat p illum quas te 068,721 verbum displicere. Cum Memento si lorem 9,200 dispositae eget te Ridiculus magnae leo Arduas Nec sed 4,800 rationibus louor in vel integer te Nec Evidenter, Illa, eum Porro. Sem euismod'a crimen praevenire nec neque diabolum saepe, iniunctum vel Cadentes Modi, quo modo si intendis licuit sem vindices laesionem. Quo Quantum'v hitmari sint id Malrimonii, rem sit odio nascetur iste at Sociosqu."
text_output = shorten_text_full_sentences(text_input, char_limit=300)
text_expected = "Sem Gordius at ea debile quantum si dis subordinatas Civiuni Magna. Ut oratione ut est enim subsolanea—aut Quasi Nemine hac dis Facer Eventu—mus quod 400 srripta firmare, annuebat p illum quas te 068,721 verbum displicere."
self.assertEqual(text_expected, text_output)
text_input = 'Non Divini te Litigiorum sem Cruciatus Potentiores ut v equestrem mi dui Totius in Modeste futuri hic M.V. Centimanos mi Sensus. Sed Poenam Coepit Leo EA 009–08, Minimum 582, dantis dis leo consultationis si EROS: "Sem Subiungam, hominem est Nobili in Dignitatis non Habitasse Abdicatione, animi fortiaue nisi dui Necessitas privatis scientiam perditionis si vigilantia mus dignissim frefquentia veritatem eius secundam, caesarianis, promotionibus, rem laboriosam ulterioribus alliciebat discursus ex dui Imperiosus."'
text_output = shorten_text_full_sentences(text_input, char_limit=300)
text_expected = "Non Divini te Litigiorum sem Cruciatus Potentiores ut v equestrem mi dui Totius in Modeste futuri hic M.V. Centimanos mi Sensus."
self.assertEqual(text_expected, text_output)
def test_get_wiki_description(self):
country = {
"uri": "https://en.wikipedia.org/w/api.php?action=query&format=json&prop=extracts&"
"titles=Panama%7CZambia%7CMalta%7CMali%7CAzerbaijan%7CSenegal%7CBotswana%7CEl_Salvador%7C"
"North_Macedonia%7CGuatemala%7CUzbekistan%7CMontenegro%7CSaint_Kitts_and_Nevis%7CBahrain%7C"
"Syria%7CYemen%7CMongolia%7CGrenada%7CAlbania%7CR%C3%A9union&redirects=1&exintro=1&explaintext=1",
"response_file_path": test_fixtures_folder("oa_web_workflow", "country_wiki_response.json"),
"descriptions_file_path": test_fixtures_folder("oa_web_workflow", "country_wiki_descriptions.json"),
}
institution = {
"uri": "https://en.wikipedia.org/w/api.php?action=query&format=json&prop=extracts&"
"titles=Pontifical_Catholic_University_of_Peru%7CSt._John%27s_University_%28New_York_City%29%7C"
"St_George%27s_Hospital%7CCalifornia_Polytechnic_State_University%7CUniversity_of_Bath%7C"
"Indian_Institute_of_Technology_Gandhinagar%7CMichigan_Technological_University%7C"
"University_of_Guam%7CUniversity_of_Maragheh%7CUniversity_of_Detroit_Mercy%7C"
"Bath_Spa_University%7CCollege_of_Charleston%7CUniversidade_Federal_de_Goi%C3%A1s%7C"
"University_of_Almer%C3%ADa%7CNational_University_of_Computer_and_Emerging_Sciences%7C"
"Sefako_Makgatho_Health_Sciences_University%7CKuwait_Institute_for_Scientific_Research%7C"
"Chinese_Academy_of_Tropical_Agricultural_Sciences%7CUniversidade_Federal_do_Pampa%7C"
"Nationwide_Children%27s_Hospital&redirects=1&exintro=1&explaintext=1",
"response_file_path": test_fixtures_folder("oa_web_workflow", "institution_wiki_response.json"),
"descriptions_file_path": test_fixtures_folder("oa_web_workflow", "institution_wiki_descriptions.json"),
}
for entity in [country, institution]:
# Download required nltk resource
nltk.download("punkt")
# Set up titles arg and expected descriptions
with open(entity["descriptions_file_path"], "r") as f:
descriptions_info = json.load(f)
titles = {}
descriptions = []
for item in descriptions_info:
id, title, description = item
titles[title] = id
descriptions.append((id, description))
with httpretty.enabled():
# Set up mocked successful response
with open(entity["response_file_path"], "rb") as f:
body = f.read()
httpretty.register_uri(httpretty.GET, entity["uri"], body=body)
# Get wiki descriptions
actual_descriptions = get_wiki_descriptions(titles)
actual_descriptions.sort(key=lambda x: x[0])
self.assertListEqual(descriptions, actual_descriptions)
with httpretty.enabled():
# Set up mocked failed response
httpretty.register_uri(httpretty.GET, entity["uri"], status=400)
with self.assertRaises(AirflowException):
# Get wiki descriptions
get_wiki_descriptions(titles)
class TestOaWebRelease(TestCase):
maxDiff = None
def setUp(self) -> None:
dt_fmt = "YYYY-MM-DD"
self.release = OaWebRelease(
dag_id="dag", project_id="project", release_date=pendulum.now(), data_bucket_name="data-bucket-name"
)
self.countries = [
{
"alpha2": "NZ",
"id": "NZL",
"name": "New Zealand",
"year": 2020,
"date": pendulum.date(2020, 12, 31).format(dt_fmt),
"url": None,
"wikipedia_url": "https://en.wikipedia.org/wiki/New_Zealand",
"country": None,
"subregion": "Australia and New Zealand",
"region": "Oceania",
"institution_types": None,
"n_citations": 121,
"n_outputs": 100,
"n_outputs_open": 48,
"n_outputs_publisher_open": 37,
# "n_outputs_publisher_open_only": 11,
# "n_outputs_both": 26,
"n_outputs_other_platform_open": 37,
"n_outputs_other_platform_open_only": 11,
# "n_outputs_closed": 52,
"n_outputs_oa_journal": 19,
"n_outputs_hybrid": 10,
"n_outputs_no_guarantees": 8,
"identifiers": None,
},
{
"alpha2": "NZ",
"id": "NZL",
"name": "New Zealand",
"year": 2021,
"date": pendulum.date(2021, 12, 31).format(dt_fmt),
"url": None,
"wikipedia_url": "https://en.wikipedia.org/wiki/New_Zealand",
"country": None,
"subregion": "Australia and New Zealand",
"region": "Oceania",
"institution_types": None,
"n_citations": 233,
"n_outputs": 100,
"n_outputs_open": 45,
"n_outputs_publisher_open": 37,
# "n_outputs_publisher_open_only": 14,
# "n_outputs_both": 24, 23?
"n_outputs_other_platform_open": 31,
"n_outputs_other_platform_open_only": 8,
# "n_outputs_closed": 55,
"n_outputs_oa_journal": 20,
"n_outputs_hybrid": 9,
"n_outputs_no_guarantees": 8,
"identifiers": None,
},
]
self.institutions = [
{
"alpha2": None,
"id": "https://ror.org/02n415q13",
"name": "Curtin University",
"year": 2020,
"date": pendulum.date(2020, 12, 31).format(dt_fmt),
"url": "https://curtin.edu.au/",
"wikipedia_url": "https://en.wikipedia.org/wiki/Curtin_University",
"country": "Australia",
"subregion": "Australia and New Zealand",
"region": "Oceania",
"institution_types": ["Education"],
"n_citations": 121,
"n_outputs": 100,
"n_outputs_open": 48,
"n_outputs_publisher_open": 37,
# "n_outputs_publisher_open_only": 11,
# "n_outputs_both": 26,
"n_outputs_other_platform_open": 37,
"n_outputs_other_platform_open_only": 11,
# "n_outputs_closed": 52,
"n_outputs_oa_journal": 19,
"n_outputs_hybrid": 10,
"n_outputs_no_guarantees": 8,
"identifiers": {
"ISNI": {"all": ["0000 0004 0375 4078"]},
"OrgRef": {"all": ["370725"]},
"Wikidata": {"all": ["Q1145497"]},
"GRID": {"preferred": "grid.1032.0"},
"FundRef": {"all": ["501100001797"]},
},
},
{
"alpha2": None,
"id": "https://ror.org/02n415q13",
"name": "Curtin University",
"year": 2021,
"date": pendulum.date(2021, 12, 31).format(dt_fmt),
"url": "https://curtin.edu.au/",
"wikipedia_url": "https://en.wikipedia.org/wiki/Curtin_University",
"country": "Australia",
"subregion": "Australia and New Zealand",
"region": "Oceania",
"institution_types": ["Education"],
"n_citations": 233,
"n_outputs": 100,
"n_outputs_open": 45,
"n_outputs_publisher_open": 37,
# "n_outputs_publisher_open_only": 14,
# "n_outputs_both": 24, 23?
"n_outputs_other_platform_open": 31,
"n_outputs_other_platform_open_only": 8,
# "n_outputs_closed": 55,
"n_outputs_oa_journal": 20,
"n_outputs_hybrid": 9,
"n_outputs_no_guarantees": 8,
"identifiers": {
"ISNI": {"all": ["0000 0004 0375 4078"]},
"OrgRef": {"all": ["370725"]},
"Wikidata": {"all": ["Q1145497"]},
"GRID": {"preferred": "grid.1032.0"},
"FundRef": {"all": ["501100001797"]},
},
},
{
"alpha2": None,
"id": "https://ror.org/12345",
"name": "Foo University",
"year": 2020,
"date": pendulum.date(2020, 12, 31).format(dt_fmt),
"url": None,
"wikipedia_url": None,
"country": "Australia",
"subregion": "Australia and New Zealand",
"region": "Oceania",
"institution_types": ["Education"],
"n_citations": 121,
"n_outputs": 100,
"n_outputs_open": 48,
"n_outputs_publisher_open": 37,
# "n_outputs_publisher_open_only": 11,
# "n_outputs_both": 26,
"n_outputs_other_platform_open": 37,
"n_outputs_other_platform_open_only": 11,
# "n_outputs_closed": 52,
"n_outputs_oa_journal": 19,
"n_outputs_hybrid": 10,
"n_outputs_no_guarantees": 8,
"identifiers": {
"ISNI": {"all": ["0000 0004 0375 4078"]},
"OrgRef": {"all": ["370725"]},
"Wikidata": {"all": ["Q1145497"]},
"GRID": {"preferred": "grid.1032.0"},
"FundRef": {"all": ["501100001797"]},
},
},
]
self.entities = [
("country", self.countries, ["NZL"]),
("institution", self.institutions, ["02n415q13"]),
]
def save_mock_data(self, category, test_data):
path = os.path.join(self.release.download_folder, f"{category}.jsonl")
with jsonlines.open(path, mode="w") as writer:
writer.write_all(test_data)
df = pd.DataFrame(test_data)
return df
@patch("academic_observatory_workflows.workflows.oa_web_workflow.Variable.get")
def test_load_data(self, mock_var_get):
category = "country"
with CliRunner().isolated_filesystem() as t:
mock_var_get.return_value = t
# Save mock data as JSONL
df = self.save_mock_data(category, self.countries)
# Load the saved data
actual_df = self.release.load_data(category)
# Compare
expected_countries = df.to_dict("records")
actual_countries = actual_df.to_dict("records")
self.assertEqual(expected_countries, actual_countries)
def test_update_df_with_percentages(self):
keys = [("hello", "n_outputs"), ("world", "n_outputs")]
df = pd.DataFrame([{"n_hello": 20, "n_world": 50, "n_outputs": 100}])
self.release.update_df_with_percentages(df, keys)
expected = {"n_hello": 20, "n_world": 50, "n_outputs": 100, "p_hello": 20, "p_world": 50}
actual = df.to_dict(orient="records")[0]
self.assertEqual(expected, actual)
@patch("academic_observatory_workflows.workflows.oa_web_workflow.Variable.get")
def test_make_index(self, mock_var_get):
with CliRunner().isolated_filesystem() as t:
mock_var_get.return_value = t
# Country
category = "country"
df = pd.DataFrame(self.countries)
df = self.release.preprocess_df(category, df)
df_country_index = self.release.make_index(category, df)
expected = [
{
"alpha2": "NZ",
"category": "country",
"id": "NZL",
"name": "New Zealand",
"wikipedia_url": "https://en.wikipedia.org/wiki/New_Zealand",
"subregion": "Australia and New Zealand",
"region": "Oceania",
"n_citations": 354,
"n_outputs": 200,
"n_outputs_open": 93,
"n_outputs_publisher_open": 74,
"n_outputs_publisher_open_only": 25,
"n_outputs_both": 49,
"n_outputs_other_platform_open": 68,
"n_outputs_other_platform_open_only": 19,
"n_outputs_closed": 107,
"n_outputs_oa_journal": 39,
"n_outputs_hybrid": 19,
"n_outputs_no_guarantees": 16,
"p_outputs_open": 46.5,
"p_outputs_publisher_open": 37.0,
"p_outputs_publisher_open_only": 13.0,
"p_outputs_both": 25.0,
"p_outputs_other_platform_open": 34.0,
"p_outputs_other_platform_open_only": 9.0,
"p_outputs_closed": 53.0,
"p_outputs_oa_journal": 53.0,
"p_outputs_hybrid": 26.0,
"p_outputs_no_guarantees": 21.0,
}
]
print("Checking country records:")
actual = df_country_index.to_dict("records")
for e, a in zip(expected, actual):
self.assertDictEqual(e, a)
# Institution
category = "institution"
df = pd.DataFrame(self.institutions)
df = self.release.preprocess_df(category, df)
df_institution_index = self.release.make_index(category, df)
expected = [
{
"category": "institution",
"id": "02n415q13",
"name": "Curtin University",
"url": "https://curtin.edu.au/",
"wikipedia_url": "https://en.wikipedia.org/wiki/Curtin_University",
"country": "Australia",
"subregion": "Australia and New Zealand",
"region": "Oceania",
"institution_types": ["Education"],
"n_citations": 354,
"n_outputs": 200,
"n_outputs_open": 93,
"n_outputs_publisher_open": 74,
"n_outputs_publisher_open_only": 25,
"n_outputs_both": 49,
"n_outputs_other_platform_open": 68,
"n_outputs_other_platform_open_only": 19,
"n_outputs_closed": 107,
"n_outputs_oa_journal": 39,
"n_outputs_hybrid": 19,
"n_outputs_no_guarantees": 16,
"p_outputs_open": 46.5,
"p_outputs_publisher_open": 37.0,
"p_outputs_publisher_open_only": 13.0,
"p_outputs_both": 25.0,
"p_outputs_other_platform_open": 34.0,
"p_outputs_other_platform_open_only": 9.0,
"p_outputs_closed": 53.0,
"p_outputs_oa_journal": 53.0,
"p_outputs_hybrid": 26.0,
"p_outputs_no_guarantees": 21.0,
"identifiers": [
{"type": "ROR", "id": "02n415q13", "url": "https://ror.org/02n415q13"},
{
"type": "ISNI",
"id": "0000 0004 0375 4078",
"url": "https://isni.org/isni/0000 0004 0375 4078",
},
{"type": "Wikidata", "id": "Q1145497", "url": "https://www.wikidata.org/wiki/Q1145497"},
{"type": "GRID", "id": "grid.1032.0", "url": "https://grid.ac/institutes/grid.1032.0"},
{
"type": "FundRef",
"id": "501100001797",
"url": "https://api.crossref.org/funders/501100001797",
},
],
}
]
print("Checking institution records:")
actual = df_institution_index.to_dict("records")
for e, a in zip(expected, actual):
self.assertDictEqual(e, a)
@patch("academic_observatory_workflows.workflows.oa_web_workflow.Variable.get")
def test_update_index_with_logos(self, mock_var_get):
with CliRunner().isolated_filesystem() as t:
mock_var_get.return_value = t
sizes = ["l", "s"]
# Country table
category = "country"
df = pd.DataFrame(self.countries)
df = self.release.preprocess_df(category, df)
df_index_table = self.release.make_index(category, df)
self.release.update_index_with_logos(category, df_index_table)
for i, row in df_index_table.iterrows():
for size in sizes:
# Check that logo key created
key = f"logo_{size}"
self.assertTrue(key in row)
# Check that correct logo path exists
item_id = row["id"]
expected_path = f"/logos/{category}/{size}/{item_id}.svg"
actual_path = row[key]
self.assertEqual(expected_path, actual_path)
# Institution table
category = "institution"
df = pd.DataFrame(self.institutions)
df = self.release.preprocess_df(category, df)
df_index_table = self.release.make_index(category, df)
with vcr.use_cassette(test_fixtures_folder("oa_web_workflow", "test_make_logos.yaml")):
self.release.update_index_with_logos(category, df_index_table)
curtin_row = df_index_table.loc["02n415q13"]
foo_row = df_index_table.loc["12345"]
for size in sizes:
# Check that logo was added to dataframe
key = f"logo_{size}"
self.assertTrue(key in curtin_row)
self.assertTrue(key in foo_row)
# Check that correct path created
item_id = curtin_row["id"]
expected_curtin_path = f"/logos/{category}/{size}/{item_id}.jpg"
expected_foo_path = "/unknown.svg"
self.assertEqual(expected_curtin_path, curtin_row[key])
self.assertEqual(expected_foo_path, foo_row[key])
# Check that downloaded logo exists
full_path = os.path.join(self.release.build_path, expected_curtin_path[1:])
self.assertTrue(os.path.isfile(full_path))
@patch("academic_observatory_workflows.workflows.oa_web_workflow.Variable.get")
def test_save_index(self, mock_var_get):
with CliRunner().isolated_filesystem() as t:
mock_var_get.return_value = t
for category, data, entity_ids in self.entities:
df = pd.DataFrame(data)
df = self.release.preprocess_df(category, df)
country_index = self.release.make_index(category, df)
self.release.update_index_with_logos(category, country_index)
self.release.save_index(category, country_index)
path = os.path.join(self.release.build_path, "data", f"{category}.json")
self.assertTrue(os.path.isfile(path))
@patch("academic_observatory_workflows.workflows.oa_web_workflow.Variable.get")
def test_make_entities(self, mock_var_get):
with CliRunner().isolated_filesystem() as t:
mock_var_get.return_value = t
# Country
category = "country"
df = pd.DataFrame(self.countries)
df = self.release.preprocess_df(category, df)
df_index_table = self.release.make_index(category, df)
entities = self.release.make_entities(df_index_table, df)
expected = [
{
"id": "NZL",
"name": "New Zealand",
"category": category,
"description": {
"license": Description.license,
"text": None,
"url": "https://en.wikipedia.org/wiki/New_Zealand",
},
"wikipedia_url": "https://en.wikipedia.org/wiki/New_Zealand",
"subregion": "Australia and New Zealand",
"region": "Oceania",
"max_year": 2021,
"min_year": 2020,
"stats": {
"n_citations": 354,
"n_outputs": 200,
"n_outputs_open": 93,
"n_outputs_publisher_open": 74,
"n_outputs_publisher_open_only": 25,
"n_outputs_both": 49,
"n_outputs_other_platform_open": 68,
"n_outputs_other_platform_open_only": 19,
"n_outputs_closed": 107,
"n_outputs_oa_journal": 39,
"n_outputs_hybrid": 19,
"n_outputs_no_guarantees": 16,
"p_outputs_open": 46.5,
"p_outputs_publisher_open": 37.0,
"p_outputs_publisher_open_only": 13.0,
"p_outputs_both": 25.0,
"p_outputs_other_platform_open": 34.0,
"p_outputs_other_platform_open_only": 9.0,
"p_outputs_closed": 53.0,
"p_outputs_oa_journal": 53.0,
"p_outputs_hybrid": 26.0,
"p_outputs_no_guarantees": 21.0,
},
"timeseries": [
{
"year": 2020,
"date": "2020-12-31",
"stats": {
"n_citations": 121,
"n_outputs": 100,
"n_outputs_open": 48,
"n_outputs_publisher_open": 37,
"n_outputs_publisher_open_only": 11,
"n_outputs_both": 26,
"n_outputs_other_platform_open": 37,
"n_outputs_other_platform_open_only": 11,
"n_outputs_closed": 52,
"n_outputs_oa_journal": 19,
"n_outputs_hybrid": 10,
"n_outputs_no_guarantees": 8,
"p_outputs_open": 48.0,
"p_outputs_publisher_open": 37.0,
"p_outputs_publisher_open_only": 11.0,
"p_outputs_both": 26.0,
"p_outputs_other_platform_open": 37.0,
"p_outputs_other_platform_open_only": 11.0,
"p_outputs_closed": 52.0,
"p_outputs_oa_journal": 51.0,
"p_outputs_hybrid": 27.0,
"p_outputs_no_guarantees": 22.0,
},
},
{
"year": 2021,
"date": "2021-12-31",
"stats": {
"n_citations": 233,
"n_outputs": 100,
"n_outputs_open": 45,
"n_outputs_publisher_open": 37,
"n_outputs_publisher_open_only": 14,
"n_outputs_both": 23,
"n_outputs_other_platform_open": 31,
"n_outputs_other_platform_open_only": 8,
"n_outputs_closed": 55,
"n_outputs_oa_journal": 20,
"n_outputs_hybrid": 9,
"n_outputs_no_guarantees": 8,
"p_outputs_open": 45.0,
"p_outputs_publisher_open": 37.0,
"p_outputs_publisher_open_only": 14.0,
"p_outputs_both": 23.0,
"p_outputs_other_platform_open": 31.0,
"p_outputs_other_platform_open_only": 8.0,
"p_outputs_closed": 55.0,
"p_outputs_oa_journal": 54.0,
"p_outputs_hybrid": 24.0,
"p_outputs_no_guarantees": 22.0,
},
},
],
}
]
for e_dict, entity in zip(expected, entities):
self.assertDictEqual(e_dict, entity.to_dict())
# Institution
category = "institution"
df = pd.DataFrame(self.institutions)
df = self.release.preprocess_df(category, df)
df_index_table = self.release.make_index(category, df)
entities = self.release.make_entities(df_index_table, df)
expected = [
{
"id": "02n415q13",
"name": "Curtin University",
"country": "Australia",
"description": {
"license": Description.license,
"text": None,
"url": "https://en.wikipedia.org/wiki/Curtin_University",
},
"category": category,
"url": "https://curtin.edu.au/",
"wikipedia_url": "https://en.wikipedia.org/wiki/Curtin_University",
"subregion": "Australia and New Zealand",
"region": "Oceania",
"institution_types": ["Education"],
"max_year": 2021,
"min_year": 2020,
"identifiers": [
{"type": "ROR", "id": "02n415q13", "url": "https://ror.org/02n415q13"},
{"type": "ISNI", "id": "0000 0004 0375 4078", "url": "https://isni.org/isni/0000 0004 0375 4078"},
{"type": "Wikidata", "id": "Q1145497", "url": "https://www.wikidata.org/wiki/Q1145497"},
{"type": "GRID", "id": "grid.1032.0", "url": "https://grid.ac/institutes/grid.1032.0"},
{"type": "FundRef", "id": "501100001797", "url": "https://api.crossref.org/funders/501100001797"},
],
"stats": {
"n_citations": 354,
"n_outputs": 200,
"n_outputs_open": 93,
"n_outputs_publisher_open": 74,
"n_outputs_publisher_open_only": 25,
"n_outputs_both": 49,
"n_outputs_other_platform_open": 68,
"n_outputs_other_platform_open_only": 19,
"n_outputs_closed": 107,
"n_outputs_oa_journal": 39,
"n_outputs_hybrid": 19,
"n_outputs_no_guarantees": 16,
"p_outputs_open": 46.5,
"p_outputs_publisher_open": 37.0,
"p_outputs_publisher_open_only": 13.0,
"p_outputs_both": 25.0,
"p_outputs_other_platform_open": 34.0,
"p_outputs_other_platform_open_only": 9.0,
"p_outputs_closed": 53.0,
"p_outputs_oa_journal": 53.0,
"p_outputs_hybrid": 26.0,
"p_outputs_no_guarantees": 21.0,
},
"timeseries": [
{
"year": 2020,
"date": "2020-12-31",
"stats": {
"n_citations": 121,
"n_outputs": 100,
"n_outputs_open": 48,
"n_outputs_publisher_open": 37,
"n_outputs_publisher_open_only": 11,
"n_outputs_both": 26,
"n_outputs_other_platform_open": 37,
"n_outputs_other_platform_open_only": 11,
"n_outputs_closed": 52,
"n_outputs_oa_journal": 19,
"n_outputs_hybrid": 10,
"n_outputs_no_guarantees": 8,
"p_outputs_open": 48.0,
"p_outputs_publisher_open": 37.0,
"p_outputs_publisher_open_only": 11.0,
"p_outputs_both": 26.0,
"p_outputs_other_platform_open": 37.0,
"p_outputs_other_platform_open_only": 11.0,
"p_outputs_closed": 52.0,
"p_outputs_oa_journal": 51.0,
"p_outputs_hybrid": 27.0,
"p_outputs_no_guarantees": 22.0,
},
},
{
"year": 2021,
"date": "2021-12-31",
"stats": {
"n_citations": 233,
"n_outputs": 100,
"n_outputs_open": 45,
"n_outputs_publisher_open": 37,
"n_outputs_publisher_open_only": 14,
"n_outputs_both": 23,
"n_outputs_other_platform_open": 31,
"n_outputs_other_platform_open_only": 8,
"n_outputs_closed": 55,
"n_outputs_oa_journal": 20,
"n_outputs_hybrid": 9,
"n_outputs_no_guarantees": 8,
"p_outputs_open": 45.0,
"p_outputs_publisher_open": 37.0,
"p_outputs_publisher_open_only": 14.0,
"p_outputs_both": 23.0,
"p_outputs_other_platform_open": 31.0,
"p_outputs_other_platform_open_only": 8.0,
"p_outputs_closed": 55.0,
"p_outputs_oa_journal": 54.0,
"p_outputs_hybrid": 24.0,
"p_outputs_no_guarantees": 22.0,
},
},
],
}
]
for e_dict, entity in zip(expected, entities):
self.assertDictEqual(e_dict, entity.to_dict())
@patch("academic_observatory_workflows.workflows.oa_web_workflow.Variable.get")
def test_save_entities(self, mock_var_get):
with CliRunner().isolated_filesystem() as t:
mock_var_get.return_value = t
for category, data, entity_ids in self.entities:
# Read data
df = pd.DataFrame(data)
df = self.release.preprocess_df(category, df)
# Save entities
df_index_table = self.release.make_index(category, df)
entities = self.release.make_entities(df_index_table, df)
self.release.save_entities(category, entities)
# Check that entity json files are saved
for entity_id in entity_ids:
path = os.path.join(self.release.build_path, "data", category, f"{entity_id}.json")
print(f"Assert exists: {path}")
self.assertTrue(os.path.isfile(path))
def test_make_auto_complete(self):
category = "country"
expected = [
{"id": "NZL", "name": "New Zealand", "logo_s": "/logos/country/NZL.svg"},
{"id": "AUS", "name": "Australia", "logo_s": "/logos/country/AUS.svg"},
{"id": "USA", "name": "United States", "logo_s": "/logos/country/USA.svg"},
]
df = pd.DataFrame(expected)
records = self.release.make_auto_complete(df, category)
for e in expected:
e["category"] = category
self.assertEqual(expected, records)
@patch("academic_observatory_workflows.workflows.oa_web_workflow.Variable.get")
def test_save_autocomplete(self, mock_var_get):
with CliRunner().isolated_filesystem() as t:
mock_var_get.return_value = t
category = "country"
expected = [
{"id": "NZL", "name": "New Zealand", "logo_s": "/logos/country/NZL.svg"},
{"id": "AUS", "name": "Australia", "logo_s": "/logos/country/AUS.svg"},
{"id": "USA", "name": "United States", "logo_s": "/logos/country/USA.svg"},
]
df = pd.DataFrame(expected)
records = self.release.make_auto_complete(df, category)
self.release.save_autocomplete(records)
path = os.path.join(self.release.build_path, "data", "autocomplete.json")
self.assertTrue(os.path.isfile(path))
class TestOaWebWorkflow(ObservatoryTestCase):
def setUp(self) -> None:
"""TestOaWebWorkflow checks that the workflow functions correctly, i.e. outputs the correct files, but doesn't
check that the calculations are correct (data correctness is tested in TestOaWebRelease)."""
self.project_id = os.getenv("TEST_GCP_PROJECT_ID")
self.data_location = os.getenv("TEST_GCP_DATA_LOCATION")
self.oa_web_fixtures = "oa_web_workflow"
def test_dag_structure(self):
"""Test that the DAG has the correct structure.
:return: None
"""
env = ObservatoryEnvironment(enable_api=False)
with env.create():
dag = OaWebWorkflow().make_dag()
self.assert_dag_structure(
{
"doi_sensor": ["check_dependencies"],
"check_dependencies": ["query"],
"query": ["download"],
"download": ["transform"],
"transform": ["upload_dataset"],
"upload_dataset": ["repository_dispatch"],
"repository_dispatch": ["cleanup"],
"cleanup": [],
},
dag,
)
def test_dag_load(self):
"""Test that the DAG can be loaded from a DAG bag.
:return: None
"""
env = ObservatoryEnvironment(project_id=self.project_id, data_location=self.data_location, enable_api=False)
with env.create():
dag_file = os.path.join(module_file_path("academic_observatory_workflows.dags"), "oa_web_workflow.py")
self.assert_dag_load("oa_web_workflow", dag_file)
def setup_tables(
self, dataset_id_all: str, dataset_id_settings: str, bucket_name: str, release_date: pendulum.DateTime
):
ror = load_jsonl(test_fixtures_folder("doi", "ror.jsonl"))
country = load_jsonl(test_fixtures_folder(self.oa_web_fixtures, "country.jsonl"))
institution = load_jsonl(test_fixtures_folder(self.oa_web_fixtures, "institution.jsonl"))
settings_country = load_jsonl(test_fixtures_folder("doi", "country.jsonl"))
analysis_schema_path = schema_folder()
oa_web_schema_path = test_fixtures_folder(self.oa_web_fixtures, "schema")
with CliRunner().isolated_filesystem() as t:
tables = [
Table("ror", True, dataset_id_all, ror, "ror", analysis_schema_path),
Table("country", True, dataset_id_all, country, "country", oa_web_schema_path),
Table("institution", True, dataset_id_all, institution, "institution", oa_web_schema_path),
Table(
"country",
False,
dataset_id_settings,
settings_country,
"country",
analysis_schema_path,
),
]
bq_load_tables(
tables=tables, bucket_name=bucket_name, release_date=release_date, data_location=self.data_location
)
@patch("academic_observatory_workflows.workflows.oa_web_workflow.trigger_repository_dispatch")
def test_telescope(self, mock_trigger_repository_dispatch):
"""Test the telescope end to end.
:return: None.
"""
execution_date = pendulum.datetime(2021, 11, 13)
env = ObservatoryEnvironment(project_id=self.project_id, data_location=self.data_location, enable_api=False)
dataset_id = env.add_dataset("data")
dataset_id_settings = env.add_dataset("settings")
data_bucket = env.add_bucket()
github_token = "github-token"
with env.create() as t:
# Add data bucket variable
env.add_variable(Variable(key=OaWebWorkflow.DATA_BUCKET, val=data_bucket))
# Add Github token connection
env.add_connection(Connection(conn_id=OaWebWorkflow.GITHUB_TOKEN_CONN, uri=f"http://:{github_token}@"))
# Run fake DOI workflow
dag = make_dummy_dag("doi", execution_date)
with env.create_dag_run(dag, execution_date):
                # Running all of a DAG's tasks sets the DAG to finished
ti = env.run_task("dummy_task")
self.assertEqual(State.SUCCESS, ti.state)
# Upload fake data to BigQuery
self.setup_tables(
dataset_id_all=dataset_id,
dataset_id_settings=dataset_id_settings,
bucket_name=env.download_bucket,
release_date=execution_date,
)
# Run workflow
workflow = OaWebWorkflow(
agg_dataset_id=dataset_id, ror_dataset_id=dataset_id, settings_dataset_id=dataset_id_settings
)
dag = workflow.make_dag()
with env.create_dag_run(dag, execution_date):
# DOI Sensor
ti = env.run_task("doi_sensor")
self.assertEqual(State.SUCCESS, ti.state)
# Check dependencies
ti = env.run_task(workflow.check_dependencies.__name__)
self.assertEqual(State.SUCCESS, ti.state)
# Run query
ti = env.run_task(workflow.query.__name__)
self.assertEqual(State.SUCCESS, ti.state)
# Download data
ti = env.run_task(workflow.download.__name__)
self.assertEqual(State.SUCCESS, ti.state)
base_folder = os.path.join(
t, "data", "telescopes", "download", "oa_web_workflow", "oa_web_workflow_2021_11_13"
)
expected_file_names = ["country.jsonl", "institution.jsonl"]
for file_name in expected_file_names:
path = os.path.join(base_folder, file_name)
self.assertTrue(os.path.isfile(path))
# Transform data
ti = env.run_task(workflow.transform.__name__)
self.assertEqual(State.SUCCESS, ti.state)
base_folder = os.path.join(
t, "data", "telescopes", "transform", "oa_web_workflow", "oa_web_workflow_2021_11_13"
)
build_folder = os.path.join(base_folder, "build")
expected_files = make_expected_build_files(build_folder)
print("Checking expected transformed files")
for file in expected_files:
print(f"\t{file}")
self.assertTrue(os.path.isfile(file))
# Check that zip file exists
latest_file = os.path.join(base_folder, "latest.zip")
print(f"\t{latest_file}")
self.assertTrue(os.path.isfile(latest_file))
# Upload data to bucket
ti = env.run_task(workflow.upload_dataset.__name__)
self.assertEqual(State.SUCCESS, ti.state)
blob_name = f"{workflow.version}/latest.zip"
self.assert_blob_exists(data_bucket, blob_name)
# Trigger repository dispatch
ti = env.run_task(workflow.repository_dispatch.__name__)
self.assertEqual(State.SUCCESS, ti.state)
                mock_trigger_repository_dispatch.assert_any_call(github_token, "data-update/develop")
                mock_trigger_repository_dispatch.assert_any_call(github_token, "data-update/staging")
                mock_trigger_repository_dispatch.assert_any_call(github_token, "data-update/production")
# Test that all telescope data deleted
download_folder, extract_folder, transform_folder = (
os.path.join(t, "data", "telescopes", "download", "oa_web_workflow", "oa_web_workflow_2021_11_13"),
os.path.join(t, "data", "telescopes", "extract", "oa_web_workflow", "oa_web_workflow_2021_11_13"),
os.path.join(t, "data", "telescopes", "transform", "oa_web_workflow", "oa_web_workflow_2021_11_13"),
)
env.run_task(workflow.cleanup.__name__)
self.assert_cleanup(download_folder, extract_folder, transform_folder)
def make_expected_build_files(base_path: str) -> List[str]:
countries = ["AUS", "NZL"]
institutions = ["03b94tp07", "02n415q13"] # Auckland, Curtin
categories = ["country"] * len(countries) + ["institution"] * len(institutions)
entity_ids = countries + institutions
expected = []
# Add base data files
data_path = os.path.join(base_path, "data")
file_names = [
"stats.json",
"autocomplete.json",
"autocomplete.parquet",
"country.json",
"country.parquet",
"institution.json",
"institution.parquet",
]
for file_name in file_names:
expected.append(os.path.join(data_path, file_name))
# Add country and institution specific data files
for category, entity_id in zip(categories, entity_ids):
path = os.path.join(data_path, category, f"{entity_id}.json")
expected.append(path)
# Add logos
for category, entity_id in zip(categories, entity_ids):
file_name = f"{entity_id}.svg"
if category == "institution":
file_name = f"{entity_id}.jpg"
for size in ["l", "s"]:
path = os.path.join(base_path, "logos", category, size, file_name)
expected.append(path)
return expected
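# A quick illustration of the category/entity pairing used above:
# >>> categories = ["country"] * 2 + ["institution"] * 2
# >>> entity_ids = ["AUS", "NZL", "03b94tp07", "02n415q13"]
# >>> list(zip(categories, entity_ids))
# [('country', 'AUS'), ('country', 'NZL'), ('institution', '03b94tp07'), ('institution', '02n415q13')]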
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,402
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/open_citations_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: James Diprose, Tuan Chien
import os
import zipfile
from typing import Dict, List
import pendulum
from academic_observatory_workflows.config import schema_folder as default_schema_folder
from airflow.models import Variable
from airflow.models.taskinstance import TaskInstance
from google.cloud import bigquery
from google.cloud.bigquery import SourceFormat
from observatory.platform.utils.airflow_utils import AirflowVars
from observatory.platform.utils.gc_utils import (
bigquery_sharded_table_id,
bigquery_table_exists,
)
from observatory.platform.utils.http_download import DownloadInfo, download_files
from observatory.platform.utils.url_utils import (
get_http_response_json,
get_observatory_http_header,
)
from observatory.platform.workflows.snapshot_telescope import (
SnapshotRelease,
SnapshotTelescope,
)
class OpenCitationsRelease(SnapshotRelease):
"""Open Citations COCI dataset release info."""
def __init__(
self,
dag_id: str,
release_date: pendulum.DateTime,
files: List[DownloadInfo],
):
"""Create a OpenCitationsRelease instance.
:param dag_id: the DAG id.
:param release_date: the date of the release.
:param files: List of files to download.
"""
super().__init__(dag_id, release_date)
self.files = files
def download(self):
"""Download the release."""
headers = get_observatory_http_header(package_name="academic_observatory_workflows")
download_files(download_list=self.files, headers=headers, prefix_dir=self.download_folder)
def extract(self):
"""Extract the release to the transform folder."""
for file in self.download_files:
with zipfile.ZipFile(file, "r") as zf:
zf.extractall(self.transform_folder)
        # Rename the files so that the schema-finding mechanism can match them
for file in self.transform_files:
filename = os.path.basename(file)
dir = os.path.dirname(file)
new_name = os.path.join(dir, f"open_citations.{filename}")
os.rename(file, new_name)
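# Illustrative example of the rename above (the extracted file name is an assumption):
# an extracted "2018-11-12-coci.csv" becomes "open_citations.2018-11-12-coci.csv", so that
# the schema lookup keyed on the "open_citations" prefix can match the file.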
class OpenCitationsTelescope(SnapshotTelescope):
"""A telescope that harvests the Open Citations COCI CSV dataset . http://opencitations.net/index/coci"""
DAG_ID = "open_citations"
VERSION_URL = "https://api.figshare.com/v2/articles/6741422/versions"
def __init__(
self,
dag_id: str = DAG_ID,
start_date: pendulum.DateTime = pendulum.datetime(2018, 7, 1),
schedule_interval: str = "@weekly",
dataset_id: str = DAG_ID,
schema_folder: str = default_schema_folder(),
queue: str = "remote_queue",
dataset_description: str = "The OpenCitations Indexes: http://opencitations.net/",
table_descriptions: Dict = None,
catchup: bool = False,
airflow_vars: List = None,
):
"""
:param dag_id: the id of the DAG.
:param start_date: the start date of the DAG.
:param schedule_interval: the schedule interval of the DAG.
:param dataset_id: the BigQuery dataset id.
:param schema_folder: the SQL schema path.
:param queue: Queue to run tasks on.
:param dataset_description: description for the BigQuery dataset.
:param table_descriptions: a dictionary with table ids and corresponding table descriptions.
:param catchup: whether to catchup the DAG or not.
        :param airflow_vars: list of Airflow variable keys; each variable is checked for existence in Airflow.
"""
load_bigquery_table_kwargs = {
"csv_field_delimiter": ",",
"csv_quote_character": '"',
"csv_skip_leading_rows": 1,
"csv_allow_quoted_newlines": True,
"write_disposition": bigquery.WriteDisposition.WRITE_APPEND,
"ignore_unknown_values": True
}
if table_descriptions is None:
table_descriptions = {dag_id: "The Open Citations COCI CSV table."}
if airflow_vars is None:
airflow_vars = [
AirflowVars.DATA_PATH,
AirflowVars.PROJECT_ID,
AirflowVars.DATA_LOCATION,
AirflowVars.DOWNLOAD_BUCKET,
AirflowVars.TRANSFORM_BUCKET,
]
super().__init__(
dag_id,
start_date,
schedule_interval,
dataset_id,
schema_folder,
queue=queue,
source_format=SourceFormat.CSV,
load_bigquery_table_kwargs=load_bigquery_table_kwargs,
dataset_description=dataset_description,
table_descriptions=table_descriptions,
catchup=catchup,
airflow_vars=airflow_vars,
)
self.add_setup_task(self.check_dependencies)
self.add_setup_task(self.get_release_info)
self.add_task(self.download)
self.add_task(self.upload_downloaded)
self.add_task(self.extract)
self.add_task(self.upload_transformed)
self.add_task(self.bq_load)
self.add_task(self.cleanup)
def _list_releases(
self,
*,
start_date: pendulum.DateTime,
end_date: pendulum.DateTime,
) -> List[Dict[str, str]]:
"""List available releases from figshare between the start and end date (inclusive).
:param start_date: Start date.
:param end_date: End date.
:return: List of dictionaries containing release info.
"""
versions = get_http_response_json(OpenCitationsTelescope.VERSION_URL)
releases = []
for version in versions:
article = get_http_response_json(version["url"])
release_date = pendulum.parse(article["created_date"])
if (start_date is None or start_date <= release_date) and (end_date is None or release_date <= end_date):
releases.append({"date": release_date.format("YYYYMMDD"), "files": article["files"]})
return releases
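    # Illustrative shape of a release dict returned above (values are assumptions):
    # {"date": "20180713", "files": [{"name": "coci.zip", "download_url": "https://...",
    #  "computed_md5": "..."}]}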
def _process_release(self, release: Dict[str, str]) -> bool:
"""Indicates whether we should process this release. If there are no files, or if the BigQuery table exists, we will not process this release.
:param release: Release to consider.
:return: Whether to process the release.
"""
if len(release["files"]) == 0:
return False
project_id = Variable.get(AirflowVars.PROJECT_ID)
table_id = bigquery_sharded_table_id(self.dag_id, pendulum.parse(release["date"]))
if bigquery_table_exists(project_id, self.dataset_id, table_id):
return False
return True
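    # Illustrative example (assuming bigquery_sharded_table_id appends the release date as
    # YYYYMMDD): a release dated 2018-07-13 for the "open_citations" DAG is checked against
    # the sharded table id "open_citations20180713".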
def get_release_info(self, **kwargs):
"""Calculate which releases require processing, and push the info to an XCom.
:param kwargs: the context passed from the BranchPythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html
for a list of the keyword arguments that are passed to this argument.
:return: whether to keep executing the DAG.
"""
start_date = kwargs["execution_date"]
end_date = kwargs["next_execution_date"].subtract(microseconds=1)
releases = self._list_releases(start_date=start_date, end_date=end_date)
filtered_releases = list(filter(self._process_release, releases))
continue_dag = len(filtered_releases) > 0
if continue_dag:
ti = kwargs["ti"]
ti.xcom_push(OpenCitationsTelescope.RELEASE_INFO, filtered_releases, start_date)
return continue_dag
def make_release(self, **kwargs) -> List[OpenCitationsRelease]:
"""Make release instances. The release is passed as an argument to the function (TelescopeFunction) that is
called in 'task_callable'.
:param kwargs: the context passed from the PythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html for a list of the keyword arguments that are
passed to this argument.
:return: a list of OpenCitationsRelease instances.
"""
ti: TaskInstance = kwargs["ti"]
releases_dict = ti.xcom_pull(
key=OpenCitationsTelescope.RELEASE_INFO, task_ids=self.get_release_info.__name__, include_prior_dates=False
)
releases = []
for rel_info in releases_dict:
files = []
for file in rel_info["files"]:
info = DownloadInfo(
url=file["download_url"], filename=file["name"], hash=file["computed_md5"], hash_algorithm="md5"
)
files.append(info)
release = OpenCitationsRelease(self.dag_id, release_date=pendulum.parse(rel_info["date"]), files=files)
releases.append(release)
return releases
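# Minimal usage sketch (illustrative; requires network access to the figshare API):
# telescope = OpenCitationsTelescope()
# releases = telescope._list_releases(
#     start_date=pendulum.datetime(2018, 7, 1), end_date=pendulum.datetime(2018, 7, 31)
# )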
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,403
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Aniek Roelofs
import os
from datetime import timedelta
from unittest.mock import patch
import pendulum
import vcr
from academic_observatory_workflows.config import test_fixtures_folder
from academic_observatory_workflows.workflows.crossref_events_telescope import (
CrossrefEventsRelease,
CrossrefEventsTelescope,
parse_event_url,
transform_batch,
)
from airflow.exceptions import AirflowSkipException
from click.testing import CliRunner
from google.cloud import bigquery
from observatory.platform.utils.test_utils import (
ObservatoryEnvironment,
ObservatoryTestCase,
module_file_path,
)
from observatory.platform.utils.url_utils import get_user_agent
from observatory.platform.utils.workflow_utils import blob_name, create_date_table_id
class TestCrossrefEventsTelescope(ObservatoryTestCase):
"""Tests for the Crossref Events telescope"""
def __init__(self, *args, **kwargs):
"""Constructor which sets up variables used by tests.
:param args: arguments.
:param kwargs: keyword arguments.
"""
super(TestCrossrefEventsTelescope, self).__init__(*args, **kwargs)
self.project_id = os.getenv("TEST_GCP_PROJECT_ID")
self.data_location = os.getenv("TEST_GCP_DATA_LOCATION")
self.first_execution_date = pendulum.datetime(year=2018, month=5, day=14)
self.first_cassette = test_fixtures_folder("crossref_events", "crossref_events1.yaml")
self.second_execution_date = pendulum.datetime(year=2018, month=5, day=20)
self.second_cassette = test_fixtures_folder("crossref_events", "crossref_events2.yaml")
        # Additional test setup
self.start_date = pendulum.datetime(2021, 5, 6)
self.end_date = pendulum.datetime(2021, 5, 12)
self.release = CrossrefEventsRelease(
CrossrefEventsTelescope.DAG_ID,
self.start_date,
self.end_date,
False,
"mailto",
max_threads=21,
max_processes=1,
)
def test_dag_structure(self):
"""Test that the Crossref Events DAG has the correct structure.
:return: None
"""
dag = CrossrefEventsTelescope().make_dag()
self.assert_dag_structure(
{
"check_dependencies": ["download"],
"download": ["upload_downloaded"],
"upload_downloaded": ["transform"],
"transform": ["upload_transformed"],
"upload_transformed": ["bq_load_partition"],
"bq_load_partition": ["bq_delete_old"],
"bq_delete_old": ["bq_append_new"],
"bq_append_new": ["cleanup"],
"cleanup": [],
},
dag,
)
def test_dag_load(self):
"""Test that the Crossref Events DAG can be loaded from a DAG bag.
:return: None
"""
with ObservatoryEnvironment().create():
dag_file = os.path.join(
module_file_path("academic_observatory_workflows.dags"), "crossref_events_telescope.py"
)
self.assert_dag_load("crossref_events", dag_file)
def test_telescope(self):
"""Test the Crossref Events telescope end to end.
:return: None.
"""
# Setup Observatory environment
env = ObservatoryEnvironment(self.project_id, self.data_location)
dataset_id = env.add_dataset()
# Setup Telescope
telescope = CrossrefEventsTelescope(dataset_id=dataset_id)
telescope.max_threads = 1
telescope.max_processes = 1
dag = telescope.make_dag()
# Create the Observatory environment and run tests
with env.create(task_logging=True):
# first run
with env.create_dag_run(dag, self.first_execution_date) as dag_run:
# Test that all dependencies are specified: no error should be thrown
env.run_task(telescope.check_dependencies.__name__)
start_date, end_date, first_release = telescope.get_release_info(
execution_date=self.first_execution_date,
dag=dag,
dag_run=dag_run,
next_execution_date=pendulum.datetime(2018, 5, 20),
)
self.assertEqual(start_date, dag.default_args["start_date"])
self.assertEqual(end_date, pendulum.today("UTC") - timedelta(days=1))
self.assertTrue(first_release)
# use release info for other tasks
release = CrossrefEventsRelease(
telescope.dag_id,
start_date,
end_date,
first_release,
telescope.mailto,
telescope.max_threads,
telescope.max_processes,
)
# Test download task
with vcr.use_cassette(self.first_cassette):
env.run_task(telescope.download.__name__)
self.assertEqual(6, len(release.download_files))
for file in release.download_files:
if "2018-05-14" in file:
download_hash = "9a18d1002a5395de3cbcd9c61fb28c83"
else:
download_hash = "ad9cf98aab232eee7edf12375f016770"
self.assert_file_integrity(file, download_hash, "md5")
# Test that files uploaded
env.run_task(telescope.upload_downloaded.__name__)
for file in release.download_files:
self.assert_blob_integrity(env.download_bucket, blob_name(file), file)
# Test that files transformed
env.run_task(telescope.transform.__name__)
self.assertEqual(6, len(release.transform_files))
for file in release.transform_files:
if "2018-05-14" in file:
transform_hash = "3e953d2424fe37739790bbc5c2410824"
else:
transform_hash = "d5e0a887656d1786a9e7c4dbdbf77ba1"
self.assert_file_integrity(file, transform_hash, "md5")
# Test that transformed files uploaded
env.run_task(telescope.upload_transformed.__name__)
for file in release.transform_files:
self.assert_blob_integrity(env.transform_bucket, blob_name(file), file)
# Test that load partition task is skipped for the first release
ti = env.run_task(telescope.bq_load_partition.__name__)
self.assertEqual(ti.state, "skipped")
# Test delete old task is skipped for the first release
with patch("observatory.platform.utils.gc_utils.bq_query_bytes_daily_limit_check"):
ti = env.run_task(telescope.bq_delete_old.__name__)
self.assertEqual(ti.state, "skipped")
# Test append new creates table
env.run_task(telescope.bq_append_new.__name__)
main_table_id, partition_table_id = release.dag_id, f"{release.dag_id}_partitions"
table_id = f"{self.project_id}.{telescope.dataset_id}.{main_table_id}"
expected_rows = 68
self.assert_table_integrity(table_id, expected_rows)
# Test that all telescope data deleted
download_folder, extract_folder, transform_folder = (
release.download_folder,
release.extract_folder,
release.transform_folder,
)
env.run_task(telescope.cleanup.__name__)
self.assert_cleanup(download_folder, extract_folder, transform_folder)
# second run
with env.create_dag_run(dag, self.second_execution_date) as dag_run:
# Test that all dependencies are specified: no error should be thrown
env.run_task(telescope.check_dependencies.__name__)
start_date, end_date, first_release = telescope.get_release_info(
execution_date=self.second_execution_date,
dag=dag,
dag_run=dag_run,
next_execution_date=pendulum.datetime(2018, 5, 27),
)
self.assertEqual(release.end_date + timedelta(days=1), start_date)
self.assertEqual(pendulum.today("UTC") - timedelta(days=1), end_date)
self.assertFalse(first_release)
# use release info for other tasks
release = CrossrefEventsRelease(
telescope.dag_id,
start_date,
end_date,
first_release,
telescope.mailto,
telescope.max_threads,
telescope.max_processes,
)
# Test download task
with vcr.use_cassette(self.second_cassette):
env.run_task(telescope.download.__name__)
self.assertEqual(20, len(release.download_files))
for file in release.download_files:
if "edited" in file:
download_hash = "b1c8c856c29365efeeef8a7c1ccba7da"
elif "deleted" in file:
download_hash = "8d52425faa9192e8748865b8c53c2b3d"
else:
download_hash = "01aa964587e6296df5697d13a122e8ce"
self.assert_file_integrity(file, download_hash, "md5")
# Test that file uploaded
env.run_task(telescope.upload_downloaded.__name__)
for file in release.download_files:
self.assert_blob_integrity(env.download_bucket, blob_name(file), file)
# Test that file transformed
env.run_task(telescope.transform.__name__)
self.assertEqual(20, len(release.transform_files))
for file in release.transform_files:
if "edited" in file:
transform_hash = "902437a731a4aed529f4e0d176d2222b"
elif "deleted" in file:
transform_hash = "10b6d1911aaaad14204d867884722da4"
else:
transform_hash = "513d71d356d8356d1365d1dd25b1f71a"
self.assert_file_integrity(file, transform_hash, "md5")
# Test that transformed file uploaded
env.run_task(telescope.upload_transformed.__name__)
for file in release.transform_files:
self.assert_blob_integrity(env.transform_bucket, blob_name(file), file)
# Test that load partition task creates partition
env.run_task(telescope.bq_load_partition.__name__)
main_table_id, partition_table_id = release.dag_id, f"{release.dag_id}_partitions"
table_id = create_date_table_id(partition_table_id, release.end_date, bigquery.TimePartitioningType.DAY)
table_id = f"{self.project_id}.{telescope.dataset_id}.{table_id}"
expected_rows = 82
self.assert_table_integrity(table_id, expected_rows)
# Test task deleted rows from main table
with patch("observatory.platform.utils.gc_utils.bq_query_bytes_daily_limit_check"):
env.run_task(telescope.bq_delete_old.__name__)
table_id = f"{self.project_id}.{telescope.dataset_id}.{main_table_id}"
expected_rows = 60
self.assert_table_integrity(table_id, expected_rows)
# Test append new adds rows to table
env.run_task(telescope.bq_append_new.__name__)
table_id = f"{self.project_id}.{telescope.dataset_id}.{main_table_id}"
expected_rows = 142
self.assert_table_integrity(table_id, expected_rows)
# Test that all telescope data deleted
download_folder, extract_folder, transform_folder = (
release.download_folder,
release.extract_folder,
release.transform_folder,
)
env.run_task(telescope.cleanup.__name__)
self.assert_cleanup(download_folder, extract_folder, transform_folder)
def test_urls(self):
"""Test the urls property of release
:return: None.
"""
events_url = (
"https://api.eventdata.crossref.org/v1/events?mailto={mail_to}"
"&from-collected-date={start_date}&until-collected-date={end_date}&rows=1000"
)
edited_url = (
"https://api.eventdata.crossref.org/v1/events/edited?"
"mailto={mail_to}&from-updated-date={start_date}"
"&until-updated-date={end_date}&rows=1000"
)
deleted_url = (
"https://api.eventdata.crossref.org/v1/events/deleted?"
"mailto={mail_to}&from-updated-date={start_date}"
"&until-updated-date={end_date}&rows=1000"
)
self.release.first_release = True
urls = self.release.urls
self.assertEqual(7, len(urls))
for url in urls:
event_type, date = parse_event_url(url)
self.assertEqual(event_type, "events")
expected_url = events_url.format(mail_to=self.release.mailto, start_date=date, end_date=date)
self.assertEqual(expected_url, url)
self.release.first_release = False
urls = self.release.urls
self.assertEqual(21, len(urls))
for url in urls:
event_type, date = parse_event_url(url)
if event_type == "events":
expected_url = events_url.format(mail_to=self.release.mailto, start_date=date, end_date=date)
elif event_type == "edited":
expected_url = edited_url.format(mail_to=self.release.mailto, start_date=date, end_date=date)
else:
expected_url = deleted_url.format(mail_to=self.release.mailto, start_date=date, end_date=date)
self.assertEqual(expected_url, url)
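    # Illustrative expansion of the events template above, using this test's start date
    # (the mailto value is an assumption):
    # https://api.eventdata.crossref.org/v1/events?mailto=someone@example.org
    # &from-collected-date=2021-05-06&until-collected-date=2021-05-06&rows=1000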
@patch.object(CrossrefEventsRelease, "download_batch")
@patch("observatory.platform.utils.workflow_utils.Variable.get")
def test_download(self, mock_variable_get, mock_download_batch):
"""Test the download method of the release in parallel mode
:return: None.
"""
mock_variable_get.return_value = "data"
with CliRunner().isolated_filesystem():
# Test download without any events returned
with self.assertRaises(AirflowSkipException):
self.release.download()
# Test download with events returned
mock_download_batch.reset_mock()
events_path = os.path.join(self.release.download_folder, "events.jsonl")
with open(events_path, "w") as f:
f.write("[{'test': 'test'}]\n")
self.release.download()
self.assertEqual(len(self.release.urls), mock_download_batch.call_count)
@patch("academic_observatory_workflows.workflows.crossref_events_telescope.download_events")
@patch("observatory.platform.utils.workflow_utils.Variable.get")
def test_download_batch(self, mock_variable_get, mock_download_events):
"""Test download_batch function
:return: None.
"""
mock_variable_get.return_value = os.path.join(os.getcwd(), "data")
self.release.first_release = True
batch_number = 0
url = self.release.urls[batch_number]
headers = {"User-Agent": get_user_agent(package_name="academic_observatory_workflows")}
with CliRunner().isolated_filesystem():
events_path = self.release.batch_path(url)
cursor_path = self.release.batch_path(url, cursor=True)
# Test with existing cursor path
with open(cursor_path, "w") as f:
f.write("cursor")
mock_download_events.return_value = (None, 10, 10)
self.release.download_batch(batch_number, url)
self.assertFalse(os.path.exists(cursor_path))
mock_download_events.assert_called_once_with(url, headers, events_path, cursor_path)
# Test with no existing previous files
mock_download_events.reset_mock()
mock_download_events.return_value = (None, 10, 10)
self.release.download_batch(batch_number, url)
mock_download_events.assert_called_once_with(url, headers, events_path, cursor_path)
            # Test with events path and no cursor path, meaning a previous attempt succeeded
mock_download_events.reset_mock()
with open(events_path, "w") as f:
f.write("events")
self.release.download_batch(batch_number, url)
mock_download_events.assert_not_called()
os.remove(events_path)
@patch("observatory.platform.utils.workflow_utils.Variable.get")
def test_transform_batch(self, mock_variable_get):
"""Test the transform_batch method of the release
:return: None.
"""
with CliRunner().isolated_filesystem() as t:
mock_variable_get.return_value = os.path.join(t, "data")
# Use release info so that we can download the right data
release = CrossrefEventsRelease(
"crossref_events",
pendulum.datetime(2018, 5, 14),
pendulum.datetime(2018, 5, 19),
True,
"aniek.roelofs@curtin.edu.au",
max_threads=1,
max_processes=1,
)
# Download files
with vcr.use_cassette(self.first_cassette):
release.download()
# Transform batch
for file_path in release.download_files:
transform_batch(file_path, release.transform_folder)
# Assert all transformed
self.assertEqual(len(release.download_files), len(release.transform_files))
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,404
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/ror_telescope.py
|
# Copyright 2021 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Aniek Roelofs
from __future__ import annotations
import json
import logging
import os
import shutil
from typing import List, Dict
from zipfile import BadZipFile, ZipFile
import pendulum
import requests
from airflow.exceptions import AirflowException
from airflow.models.taskinstance import TaskInstance
from google.cloud.bigquery import SourceFormat
from observatory.platform.utils.airflow_utils import AirflowVars
from observatory.platform.utils.file_utils import list_to_jsonl_gz
from observatory.platform.utils.url_utils import (
retry_session,
)
from observatory.platform.workflows.snapshot_telescope import (
SnapshotRelease,
SnapshotTelescope,
)
from academic_observatory_workflows.config import schema_folder as default_schema_folder
class RorRelease(SnapshotRelease):
def __init__(self, dag_id: str, release_date: pendulum.DateTime, url: str):
"""Construct a RorRelease.
:param release_date: the release date.
:param url: The url to the ror snapshot
"""
download_files_regex = f"{dag_id}.zip"
extract_files_regex = r"^\d{4}-\d{2}-\d{2}-ror-data.json$"
transform_files_regex = f"{dag_id}.jsonl.gz"
super().__init__(dag_id, release_date, download_files_regex, extract_files_regex, transform_files_regex)
self.url = url
@property
def download_path(self) -> str:
"""Get the path to the downloaded file.
:return: the file path.
"""
return os.path.join(self.download_folder, f"{self.dag_id}.zip")
@property
def transform_path(self) -> str:
"""Get the path to the transformed file.
:return: the file path.
"""
return os.path.join(self.transform_folder, f"{self.dag_id}.jsonl.gz")
def download(self):
"""Downloads an individual ROR release from Zenodo.
:return: None.
"""
with requests.get(self.url, stream=True) as r:
with open(self.download_path, "wb") as f:
shutil.copyfileobj(r.raw, f)
logging.info(f"Downloaded file from {self.url} to: {self.download_path}")
def extract(self):
"""Extract a single ROR release to a given extraction path.
:return: None.
"""
logging.info(f"Extracting file: {self.download_path}")
try:
with ZipFile(self.download_path) as zip_file:
zip_file.extractall(self.extract_folder)
except BadZipFile:
raise AirflowException("Not a zip file")
logging.info(f"File extracted to: {self.extract_folder}")
def transform(self):
"""Transform an extracted ROR release.
The .json file is turned into json lines format and gzipped.
:return: None.
"""
extract_files = self.extract_files
# Check there is only one JSON file
if len(extract_files) == 1:
release_json_file = extract_files[0]
logging.info(f"Transforming file: {release_json_file}")
else:
raise AirflowException(f"{len(extract_files)} extracted files found: {extract_files}")
with open(release_json_file, "r") as f:
            results = json.load(f)
list_to_jsonl_gz(self.transform_path, results)
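# A minimal sketch of the transform step above (illustrative; assumes the extracted dump
# is a single JSON array and that list_to_jsonl_gz writes one JSON object per line into a
# gzipped file):
# with open("2021-09-23-ror-data.json") as f:
#     records = json.load(f)
# list_to_jsonl_gz("ror.jsonl.gz", records)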
class RorTelescope(SnapshotTelescope):
"""
The Research Organization Registry (ROR): https://ror.readme.io/
Saved to the BigQuery table: <project_id>.ror.rorYYYYMMDD
"""
DAG_ID = "ror"
DATASET_ID = "ror"
ROR_DATASET_URL = "https://zenodo.org/api/records/?communities=ror-data&sort=mostrecent"
def __init__(
self,
dag_id: str = DAG_ID,
start_date: pendulum.DateTime = pendulum.datetime(2021, 9, 1),
schedule_interval: str = "@weekly",
dataset_id: str = DATASET_ID,
schema_folder: str = default_schema_folder(),
load_bigquery_table_kwargs: Dict = None,
source_format: str = SourceFormat.NEWLINE_DELIMITED_JSON,
dataset_description: str = "",
catchup: bool = True,
airflow_vars: List = None,
):
"""Construct a RorTelescope instance.
:param dag_id: the id of the DAG.
:param start_date: the start date of the DAG.
:param schedule_interval: the schedule interval of the DAG.
:param dataset_id: the BigQuery dataset id.
:param schema_folder: the SQL schema path.
:param load_bigquery_table_kwargs: the customisation parameters for loading data into a BigQuery table.
:param source_format: the format of the data to load into BigQuery.
:param dataset_description: description for the BigQuery dataset.
:param catchup: whether to catchup the DAG or not.
        :param airflow_vars: list of Airflow variable keys; each variable is checked for existence in Airflow.
"""
if airflow_vars is None:
airflow_vars = [
AirflowVars.DATA_PATH,
AirflowVars.PROJECT_ID,
AirflowVars.DATA_LOCATION,
AirflowVars.DOWNLOAD_BUCKET,
AirflowVars.TRANSFORM_BUCKET,
]
if load_bigquery_table_kwargs is None:
load_bigquery_table_kwargs = {"ignore_unknown_values": True}
super().__init__(
dag_id,
start_date,
schedule_interval,
dataset_id,
schema_folder,
source_format=source_format,
load_bigquery_table_kwargs=load_bigquery_table_kwargs,
dataset_description=dataset_description,
catchup=catchup,
airflow_vars=airflow_vars,
)
self.add_setup_task_chain([self.check_dependencies, self.list_releases])
self.add_task_chain(
[
self.download,
self.upload_downloaded,
self.extract,
self.transform,
self.upload_transformed,
self.bq_load,
self.cleanup,
]
)
def make_release(self, **kwargs) -> List[RorRelease]:
"""Make release instances. The release is passed as an argument to the function (TelescopeFunction) that is
called in 'task_callable'.
:param kwargs: the context passed from the PythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html for a list of the keyword arguments that are
passed to this argument.
:return: A list of ROR release instances
"""
ti: TaskInstance = kwargs["ti"]
records = ti.xcom_pull(
key=RorTelescope.RELEASE_INFO, task_ids=self.list_releases.__name__, include_prior_dates=False
)
releases = []
for record in records:
release_date = record["release_date"]
url = record["url"]
releases.append(RorRelease(self.dag_id, pendulum.parse(release_date), url))
return releases
def list_releases(self, **kwargs):
"""Lists all ROR records for a given month and publishes their url and release_date as an XCom.
:param kwargs: the context passed from the BranchPythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html
for a list of the keyword arguments that are passed to this argument.
:return: the identifier of the task to execute next.
"""
execution_date = kwargs["execution_date"]
next_execution_date = kwargs["next_execution_date"]
records = list_ror_records(execution_date, next_execution_date)
        continue_dag = len(records) > 0
if continue_dag:
# Push messages
ti: TaskInstance = kwargs["ti"]
ti.xcom_push(RorTelescope.RELEASE_INFO, records, execution_date)
else:
logging.info(f"Found no available records.")
return continue_dag
def download(self, releases: List[RorRelease], **kwargs):
"""Task to download the ROR releases for a given month.
:param releases: a list of ROR releases.
:return: None.
"""
for release in releases:
release.download()
def extract(self, releases: List[RorRelease], **kwargs):
"""Task to extract the ROR releases for a given month.
:param releases: a list of ROR releases.
:return: None.
"""
for release in releases:
release.extract()
def transform(self, releases: List[RorRelease], **kwargs):
"""Task to transform the ROR releases for a given month.
:param releases: a list of ROR releases.
:return: None.
"""
for release in releases:
release.transform()
def list_ror_records(start_date: pendulum.DateTime, end_date: pendulum.DateTime, timeout: float = 30.0) -> List[dict]:
"""List all ROR records available on Zenodo between two dates.
:param start_date: Start date of period to look into
:param end_date: End date of period to look into
:param timeout: the number of seconds to wait until timing out.
:return: the list of ROR records with required variables stored as a dictionary.
"""
logging.info(f"Getting info on available ROR records from Zenodo, from url: {RorTelescope.ROR_DATASET_URL}")
response = retry_session().get(RorTelescope.ROR_DATASET_URL, timeout=timeout, headers={"Accept-encoding": "gzip"})
if response.status_code != 200:
raise AirflowException(
f"Request to get available records on Zenodo unsuccessful, url: {RorTelescope.ROR_DATASET_URL}, "
f"status code: {response.status_code}, response: {response.text}, reason: {response.reason}"
)
response_json = json.loads(response.text)
# Get release date and url of records that are created between two dates
records: List[dict] = []
hits = response_json.get("hits", {}).get("hits", [])
logging.info(f"Looking for records between dates {start_date} and {end_date}")
for hit in hits:
release_date: pendulum.DateTime = pendulum.parse(hit["created"])
if start_date <= release_date < end_date:
link = hit["files"][0]["links"]["self"]
records.append({"release_date": release_date.format("YYYYMMDD"), "url": link})
logging.info(f"Found record created on '{release_date}', url: {link}")
if release_date < start_date:
break
return records
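# Illustrative sketch (not part of the module): listing ROR records for a
# one-month window. The dates and output below are arbitrary examples.
#
#   import pendulum
#   records = list_ror_records(pendulum.datetime(2021, 9, 1), pendulum.datetime(2021, 10, 1))
#   # e.g. [{"release_date": "20210920", "url": "https://zenodo.org/..."}]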
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,405
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/model.py
|
# Copyright 2021 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: James Diprose, Tuan Chien
from __future__ import annotations
import os
import random
import uuid
from dataclasses import dataclass
from datetime import datetime
from typing import Dict, List, Tuple
import pandas as pd
import pendulum
from click.testing import CliRunner
from faker import Faker
from pendulum import DateTime
from academic_observatory_workflows.config import schema_folder, test_fixtures_folder
from observatory.platform.utils.file_utils import load_jsonl
from observatory.platform.utils.test_utils import Table, bq_load_tables
LICENSES = ["cc-by", None]
EVENT_TYPES = [
"f1000",
"stackexchange",
"datacite",
"twitter",
"reddit-links",
"wordpressdotcom",
"plaudit",
"cambia-lens",
"hypothesis",
"wikipedia",
"reddit",
"crossref",
"newsfeed",
"web",
]
OUTPUT_TYPES = [
"journal_articles",
"book_sections",
"authored_books",
"edited_volumes",
"reports",
"datasets",
"proceedings_article",
"other_outputs",
]
FUNDREF_COUNTRY_CODES = ["usa", "gbr", "aus", "can"]
FUNDREF_REGIONS = {"usa": "Americas", "gbr": "Europe", "aus": "Oceania", "can": "Americas"}
FUNDING_BODY_TYPES = [
"For-profit companies (industry)",
"Trusts, charities, foundations (both public and private)",
"Associations and societies (private and public)",
"National government",
"Universities (academic only)",
"International organizations",
"Research institutes and centers",
"Other non-profit organizations",
"Local government",
"Libraries and data archiving organizations",
]
FUNDING_BODY_SUBTYPES = {
"For-profit companies (industry)": "pri",
"Trusts, charities, foundations (both public and private)": "pri",
"Associations and societies (private and public)": "pri",
"National government": "gov",
"Universities (academic only)": "gov",
"International organizations": "pri",
"Research institutes and centers": "pri",
"Other non-profit organizations": "pri",
"Local government": "gov",
"Libraries and data archiving organizations": "gov",
}
@dataclass
class Institution:
"""An institution.
:param id: unique identifier.
:param name: the institution's name.
:param grid_id: the institution's GRID id.
:param ror_id: the institution's ROR id.
:param country_code: the institution's country code.
:param country_code_2: the institution's two-letter country code.
:param region: the institution's region.
:param subregion: the institution's subregion.
:param papers: the papers published by the institution.
:param types: the institution type.
:param country: the institution country name.
:param coordinates: the institution's coordinates.
"""
id: int
name: str = None
grid_id: str = None
ror_id: str = None
country_code: str = None
country_code_2: str = None
region: str = None
subregion: str = None
papers: List[Paper] = None
types: str = None
country: str = None
coordinates: str = None
def date_between_dates(start_ts: int, end_ts: int) -> DateTime:
"""Return a datetime between two timestamps.
:param start_ts: the start timestamp.
:param end_ts: the end timestamp.
:return: the DateTime datetime.
"""
r_ts = random.randint(start_ts, end_ts - 1)
return pendulum.from_timestamp(r_ts)
@dataclass
class Paper:
"""A paper.
:param id: unique identifier.
:param doi: the DOI of the paper.
:param title: the title of the paper.
:param published_date: the date the paper was published.
:param output_type: the output type, see OUTPUT_TYPES.
:param authors: the authors of the paper.
:param funders: the funders of the research published in the paper.
:param journal: the journal this paper is published in.
:param publisher: the publisher of this paper (the owner of the journal).
:param events: a list of events related to this paper.
:param cited_by: a list of papers that this paper is cited by.
:param fields_of_study: a list of the fields of study of the paper.
:param license: the license the paper is published under (None when there is no license).
:param is_free_to_read_at_publisher: whether the paper is free to read on the publisher's website.
:param is_in_institutional_repo: whether the paper is available in an institutional repository.
"""
id: int
doi: str = None
title: str = None
published_date: pendulum.Date = None
output_type: str = None
authors: List[Author] = None
funders: List[Funder] = None
journal: Journal = None
publisher: Publisher = None
events: List[Event] = None
cited_by: List[Paper] = None
fields_of_study: List[FieldOfStudy] = None
license: str = None
is_free_to_read_at_publisher: bool = False
is_in_institutional_repo: bool = False
@property
def access_type(self) -> AccessType:
"""Return the access type for the paper.
:return: AccessType.
"""
gold_doaj = self.journal.license is not None
gold = gold_doaj or (self.is_free_to_read_at_publisher and self.license is not None and not gold_doaj)
hybrid = self.is_free_to_read_at_publisher and self.license is not None and not gold_doaj
bronze = self.is_free_to_read_at_publisher and self.license is None and not gold_doaj
green = self.is_in_institutional_repo
green_only = self.is_in_institutional_repo and not gold_doaj and not self.is_free_to_read_at_publisher
oa = gold or hybrid or bronze or green
return AccessType(
oa=oa, green=green, gold=gold, gold_doaj=gold_doaj, hybrid=hybrid, bronze=bronze, green_only=green_only
)
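# A worked example of the access type logic above: a paper that is free to
# read at the publisher without a license and not in a repository is bronze
# (and therefore oa), while a paper whose journal has a license is gold_doaj
# and gold regardless of the other flags.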
@dataclass
class AccessType:
"""The access type of a paper.
:param oa: whether the paper is open access or not.
:param green: when the paper is available in an institutional repository.
:param gold: when the paper is published in an open access journal, or is free to read
at the publisher and has an open access license.
:param gold_doaj: when the paper is published in an open access journal.
:param hybrid: when the paper is free to read at the publisher and has an open access
license, but the journal is not open access.
:param bronze: when the paper is free to read at the publisher website, however there is
no license.
:param green_only: when the paper is not free to read from the publisher, however it is
available in an institutional repository.
"""
oa: bool = None
green: bool = None
gold: bool = None
gold_doaj: bool = None
hybrid: bool = None
bronze: bool = None
green_only: bool = None
@dataclass
class Author:
"""An author.
:param id: unique identifier.
:param name: the name of the author.
:param institution: the author's institution.
"""
id: int
name: str = None
institution: Institution = None
@dataclass
class Funder:
"""A research funder.
:param id: unique identifier.
:param name: the name of the funder.
:param doi: the DOI of the funder.
:param country_code: the country code of the funder.
:param region: the region the funder is located in.
:param funding_body_type: the funding body type, see FUNDING_BODY_TYPES.
:param funding_body_subtype: the funding body subtype, see FUNDING_BODY_SUBTYPES.
"""
id: int
name: str = None
doi: str = None
country_code: str = None
region: str = None
funding_body_type: str = None
funding_body_subtype: str = None
@dataclass
class Publisher:
"""A publisher.
:param id: unique identifier.
:param name: the name of the publisher.
:param doi_prefix: the publisher DOI prefix.
:param journals: the journals owned by the publisher.
"""
id: int
name: str = None
doi_prefix: int = None
journals: List[Journal] = None
@dataclass
class FieldOfStudy:
"""A field of study.
:param id: unique identifier.
:param name: the field of study name.
:param level: the field of study level.
"""
id: int
name: str = None
level: int = None
@dataclass
class Journal:
"""A journal
:param id: unique identifier.
:param name: the journal name.
:param name: the license that articles are published under by the journal.
"""
id: int
name: str = None
license: str = None
@dataclass
class Event:
"""An event.
:param source: the source of the event, see EVENT_TYPES.
:param event_date: the date of the event.
"""
source: str = None
event_date: DateTime = None
InstitutionList = List[Institution]
AuthorList = List[Author]
FunderList = List[Funder]
PublisherList = List[Publisher]
PaperList = List[Paper]
FieldOfStudyList = List[FieldOfStudy]
EventsList = List[Event]
@dataclass
class ObservatoryDataset:
"""The generated observatory dataset.
:param institutions: list of institutions.
:param authors: list of authors.
:param funders: list of funders.
:param publishers: list of publishers.
:param papers: list of papers.
:param fields_of_study: list of fields of study.
"""
institutions: InstitutionList
authors: AuthorList
funders: FunderList
publishers: PublisherList
papers: PaperList
fields_of_study: FieldOfStudyList
def make_doi(doi_prefix: int) -> str:
"""Makes a randomised DOI given a DOI prefix.
:param doi_prefix: the DOI prefix.
:return: the DOI.
"""
return f"10.{doi_prefix}/{str(uuid.uuid4())}"
def make_observatory_dataset(
institutions: List[Institution],
n_funders: int = 5,
n_publishers: int = 5,
n_authors: int = 10,
n_papers: int = 50,
n_fields_of_study_per_level: int = 5,
) -> ObservatoryDataset:
"""Generate an observatory dataset.
:param institutions: a list of institutions.
:param n_funders: the number of funders to generate.
:param n_publishers: the number of publishers to generate.
:param n_authors: the number of authors to generate.
:param n_papers: the number of papers to generate.
:param n_fields_of_study_per_level: the number of fields of study to generate per level.
:return: the observatory dataset.
"""
faker = Faker()
funder_doi_prefix = 1000
funders = make_funders(n_funders=n_funders, doi_prefix=funder_doi_prefix, faker=faker)
publisher_doi_prefix = funder_doi_prefix + len(funders)
publishers = make_publishers(n_publishers=n_publishers, doi_prefix=publisher_doi_prefix, faker=faker)
fields_of_study = make_fields_of_study(n_fields_of_study_per_level=n_fields_of_study_per_level, faker=faker)
authors = make_authors(n_authors=n_authors, institutions=institutions, faker=faker)
papers = make_papers(
n_papers=n_papers,
authors=authors,
funders=funders,
publishers=publishers,
fields_of_study=fields_of_study,
faker=faker,
)
return ObservatoryDataset(institutions, authors, funders, publishers, papers, fields_of_study)
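# Illustrative usage under default parameters (the institution values below
# are made-up examples):
#
#   institutions = [Institution(1, name="Curtin University", ror_id="https://ror.org/02n415q13")]
#   dataset = make_observatory_dataset(institutions)
#   assert len(dataset.papers) == 50  # the default n_papers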
def make_funders(*, n_funders: int, doi_prefix: int, faker: Faker) -> FunderList:
"""Make the funders ground truth dataset.
:param n_funders: number of funders to generate.
:param doi_prefix: the DOI prefix for the funders.
:param faker: the faker instance.
:return: a list of funders.
"""
funders = []
for i in range(n_funders):
country_code = random.choice(FUNDREF_COUNTRY_CODES)
funding_body_type = random.choice(FUNDING_BODY_TYPES)
funders.append(
Funder(
i,
name=faker.company(),
doi=make_doi(doi_prefix),
country_code=country_code,
region=FUNDREF_REGIONS[country_code],
funding_body_type=funding_body_type,
funding_body_subtype=FUNDING_BODY_SUBTYPES[funding_body_type],
)
)
doi_prefix += 1
return funders
def make_publishers(
*,
n_publishers: int,
doi_prefix: int,
faker: Faker,
min_journals_per_publisher: int = 1,
max_journals_per_publisher: int = 3,
) -> PublisherList:
"""Make publishers ground truth dataset.
:param n_publishers: number of publishers.
:param doi_prefix: the publisher DOI prefix.
:param faker: the faker instance.
:param min_journals_per_publisher: the min number of journals to generate per publisher.
:param max_journals_per_publisher: the max number of journals to generate per publisher.
:return: a list of publishers.
"""
publishers = []
for i in range(n_publishers):
n_journals_ = random.randint(min_journals_per_publisher, max_journals_per_publisher)
journals_ = []
for _ in range(n_journals_):
journals_.append(Journal(str(uuid.uuid4()), name=faker.company(), license=random.choice(LICENSES)))
publishers.append(Publisher(i, name=faker.company(), doi_prefix=doi_prefix, journals=journals_))
doi_prefix += 1
return publishers
def make_fields_of_study(
*,
n_fields_of_study_per_level: int,
faker: Faker,
n_levels: int = 6,
min_title_length: int = 1,
max_title_length: int = 3,
) -> FieldOfStudyList:
"""Generate the fields of study for the ground truth dataset.
:param n_fields_of_study_per_level: the number of fields of study per level.
:param faker: the faker instance.
:param n_levels: the number of levels.
:param min_title_length: the minimum field of study title length (words).
:param max_title_length: the maximum field of study title length (words).
:return: a list of the fields of study.
"""
fields_of_study = []
fos_id_ = 0
for level in range(n_levels):
for _ in range(n_fields_of_study_per_level):
n_words_ = random.randint(min_title_length, max_title_length)
name_ = faker.sentence(nb_words=n_words_)
fos_ = FieldOfStudy(fos_id_, name=name_, level=level)
fields_of_study.append(fos_)
fos_id_ += 1
return fields_of_study
def make_authors(*, n_authors: int, institutions: InstitutionList, faker: Faker) -> AuthorList:
"""Generate the authors ground truth dataset.
:param n_authors: the number of authors to generate.
:param institutions: the institutions.
:param faker: the faker instance.
:return: a list of authors.
"""
authors = []
for i in range(n_authors):
author = Author(i, name=faker.name(), institution=random.choice(institutions))
authors.append(author)
return authors
def make_papers(
*,
n_papers: int,
authors: AuthorList,
funders: FunderList,
publishers: PublisherList,
fields_of_study: List,
faker: Faker,
min_title_length: int = 2,
max_title_length: int = 10,
min_authors: int = 1,
max_authors: int = 10,
min_funders: int = 0,
max_funders: int = 3,
min_events: int = 0,
max_events: int = 100,
min_fields_of_study: int = 1,
max_fields_of_study: int = 20,
) -> PaperList:
"""Generate the list of ground truth papers.
:param n_papers: the number of papers to generate.
:param authors: the authors list.
:param funders: the funders list.
:param publishers: the publishers list.
:param fields_of_study: the fields of study list.
:param faker: the faker instance.
:param min_title_length: the min paper title length.
:param max_title_length: the max paper title length.
:param min_authors: the min number of authors for each paper.
:param max_authors: the max number of authors for each paper.
:param min_funders: the min number of funders for each paper.
:param max_funders: the max number of funders for each paper.
:param min_events: the min number of events per paper.
:param max_events: the max number of events per paper.
:param min_fields_of_study: the min fields of study per paper.
:param max_fields_of_study: the max fields of study per paper.
:return: the list of papers.
"""
papers = []
for i in range(n_papers):
# Random title
n_words_ = random.randint(min_title_length, max_title_length)
title_ = faker.sentence(nb_words=n_words_)
# Random date
published_date_ = pendulum.from_format(faker.date(), "YYYY-MM-DD").date()
published_date_ = pendulum.date(year=published_date_.year, month=published_date_.month, day=published_date_.day)
# Output type
output_type_ = random.choice(OUTPUT_TYPES)
# Pick a random list of authors
n_authors_ = random.randint(min_authors, max_authors)
authors_ = random.sample(authors, n_authors_)
# Random funder
n_funders_ = random.randint(min_funders, max_funders)
if n_funders_ > 0:
funders_ = random.sample(funders, n_funders_)
else:
funders_ = []
# Random publisher
publisher_ = random.choice(publishers)
# Journal
journal_ = random.choice(publisher_.journals)
# Random DOI
doi_ = make_doi(publisher_.doi_prefix)
# Random events
n_events_ = random.randint(min_events, max_events)
events_ = []
today = datetime.now()
today_ts = int(today.timestamp())
start_date = datetime(today.year - 2, today.month, today.day)
start_ts = int(start_date.timestamp())
for _ in range(n_events_):
event_date_ = date_between_dates(start_ts=start_ts, end_ts=today_ts)
events_.append(Event(source=random.choice(EVENT_TYPES), event_date=event_date_))
# Fields of study
n_fos_ = random.randint(min_fields_of_study, max_fields_of_study)
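# Always include one field of study from the front of the list (which is
# ordered from the lowest level upwards); note the slice bound of 199 exceeds
# the default list length of 30, so in practice any field may be picked.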
level_0_index = 199
fields_of_study_ = [random.choice(fields_of_study[:level_0_index])]
fields_of_study_.extend(random.sample(fields_of_study, n_fos_))
# Open access status
is_free_to_read_at_publisher_ = True
if journal_.license is not None:
# Gold
license_ = journal_.license
else:
license_ = random.choice(LICENSES)
if license_ is None:
# Bronze: free to read on publisher website but no license
is_free_to_read_at_publisher_ = bool(random.getrandbits(1))
# Hybrid: license=True
# Green: in a 'repository'
is_in_institutional_repo_ = bool(random.getrandbits(1))
# Green not bronze: Not free to read at publisher but in a 'repository'
# Make paper
paper = Paper(
i,
doi=doi_,
title=title_,
published_date=published_date_,
output_type=output_type_,
authors=authors_,
funders=funders_,
journal=journal_,
publisher=publisher_,
events=events_,
fields_of_study=fields_of_study_,
license=license_,
is_free_to_read_at_publisher=is_free_to_read_at_publisher_,
is_in_institutional_repo=is_in_institutional_repo_,
)
papers.append(paper)
# Create paper citations
# Sort from oldest to newest
papers.sort(key=lambda p: p.published_date)
for i, paper in enumerate(papers):
# Create cited_by
n_papers_forwards = len(papers) - i
n_cited_by = random.randint(0, int(n_papers_forwards / 2))
paper.cited_by = random.sample(papers[i + 1 :], n_cited_by)
return papers
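# Note on the citation step above: because papers are sorted from oldest to
# newest before cited_by is sampled from papers[i + 1:], a paper can only be
# cited by papers published on or after its own date.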
def make_open_citations(dataset: ObservatoryDataset) -> List[Dict]:
"""Generate an Open Citations table from an ObservatoryDataset instance.
:param dataset: the Observatory Dataset.
:return: table rows.
"""
records = []
def make_oc_timespan(cited_date: pendulum.Date, citing_date: pendulum.Date):
ts = "P"
delta = citing_date - cited_date
years = delta.in_years()
months = delta.in_months() - years * 12
if years > 0:
ts += f"{years}Y"
if months > 0 or years == 0:
ts += f"{months}M"
return ts
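# For example, a paper from 2019-03 cited by a paper from 2021-05 yields the
# timespan "P2Y2M"; a citation within the same month yields "P0M".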
def is_author_sc(cited_: Paper, citing_: Paper):
for cited_author in cited_.authors:
for citing_author in citing_.authors:
if cited_author.name == citing_author.name:
return True
return False
def is_journal_sc(cited_: Paper, citing_: Paper):
return cited_.journal.name == citing_.journal.name
for cited in dataset.papers:
for citing in cited.cited_by:
records.append(
{
"oci": "",
"citing": citing.doi,
"cited": cited.doi,
"creation": citing.published_date.strftime("%Y-%m"),
"timespan": make_oc_timespan(cited.published_date, citing.published_date),
"journal_sc": is_author_sc(cited, citing),
"author_sc": is_journal_sc(cited, citing),
}
)
return records
def make_crossref_events(dataset: ObservatoryDataset) -> List[Dict]:
"""Generate the Crossref Events table from an ObservatoryDataset instance.
:param dataset: the Observatory Dataset.
:return: table rows.
"""
events = []
for paper in dataset.papers:
for event in paper.events:
obj_id = f"https://doi.org/{paper.doi}"
occurred_at = f"{event.event_date.to_datetime_string()} UTC"
source_id = event.source
events.append(
{
"obj_id": obj_id,
"timestamp": occurred_at,
"occurred_at": occurred_at,
"source_id": source_id,
"id": str(uuid.uuid4()),
}
)
return events
def make_unpaywall(dataset: ObservatoryDataset) -> List[Dict]:
"""Generate the Unpaywall table from an ObservatoryDataset instance.
:param dataset: the Observatory Dataset.
:return: table rows.
"""
records = []
genre_lookup = {
"journal_articles": ["journal-article"],
"book_sections": ["book-section", "book-part", "book-chapter"],
"authored_books": ["book", "monograph"],
"edited_volumes": ["edited-book"],
"reports": ["report"],
"datasets": ["dataset"],
"proceedings_article": ["proceedings-article"],
"other_outputs": ["other-outputs"],
}
for paper in dataset.papers:
# Make OA status
journal_is_in_doaj = paper.journal.license is not None
oa_locations = []
if paper.is_free_to_read_at_publisher:
oa_location = {"host_type": "publisher", "license": paper.license, "url": ""}
oa_locations.append(oa_location)
if paper.is_in_institutional_repo:
oa_location = {"host_type": "repository", "license": paper.license, "url": ""}
oa_locations.append(oa_location)
is_oa = len(oa_locations) > 0
if is_oa:
best_oa_location = oa_locations[0]
else:
best_oa_location = None
# Create record
records.append(
{
"doi": paper.doi,
"year": paper.published_date.year,
"genre": random.choice(genre_lookup[paper.output_type]),
"publisher": paper.publisher.name,
"journal_name": paper.journal.name,
"journal_issn_l": paper.journal.id,
"is_oa": is_oa,
"journal_is_in_doaj": journal_is_in_doaj,
"best_oa_location": best_oa_location,
"oa_locations": oa_locations,
}
)
return records
@dataclass
class MagDataset:
"""A container to hold the Microsoft Academic Graph tables.
:param affiliations: Affiliations table rows.
:param papers: Papers table rows.
:param paper_author_affiliations: PaperAuthorAffiliations table rows.
:param fields_of_study: FieldsOfStudy table rows.
:param paper_fields_of_study: PaperFieldsOfStudy table rows.
"""
affiliations: List[Dict]
papers: List[Dict]
paper_author_affiliations: List[Dict]
fields_of_study: List[Dict]
paper_fields_of_study: List[Dict]
def make_mag(dataset: ObservatoryDataset) -> MagDataset:
"""Generate the Microsoft Academic Graph tables from an ObservatoryDataset instance.
:param dataset: the Observatory Dataset.
:return: the Microsoft Academic Graph dataset.
"""
# Create affiliations
affiliations = []
for institute in dataset.institutions:
affiliations.append({"AffiliationId": institute.id, "DisplayName": institute.name, "GridId": institute.grid_id})
# Create fields of study
fields_of_study = []
for fos in dataset.fields_of_study:
fields_of_study.append({"FieldOfStudyId": fos.id, "DisplayName": fos.name, "Level": fos.level})
# Create papers, paper_author_affiliations and paper_fields_of_study
papers = []
paper_author_affiliations = []
paper_fields_of_study = []
for paper in dataset.papers:
papers.append({"PaperId": paper.id, "CitationCount": len(paper.cited_by), "Doi": paper.doi})
for author in paper.authors:
paper_author_affiliations.append(
{"PaperId": paper.id, "AuthorId": author.id, "AffiliationId": author.institution.id}
)
for fos in paper.fields_of_study:
paper_fields_of_study.append({"PaperId": paper.id, "FieldOfStudyId": fos.id})
return MagDataset(affiliations, papers, paper_author_affiliations, fields_of_study, paper_fields_of_study)
def make_crossref_fundref(dataset: ObservatoryDataset) -> List[Dict]:
"""Generate the Crossref Fundref table from an ObservatoryDataset instance.
:param dataset: the Observatory Dataset.
:return: table rows.
"""
records = []
for funder in dataset.funders:
records.append(
{
"pre_label": funder.name,
"funder": f"http://dx.doi.org/{funder.doi}",
"country_code": funder.country_code,
"region": funder.region,
"funding_body_type": funder.funding_body_type,
"funding_body_sub_type": funder.funding_body_subtype,
}
)
return records
def make_crossref_metadata(dataset: ObservatoryDataset) -> List[Dict]:
"""Generate the Crossref Metadata table from an ObservatoryDataset instance.
:param dataset: the Observatory Dataset.
:return: table rows.
"""
records = []
for paper in dataset.papers:
# Create funders
funders = []
for funder in paper.funders:
funders.append({"name": funder.name, "DOI": funder.doi, "award": None, "doi_asserted_by": None})
# Add Crossref record
records.append(
{
"title": [paper.title],
"DOI": paper.doi,
"is_referenced_by_count": len(paper.cited_by),
"issued": {
"date_parts": [paper.published_date.year, paper.published_date.month, paper.published_date.day]
},
"funder": funders,
"publisher": paper.publisher.name,
}
)
return records
def bq_load_observatory_dataset(
observatory_dataset: ObservatoryDataset,
bucket_name: str,
dataset_id_all: str,
dataset_id_settings: str,
release_date: DateTime,
data_location: str,
):
"""Load the fake Observatory Dataset in BigQuery.
:param observatory_dataset: the Observatory Dataset.
:param bucket_name: the Google Cloud Storage bucket name.
:param dataset_id_all: the dataset id for all data tables.
:param dataset_id_settings: the dataset id for settings tables.
:param release_date: the release date for the observatory dataset.
:param data_location: the location of the BigQuery dataset.
:return: None.
"""
# Generate source datasets
open_citations = make_open_citations(observatory_dataset)
crossref_events = make_crossref_events(observatory_dataset)
mag: MagDataset = make_mag(observatory_dataset)
crossref_fundref = make_crossref_fundref(observatory_dataset)
unpaywall = make_unpaywall(observatory_dataset)
crossref_metadata = make_crossref_metadata(observatory_dataset)
# Load fake ROR and settings datasets
test_doi_path = test_fixtures_folder("doi")
ror = load_jsonl(os.path.join(test_doi_path, "ror.jsonl"))
country = load_jsonl(os.path.join(test_doi_path, "country.jsonl"))
groupings = load_jsonl(os.path.join(test_doi_path, "groupings.jsonl"))
mag_affiliation_override = load_jsonl(os.path.join(test_doi_path, "mag_affiliation_override.jsonl"))
analysis_schema_path = schema_folder()
with CliRunner().isolated_filesystem() as t:
tables = [
Table("crossref_events", False, dataset_id_all, crossref_events, "crossref_events", analysis_schema_path),
Table(
"crossref_metadata", True, dataset_id_all, crossref_metadata, "crossref_metadata", analysis_schema_path
),
Table("crossref_fundref", True, dataset_id_all, crossref_fundref, "crossref_fundref", analysis_schema_path),
Table("Affiliations", True, dataset_id_all, mag.affiliations, "MagAffiliations", analysis_schema_path),
Table("FieldsOfStudy", True, dataset_id_all, mag.fields_of_study, "MagFieldsOfStudy", analysis_schema_path),
Table(
"PaperAuthorAffiliations",
True,
dataset_id_all,
mag.paper_author_affiliations,
"MagPaperAuthorAffiliations",
analysis_schema_path,
),
Table(
"PaperFieldsOfStudy",
True,
dataset_id_all,
mag.paper_fields_of_study,
"MagPaperFieldsOfStudy",
analysis_schema_path,
),
Table("Papers", True, dataset_id_all, mag.papers, "MagPapers", analysis_schema_path),
Table("open_citations", True, dataset_id_all, open_citations, "open_citations", analysis_schema_path),
Table("unpaywall", False, dataset_id_all, unpaywall, "unpaywall", analysis_schema_path),
Table("ror", True, dataset_id_all, ror, "ror", analysis_schema_path),
Table(
"country",
False,
dataset_id_settings,
country,
"country",
analysis_schema_path,
),
Table("groupings", False, dataset_id_settings, groupings, "groupings", analysis_schema_path),
Table(
"mag_affiliation_override",
False,
dataset_id_settings,
mag_affiliation_override,
"mag_affiliation_override",
analysis_schema_path,
),
Table(
"PaperAbstractsInvertedIndex",
True,
dataset_id_all,
[],
"MagPaperAbstractsInvertedIndex",
analysis_schema_path,
),
Table("Journals", True, dataset_id_all, [], "MagJournals", analysis_schema_path),
Table("ConferenceInstances", True, dataset_id_all, [], "MagConferenceInstances", analysis_schema_path),
Table("ConferenceSeries", True, dataset_id_all, [], "MagConferenceSeries", analysis_schema_path),
Table(
"FieldOfStudyExtendedAttributes",
True,
dataset_id_all,
[],
"MagFieldOfStudyExtendedAttributes",
analysis_schema_path,
),
Table(
"PaperExtendedAttributes", True, dataset_id_all, [], "MagPaperExtendedAttributes", analysis_schema_path
),
Table("PaperResources", True, dataset_id_all, [], "MagPaperResources", analysis_schema_path),
Table("PaperUrls", True, dataset_id_all, [], "MagPaperUrls", analysis_schema_path),
Table("PaperMeSH", True, dataset_id_all, [], "MagPaperMeSH", analysis_schema_path),
Table("orcid", False, dataset_id_all, [], "orcid", analysis_schema_path),
]
bq_load_tables(tables=tables, bucket_name=bucket_name, release_date=release_date, data_location=data_location)
def aggregate_events(events: List[Event]) -> Tuple[List[Dict], List[Dict], List[Dict]]:
"""Aggregate events by source into total events for all time, monthly and yearly counts.
:param events: list of events.
:return: list of events for each source aggregated by all time, months and years.
"""
lookup_totals = dict()
lookup_months = dict()
lookup_years = dict()
for event in events:
# Total events
if event.source in lookup_totals:
lookup_totals[event.source] += 1
else:
lookup_totals[event.source] = 1
# Events by month
month = event.event_date.strftime("%Y-%m")
month_key = (event.source, month)
if month_key in lookup_months:
lookup_months[month_key] += 1
else:
lookup_months[month_key] = 1
# Events by year
year = event.event_date.year
year_key = (event.source, year)
if year_key in lookup_years:
lookup_years[year_key] += 1
else:
lookup_years[year_key] = 1
total = [{"source": source, "count": count} for source, count in lookup_totals.items()]
months = [{"source": source, "month": month, "count": count} for (source, month), count in lookup_months.items()]
years = [{"source": source, "year": year, "count": count} for (source, year), count in lookup_years.items()]
# Sort
sort_events(total, months, years)
return total, months, years
def sort_events(events: List[Dict], months: List[Dict], years: List[Dict]):
"""Sort events in-place.
:param events: the all-time event counts.
:param months: events by month.
:param years: events by year.
:return: None.
"""
events.sort(key=lambda x: x["source"])
months.sort(key=lambda x: f"{x['month']}{x['source']}{x['count']}")
years.sort(key=lambda x: f"{x['year']}{x['source']}{x['count']}")
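# Illustrative sketch: two "twitter" events in the same month aggregate to a
# total of [{"source": "twitter", "count": 2}], one monthly row and one yearly
# row, each list sorted by the keys defined in sort_events.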
def make_doi_table(dataset: ObservatoryDataset) -> List[Dict]:
"""Generate the DOI table from an ObservatoryDataset instance.
:param dataset: the Observatory Dataset.
:return: table rows.
"""
records = []
for paper in dataset.papers:
# Doi, events and grids
doi = paper.doi.upper()
events = make_doi_events(doi, paper.events)
# Affiliations: institutions, countries, regions, subregion, funders, journals, publishers
institutions = make_doi_institutions(paper.authors)
countries = make_doi_countries(paper.authors)
regions = make_doi_regions(paper.authors)
subregions = make_doi_subregions(paper.authors)
funders = make_doi_funders(paper.funders)
journals = make_doi_journals(paper.journal)
publishers = make_doi_publishers(paper.publisher)
# Make final record
records.append(
{
"doi": doi,
"crossref": {
"title": paper.title,
"published_year": paper.published_date.year,
"published_month": paper.published_date.month,
"published_year_month": f"{paper.published_date.year}-{paper.published_date.month}",
"funder": [{"name": funder.name, "DOI": funder.doi} for funder in paper.funders],
},
"unpaywall": {},
"unpaywall_history": {},
"mag": {},
"open_citations": {},
"events": events,
"affiliations": {
"doi": doi,
"institutions": institutions,
"countries": countries,
"subregions": subregions,
"regions": regions,
"groupings": [],
"funders": funders,
"authors": [],
"journals": journals,
"publishers": publishers,
},
}
)
# Sort to match with sorted results
records.sort(key=lambda r: r["doi"])
return records
def make_doi_events(doi: str, event_list: EventsList) -> Dict:
"""Make the events for a DOI table row.
:param doi: the DOI.
:param event_list: a list of events for the paper.
:return: the events for the DOI table.
"""
events_total, events_months, events_years = aggregate_events(event_list)
# When no events, events is None
events = None
if len(events_total):
events = {
"doi": doi,
"events": events_total,
"months": events_months,
"years": events_years,
}
return events
def make_doi_funders(funder_list: FunderList) -> List[Dict]:
"""Make a DOI table row funders affiliation list.
:param funder_list: the funders list.
:return: the funders affiliation list.
"""
# Funders
funders = {}
for funder in funder_list:
funders[funder.doi] = {
"identifier": funder.name,
"name": funder.name,
"doi": funder.doi,
"types": ["Funder"],
"country": None,
"country_code": funder.country_code,
"country_code_2": None,
"region": funder.region,
"subregion": None,
"coordinates": None,
"funding_body_type": funder.funding_body_type,
"funding_body_subtype": funder.funding_body_subtype,
"members": [],
}
funders = [v for k, v in funders.items()]
funders.sort(key=lambda x: x["identifier"])
return funders
def make_doi_journals(journal: Journal) -> List[Dict]:
"""Make the journal affiliation list for a DOI table row.
:param journal: the paper's journal.
:return: the journal affiliation list.
"""
return [
{
"identifier": journal.id,
"types": ["Journal"],
"name": journal.name,
"country": None,
"country_code": None,
"country_code_2": None,
"region": None,
"subregion": None,
"coordinates": None,
"members": [],
}
]
def to_affiliations_list(dict_: Dict):
"""Convert affiliation dict into a list.
:param dict_: affiliation dict.
:return: affiliation list.
"""
l_ = []
for k, v in dict_.items():
v["members"] = list(v["members"])
v["members"].sort()
if "count" in v:
v["count"] = len(v["rors"])
v.pop("rors", None)
l_.append(v)
l_.sort(key=lambda x: x["identifier"])
return l_
def make_doi_publishers(publisher: Publisher) -> List[Dict]:
"""Make the publisher affiliations for a DOI table row.
:param publisher: the paper's publisher.
:return: the publisher affiliations list.
"""
return [
{
"identifier": publisher.name,
"types": ["Publisher"],
"name": publisher.name,
"country": None,
"country_code": None,
"country_code_2": None,
"region": None,
"subregion": None,
"coordinates": None,
"members": [],
}
]
def make_doi_institutions(author_list: AuthorList) -> List[Dict]:
"""Make the institution affiliations for a DOI table row.
:param author_list: the paper's author list.
:return: the institution affiliation list.
"""
institutions = {}
for author in author_list:
# Institution
inst = author.institution
if inst.ror_id not in institutions:
institutions[inst.ror_id] = {
"identifier": inst.ror_id,
"types": [inst.types],
"name": inst.name,
"country": inst.country,
"country_code": inst.country_code,
"country_code_2": inst.country_code_2,
"region": inst.region,
"subregion": inst.subregion,
"coordinates": inst.coordinates,
"members": [],
}
return to_affiliations_list(institutions)
def make_doi_countries(author_list: AuthorList):
"""Make the countries affiliations for a DOI table row.
:param author_list: the paper's author list.
:return: the countries affiliation list.
"""
countries = {}
for author in author_list:
inst = author.institution
if inst.country not in countries:
countries[inst.country] = {
"identifier": inst.country_code,
"name": inst.country,
"types": ["Country"],
"country": inst.country,
"country_code": inst.country_code,
"country_code_2": inst.country_code_2,
"region": inst.region,
"subregion": inst.subregion,
"coordinates": None,
"count": 0,
"members": {inst.ror_id},
"rors": {inst.ror_id},
}
else:
countries[inst.country]["members"].add(inst.ror_id)
countries[inst.country]["rors"].add(inst.ror_id)
return to_affiliations_list(countries)
def make_doi_regions(author_list: AuthorList):
"""Make the regions affiliations for a DOI table row.
:param author_list: the paper's author list.
:return: the regions affiliation list.
"""
regions = {}
for author in author_list:
inst = author.institution
if inst.region not in regions:
regions[inst.region] = {
"identifier": inst.region,
"name": inst.region,
"types": ["Region"],
"country": None,
"country_code": None,
"country_code_2": None,
"region": inst.region,
"subregion": None,
"coordinates": None,
"count": 0,
"members": {inst.subregion},
"rors": {inst.ror_id},
}
else:
regions[inst.region]["members"].add(inst.subregion)
regions[inst.region]["rors"].add(inst.ror_id)
return to_affiliations_list(regions)
def make_doi_subregions(author_list: AuthorList):
"""Make the subregions affiliations for a DOI table row.
:param author_list: the paper's author list.
:return: the subregions affiliation list.
"""
subregions = {}
for author in author_list:
inst = author.institution
if inst.subregion not in subregions:
subregions[inst.subregion] = {
"identifier": inst.subregion,
"name": inst.subregion,
"types": ["Subregion"],
"country": None,
"country_code": None,
"country_code_2": None,
"region": inst.region,
"subregion": None,
"coordinates": None,
"count": 0,
"members": {inst.country_code},
"rors": {inst.ror_id},
}
else:
subregions[inst.subregion]["members"].add(inst.country_code)
subregions[inst.subregion]["rors"].add(inst.ror_id)
return to_affiliations_list(subregions)
def calc_percent(value: float, total: float) -> float:
"""Calculate a percentage and round to 2dp.
:param value: the value.
:param total: the total.
:return: the percentage.
"""
return round(value / total * 100, 2)
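# For example, calc_percent(1, 3) returns 33.33.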
def make_country_table(dataset: ObservatoryDataset) -> List[Dict]:
"""Generate the Observatory Country table from an ObservatoryDataset instance.
:param dataset: the Observatory Dataset.
:return: table rows.
"""
data = []
for paper in dataset.papers:
for author in paper.authors:
inst = author.institution
at = paper.access_type
data.append(
{
"doi": paper.doi,
"id": inst.country_code,
"time_period": paper.published_date.year,
"name": inst.country,
"country": inst.country,
"country_code": inst.country_code,
"country_code_2": inst.country_code_2,
"region": inst.region,
"subregion": inst.subregion,
"coordinates": None,
"total_outputs": 1,
"oa": at.oa,
"green": at.green,
"gold": at.gold,
"gold_doaj": at.gold_doaj,
"hybrid": at.hybrid,
"bronze": at.bronze,
"green_only": at.green_only,
}
)
df = pd.DataFrame(data)
df.drop_duplicates(inplace=True)
agg = {
"id": "first",
"time_period": "first",
"name": "first",
"country": "first",
"country_code": "first",
"country_code_2": "first",
"region": "first",
"subregion": "first",
"coordinates": "first",
"total_outputs": "sum",
"oa": "sum",
"green": "sum",
"gold": "sum",
"gold_doaj": "sum",
"hybrid": "sum",
"bronze": "sum",
"green_only": "sum",
}
df = df.groupby(["id", "time_period"], as_index=False).agg(agg).sort_values(by=["id", "time_period"])
records = []
for i, row in df.iterrows():
total_outputs = row["total_outputs"]
oa = row["oa"]
green = row["green"]
gold = row["gold"]
gold_doaj = row["gold_doaj"]
hybrid = row["hybrid"]
bronze = row["bronze"]
green_only = row["green_only"]
records.append(
{
"id": row["id"],
"time_period": row["time_period"],
"name": row["name"],
"country": row["country"],
"country_code": row["country_code"],
"country_code_2": row["country_code_2"],
"region": row["region"],
"subregion": row["subregion"],
"coordinates": row["coordinates"],
"total_outputs": total_outputs,
"access_types": {
"oa": {"total_outputs": oa, "percent": calc_percent(oa, total_outputs)},
"green": {"total_outputs": green, "percent": calc_percent(green, total_outputs)},
"gold": {"total_outputs": gold, "percent": calc_percent(gold, total_outputs)},
"gold_doaj": {"total_outputs": gold_doaj, "percent": calc_percent(gold_doaj, total_outputs)},
"hybrid": {"total_outputs": hybrid, "percent": calc_percent(hybrid, total_outputs)},
"bronze": {"total_outputs": bronze, "percent": calc_percent(bronze, total_outputs)},
"green_only": {"total_outputs": green_only, "percent": calc_percent(green_only, total_outputs)},
},
"citations": {},
"output_types": [],
"disciplines": {},
"funders": [],
"members": [],
"publishers": [],
"journals": [],
"events": [],
}
)
return records
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,406
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/orcid_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Aniek Roelofs
import gzip
import logging
import math
import os
import shutil
import subprocess
import tarfile
from concurrent.futures import as_completed, ProcessPoolExecutor
from io import BytesIO
from subprocess import Popen
from typing import List
import boto3
import jsonlines
import pendulum
import xmltodict
from airflow.exceptions import AirflowException, AirflowSkipException
from airflow.hooks.base_hook import BaseHook
from airflow.models.taskinstance import TaskInstance
from airflow.models.variable import Variable
from observatory.platform.utils.airflow_utils import AirflowConns
from observatory.platform.utils.airflow_utils import AirflowVars
from observatory.platform.utils.gc_utils import (
aws_to_google_cloud_storage_transfer,
storage_bucket_exists,
)
from observatory.platform.utils.proc_utils import wait_for_process
from observatory.platform.workflows.stream_telescope import (
StreamRelease,
StreamTelescope,
)
from academic_observatory_workflows.config import schema_folder as default_schema_folder
class OrcidRelease(StreamRelease):
def __init__(
self,
dag_id: str,
start_date: pendulum.DateTime,
end_date: pendulum.DateTime,
first_release: bool,
max_processes: int,
batch_size: int = 500,
log_count: int = 5000,
):
"""Construct an OrcidRelease instance
:param dag_id: the id of the DAG.
:param start_date: the start_date of the release.
:param end_date: the end_date of the release.
:param first_release: whether this is the first release that is processed for this DAG.
:param max_processes: Max processes used for transforming files.
:param batch_size: the size of batches used when transforming files.
:param log_count: after how many iterations to print transform log update.
"""
download_files_regex = r".*.xml$"
transform_files_regex = r".*.jsonl.gz"
super().__init__(
dag_id,
start_date,
end_date,
first_release,
download_files_regex=download_files_regex,
transform_files_regex=transform_files_regex,
)
self.max_processes = max_processes
self.batch_size = batch_size
self.log_count = log_count
@property
def modified_records_path(self) -> str:
"""Get the path to the file with ids of modified records.
:return: the file path.
"""
return os.path.join(self.download_folder, "modified_records.txt")
def transfer(self, max_retries):
"""Sync files from AWS bucket to Google Cloud bucket
:param max_retries: Number of max retries to try the transfer
:return: None.
"""
aws_access_key_id, aws_secret_access_key = get_aws_conn_info()
gc_download_bucket = Variable.get(AirflowVars.ORCID_BUCKET)
gc_project_id = Variable.get(AirflowVars.PROJECT_ID)
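        # On the first release transfer everything; on later releases only objects modified since the previous release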
last_modified_since = None if self.first_release else self.start_date
success = False
total_count = 0
for i in range(max_retries):
if success:
break
success, objects_count = aws_to_google_cloud_storage_transfer(
aws_access_key_id,
aws_secret_access_key,
aws_bucket=OrcidTelescope.SUMMARIES_BUCKET,
include_prefixes=[],
gc_project_id=gc_project_id,
gc_bucket=gc_download_bucket,
description="Transfer ORCID data from airflow telescope",
last_modified_since=last_modified_since,
)
total_count += objects_count
if not success:
raise AirflowException(f"Google Storage Transfer unsuccessful, status: {success}")
logging.info(f"Total number of objects transferred: {total_count}")
if total_count == 0:
raise AirflowSkipException("No objects to transfer")
def download_transferred(self):
"""Download the updated records from the Google Cloud bucket to a local directory using gsutil.
If the run processes the first release it will download all files. If it is a later release, it will check
the ORCID lambda file which tracks which records are modified. Only the modified records will be downloaded.
:return: None.
"""
aws_access_key_id, aws_secret_access_key = get_aws_conn_info()
gc_download_bucket = Variable.get(AirflowVars.ORCID_BUCKET)
# Authenticate gcloud with service account
args = [
"gcloud",
"auth",
"activate-service-account",
f"--key-file" f"={os.environ['GOOGLE_APPLICATION_CREDENTIALS']}",
]
# Set env variable to fix gcloud error, see https://issuetracker.google.com/issues/217589135
proc: Popen = subprocess.Popen(
args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=dict(os.environ, CLOUDSDK_PYTHON="python3")
)
run_subprocess_cmd(proc, args)
logging.info(f"Downloading transferred files from Google Cloud bucket: {gc_download_bucket}")
log_path = os.path.join(self.download_folder, "cp.log")
if self.first_release:
# Download all records from bucket
args = [
"gsutil",
"-m",
"-q",
"cp",
"-L",
log_path,
"-r",
f"gs://{gc_download_bucket}",
self.download_folder,
]
proc: Popen = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
else:
# Download only modified records from bucket
write_modified_record_blobs(
self.start_date,
self.end_date,
aws_access_key_id,
aws_secret_access_key,
gc_download_bucket,
self.modified_records_path,
)
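            # gsutil cp -I reads the list of gs:// URLs to copy from stdin, fed here from the modified records file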
args = ["gsutil", "-m", "-q", "cp", "-L", log_path, "-I", self.download_folder]
proc: Popen = subprocess.Popen(
args, stdin=open(self.modified_records_path), stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
run_subprocess_cmd(proc, args)
def transform(self):
"""Transform the ORCID records in parallel.
Each file is 1 record, after the single file is transformed the data is appended to a .jsonl.gz file
:return: None.
"""
logging.info(f"Using {self.max_processes} workers for multithreading")
count = 0
files_to_process = self.download_files
num_batches = math.ceil(len(files_to_process) / self.batch_size)
        # Process the files in batches, because there are so many files and we don't get feedback while a batch runs
for b in range(num_batches):
logging.info(f"Transforming batch: {b}")
index = b * self.batch_size
batch_files = files_to_process[index : index + self.batch_size]
with ProcessPoolExecutor(max_workers=self.max_processes) as executor:
futures = list()
for file_path in batch_files:
future = executor.submit(transform_single_file, file_path, self.transform_folder)
futures.append(future)
for future in as_completed(futures):
future.result()
count += 1
if count % self.log_count == 0:
logging.info(f"Transformed {count} files")
# Loop through directories with individual files, concatenate files in each directory into 1 gzipped file.
logging.info("Finished transforming individual files, concatenating & compressing files")
for root, dirs, files in os.walk(self.transform_folder):
if root == self.transform_folder:
continue
file_dir = os.path.basename(root)
transform_path = os.path.join(self.transform_folder, file_dir + ".jsonl.gz")
with gzip.GzipFile(transform_path, mode="wb") as f_out:
for name in files:
with open(os.path.join(root, name), "rb") as f_in:
shutil.copyfileobj(f_in, f_out)
class OrcidTelescope(StreamTelescope):
"""ORCID telescope"""
DAG_ID = "orcid"
SUMMARIES_BUCKET = "v2.0-summaries"
LAMBDA_BUCKET = "orcid-lambda-file"
LAMBDA_OBJECT = "last_modified.csv.tar"
S3_HOST = "s3.eu-west-1.amazonaws.com"
def __init__(
self,
dag_id: str = DAG_ID,
start_date: pendulum.DateTime = pendulum.datetime(2018, 5, 14),
schedule_interval: str = "@weekly",
dataset_id: str = "orcid",
dataset_description: str = "",
table_descriptions: dict = None,
queue: str = "remote_queue",
merge_partition_field: str = "orcid_identifier.uri",
schema_folder: str = default_schema_folder(),
batch_load: bool = True,
airflow_vars: List = None,
airflow_conns: List = None,
max_processes: int = min(32, os.cpu_count() + 4),
):
"""Construct an OrcidTelescope instance.
:param dag_id: the id of the DAG.
:param start_date: the start date of the DAG.
:param schedule_interval: the schedule interval of the DAG.
:param dataset_id: the dataset id.
:param dataset_description: the dataset description.
:param queue: the queue that the telescope should run on.
:param table_descriptions: a dictionary with table ids and corresponding table descriptions.
:param merge_partition_field: the BigQuery field used to match partitions for a merge
:param schema_folder: the SQL schema path.
:param batch_load: whether all files in the transform folder are loaded into 1 table at once
:param airflow_vars: list of airflow variable keys, for each variable it is checked if it exists in airflow
:param airflow_conns: list of airflow connection keys, for each connection it is checked if it exists in airflow
:param max_processes: Max processes used for parallel transforming.
"""
if table_descriptions is None:
table_descriptions = {
dag_id: "The ORCID (Open Researcher and Contributor ID) is a nonproprietary "
"alphanumeric code to uniquely identify authors and contributors of "
"scholarly communication, see: https://orcid.org/."
}
if airflow_vars is None:
airflow_vars = [
AirflowVars.DATA_PATH,
AirflowVars.PROJECT_ID,
AirflowVars.DATA_LOCATION,
AirflowVars.DOWNLOAD_BUCKET,
AirflowVars.TRANSFORM_BUCKET,
AirflowVars.ORCID_BUCKET,
]
if airflow_conns is None:
airflow_conns = [AirflowConns.ORCID]
super().__init__(
dag_id,
start_date,
schedule_interval,
dataset_id,
merge_partition_field,
schema_folder,
dataset_description=dataset_description,
table_descriptions=table_descriptions,
queue=queue,
airflow_vars=airflow_vars,
airflow_conns=airflow_conns,
batch_load=batch_load,
load_bigquery_table_kwargs={"ignore_unknown_values": True},
)
self.max_processes = max_processes
self.add_setup_task(self.check_dependencies)
self.add_task_chain(
[self.transfer, self.download_transferred, self.transform, self.upload_transformed, self.bq_load_partition]
)
self.add_task_chain([self.bq_delete_old, self.bq_append_new, self.cleanup], trigger_rule="none_failed")
def make_release(self, **kwargs) -> OrcidRelease:
"""Make a release instance. The release is passed as an argument to the function (TelescopeFunction) that is
called in 'task_callable'.
:param kwargs: the context passed from the PythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html for a list of the keyword arguments that are
passed to this argument.
:return: an OrcidRelease instance.
"""
start_date, end_date, first_release = self.get_release_info(**kwargs)
release = OrcidRelease(self.dag_id, start_date, end_date, first_release, self.max_processes)
return release
def check_dependencies(self, **kwargs) -> bool:
"""Check dependencies of DAG. Add to parent method to additionally check whether the Google Cloud bucket
that is used to sync ORCID data exists.
:return: True if dependencies are valid.
"""
super().check_dependencies()
orcid_bucket_name = Variable.get(AirflowVars.ORCID_BUCKET)
if not storage_bucket_exists(orcid_bucket_name):
raise AirflowException(f"Bucket to store ORCID download data does not exist ({orcid_bucket_name})")
return True
def transfer(self, release: OrcidRelease, **kwargs):
"""Task to transfer data of the ORCID release.
:param release: an OrcidRelease instance.
:return: None.
"""
release.transfer(self.max_retries)
def download_transferred(self, release: OrcidRelease, **kwargs):
"""Task to download the transferred data of the ORCID release.
:param release: an OrcidRelease instance.
:return: None.
"""
release.download_transferred()
def transform(self, release: OrcidRelease, **kwargs):
"""Task to transform data of the ORCID release.
:param release: an OrcidRelease instance.
:return: None.
"""
release.transform()
def get_aws_conn_info() -> Tuple[str, str]:
"""Get the AWS access key id and secret access key from the ORCID airflow connection.
:return: access key id and secret access key
"""
conn = BaseHook.get_connection(AirflowConns.ORCID)
access_key_id = conn.login
secret_access_key = conn.password
return access_key_id, secret_access_key
def transform_single_file(download_path: str, transform_folder: str):
"""Transform a single ORCID file/record.
The xml file is turned into a dictionary, a record should have either a valid 'record' section or an 'error'
section. The keys of the dictionary are slightly changed so they are valid BigQuery fields.
The dictionary is appended to a jsonl file
:param download_path: The path to the file with the ORCID record.
:param transform_folder: The path where transformed files will be saved.
:return: None.
"""
file_name = os.path.basename(download_path)
file_dir = os.path.join(transform_folder, file_name[-7:-4]) # last three digits are used for subdir
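    # e.g. (illustrative) for "0000-0002-1825-0097.xml" the subdirectory is "097"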
    # Create the subdirectory if it does not exist yet. Even with the existence check a FileExistsError can still be
    # raised when another worker process creates the directory at the same time.
if not os.path.exists(file_dir):
try:
os.mkdir(file_dir)
except FileExistsError:
pass
transform_path = os.path.join(file_dir, os.path.splitext(file_name)[0] + ".jsonl")
# Skip if file already exists
if os.path.exists(transform_path):
return
# Create dict of data from summary xml file
with open(download_path, "r") as f:
orcid_dict = xmltodict.parse(f.read())
# Get record
orcid_record = orcid_dict.get("record:record")
# Some records do not have a 'record', but only 'error', this will be stored in the BQ table.
if not orcid_record:
orcid_record = orcid_dict.get("error:error")
if not orcid_record:
raise AirflowException(f"Key error for file: {download_path}")
orcid_record = change_keys(orcid_record, convert)
with jsonlines.open(transform_path, "w") as writer:
writer.write(orcid_record)
del orcid_dict
del orcid_record
def run_subprocess_cmd(proc: Popen, args: list):
"""Execute and wait for subprocess to finish, also handle stdout & stderr from process.
:param proc: subprocess proc
:param args: args list that was passed on to subprocess
:return: None.
"""
logging.info(f"Executing bash command: {subprocess.list2cmdline(args)}")
out, err = wait_for_process(proc)
if out:
logging.info(out)
if err:
logging.info(err)
if proc.returncode != 0:
# Don't raise exception if the only error is because blobs could not be found in bucket
err_lines = err.split("\n")
for line in err_lines[:]:
if not line or "CommandException: No URLs matched:" in line or "could not be transferred." in line:
err_lines.remove(line)
if err_lines:
raise AirflowException("bash command failed")
logging.info("Finished cmd successfully")
def write_modified_record_blobs(
start_date: pendulum.DateTime,
end_date: pendulum.DateTime,
aws_access_key_id: str,
aws_secret_access_key: str,
gc_download_bucket: str,
modified_records_path: str,
) -> int:
"""Download the ORCID lambda file (last_modified.csv.tar) from AWS and use file to write the full Google Cloud
blob names of modified records.
The tar file is opened in memory and contains the ORCID record IDs, sorted by last modified date.
:param start_date: Start date of the release
:param end_date: End date of the release
:param aws_access_key_id: AWS access key id
:param aws_secret_access_key: AWS secret access key
:param gc_download_bucket: Name of Google Cloud bucket with ORCID records
:param modified_records_path: Path to file with the blob names of modified records
:return: The number of modified records.
"""
logging.info(f"Writing modified records to {modified_records_path}")
# orcid lambda file, containing info on last_modified dates of records
aws_lambda_bucket = OrcidTelescope.LAMBDA_BUCKET
aws_lambda_object = OrcidTelescope.LAMBDA_OBJECT
s3client = boto3.client("s3", aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key)
lambda_obj = s3client.get_object(Bucket=aws_lambda_bucket, Key=aws_lambda_object)
lambda_content = lambda_obj["Body"].read()
modified_records_count = 0
# open tar file in memory
with tarfile.open(fileobj=BytesIO(lambda_content)) as tar, open(modified_records_path, "w") as f:
for tar_resource in tar:
if tar_resource.isfile():
# extract last modified file in memory
inner_file_bytes = tar.extractfile(tar_resource).read().decode().split("\n")
for line in inner_file_bytes[1:]:
elements = line.split(",")
orcid_record = elements[0]
# parse through line by line, check if last_modified timestamp is between start/end date
last_modified_date = pendulum.parse(elements[3])
# skip records that are too new, not included in this release
if last_modified_date > end_date:
continue
# use records between start date and end date
elif last_modified_date >= start_date:
directory = orcid_record[-3:]
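                        # e.g. (illustrative) ORCID iD "0000-0002-1825-0097" maps to blob "097/0000-0002-1825-0097.xml"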
f.write(f"gs://{gc_download_bucket}/{directory}/{orcid_record}.xml" + "\n")
modified_records_count += 1
# stop when reached records before start date, not included in this release
else:
break
return modified_records_count
def convert(k: str) -> str:
"""Convert key of dictionary to valid BQ key.
:param k: Key
:return: The converted key
"""
if len(k.split(":")) > 1:
k = k.split(":")[1]
if k.startswith("@") or k.startswith("#"):
k = k[1:]
k = k.replace("-", "_")
return k
def change_keys(obj, convert):
"""Recursively goes through the dictionary obj and replaces keys with the convert function.
:param obj: The dictionary value, can be object of any type
:param convert: The convert function.
:return: The transformed object.
"""
if isinstance(obj, (str, int, float)):
return obj
if isinstance(obj, dict):
new = obj.__class__()
        for k, v in list(obj.items()):
            # Skip XML namespace declaration attributes
            if k.startswith("@xmlns"):
                continue
            new[convert(k)] = change_keys(v, convert)
elif isinstance(obj, (list, set, tuple)):
new = obj.__class__(change_keys(v, convert) for v in obj)
else:
return obj
return new
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,407
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/crossref_metadata_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Aniek Roelofs, James Diprose
from __future__ import annotations
import functools
import json
import logging
import os
import shutil
import subprocess
from concurrent.futures import ProcessPoolExecutor, as_completed
from datetime import datetime
from subprocess import Popen
from typing import Dict, List
import jsonlines
import pendulum
import requests
from airflow.exceptions import AirflowException
from airflow.hooks.base import BaseHook
from bs4 import BeautifulSoup
from natsort import natsorted
from observatory.platform.utils.airflow_utils import AirflowConns, AirflowVars
from observatory.platform.utils.proc_utils import wait_for_process
from observatory.platform.utils.url_utils import retry_session
from observatory.platform.utils.workflow_utils import blob_name, bq_load_shard, get_chunks
from observatory.platform.workflows.snapshot_telescope import (
SnapshotRelease,
SnapshotTelescope,
)
from academic_observatory_workflows.config import schema_folder as default_schema_folder
class CrossrefMetadataRelease(SnapshotRelease):
def __init__(self, dag_id: str, release_date: pendulum.DateTime):
"""Create a CrossrefMetadataRelease instance.
:param dag_id: the DAG id.
:param release_date: the date of the release.
"""
download_files_regex = ".*.json.tar.gz$"
extract_files_regex = f".*.json$"
transform_files_regex = f".*.jsonl$"
super().__init__(dag_id, release_date, download_files_regex, extract_files_regex, transform_files_regex)
self.url = CrossrefMetadataTelescope.TELESCOPE_URL.format(year=release_date.year, month=release_date.month)
@property
def api_key(self):
"""Return API token"""
connection = BaseHook.get_connection(AirflowConns.CROSSREF)
return connection.password
@property
def download_path(self) -> str:
"""Get the path to the downloaded file.
:return: the file path.
"""
return os.path.join(self.download_folder, "crossref_metadata.json.tar.gz")
def download(self):
"""Download release.
:return: None.
"""
logging.info(f"Downloading from url: {self.url}")
# Set API token header
header = {"Crossref-Plus-API-Token": f"Bearer {self.api_key}"}
# Download release
with requests.get(self.url, headers=header, stream=True) as response:
# Check if authorisation with the api token was successful or not, raise error if not successful
if response.status_code != 200:
raise ConnectionError(f"Error downloading file {self.url}, status_code={response.status_code}")
# Open file for saving
with open(self.download_path, "wb") as file:
response.raw.read = functools.partial(response.raw.read, decode_content=True)
shutil.copyfileobj(response.raw, file)
logging.info(f"Successfully download url to {self.download_path}")
def extract(self):
"""Extract release. Decompress and unzip file to multiple json files.
:return: None.
"""
logging.info(f"extract_release: {self.download_path}")
        # Run the command using GNU tar; bsdtar (e.g. on OS X) might give the error:
        # "Error inclusion pattern: Failed to open 'pigz -d'"
cmd = f'tar -xv -I "pigz -d" -f {self.download_path} -C {self.extract_folder}'
p: Popen = subprocess.Popen(
cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, executable="/bin/bash"
)
stdout, stderr = wait_for_process(p)
logging.debug(stdout)
success = p.returncode == 0 and "error" not in stderr.lower()
if success:
logging.info(f"extract_release success: {self.download_path}")
else:
logging.error(stdout)
logging.error(stderr)
raise AirflowException(f"extract_release error: {self.download_path}")
def transform(self, max_processes: int, batch_size: int = 500):
"""Transform the Crossref Metadata release.
        Each extracted file is transformed. This is done in parallel using a ProcessPoolExecutor.
        :param max_processes: the number of processes to use when transforming files (one process per file).
        :param batch_size: the number of files to send to the ProcessPoolExecutor at one time.
        :return: None.
"""
logging.info(f"Transform input folder: {self.extract_folder}, output folder: {self.transform_folder}")
finished = 0
# List files and sort so that they are processed in ascending order
input_file_paths = natsorted(self.extract_files)
# Process files in batches so that ProcessPoolExecutor doesn't deplete the system of memory
for batch_input_file_paths in get_chunks(input_list=input_file_paths, chunk_size=batch_size):
with ProcessPoolExecutor(max_workers=max_processes) as executor:
futures = []
# Create tasks for each file
for input_file in batch_input_file_paths:
# The output file will be a json lines file, hence adding the 'l' to the file extension
output_file = os.path.join(self.transform_folder, os.path.basename(input_file) + "l")
future = executor.submit(transform_file, input_file, output_file)
futures.append(future)
# Wait for completed tasks
for future in as_completed(futures):
future.result()
finished += 1
if finished % 1000 == 0:
logging.info(f"Transformed {finished} files")
class CrossrefMetadataTelescope(SnapshotTelescope):
"""
The Crossref Metadata Telescope
Saved to the BigQuery table: <project_id>.crossref.crossref_metadataYYYYMMDD
"""
DAG_ID = "crossref_metadata"
DATASET_ID = "crossref"
SCHEDULE_INTERVAL = "0 0 7 * *"
TELESCOPE_URL = "https://api.crossref.org/snapshots/monthly/{year}/{month:02d}/all.json.tar.gz"
def __init__(
self,
dag_id: str = DAG_ID,
start_date: pendulum.DateTime = pendulum.datetime(2020, 6, 7),
schedule_interval: str = SCHEDULE_INTERVAL,
dataset_id: str = "crossref",
schema_folder: str = default_schema_folder(),
queue: str = "remote_queue",
dataset_description: str = "The Crossref Metadata Plus dataset: "
"https://www.crossref.org/services/metadata-retrieval/metadata-plus/",
load_bigquery_table_kwargs: Dict = None,
table_descriptions: Dict = None,
airflow_vars: List = None,
airflow_conns: List = None,
max_active_runs: int = 1,
max_processes: int = os.cpu_count(),
):
"""The Crossref Metadata telescope
:param dag_id: the id of the DAG.
:param start_date: the start date of the DAG.
:param schedule_interval: the schedule interval of the DAG.
:param dataset_id: the BigQuery dataset id.
:param schema_folder: the SQL schema path.
:param queue: Crossref Metadata tasks run on the worker VM, indicated by the 'remote_queue'.
:param dataset_description: description for the BigQuery dataset.
:param load_bigquery_table_kwargs: the customisation parameters for loading data into a BigQuery table.
:param table_descriptions: a dictionary with table ids and corresponding table descriptions.
:param airflow_vars: list of airflow variable keys, for each variable it is checked if it exists in airflow.
:param airflow_conns: list of airflow connection keys, for each connection it is checked if it exists in airflow
:param max_active_runs: the maximum number of DAG runs that can be run at once.
:param max_processes: the number of processes used with ProcessPoolExecutor to transform files in parallel.
"""
if table_descriptions is None:
table_descriptions = {dag_id: "A single Crossref Metadata snapshot."}
if airflow_vars is None:
airflow_vars = [
AirflowVars.DATA_PATH,
AirflowVars.PROJECT_ID,
AirflowVars.DATA_LOCATION,
AirflowVars.DOWNLOAD_BUCKET,
AirflowVars.TRANSFORM_BUCKET,
]
if airflow_conns is None:
airflow_conns = [AirflowConns.CROSSREF]
if load_bigquery_table_kwargs is None:
load_bigquery_table_kwargs = {"ignore_unknown_values": True}
super().__init__(
dag_id,
start_date,
schedule_interval,
dataset_id,
schema_folder,
queue=queue,
dataset_description=dataset_description,
load_bigquery_table_kwargs=load_bigquery_table_kwargs,
table_descriptions=table_descriptions,
airflow_vars=airflow_vars,
airflow_conns=airflow_conns,
max_active_runs=max_active_runs,
)
self.max_processes = max_processes
self.add_setup_task(self.check_dependencies)
self.add_setup_task(self.check_release_exists)
self.add_task(self.download)
self.add_task(self.upload_downloaded)
self.add_task(self.extract)
self.add_task(self.transform)
self.add_task(self.upload_transformed)
self.add_task(self.bq_load)
self.add_task(self.cleanup)
def make_release(self, **kwargs) -> List[CrossrefMetadataRelease]:
"""Make release instances. The release is passed as an argument to the function (TelescopeFunction) that is
called in 'task_callable'.
:param kwargs: the context passed from the PythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html for a list of the keyword arguments that are
passed to this argument.
:return: a list of CrossrefMetadataRelease instances.
"""
release_date = kwargs["execution_date"]
return [CrossrefMetadataRelease(self.dag_id, release_date)]
def check_release_exists(self, **kwargs):
"""Check that the release for this month exists.
:param kwargs: the context passed from the PythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html
for a list of the keyword arguments that are passed to this argument.
:return: None.
"""
# List all available releases
logging.info(f"Listing available releases since start date ({self.start_date}):")
for dt in pendulum.period(pendulum.instance(self.start_date), pendulum.today("UTC")).range("years"):
response = requests.get(f"https://api.crossref.org/snapshots/monthly/{dt.year}")
            soup = BeautifulSoup(response.text, "html.parser")
hrefs = soup.find_all("a", href=True)
for href in hrefs:
logging.info(href["href"])
# Construct the release for the execution date and check if it exists.
        # The release for a given execution_date is added on the 5th day of the following month.
# E.g. the 2020-05 release is added to the website on 2020-06-05.
execution_date = kwargs["execution_date"]
url = CrossrefMetadataTelescope.TELESCOPE_URL.format(year=execution_date.year, month=execution_date.month)
logging.info(f"Checking if available release exists for {execution_date.year}-{execution_date.month}")
# Get API key: it is required to check the head now
connection = BaseHook.get_connection(AirflowConns.CROSSREF)
api_key = connection.password
response = retry_session().head(url, headers={"Crossref-Plus-API-Token": f"Bearer {api_key}"})
if response.status_code == 302:
logging.info(f"Snapshot exists at url: {url}, response code: {response.status_code}")
return True
elif response.reason == "Not Found":
logging.info(
f"Snapshot does not exist at url: {url}, response code: {response.status_code}, "
f"reason: {response.reason}"
)
return False
else:
raise AirflowException(
f"Could not get head of url: {url}, response code: {response.status_code}," f"reason: {response.reason}"
)
def download(self, releases: List[CrossrefMetadataRelease], **kwargs):
"""Task to download the CrossrefMetadataRelease release for a given month.
:param releases: the list of CrossrefMetadataRelease instances.
:return: None.
"""
# Download each release
for release in releases:
release.download()
def extract(self, releases: List[CrossrefMetadataRelease], **kwargs):
"""Task to extract the CrossrefMetadataRelease release for a given month.
:param releases: the list of CrossrefMetadataRelease instances.
:return: None.
"""
for release in releases:
release.extract()
def transform(self, releases: List[CrossrefMetadataRelease], **kwargs):
"""Task to transform the CrossrefMetadataRelease release for a given month.
:param releases: the list of CrossrefMetadataRelease instances.
:return: None.
"""
for release in releases:
release.transform(max_processes=self.max_processes)
def bq_load(self, releases: List[SnapshotRelease], **kwargs):
"""Task to load each transformed release to BigQuery.
The table_id is set to the file name without the extension.
:param releases: a list of releases.
:return: None.
"""
# Load each transformed release
for release in releases:
transform_blob = f"{blob_name(release.transform_folder)}/*"
table_description = self.table_descriptions.get(self.dag_id, "")
bq_load_shard(
self.schema_folder,
release.release_date,
transform_blob,
self.dataset_id,
self.dag_id,
self.source_format,
prefix=self.schema_prefix,
schema_version=self.schema_version,
dataset_description=self.dataset_description,
table_description=table_description,
**self.load_bigquery_table_kwargs,
)
def transform_file(input_file_path: str, output_file_path: str):
"""Transform a single crossref metadata json file.
The json file is converted to a jsonl file and field names are transformed so they are accepted by BigQuery.
:param input_file_path: the path of the file to transform.
:param output_file_path: where to save the transformed file.
:return: None.
"""
# Open json
with open(input_file_path, mode="r") as input_file:
input_data = json.load(input_file)
# Transform data
output_data = []
for item in input_data["items"]:
output_data.append(transform_item(item))
# Save as JSON Lines
with jsonlines.open(output_file_path, mode="w", compact=True) as output_file:
output_file.write_all(output_data)
def transform_item(item):
"""Transform a single Crossref Metadata JSON value.
:param item: a JSON value.
:return: the transformed item.
"""
if isinstance(item, dict):
new = {}
for k, v in item.items():
# Replace hyphens with underscores for BigQuery compatibility
k = k.replace("-", "_")
# Get inner array for date parts
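            # e.g. (illustrative) {"date-parts": [[2020, 1, 15]]} becomes {"date_parts": [2020, 1, 15]}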
if k == "date_parts":
v = v[0]
if None in v:
# "date-parts" : [ [ null ] ]
v = []
elif k == "award":
if isinstance(v, str):
v = [v]
elif k == "date_time":
try:
datetime.strptime(v, "%Y-%m-%dT%H:%M:%SZ")
except ValueError:
v = ""
new[k] = transform_item(v)
return new
elif isinstance(item, list):
return [transform_item(i) for i in item]
else:
return item
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,408
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/unpaywall_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Tuan Chien
import os
from datetime import datetime, timedelta
from typing import Generator, List, Optional, Tuple, Union
import pendulum
from academic_observatory_workflows.config import schema_folder as default_schema_folder
from academic_observatory_workflows.workflows.unpaywall_snapshot_telescope import (
UnpaywallSnapshotRelease,
)
from airflow.exceptions import AirflowException
from airflow.models.dagrun import DagRun
from croniter import croniter
from dateutil.relativedelta import relativedelta
from observatory.platform.utils.airflow_utils import (
AirflowVars,
get_airflow_connection_password,
)
from observatory.platform.utils.file_utils import find_replace_file, gunzip_files
from observatory.platform.utils.http_download import download_file
from observatory.platform.utils.url_utils import (
get_http_response_json,
get_observatory_http_header,
)
from observatory.platform.utils.workflow_utils import is_first_dag_run
from observatory.platform.workflows.stream_telescope import (
StreamRelease,
StreamTelescope,
)
class UnpaywallRelease(StreamRelease):
"""Unpaywall Data Feed Release"""
AIRFLOW_CONNECTION = "unpaywall" # Contains API key
# These links are publicly listed on Unpaywall's website. See https://unpaywall.org/products/data-feed
SNAPSHOT_URL = "https://api.unpaywall.org/feed/snapshot"
CHANGEFILES_URL = "https://api.unpaywall.org/feed/changefiles"
def __init__(
self,
*,
dag_id: str,
start_date: pendulum.DateTime,
end_date: pendulum.DateTime,
first_release: bool,
):
"""Construct an UnpaywallRelease instance
:param dag_id: the id of the DAG.
:param start_date: the start_date of the release.
:param end_date: the end_date of the release.
:param first_release: whether this is the first release that is processed for this DAG
"""
super().__init__(
dag_id,
start_date,
end_date,
first_release,
)
self.http_header = get_observatory_http_header(package_name="academic_observatory_workflows")
@property
def api_key(self) -> str:
"""The API key for accessing Unpaywall."""
return get_airflow_connection_password(UnpaywallRelease.AIRFLOW_CONNECTION)
@property
def snapshot_url(self) -> str:
"""Snapshot URL"""
return f"{UnpaywallRelease.SNAPSHOT_URL}?api_key={self.api_key}"
@property
def data_feed_url(self) -> str:
"""Data Feed URL"""
return f"{UnpaywallRelease.CHANGEFILES_URL}?interval=day&api_key={self.api_key}"
def download(self):
"""Download the release."""
if self.first_release:
self._download_snapshot()
else:
self._download_data_feed()
def _download_snapshot(self):
"""Download the most recent Unpaywall snapshot on or before the start date."""
download_file(url=self.snapshot_url, headers=self.http_header, prefix_dir=self.download_folder)
download_date = UnpaywallSnapshotRelease.parse_release_date(self.download_files[0]).date()
start_date = self.start_date.date()
if start_date != download_date:
raise AirflowException(
f"The telescope start date {start_date} and the downloaded snapshot date {download_date} do not match. Please set the telescope's start date to {download_date}."
)
@staticmethod
def get_diff_release(*, feed_url: str, start_date: pendulum.DateTime) -> Tuple[Optional[str], Optional[str]]:
"""Get the differential release url and filename.
:param feed_url: The URL to query for releases.
:param start_date: Earliest date to consider.
:return: (None,None) if nothing found, otherwise (url, filename).
"""
release_info = get_http_response_json(feed_url)
for release in release_info["list"]:
# Have been advised by Unpaywall to parse timestamp from filename instead of relying on the json fields.
release_date = UnpaywallSnapshotRelease.parse_release_date(release["filename"]).date()
# Apply diffs from 2 days ago. This is so we start applying diffs 1 day before the snapshot date to
# guarantee no gaps with the snapshot.
target_date = (start_date - pendulum.Duration(days=2)).date()
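            # e.g. (illustrative) for a start date of 2021-07-02 the target diff is dated 2021-06-30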
if release_date == target_date:
return release["url"], release["filename"]
return (None, None)
def _download_data_feed(self):
"""Download data feed update (diff) that can be applied to the base snapshot. Can only handle a single download."""
url, filename = self.get_diff_release(
feed_url=self.data_feed_url,
start_date=self.start_date,
)
filename = os.path.join(self.download_folder, filename)
download_file(url=url, filename=filename, headers=self.http_header)
def extract(self):
"""Unzip the downloaded files."""
gunzip_files(file_list=self.download_files, output_dir=self.extract_folder)
def transform(self):
"""Find and replace the 'authenticated-orcid' string in the jsonl to 'authenticated_orcid'"""
files = list(filter(lambda file: file[-5:] == "jsonl", self.extract_files))
for src in files:
filename = os.path.basename(src)
dst = os.path.join(self.transform_folder, filename)
find_replace_file(src=src, dst=dst, pattern="authenticated-orcid", replacement="authenticated_orcid")
class UnpaywallTelescope(StreamTelescope):
DAG_ID = "unpaywall"
DATAFEED_URL = "https://unpaywall.org/products/data-feed"
AIRFLOW_CONNECTION = "unpaywall"
def __init__(
self,
dag_id: str = DAG_ID,
start_date: pendulum.DateTime = pendulum.datetime(2021, 7, 2),
schedule_interval: str = "@daily",
dataset_id: str = "our_research",
dataset_description: str = f"Unpaywall Data Feed: {DATAFEED_URL}",
merge_partition_field: str = "doi",
schema_folder: str = default_schema_folder(),
airflow_vars: List = None,
catchup=True,
):
"""Unpaywall Data Feed telescope.
:param dag_id: the id of the DAG.
:param start_date: the start date of the DAG.
:param schedule_interval: the schedule interval of the DAG.
:param dataset_id: the dataset id.
:param dataset_description: the dataset description.
:param merge_partition_field: the BigQuery field used to match partitions for a merge
:param schema_folder: the SQL schema path.
:param airflow_vars: list of airflow variable keys, for each variable it is checked if it exists in airflow
:param catchup: Whether to perform catchup on old releases.
"""
self._validate_schedule_interval(start_date=start_date, schedule_interval=schedule_interval)
if airflow_vars is None:
airflow_vars = [
AirflowVars.DATA_PATH,
AirflowVars.PROJECT_ID,
AirflowVars.DATA_LOCATION,
AirflowVars.DOWNLOAD_BUCKET,
AirflowVars.TRANSFORM_BUCKET,
]
super().__init__(
dag_id,
start_date,
schedule_interval,
dataset_id,
merge_partition_field,
schema_folder,
dataset_description=dataset_description,
batch_load=True,
catchup=catchup,
airflow_vars=airflow_vars,
airflow_conns=[UnpaywallTelescope.AIRFLOW_CONNECTION],
load_bigquery_table_kwargs={"ignore_unknown_values": True},
)
self.add_setup_task(self.check_dependencies)
self.add_setup_task(self.check_releases)
self.add_task(self.download)
self.add_task(self.upload_downloaded)
self.add_task(self.extract)
self.add_task(self.transform)
self.add_task(self.upload_transformed)
self.add_task(self.bq_load_partition)
self.add_task_chain([self.bq_delete_old, self.bq_append_new, self.cleanup], trigger_rule="none_failed")
@staticmethod
def _schedule_days_apart(
*, start_date: pendulum.DateTime, schedule_interval: Union[str, timedelta, relativedelta]
) -> Generator:
"""Calculate the scheduled days apart.
:param start_date: DAG start date.
:param schedule_interval: DAG schedule interval.
:return: A generator that gives back the days apart for each execution.
"""
if isinstance(schedule_interval, (timedelta, relativedelta)):
while True:
yield schedule_interval.days
a = start_date
it = croniter(schedule_interval, start_date)
while True:
b = it.next(datetime)
diff = (b - a).days
a = b
yield diff
def _validate_schedule_interval(self, *, start_date: pendulum.DateTime, schedule_interval: str):
"""Check that the schedule interval gives us 1 or 7 day differences.
Throws exception on failure.
:param start_date: DAG start date.
:param schedule_interval: DAG schedule interval.
"""
days_apart = UnpaywallTelescope._schedule_days_apart(start_date=start_date, schedule_interval=schedule_interval)
        diffs = [next(days_apart) for _ in range(2)]
        if diffs[0] != diffs[1] or diffs[0] != 1:
            raise AirflowException("Schedule interval must trigger executions 1 day apart.")
def check_releases(self, **kwargs) -> bool:
"""Check to see if diff releases are available. If not, and it's not the first release, then skip doing work.
Snapshot releases are checked on first release at download stage.
:param kwargs: The context passed from the PythonOperator.
:return: True to continue, False to skip.
"""
start_date, first_release = self._get_release_info(**kwargs)
# No checks on first release
if first_release:
return True
# Check for diffs
api_key = get_airflow_connection_password(UnpaywallRelease.AIRFLOW_CONNECTION)
url = f"{UnpaywallRelease.CHANGEFILES_URL}?interval=day&api_key={api_key}"
_, filename = UnpaywallRelease.get_diff_release(feed_url=url, start_date=start_date)
# No release within our target date.
if filename is None:
return False
# Release found
return True
def make_release(self, **kwargs) -> UnpaywallRelease:
"""Make a Release instance
:param kwargs: The context passed from the PythonOperator.
:return: UnpaywallRelease
"""
start_date, first_release = self._get_release_info(**kwargs)
release = UnpaywallRelease(
dag_id=self.dag_id,
start_date=start_date,
end_date=start_date,
first_release=first_release,
)
return release
def _get_release_info(self, **kwargs) -> Tuple[pendulum.DateTime, bool]:
"""Get the start, end dates, and whether this is a first release.
:param kwargs: The context passed from the PythonOperator.
:return start date, whether first release.
"""
dag_run: DagRun = kwargs["dag_run"]
first_release = is_first_dag_run(dag_run)
start_date = pendulum.instance(kwargs["execution_date"])
return start_date, first_release
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,409
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Aniek Roelofs, James Diprose
import os
from unittest.mock import patch
import pendulum
from academic_observatory_workflows.config import test_fixtures_folder
from academic_observatory_workflows.workflows.geonames_telescope import (
GeonamesRelease,
GeonamesTelescope,
fetch_release_date,
first_sunday_of_month,
)
from observatory.platform.utils.file_utils import get_file_hash
from observatory.platform.utils.gc_utils import bigquery_sharded_table_id
from observatory.platform.utils.test_utils import (
HttpServer,
ObservatoryEnvironment,
ObservatoryTestCase,
module_file_path,
)
from observatory.platform.utils.workflow_utils import (
SubFolder,
blob_name,
workflow_path,
)
class MockResponse:
def __init__(self, headers):
self.headers = headers
class TestGeonamesTelescope(ObservatoryTestCase):
"""Tests for the Geonames telescope"""
def __init__(self, *args, **kwargs):
"""Constructor which sets up variables used by tests.
:param args: arguments.
:param kwargs: keyword arguments.
"""
super(TestGeonamesTelescope, self).__init__(*args, **kwargs)
self.project_id = os.getenv("TEST_GCP_PROJECT_ID")
self.data_location = os.getenv("TEST_GCP_DATA_LOCATION")
self.all_countries_path = test_fixtures_folder("geonames", "allCountries.zip")
self.fetch_release_date_path = test_fixtures_folder("geonames", "fetch_release_date.yaml")
self.list_releases_path = test_fixtures_folder("geonames", "list_releases.yaml")
def test_dag_structure(self):
"""Test that the Geonames DAG has the correct structure.
:return: None
"""
dag = GeonamesTelescope().make_dag()
self.assert_dag_structure(
{
"check_dependencies": ["fetch_release_date"],
"fetch_release_date": ["download"],
"download": ["upload_downloaded"],
"upload_downloaded": ["extract"],
"extract": ["transform"],
"transform": ["upload_transformed"],
"upload_transformed": ["bq_load"],
"bq_load": ["cleanup"],
"cleanup": [],
},
dag,
)
def test_dag_load(self):
"""Test that the Geonames DAG can be loaded from a DAG bag.
:return: None
"""
with ObservatoryEnvironment().create():
dag_file = os.path.join(module_file_path("academic_observatory_workflows.dags"), "geonames_telescope.py")
self.assert_dag_load("geonames", dag_file)
def test_first_sunday_of_month(self):
"""Test first_sunday_of_month function.
:return: None.
"""
# Test when the date is later in the month
datetime = pendulum.datetime(year=2020, month=7, day=28)
expected_datetime = pendulum.datetime(year=2020, month=7, day=5)
actual_datetime = first_sunday_of_month(datetime)
self.assertEqual(expected_datetime, actual_datetime)
# Test when the date is itself the first Sunday of the month
datetime = pendulum.datetime(year=2020, month=11, day=1)
expected_datetime = pendulum.datetime(year=2020, month=11, day=1)
actual_datetime = first_sunday_of_month(datetime)
self.assertEqual(expected_datetime, actual_datetime)
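# A minimal sketch of what first_sunday_of_month plausibly looks like, based on
# the two cases above (the real implementation lives in geonames_telescope.py;
# this commented sketch is an assumption for illustration only):
#
#     def first_sunday_of_month(datetime: pendulum.DateTime) -> pendulum.DateTime:
#         # pendulum's first_of() returns the first occurrence of a weekday in a unit
#         return datetime.first_of("month", day_of_week=pendulum.SUNDAY)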
@patch("academic_observatory_workflows.workflows.geonames_telescope.requests.head")
def test_fetch_release_date(self, m_req):
"""Test fetch_release_date function.
:return: None.
"""
m_req.return_value = MockResponse({"Last-Modified": "Thu, 16 Jul 2020 01:22:15 GMT"})
date = fetch_release_date()
self.assertEqual(date, pendulum.datetime(year=2020, month=7, day=16, hour=1, minute=22, second=15))
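# fetch_release_date evidently issues a HEAD request and parses the
# Last-Modified header into a pendulum datetime. One way to do that parsing
# (an illustrative sketch, not necessarily the telescope's exact code):
#
#     from email.utils import parsedate_to_datetime
#
#     def fetch_release_date() -> pendulum.DateTime:
#         response = requests.head(GeonamesRelease.DOWNLOAD_URL)  # assumed URL source
#         return pendulum.instance(parsedate_to_datetime(response.headers["Last-Modified"]))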
def test_telescope(self):
"""Test the Geonames telescope end to end.
:return: None.
"""
# Setup Observatory environment
env = ObservatoryEnvironment(self.project_id, self.data_location)
dataset_id = env.add_dataset()
# Setup Telescope
execution_date = pendulum.datetime(year=2020, month=11, day=1)
telescope = GeonamesTelescope(dataset_id=dataset_id)
dag = telescope.make_dag()
# Create the Observatory environment and run tests
with env.create():
with env.create_dag_run(dag, execution_date):
# Release settings
release_date = pendulum.datetime(year=2021, month=3, day=5, hour=1, minute=34, second=32)
release_id = f'{telescope.dag_id}_{release_date.strftime("%Y_%m_%d")}'
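# e.g. for the mocked release date above, release_id == "geonames_2021_03_05"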
download_folder = workflow_path(SubFolder.downloaded, telescope.dag_id, release_id)
extract_folder = workflow_path(SubFolder.extracted, telescope.dag_id, release_id)
transform_folder = workflow_path(SubFolder.transformed, telescope.dag_id, release_id)
# Test that all dependencies are specified: no error should be thrown
env.run_task(telescope.check_dependencies.__name__)
# Test list releases task
with patch("academic_observatory_workflows.workflows.geonames_telescope.requests.head") as m_req:
m_req.return_value = MockResponse({"Last-Modified": "Fri, 05 Mar 2021 01:34:32 GMT"})
ti = env.run_task(telescope.fetch_release_date.__name__)
pulled_release_date = ti.xcom_pull(
key=GeonamesTelescope.RELEASE_INFO,
task_ids=telescope.fetch_release_date.__name__,
include_prior_dates=False,
)
self.assertIsInstance(pendulum.parse(pulled_release_date), pendulum.DateTime)
self.assertEqual(release_date.date(), pendulum.parse(pulled_release_date).date())
# Test download task
server = HttpServer(test_fixtures_folder("geonames"))
with server.create():
with patch.object(
GeonamesRelease, "DOWNLOAD_URL", f"http://{server.host}:{server.port}/allCountries.zip"
):
env.run_task(telescope.download.__name__)
download_file_path = os.path.join(download_folder, f"{telescope.dag_id}.zip")
expected_file_hash = get_file_hash(file_path=self.all_countries_path, algorithm="md5")
self.assert_file_integrity(download_file_path, expected_file_hash, "md5")
# Test that file uploaded
env.run_task(telescope.upload_downloaded.__name__)
self.assert_blob_integrity(env.download_bucket, blob_name(download_file_path), download_file_path)
# Test that file extracted
env.run_task(telescope.extract.__name__)
extracted_file_path = os.path.join(extract_folder, "allCountries.txt")
expected_file_hash = "de1bf005df4840d16faf598999d72051"
self.assert_file_integrity(extracted_file_path, expected_file_hash, "md5")
# Test that file transformed
env.run_task(telescope.transform.__name__)
transformed_file_path = os.path.join(transform_folder, f"{telescope.dag_id}.csv.gz")
expected_file_hash = "26c14e16"
self.assert_file_integrity(transformed_file_path, expected_file_hash, "gzip_crc")
# Test that transformed file uploaded
env.run_task(telescope.upload_transformed.__name__)
self.assert_blob_integrity(
env.transform_bucket, blob_name(transformed_file_path), transformed_file_path
)
# Test that data loaded into BigQuery
env.run_task(telescope.bq_load.__name__)
table_id = f"{self.project_id}.{dataset_id}.{bigquery_sharded_table_id(telescope.dag_id, release_date)}"
expected_rows = 50
self.assert_table_integrity(table_id, expected_rows)
# Test that all telescope data deleted
env.run_task(telescope.cleanup.__name__)
self.assert_cleanup(download_folder, extract_folder, transform_folder)
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,410
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Aniek Roelofs
import json
import os
from datetime import datetime
from unittest.mock import patch
import httpretty
import pendulum
from airflow.exceptions import AirflowException
from airflow.models.connection import Connection
from click.testing import CliRunner
from natsort import natsorted
from academic_observatory_workflows.config import test_fixtures_folder
from academic_observatory_workflows.workflows.crossref_metadata_telescope import (
CrossrefMetadataRelease,
CrossrefMetadataTelescope,
transform_item,
transform_file,
)
from observatory.platform.utils.airflow_utils import AirflowConns
from observatory.platform.utils.file_utils import load_jsonl
from observatory.platform.utils.gc_utils import bigquery_sharded_table_id
from observatory.platform.utils.test_utils import (
ObservatoryEnvironment,
ObservatoryTestCase,
module_file_path,
)
from observatory.platform.utils.workflow_utils import blob_name
class TestCrossrefMetadataTelescope(ObservatoryTestCase):
"""Tests for the Crossref Metadata telescope"""
def __init__(self, *args, **kwargs):
"""Constructor which sets up variables used by tests.
:param args: arguments.
:param kwargs: keyword arguments.
"""
super(TestCrossrefMetadataTelescope, self).__init__(*args, **kwargs)
self.project_id = os.getenv("TEST_GCP_PROJECT_ID")
self.data_location = os.getenv("TEST_GCP_DATA_LOCATION")
self.download_path = test_fixtures_folder("crossref_metadata", "crossref_metadata.json.tar.gz")
self.extract_file_hashes = [
"42cab8ed20ef20bed51dacd3dc364589",
"c45901a52154789470410aad51485e9c",
"4c0fd617224a557b9ef04313cca0bd4a",
"d93dc613e299871925532d906c3a44a1",
"dd1ab247c55191a14bcd1bf32719c337",
]
self.transform_hashes = [
"a2be39d3c4d4c9dc20af768f8ae35476",
"38b766ec494054e621787de00ff715c8",
"70437aad7c4568ed07408baf034871e4",
"c3e3285a48867c8b7c10b1c9c0c5ab8a",
"71ba3612352bcb2a723d4aa33ec35b61",
]
# release used for tests outside observatory test environment
self.release = CrossrefMetadataRelease("crossref_metadata", datetime(2020, 1, 1))
def test_dag_structure(self):
"""Test that the Crossref Metadata DAG has the correct structure.
:return: None
"""
dag = CrossrefMetadataTelescope().make_dag()
self.assert_dag_structure(
{
"check_dependencies": ["check_release_exists"],
"check_release_exists": ["download"],
"download": ["upload_downloaded"],
"upload_downloaded": ["extract"],
"extract": ["transform"],
"transform": ["upload_transformed"],
"upload_transformed": ["bq_load"],
"bq_load": ["cleanup"],
"cleanup": [],
},
dag,
)
def test_dag_load(self):
"""Test that the Crossref Metadata DAG can be loaded from a DAG bag.
:return: None
"""
with ObservatoryEnvironment().create():
dag_file = os.path.join(
module_file_path("academic_observatory_workflows.dags"), "crossref_metadata_telescope.py"
)
self.assert_dag_load("crossref_metadata", dag_file)
def test_telescope(self):
"""Test the Crossref Metadata telescope end to end.
:return: None.
"""
# Setup Observatory environment
env = ObservatoryEnvironment(self.project_id, self.data_location)
dataset_id = env.add_dataset()
# Setup Telescope
execution_date = pendulum.datetime(year=2022, month=1, day=1)
telescope = CrossrefMetadataTelescope(dataset_id=dataset_id)
dag = telescope.make_dag()
# Create the Observatory environment and run tests
with env.create():
with env.create_dag_run(dag, execution_date):
# Add Crossref Metadata connection
env.add_connection(Connection(conn_id=AirflowConns.CROSSREF, uri="mysql://:crossref-token@"))
# Test that all dependencies are specified: no error should be thrown
env.run_task(telescope.check_dependencies.__name__)
# Test the check_release_exists task; subsequent tasks should not be skipped
with httpretty.enabled():
url = CrossrefMetadataTelescope.TELESCOPE_URL.format(
year=execution_date.year, month=execution_date.month
)
httpretty.register_uri(httpretty.HEAD, url, body="", status=302)
env.run_task(telescope.check_release_exists.__name__)
release = CrossrefMetadataRelease(telescope.dag_id, execution_date)
# Test download task
with httpretty.enabled():
self.setup_mock_file_download(release.url, self.download_path)
env.run_task(telescope.download.__name__)
self.assertEqual(1, len(release.download_files))
expected_file_hash = "047770ae386f3376c08e3975d7f06016"
self.assert_file_integrity(release.download_path, expected_file_hash, "md5")
# Test that file uploaded
env.run_task(telescope.upload_downloaded.__name__)
self.assert_blob_integrity(env.download_bucket, blob_name(release.download_path), release.download_path)
# Test that file extracted
env.run_task(telescope.extract.__name__)
self.assertEqual(5, len(release.extract_files))
for i, file in enumerate(natsorted(release.extract_files)):
expected_file_hash = self.extract_file_hashes[i]
self.assert_file_integrity(file, expected_file_hash, "md5")
# Test that files transformed
env.run_task(telescope.transform.__name__)
self.assertEqual(5, len(release.transform_files))
for i, file in enumerate(natsorted(release.transform_files)):
expected_file_hash = self.transform_hashes[i]
self.assert_file_integrity(file, expected_file_hash, "md5")
# Test that transformed files uploaded
env.run_task(telescope.upload_transformed.__name__)
for file in release.transform_files:
self.assert_blob_integrity(env.transform_bucket, blob_name(file), file)
# Test that data loaded into BigQuery
env.run_task(telescope.bq_load.__name__)
table_id = (
f"{self.project_id}.{dataset_id}."
f"{bigquery_sharded_table_id(telescope.dag_id, release.release_date)}"
)
expected_rows = 20
self.assert_table_integrity(table_id, expected_rows)
# Test that all telescope data deleted
download_folder, extract_folder, transform_folder = (
release.download_folder,
release.extract_folder,
release.transform_folder,
)
env.run_task(telescope.cleanup.__name__)
self.assert_cleanup(download_folder, extract_folder, transform_folder)
@patch("academic_observatory_workflows.workflows.crossref_metadata_telescope.BaseHook.get_connection")
def test_download(self, mock_conn):
"""Test download method of release with failing response
:param mock_conn: Mock Airflow crossref connection
:return: None.
"""
mock_conn.return_value = Connection(AirflowConns.CROSSREF, "http://:crossref-token@")
release = self.release
with httpretty.enabled():
httpretty.register_uri(httpretty.GET, release.url, body="", status=400)
with self.assertRaises(ConnectionError):
release.download()
@patch("academic_observatory_workflows.workflows.crossref_metadata_telescope.subprocess.Popen")
@patch("observatory.platform.utils.workflow_utils.Variable.get")
def test_extract(self, mock_variable_get, mock_subprocess):
"""Test extract method of release with failing extract command
:param mock_variable_get: Mock Airflow data path variable
:param mock_subprocess: Mock the subprocess output
:return: None.
"""
mock_variable_get.return_value = "data"
release = self.release
mock_subprocess().returncode = 1
mock_subprocess().communicate.return_value = "stdout".encode(), "stderr".encode()
with self.assertRaises(AirflowException):
release.extract()
@patch("academic_observatory_workflows.workflows.crossref_metadata_telescope.BaseHook.get_connection")
def test_check_release_exists(self, mock_get_connection):
"""Test the 'check_release_exists' task with different responses.
:return: None.
"""
# Mock getting Crossref Metadata Connection
mock_get_connection.return_value = Connection(password="crossref-token")
release = self.release
telescope = CrossrefMetadataTelescope()
with httpretty.enabled():
# Register three responses: successful, release not found and 'other'
httpretty.register_uri(
httpretty.HEAD,
uri=release.url,
responses=[
httpretty.Response(body="", status=302),
httpretty.Response(body="", status=404, adding_headers={"reason": "Not Found"}),
httpretty.Response(body="", status=400),
],
)
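# httpretty serves the registered responses in order: the first call below sees
# 302 (release exists), the second 404 (release not found), and the third 400,
# which check_release_exists should surface as an AirflowException.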
continue_dag = telescope.check_release_exists(execution_date=release.release_date)
self.assertTrue(continue_dag)
continue_dag = telescope.check_release_exists(execution_date=release.release_date)
self.assertFalse(continue_dag)
with self.assertRaises(AirflowException):
telescope.check_release_exists(execution_date=release.release_date)
def test_transform_file(self):
"""Test transform_file."""
with CliRunner().isolated_filesystem() as t:
# Save input file
input_file_path = os.path.join(t, "input.json")
input_data = {
"items": [
{
"indexed": {
"date-parts": [[2019, 11, 19]],
"date-time": "2019-11-19T10:09:18Z",
"timestamp": 1574158158980,
},
"reference-count": 0,
"publisher": "American Medical Association (AMA)",
"issue": "2",
"content-domain": {"domain": [], "crossmark-restriction": False},
"short-container-title": [],
"published-print": {"date-parts": [[1994, 2, 1]]},
"DOI": "10.1001/archderm.130.2.225",
"type": "journal-article",
"created": {
"date-parts": [[2003, 3, 18]],
"date-time": "2003-03-18T21:22:40Z",
"timestamp": 1048022560000,
},
"page": "225-232",
"source": "Crossref",
"is-referenced-by-count": 23,
"title": ["Abnormalities of p53 protein expression in cutaneous disorders"],
"prefix": "10.1001",
"volume": "130",
"author": [{"given": "N. S.", "family": "McNutt", "affiliation": []}],
"member": "10",
"container-title": ["Archives of Dermatology"],
"original-title": [],
"deposited": {
"date-parts": [[2011, 7, 21]],
"date-time": "2011-07-21T07:23:09Z",
"timestamp": 1311232989000,
},
"score": None,
"subtitle": [],
"short-title": [],
"issued": {"date-parts": [[1994, 2, 1]]},
"references-count": 0,
"URL": "http://dx.doi.org/10.1001/archderm.130.2.225",
"relation": {},
"ISSN": ["0003-987X"],
"issn-type": [{"value": "0003-987X", "type": "print"}],
}
]
}
with open(input_file_path, mode="w") as f:
json.dump(input_data, f)
# Transform the file
output_file_path = os.path.join(t, "output.jsonl")
transform_file(input_file_path, output_file_path)
# Check results
expected_results = [
{
"indexed": {
"date_parts": [2019, 11, 19],
"date_time": "2019-11-19T10:09:18Z",
"timestamp": 1574158158980,
},
"reference_count": 0,
"publisher": "American Medical Association (AMA)",
"issue": "2",
"content_domain": {"domain": [], "crossmark_restriction": False},
"short_container_title": [],
"published_print": {"date_parts": [1994, 2, 1]},
"DOI": "10.1001/archderm.130.2.225",
"type": "journal-article",
"created": {
"date_parts": [2003, 3, 18],
"date_time": "2003-03-18T21:22:40Z",
"timestamp": 1048022560000,
},
"page": "225-232",
"source": "Crossref",
"is_referenced_by_count": 23,
"title": ["Abnormalities of p53 protein expression in cutaneous disorders"],
"prefix": "10.1001",
"volume": "130",
"author": [{"given": "N. S.", "family": "McNutt", "affiliation": []}],
"member": "10",
"container_title": ["Archives of Dermatology"],
"original_title": [],
"deposited": {
"date_parts": [2011, 7, 21],
"date_time": "2011-07-21T07:23:09Z",
"timestamp": 1311232989000,
},
"score": None,
"subtitle": [],
"short_title": [],
"issued": {"date_parts": [1994, 2, 1]},
"references_count": 0,
"URL": "http://dx.doi.org/10.1001/archderm.130.2.225",
"relation": {},
"ISSN": ["0003-987X"],
"issn_type": [{"value": "0003-987X", "type": "print"}],
}
]
actual_results = load_jsonl(output_file_path)
self.assertEqual(expected_results, actual_results)
def test_transform_item(self):
"""Test the cases that transform_item transforms"""
# Replace hyphens with underscores
item = {
"hello": {},
"hello-world": {"hello-world": [{"hello-world": 1}, {"hello-world": 1}, {"hello-world": 1}]},
}
expected = {
"hello": {},
"hello_world": {"hello_world": [{"hello_world": 1}, {"hello_world": 1}, {"hello_world": 1}]},
}
actual = transform_item(item)
self.assertEqual(expected, actual)
# date-parts
item = {"date-parts": [[2021, 1, 1]]}
expected = {"date_parts": [2021, 1, 1]}
actual = transform_item(item)
self.assertEqual(expected, actual)
# date-parts with None inside inner list
item = {"date-parts": [[None]]}
expected = {"date_parts": []}
actual = transform_item(item)
self.assertEqual(expected, actual)
# list with date-parts
item = {"hello-world": {"hello-world": [{"date-parts": [[2021, 1, 1]]}, {"date-parts": [[None]]}]}}
expected = {"hello_world": {"hello_world": [{"date_parts": [2021, 1, 1]}, {"date_parts": []}]}}
actual = transform_item(item)
self.assertEqual(expected, actual)
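# Taken together, the cases above imply a recursive transform along these lines
# (an illustrative sketch only; the real transform_item lives in
# crossref_metadata_telescope.py):
#
#     def transform_item(item):
#         if isinstance(item, dict):
#             result = {}
#             for key, value in item.items():
#                 key = key.replace("-", "_")
#                 if key == "date_parts":
#                     # Flatten [[y, m, d]] to [y, m, d]; [[None]] becomes []
#                     value = [] if value[0][0] is None else value[0]
#                 else:
#                     value = transform_item(value)
#                 result[key] = value
#             return result
#         if isinstance(item, list):
#             return [transform_item(i) for i in item]
#         return item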
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,411
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/doi_workflow.py
|
# Copyright 2020-2021 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Richard Hosking, James Diprose
from __future__ import annotations
import logging
import os
from concurrent.futures import ThreadPoolExecutor, as_completed
from dataclasses import dataclass
from datetime import timedelta
from typing import Dict, List, Optional, Tuple
import pendulum
from airflow.exceptions import AirflowException
from airflow.models import Variable
from academic_observatory_workflows.config import sql_folder
from observatory.platform.utils.airflow_utils import AirflowVars, set_task_state
from observatory.platform.utils.dag_run_sensor import DagRunSensor
from observatory.platform.utils.gc_utils import (
bigquery_sharded_table_id,
copy_bigquery_table,
create_bigquery_dataset,
create_bigquery_table_from_query,
create_bigquery_view,
select_table_shard_dates,
)
from observatory.platform.utils.jinja2_utils import (
make_sql_jinja2_filename,
render_template,
)
from observatory.platform.utils.workflow_utils import make_release_date
from observatory.platform.workflows.workflow import Workflow
MAX_QUERIES = 100
@dataclass
class Table:
dataset_id: str
table_id: str = None
sharded: bool = False
release_date: pendulum.DateTime = None
@dataclass
class Transform:
inputs: Dict = None
output_table: Table = None
output_cluster: bool = False
output_clustering_fields: List = None
@dataclass
class Aggregation:
table_id: str
aggregation_field: str
group_by_time_field: str = "published_year"
relate_to_institutions: bool = False
relate_to_countries: bool = False
relate_to_groups: bool = False
relate_to_members: bool = False
relate_to_journals: bool = False
relate_to_funders: bool = False
relate_to_publishers: bool = False
def make_dataset_transforms(
dataset_id_crossref_events: str = "crossref",
dataset_id_crossref_metadata: str = "crossref",
dataset_id_crossref_fundref: str = "crossref",
dataset_id_ror: str = "ror",
dataset_id_mag: str = "mag",
dataset_id_orcid: str = "orcid",
dataset_id_open_citations: str = "open_citations",
dataset_id_unpaywall: str = "our_research",
dataset_id_settings: str = "settings",
dataset_id_observatory: str = "observatory",
dataset_id_observatory_intermediate: str = "observatory_intermediate",
) -> Tuple[List[Transform], Transform, Transform]:
return (
[
Transform(
inputs={"crossref_events": Table(dataset_id_crossref_events, "crossref_events")},
output_table=Table(dataset_id_observatory_intermediate, "crossref_events"),
output_cluster=True,
output_clustering_fields=["doi"],
),
Transform(
inputs={
"crossref_fundref": Table(dataset_id_crossref_fundref, "crossref_fundref", sharded=True),
"crossref_metadata": Table(dataset_id_crossref_metadata, "crossref_metadata", sharded=True),
},
output_table=Table(dataset_id_observatory_intermediate, "crossref_fundref"),
output_cluster=True,
output_clustering_fields=["doi"],
),
Transform(
inputs={
"ror": Table(dataset_id_ror, "ror", sharded=True),
"settings": Table(dataset_id_settings),
},
output_table=Table(dataset_id_observatory_intermediate, "ror"),
),
Transform(
inputs={
"mag": Table(dataset_id_mag, "Affiliations", sharded=True),
"settings": Table(dataset_id_settings),
},
output_table=Table(dataset_id_observatory_intermediate, "mag"),
output_cluster=True,
output_clustering_fields=["Doi"],
),
Transform(
inputs={"orcid": Table(dataset_id_orcid, "orcid")},
output_table=Table(dataset_id_observatory_intermediate, "orcid"),
output_cluster=True,
output_clustering_fields=["doi"],
),
Transform(
inputs={"open_citations": Table(dataset_id_open_citations, "open_citations", sharded=True)},
output_table=Table(dataset_id_observatory_intermediate, "open_citations"),
output_cluster=True,
output_clustering_fields=["doi"],
),
Transform(
inputs={"unpaywall": Table(dataset_id_unpaywall, "unpaywall", sharded=False)},
output_table=Table(dataset_id_observatory_intermediate, "unpaywall"),
output_cluster=True,
output_clustering_fields=["doi"],
),
],
Transform(
inputs={
"observatory_intermediate": Table(dataset_id_observatory_intermediate),
"unpaywall": Table(dataset_id_unpaywall),
"crossref_metadata": Table(dataset_id_crossref_metadata, "crossref_metadata", sharded=True),
"settings": Table(dataset_id_settings),
},
output_table=Table(dataset_id_observatory, "doi"),
output_cluster=True,
output_clustering_fields=["doi"],
),
Transform(
inputs={
"observatory": Table(dataset_id_observatory, "doi", sharded=True),
"crossref_events": Table(dataset_id_observatory_intermediate, "crossref_events", sharded=True),
},
output_table=Table(dataset_id_observatory, "book"),
output_cluster=True,
output_clustering_fields=["isbn"],
),
)
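# Illustrative use: DoiWorkflow.__init__ below unpacks the returned tuple as
#   transforms, transform_doi, transform_book = make_dataset_transforms(...)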
def make_elastic_tables(
aggregate_table_id: str,
relate_to_institutions: bool = False,
relate_to_countries: bool = False,
relate_to_groups: bool = False,
relate_to_members: bool = False,
relate_to_journals: bool = False,
relate_to_funders: bool = False,
relate_to_publishers: bool = False,
):
# Always export
tables = [
{
"file_name": DoiWorkflow.EXPORT_UNIQUE_LIST_FILENAME,
"aggregate": aggregate_table_id,
"facet": "unique_list",
},
{
"file_name": DoiWorkflow.EXPORT_ACCESS_TYPES_FILENAME,
"aggregate": aggregate_table_id,
"facet": "access_types",
},
{
"file_name": DoiWorkflow.EXPORT_DISCIPLINES_FILENAME,
"aggregate": aggregate_table_id,
"facet": "disciplines",
},
{
"file_name": DoiWorkflow.EXPORT_OUTPUT_TYPES_FILENAME,
"aggregate": aggregate_table_id,
"facet": "output_types",
},
{"file_name": DoiWorkflow.EXPORT_EVENTS_FILENAME, "aggregate": aggregate_table_id, "facet": "events"},
{"file_name": DoiWorkflow.EXPORT_METRICS_FILENAME, "aggregate": aggregate_table_id, "facet": "metrics"},
]
# Optional Relationships
if relate_to_institutions:
tables.append(
{
"file_name": DoiWorkflow.EXPORT_RELATIONS_FILENAME,
"aggregate": aggregate_table_id,
"facet": "institutions",
}
)
if relate_to_countries:
tables.append(
{
"file_name": DoiWorkflow.EXPORT_RELATIONS_FILENAME,
"aggregate": aggregate_table_id,
"facet": "countries",
}
)
if relate_to_groups:
tables.append(
{
"file_name": DoiWorkflow.EXPORT_RELATIONS_FILENAME,
"aggregate": aggregate_table_id,
"facet": "groupings",
}
)
if relate_to_members:
tables.append(
{
"file_name": DoiWorkflow.EXPORT_RELATIONS_FILENAME,
"aggregate": aggregate_table_id,
"facet": "members",
}
)
if relate_to_journals:
tables.append(
{
"file_name": DoiWorkflow.EXPORT_RELATIONS_FILENAME,
"aggregate": aggregate_table_id,
"facet": "journals",
}
)
if relate_to_funders:
tables.append(
{
"file_name": DoiWorkflow.EXPORT_RELATIONS_FILENAME,
"aggregate": aggregate_table_id,
"facet": "funders",
}
)
if relate_to_publishers:
tables.append(
{
"file_name": DoiWorkflow.EXPORT_RELATIONS_FILENAME,
"aggregate": aggregate_table_id,
"facet": "publishers",
}
)
return tables
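# Example (illustrative): exporting the "country" aggregation with the flags it
# uses in DoiWorkflow.AGGREGATIONS yields the six always-on tables plus four
# relations tables (members, journals, funders, publishers), ten dicts in all:
#
#     tables = make_elastic_tables(
#         "country",
#         relate_to_members=True,
#         relate_to_journals=True,
#         relate_to_funders=True,
#         relate_to_publishers=True,
#     )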
class DoiWorkflow(Workflow):
INT_DATASET_ID = "observatory_intermediate"
INT_DATASET_DESCRIPTION = "Intermediate processing dataset for the Academic Observatory."
DASHBOARDS_DATASET_ID = "coki_dashboards"
DASHBOARDS_DATASET_DESCRIPTION = "The latest data for display in the COKI dashboards."
FINAL_DATASET_ID = "observatory"
FINAL_DATASET_DESCRIPTION = "The Academic Observatory dataset."
ELASTIC_DATASET_ID = "data_export"
ELASTIC_DATASET_ID_DATASET_DESCRIPTION = "The Academic Observatory dataset for Elasticsearch."
AGGREGATE_DOI_FILENAME = make_sql_jinja2_filename("aggregate_doi")
EXPORT_UNIQUE_LIST_FILENAME = make_sql_jinja2_filename("export_unique_list")
EXPORT_ACCESS_TYPES_FILENAME = make_sql_jinja2_filename("export_access_types")
EXPORT_DISCIPLINES_FILENAME = make_sql_jinja2_filename("export_disciplines")
EXPORT_EVENTS_FILENAME = make_sql_jinja2_filename("export_events")
EXPORT_METRICS_FILENAME = make_sql_jinja2_filename("export_metrics")
EXPORT_OUTPUT_TYPES_FILENAME = make_sql_jinja2_filename("export_output_types")
EXPORT_RELATIONS_FILENAME = make_sql_jinja2_filename("export_relations")
SENSOR_DAG_IDS = [
"crossref_metadata",
"crossref_fundref",
"geonames",
"ror",
"open_citations",
"unpaywall",
"orcid",
"crossref_events",
]
AGGREGATIONS = [
Aggregation(
"country",
"countries",
relate_to_members=True,
relate_to_journals=True,
relate_to_funders=True,
relate_to_publishers=True,
),
Aggregation(
"funder",
"funders",
relate_to_institutions=True,
relate_to_countries=True,
relate_to_groups=True,
relate_to_members=True,
relate_to_funders=True,
relate_to_publishers=True,
),
Aggregation(
"group",
"groupings",
relate_to_institutions=True,
relate_to_members=True,
relate_to_journals=True,
relate_to_funders=True,
relate_to_publishers=True,
),
Aggregation(
"institution",
"institutions",
relate_to_institutions=True,
relate_to_countries=True,
relate_to_journals=True,
relate_to_funders=True,
relate_to_publishers=True,
),
Aggregation(
"author",
"authors",
relate_to_institutions=True,
relate_to_countries=True,
relate_to_groups=True,
relate_to_journals=True,
relate_to_funders=True,
relate_to_publishers=True,
),
Aggregation(
"journal",
"journals",
relate_to_institutions=True,
relate_to_countries=True,
relate_to_groups=True,
relate_to_journals=True,
relate_to_funders=True,
),
Aggregation(
"publisher",
"publishers",
relate_to_institutions=True,
relate_to_countries=True,
relate_to_groups=True,
relate_to_funders=True,
),
Aggregation("region", "regions", relate_to_funders=True, relate_to_publishers=True),
Aggregation("subregion", "subregions", relate_to_funders=True, relate_to_publishers=True),
]
def __init__(
self,
*,
intermediate_dataset_id: str = INT_DATASET_ID,
dashboards_dataset_id: str = DASHBOARDS_DATASET_ID,
observatory_dataset_id: str = FINAL_DATASET_ID,
elastic_dataset_id: str = ELASTIC_DATASET_ID,
transforms: Tuple = None,
dag_id: Optional[str] = "doi",
start_date: Optional[pendulum.DateTime] = pendulum.datetime(2020, 8, 30),
schedule_interval: Optional[str] = "@weekly",
catchup: Optional[bool] = False,
airflow_vars: List = None,
):
"""Create the DoiWorkflow.
:param intermediate_dataset_id: the BigQuery intermediate dataset id.
:param dashboards_dataset_id: the BigQuery dashboards dataset id.
:param observatory_dataset_id: the BigQuery observatory dataset id.
:param elastic_dataset_id: the BigQuery elastic dataset id.
:param transforms: optional tuple of (intermediate transforms, DOI transform, book transform); defaults to make_dataset_transforms().
:param dag_id: the DAG id.
:param start_date: the start date.
:param schedule_interval: the schedule interval.
:param catchup: whether to catchup.
:param airflow_vars: the required Airflow Variables.
"""
if airflow_vars is None:
airflow_vars = [
AirflowVars.DATA_PATH,
AirflowVars.PROJECT_ID,
AirflowVars.DATA_LOCATION,
AirflowVars.DOWNLOAD_BUCKET,
AirflowVars.TRANSFORM_BUCKET,
]
# Initialise the Workflow base class
super().__init__(
dag_id=dag_id,
start_date=start_date,
schedule_interval=schedule_interval,
catchup=catchup,
airflow_vars=airflow_vars,
)
self.intermediate_dataset_id = intermediate_dataset_id
self.dashboards_dataset_id = dashboards_dataset_id
self.observatory_dataset_id = observatory_dataset_id
self.elastic_dataset_id = elastic_dataset_id
if transforms is None:
self.transforms, self.transform_doi, self.transform_book = make_dataset_transforms(
dataset_id_observatory=observatory_dataset_id
)
else:
self.transforms, self.transform_doi, self.transform_book = transforms
self.create_tasks()
def create_tasks(self):
# Add sensors
with self.parallel_tasks():
for ext_dag_id in self.SENSOR_DAG_IDS:
sensor = DagRunSensor(
task_id=f"{ext_dag_id}_sensor",
external_dag_id=ext_dag_id,
mode="reschedule",
duration=timedelta(days=7), # Look back up to 7 days from execution date
poke_interval=int(timedelta(hours=1).total_seconds()), # Check at this interval if dag run is ready
timeout=int(timedelta(days=2).total_seconds()), # Sensor will fail after 2 days of waiting
)
self.add_operator(sensor)
# Setup tasks
self.add_setup_task(self.check_dependencies)
# Create datasets
self.add_task(self.create_datasets)
# Create tasks for processing intermediate tables
with self.parallel_tasks():
for transform in self.transforms:
task_id = f"create_{transform.output_table.table_id}"
self.add_task(self.create_intermediate_table, op_kwargs={"transform": transform}, task_id=task_id)
# Create DOI Table
self.add_task(
self.create_intermediate_table,
op_kwargs={"transform": self.transform_doi},
task_id=f"create_{self.transform_doi.output_table.table_id}",
)
# Create Book Table
self.add_task(
self.create_intermediate_table,
op_kwargs={"transform": self.transform_book},
task_id=f"create_{self.transform_book.output_table.table_id}",
)
# Create final tables
with self.parallel_tasks():
for agg in self.AGGREGATIONS:
task_id = f"create_{agg.table_id}"
self.add_task(
self.create_aggregate_table, op_kwargs={"aggregation": agg, "task_id": task_id}, task_id=task_id
)
# Copy tables and create views
self.add_task(self.copy_to_dashboards)
self.add_task(self.create_dashboard_views)
# Export for Elastic
with self.parallel_tasks():
for agg in self.AGGREGATIONS:
task_id = f"export_{agg.table_id}"
self.add_task(
self.export_for_elastic, op_kwargs={"aggregation": agg, "task_id": task_id}, task_id=task_id
)
def make_release(self, **kwargs) -> ObservatoryRelease:
"""Make a release instance. The release is passed as an argument to the function (TelescopeFunction) that is
called in 'task_callable'.
:param kwargs: the context passed from the PythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html for a list of the keyword arguments that are passed
to this argument.
:return: an ObservatoryRelease instance
"""
release_date = make_release_date(**kwargs)
project_id = Variable.get(AirflowVars.PROJECT_ID)
data_location = Variable.get(AirflowVars.DATA_LOCATION)
return ObservatoryRelease(
project_id=project_id,
data_location=data_location,
release_date=release_date,
intermediate_dataset_id=self.intermediate_dataset_id,
observatory_dataset_id=self.observatory_dataset_id,
dashboards_dataset_id=self.dashboards_dataset_id,
elastic_dataset_id=self.elastic_dataset_id,
)
def create_datasets(self, release: ObservatoryRelease, **kwargs):
"""Create required BigQuery datasets.
:param release: the ObservatoryRelease.
:param kwargs: the context passed from the Airflow Operator.
See https://airflow.apache.org/docs/stable/macros-ref.html for a list of the keyword arguments that are passed
to this argument.
:return: None.
"""
release.create_datasets()
def create_intermediate_table(self, release: ObservatoryRelease, **kwargs):
"""Create an intermediate table.
:param release: the ObservatoryRelease.
:param kwargs: the context passed from the Airflow Operator.
See https://airflow.apache.org/docs/stable/macros-ref.html for a list of the keyword arguments that are passed
to this argument.
:return: None.
"""
transform: Transform = kwargs["transform"]
release.create_intermediate_table(
inputs=transform.inputs,
output_dataset_id=transform.output_table.dataset_id,
output_table_id=transform.output_table.table_id,
output_cluster=transform.output_cluster,
output_clustering_fields=transform.output_clustering_fields,
)
def create_aggregate_table(self, release: ObservatoryRelease, **kwargs):
"""Runs the aggregate table query.
:param release: the ObservatoryRelease.
:param kwargs: the context passed from the Airflow Operator.
See https://airflow.apache.org/docs/stable/macros-ref.html for a list of the keyword arguments that are passed
to this argument.
:return: None.
"""
agg: Aggregation = kwargs["aggregation"]
success = release.create_aggregate_table(
aggregation_field=agg.aggregation_field,
table_id=agg.table_id,
group_by_time_field=agg.group_by_time_field,
relate_to_institutions=agg.relate_to_institutions,
relate_to_countries=agg.relate_to_countries,
relate_to_groups=agg.relate_to_groups,
relate_to_members=agg.relate_to_members,
relate_to_journals=agg.relate_to_journals,
relate_to_funders=agg.relate_to_funders,
relate_to_publishers=agg.relate_to_publishers,
)
set_task_state(success, kwargs["task_id"])
def copy_to_dashboards(self, release: ObservatoryRelease, **kwargs):
"""Copy tables to dashboards dataset.
:param release: the ObservatoryRelease.
:param kwargs: the context passed from the Airflow Operator.
See https://airflow.apache.org/docs/stable/macros-ref.html for a list of the keyword arguments that are passed
to this argument.
:return: None.
"""
success = release.copy_to_dashboards()
set_task_state(success, self.copy_to_dashboards.__name__)
def create_dashboard_views(self, release: ObservatoryRelease, **kwargs):
"""Create views for dashboards dataset.
:param release: the ObservatoryRelease.
:param kwargs: the context passed from the Airflow Operator.
See https://airflow.apache.org/docs/stable/macros-ref.html for a list of the keyword arguments that are passed
to this argument.
:return: None.
"""
release.create_dashboard_views()
def export_for_elastic(self, release: ObservatoryRelease, **kwargs):
"""Export data in a de-nested form for Elasticsearch.
:param release: the ObservatoryRelease.
:param kwargs: the context passed from the Airflow Operator.
See https://airflow.apache.org/docs/stable/macros-ref.html for a list of the keyword arguments that are passed
to this argument.
:return: None.
"""
agg = kwargs["aggregation"]
success = release.export_for_elastic(
table_id=agg.table_id,
relate_to_institutions=agg.relate_to_institutions,
relate_to_countries=agg.relate_to_countries,
relate_to_groups=agg.relate_to_groups,
relate_to_members=agg.relate_to_members,
relate_to_journals=agg.relate_to_journals,
relate_to_funders=agg.relate_to_funders,
relate_to_publishers=agg.relate_to_publishers,
)
set_task_state(success, kwargs["task_id"])
class ObservatoryRelease:
def __init__(
self,
*,
project_id: str,
data_location: str,
release_date: pendulum.DateTime,
intermediate_dataset_id: str,
dashboards_dataset_id: str,
observatory_dataset_id: str,
elastic_dataset_id: str,
):
"""Construct an ObservatoryRelease.
:param project_id: the Google Cloud project id.
:param data_location: the location for BigQuery datasets.
:param release_date: the release date.
:param intermediate_dataset_id: the BigQuery intermediate dataset id.
:param dashboards_dataset_id: the BigQuery dashboards dataset id.
:param observatory_dataset_id: the BigQuery observatory dataset id.
:param elastic_dataset_id: the BigQuery elastic dataset id.
"""
self.project_id = project_id
self.data_location = data_location
self.release_date = release_date
self.intermediate_dataset_id = intermediate_dataset_id
self.dashboards_dataset_id = dashboards_dataset_id
self.observatory_dataset_id = observatory_dataset_id
self.elastic_dataset_id = elastic_dataset_id
def create_datasets(self):
"""Create the BigQuery datasets where data will be saved.
:return: None.
"""
datasets = [
(self.intermediate_dataset_id, DoiWorkflow.INT_DATASET_DESCRIPTION),
(self.dashboards_dataset_id, DoiWorkflow.DASHBOARDS_DATASET_DESCRIPTION),
(self.observatory_dataset_id, DoiWorkflow.FINAL_DATASET_DESCRIPTION),
(self.elastic_dataset_id, DoiWorkflow.ELASTIC_DATASET_ID_DATASET_DESCRIPTION),
]
for dataset_id, description in datasets:
create_bigquery_dataset(
self.project_id,
dataset_id,
self.data_location,
description=description,
)
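# Hedged sketch of what create_bigquery_dataset (an observatory platform helper)
# is assumed to do, written against the google-cloud-bigquery client; this is an
# illustration, not the actual implementation.
from google.cloud import bigquery

def create_bigquery_dataset_sketch(project_id: str, dataset_id: str, location: str, description: str = "") -> None:
    client = bigquery.Client(project=project_id)
    dataset = bigquery.Dataset(f"{project_id}.{dataset_id}")
    dataset.location = location
    dataset.description = description
    # exists_ok makes the call idempotent across workflow re-runs
    client.create_dataset(dataset, exists_ok=True)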
def create_intermediate_table(
self,
*,
inputs: Dict,
output_dataset_id: str,
output_table_id: str,
output_cluster: bool,
output_clustering_fields: List,
) -> bool:
"""Create an intermediate table.
:param inputs: the input datasets.
:param output_dataset_id: the output dataset id.
:param output_table_id: the output table id.
:param output_cluster: whether to cluster or not.
:param output_clustering_fields: the fields to cluster on.
:return: whether the intermediate table was created successfully.
"""
def get_release_date(dataset_id: str, table_id: str):
# Get last table shard date before current end date
table_shard_dates = select_table_shard_dates(self.project_id, dataset_id, table_id, self.release_date)
if table_shard_dates:
shard_date = table_shard_dates[0]
else:
raise AirflowException(
f"{self.project_id}.{dataset_id}.{table_id} "
f"with a table shard date <= {self.release_date} not found"
)
return shard_date
for k, table in inputs.items():
if table.sharded:
table.release_date = get_release_date(table.dataset_id, table.table_id)
# Create processed table
template_path = os.path.join(sql_folder(), make_sql_jinja2_filename(f"create_{output_table_id}"))
sql = render_template(template_path, project_id=self.project_id, release_date=self.release_date, **inputs)
output_table_id_sharded = bigquery_sharded_table_id(output_table_id, self.release_date)
success = create_bigquery_table_from_query(
sql=sql,
project_id=self.project_id,
dataset_id=output_dataset_id,
table_id=output_table_id_sharded,
location=self.data_location,
cluster=output_cluster,
clustering_fields=output_clustering_fields,
)
return success
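# Illustrative sketch of the sharded table naming convention assumed by
# bigquery_sharded_table_id above: the release date is appended to the table id
# as a YYYYMMDD suffix, which is what lets get_release_date select the latest
# shard on or before the release date. Hypothetical helper for illustration.
import pendulum

def sharded_table_id_sketch(table_id: str, release_date: pendulum.DateTime) -> str:
    return f"{table_id}{release_date.strftime('%Y%m%d')}"

assert sharded_table_id_sketch("doi", pendulum.datetime(2021, 1, 1)) == "doi20210101"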
def create_aggregate_table(
self,
*,
aggregation_field: str,
table_id: str,
group_by_time_field: str = "published_year",
relate_to_institutions: bool = False,
relate_to_countries: bool = False,
relate_to_groups: bool = False,
relate_to_members: bool = False,
relate_to_journals: bool = False,
relate_to_funders: bool = False,
relate_to_publishers: bool = False,
) -> bool:
"""Runs the aggregate table query.
:param aggregation_field: the field to aggregate on, e.g. institution, publisher etc.
:param table_id: the table id.
:param group_by_time_field: either published_year or published_year_month, depending on the granularity
required for the time dimension.
:param relate_to_institutions: whether to generate the institutions relationship output for this query.
:param relate_to_countries: whether to generate the countries relationship output for this query.
:param relate_to_groups: whether to generate the groups relationship output for this query.
:param relate_to_members: whether to generate the members relationship output for this query.
:param relate_to_journals: whether to generate the journals relationship output for this query.
:param relate_to_funders: whether to generate the funders relationship output for this query.
:param relate_to_publishers: whether to generate the publishers relationship output for this query.
:return: whether the aggregate table query was successful.
"""
template_path = os.path.join(sql_folder(), make_sql_jinja2_filename("create_aggregate"))
sql = render_template(
template_path,
project_id=self.project_id,
dataset_id=self.observatory_dataset_id,
release_date=self.release_date,
aggregation_field=aggregation_field,
group_by_time_field=group_by_time_field,
relate_to_institutions=relate_to_institutions,
relate_to_countries=relate_to_countries,
relate_to_groups=relate_to_groups,
relate_to_members=relate_to_members,
relate_to_journals=relate_to_journals,
relate_to_funders=relate_to_funders,
relate_to_publishers=relate_to_publishers,
)
sharded_table_id = bigquery_sharded_table_id(table_id, self.release_date)
success = create_bigquery_table_from_query(
sql=sql,
project_id=self.project_id,
dataset_id=self.observatory_dataset_id,
table_id=sharded_table_id,
location=self.data_location,
cluster=True,
clustering_fields=["id"],
)
return success
def copy_to_dashboards(self) -> bool:
"""Copy all tables in the observatory dataset to the dashboards dataset.
:return: whether successful or not.
"""
results = []
table_ids = [agg.table_id for agg in DoiWorkflow.AGGREGATIONS] + ["doi"]
for table_id in table_ids:
source_table_id = f"{self.project_id}.{self.observatory_dataset_id}.{bigquery_sharded_table_id(table_id, self.release_date)}"
destination_table_id = f"{self.project_id}.{self.dashboards_dataset_id}.{table_id}"
success = copy_bigquery_table(source_table_id, destination_table_id, self.data_location)
if not success:
logging.error(f"Issue copying table: {source_table_id} to {destination_table_id}")
results.append(success)
return all(results)
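# A minimal sketch, assuming copy_bigquery_table wraps BigQuery's copy-job API;
# shown only to illustrate the per-table copy performed above, not the real helper.
from google.cloud import bigquery

def copy_bigquery_table_sketch(source_table_id: str, destination_table_id: str, data_location: str) -> bool:
    client = bigquery.Client()
    job = client.copy_table(source_table_id, destination_table_id, location=data_location)
    job.result()  # block until the copy job completes
    return job.errors is None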
def create_dashboard_views(self):
"""Create views.
:return: None.
"""
# Load the comparison view template
template_path = os.path.join(sql_folder(), make_sql_jinja2_filename("comparison_view"))
# Create views
table_ids = ["country", "funder", "group", "institution", "publisher", "subregion"]
for table_id in table_ids:
view_name = f"{table_id}_comparison"
query = render_template(
template_path, project_id=self.project_id, dataset_id=self.dashboards_dataset_id, table_id=table_id
)
create_bigquery_view(self.project_id, self.dashboards_dataset_id, view_name, query)
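# Hedged sketch of create_bigquery_view: in the google-cloud-bigquery client a
# view is a table whose view_query is set. Illustration only; the real helper
# comes from the observatory platform.
from google.cloud import bigquery

def create_bigquery_view_sketch(project_id: str, dataset_id: str, view_name: str, query: str) -> None:
    client = bigquery.Client(project=project_id)
    view = bigquery.Table(f"{project_id}.{dataset_id}.{view_name}")
    view.view_query = query
    client.create_table(view, exists_ok=True)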
def export_for_elastic(
self,
*,
table_id: str,
relate_to_institutions: bool = False,
relate_to_countries: bool = False,
relate_to_groups: bool = False,
relate_to_members: bool = False,
relate_to_journals: bool = False,
relate_to_funders: bool = False,
relate_to_publishers: bool = False,
) -> bool:
"""Export data in in a de-nested form for elastic
:param table_id:
:param relate_to_institutions:
:param relate_to_countries:
:param relate_to_groups:
:param relate_to_members:
:param relate_to_journals:
:param relate_to_funders:
:param relate_to_publishers:
:return: whether successful or not.
"""
tables = make_elastic_tables(
table_id,
relate_to_institutions=relate_to_institutions,
relate_to_countries=relate_to_countries,
relate_to_groups=relate_to_groups,
relate_to_members=relate_to_members,
relate_to_journals=relate_to_journals,
relate_to_funders=relate_to_funders,
relate_to_publishers=relate_to_publishers,
)
# Calculate the number of parallel queries. Since all of the real work is done on BigQuery, run each export
# task in a separate thread so that they can run in parallel.
num_queries = min(len(tables), MAX_QUERIES)
results = []
with ThreadPoolExecutor(max_workers=num_queries) as executor:
futures = list()
futures_msgs = {}
for table in tables:
template_file_name = table["file_name"]
aggregate = table["aggregate"]
facet = table["facet"]
msg = f"Exporting file_name={template_file_name}, aggregate={aggregate}, facet={facet}"
logging.info(msg)
future = executor.submit(
self.export_aggregate_table,
table_id=table_id,
template_file_name=template_file_name,
aggregate=aggregate,
facet=facet,
)
futures.append(future)
futures_msgs[future] = msg
# Wait for completed tasks
for future in as_completed(futures):
success = future.result()
msg = futures_msgs[future]
results.append(success)
if success:
logging.info(f"Exporting feed success: {msg}")
else:
logging.error(f"Exporting feed failed: {msg}")
return all(results)
def export_aggregate_table(self, *, table_id: str, template_file_name: str, aggregate: str, facet: str) -> bool:
"""Export an aggregate table for Elasticsearch.
:param table_id: the id of the source aggregate table.
:param template_file_name: the file name of the Jinja2 SQL template to render.
:param aggregate: the aggregation type, used to name the export table.
:param facet: the facet being exported, used to name the export table.
:return: whether the export query was successful.
"""
template_path = os.path.join(sql_folder(), template_file_name)
sql = render_template(
template_path,
project_id=self.project_id,
dataset_id=self.observatory_dataset_id,
table_id=table_id,
release_date=self.release_date,
aggregate=aggregate,
facet=facet,
)
export_table_id = f"ao_{aggregate}_{facet}"
processed_table_id = bigquery_sharded_table_id(export_table_id, self.release_date)
success = create_bigquery_table_from_query(
sql=sql,
project_id=self.project_id,
dataset_id=self.elastic_dataset_id,
table_id=processed_table_id,
location=self.data_location,
)
return success
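# Illustrative sketch of the Jinja2 SQL templating flow used throughout this
# class: make_sql_jinja2_filename is assumed to map a name such as
# "create_aggregate" to "create_aggregate.sql.jinja2", and render_template to
# fill the template with keyword arguments. Hypothetical stand-in below.
from jinja2 import Template

def render_sql_sketch(template_str: str, **kwargs) -> str:
    return Template(template_str).render(**kwargs)

sql = render_sql_sketch(
    "SELECT * FROM `{{ project_id }}.{{ dataset_id }}.doi{{ shard }}`",
    project_id="my-project",
    dataset_id="observatory",
    shard="20210101",
)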
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,412
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: James Diprose, Tuan Chien
import os
from unittest.mock import MagicMock, patch
import pendulum
from academic_observatory_workflows.config import test_fixtures_folder
from academic_observatory_workflows.workflows.open_citations_telescope import (
OpenCitationsRelease,
OpenCitationsTelescope,
)
from airflow.utils.state import State
from observatory.platform.utils.gc_utils import run_bigquery_query
from observatory.platform.utils.http_download import DownloadInfo
from observatory.platform.utils.jinja2_utils import render_template
from observatory.platform.utils.test_utils import (
HttpServer,
ObservatoryEnvironment,
ObservatoryTestCase,
module_file_path,
)
from observatory.platform.utils.workflow_utils import (
bigquery_sharded_table_id,
blob_name,
)
class TestOpenCitationsTelescope(ObservatoryTestCase):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.project_id = os.getenv("TEST_GCP_PROJECT_ID")
self.data_location = os.getenv("TEST_GCP_DATA_LOCATION")
self.fixture_dir = test_fixtures_folder("open_citations")
self.release_list_file = "list_open_citation_releases.json"
self.version_1_file = "1.json"
def test_ctor(self):
table_descriptions = {"open_citations": "Custom description"}
telescope = OpenCitationsTelescope(table_descriptions=table_descriptions)
self.assertEqual(telescope.table_descriptions, table_descriptions)
telescope = OpenCitationsTelescope(airflow_vars=[])
self.assertEqual(telescope.airflow_vars, ["transform_bucket"])
@patch("academic_observatory_workflows.workflows.open_citations_telescope.get_http_response_json")
def test_list_releases_skip(self, m_get_response):
telescope = OpenCitationsTelescope()
m_get_response.side_effect = [
[{"url": "something"}],
{"created_date": "2018-11-13T12:03:08Z", "files": [1, 2]},
]
start_date = pendulum.datetime(2019, 1, 1)
end_date = pendulum.datetime(2019, 2, 1)
releases = telescope._list_releases(start_date=start_date, end_date=end_date)
self.assertEqual(len(releases), 0)
@patch("academic_observatory_workflows.workflows.open_citations_telescope.bigquery_table_exists")
@patch("academic_observatory_workflows.workflows.open_citations_telescope.bigquery_sharded_table_id")
@patch("academic_observatory_workflows.workflows.open_citations_telescope.Variable.get")
def test_process_release_no_files(self, m_get, m_bq_table_id, m_bq_table_exists):
m_get.return_value = "project_id"
m_bq_table_id.return_value = "1"
m_bq_table_exists.return_value = False
telescope = OpenCitationsTelescope()
releases = [
{"files": [], "date": "20210101"},
{"files": [1], "date": "20210101"},
{"files": [2], "date": "20210101"},
]
filtered_releases = list(filter(telescope._process_release, releases))
self.assertEqual(len(filtered_releases), 2)
@patch("academic_observatory_workflows.workflows.open_citations_telescope.bigquery_table_exists")
@patch("academic_observatory_workflows.workflows.open_citations_telescope.bigquery_sharded_table_id")
@patch("academic_observatory_workflows.workflows.open_citations_telescope.Variable.get")
def test_process_release_table_exists(self, m_get, m_bq_table_id, m_bq_table_exists):
m_get.return_value = "project_id"
m_bq_table_id.return_value = "1"
m_bq_table_exists.side_effect = [False, True, False]
telescope = OpenCitationsTelescope()
releases = [
{"files": [0], "date": "20210101"},
{"files": [1], "date": "20210101"},
{"files": [2], "date": "20210101"},
]
filtered_releases = list(filter(telescope._process_release, releases))
self.assertEqual(len(filtered_releases), 2)
@patch("academic_observatory_workflows.workflows.open_citations_telescope.OpenCitationsTelescope._process_release")
@patch("academic_observatory_workflows.workflows.open_citations_telescope.OpenCitationsTelescope._list_releases")
def test_get_release_info_continue(self, m_list_releases, m_process_release):
m_list_releases.return_value = [1, 2, 3]
m_process_release.return_value = True
telescope = OpenCitationsTelescope()
execution_date = pendulum.datetime(2021, 1, 1)
next_execution_date = pendulum.datetime(2021, 1, 8)
ti = MagicMock()
continue_dag = telescope.get_release_info(
execution_date=execution_date, next_execution_date=next_execution_date, ti=ti
)
self.assertTrue(continue_dag)
self.assertEqual(len(ti.method_calls), 1)
@patch("academic_observatory_workflows.workflows.open_citations_telescope.OpenCitationsTelescope._process_release")
@patch("academic_observatory_workflows.workflows.open_citations_telescope.OpenCitationsTelescope._list_releases")
def test_get_release_info_skip(self, m_list_releases, m_process_release):
m_list_releases.return_value = []
m_process_release.return_value = True
telescope = OpenCitationsTelescope()
execution_date = pendulum.datetime(2021, 1, 1)
next_execution_date = pendulum.datetime(2021, 1, 8)
ti = MagicMock()
continue_dag = telescope.get_release_info(
execution_date=execution_date, next_execution_date=next_execution_date, ti=ti
)
self.assertFalse(continue_dag)
self.assertEqual(len(ti.method_calls), 0)
def create_templates(self, *, host, port):
# list open citation releases
template_path = os.path.join(self.fixture_dir, self.release_list_file + ".jinja2")
rendered = render_template(template_path, host=host, port=port)
dst = os.path.join(self.fixture_dir, self.release_list_file)
with open(dst, "w") as f:
f.write(rendered)
# version 1
template_path = os.path.join(self.fixture_dir, self.version_1_file + ".jinja2")
rendered = render_template(template_path, host=host, port=port)
dst = os.path.join(self.fixture_dir, self.version_1_file)
with open(dst, "w") as f:
f.write(rendered)
def remove_templates(self):
dst = os.path.join(self.fixture_dir, self.release_list_file)
os.remove(dst)
dst = os.path.join(self.fixture_dir, self.version_1_file)
os.remove(dst)
def test_dag_structure(self):
"""Test that the OpenCitationsTelescope DAG has the correct structure.
:return: None
"""
dag = OpenCitationsTelescope().make_dag()
self.assert_dag_structure(
{
"check_dependencies": ["get_release_info"],
"get_release_info": ["download"],
"download": ["upload_downloaded"],
"upload_downloaded": ["extract"],
"extract": ["upload_transformed"],
"upload_transformed": ["bq_load"],
"bq_load": ["cleanup"],
"cleanup": [],
},
dag,
)
def test_dag_load(self):
"""Test that the OpenCitationsTelescope DAG can be loaded from a DAG bag.
:return: None
"""
with ObservatoryEnvironment().create():
dag_file = os.path.join(
module_file_path("academic_observatory_workflows.dags"), "open_citations_telescope.py"
)
self.assert_dag_load("open_citations", dag_file)
def test_telescope(self):
"""Test the OpenCitationsTelescope telescope end to end."""
# Setup Observatory environment
env = ObservatoryEnvironment(self.project_id, self.data_location)
dataset_id = env.add_dataset()
with env.create():
execution_date = pendulum.datetime(year=2018, month=11, day=12)
telescope = OpenCitationsTelescope(dataset_id=dataset_id)
dag = telescope.make_dag()
with env.create_dag_run(dag, execution_date):
server = HttpServer(directory=self.fixture_dir)
with patch.object(
OpenCitationsTelescope,
"VERSION_URL",
f"http://{server.host}:{server.port}/{self.release_list_file}",
):
download_url = f"http://{server.host}:{server.port}/data.csv.zip"
download_url2 = f"http://{server.host}:{server.port}/data2.csv.zip"
download_file_hash = "f06dfd0bee323a95861f0ba490e786c9"
download_file_hash2 = "6d90805d99b65b107b17907432aa8534"
release = OpenCitationsRelease(
telescope.dag_id,
release_date=pendulum.datetime(2018, 11, 13),
files=[
DownloadInfo(
url=download_url,
filename="data.csv.zip",
hash=download_file_hash,
hash_algorithm="md5",
),
DownloadInfo(
url=download_url2,
filename="data2.csv.zip",
hash=download_file_hash2,
hash_algorithm="md5",
),
],
)
self.create_templates(host=server.host, port=server.port)
with server.create():
# Check dependencies
ti = env.run_task(telescope.check_dependencies.__name__)
self.assertEqual(ti.state, State.SUCCESS)
# Get release info
ti = env.run_task(telescope.get_release_info.__name__)
self.assertEqual(ti.state, State.SUCCESS)
actual_release_info = ti.xcom_pull(
key=OpenCitationsTelescope.RELEASE_INFO,
task_ids=telescope.get_release_info.__name__,
include_prior_dates=False,
)
self.assertEqual(len(actual_release_info), 1)
self.assertEqual(actual_release_info[0]["date"], "20181113")
self.assertEqual(len(actual_release_info[0]["files"]), 2)
self.assertEqual(actual_release_info[0]["files"][0]["download_url"], download_url)
self.assertEqual(actual_release_info[0]["files"][1]["download_url"], download_url2)
# Download
ti = env.run_task(telescope.download.__name__)
self.assertEqual(ti.state, State.SUCCESS)
self.assertEqual(len(release.download_files), 2)
self.remove_templates()
# Upload downloaded
ti = env.run_task(telescope.upload_downloaded.__name__)
self.assertEqual(ti.state, State.SUCCESS)
self.assert_blob_integrity(
env.download_bucket, blob_name(release.download_files[0]), release.download_files[0]
)
# Extract
ti = env.run_task(telescope.extract.__name__)
self.assertEqual(ti.state, State.SUCCESS)
# Upload transformed
ti = env.run_task(telescope.upload_transformed.__name__)
self.assertEqual(ti.state, State.SUCCESS)
self.assert_blob_integrity(
env.transform_bucket, blob_name(release.transform_files[0]), release.transform_files[0]
)
# BQ load
ti = env.run_task(telescope.bq_load.__name__)
self.assertEqual(ti.state, State.SUCCESS)
table_id = (
f"{self.project_id}.{dataset_id}."
f"{bigquery_sharded_table_id(telescope.dag_id, release.release_date)}"
)
expected_rows = 4
self.assert_table_integrity(table_id, expected_rows)
sql = f"SELECT * from {self.project_id}.{dataset_id}.open_citations20181113"
with patch("observatory.platform.utils.gc_utils.bq_query_bytes_daily_limit_check"):
records = run_bigquery_query(sql)
self.assertEqual(
records[0]["oci"],
"020010100093631183015370109090737060203090304-020020102030636101310000103090309",
)
self.assertEqual(records[0]["citing"], "10.1109/viuf.1997.623934")
self.assertEqual(records[0]["cited"], "10.21236/ada013939")
self.assertEqual(records[0]["creation"], "1997")
self.assertEqual(records[0]["timespan"], "P22Y")
self.assertEqual(records[0]["journal_sc"], False)
self.assertEqual(records[0]["author_sc"], False)
self.assertEqual(
records[1]["oci"],
"02001010009363118353702000009370100-0200100010636280009020563020301025800025900000601036306",
)
self.assertEqual(records[1]["citing"], "10.1109/viz.2009.10")
self.assertEqual(records[1]["cited"], "10.1016/s0925-2312(02)00613-6")
self.assertEqual(records[1]["creation"], "2009-07")
self.assertEqual(records[1]["timespan"], "P6Y3M")
self.assertEqual(records[1]["journal_sc"], False)
self.assertEqual(records[1]["author_sc"], False)
self.assertEqual(records[2]["citing"], "10.1109/viuf.1997.623934")
self.assertEqual(records[2]["cited"], "10.21236/ada013939")
self.assertEqual(records[2]["creation"], "1997")
self.assertEqual(records[2]["timespan"], "P22Y")
self.assertEqual(records[2]["journal_sc"], False)
self.assertEqual(records[2]["author_sc"], False)
self.assertEqual(
records[3]["oci"],
"02001010009363118353702000009370100-0200100010636280009020563020301025800025900000601036306",
)
self.assertEqual(records[3]["citing"], "10.1109/viz.2009.10")
self.assertEqual(records[3]["cited"], "10.1016/s0925-2312(02)00613-6")
self.assertEqual(records[3]["creation"], "2009-07")
self.assertEqual(records[3]["timespan"], "P6Y3M")
self.assertEqual(records[3]["journal_sc"], False)
self.assertEqual(records[3]["author_sc"], False)
# Cleanup
download_folder, extract_folder, transform_folder = (
release.download_folder,
release.extract_folder,
release.transform_folder,
)
ti = env.run_task(telescope.cleanup.__name__)
self.assertEqual(ti.state, State.SUCCESS)
self.assert_cleanup(download_folder, extract_folder, transform_folder)
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,413
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/dags/mag_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: James Diprose
"""
A DAG that harvests the Microsoft Academic Graph (MAG) dataset: https://www.microsoft.com/en-us/research/project/microsoft-academic-graph/
Saved to the BigQuery tables:
<project_id>.mag.AffiliationsYYYYMMDD
<project_id>.mag.AuthorExtendedAttributesYYYYMMDD
<project_id>.mag.AuthorsYYYYMMDD
<project_id>.mag.ConferenceInstancesYYYYMMDD
<project_id>.mag.ConferenceSeriesYYYYMMDD
<project_id>.mag.EntityRelatedEntitiesYYYYMMDD
<project_id>.mag.FieldOfStudyChildrenYYYYMMDD
<project_id>.mag.FieldOfStudyExtendedAttributesYYYYMMDD
<project_id>.mag.FieldsOfStudyYYYYMMDD
<project_id>.mag.JournalsYYYYMMDD
<project_id>.mag.PaperAbstractsInvertedIndexYYYYMMDD
<project_id>.mag.PaperAuthorAffiliationsYYYYMMDD
<project_id>.mag.PaperCitationContextsYYYYMMDD
<project_id>.mag.PaperExtendedAttributesYYYYMMDD
<project_id>.mag.PaperFieldsOfStudyYYYYMMDD
<project_id>.mag.PaperRecommendationsYYYYMMDD
<project_id>.mag.PaperReferencesYYYYMMDD
<project_id>.mag.PaperResourcesYYYYMMDD
<project_id>.mag.PapersYYYYMMDD
<project_id>.mag.PaperUrlsYYYYMMDD
<project_id>.mag.RelatedFieldOfStudyYYYYMMDD
"""
import pendulum
from airflow import DAG
from airflow.operators.python_operator import PythonOperator, ShortCircuitOperator
from academic_observatory_workflows.workflows.mag_telescope import MagTelescope
default_args = {"owner": "airflow", "start_date": pendulum.datetime(2020, 7, 1)}
with DAG(dag_id=MagTelescope.DAG_ID, schedule_interval="@weekly", default_args=default_args, max_active_runs=1) as dag:
# Check that dependencies exist before starting
check = PythonOperator(
task_id=MagTelescope.TASK_ID_CHECK_DEPENDENCIES,
python_callable=MagTelescope.check_dependencies,
provide_context=True,
queue=MagTelescope.QUEUE,
)
# Transfer all MAG releases to Google Cloud storage that were processed in the given interval
transfer = PythonOperator(
task_id=MagTelescope.TASK_ID_TRANSFER,
python_callable=MagTelescope.transfer,
provide_context=True,
queue=MagTelescope.QUEUE,
)
# List releases and skip all subsequent tasks if there is no release to process
list_releases = ShortCircuitOperator(
task_id=MagTelescope.TASK_ID_LIST,
python_callable=MagTelescope.list_releases,
provide_context=True,
queue=MagTelescope.QUEUE,
)
# Download all MAG releases for a given interval
download = PythonOperator(
task_id=MagTelescope.TASK_ID_DOWNLOAD,
python_callable=MagTelescope.download,
provide_context=True,
queue=MagTelescope.QUEUE,
)
# Transform all MAG releases for a given interval
transform = PythonOperator(
task_id=MagTelescope.TASK_ID_TRANSFORM,
python_callable=MagTelescope.transform,
provide_context=True,
queue=MagTelescope.QUEUE,
)
# Upload all transformed MAG releases for a given interval to Google Cloud
upload_transformed = PythonOperator(
task_id=MagTelescope.TASK_ID_UPLOAD_TRANSFORMED,
python_callable=MagTelescope.upload_transformed,
provide_context=True,
queue=MagTelescope.QUEUE,
retries=MagTelescope.RETRIES,
)
# Load all MAG releases for a given interval to BigQuery
bq_load = PythonOperator(
task_id=MagTelescope.TASK_ID_BQ_LOAD,
python_callable=MagTelescope.bq_load,
provide_context=True,
queue=MagTelescope.QUEUE,
)
# Cleanup local files
cleanup = PythonOperator(
task_id=MagTelescope.TASK_ID_CLEANUP,
python_callable=MagTelescope.cleanup,
provide_context=True,
queue=MagTelescope.QUEUE,
)
check >> transfer >> list_releases >> download >> transform >> upload_transformed >> bq_load >> cleanup
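# Hedged illustration of the >> dependency chaining used above: in Airflow,
# a >> b is shorthand for a.set_downstream(b). The DummyOperator tasks below
# are hypothetical, purely to show the semantics.
import pendulum
from airflow import DAG
from airflow.operators.dummy_operator import DummyOperator

with DAG(dag_id="chain_sketch", start_date=pendulum.datetime(2020, 7, 1), schedule_interval="@weekly") as sketch_dag:
    a = DummyOperator(task_id="a")
    b = DummyOperator(task_id="b")
    a >> b  # equivalent to a.set_downstream(b)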
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,414
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/grid_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: James Diprose, Aniek Roelofs
from __future__ import annotations
import json
import logging
import os
import re
from shutil import copyfile
from typing import List
from zipfile import BadZipFile, ZipFile
import pendulum
from academic_observatory_workflows.config import schema_folder as default_schema_folder
from airflow.exceptions import AirflowException
from airflow.models.taskinstance import TaskInstance
from google.cloud.bigquery import SourceFormat
from observatory.platform.utils.airflow_utils import AirflowVars
from observatory.platform.utils.file_utils import list_to_jsonl_gz
from observatory.platform.utils.http_download import download_file
from observatory.platform.utils.url_utils import (
get_observatory_http_header,
retry_session,
)
from observatory.platform.workflows.snapshot_telescope import (
SnapshotRelease,
SnapshotTelescope,
)
class GridRelease(SnapshotRelease):
def __init__(self, dag_id: str, article_ids: List[str], release_date: pendulum.DateTime):
"""Construct a GridRelease.
:param article_ids: the titles of the Figshare articles.
:param release_date: the release date.
"""
self.article_ids = article_ids
download_files_regex = dag_id + r"\.[a-zA-Z]+"
extract_files_regex = "grid.json"
transform_files_regex = f"{dag_id}.jsonl.gz"
super().__init__(dag_id, release_date, download_files_regex, extract_files_regex, transform_files_regex)
@property
def transform_path(self) -> str:
return os.path.join(self.transform_folder, f"{self.dag_id}.jsonl.gz")
def download(self, timeout: float = 30.0) -> List[str]:
"""Downloads an individual GRID release from Figshare.
:param timeout: the timeout in seconds when calling the Figshare API.
:return: the paths on the system of the downloaded files.
"""
downloads = []
for article_id in self.article_ids:
response = retry_session().get(
GridTelescope.GRID_FILE_URL.format(article_id=article_id),
timeout=timeout,
headers={"Accept-encoding": "gzip"},
)
article_files = json.loads(response.text)
for i, article_file in enumerate(article_files):
real_file_name = article_file["name"]
supplied_md5 = article_file["supplied_md5"]
download_url = article_file["download_url"]
file_type = os.path.splitext(real_file_name)[1]
if file_type == ".csv":
continue
# Download
logging.info(f"Downloading file: {real_file_name}, md5: {supplied_md5}, url: {download_url}")
file_path = os.path.join(self.download_folder, f"{self.dag_id}{file_type}")
logging.info(f"Saving to {file_path}")
headers = get_observatory_http_header(package_name="academic_observatory_workflows")
download_file(
url=download_url, filename=file_path, hash=supplied_md5, hash_algorithm="md5", headers=headers
)
downloads.append(file_path)
return downloads
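# A minimal sketch, assuming retry_session() returns a requests.Session fitted
# with an HTTP retry adapter (the real helper lives in
# observatory.platform.utils.url_utils); illustrative only.
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

def retry_session_sketch(num_retries: int = 3) -> requests.Session:
    session = requests.Session()
    retry = Retry(total=num_retries, backoff_factor=0.5, status_forcelist=[429, 500, 502, 503, 504])
    adapter = HTTPAdapter(max_retries=retry)
    session.mount("https://", adapter)
    session.mount("http://", adapter)
    return session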
def extract(self) -> None:
"""Extract a single GRID release to a given extraction path. The release will be extracted into the following
directory structure: extraction_path/file_name (without extension).
If the release is a .zip file, it will be extracted, otherwise it will be copied to a directory within the
extraction path.
:return: None.
"""
logging.info(f"Download files {self.download_files}")
# Extract files
for file_path in self.download_files:
# Extract zip files
if file_path.endswith(".zip"):
unzip_folder_path = self.extract_folder
logging.info(f"Extracting file: {file_path}")
try:
with ZipFile(file_path) as zip_file:
zip_file.extractall(unzip_folder_path)
except BadZipFile:
logging.error("Not a zip file")
logging.info(f"File extracted to: {unzip_folder_path}")
else:
# File is already uncompressed (.json or .csv), so make a directory and copy it into it
output_file_path = os.path.join(self.extract_folder, os.path.basename(file_path))
copyfile(file_path, output_file_path)
logging.info(f"File saved to: {output_file_path}")
def transform(self) -> str:
"""Transform an extracted GRID release .json file into json lines format and gzip the result.
:return: the GRID version.
"""
extract_files = self.extract_files
# Only process one JSON file
if len(extract_files) == 1:
release_json_file = extract_files[0]
logging.info(f"Transforming file: {release_json_file}")
else:
raise AirflowException(f"{len(extract_files)} extracted grid.json files found: {extract_files}")
with open(release_json_file) as json_file:
# Load GRID release JSON file
data = json.load(json_file)
version = data["version"]
institutes = data["institutes"]
# Transform GRID release into JSON Lines format saving in memory buffer
# Save in memory buffer to gzipped file
list_to_jsonl_gz(self.transform_path, institutes)
return version
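# Hypothetical sketch of list_to_jsonl_gz as relied on by transform() above:
# write each institute dict as one JSON line into a gzipped file. The real
# helper is observatory.platform.utils.file_utils.list_to_jsonl_gz.
import gzip
import json
from typing import Dict, List

def list_to_jsonl_gz_sketch(file_path: str, records: List[Dict]) -> None:
    with gzip.open(file_path, "wt", encoding="utf-8") as f:
        for record in records:
            f.write(json.dumps(record) + "\n")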
def list_grid_records(
start_date: pendulum.DateTime, end_date: pendulum.DateTime, grid_dataset_url: str, timeout: float = 30.0
) -> List[dict]:
"""List all GRID records available on Figshare between two dates.
:param start_date: the start of the publication date range (inclusive).
:param end_date: the end of the publication date range (exclusive).
:param grid_dataset_url: the Figshare API URL that lists the GRID dataset articles.
:param timeout: the number of seconds to wait until timing out.
:return: the list of GRID releases with required variables stored as a dictionary.
"""
response = retry_session().get(grid_dataset_url, timeout=timeout, headers={"Accept-encoding": "gzip"})
response_json = json.loads(response.text)
records: List[dict] = []
release_articles = {}
for item in response_json:
published_date: pendulum.DateTime = pendulum.parse(item["published_date"])
if start_date <= published_date < end_date:
article_id = item["id"]
title = item["title"]
# Parse date:
# The publish date is not used as the release date because the dataset is often
# published after the release date
date_matches = re.search(r"([0-9]{4}\-[0-9]{2}\-[0-9]{2})", title)
if date_matches is None:
raise ValueError(f"No release date found in GRID title: {title}")
release_date = date_matches[0]
try:
release_articles[release_date].append(article_id)
except KeyError:
release_articles[release_date] = [article_id]
for release_date in release_articles:
article_ids = release_articles[release_date]
records.append({"article_ids": article_ids, "release_date": release_date})
return records
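# Worked example (hypothetical titles) of the release-date grouping performed by
# list_grid_records: the date parsed from each article title becomes the key, so
# multiple Figshare articles published for the same release are grouped together.
import re

titles = {101: "GRID release 2020-03-15", 102: "GRID release 2020-03-15 supplement"}
release_articles = {}
for article_id, title in titles.items():
    release_date = re.search(r"([0-9]{4}\-[0-9]{2}\-[0-9]{2})", title)[0]
    release_articles.setdefault(release_date, []).append(article_id)
assert release_articles == {"2020-03-15": [101, 102]}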
class GridTelescope(SnapshotTelescope):
"""
The Global Research Identifier Database (GRID): https://grid.ac/
Saved to the BigQuery table: <project_id>.digital_science.gridYYYYMMDD
"""
DAG_ID = "grid"
DATASET_ID = "digital_science"
GRID_FILE_URL = "https://api.figshare.com/v2/articles/{article_id}/files"
GRID_DATASET_URL = "https://api.figshare.com/v2/collections/3812929/articles?page_size=1000"
def __init__(
self,
dag_id: str = DAG_ID,
start_date: pendulum.DateTime = pendulum.datetime(2015, 9, 1),
schedule_interval: str = "@weekly",
dataset_id: str = DATASET_ID,
schema_folder: str = default_schema_folder(),
source_format: str = SourceFormat.NEWLINE_DELIMITED_JSON,
dataset_description: str = "Datasets provided by Digital Science: https://www.digital-science.com/",
catchup: bool = True,
airflow_vars: List = None,
):
"""Construct a GridTelescope instance.
:param dag_id: the id of the DAG.
:param start_date: the start date of the DAG.
:param schedule_interval: the schedule interval of the DAG.
:param dataset_id: the BigQuery dataset id.
:param schema_folder: the SQL schema path.
:param source_format: the format of the data to load into BigQuery.
:param dataset_description: description for the BigQuery dataset.
:param catchup: whether to catchup the DAG or not.
:param airflow_vars: list of airflow variable keys, for each variable it is checked if it exists in airflow
"""
if airflow_vars is None:
airflow_vars = [
AirflowVars.DATA_PATH,
AirflowVars.PROJECT_ID,
AirflowVars.DATA_LOCATION,
AirflowVars.DOWNLOAD_BUCKET,
AirflowVars.TRANSFORM_BUCKET,
]
super().__init__(
dag_id,
start_date,
schedule_interval,
dataset_id,
schema_folder,
source_format=source_format,
dataset_description=dataset_description,
catchup=catchup,
airflow_vars=airflow_vars,
)
self.add_setup_task_chain([self.check_dependencies, self.list_releases])
self.add_task_chain(
[
self.download,
self.upload_downloaded,
self.extract,
self.transform,
self.upload_transformed,
self.bq_load,
self.cleanup,
]
)
def make_release(self, **kwargs) -> List[GridRelease]:
"""Make release instances. The release is passed as an argument to the function (TelescopeFunction) that is
called in 'task_callable'.
:param kwargs: the context passed from the PythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html for a list of the keyword arguments that are
passed to this argument.
:return: A list of grid release instances
"""
ti: TaskInstance = kwargs["ti"]
records = ti.xcom_pull(
key=GridTelescope.RELEASE_INFO, task_ids=self.list_releases.__name__, include_prior_dates=False
)
releases = []
for record in records:
article_ids = record["article_ids"]
release_date = record["release_date"]
releases.append(GridRelease(self.dag_id, article_ids, pendulum.parse(release_date)))
return releases
def list_releases(self, **kwargs):
"""Lists all GRID releases for a given month and publishes their article_id's and
release_date's as an XCom.
:param kwargs: the context passed from the BranchPythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html
for a list of the keyword arguments that are passed to this argument.
:return: the identifier of the task to execute next.
"""
execution_date = kwargs["execution_date"]
next_execution_date = kwargs["next_execution_date"]
records = list_grid_records(execution_date, next_execution_date, GridTelescope.GRID_DATASET_URL)
continue_dag = len(records)
if continue_dag:
# Push messages
ti: TaskInstance = kwargs["ti"]
ti.xcom_push(GridTelescope.RELEASE_INFO, records, execution_date)
return continue_dag
def download(self, releases: List[GridRelease], **kwargs):
"""Task to download the GRID releases for a given month.
:param releases: a list of GRID releases.
:return: None.
"""
# Download each release
for release in releases:
release.download()
def extract(self, releases: List[GridRelease], **kwargs):
"""Task to extract the GRID releases for a given month.
:param releases: a list of GRID releases.
:return: None.
"""
# Extract each release
for release in releases:
release.extract()
def transform(self, releases: List[GridRelease], **kwargs):
"""Task to transform the GRID releases for a given month.
:param releases: a list of GRID releases.
:return: None.
"""
# Transform each release
for release in releases:
release.transform()
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,415
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/oa_web_workflow.py
|
# Copyright 2021 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: James Diprose, Aniek Roelofs
from __future__ import annotations
import dataclasses
import datetime
import json
import logging
import math
import os
import os.path
import shutil
import urllib.parse
from concurrent.futures import ThreadPoolExecutor, as_completed
from dataclasses import field
from operator import itemgetter
from typing import Dict, List, Optional, Tuple, Union
from urllib.parse import urlparse
import google.cloud.bigquery as bigquery
import jsonlines
import nltk
import pandas as pd
import pendulum
import pyarrow as pa
import pyarrow.parquet as pq
import requests
from airflow.exceptions import AirflowException
from airflow.models.variable import Variable
from airflow.sensors.external_task import ExternalTaskSensor
from pyarrow import json as pa_json
from academic_observatory_workflows.clearbit import clearbit_download_logo
from observatory.platform.utils.airflow_utils import AirflowVars, get_airflow_connection_password
from observatory.platform.utils.config_utils import module_file_path
from observatory.platform.utils.file_utils import load_jsonl
from observatory.platform.utils.gc_utils import (
bigquery_sharded_table_id,
download_blobs_from_cloud_storage,
select_table_shard_dates,
upload_file_to_cloud_storage,
)
from observatory.platform.utils.workflow_utils import make_release_date
from observatory.platform.workflows.snapshot_telescope import SnapshotRelease
from observatory.platform.workflows.workflow import Workflow
# The minimum number of outputs before including an entity in the analysis
INCLUSION_THRESHOLD = 1000
# The query that pulls data to be included in the dashboards
QUERY = """
SELECT
agg.id,
agg.name,
agg.time_period as year,
DATE(agg.time_period, 12, 31) as date,
(SELECT * from ror.links LIMIT 1) AS url,
COALESCE(ror.wikipedia_url, country.wikipedia_url) as wikipedia_url,
country.alpha2 as alpha2,
agg.country as country,
agg.subregion as subregion,
agg.region as region,
ror.types AS institution_types,
agg.total_outputs as n_outputs,
agg.access_types.oa.total_outputs AS n_outputs_open,
agg.citations.mag.total_citations as n_citations,
agg.access_types.publisher.total_outputs AS n_outputs_publisher_open,
agg.access_types.green.total_outputs AS n_outputs_other_platform_open,
agg.access_types.green_only.total_outputs AS n_outputs_other_platform_open_only,
agg.access_types.gold_doaj.total_outputs AS n_outputs_oa_journal,
agg.access_types.hybrid.total_outputs AS n_outputs_hybrid,
agg.access_types.bronze.total_outputs AS n_outputs_no_guarantees,
ror.external_ids AS identifiers
FROM
`{project_id}.{agg_dataset_id}.{agg_table_id}` as agg
LEFT OUTER JOIN `{project_id}.{ror_dataset_id}.{ror_table_id}` as ror ON agg.id = ror.id
LEFT OUTER JOIN `{project_id}.{settings_dataset_id}.{country_table_id}` as country ON agg.id = country.alpha3
WHERE agg.time_period >= 2000 AND agg.time_period <= (EXTRACT(YEAR FROM CURRENT_DATE()) - 1)
ORDER BY year DESC, name ASC
"""
@dataclasses.dataclass
class PublicationStats:
# Number fields
n_citations: int = None
n_outputs: int = None
n_outputs_open: int = None
n_outputs_publisher_open: int = None
n_outputs_publisher_open_only: int = None
n_outputs_both: int = None
n_outputs_other_platform_open: int = None
n_outputs_other_platform_open_only: int = None
n_outputs_closed: int = None
n_outputs_oa_journal: int = None
n_outputs_hybrid: int = None
n_outputs_no_guarantees: int = None
# Percentage fields
p_outputs_open: int = None
p_outputs_publisher_open: int = None
p_outputs_publisher_open_only: int = None
p_outputs_both: int = None
p_outputs_other_platform_open: int = None
p_outputs_other_platform_open_only: int = None
p_outputs_closed: int = None
p_outputs_oa_journal: int = None
p_outputs_hybrid: int = None
p_outputs_no_guarantees: int = None
@staticmethod
def from_dict(dict_: Dict) -> PublicationStats:
n_citations = dict_.get("n_citations")
n_outputs = dict_.get("n_outputs")
n_outputs_open = dict_.get("n_outputs_open")
n_outputs_publisher_open = dict_.get("n_outputs_publisher_open")
n_outputs_publisher_open_only = dict_.get("n_outputs_publisher_open_only")
n_outputs_both = dict_.get("n_outputs_both")
n_outputs_other_platform_open = dict_.get("n_outputs_other_platform_open")
n_outputs_other_platform_open_only = dict_.get("n_outputs_other_platform_open_only")
n_outputs_closed = dict_.get("n_outputs_closed")
n_outputs_oa_journal = dict_.get("n_outputs_oa_journal")
n_outputs_hybrid = dict_.get("n_outputs_hybrid")
n_outputs_no_guarantees = dict_.get("n_outputs_no_guarantees")
p_outputs_open = dict_.get("p_outputs_open")
p_outputs_publisher_open = dict_.get("p_outputs_publisher_open")
p_outputs_publisher_open_only = dict_.get("p_outputs_publisher_open_only")
p_outputs_both = dict_.get("p_outputs_both")
p_outputs_other_platform_open = dict_.get("p_outputs_other_platform_open")
p_outputs_other_platform_open_only = dict_.get("p_outputs_other_platform_open_only")
p_outputs_closed = dict_.get("p_outputs_closed")
p_outputs_oa_journal = dict_.get("p_outputs_oa_journal")
p_outputs_hybrid = dict_.get("p_outputs_hybrid")
p_outputs_no_guarantees = dict_.get("p_outputs_no_guarantees")
return PublicationStats(
n_citations=n_citations,
n_outputs=n_outputs,
n_outputs_open=n_outputs_open,
n_outputs_publisher_open=n_outputs_publisher_open,
n_outputs_publisher_open_only=n_outputs_publisher_open_only,
n_outputs_both=n_outputs_both,
n_outputs_other_platform_open=n_outputs_other_platform_open,
n_outputs_other_platform_open_only=n_outputs_other_platform_open_only,
n_outputs_closed=n_outputs_closed,
n_outputs_oa_journal=n_outputs_oa_journal,
n_outputs_hybrid=n_outputs_hybrid,
n_outputs_no_guarantees=n_outputs_no_guarantees,
p_outputs_open=p_outputs_open,
p_outputs_publisher_open=p_outputs_publisher_open,
p_outputs_publisher_open_only=p_outputs_publisher_open_only,
p_outputs_both=p_outputs_both,
p_outputs_other_platform_open=p_outputs_other_platform_open,
p_outputs_other_platform_open_only=p_outputs_other_platform_open_only,
p_outputs_closed=p_outputs_closed,
p_outputs_oa_journal=p_outputs_oa_journal,
p_outputs_hybrid=p_outputs_hybrid,
p_outputs_no_guarantees=p_outputs_no_guarantees,
)
def to_dict(self) -> Dict:
return {
"n_citations": self.n_citations,
"n_outputs": self.n_outputs,
"n_outputs_open": self.n_outputs_open,
"n_outputs_publisher_open": self.n_outputs_publisher_open,
"n_outputs_publisher_open_only": self.n_outputs_publisher_open_only,
"n_outputs_both": self.n_outputs_both,
"n_outputs_other_platform_open": self.n_outputs_other_platform_open,
"n_outputs_other_platform_open_only": self.n_outputs_other_platform_open_only,
"n_outputs_closed": self.n_outputs_closed,
"n_outputs_oa_journal": self.n_outputs_oa_journal,
"n_outputs_hybrid": self.n_outputs_hybrid,
"n_outputs_no_guarantees": self.n_outputs_no_guarantees,
"p_outputs_open": self.p_outputs_open,
"p_outputs_publisher_open": self.p_outputs_publisher_open,
"p_outputs_publisher_open_only": self.p_outputs_publisher_open_only,
"p_outputs_both": self.p_outputs_both,
"p_outputs_other_platform_open": self.p_outputs_other_platform_open,
"p_outputs_other_platform_open_only": self.p_outputs_other_platform_open_only,
"p_outputs_closed": self.p_outputs_closed,
"p_outputs_oa_journal": self.p_outputs_oa_journal,
"p_outputs_hybrid": self.p_outputs_hybrid,
"p_outputs_no_guarantees": self.p_outputs_no_guarantees,
}
def split_largest_remainder(sample_size: int, *ratios) -> Tuple:
"""Split a sample size into different groups based on a list of ratios (that add to 1.0) using the largest
remainder method: https://en.wikipedia.org/wiki/Largest_remainder_method.
Copyright 2021 James Diprose
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
:param sample_size: the absolute sample size.
:param ratios: the list of ratios, must add to 1.0.
:return: the absolute numbers of each group.
"""
assert math.isclose(sum(ratios), 1), "ratios must sum to 1.0"
sizes = [sample_size * ratio for ratio in ratios]
sizes_whole = [math.floor(size) for size in sizes]
while (sample_size - sum(sizes_whole)) > 0:
remainders = [size % 1 for size in sizes]
max_index = max(enumerate(remainders), key=itemgetter(1))[0]
sizes_whole[max_index] = sizes_whole[max_index] + 1
sizes[max_index] = sizes_whole[max_index]
return tuple(sizes_whole)
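# A worked example of the largest remainder method above (illustrative, not part of the
# original module): the floors of 100 * (0.405, 0.405, 0.19) are (40, 40, 19), which sum
# to 99, so the one leftover unit goes to the first of the two largest remainders (0.5):
#
#   >>> split_largest_remainder(100, 0.405, 0.405, 0.19)
#   (41, 40, 19)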
@dataclasses.dataclass
class Subject:
name: str
n_outputs: float
def to_dict(self) -> Dict:
return {"name": self.name, "n_outputs": self.n_outputs}
@dataclasses.dataclass
class Collaborator:
name: str
n_outputs: float
def to_dict(self) -> Dict:
return {"name": self.name, "n_outputs": self.n_outputs}
@dataclasses.dataclass
class Identifier:
id: str
type: str
url: str
@staticmethod
def from_dict(dict_: Dict):
i = dict_["id"]
t = dict_["type"]
u = dict_["url"]
return Identifier(i, t, u)
def to_dict(self) -> Dict:
return {"id": self.id, "type": self.type, "url": self.url}
@dataclasses.dataclass
class Year:
year: int
date: datetime.datetime
stats: PublicationStats
def to_dict(self) -> Dict:
return {"year": self.year, "date": self.date.strftime("%Y-%m-%d"), "stats": self.stats.to_dict()}
@dataclasses.dataclass
class Stats:
min_year: int
max_year: int
last_updated: str
def to_dict(self) -> Dict:
return {
"min_year": self.min_year,
"max_year": self.max_year,
"last_updated": self.last_updated,
}
def save_json(path: str, data: Union[Dict, List]):
"""Save data to JSON.
:param path: the output path.
:param data: the data to save.
:return: None.
"""
with open(path, mode="w") as f:
json.dump(data, f, separators=(",", ":"))
def val_empty(val):
    """Return whether a value is empty: an empty list, an empty string or None.
    :param val: the value to check.
    :return: whether the value is empty.
    """
    if isinstance(val, list):
        return len(val) == 0
    else:
        return val is None or val == ""
def clean_ror_id(ror_id: str):
"""Remove the https://ror.org/ prefix from a ROR id.
:param ror_id: original ROR id.
:return: cleaned ROR id.
"""
return ror_id.replace("https://ror.org/", "")
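# For example (illustrative, with a hypothetical ROR id, not part of the original module):
#
#   >>> clean_ror_id("https://ror.org/02n415q13")
#   '02n415q13'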
@dataclasses.dataclass
class Description:
text: str
url: str
license: str = (
"https://en.wikipedia.org/wiki/Wikipedia:Text_of_Creative_Commons_Attribution-ShareAlike_3.0_Unported_License"
)
@staticmethod
def from_dict(dict_: Dict) -> Description:
text = dict_.get("description")
url = dict_.get("wikipedia_url")
return Description(text, url)
def to_dict(self) -> Dict:
return {"text": self.text, "license": self.license, "url": self.url}
def trigger_repository_dispatch(*, token: str, event_type: str):
"""Trigger a Github repository dispatch event.
:param event_type: the event type
:param token: the Github token.
:return: the response.
"""
headers = {
"Accept": "application/vnd.github.v3+json",
"Authorization": f"token {token}",
}
data = {"event_type": event_type}
return requests.post(
"https://api.github.com/repos/The-Academic-Observatory/coki-oa-web/dispatches",
headers=headers,
data=json.dumps(data),
)
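# A usage sketch (illustrative, not part of the original module); GITHUB_TOKEN is a
# hypothetical placeholder for a token with permission to dispatch events on the repository:
#
#   response = trigger_repository_dispatch(token=GITHUB_TOKEN, event_type="data-update/develop")
#   assert response.status_code == 204  # GitHub returns 204 No Content on success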
@dataclasses.dataclass
class Entity:
id: str
name: str
description: Description
category: str = None
logo_s: str = None
logo_l: str = None
url: str = None
wikipedia_url: str = None
country: Optional[str] = None
subregion: str = None
region: str = None
min_year: int = None
max_year: int = None
    institution_types: List[str] = field(default_factory=lambda: [])
stats: PublicationStats = None
identifiers: List[Identifier] = field(default_factory=lambda: [])
collaborators: List[Collaborator] = field(default_factory=lambda: []) # todo
subjects: List[Subject] = field(default_factory=lambda: []) # todo
other_platform_locations: List[str] = field(default_factory=lambda: []) # todo
timeseries: List[Year] = field(default_factory=lambda: [])
@staticmethod
def from_dict(dict_: Dict) -> Entity:
id = dict_.get("id")
name = dict_.get("name")
wikipedia_url = dict_.get("wikipedia_url")
description = Description.from_dict(dict_)
category = dict_.get("category")
logo_s = dict_.get("logo_s")
logo_l = dict_.get("logo_l")
url = dict_.get("url")
country = dict_.get("country")
subregion = dict_.get("subregion")
region = dict_.get("region")
min_year = dict_.get("min_year")
max_year = dict_.get("max_year")
institution_types = dict_.get("institution_types", [])
identifiers = [Identifier.from_dict(obj) for obj in dict_.get("identifiers", [])]
return Entity(
id,
name,
description=description,
category=category,
logo_s=logo_s,
logo_l=logo_l,
url=url,
wikipedia_url=wikipedia_url,
country=country,
subregion=subregion,
region=region,
min_year=min_year,
max_year=max_year,
institution_types=institution_types,
identifiers=identifiers,
)
def to_dict(self) -> Dict:
dict_ = {
"id": self.id,
"name": self.name,
"description": self.description.to_dict(),
"category": self.category,
"logo_s": self.logo_s,
"logo_l": self.logo_l,
"url": self.url,
"wikipedia_url": self.wikipedia_url,
"region": self.region,
"subregion": self.subregion,
"country": self.country,
"institution_types": self.institution_types,
"min_year": self.min_year,
"max_year": self.max_year,
"stats": self.stats.to_dict(),
"identifiers": [obj.to_dict() for obj in self.identifiers],
"collaborators": [obj.to_dict() for obj in self.collaborators],
"subjects": [obj.to_dict() for obj in self.subjects],
"other_platform_locations": self.other_platform_locations,
"timeseries": [obj.to_dict() for obj in self.timeseries],
}
# Filter out key val pairs with empty lists and values
dict_ = {k: v for k, v in dict_.items() if not val_empty(v)}
return dict_
def get_institution_logo(ror_id: str, url: str, size: str, width: int, fmt: str, build_path) -> Tuple[str, str]:
"""Get the path to the logo for an institution.
    If the logo does not exist in the build path yet, it is downloaded from the Clearbit Logo API.
    If the logo does not exist and cannot be downloaded, the path defaults to "/unknown.svg".
    :param ror_id: the institution's ROR id.
    :param url: the URL of the institution's website domain, e.g. spotify.com.
    :param size: the image size of the small logo for tables etc.
    :param width: the width of the image.
    :param fmt: the image format.
    :param build_path: the build path for files of this workflow.
    :return: the ROR id and the relative path (from the build path) to the logo.
"""
    logo_path = "/unknown.svg"
file_path = os.path.join(build_path, "logos", "institution", size, f"{ror_id}.{fmt}")
if not os.path.isfile(file_path):
clearbit_download_logo(company_url=url, file_path=file_path, size=width, fmt=fmt)
if os.path.isfile(file_path):
logo_path = make_logo_url(category="institution", entity_id=ror_id, size=size, fmt=fmt)
return ror_id, logo_path
def get_wiki_descriptions(titles: Dict[str, str]) -> List[Tuple[str, str]]:
"""Get the wikipedia descriptions for the given titles.
    :param titles: a Dict with titles as keys and ids (either a ror_id or an alpha3 country code) as values.
:return: List with tuples (id, wiki description)
"""
titles_arg = []
for title, entity_id in titles.items():
        # URL encode the title if it is not encoded yet
        if title == urllib.parse.unquote(title):
            titles_arg.append(urllib.parse.quote(title))
        # Append the title directly if it is already encoded
        else:
            titles_arg.append(title)
# Confirm that there is a max of 20 titles, the limit for the wikipedia API
assert len(titles_arg) <= 20
# Extract descriptions using the Wikipedia API
url = f"https://en.wikipedia.org/w/api.php?action=query&format=json&prop=extracts&titles={'%7C'.join(titles_arg)}&redirects=1&exintro=1&explaintext=1"
response = requests.get(url)
if response.status_code != 200:
        raise AirflowException(f"Failed to retrieve Wikipedia extracts, url: {url}")
response_json = response.json()
pages = response_json["query"]["pages"]
# Create mapping between redirected/normalized page title and original page title
redirects = {}
for title in response_json["query"].get("redirects", []):
redirects[title["to"]] = title["from"]
normalized = {}
for title in response_json["query"].get("normalized", []):
normalized[title["to"]] = title["from"]
# Create mapping between entity_id and decoded page title.
decoded_titles = {urllib.parse.unquote(k): v for k, v in titles.items()}
descriptions = []
for page_id, page in pages.items():
page_title = page["title"]
# Get page_title from redirected/normalized if it is present
page_title = redirects.get(page_title, page_title)
page_title = normalized.get(page_title, page_title)
# Link original title to description
entity_id = decoded_titles[urllib.parse.unquote(page_title)]
# Get description and clean up
description = page.get("extract", "")
if description:
description = remove_text_between_brackets(description)
description = shorten_text_full_sentences(description)
descriptions.append((entity_id, description))
return descriptions
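# A sketch of the expected input and output shapes (illustrative, hypothetical values,
# not part of the original module):
#
#   titles = {"Curtin%20University": "05ykr0121"}  # URL encoded title -> entity id
#   descriptions = get_wiki_descriptions(titles)  # -> [("05ykr0121", "Curtin University is ...")]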
def remove_text_between_brackets(text: str) -> str:
"""Remove any text between (nested) brackets.
    If there is a space before the opening bracket, it is removed as well.
E.g. 'Like this (foo, (bar)) example' -> 'Like this example'
:param text: The text to modify
:return: The modified text
"""
new_text = []
nested = 0
for char in text:
if char == "(":
nested += 1
            new_text = new_text[:-1] if new_text and new_text[-1] == " " else new_text
elif (char == ")") and nested:
nested -= 1
elif nested == 0:
new_text.append(char)
return "".join(new_text)
def shorten_text_full_sentences(text: str, *, char_limit: int = 300) -> str:
"""Shorten a text to as many complete sentences as possible, while the total number of characters stays below
the char_limit.
Always return at least one sentence, even if this exceeds the char_limit.
:param text: A string with the complete text
:param char_limit: The max number of characters
:return: The shortened text.
"""
# Create list of sentences
sentences = nltk.tokenize.sent_tokenize(text)
# Add sentences until char limit is reached
sentences_output = []
total_len = 0
for sentence in sentences:
total_len += len(sentence)
if (total_len > char_limit) and sentences_output:
break
sentences_output.append(sentence)
return " ".join(sentences_output)
def bq_query_to_gcs(*, query: str, project_id: str, destination_uri: str, location: str = "us") -> bool:
"""Run a BigQuery query and save the results on Google Cloud Storage.
:param query: the query string.
:param project_id: the Google Cloud project id.
:param destination_uri: the Google Cloud Storage destination uri.
:param location: the BigQuery dataset location.
    :return: whether the query and extract jobs both completed successfully.
"""
client = bigquery.Client()
# Run query
query_job: bigquery.QueryJob = client.query(query, location=location)
query_job.result()
# Create and run extraction job
source_table_id = f"{project_id}.{query_job.destination.dataset_id}.{query_job.destination.table_id}"
extract_job_config = bigquery.ExtractJobConfig()
extract_job_config.destination_format = bigquery.DestinationFormat.NEWLINE_DELIMITED_JSON
extract_job: bigquery.ExtractJob = client.extract_table(
source_table_id, destination_uri, job_config=extract_job_config, location=location
)
extract_job.result()
return query_job.state == "DONE" and extract_job.state == "DONE"
def clean_url(url: str) -> str:
"""Remove path and query from URL.
:param url: the url.
:return: the cleaned url.
"""
p = urlparse(url)
return f"{p.scheme}://{p.netloc}/"
def save_as_jsonl(output_path: str, iterable: List[Dict]):
    """Save a list of dictionaries to a JSON Lines file.
    :param output_path: the output path.
    :param iterable: the list of dictionaries to save.
    :return: None.
    """
    with open(output_path, "w") as f:
        with jsonlines.Writer(f) as writer:
            writer.write_all(iterable)
def jsonl_to_pyarrow(jsonl_path: str, output_path: str):
    """Read a JSON Lines file with PyArrow and save it as a Parquet file.
    :param jsonl_path: the path to the input JSON Lines file.
    :param output_path: the path to the output Parquet file.
    :return: None.
    """
    table = pa_json.read_json(jsonl_path)
    pq.write_table(table, output_path)
def make_logo_url(*, category: str, entity_id: str, size: str, fmt: str) -> str:
    """Make the relative URL to a logo file, e.g. "/logos/country/s/ALB.svg".
    :param category: the category, i.e. country or institution.
    :param entity_id: the entity id.
    :param size: the logo size, e.g. s or l.
    :param fmt: the image format.
    :return: the relative URL to the logo.
    """
    return f"/logos/{category}/{size}/{entity_id}.{fmt}"
def calc_oa_stats(
    n_outputs: int,
    n_outputs_open: int,
    n_outputs_publisher_open: int,
    n_outputs_other_platform_open: int,
    n_outputs_other_platform_open_only: int,
):
    """Calculate the derived open access counts that are not present in the source data.
    :param n_outputs: the total number of outputs.
    :param n_outputs_open: the number of open outputs.
    :param n_outputs_publisher_open: the number of publisher open outputs.
    :param n_outputs_other_platform_open: the number of other platform open outputs.
    :param n_outputs_other_platform_open_only: the number of outputs that are only other platform open.
    :return: the number of publisher open only, both publisher and other platform open, and closed outputs.
    """
# Closed
n_outputs_closed = n_outputs - n_outputs_open
# Both
n_outputs_both = n_outputs_other_platform_open - n_outputs_other_platform_open_only
# Publisher open only
n_outputs_publisher_open_only = n_outputs_publisher_open - n_outputs_both
return n_outputs_publisher_open_only, n_outputs_both, n_outputs_closed
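# A worked example of the identities above (illustrative numbers, not part of the original
# module): with n_outputs=100, n_outputs_open=60, n_outputs_publisher_open=50,
# n_outputs_other_platform_open=30 and n_outputs_other_platform_open_only=10, then
# closed = 100 - 60 = 40, both = 30 - 10 = 20 and publisher open only = 50 - 20 = 30;
# the four disjoint groups (30, 20, 10, 40) sum back to the 100 total outputs.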
class OaWebRelease(SnapshotRelease):
PERCENTAGE_FIELD_KEYS = [
("outputs_open", "n_outputs"),
("outputs_both", "n_outputs"),
("outputs_closed", "n_outputs"),
("outputs_publisher_open", "n_outputs"),
("outputs_publisher_open_only", "n_outputs"),
("outputs_other_platform_open", "n_outputs"),
("outputs_other_platform_open_only", "n_outputs"),
("outputs_oa_journal", "n_outputs_publisher_open"),
("outputs_hybrid", "n_outputs_publisher_open"),
("outputs_no_guarantees", "n_outputs_publisher_open"),
]
def __init__(
self,
*,
dag_id: str,
project_id: str,
release_date: pendulum.DateTime,
data_bucket_name: str,
change_chart_years: int = 10,
agg_dataset_id: str = "observatory",
ror_dataset_id: str = "ror",
):
"""Create an OaWebRelease instance.
:param dag_id: the dag id.
:param project_id: the Google Cloud project id.
        :param release_date: the release date.
        :param data_bucket_name: the name of the Google Cloud Storage bucket where the dataset is published.
:param change_chart_years: the number of years to include in the change charts.
:param agg_dataset_id: the dataset to use for aggregation.
:param ror_dataset_id: the ROR dataset id.
"""
super().__init__(dag_id=dag_id, release_date=release_date)
self.project_id = project_id
self.data_bucket_name = data_bucket_name
self.change_chart_years = change_chart_years
self.agg_dataset_id = agg_dataset_id
self.ror_dataset_id = ror_dataset_id
self.data_path = module_file_path("academic_observatory_workflows.workflows.data.oa_web_workflow")
@property
def build_path(self):
return os.path.join(self.transform_folder, "build")
def load_data(self, category: str) -> pd.DataFrame:
"""Load the data file for a given category.
:param category: the category, i.e. country or institution.
:return: the Pandas Dataframe.
"""
path = os.path.join(self.download_folder, f"{category}.jsonl")
data = load_jsonl(path)
return pd.DataFrame(data)
def preprocess_df(self, category: str, df: pd.DataFrame) -> pd.DataFrame:
"""Pre-process the data frame.
:param category: the category.
:param df: the dataframe.
:return: the Pandas Dataframe.
"""
# Convert data types
df = df.copy(deep=True)
df["date"] = pd.to_datetime(df["date"])
df.fillna("", inplace=True)
for column in df.columns:
if column.startswith("n_"):
df[column] = pd.to_numeric(df[column])
# Create missing fields
publisher_open_only = []
both = []
closed = []
for i, row in df.iterrows():
n_outputs = row["n_outputs"]
n_outputs_open = row["n_outputs_open"]
n_outputs_publisher_open = row["n_outputs_publisher_open"]
n_outputs_other_platform_open = row["n_outputs_other_platform_open"]
n_outputs_other_platform_open_only = row["n_outputs_other_platform_open_only"]
n_outputs_publisher_open_only, n_outputs_both, n_outputs_closed = calc_oa_stats(
n_outputs,
n_outputs_open,
n_outputs_publisher_open,
n_outputs_other_platform_open,
n_outputs_other_platform_open_only,
)
# Add to arrays
publisher_open_only.append(n_outputs_publisher_open_only)
both.append(n_outputs_both)
closed.append(n_outputs_closed)
df["n_outputs_publisher_open_only"] = publisher_open_only
df["n_outputs_both"] = both
df["n_outputs_closed"] = closed
        # Institution specific processing
        if category == "institution":
            # Remove columns not used for institutions
            df.drop(columns=["alpha2"], inplace=True, errors="ignore")
            # Clean ROR ids
            df["id"] = df["id"].apply(clean_ror_id)
# Parse identifiers
preferred_key = "preferred"
identifiers = []
for i, row in df.iterrows():
# Parse identifier for each entry
ent_ids = []
ids_dict = row["identifiers"]
# Add ROR id
ror_id = row["id"]
ent_ids.append({"id": ror_id, "type": "ROR", "url": f"https://ror.org/{ror_id}"})
# Parse other ids
for k, v in ids_dict.items():
url = None
id_type = k
if id_type != "OrgRef":
if preferred_key in v:
id_value = v[preferred_key]
else:
id_value = v["all"][0]
# Create URLs
if id_type == "ISNI":
url = f"https://isni.org/isni/{id_value}"
elif id_type == "Wikidata":
url = f"https://www.wikidata.org/wiki/{id_value}"
elif id_type == "GRID":
url = f"https://grid.ac/institutes/{id_value}"
elif id_type == "FundRef":
url = f"https://api.crossref.org/funders/{id_value}"
ent_ids.append({"id": id_value, "type": id_type, "url": url})
identifiers.append(ent_ids)
df["identifiers"] = identifiers
if category == "country":
# Remove columns not used for countries
df.drop(columns=["url", "institution_types", "country", "identifiers"], inplace=True, errors="ignore")
return df
def make_index(self, category: str, df: pd.DataFrame):
"""Make the data for the index tables.
:param category: the category, i.e. country or institution.
:param df: Pandas dataframe with all data points.
        :return: the index table Pandas Dataframe.
"""
        # Create the aggregation mapping: sum numeric columns, take the first value of the rest
agg = {}
for column in df.columns:
if column.startswith("n_"):
agg[column] = "sum"
else:
agg[column] = "first"
        # Group the rows by entity id and aggregate
df_index_table = df.groupby(["id"]).agg(
agg,
index=False,
)
        # Exclude entities with a small number of outputs
df_index_table = df_index_table[df_index_table["n_outputs"] >= INCLUSION_THRESHOLD]
# Add percentages to dataframe
self.update_df_with_percentages(df_index_table, self.PERCENTAGE_FIELD_KEYS)
        # Quantize the percentages so that they add up to 100 when rounded to integers
self.quantize_df_percentages(df_index_table)
        # Sort from the highest to the lowest number of open outputs
df_index_table.sort_values(by=["n_outputs_open"], ascending=False, inplace=True)
# Add category
df_index_table["category"] = category
# Remove date and year
df_index_table.drop(columns=["date", "year"], inplace=True)
return df_index_table
def update_df_with_percentages(self, df: pd.DataFrame, keys: List[Tuple[str, str]]):
"""Calculate percentages for fields in a Pandas dataframe.
:param df: the Pandas dataframe.
        :param keys: the (numerator, denominator) key pairs to calculate percentages for.
:return: None.
"""
for numerator_key, denominator_key in keys:
p_key = f"p_{numerator_key}"
df[p_key] = df[f"n_{numerator_key}"] / df[denominator_key] * 100
# Fill in NaN caused by denominator of zero
df[p_key] = df[p_key].fillna(0)
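    # A minimal sketch (illustrative, not part of the original module): for a row with
    # n_outputs_open=25 and n_outputs=50, the ("outputs_open", "n_outputs") key pair yields
    # p_outputs_open = 25 / 50 * 100 = 50.0; a denominator of zero yields 0 instead of NaN
    # because of the fillna above.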
def quantize_df_percentages(self, df: pd.DataFrame):
"""Makes percentages add to 100% when integers
:param df: the Pandas dataframe.
:return: None.
"""
for i, row in df.iterrows():
# Make percentage publisher open only, both, other platform open only and closed add to 100
sample_size = 100
keys = [
"p_outputs_publisher_open_only",
"p_outputs_both",
"p_outputs_other_platform_open_only",
"p_outputs_closed",
]
ratios = [row[key] / 100.0 for key in keys]
results = split_largest_remainder(sample_size, *ratios)
for key, value in zip(keys, results):
df.loc[i, key] = value
# Make percentage oa_journal, hybrid and no_guarantees add to 100
keys = ["p_outputs_oa_journal", "p_outputs_hybrid", "p_outputs_no_guarantees"]
ratios = [row[key] / 100.0 for key in keys]
has_publisher_open = row["n_outputs_publisher_open"] > 0
if has_publisher_open:
results = split_largest_remainder(sample_size, *ratios)
for key, value in zip(keys, results):
df.loc[i, key] = value
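    # A worked example (illustrative numbers, not part of the original module): the integer
    # floors of percentages such as (33.4, 33.3, 16.65, 16.65) sum to 98, so calling
    # split_largest_remainder(100, 0.334, 0.333, 0.1665, 0.1665) reassigns the two leftover
    # units to the largest remainders (0.65 each), giving (33, 33, 17, 17), which sums to 100.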
def update_index_with_logos(self, category: str, df_index_table: pd.DataFrame):
"""Update the index with logos, downloading logos if they don't exist.
:param category: the category, i.e. country or institution.
:param df_index_table: the index table Pandas dataframe.
:return: None.
"""
sizes = ["s", "l"]
for size in sizes:
base_path = os.path.join(self.build_path, "logos", category, size)
os.makedirs(base_path, exist_ok=True)
# Make logos
if category == "country":
logging.info("Copying local logos")
with ThreadPoolExecutor() as executor:
futures = []
                # Copy and rename logo images from alpha2 to alpha3 country codes
for size in sizes:
base_path = os.path.join(self.build_path, "logos", category, size)
for alpha3, alpha2 in zip(df_index_table["id"], df_index_table["alpha2"]):
src_path = os.path.join(self.data_path, "flags", size, f"{alpha2}.svg")
dst_path = os.path.join(base_path, f"{alpha3}.svg")
futures.append(executor.submit(shutil.copy, src_path, dst_path))
[f.result() for f in as_completed(futures)]
logging.info("Finished copying local logos")
# Add logo urls to index
for size in sizes:
df_index_table[f"logo_{size}"] = df_index_table["id"].apply(
lambda country_code: make_logo_url(category=category, entity_id=country_code, size=size, fmt="svg")
)
elif category == "institution":
logging.info("Downloading logos using Clearbit")
fmt = "jpg"
# Get the institution logo and the path to the logo image
for size, width in zip(sizes, [32, 128]):
with ThreadPoolExecutor() as executor:
futures = []
logo_paths = []
for ror_id, url in zip(df_index_table["id"], df_index_table["url"]):
if url:
url = clean_url(url)
futures.append(
executor.submit(get_institution_logo, ror_id, url, size, width, fmt, self.build_path)
)
else:
logo_paths.append((ror_id, "/unknown.svg"))
logo_paths += [f.result() for f in as_completed(futures)]
logging.info("Finished downloading logos")
# Sort table and results by id
df_index_table.sort_index(inplace=True)
logo_paths_sorted = [tup[1] for tup in sorted(logo_paths, key=lambda tup: tup[0])]
# Add logo paths to table
df_index_table[f"logo_{size}"] = logo_paths_sorted
def update_index_with_wiki_descriptions(self, df_index_table: pd.DataFrame):
"""Get the wikipedia descriptions for each entity (institution or country) and add them to the index table.
:param df_index_table: the index table Pandas dataframe.
:return: None.
"""
# Filter to select rows where url is not empty
wikipedia_url_filter = df_index_table["wikipedia_url"] != ""
# The wikipedia 'title' is the last part of the wikipedia url, without segments specified with '#'
titles_all = list(
zip(
df_index_table.loc[wikipedia_url_filter, "wikipedia_url"]
.str.split("wikipedia.org/wiki/")
.str[-1]
.str.split("#")
.str[0],
df_index_table.loc[wikipedia_url_filter, "id"],
)
)
        # Create a list of dictionaries with at most 20 titles + ids each (the Wikipedia API maximum)
titles_chunks = [
dict(titles_all[i : i + OaWebWorkflow.WIKI_MAX_TITLES])
for i in range(0, len(titles_all), OaWebWorkflow.WIKI_MAX_TITLES)
]
logging.info(
f"Downloading wikipedia descriptions for all {len(titles_all)} entities in {len(titles_chunks)} chunks."
)
# Download 'punkt' resource, required when shortening wiki descriptions
nltk.download("punkt")
# Process each dictionary in separate thread to get wiki descriptions
with ThreadPoolExecutor() as executor:
futures = []
for titles in titles_chunks:
futures.append(executor.submit(get_wiki_descriptions, titles))
descriptions = []
for f in as_completed(futures):
descriptions += f.result()
logging.info(f"Finished downloading wikipedia descriptions")
# Sort table and results by id
df_index_table.sort_index(inplace=True)
descriptions_sorted = [tup[1] for tup in sorted(descriptions, key=lambda tup: tup[0])]
# Add wiki descriptions to table
df_index_table.loc[wikipedia_url_filter, "description"] = descriptions_sorted
df_index_table.loc[~wikipedia_url_filter, "description"] = ""
def save_index(self, category: str, df_index_table: pd.DataFrame):
"""Save the index table.
:param category: the category, i.e. country or institution.
:param df_index_table: the index table Pandas Dataframe.
:return: None.
"""
        # Save a subset of the index table data
base_path = os.path.join(self.build_path, "data")
os.makedirs(base_path, exist_ok=True)
df_index_table = df_index_table.drop(
[
"description",
"year",
"date",
"institution_types",
"identifiers",
"collaborators",
"subjects",
"other_platform_locations",
"timeseries",
],
axis=1,
errors="ignore",
)
# Make entities
records = df_index_table.to_dict("records")
entities = []
for record in records:
entity = Entity.from_dict(record)
entity.stats = PublicationStats.from_dict(record)
entities.append(entity)
# Sort by Open %
entities = sorted(entities, key=lambda e: e.stats.p_outputs_open, reverse=True)
entities = [e.to_dict() for e in entities]
# Save as JSON
json_path = os.path.join(base_path, f"{category}.json")
save_json(json_path, entities)
# Save JSONL
jsonl_path = os.path.join(base_path, f"{category}.jsonl")
save_as_jsonl(jsonl_path, entities)
        # Save as Parquet
pyarrow_path = os.path.join(base_path, f"{category}.parquet")
jsonl_to_pyarrow(jsonl_path, pyarrow_path)
def make_entities(self, df_index_table: pd.DataFrame, df: pd.DataFrame) -> List[Entity]:
"""Make entities.
:param df_index_table: the index table Pandas Dataframe.
:param df: the Pandas dataframe.
:return: the Entity objects.
"""
entities = []
key_id = "id"
key_year = "year"
key_date = "date"
key_records = "records"
ts_groups = df.groupby([key_id])
for entity_id, df_group in ts_groups:
            # Exclude entities with a small number of outputs
total_outputs = df_group["n_outputs"].sum()
if total_outputs >= INCLUSION_THRESHOLD:
self.update_df_with_percentages(df_group, self.PERCENTAGE_FIELD_KEYS)
df_group = df_group.sort_values(by=[key_year])
df_group = df_group.loc[:, ~df_group.columns.str.contains("^Unnamed")]
                # Quantize the percentages so that they add up to 100 when rounded to integers
self.quantize_df_percentages(df_group)
# Create entity
entity_dict: Dict = df_index_table.loc[df_index_table[key_id] == entity_id].to_dict(key_records)[0]
entity = Entity.from_dict(entity_dict)
entity.stats = PublicationStats.from_dict(entity_dict)
# Make timeseries data
years = []
rows: List[Dict] = df_group.to_dict(key_records)
for row in rows:
year = int(row.get(key_year))
date = row.get(key_date)
stats = PublicationStats.from_dict(row)
years.append(Year(year=year, date=date, stats=stats))
entity.timeseries = years
# Set min and max years for data
entity.min_year = years[0].year
entity.max_year = years[-1].year
entities.append(entity)
return entities
def save_entities(self, category: str, entities: List[Entity]):
"""Save the data for each entity as a JSON file.
:param category: the entity category.
:param entities: the list of Entity objects.
:return: None.
"""
base_path = os.path.join(self.build_path, "data", category)
os.makedirs(base_path, exist_ok=True)
for entity in entities:
output_path = os.path.join(base_path, f"{entity.id}.json")
entity_dict = entity.to_dict()
save_json(output_path, entity_dict)
def make_auto_complete(self, df_index_table: pd.DataFrame, category: str) -> List[Dict]:
"""Build the autocomplete data.
:param df_index_table: index table Pandas dataframe.
:param category: the category, i.e. country or institution.
:return: autocomplete records.
"""
records = []
for i, row in df_index_table.iterrows():
id = row["id"]
name = row["name"]
logo = row["logo_s"]
records.append({"id": id, "name": name, "category": category, "logo_s": logo})
return records
def save_autocomplete(self, auto_complete: List[Dict]):
"""Save the autocomplete data.
:param auto_complete: the autocomplete list.
:return: None.
"""
base_path = os.path.join(self.build_path, "data")
os.makedirs(base_path, exist_ok=True)
# Save as JSON
output_path = os.path.join(base_path, "autocomplete.json")
df_ac = pd.DataFrame(auto_complete)
records = df_ac.to_dict("records")
save_json(output_path, records)
        # Save as Parquet
table = pa.Table.from_pandas(df_ac)
        pyarrow_path = os.path.join(base_path, "autocomplete.parquet")
pq.write_table(table, pyarrow_path)
def save_stats(self, stats: Stats):
"""Save overall stats.
:param stats: stats object.
:return: None.
"""
base_path = os.path.join(self.build_path, "data")
os.makedirs(base_path, exist_ok=True)
# Save as JSON
output_path = os.path.join(base_path, "stats.json")
save_json(output_path, stats.to_dict())
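# A sketch of the resulting stats.json content written by save_stats (illustrative values,
# not part of the original module; save_json writes compact separators):
#
#   {"min_year":2000,"max_year":2021,"last_updated":"1 January 2022"}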
class OaWebWorkflow(Workflow):
    """The OaWebWorkflow generates data files for the COKI Open Access Dashboard.
The figure below illustrates the generated data and notes about what each file is used for.
.
├── data: data
│ ├── autocomplete.json: used for the website search functionality. Copied into public/data folder.
│ ├── autocomplete.parquet: used for filtering in Cloudflare Worker.
│ ├── country: individual entity statistics files for countries. Used to build each country page.
│ │ ├── ALB.json
│ │ ├── ARE.json
│ │ └── ARG.json
│ ├── country.json: used to create the country table. First 18 countries used to build first page of country table
│ │ and then this file is included in the public folder and downloaded by the client to enable the
│ │ other pages of the table to be displayed. Copied into public/data folder.
│ ├── country.jsonl: used to generate the parquet file.
│ ├── country.parquet: to be used along with apache-arrow to enable filtering from a Cloudflare Worker.
│ ├── institution: individual entity statistics files for institutions. Used to build each institution page.
│ │ ├── 05ykr0121.json
│ │ ├── 05ym42410.json
│ │ └── 05ynxx418.json
│ ├── institution.json: used to create the institution table. First 18 institutions used to build first page of institution table
│ │ and then this file is included in the public folder and downloaded by the client to enable the
│ │ other pages of the table to be displayed. Copied into public/data folder.
│ ├── institution.jsonl: used to generate the parquet file.
│ ├── institution.parquet: to be used along with apache-arrow to enable filtering from a Cloudflare Worker.
│ └── stats.json: global statistics, e.g. the minimum and maximum date for the dataset, when it was last updated etc.
└── logos: country and institution logos. Copied into public/logos folder.
├── country
│ ├── l: large logos displayed on country pages.
│ │ ├── ALB.svg
│ │ ├── ARE.svg
│ │ └── ARG.svg
│ └── s: small logos displayed in country table.
│ ├── ALB.svg
│ ├── ARE.svg
│ └── ARG.svg
└── institution
├── l: large logos displayed on institution pages.
│ ├── 05ykr0121.jpg
│ ├── 05ym42410.jpg
│ └── 05ynxx418.jpg
└── s: small logos displayed in institution table.
├── 05ykr0121.jpg
├── 05ym42410.jpg
└── 05ynxx418.jpg
"""
# Set the number of titles for which wiki descriptions are retrieved at once, the API can return max 20 extracts.
WIKI_MAX_TITLES = 20
def __init__(
self,
*,
dag_id: str = "oa_web_workflow",
start_date: Optional[pendulum.DateTime] = pendulum.datetime(2021, 5, 2),
schedule_interval: Optional[str] = "@weekly",
catchup: Optional[bool] = False,
ext_dag_id: str = "doi",
table_ids: List[str] = None,
airflow_vars: List[str] = None,
airflow_conns: List[str] = None,
agg_dataset_id: str = "observatory",
ror_dataset_id: str = "ror",
settings_dataset_id: str = "settings",
version: str = "v1",
):
"""Create the OaWebWorkflow.
:param dag_id: the DAG id.
:param start_date: the start date.
:param schedule_interval: the schedule interval.
:param catchup: whether to catchup or not.
        :param ext_dag_id: the id of the external DAG that the ExternalTaskSensor waits for before running this workflow.
        :param table_ids: the table ids.
        :param airflow_vars: required Airflow Variables.
        :param airflow_conns: required Airflow Connections.
        :param agg_dataset_id: the id of the dataset containing the aggregate country and institution tables.
        :param ror_dataset_id: the ROR dataset id.
        :param settings_dataset_id: the id of the settings dataset, which contains the country table.
        :param version: the dataset version published by this workflow. The Github Action pulls from a specific dataset
        version: https://github.com/The-Academic-Observatory/coki-oa-web/blob/develop/.github/workflows/build-on-data-update.yml#L68-L74.
        This is so that when breaking changes are made to the schema, the web application won't break.
        """
if airflow_vars is None:
airflow_vars = [
AirflowVars.DATA_PATH,
AirflowVars.PROJECT_ID,
AirflowVars.DATA_LOCATION,
AirflowVars.DOWNLOAD_BUCKET,
AirflowVars.TRANSFORM_BUCKET,
self.DATA_BUCKET,
]
if airflow_conns is None:
airflow_conns = [self.GITHUB_TOKEN_CONN]
super().__init__(
dag_id=dag_id,
start_date=start_date,
schedule_interval=schedule_interval,
catchup=catchup,
airflow_vars=airflow_vars,
airflow_conns=airflow_conns,
)
self.agg_dataset_id = agg_dataset_id
self.ror_dataset_id = ror_dataset_id
self.settings_dataset_id = settings_dataset_id
self.table_ids = table_ids
self.version = version
if table_ids is None:
self.table_ids = ["country", "institution"]
self.add_operator(
ExternalTaskSensor(task_id=f"{ext_dag_id}_sensor", external_dag_id=ext_dag_id, mode="reschedule")
)
self.add_setup_task(self.check_dependencies)
self.add_task(self.query)
self.add_task(self.download)
self.add_task(self.transform)
self.add_task(self.upload_dataset)
self.add_task(self.repository_dispatch)
self.add_task(self.cleanup)
def make_release(self, **kwargs) -> OaWebRelease:
"""Make release instances. The release is passed as an argument to the function (TelescopeFunction) that is
called in 'task_callable'.
:param kwargs: the context passed from the PythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html for a list of the keyword arguments that are
passed to this argument.
        :return: an OaWebRelease instance.
"""
project_id = Variable.get(AirflowVars.PROJECT_ID)
release_date = make_release_date(**kwargs)
data_bucket_name = Variable.get(self.DATA_BUCKET)
return OaWebRelease(
dag_id=self.dag_id,
project_id=project_id,
data_bucket_name=data_bucket_name,
release_date=release_date,
ror_dataset_id=self.ror_dataset_id,
agg_dataset_id=self.agg_dataset_id,
)
def query(self, release: OaWebRelease, **kwargs):
"""Fetch the data for each table.
:param release: the release.
:param kwargs: the context passed from the PythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html for a list of the keyword arguments that are
passed to this argument.
:return: None.
"""
results = []
# ROR release date
ror_table_id = "ror"
ror_release_date = select_table_shard_dates(
project_id=release.project_id,
dataset_id=release.ror_dataset_id,
table_id=ror_table_id,
end_date=release.release_date,
)[0]
ror_sharded_table_id = bigquery_sharded_table_id(ror_table_id, ror_release_date)
for agg_table_id in self.table_ids:
# Aggregate release dates
agg_release_date = select_table_shard_dates(
project_id=release.project_id,
dataset_id=release.agg_dataset_id,
table_id=agg_table_id,
end_date=release.release_date,
)[0]
agg_sharded_table_id = bigquery_sharded_table_id(agg_table_id, agg_release_date)
# Fetch data
destination_uri = f"gs://{release.download_bucket}/{self.dag_id}/{release.release_id}/{agg_table_id}.jsonl"
success = bq_query_to_gcs(
query=QUERY.format(
project_id=release.project_id,
agg_dataset_id=release.agg_dataset_id,
agg_table_id=agg_sharded_table_id,
ror_dataset_id=release.ror_dataset_id,
ror_table_id=ror_sharded_table_id,
settings_dataset_id=self.settings_dataset_id,
country_table_id="country",
),
project_id=release.project_id,
destination_uri=destination_uri,
)
results.append(success)
state = all(results)
if not state:
raise AirflowException("OaWebWorkflow.query failed")
def download(self, release: OaWebRelease, **kwargs):
"""Download the queried data.
:param release: the release.
:param kwargs: the context passed from the PythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html for a list of the keyword arguments that are
passed to this argument.
:return: None.
"""
prefix = f"{self.dag_id}/{release.release_id}"
state = download_blobs_from_cloud_storage(
bucket_name=release.download_bucket, prefix=prefix, destination_path=release.download_folder
)
if not state:
raise AirflowException("OaWebWorkflow.download failed")
def transform(self, release: OaWebRelease, **kwargs):
"""Transform the queried data into the final format for the open access website.
:param release: the release.
:param kwargs: the context passed from the PythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html for a list of the keyword arguments that are
passed to this argument.
:return: None.
"""
        # Collect the autocomplete records across categories
        auto_complete = []
for category in self.table_ids:
logging.info(f"Transforming {category} entity")
# Load data
df = release.load_data(category)
# Pre-process data
df = release.preprocess_df(category, df)
# Make index table
df_index_table = release.make_index(category, df)
release.update_index_with_logos(category, df_index_table)
release.update_index_with_wiki_descriptions(df_index_table)
entities = release.make_entities(df_index_table, df)
# Make autocomplete data for this category
auto_complete += release.make_auto_complete(df_index_table, category)
# Save category data
release.save_index(category, df_index_table)
release.save_entities(category, entities)
logging.info(f"Saved transformed {category} entity")
# Save auto complete data as json
release.save_autocomplete(auto_complete)
logging.info(f"Saved autocomplete data")
# Save stats as json
min_year = 2000
max_year = pendulum.now().year - 1
last_updated = pendulum.now().format("D MMMM YYYY")
stats = Stats(min_year, max_year, last_updated)
release.save_stats(stats)
logging.info(f"Saved stats data")
        # Zip the data
        dst = os.path.join(release.transform_folder, "latest")
        shutil.copytree(release.build_path, dst)
        shutil.make_archive(dst, "zip", dst)
def upload_dataset(self, release: OaWebRelease, **kwargs):
"""Publish the dataset produced by this workflow.
:param release: the release.
:param kwargs: the context passed from the PythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html for a list of the keyword arguments that are
passed to this argument.
:return: None.
"""
# upload_file_to_cloud_storage should always rewrite a new version of latest.zip if it exists
# object versioning on the bucket will keep the previous versions
blob_name = f"{self.version}/latest.zip"
file_path = os.path.join(release.transform_folder, "latest.zip")
upload_file_to_cloud_storage(
bucket_name=release.data_bucket_name, blob_name=blob_name, file_path=file_path, check_blob_hash=False
)
def repository_dispatch(self, release: OaWebRelease, **kwargs):
"""Trigger a Github repository_dispatch to trigger new website builds.
:param release: the release.
:param kwargs: the context passed from the PythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html for a list of the keyword arguments that are
passed to this argument.
:return: None.
"""
token = get_airflow_connection_password(self.GITHUB_TOKEN_CONN)
event_types = ["data-update/develop", "data-update/staging", "data-update/production"]
for event_type in event_types:
trigger_repository_dispatch(token=token, event_type=event_type)
def cleanup(self, release: OaWebRelease, **kwargs):
"""Delete all files and folders associated with this release.
:param release: the release.
:param kwargs: the context passed from the PythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html for a list of the keyword arguments that are
passed to this argument.
:return: None.
"""
release.cleanup()
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,416
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/scopus_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Tuan Chien
import calendar
import json
import logging
import os
import urllib.request
from concurrent.futures import ThreadPoolExecutor, as_completed
from queue import Empty, Queue
from threading import Event
from time import sleep
from typing import Any, Dict, List, Tuple, Union
from urllib.parse import quote_plus
import jsonlines
import pendulum
from academic_observatory_workflows.config import schema_folder as default_schema_folder
from airflow import AirflowException
from google.cloud.bigquery import WriteDisposition
from observatory.platform.utils.airflow_utils import (
AirflowConns,
AirflowVars,
get_airflow_connection_password,
)
from observatory.platform.utils.file_utils import load_file, write_to_file
from observatory.platform.utils.url_utils import get_user_agent
from observatory.platform.utils.workflow_utils import (
blob_name,
bq_load_shard,
build_schedule,
get_as_list,
get_entry_or_none,
)
from observatory.platform.workflows.snapshot_telescope import (
SnapshotRelease,
SnapshotTelescope,
)
from ratelimit import limits, sleep_and_retry
class ScopusRelease(SnapshotRelease):
def __init__(
self,
*,
dag_id: str,
release_date: pendulum.DateTime,
api_keys: List[str],
institution_ids: List[str],
earliest_date: pendulum.DateTime,
view: str,
):
"""Construct a ScopusRelease instance.
:param dag_id: The DAG ID.
:param release_date: Release date.
:param api_keys: List of available API keys to use.
:param institution_ids: List of institution IDs to query.
:param earliest_date: Earliest date to query from.
:param view: The view type. Standard or complete. See https://dev.elsevier.com/sc_search_views.html
"""
super().__init__(
dag_id=dag_id,
release_date=release_date,
)
self.table_id = ScopusTelescope.DAG_ID
self.api_keys = api_keys
self.institution_ids = institution_ids
self.earliest_date = earliest_date
self.view = view
def download(self):
"""Download snapshot from SCOPUS for the given institution."""
start_date = self.earliest_date
end_date = self.release_date.subtract(days=1).end_of("day")
schedule = build_schedule(start_date, end_date)
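        # build_schedule splits [start_date, end_date] into query periods; each period
        # queued below becomes one download task for a worker to pick up.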
taskq = Queue()
for period in schedule:
taskq.put(period)
workers = list()
for i, key in enumerate(self.api_keys):
worker = ScopusUtilWorker(
client_id=i,
client=ScopusClient(api_key=key, view=self.view),
quota_reset_date=self.release_date,
quota_remaining=ScopusUtilWorker.DEFAULT_KEY_QUOTA,
)
workers.append(worker)
ScopusUtility.download_parallel(
workers=workers,
taskq=taskq,
conn=self.dag_id,
institution_ids=self.institution_ids,
download_dir=self.download_folder,
)
def transform(self):
"""Transform the data into database format."""
for file in self.download_files:
records = json.loads(load_file(file))
harvest_datetime = self._get_harvest_datetime(file)
entries = self._transform_to_db_format(records=records, harvest_datetime=harvest_datetime)
self._write_transform_files(entries=entries, file=file)
def _transform_to_db_format(self, records: List[dict], harvest_datetime: str) -> List[dict]:
"""Convert the json response to the expected schema.
:param records: List of the records as json.
:param harvest_datetime: Timestamp of when the API call was made.
:return: List of transformed entries.
"""
entries = []
for data in records:
entry = ScopusJsonParser.parse_json(
data=data,
harvest_datetime=harvest_datetime,
release_date=self.release_date.date().isoformat(),
institution_ids=self.institution_ids,
)
entries.append(entry)
return entries
def _write_transform_files(self, *, entries: Union[dict, list], file: str):
"""Save the schema compatible dictionaries as jsonlines.
:param entries: List of schema compatible entries.
        :param file: The filepath of the downloaded JSON API response.
"""
# Strip out the harvest time stamp from the filename so that schema detection works
filename = os.path.basename(file)
filename = f"{filename[:23]}.jsonl"
filename = f"{ScopusTelescope.DAG_ID}.{filename}"
dst_file = os.path.join(self.transform_folder, filename)
logging.info(f"Writing file {dst_file}")
with jsonlines.open(dst_file, mode="w") as writer:
writer.write_all(entries)
def _get_harvest_datetime(self, filepath: str) -> str:
"""Get the harvest datetime from the filename. <startdate>_<enddate>_<timestamp>.json
:param filepath: JSON file path.
:return: Harvest datetime string.
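        Example (illustrative): "2021-01-01_2021-02-01_2021-02-01T00:00:00+00:00.json"
        yields "2021-02-01T00:00:00+00:00".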
"""
filename = os.path.basename(filepath)
file_tokens = filename.split("_")
return file_tokens[2][:-5]
class ScopusTelescope(SnapshotTelescope):
DAG_ID = "scopus"
TABLE_DESCRIPTION = "The Scopus citation database: https://www.scopus.com"
def __init__(
self,
*,
dag_id: str,
airflow_conns: List[AirflowConns],
airflow_vars: List[AirflowVars],
institution_ids: List[str],
view: str = "STANDARD",
earliest_date: pendulum.DateTime = pendulum.datetime(1800, 1, 1),
start_date: pendulum.DateTime = pendulum.datetime(2018, 5, 14),
schedule_interval: str = "@monthly",
dataset_id: str = "elsevier",
schema_folder: str = default_schema_folder(),
):
"""Scopus telescope.
:param dag_id: the id of the DAG.
:param start_date: the start date of the DAG.
:param schedule_interval: the schedule interval of the DAG.
:param dataset_id: the dataset id.
:param schema_folder: the SQL schema path.
:param airflow_vars: list of airflow variable keys to check the existence of
:param airflow_conns: list of airflow connection ids to check the existence of
        :param institution_ids: list of institution IDs to use for the SCOPUS search query.
:param view: The view type. Standard or complete. See https://dev.elsevier.com/sc_search_views.html
:param earliest_date: earliest date to query for results.
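        A minimal construction sketch (illustrative values):
            ScopusTelescope(
                dag_id="scopus_curtin", airflow_conns=["scopus_curtin"], airflow_vars=[],
                institution_ids=["60031226"],
            )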
"""
load_bigquery_table_kwargs = {
"write_disposition": WriteDisposition.WRITE_APPEND,
"ignore_unknown_values": True
}
super().__init__(
dag_id,
start_date,
schedule_interval,
dataset_id,
schema_folder,
catchup=False,
table_descriptions={dag_id: ScopusTelescope.TABLE_DESCRIPTION},
airflow_vars=airflow_vars,
airflow_conns=airflow_conns,
load_bigquery_table_kwargs=load_bigquery_table_kwargs,
)
if len(airflow_conns) == 0:
raise AirflowException("You need to supply at least one Airflow connection with a SCOPUS API key.")
if len(institution_ids) == 0:
raise AirflowException("You must specify at least one institution id to query.")
self.institution_ids = institution_ids
self.earliest_date = earliest_date
self.view = view
self.add_setup_task(self.check_dependencies)
self.add_task(self.download)
self.add_task(self.upload_downloaded)
self.add_task(self.transform)
self.add_task(self.upload_transformed)
self.add_task(self.bq_load)
self.add_task(self.cleanup)
@property
def api_keys(self) -> List[str]:
"""Get the API keys to use for downloading SCOPUS data.
:return: List of API keys to use.
"""
keys = [get_airflow_connection_password(conn) for conn in self.airflow_conns]
return keys
def make_release(self, **kwargs) -> List[ScopusRelease]:
"""Make release instances. The release is passed as an argument to the function (TelescopeFunction) that is
called in 'task_callable'.
:param kwargs: the context passed from the PythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html for a list of the keyword arguments that are
            passed to this function.
        :return: a list of ScopusRelease instances.
"""
return [
ScopusRelease(
dag_id=self.dag_id,
release_date=pendulum.now("UTC"),
api_keys=self.api_keys,
institution_ids=self.institution_ids,
earliest_date=self.earliest_date,
view=self.view,
)
]
def bq_load(self, releases: List[SnapshotRelease], **kwargs):
"""Task to load each transformed release to BigQuery.
The table_id is set to the file name without the extension.
:param releases: a list of releases.
:return: None.
"""
# Load each transformed release
for release in releases:
transform_blob = f"{blob_name(release.transform_folder)}/*"
table_description = self.table_descriptions.get(self.dag_id, "")
bq_load_shard(
self.schema_folder,
release.release_date,
transform_blob,
self.dataset_id,
ScopusTelescope.DAG_ID,
self.source_format,
prefix=self.schema_prefix,
schema_version=self.schema_version,
dataset_description=self.dataset_description,
table_description=table_description,
**self.load_bigquery_table_kwargs,
)
class ScopusClientThrottleLimits:
"""API throttling constants for ScopusClient."""
CALL_LIMIT = 2 # SCOPUS allows 2 api calls / second.
CALL_PERIOD = 1 # seconds
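    # Consumed by ScopusClient.retrieve via ratelimit's decorators: @limits raises a
    # RateLimitException once the per-period call budget is spent, and @sleep_and_retry
    # catches it and sleeps until the period elapses instead of failing.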
class ScopusClient:
"""Handles URL fetching of SCOPUS search."""
RESULTS_PER_PAGE = 25
MAX_RESULTS = 5000 # Upper limit on number of results returned
QUOTA_EXCEED_ERROR_PREFIX = "QuotaExceeded. Resets at: "
def __init__(self, *, api_key: str, view: str = "standard"):
"""Constructor.
:param api_key: API key.
:param view: The 'view' access level. Can be 'standard' or 'complete'.
"""
self._headers = {
"X-ELS-APIKey": api_key,
"Accept": "application/json",
"User-Agent": get_user_agent(package_name="academic_observatory_workflows"),
}
self._view = view
def _url(self, query: str) -> str:
"""Get the query url.
:param query: Query string.
:return: Query url.
"""
return f"https://api.elsevier.com/content/search/scopus?view={self._view}&query={quote_plus(query)}"
@staticmethod
def get_reset_date_from_error(msg: str) -> int:
"""Get the reset date timestamp in seconds from the exception message.
According to https://dev.elsevier.com/api_key_settings.html it is meant to be seconds, but milliseconds were
observed the last time it was checked in Oct 2020.
:param msg: exception message.
:return: Reset date timestamp in seconds.
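        Example (illustrative): a message of "QuotaExceeded. Resets at: 2000" yields 2.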
"""
ts_offset = len(ScopusClient.QUOTA_EXCEED_ERROR_PREFIX)
        return int(msg[ts_offset:]) // 1000  # Elsevier docs say seconds, but the header reports milliseconds.
@staticmethod
def get_next_page_url(links: List[dict]) -> Union[None, str]:
"""Get the URL for the next result page.
:param links: The list of links returned from the last query.
        :return: None if next page not found, otherwise the next page's URL.
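        Example (illustrative):
            >>> ScopusClient.get_next_page_url([{"@ref": "next", "@href": "http://x"}])
            'http://x'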
"""
        for link in links:
            if link.get("@ref") == "next":
                return link.get("@href")
        return None
@sleep_and_retry
@limits(calls=ScopusClientThrottleLimits.CALL_LIMIT, period=ScopusClientThrottleLimits.CALL_PERIOD)
def retrieve(self, query: str) -> Tuple[List[Dict[str, Any]], int, int]:
"""Execute the query.
:param query: Query string.
:return: (results of query, quota remaining, quota reset date timestamp in seconds)
"""
http_ok = 200
http_quota_exceeded = 429
request = urllib.request.Request(self._url(query), headers=self._headers)
results = list()
while True:
response = urllib.request.urlopen(request)
quota_remaining = response.getheader("X-RateLimit-Remaining")
quota_reset = response.getheader("X-RateLimit-Reset")
request_code = response.getcode()
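            # Note: a real urlopen raises HTTPError for non-2xx responses before these
            # checks run; the mocked unit tests exercise the status-code branches directly.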
if request_code == http_quota_exceeded:
raise AirflowException(f"{ScopusClient.QUOTA_EXCEED_ERROR_PREFIX}{quota_reset}")
response_dict = json.loads(response.read().decode("utf-8"))
if request_code != http_ok:
raise AirflowException(f"HTTP {request_code}:{response_dict}")
if "search-results" not in response_dict:
break
results.extend(response_dict["search-results"]["entry"])
total_results = int(response_dict["search-results"]["opensearch:totalResults"])
if total_results > ScopusClient.MAX_RESULTS:
raise AirflowException(
f"ScopusClient: query {query} has {total_results} results but the maximum is {ScopusClient.MAX_RESULTS}"
)
if len(results) == total_results:
break
if total_results == 0:
results = list()
break
url = ScopusClient.get_next_page_url(response_dict["search-results"]["link"])
if url is None:
raise AirflowException(
f"ScopusClient: no next url found. Only have {len(results)} of {total_results} results."
)
request = urllib.request.Request(url, headers=self._headers)
return results, quota_remaining, quota_reset
class ScopusUtilWorker:
"""Worker class"""
    DEFAULT_KEY_QUOTA = 20000  # Default API key query quota per 7 days.
QUEUE_WAIT_TIME = 20 # Wait time for Queue.get() call
def __init__(
self, *, client_id: int, client: ScopusClient, quota_reset_date: pendulum.DateTime, quota_remaining: int
):
"""Constructor.
:param client_id: Client id to use for debug messages so we don't leak the API key.
:param client: ElsClient object for an API key.
        :param quota_reset_date: Date at which the quota will reset.
        :param quota_remaining: Remaining API call quota for this key.
"""
self.client_id = client_id
self.client = client
self.quota_reset_date = quota_reset_date
self.quota_remaining = quota_remaining
class ScopusUtility:
"""Handles the SCOPUS interactions."""
@staticmethod
    def build_query(*, institution_ids: List[str], period: pendulum.Period) -> str:
        """Build a SCOPUS API query.
        :param institution_ids: List of institution IDs to query, e.g., ["60031226"] (Curtin University).
        :param period: A schedule period.
:return: Constructed web query.
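        Example (illustrative): institution_ids=["60031226"] over Jan-Feb 2021 yields
        '(AF-ID(60031226)) AND PUBDATETXT("January 2021" or "February 2021")'.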
"""
tail_offset = -4 # To remove ' or ' and ' OR ' from tail of string
organisations = str()
for i, inst in enumerate(institution_ids):
organisations += f"AF-ID({inst}) OR "
organisations = organisations[:tail_offset]
# Build publication date range
search_months = str()
for point in period.range("months"):
month_name = calendar.month_name[point.month]
search_months += f'"{month_name} {point.year}" or '
search_months = search_months[:tail_offset]
query = f"({organisations}) AND PUBDATETXT({search_months})"
return query
@staticmethod
def download_period(
*,
worker: ScopusUtilWorker,
conn: str,
        period: pendulum.Period,
institution_ids: List[str],
download_dir: str,
):
"""Download records for a stated date range.
        The SCOPUS search API caps each query at 5000 results (ScopusClient.MAX_RESULTS), so in the unlikely event
        any institution has more than 5000 entries per month, this will present a problem.
:param worker: Worker that will do the downloading.
:param conn: Connection ID from Airflow (minus scopus_)
:param period: Period to download.
:param institution_ids: List of institutions to query concurrently.
:param download_dir: Path to save downloaded files to.
"""
timestamp = pendulum.now("UTC").isoformat()
save_file = os.path.join(download_dir, f"{period.start}_{period.end}_{timestamp}.json")
logging.info(f"{conn} worker {worker.client_id}: retrieving period {period.start} - {period.end}")
query = ScopusUtility.build_query(institution_ids=institution_ids, period=period)
result, num_results = ScopusUtility.make_query(worker=worker, query=query)
logging.info(f"{conn}: {num_results} results retrieved")
write_to_file(result, save_file)
@staticmethod
def sleep_if_needed(*, reset_date: pendulum.DateTime, conn: str):
"""Sleep until reset_date.
:param reset_date: Date(time) to sleep to.
:param conn: Connection id from Airflow.
"""
now = pendulum.now("UTC")
sleep_time = (reset_date - now).seconds
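        # Note: .seconds is the period's seconds component, not its total duration; any
        # undersleep on a multi-day wait is self-correcting because the worker retries
        # and sleeps again on the next quota error.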
if sleep_time > 0:
logging.info(f"{conn}: Sleeping for {sleep_time} seconds until a worker is ready.")
sleep(sleep_time)
@staticmethod
def update_reset_date(*, conn: str, error_msg: str, worker: ScopusUtilWorker):
"""Update the reset date to closest date that will make a worker available.
:param conn: Airflow connection ID.
:param error_msg: Error message from quota exceeded exception.
:param worker: Worker that will do the downloading.
"""
renews_ts = ScopusClient.get_reset_date_from_error(error_msg)
worker.quota_reset_date = pendulum.from_timestamp(renews_ts)
logging.warning(f"{conn} worker {worker.client_id}: quoted exceeded. New reset date: {worker.quota_reset_date}")
@staticmethod
def clear_task_queue(queue: Queue):
"""Clear a queue.
:param queue: Queue to clear.
"""
while not queue.empty():
try:
queue.get(False)
except Empty:
continue
queue.task_done()
@staticmethod
def download_worker(
*,
worker: ScopusUtilWorker,
exit_event: Event,
taskq: Queue,
conn: str,
institution_ids: List[str],
download_dir: str,
):
"""Download worker method used by parallel downloader.
:param worker: worker to use.
:param exit_event: exit event to monitor.
:param taskq: tasks queue.
:param conn: Airflow connection ID.
:param institution_ids: List of institutions to query concurrently.
:param download_dir: Path to save downloaded files to.
"""
while True:
try:
logging.info(f"{conn} worker {worker.client_id}: attempting to get a task")
task = taskq.get(block=True, timeout=ScopusUtilWorker.QUEUE_WAIT_TIME)
logging.info(f"{conn} worker {worker.client_id}: received task {task}")
except Empty:
if exit_event.is_set():
logging.info(f"{conn} worker {worker.client_id}: received exit event. Returning results.")
break
logging.info(f"{conn} worker {worker.client_id}: get task timeout. Retrying.")
continue
try: # Got task. Try to download.
logging.info(f"{conn} worker {worker.client_id}: downloading {task}")
ScopusUtility.download_period(
worker=worker, conn=conn, period=task, institution_ids=institution_ids, download_dir=download_dir
)
taskq.task_done()
logging.info(f"{conn} worker {worker.client_id}: download done for {task}")
except Exception as e:
logging.error(f"Received error: {e}")
taskq.task_done()
error_msg = str(e)
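                # Quota exceeded: record the key's new reset date, requeue the task, and
                # sleep this worker until the quota renews before trying again.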
if error_msg.startswith(ScopusClient.QUOTA_EXCEED_ERROR_PREFIX):
ScopusUtility.update_reset_date(conn=conn, error_msg=error_msg, worker=worker)
taskq.put(task)
ScopusUtility.sleep_if_needed(reset_date=worker.quota_reset_date, conn=conn)
continue
# Need to clear the queue before we raise exception otherwise join blocks forever
ScopusUtility.clear_task_queue(taskq)
raise AirflowException(error_msg)
@staticmethod
def download_parallel(
*, workers: List[ScopusUtilWorker], taskq: Queue, conn: str, institution_ids: List[str], download_dir: str
):
"""Download SCOPUS snapshot with parallel sessions. Tasks will be distributed in parallel to the available
keys. Each key will independently fetch a task from the queue when it's free so there's no guarantee of load
balance.
:param workers: List of workers available.
:param taskq: tasks queue.
:param conn: Airflow connection ID.
:param institution_ids: List of institutions to query concurrently.
:param download_dir: Path to save downloaded files to.
"""
sessions = len(workers)
logging.info(f"Creating {sessions} concurrent sessions.")
with ThreadPoolExecutor(max_workers=sessions) as executor:
futures = list()
thread_exit = Event()
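            # Workers exit only once the task queue is drained and this event is set (see download_worker).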
for worker in workers:
futures.append(
executor.submit(
ScopusUtility.download_worker,
worker=worker,
exit_event=thread_exit,
taskq=taskq,
conn=conn,
institution_ids=institution_ids,
download_dir=download_dir,
)
)
taskq.join() # Wait until all tasks done
logging.info(f"{conn}: all tasks fetched. Signalling threads to exit.")
thread_exit.set()
for future in as_completed(futures):
future.result()
@staticmethod
    def make_query(*, worker: ScopusUtilWorker, query: str) -> Tuple[str, int]:
        """Make a query through the throttled API client. The throttle is a global limit for this API when called
        from a program on the same machine. Limits are specified in the ScopusClientThrottleLimits class.
        Throttle limits may or may not be enforced. Probably depends on how executors spin up tasks.
        :param worker: ScopusUtilWorker object.
        :param query: Query string.
        :return: (JSON string of results, number of results).
        """
results, _, _ = worker.client.retrieve(query)
return json.dumps(results), len(results)
class ScopusJsonParser:
"""Helper methods to process the json from SCOPUS into desired structure."""
@staticmethod
def get_affiliations(data: Dict[str, Any]) -> Union[None, List[Dict[str, Any]]]:
"""Get the affiliation field.
:param data: json response from SCOPUS.
        :return: List of affiliation details.
"""
affiliations = list()
if "affiliation" not in data:
return None
for affiliation in data["affiliation"]:
affil = dict()
affil["name"] = get_entry_or_none(affiliation, "affilname")
affil["city"] = get_entry_or_none(affiliation, "affiliation-city")
affil["country"] = get_entry_or_none(affiliation, "affiliation-country")
# Available in complete view
affil["id"] = get_entry_or_none(affiliation, "afid")
affil["name_variant"] = get_entry_or_none(affiliation, "name-variant")
affiliations.append(affil)
if len(affiliations) == 0:
return None
return affiliations
@staticmethod
def get_authors(data: Dict[str, Any]) -> Union[None, List[Dict[str, Any]]]:
"""Get the author field. Won't know if this parser is going to throw error unless we get access to api key
with complete view access.
:param data: json response from SCOPUS.
:return list of authors' details.
"""
author_list = list()
if "author" not in data:
return None
# Assuming there's a list given the doc says complete author list
authors = data["author"]
for author in authors:
ad = dict()
ad["authid"] = get_entry_or_none(author, "authid") # Not sure what this is or how it's diff to afid
ad["orcid"] = get_entry_or_none(author, "orcid")
ad["full_name"] = get_entry_or_none(author, "authname") # Taking a guess that this is what it is
ad["first_name"] = get_entry_or_none(author, "given-name")
ad["last_name"] = get_entry_or_none(author, "surname")
ad["initials"] = get_entry_or_none(author, "initials")
ad["afid"] = get_entry_or_none(author, "afid")
author_list.append(ad)
if len(author_list) == 0:
return None
return author_list
@staticmethod
def get_identifier_list(data: dict, id_type: str) -> Union[None, List[str]]:
"""Get the list of document identifiers or null of it does not exist. This string/list behaviour was observed
for ISBNs so using it for other identifiers just in case.
:param data: json response from SCOPUS.
:param id_type: type of identifier, e.g., 'isbn'
:return: List of identifiers.
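        Example (illustrative): {"isbn": "x"} yields ["x"], and {"isbn": [{"$": "x"}]} also yields ["x"].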
"""
identifier = list()
if id_type not in data:
return None
id_data = data[id_type]
if isinstance(id_data, str):
identifier.append(id_data)
else: # Only other observed case is list
for entry in id_data:
identifier.append(entry["$"]) # This is what showed up in ISBN example in list situation
if len(identifier) == 0:
return None
return identifier
@staticmethod
def parse_json(*, data: dict, harvest_datetime: str, release_date: str, institution_ids: List[str]) -> dict:
"""Turn json data into db schema format.
:param data: json response from SCOPUS.
:param harvest_datetime: isoformat string of time the fetch took place.
:param release_date: DAG execution date.
:param institution_ids: List of institution ids used in the query.
:return: dict of data in right field format.
"""
entry = dict()
entry["harvest_datetime"] = harvest_datetime # Time of harvest (datetime string)
entry["release_date"] = release_date # Release date (date string)
entry["institution_ids"] = institution_ids
entry["title"] = get_entry_or_none(data, "dc:title") # Article title
entry["identifier"] = get_entry_or_none(data, "dc:identifier") # Scopus ID
entry["creator"] = get_entry_or_none(data, "dc:creator") # First author name
entry["publication_name"] = get_entry_or_none(data, "prism:publicationName") # Source title
entry["cover_date"] = get_entry_or_none(data, "prism:coverDate") # Publication date
entry["doi"] = ScopusJsonParser.get_identifier_list(data, "prism:doi") # DOI
entry["eissn"] = ScopusJsonParser.get_identifier_list(data, "prism:eIssn") # Electronic ISSN
entry["issn"] = ScopusJsonParser.get_identifier_list(data, "prism:issn") # ISSN
entry["isbn"] = ScopusJsonParser.get_identifier_list(data, "prism:isbn") # ISBN
entry["aggregation_type"] = get_entry_or_none(data, "prism:aggregationType") # Source type
entry["pubmed_id"] = get_entry_or_none(data, "pubmed-id") # MEDLINE identifier
entry["pii"] = get_entry_or_none(data, "pii") # PII Publisher item identifier
entry["eid"] = get_entry_or_none(data, "eid") # Electronic ID
entry["subtype_description"] = get_entry_or_none(data, "subtypeDescription") # Document Type description
entry["open_access"] = get_entry_or_none(data, "openaccess", int) # Open access status. (Integer)
entry["open_access_flag"] = get_entry_or_none(data, "openaccessFlag") # Open access status. (Boolean)
entry["citedby_count"] = get_entry_or_none(data, "citedby-count", int) # Cited by count (integer)
entry["source_id"] = get_entry_or_none(data, "source-id", int) # Source ID (integer)
entry["affiliations"] = ScopusJsonParser.get_affiliations(data) # Affiliations
entry["orcid"] = get_entry_or_none(data, "orcid") # ORCID
# Available in complete view
entry["authors"] = ScopusJsonParser.get_authors(data) # List of authors
entry["abstract"] = get_entry_or_none(data, "dc:description") # Abstract
entry["keywords"] = get_as_list(data, "authkeywords") # Assuming it's a list of strings.
entry["article_number"] = get_entry_or_none(data, "article-number") # Article number (unclear if int or str)
entry["fund_agency_ac"] = get_entry_or_none(data, "fund-acr") # Funding agency acronym
entry["fund_agency_id"] = get_entry_or_none(data, "fund-no") # Funding agency identification
entry["fund_agency_name"] = get_entry_or_none(data, "fund-sponsor") # Funding agency name
return entry
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,417
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Tuan Chien
import json
import os
import unittest
import unittest.mock as mock
from logging import error
from queue import Empty, Queue
from threading import Event, Thread
from time import sleep
from unittest.mock import MagicMock, patch
import observatory.api.server.orm as orm
import pendulum
from academic_observatory_workflows.config import test_fixtures_folder
from academic_observatory_workflows.workflows.scopus_telescope import (
ScopusClient,
ScopusJsonParser,
ScopusRelease,
ScopusTelescope,
ScopusUtility,
ScopusUtilWorker,
)
from airflow import AirflowException
from airflow.models import Connection
from airflow.utils.state import State
from click.testing import CliRunner
from freezegun import freeze_time
from observatory.platform.utils.airflow_utils import AirflowConns, AirflowVars
from observatory.platform.utils.api import make_observatory_api
from observatory.platform.utils.gc_utils import run_bigquery_query
from observatory.platform.utils.test_utils import (
HttpServer,
ObservatoryEnvironment,
ObservatoryTestCase,
module_file_path,
)
from observatory.platform.utils.url_utils import get_user_agent
from observatory.platform.utils.workflow_utils import (
bigquery_sharded_table_id,
blob_name,
build_schedule,
make_dag_id,
)
class TestScopusUtility(unittest.TestCase):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@patch("academic_observatory_workflows.workflows.scopus_telescope.Queue.empty")
def test_clear_task_queue(self, m_empty):
m_empty.side_effect = [False, False, True]
q = Queue()
q.put(1)
ScopusUtility.clear_task_queue(q)
self.assertRaises(Empty, q.get, False)
q.join() # Make sure no block
class MockUrlResponse:
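    """Minimal stand-in for the response object returned by urllib.request.urlopen in these tests."""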
def __init__(self, *, response="{}", code=200):
self.response = response
self.code = code
def getheader(self, header):
if header == "X-RateLimit-Remaining":
return 0
if header == "X-RateLimit-Reset":
return 10
def getcode(self):
return self.code
def read(self):
return self.response
class TestScopusClient(unittest.TestCase):
"""Test the ScopusClient class."""
class MockMetadata:
@classmethod
        def get(cls, attribute):
if attribute == "Version":
return "1"
if attribute == "Home-page":
return "http://test.test"
if attribute == "Author-email":
return "test@test"
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.api_key = "testkey"
self.query = "dummyquery"
def test_scopus_client_user_agent(self):
"""Test to make sure the user agent string is set correctly."""
with patch("observatory.platform.utils.url_utils.metadata", return_value=TestScopusClient.MockMetadata):
obj = ScopusClient(api_key="")
generated_ua = obj._headers["User-Agent"]
self.assertEqual(generated_ua, get_user_agent(package_name="academic_observatory_workflows"))
def test_get_reset_date_from_error(self):
msg = f"{ScopusClient.QUOTA_EXCEED_ERROR_PREFIX}2000"
offset = ScopusClient.get_reset_date_from_error(msg)
self.assertEqual(offset, 2)
def test_get_next_page_url(self):
links = []
next_link = ScopusClient.get_next_page_url(links)
self.assertEqual(next_link, None)
expected_url = "http://next.url"
links = [{"@ref": "next", "@href": expected_url}]
next_link = ScopusClient.get_next_page_url(links)
self.assertEqual(next_link, expected_url)
links = [{"@ref": "self"}]
next_link = ScopusClient.get_next_page_url(links)
self.assertEqual(next_link, None)
links = [{}]
self.assertEqual(ScopusClient.get_next_page_url(links), None)
@patch("academic_observatory_workflows.workflows.scopus_telescope.urllib.request.Request")
@patch("academic_observatory_workflows.workflows.scopus_telescope.urllib.request.urlopen")
def test_retrieve_exceeded(self, m_urlopen, m_request):
m_urlopen.return_value = MockUrlResponse(code=429)
client = ScopusClient(api_key=self.api_key)
self.assertRaises(AirflowException, client.retrieve, self.query)
@patch("academic_observatory_workflows.workflows.scopus_telescope.urllib.request.Request")
@patch("academic_observatory_workflows.workflows.scopus_telescope.urllib.request.urlopen")
def test_retrieve_noresults(self, m_urlopen, m_request):
m_urlopen.return_value = MockUrlResponse(code=200, response=b"{}")
client = ScopusClient(api_key=self.api_key)
results, remaining, reset = client.retrieve(self.query)
self.assertEqual(results, [])
self.assertEqual(remaining, 0)
self.assertEqual(reset, 10)
@patch("academic_observatory_workflows.workflows.scopus_telescope.json.loads")
@patch("academic_observatory_workflows.workflows.scopus_telescope.urllib.request.Request")
@patch("academic_observatory_workflows.workflows.scopus_telescope.urllib.request.urlopen")
def test_retrieve_totalresults_zero(self, m_urlopen, m_request, m_json):
m_urlopen.return_value = MockUrlResponse(code=200, response=b"{}")
m_json.return_value = {
"search-results": {
"entry": [None],
"opensearch:totalResults": 0,
}
}
client = ScopusClient(api_key=self.api_key)
results, remaining, reset = client.retrieve(self.query)
self.assertEqual(results, [])
self.assertEqual(remaining, 0)
self.assertEqual(reset, 10)
@patch("academic_observatory_workflows.workflows.scopus_telescope.urllib.request.Request")
@patch("academic_observatory_workflows.workflows.scopus_telescope.urllib.request.urlopen")
def test_retrieve_unexpected_httpcode(self, m_urlopen, m_request):
m_urlopen.return_value = MockUrlResponse(code=403, response=b"{}")
client = ScopusClient(api_key=self.api_key)
self.assertRaises(AirflowException, client.retrieve, self.query)
@patch("academic_observatory_workflows.workflows.scopus_telescope.urllib.request.Request")
@patch("academic_observatory_workflows.workflows.scopus_telescope.urllib.request.urlopen")
def test_retrieve_max_results_exceeded(self, m_urlopen, m_request):
response = b'{"search-results": {"entry": [1], "opensearch:totalResults": 5001}}'
m_urlopen.return_value = MockUrlResponse(code=200, response=response)
client = ScopusClient(api_key=self.api_key)
self.assertRaises(AirflowException, client.retrieve, self.query)
@patch("academic_observatory_workflows.workflows.scopus_telescope.urllib.request.Request")
@patch("academic_observatory_workflows.workflows.scopus_telescope.urllib.request.urlopen")
def test_retrieve_no_next_url(self, m_urlopen, m_request):
response = b'{"search-results": {"entry": [1], "opensearch:totalResults": 2, "link": []}}'
m_urlopen.return_value = MockUrlResponse(code=200, response=response)
client = ScopusClient(api_key=self.api_key)
self.assertRaises(AirflowException, client.retrieve, self.query)
@patch("academic_observatory_workflows.workflows.scopus_telescope.urllib.request.Request")
@patch("academic_observatory_workflows.workflows.scopus_telescope.urllib.request.urlopen")
def test_retrieve(self, m_urlopen, m_request):
response = b'{"search-results": {"entry": [1], "opensearch:totalResults": 2, "link": [{"@ref": "next", "@href": "someurl"}]}}'
m_urlopen.return_value = MockUrlResponse(code=200, response=response)
client = ScopusClient(api_key=self.api_key)
results, _, _ = client.retrieve(self.query)
self.assertEqual(len(results), 2)
class TestScopusUtilWorker(unittest.TestCase):
def test_ctor(self):
util = ScopusUtilWorker(
client_id=0, client=None, quota_reset_date=pendulum.datetime(2000, 1, 1), quota_remaining=0
)
self.assertEqual(util.client_id, 0)
self.assertEqual(util.client, None)
self.assertEqual(util.quota_reset_date, pendulum.datetime(2000, 1, 1))
self.assertEqual(util.quota_remaining, 0)
    def test_build_query(self):
        institution_ids = ["60031226"]
        period = pendulum.period(pendulum.datetime(2021, 1, 1), pendulum.datetime(2021, 2, 1))
        query = ScopusUtility.build_query(institution_ids=institution_ids, period=period)
        expected = '(AF-ID(60031226)) AND PUBDATETXT("January 2021" or "February 2021")'
        self.assertEqual(query, expected)
def test_make_query(self):
worker = MagicMock()
worker.client = MagicMock()
worker.client.retrieve = MagicMock()
worker.client.retrieve.return_value = [{}, {}], 2000, 10
query = ""
results, num_results = ScopusUtility.make_query(worker=worker, query=query)
self.assertEqual(num_results, 2)
self.assertEqual(results, "[{}, {}]")
@freeze_time("2021-02-01")
@patch("academic_observatory_workflows.workflows.scopus_telescope.write_to_file")
def test_download_period(self, m_write_file):
conn = "conn_id"
worker = MagicMock()
worker.client = MagicMock()
worker.client.retrieve = MagicMock()
results = [{}] * (ScopusClient.MAX_RESULTS + 1)
worker.client.retrieve.return_value = results, 2000, 10
period = pendulum.period(pendulum.date(2021, 1, 1), pendulum.date(2021, 2, 1))
institution_ids = ["123"]
ScopusUtility.download_period(
worker=worker, conn=conn, period=period, institution_ids=institution_ids, download_dir="/tmp"
)
args, _ = m_write_file.call_args
self.assertEqual(args[0], json.dumps(results))
self.assertEqual(args[1], "/tmp/2021-01-01_2021-02-01_2021-02-01T00:00:00+00:00.json")
@freeze_time("2021-02-02")
def test_sleep_if_needed_needed(self):
reset_date = pendulum.datetime(2021, 2, 2, 0, 0, 1)
with patch("academic_observatory_workflows.workflows.scopus_telescope.logging.info") as m_log:
ScopusUtility.sleep_if_needed(reset_date=reset_date, conn="conn")
self.assertEqual(m_log.call_count, 1)
@freeze_time("2021-02-02")
def test_sleep_if_needed_not_needed(self):
reset_date = pendulum.datetime(2021, 2, 1)
with patch("academic_observatory_workflows.workflows.scopus_telescope.logging.info") as m_log:
ScopusUtility.sleep_if_needed(reset_date=reset_date, conn="conn")
self.assertEqual(m_log.call_count, 0)
@freeze_time("2021-02-02")
def test_update_reset_date(self):
conn = "conn_id"
worker = MagicMock()
now = pendulum.now("UTC")
worker.quota_reset_date = now
new_ts = now.int_timestamp * 1000 + 2000
error_msg = f"{ScopusClient.QUOTA_EXCEED_ERROR_PREFIX}{new_ts}"
ScopusUtility.update_reset_date(conn=conn, error_msg=error_msg, worker=worker)
self.assertTrue(worker.quota_reset_date > now)
@patch.object(ScopusUtilWorker, "QUEUE_WAIT_TIME", 1)
def test_download_worker_empty_retry_exit(self):
def trigger_exit(event):
now = pendulum.now("UTC")
trigger = now.add(seconds=5)
while pendulum.now("UTC") < trigger:
continue
event.set()
conn = "conn"
queue = Queue()
event = Event()
institution_ids = ["123"]
thread = Thread(target=trigger_exit, args=(event,))
thread.start()
worker = ScopusUtilWorker(client_id=0, client=None, quota_reset_date=pendulum.now("UTC"), quota_remaining=10)
ScopusUtility.download_worker(
worker=worker,
exit_event=event,
taskq=queue,
conn=conn,
institution_ids=institution_ids,
download_dir="",
)
thread.join()
@patch("academic_observatory_workflows.workflows.scopus_telescope.ScopusUtility.download_period")
def test_download_worker_download_exit(self, m_download):
def trigger_exit(event):
now = pendulum.now("UTC")
trigger = now.add(seconds=5)
while pendulum.now("UTC") < trigger:
continue
event.set()
conn = "conn"
queue = Queue()
now = pendulum.now("UTC")
queue.put(pendulum.period(now, now))
event = Event()
institution_ids = ["123"]
thread = Thread(target=trigger_exit, args=(event,))
thread.start()
worker = ScopusUtilWorker(client_id=0, client=None, quota_reset_date=pendulum.now("UTC"), quota_remaining=10)
ScopusUtility.download_worker(
worker=worker,
exit_event=event,
taskq=queue,
conn=conn,
institution_ids=institution_ids,
download_dir="",
)
thread.join()
@patch("academic_observatory_workflows.workflows.scopus_telescope.ScopusUtility.download_period")
def test_download_worker_download_quota_exceed_retry_exit(self, m_download):
def trigger_exit(event):
now = pendulum.now("UTC")
trigger = now.add(seconds=1)
while pendulum.now("UTC") < trigger:
continue
event.set()
now = pendulum.now("UTC")
next_reset = now.add(seconds=2).int_timestamp * 1000
m_download.side_effect = [AirflowException(f"{ScopusClient.QUOTA_EXCEED_ERROR_PREFIX}{next_reset}"), None]
conn = "conn"
queue = Queue()
queue.put(pendulum.period(now, now))
event = Event()
institution_ids = ["123"]
thread = Thread(target=trigger_exit, args=(event,))
thread.start()
worker = ScopusUtilWorker(client_id=0, client=None, quota_reset_date=now, quota_remaining=10)
ScopusUtility.download_worker(
worker=worker,
exit_event=event,
taskq=queue,
conn=conn,
institution_ids=institution_ids,
download_dir="",
)
thread.join()
@patch("academic_observatory_workflows.workflows.scopus_telescope.ScopusUtility.download_period")
def test_download_worker_download_uncaught_exception(self, m_download):
def trigger_exit(event):
now = pendulum.now("UTC")
trigger = now.add(seconds=5)
while pendulum.now("UTC") < trigger:
continue
event.set()
now = pendulum.now("UTC")
m_download.side_effect = AirflowException("Some other error")
conn = "conn"
queue = Queue()
queue.put(pendulum.period(now, now))
queue.put(pendulum.period(now, now))
event = Event()
institution_ids = ["123"]
thread = Thread(target=trigger_exit, args=(event,))
thread.start()
worker = ScopusUtilWorker(client_id=0, client=None, quota_reset_date=now, quota_remaining=10)
self.assertRaises(
AirflowException,
ScopusUtility.download_worker,
worker=worker,
exit_event=event,
taskq=queue,
conn=conn,
institution_ids=institution_ids,
download_dir="",
)
thread.join()
@patch("academic_observatory_workflows.workflows.scopus_telescope.ScopusUtility.download_period")
def test_download_parallel(self, m_download):
now = pendulum.now("UTC")
conn = "conn"
queue = Queue()
institution_ids = ["123"]
m_download.return_value = None
for _ in range(4):
queue.put(pendulum.period(now, now))
workers = [
ScopusUtilWorker(client_id=i, client=None, quota_reset_date=now, quota_remaining=10) for i in range(2)
]
ScopusUtility.download_parallel(
workers=workers, taskq=queue, conn=conn, institution_ids=institution_ids, download_dir=""
)
class TestScopusJsonParser(unittest.TestCase):
"""Test parsing facilities."""
def __init__(self, *args, **kwargs):
super(TestScopusJsonParser, self).__init__(*args, **kwargs)
self.institution_ids = ["60031226"] # Curtin University
self.data = {
"dc:identifier": "scopusid",
"eid": "testid",
"dc:title": "arttitle",
"prism:aggregationType": "source",
"subtypeDescription": "typedesc",
"citedby-count": "345",
"prism:publicationName": "pubname",
"prism:isbn": "isbn",
"prism:issn": "issn",
"prism:eIssn": "eissn",
"prism:coverDate": "2010-12-01",
"prism:doi": "doi",
"pii": "pii",
"pubmed-id": "med",
"orcid": "orcid",
"dc:creator": "firstauth",
"source-id": "1000",
"openaccess": "1",
"openaccessFlag": False,
"affiliation": [
{
"affilname": "aname",
"affiliation-city": "acity",
"affiliation-country": "country",
"afid": "id",
"name-variant": "variant",
}
],
"author": [
{
"authid": "id",
"orcid": "id",
"authname": "name",
"given-name": "first",
"surname": "last",
"initials": "mj",
"afid": "id",
}
],
"dc:description": "abstract",
"authkeywords": ["words"],
"article-number": "artno",
"fund-acr": "acr",
"fund-no": "no",
"fund-sponsor": "sponsor",
}
def test_get_affiliations(self):
"""Test get affiliations"""
affil = ScopusJsonParser.get_affiliations({})
self.assertEqual(affil, None)
affil = ScopusJsonParser.get_affiliations(self.data)
self.assertEqual(len(affil), 1)
af = affil[0]
self.assertEqual(af["name"], "aname")
self.assertEqual(af["city"], "acity")
self.assertEqual(af["country"], "country")
self.assertEqual(af["id"], "id")
self.assertEqual(af["name_variant"], "variant")
# 0 length affiliations
affil = ScopusJsonParser.get_affiliations({"affiliation": []})
self.assertEqual(affil, None)
def test_get_authors(self):
"""Test get authors"""
author = ScopusJsonParser.get_authors({})
self.assertEqual(author, None)
author = ScopusJsonParser.get_authors(self.data)
self.assertEqual(len(author), 1)
au = author[0]
self.assertEqual(au["authid"], "id")
self.assertEqual(au["orcid"], "id")
self.assertEqual(au["full_name"], "name")
self.assertEqual(au["first_name"], "first")
self.assertEqual(au["last_name"], "last")
self.assertEqual(au["initials"], "mj")
self.assertEqual(au["afid"], "id")
# 0 length author
author = ScopusJsonParser.get_authors({"author": []})
self.assertEqual(author, None)
def test_get_identifier_list(self):
ids = ScopusJsonParser.get_identifier_list({}, "myid")
self.assertEqual(ids, None)
ids = ScopusJsonParser.get_identifier_list({"myid": "thing"}, "myid")
self.assertEqual(ids, ["thing"])
ids = ScopusJsonParser.get_identifier_list({"myid": []}, "myid")
self.assertEqual(ids, None)
ids = ScopusJsonParser.get_identifier_list({"myid": [{"$": "thing"}]}, "myid")
self.assertEqual(ids, ["thing"])
def test_parse_json(self):
"""Test the parser."""
harvest_datetime = pendulum.now("UTC").isoformat()
release_date = "2018-01-01"
entry = ScopusJsonParser.parse_json(
data=self.data,
harvest_datetime=harvest_datetime,
release_date=release_date,
institution_ids=self.institution_ids,
)
self.assertEqual(entry["harvest_datetime"], harvest_datetime)
self.assertEqual(entry["release_date"], release_date)
self.assertEqual(entry["title"], "arttitle")
self.assertEqual(entry["identifier"], "scopusid")
self.assertEqual(entry["creator"], "firstauth")
self.assertEqual(entry["publication_name"], "pubname")
self.assertEqual(entry["cover_date"], "2010-12-01")
self.assertEqual(entry["doi"][0], "doi")
self.assertEqual(entry["eissn"][0], "eissn")
self.assertEqual(entry["issn"][0], "issn")
self.assertEqual(entry["isbn"][0], "isbn")
self.assertEqual(entry["aggregation_type"], "source")
self.assertEqual(entry["pubmed_id"], "med")
self.assertEqual(entry["pii"], "pii")
self.assertEqual(entry["eid"], "testid")
self.assertEqual(entry["subtype_description"], "typedesc")
self.assertEqual(entry["open_access"], 1)
self.assertEqual(entry["open_access_flag"], False)
self.assertEqual(entry["citedby_count"], 345)
self.assertEqual(entry["source_id"], 1000)
self.assertEqual(entry["orcid"], "orcid")
self.assertEqual(len(entry["affiliations"]), 1)
af = entry["affiliations"][0]
self.assertEqual(af["name"], "aname")
self.assertEqual(af["city"], "acity")
self.assertEqual(af["country"], "country")
self.assertEqual(af["id"], "id")
self.assertEqual(af["name_variant"], "variant")
self.assertEqual(entry["abstract"], "abstract")
self.assertEqual(entry["article_number"], "artno")
self.assertEqual(entry["fund_agency_ac"], "acr")
self.assertEqual(entry["fund_agency_id"], "no")
self.assertEqual(entry["fund_agency_name"], "sponsor")
words = entry["keywords"]
self.assertEqual(len(words), 1)
self.assertEqual(words[0], "words")
authors = entry["authors"]
self.assertEqual(len(authors), 1)
au = authors[0]
self.assertEqual(au["authid"], "id")
self.assertEqual(au["orcid"], "id")
self.assertEqual(au["full_name"], "name")
self.assertEqual(au["first_name"], "first")
self.assertEqual(au["last_name"], "last")
self.assertEqual(au["initials"], "mj")
self.assertEqual(au["afid"], "id")
self.assertEqual(len(entry["institution_ids"]), 1)
self.assertEqual(entry["institution_ids"], self.institution_ids)
class TestScopusTelescope(ObservatoryTestCase):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.project_id = os.getenv("TEST_GCP_PROJECT_ID")
self.host = "localhost"
self.api_port = 5000
self.data_location = "us"
self.org_name = "Curtin University"
self.conn_id = "scopus_curtin_university"
self.earliest_date = pendulum.datetime(2021, 1, 1)
self.fixture_dir = test_fixtures_folder("scopus")
self.fixture_file = os.path.join(self.fixture_dir, "test.json")
with open(self.fixture_file, "r") as f:
self.results_str = f.read()
self.results_len = 1
def setup_connections(self, env):
# Add Observatory API connection
conn = Connection(conn_id=AirflowConns.OBSERVATORY_API, uri=f"http://:password@{self.host}:{self.api_port}")
env.add_connection(conn)
# Add login/pass connection
conn = Connection(conn_id=self.conn_id, uri=f"http://login:password@localhost")
env.add_connection(conn)
def setup_api(self, env, extra=None):
dt = pendulum.now("UTC")
if extra is None:
extra = {
"airflow_connections": [self.conn_id],
"institution_ids": ["123"],
"earliest_date": self.earliest_date.isoformat(),
"view": "STANDARD",
}
name = "Scopus Telescope"
telescope_type = orm.TelescopeType(name=name, type_id=ScopusTelescope.DAG_ID, created=dt, modified=dt)
env.api_session.add(telescope_type)
organisation = orm.Organisation(
name=self.org_name,
created=dt,
modified=dt,
gcp_project_id=self.project_id,
gcp_download_bucket=env.download_bucket,
gcp_transform_bucket=env.transform_bucket,
)
env.api_session.add(organisation)
telescope = orm.Telescope(
name=name,
telescope_type=telescope_type,
organisation=organisation,
modified=dt,
created=dt,
extra=extra,
)
env.api_session.add(telescope)
env.api_session.commit()
def get_telescope(self, dataset_id):
api = make_observatory_api()
telescope_type = api.get_telescope_type(type_id=ScopusTelescope.DAG_ID)
telescopes = api.get_telescopes(telescope_type_id=telescope_type.id, limit=1000)
self.assertEqual(len(telescopes), 1)
dag_id = make_dag_id(ScopusTelescope.DAG_ID, telescopes[0].organisation.name)
airflow_conns = telescopes[0].extra.get("airflow_connections")
institution_ids = telescopes[0].extra.get("institution_ids")
earliest_date_str = telescopes[0].extra.get("earliest_date")
earliest_date = pendulum.parse(earliest_date_str)
airflow_vars = [
AirflowVars.DATA_PATH,
AirflowVars.DATA_LOCATION,
]
telescope = ScopusTelescope(
dag_id=dag_id,
dataset_id=dataset_id,
airflow_conns=airflow_conns,
airflow_vars=airflow_vars,
institution_ids=institution_ids,
earliest_date=earliest_date,
)
return telescope
def test_ctor(self):
self.assertRaises(
AirflowException,
ScopusTelescope,
dag_id="dag",
dataset_id="dataset",
airflow_conns=[],
airflow_vars=[],
institution_ids=[],
earliest_date=pendulum.now("UTC"),
)
self.assertRaises(
AirflowException,
ScopusTelescope,
dag_id="dag",
dataset_id="dataset",
airflow_conns=["conn"],
airflow_vars=[],
institution_ids=[],
earliest_date=pendulum.now("UTC"),
)
def test_dag_structure(self):
"""Test that the ScopusTelescope DAG has the correct structure.
:return: None
"""
dag = ScopusTelescope(
dag_id="dag",
airflow_conns=["conn"],
airflow_vars=[],
institution_ids=["10"],
earliest_date=pendulum.now("UTC"),
view="standard",
).make_dag()
self.assert_dag_structure(
{
"check_dependencies": ["download"],
"download": ["upload_downloaded"],
"upload_downloaded": ["transform"],
"transform": ["upload_transformed"],
"upload_transformed": ["bq_load"],
"bq_load": ["cleanup"],
"cleanup": [],
},
dag,
)
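# Illustrative note (not part of the original suite): the structure dict above
# encodes a strictly linear task chain, i.e. every task has at most one
# downstream task, which could be checked standalone with:
# assert all(len(downstream) <= 1 for downstream in structure.values())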
def test_dag_load(self):
"""Test that the DAG can be loaded from a DAG bag."""
env = ObservatoryEnvironment(self.project_id, self.data_location, api_host=self.host, api_port=self.api_port)
with env.create():
self.setup_connections(env)
self.setup_api(env)
dag_file = os.path.join(module_file_path("academic_observatory_workflows.dags"), "scopus_telescope.py")
dag_id = make_dag_id(ScopusTelescope.DAG_ID, self.org_name)
self.assert_dag_load(dag_id, dag_file)
def test_dag_load_missing_params(self):
"""Test that the DAG can be loaded from a DAG bag."""
env = ObservatoryEnvironment(self.project_id, self.data_location, api_host=self.host, api_port=self.api_port)
extra = {
"airflow_connections": [self.conn_id],
"institution_ids": ["123"],
"earliest_date": self.earliest_date.isoformat(),
}
with env.create():
self.setup_connections(env)
self.setup_api(env, extra=extra)
dag_file = os.path.join(module_file_path("academic_observatory_workflows.dags"), "scopus_telescope.py")
dag_id = make_dag_id(ScopusTelescope.DAG_ID, self.org_name)
self.assertRaises(AssertionError, self.assert_dag_load, dag_id, dag_file)
def test_telescope(self):
env = ObservatoryEnvironment(self.project_id, self.data_location, api_host=self.host, api_port=self.api_port)
with env.create():
self.setup_connections(env)
self.setup_api(env)
dataset_id = env.add_dataset()
execution_date = pendulum.datetime(2021, 1, 1)
telescope = self.get_telescope(dataset_id)
dag = telescope.make_dag()
release_date = pendulum.datetime(2021, 2, 1)
release = ScopusRelease(
dag_id=make_dag_id(ScopusTelescope.DAG_ID, self.org_name),
release_date=release_date,
api_keys=["1"],
institution_ids=["123"],
view="standard",
earliest_date=pendulum.datetime(2021, 1, 1),
)
with env.create_dag_run(dag, execution_date):
# check dependencies
ti = env.run_task(telescope.check_dependencies.__name__)
self.assertEqual(ti.state, State.SUCCESS)
# download
with patch(
"academic_observatory_workflows.workflows.scopus_telescope.ScopusUtility.make_query"
) as m_search:
m_search.return_value = self.results_str, self.results_len
ti = env.run_task(telescope.download.__name__)
self.assertEqual(ti.state, State.SUCCESS)
self.assertEqual(len(release.download_files), 1)
self.assertEqual(m_search.call_count, 1)
# upload downloaded
ti = env.run_task(telescope.upload_downloaded.__name__)
self.assertEqual(ti.state, State.SUCCESS)
self.assert_blob_integrity(
env.download_bucket, blob_name(release.download_files[0]), release.download_files[0]
)
# transform
ti = env.run_task(telescope.transform.__name__)
self.assertEqual(ti.state, State.SUCCESS)
# upload_transformed
ti = env.run_task(telescope.upload_transformed.__name__)
self.assertEqual(ti.state, State.SUCCESS)
for file in release.transform_files:
self.assert_blob_integrity(env.transform_bucket, blob_name(file), file)
# bq_load
ti = env.run_task(telescope.bq_load.__name__)
self.assertEqual(ti.state, State.SUCCESS)
table_id = (
f"{self.project_id}.{dataset_id}."
f"{bigquery_sharded_table_id(ScopusTelescope.DAG_ID, release.release_date)}"
)
expected_rows = 1
self.assert_table_integrity(table_id, expected_rows)
# Sample some fields to check in the first row
sql = f"SELECT * FROM {self.project_id}.{dataset_id}.scopus20210201"
with patch("observatory.platform.utils.gc_utils.bq_query_bytes_daily_limit_check"):
records = list(run_bigquery_query(sql))
self.assertEqual(records[0]["aggregation_type"], "Journal")
self.assertEqual(records[0]["source_id"], 1)
self.assertEqual(records[0]["eid"], "somedoi")
self.assertEqual(records[0]["pii"], "S00000")
self.assertEqual(records[0]["identifier"], "SCOPUS_ID:000000")
self.assertEqual(records[0]["doi"], ["10.0000/00"])
self.assertEqual(records[0]["publication_name"], "Journal of Things")
self.assertEqual(records[0]["institution_ids"], [123])
self.assertEqual(records[0]["creator"], "Name F.")
self.assertEqual(records[0]["article_number"], "1")
self.assertEqual(records[0]["title"], "Article title")
self.assertEqual(records[0]["issn"], ["00000000"])
self.assertEqual(records[0]["subtype_description"], "Article")
self.assertEqual(records[0]["citedby_count"], 0)
# cleanup
download_folder, extract_folder, transform_folder = (
release.download_folder,
release.extract_folder,
release.transform_folder,
)
ti = env.run_task(telescope.cleanup.__name__)
self.assertEqual(ti.state, State.SUCCESS)
self.assert_cleanup(download_folder, extract_folder, transform_folder)
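# A minimal sketch (hypothetical helper, assuming the YYYYMMDD shard-suffix
# convention used by bigquery_sharded_table_id) showing why release_date
# 2021-02-01 maps to the "scopus20210201" table queried above:
def _example_sharded_table_id(prefix, release_date):
    """Return a hypothetical sharded table id: prefix plus YYYYMMDD suffix."""
    return f"{prefix}{release_date.strftime('%Y%m%d')}"
# _example_sharded_table_id("scopus", pendulum.datetime(2021, 2, 1)) == "scopus20210201"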
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,418
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/tests/test_mag_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: James Diprose
import glob
import os
from typing import List
from unittest.mock import patch
from zipfile import ZipFile
import natsort
import pendulum
from academic_observatory_workflows.config import test_fixtures_folder
from academic_observatory_workflows.workflows.mag_telescope import (
MagTelescope,
db_load_mag_release,
list_mag_release_files,
transform_mag_file,
transform_mag_release,
)
from click.testing import CliRunner
from google.cloud import bigquery, storage
from google.cloud.storage import Blob
from observatory.platform.utils.gc_utils import upload_files_to_cloud_storage
from observatory.platform.utils.test_utils import ObservatoryTestCase, random_id
def extract_mag_release(file_path: str, unzip_path: str):
"""Extract a MAG release.
:param file_path: the path to the archive to unzip.
:param unzip_path: the path to unzip the files into. If the zip is of a folder, then the folder will be unzipped
into this path.
:return: None.
"""
with ZipFile(file_path) as zip_file:
zip_file.extractall(unzip_path)
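# Example usage (hypothetical paths): extract a fixture archive so that its
# release folder, e.g. "mag-2020-05-21", appears under the unzip path.
# extract_mag_release("/tmp/mag-2020-05-21.zip", "/tmp/extracted")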
class TestMagTelescope(ObservatoryTestCase):
"""Tests for the functions used by the MAG telescope"""
def __init__(self, *args, **kwargs):
"""Constructor which sets up variables used by tests.
:param args: arguments.
:param kwargs: keyword arguments.
"""
super(TestMagTelescope, self).__init__(*args, **kwargs)
self.gc_project_id: str = os.getenv("TEST_GCP_PROJECT_ID")
self.gc_bucket_name: str = os.getenv("TEST_GCP_BUCKET_NAME")
self.gc_data_location: str = os.getenv("TEST_GCP_DATA_LOCATION")
self.data_path = test_fixtures_folder("mag", "mag-2020-05-21.zip")
self.release_date = pendulum.datetime(year=2020, month=5, day=21)
self.release_folder = "mag-2020-05-21"
self.extracted_folder = "extracted"
self.transformed_folder = "transformed"
self.release_folder = "mag-2020-05-21"
self.extracted_folder = "extracted"
self.transformed_folder = "transformed"
self.folders = ["advanced", "mag", "nlp", "samples"]
self.sub_folders = [
("Authors.txt_aes_tmp_2020-10-11_02-01-24", "Authors.txt"),
("PaperExtendedAttributes.txt_aes_tmp_2020-10-11_02-02-00", "PaperExtendedAttributes.txt"),
("PaperUrls.txt_aes_tmp_2020-10-11_02-01-43", "PaperUrls.txt"),
]
self.advanced = [
"EntityRelatedEntities.txt",
"FieldOfStudyChildren.txt",
"FieldOfStudyExtendedAttributes.txt",
"FieldsOfStudy.txt",
"PaperFieldsOfStudy.txt",
"PaperRecommendations.txt",
"RelatedFieldOfStudy.txt",
]
self.mag = [
"Affiliations.txt",
"Authors.txt",
"ConferenceInstances.txt",
"ConferenceSeries.txt",
"Journals.txt",
"PaperAuthorAffiliations.txt",
"PaperExtendedAttributes.txt",
"PaperReferences.txt",
"PaperUrls.txt",
"Papers.txt",
]
self.nlp = [
"PaperAbstractsInvertedIndex.txt.1",
"PaperAbstractsInvertedIndex.txt.2",
"PaperCitationContexts.txt",
]
self.samples = [
"CreateDatabase.usql",
"CreateFunctions.usql",
"HIndexDatabricksSample.py",
"ReadMe.pdf",
"ReleaseNote.txt",
]
def test_list_mag_release_files(self):
"""Test that list_mag_release_files lists all files in the MAG releases folder.
:return: None.
"""
with CliRunner().isolated_filesystem():
# Make MAG folders
folders = []
for _, folder in enumerate(self.folders):
path = os.path.join(self.release_folder, folder)
os.makedirs(path, exist_ok=True)
folders.append(path)
# Make mag sub folders
for sub_folder, sub_file in self.sub_folders:
sub_folder_path = os.path.join(self.release_folder, "mag", sub_folder)
os.makedirs(sub_folder_path, exist_ok=True)
sub_file_path = os.path.join(sub_folder_path, sub_file)
open(sub_file_path, "a").close()
# advanced files
expected_files = []
for file_name in self.advanced:
path = os.path.join(folders[0], file_name)
open(path, "a").close()
expected_files.append(path)
# mag files
for file_name in self.mag:
path = os.path.join(folders[1], file_name)
open(path, "a").close()
expected_files.append(path)
# nlp files
for file_name in self.nlp:
path = os.path.join(folders[2], file_name)
open(path, "a").close()
expected_files.append(path)
# sample files
for file_name in self.samples:
path = os.path.join(folders[3], file_name)
open(path, "a").close()
# List MAG releases and check that output is as expected
files = list_mag_release_files(self.release_folder)
actual_files = [str(f) for f in files]
self.assertListEqual(expected_files, actual_files)
# Check that this function works with a folder of transformed files
with CliRunner().isolated_filesystem():
# Make MAG files
file_names = self.advanced + self.mag + self.nlp
expected_files = []
os.makedirs(self.release_folder, exist_ok=True)
for file_name in file_names:
path = os.path.join(self.release_folder, file_name)
open(path, "a").close()
expected_files.append(path)
expected_files = sorted(expected_files)
# List MAG releases and check that output is as expected
files = list_mag_release_files(self.release_folder)
actual_files = [str(f) for f in files]
self.assertListEqual(expected_files, actual_files)
def test_transform_mag_file(self):
"""Tests that transform_mag_file transforms a single file correctly.
:return: None.
"""
with CliRunner().isolated_filesystem():
# Extract release zip file into folder
extract_mag_release(self.data_path, self.extracted_folder)
# Make input and output paths
input_file_path = os.path.join(self.extracted_folder, self.release_folder, "mag", "Affiliations.txt")
output_file_path = os.path.join(self.transformed_folder, self.release_folder, "mag")
os.makedirs(output_file_path)
output_file_path = os.path.join(output_file_path, "Affiliations.txt")
# Transform file and check result
result = transform_mag_file(input_file_path, output_file_path)
self.assertTrue(result)
expected_file_hash = "5570569e573a517587d3d11ec00eebf9"
self.assert_file_integrity(output_file_path, expected_file_hash, "md5")
def test_transform_mag_release(self):
"""Tests that transform_mag_release transforms an entire MAG release.
:return: None.
"""
with CliRunner().isolated_filesystem():
# Make expected files
expected_files = []
for file in self.advanced + self.mag + self.nlp:
expected_files.append(os.path.join(self.transformed_folder, self.release_folder, file))
expected_files = natsort.natsorted(expected_files)
# Extract release zip file into folder
extract_mag_release(self.data_path, self.extracted_folder)
# Transform release
input_release_path = os.path.join(self.extracted_folder, self.release_folder)
output_release_path = os.path.join(self.transformed_folder, self.release_folder)
os.makedirs(output_release_path)
result = transform_mag_release(input_release_path, output_release_path)
# Test that the expected files exist
self.assertTrue(result)
actual_files = glob.glob(os.path.join(output_release_path, "**"))
actual_files = natsort.natsorted(actual_files)
self.assertEqual(expected_files, actual_files)
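# Note: natsort.natsorted produces the "natural" ordering the assertion above
# relies on, e.g. natsorted(["a.txt.10", "a.txt.2"]) == ["a.txt.2", "a.txt.10"],
# whereas plain sorted() would place "a.txt.10" first.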
def test_bq_load_mag_release(self):
"""Tests that db_load_mag_release successfully loads a MAG release into BigQuery.
:return: None.
"""
with CliRunner().isolated_filesystem():
# Extract release zip file into folder
extract_mag_release(self.data_path, self.extracted_folder)
# Transform release
input_release_path = os.path.join(self.extracted_folder, self.release_folder)
output_release_path = os.path.join(self.transformed_folder, self.release_folder)
os.makedirs(output_release_path)
result = transform_mag_release(input_release_path, output_release_path)
self.assertTrue(result)
# Upload to cloud storage
base_folder = random_id()
print(f"base_folder: {base_folder}")
release_path = f"{base_folder}/{self.release_folder}"
posix_paths = list_mag_release_files(output_release_path)
file_paths = [str(path) for path in posix_paths]
blob_names = [f"{release_path}/{path.name}" for path in posix_paths]
# Create random dataset id
client = bigquery.Client()
dataset_id = random_id()
try:
# Upload files to cloud storage
result = upload_files_to_cloud_storage(self.gc_bucket_name, blob_names, file_paths)
self.assertTrue(result)
# Load release into BigQuery
result = db_load_mag_release(
self.gc_project_id,
self.gc_bucket_name,
self.gc_data_location,
release_path,
self.release_date,
dataset_id=dataset_id,
)
# Check that all tables have loaded
self.assertTrue(result)
# Check that PaperAbstractsInvertedIndex has 100 rows, since it was loaded from two tables with 50
# rows each
table: bigquery.Table = client.get_table(f"{dataset_id}.PaperAbstractsInvertedIndex20200521")
expected_num_rows = 100
self.assertEqual(expected_num_rows, table.num_rows)
finally:
# Cleanup
client.delete_dataset(dataset_id, delete_contents=True, not_found_ok=True)
# Delete all blobs
storage_client = storage.Client()
bucket = storage_client.get_bucket(self.gc_bucket_name)
blobs: List[Blob] = list(bucket.list_blobs(prefix=base_folder))
for blob in blobs:
blob.delete()
@patch("academic_observatory_workflows.workflows.mag_telescope.delete_old_xcoms")
@patch("academic_observatory_workflows.workflows.mag_telescope.pull_release_dates")
def test_delete_old_xcoms_called(self, m_pull_release_dates, m_delete_xcoms):
"""Just test that delete_old_xcoms is called with the expected parameters"""
m_pull_release_dates.return_value = []
execution_date = pendulum.datetime(2021, 1, 1)
kwargs = {"ti": None, "execution_date": execution_date}
MagTelescope.cleanup(**kwargs)
self.assertEqual(m_delete_xcoms.call_count, 1)
_, call_args = m_delete_xcoms.call_args
self.assertEqual(call_args["dag_id"], MagTelescope.DAG_ID)
self.assertEqual(call_args["execution_date"], execution_date)
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,419
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/dags/web_of_science_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Tuan Chien
# The keywords airflow and DAG are required to load the DAGs from this file, see bullet 2 in the Apache Airflow FAQ:
# https://airflow.apache.org/docs/stable/faq.html
import pendulum
from academic_observatory_workflows.workflows.web_of_science_telescope import (
WebOfScienceTelescope,
)
from observatory.platform.utils.airflow_utils import AirflowVars
from observatory.platform.utils.api import make_observatory_api
from observatory.platform.utils.workflow_utils import make_dag_id
api = make_observatory_api()
telescope_type = api.get_telescope_type(type_id=WebOfScienceTelescope.DAG_ID)
telescopes = api.get_telescopes(telescope_type_id=telescope_type.id, limit=1000)
# Create workflows for each organisation
for telescope in telescopes:
dag_id = make_dag_id(WebOfScienceTelescope.DAG_ID, telescope.organisation.name)
airflow_conns = telescope.extra.get("airflow_connections")
institution_ids = telescope.extra.get("institution_ids")
if airflow_conns is None or institution_ids is None:
raise Exception(f"airflow_conns: {airflow_conns} or institution_ids: {institution_ids} is None")
# earliest_date is parsed into a datetime.date object by the Python API client
earliest_date_str = telescope.extra.get("earliest_date")
earliest_date = pendulum.parse(earliest_date_str)
airflow_vars = [
AirflowVars.DATA_PATH,
AirflowVars.DATA_LOCATION,
]
telescope = WebOfScienceTelescope(
dag_id=dag_id,
airflow_conns=airflow_conns,
airflow_vars=airflow_vars,
institution_ids=institution_ids,
earliest_date=earliest_date,
)
globals()[telescope.dag_id] = telescope.make_dag()
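# Assigning each generated DAG to a module-level global is what allows
# Airflow's DagBag to discover the dynamically created DAGs when it imports
# this file.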
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,420
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/dags/elastic_import_workflow.py
|
# Copyright 2020, 2021 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: James Diprose
# The keywords airflow and DAG are required to load the DAGs from this file, see bullet 2 in the Apache Airflow FAQ:
# https://airflow.apache.org/docs/stable/faq.html
import json
import os
from typing import Callable, Dict, List
from academic_observatory_workflows.config import elastic_mappings_folder
from observatory.platform.elastic.elastic import KeepInfo, KeepOrder
from observatory.platform.elastic.kibana import TimeField
from observatory.platform.utils.jinja2_utils import render_template
from observatory.platform.utils.workflow_utils import make_dag_id
from observatory.platform.workflows.elastic_import_workflow import (
ElasticImportConfig,
ElasticImportWorkflow,
load_elastic_mappings_simple,
)
DATASET_ID = "data_export"
DATA_LOCATION = "us"
FILE_TYPE_JSONL = "jsonl.gz"
DAG_ONIX_WORKFLOW_PREFIX = "onix_workflow"
DAG_PREFIX = "elastic_import"
ELASTIC_MAPPINGS_PATH = elastic_mappings_folder()
AO_KIBANA_TIME_FIELDS = [TimeField("^.*$", "published_year")]
# These can be customised per DAG. Just using some generic settings for now.
index_keep_info = {
"": KeepInfo(ordering=KeepOrder.newest, num=2),
"ao": KeepInfo(ordering=KeepOrder.newest, num=2),
}
def load_elastic_mappings_ao(path: str, table_prefix: str, simple_prefixes: List = None):
"""For the Observatory project, load the Elastic mappings for a given table_prefix.
:param path: the path to the mappings files.
:param table_prefix: the table_id prefix (without shard date).
:param simple_prefixes: the prefixes of mappings to load with the load_elastic_mappings_simple function.
:return: the rendered mapping as a Dict.
"""
# Set default simple_prefixes
if simple_prefixes is None:
simple_prefixes = ["ao_doi"]
if not table_prefix.startswith("ao"):
raise ValueError("Table must begin with 'ao'")
elif any([table_prefix.startswith(prefix) for prefix in simple_prefixes]):
return load_elastic_mappings_simple(path, table_prefix)
else:
prefix, aggregate, facet = table_prefix.split("_", 2)
mappings_file_name = "ao-relations-mappings.json.jinja2"
is_fixed_facet = facet in ["unique_list", "access_types", "disciplines", "output_types", "events", "metrics"]
if is_fixed_facet:
mappings_file_name = f"ao-{facet.replace('_', '-')}-mappings.json.jinja2"
mappings_path = os.path.join(path, mappings_file_name)
return json.loads(render_template(mappings_path, aggregate=aggregate, facet=facet))
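# For example (hypothetical prefix): "ao_country_metrics".split("_", 2) yields
# ["ao", "country", "metrics"], and because the facet "metrics" is in the fixed
# list above, the mappings file "ao-metrics-mappings.json.jinja2" is rendered.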
configs = [
ElasticImportConfig(
dag_id=make_dag_id(DAG_PREFIX, "observatory"),
project_id="academic-observatory",
dataset_id=DATASET_ID,
bucket_name="academic-observatory-transform",
elastic_conn_key="elastic_main",
kibana_conn_key="kibana_main",
data_location=DATA_LOCATION,
file_type=FILE_TYPE_JSONL,
sensor_dag_ids=["doi"],
kibana_spaces=["coki-scratch-space", "coki-dashboards", "dev-coki-dashboards"],
elastic_mappings_path=ELASTIC_MAPPINGS_PATH,
elastic_mappings_func=load_elastic_mappings_ao,
kibana_time_fields=AO_KIBANA_TIME_FIELDS,
index_keep_info=index_keep_info,
)
]
for config in configs:
dag = ElasticImportWorkflow(
dag_id=config.dag_id,
project_id=config.project_id,
dataset_id=config.dataset_id,
bucket_name=config.bucket_name,
elastic_conn_key=config.elastic_conn_key,
kibana_conn_key=config.kibana_conn_key,
data_location=config.data_location,
file_type=config.file_type,
sensor_dag_ids=config.sensor_dag_ids,
elastic_mappings_folder=ELASTIC_MAPPINGS_PATH,
elastic_mappings_func=config.elastic_mappings_func,
kibana_spaces=config.kibana_spaces,
kibana_time_fields=config.kibana_time_fields,
index_keep_info=config.index_keep_info,
).make_dag()
globals()[dag.dag_id] = dag
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,421
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/tests/test_grid_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: James Diprose, Aniek Roelofs, Tuan Chien
import logging
import os
import shutil
import unittest
from pathlib import Path
from unittest.mock import MagicMock, PropertyMock, patch
import pendulum
from airflow.exceptions import AirflowException
from click.testing import CliRunner
from academic_observatory_workflows.config import test_fixtures_folder
from academic_observatory_workflows.workflows.grid_telescope import (
GridRelease,
GridTelescope,
list_grid_records,
)
from observatory.platform.utils.file_utils import get_file_hash, gzip_file_crc
from observatory.platform.utils.test_utils import (
HttpServer,
ObservatoryEnvironment,
ObservatoryTestCase,
module_file_path,
)
from observatory.platform.utils.workflow_utils import blob_name, table_ids_from_path
class MockResponse:
def __init__(self):
self.text = '[{"published_date": "20210101", "id":12345, "title":"no date in here"}]'
class MockSession:
def get(self, *args, **kwargs):
return MockResponse()
class MockTaskInstance:
def __init__(self, records):
"""Construct a MockTaskInstance. This mocks the airflow TaskInstance and is passed as a keyword arg to the
make_release function.
:param records: List of record info, returned as value during xcom_pull
"""
self.records = records
def xcom_pull(self, key: str, task_ids: str, include_prior_dates: bool):
"""Mock xcom_pull method of airflow TaskInstance.
:param key: -
:param task_ids: -
:param include_prior_dates: -
:return: Records list
"""
return self.records
def side_effect(arg):
values = {
"project_id": "project",
"download_bucket_name": "download-bucket",
"transform_bucket_name": "transform-bucket",
"data_path": "data",
"data_location": "US",
}
return values[arg]
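# When assigned via mock_variable_get.side_effect = side_effect (as the tests
# below do), each mocked Variable.get(key) call is routed through this
# function, so e.g. Variable.get("data_path") returns "data".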
def copy_download_fixtures(*, mock, fixtures):
_, call_args = mock.call_args
src_filename = os.path.basename(call_args["url"])
src = os.path.join(fixtures, "files", src_filename)
dst = call_args["filename"]
shutil.copyfile(src, dst)
@patch("observatory.platform.utils.workflow_utils.Variable.get")
class TestGridTelescope(unittest.TestCase):
"""Tests for the functions used by the GRID telescope"""
def __init__(
self,
*args,
**kwargs,
):
"""Constructor which sets up variables used by tests.
:param args: arguments.
:param kwargs: keyword arguments.
"""
self.fixtures = test_fixtures_folder("grid")
self.httpserver = HttpServer(directory=self.fixtures)
self.httpserver.start()
super(TestGridTelescope, self).__init__(*args, **kwargs)
# Telescope instance
self.grid = GridTelescope()
# Paths
# Contains GRID releases 2015-09-22 and 2015-10-09 (format for both is .csv and .json files)
with patch.object(
GridTelescope,
"GRID_FILE_URL",
f"http://{self.httpserver.host}:{self.httpserver.port}" + "/v2/articles/{article_id}/files",
):
with patch("observatory.platform.utils.workflow_utils.Variable.get") as mock_variable_get:
mock_variable_get.side_effect = side_effect
self.grid_run_2015_10_18 = {
"start_date": pendulum.datetime(2015, 10, 11),
"end_date": pendulum.datetime(2015, 10, 18),
"records": [
{"article_ids": [1570967, 1570968], "release_date": "2015-10-09"},
{"article_ids": [1553267, 1553266], "release_date": "2015-09-22"},
],
# there are 2 releases in this run, but use only 1 for testing
"release": GridRelease(
self.grid.dag_id, ["1553266", "1553267"], pendulum.parse("2015-09-22T00:00:00+00:00")
),
"download_hash": "c6fd33fd31b6699a2f19622f0283f4f1",
"extract_hash": "c6fd33fd31b6699a2f19622f0283f4f1",
"transform_crc": "eb66ae78",
}
# Contains GRID release 2020-03-15 (format is a .zip file, which is more common)
self.grid_run_2020_03_27 = {
"start_date": pendulum.datetime(2020, 3, 20),
"end_date": pendulum.datetime(2020, 3, 27),
"records": [{"article_ids": [12022722], "release_date": "2020-03-15T00:00:00+00:00"}],
"release": GridRelease(self.grid.dag_id, ["12022722"], pendulum.parse("2020-03-15T00:00:00+00:00")),
"download_hash": "3d300affce1666ac50b8d945c6ca4c5a",
"extract_hash": "5aff68e9bf72e846a867e91c1fa206a0",
"transform_crc": "77bc8585",
}
self.grid_runs = [self.grid_run_2015_10_18, self.grid_run_2020_03_27]
# Turn logging to warning because vcr prints too much at info level
logging.basicConfig()
logging.getLogger().setLevel(logging.WARNING)
def __del__(self):
self.httpserver.stop()
def test_ctor(self, mock_variable_get):
"""Check that the default transform_bucket Airflow variable is still added when airflow_vars is given."""
telescope = GridTelescope(airflow_vars=[])
self.assertEqual(telescope.airflow_vars, ["transform_bucket"])
def test_list_grid_records(self, mock_variable_get):
"""Check that list grid records returns a list of dictionaries with records in the correct format.
:param mock_variable_get: Mock result of airflow's Variable.get() function
:return: None.
"""
with patch.object(
GridTelescope,
"GRID_DATASET_URL",
f"http://{self.httpserver.host}:{self.httpserver.port}/list_grid_releases",
):
start_date = self.grid_run_2015_10_18["start_date"]
end_date = self.grid_run_2015_10_18["end_date"]
records = list_grid_records(start_date, end_date, GridTelescope.GRID_DATASET_URL)
self.assertEqual(self.grid_run_2015_10_18["records"], records)
def test_list_grid_records_bad_title(self, mock_variable_get):
"""Check exception raised when invalid title given."""
with patch(
"academic_observatory_workflows.workflows.grid_telescope.retry_session", return_value=MockSession()
) as _:
start_date = pendulum.datetime(2020, 1, 1)
end_date = pendulum.datetime(2022, 1, 1)
self.assertRaises(ValueError, list_grid_records, start_date, end_date, "")
def test_list_releases(self, mock_variable_get):
"""Test list_releases."""
ti = MagicMock()
with patch("academic_observatory_workflows.workflows.grid_telescope.list_grid_records") as m_list_grid_records:
m_list_grid_records.return_value = []
telescope = GridTelescope()
result = telescope.list_releases(execution_date=pendulum.now(), next_execution_date=pendulum.now())
self.assertEqual(result, False)
m_list_grid_records.return_value = [1]
telescope = GridTelescope()
result = telescope.list_releases(execution_date=pendulum.now(), next_execution_date=pendulum.now(), ti=ti)
self.assertEqual(result, True)
def test_make_release(self, mock_variable_get):
"""Check that make_release returns a list of GridRelease instances.
:param mock_variable_get: Mock result of airflow's Variable.get() function
:return: None.
"""
mock_variable_get.side_effect = side_effect
for run in self.grid_runs:
records = run["records"]
releases = self.grid.make_release(ti=MockTaskInstance(records))
self.assertIsInstance(releases, list)
for release in releases:
self.assertIsInstance(release, GridRelease)
@patch("academic_observatory_workflows.workflows.grid_telescope.download_file")
def test_download_release(self, m_download, mock_variable_get):
"""Download two specific GRID releases and check they have the expected md5 sum.
:param mock_variable_get: Mock result of airflow's Variable.get() function
:return:
"""
mock_variable_get.side_effect = side_effect
with CliRunner().isolated_filesystem():
for run in self.grid_runs:
release = run["release"]
downloads = release.download()
# Check that returned downloads has correct length
self.assertEqual(1, len(downloads))
self.assertEqual(m_download.call_count, 2)
_, call_args = m_download.call_args_list[0]
self.assertEqual(call_args["url"], "https://ndownloader.figshare.com/files/2284777")
self.assertEqual(call_args["filename"], "data/telescopes/download/grid/grid_2015_09_22/grid.json")
self.assertEqual(call_args["hash"], "c6fd33fd31b6699a2f19622f0283f4f1")
_, call_args = m_download.call_args_list[1]
self.assertEqual(call_args["url"], "https://ndownloader.figshare.com/files/22091379")
self.assertEqual(call_args["filename"], "data/telescopes/download/grid/grid_2020_03_15/grid.zip")
self.assertEqual(call_args["hash"], "3d300affce1666ac50b8d945c6ca4c5a")
@patch("academic_observatory_workflows.workflows.grid_telescope.download_file")
def test_extract_release(self, m_download, mock_variable_get):
"""Test that the GRID releases are extracted as expected, both for an unzipped json file and a zip file.
:param mock_variable_get: Mock result of airflow's Variable.get() function
:return: None.
"""
mock_variable_get.side_effect = side_effect
with CliRunner().isolated_filesystem():
for run in self.grid_runs:
release = run["release"]
release.download()
# Copy the file in rather than download
copy_download_fixtures(mock=m_download, fixtures=self.fixtures)
release.extract()
self.assertEqual(1, len(release.extract_files))
self.assertEqual(
run["extract_hash"], get_file_hash(file_path=release.extract_files[0], algorithm="md5")
)
@patch("academic_observatory_workflows.workflows.grid_telescope.download_file")
def test_transform_release(self, m_download, mock_variable_get):
"""Test that the GRID releases are transformed as expected.
:param mock_variable_get: Mock result of airflow's Variable.get() function
:return: None.
"""
mock_variable_get.side_effect = side_effect
with CliRunner().isolated_filesystem():
for run in self.grid_runs:
release = run["release"]
release.download()
# Copy the file in rather than download
copy_download_fixtures(mock=m_download, fixtures=self.fixtures)
release.extract()
release.transform()
self.assertEqual(1, len(release.transform_files))
self.assertEqual(run["transform_crc"], gzip_file_crc(release.transform_files[0]))
class TestGridRelease(unittest.TestCase):
@patch(
"academic_observatory_workflows.workflows.grid_telescope.GridRelease.extract_folder", new_callable=PropertyMock
)
@patch(
"academic_observatory_workflows.workflows.grid_telescope.GridRelease.download_files", new_callable=PropertyMock
)
def test_extract_not_zip_file(self, m_download_files, m_extract_folder):
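"""Cover the extract code path for a downloaded file that is not a valid zip archive."""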
with CliRunner().isolated_filesystem():
Path("file.zip").touch()
release = GridRelease(dag_id="dag", article_ids=[], release_date=pendulum.now())
m_download_files.return_value = ["file.zip"]
m_extract_folder.return_value = "."
release.extract()
@patch(
"academic_observatory_workflows.workflows.grid_telescope.GridRelease.extract_files", new_callable=PropertyMock
)
def test_transform_multiple_extract(self, m_extract_files):
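"""Check that transform raises an AirflowException when a release has more than one extracted file."""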
m_extract_files.return_value = ["1", "2"]
with CliRunner().isolated_filesystem():
release = GridRelease(dag_id="dag", article_ids=[], release_date=pendulum.now())
self.assertRaises(AirflowException, release.transform)
class TestGridTelescopeDag(ObservatoryTestCase):
def __init__(self, *args, **kwargs):
"""Constructor which sets up variables used by tests.
:param args: arguments.
:param kwargs: keyword arguments.
"""
super().__init__(*args, **kwargs)
self.project_id = os.environ["TEST_GCP_PROJECT_ID"]
self.data_location = os.environ["TEST_GCP_DATA_LOCATION"]
# Paths
self.fixtures = test_fixtures_folder("grid")
self.httpserver = HttpServer(directory=self.fixtures)
self.httpserver.start()
# GridTelescope.GRID_FILE_URL = (
# f"http://{self.httpserver.host}:{self.httpserver.port}" + "/v2/articles/{article_id}/files"
# )
def __del__(self):
self.httpserver.stop()
def setup_observatory_environment(self):
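"""Create an ObservatoryEnvironment and add a test dataset to it.
:return: the ObservatoryEnvironment.
"""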
env = ObservatoryEnvironment(self.project_id, self.data_location)
self.dataset_id = env.add_dataset()
return env
def test_dag_structure(self):
"""Test that the GRID DAG has the correct structure.
:return: None
"""
telescope = GridTelescope()
dag = telescope.make_dag()
self.assert_dag_structure(
{
"check_dependencies": ["list_releases"],
"list_releases": ["download"],
"download": ["upload_downloaded"],
"upload_downloaded": ["extract"],
"extract": ["transform"],
"transform": ["upload_transformed"],
"upload_transformed": ["bq_load"],
"bq_load": ["cleanup"],
"cleanup": [],
},
dag,
)
def test_dag_load(self):
"""Test that the GRID DAG can be loaded from a DAG bag.
:return: None
"""
env = ObservatoryEnvironment(self.project_id, self.data_location)
with env.create():
dag_file = os.path.join(module_file_path("academic_observatory_workflows.dags"), "grid_telescope.py")
self.assert_dag_load("grid", dag_file)
@patch("academic_observatory_workflows.workflows.grid_telescope.download_file")
def test_telescope(self, m_download):
"""Test running the telescope. Functional test."""
env = self.setup_observatory_environment()
telescope = GridTelescope(dag_id="grid", dataset_id=self.dataset_id)
dag = telescope.make_dag()
execution_date = pendulum.datetime(year=2015, month=9, day=22)
# Create the Observatory environment and run tests
with env.create():
with env.create_dag_run(dag, execution_date):
with patch.object(
GridTelescope,
"GRID_FILE_URL",
f"http://{self.httpserver.host}:{self.httpserver.port}" + "/v2/articles/{article_id}/files",
):
# Check dependencies
env.run_task(telescope.check_dependencies.__name__)
# List releases
with patch(
"academic_observatory_workflows.workflows.grid_telescope.list_grid_records"
) as m_list_grid_records:
m_list_grid_records.return_value = [
{"article_ids": [1553266, 1553267], "release_date": "2015-10-09"},
]
ti = env.run_task(telescope.list_releases.__name__)
# Test list releases
available_releases = ti.xcom_pull(
key=GridTelescope.RELEASE_INFO,
task_ids=telescope.list_releases.__name__,
include_prior_dates=False,
)
self.assertEqual(len(available_releases), 1)
# Download
env.run_task(telescope.download.__name__)
copy_download_fixtures(mock=m_download, fixtures=self.fixtures)
# Test download
release = GridRelease(
dag_id="grid",
article_ids=[1553266, 1553267],
release_date=pendulum.datetime(2015, 10, 9),
)
self.assertEqual(len(release.download_files), 1)
# upload_downloaded
env.run_task(telescope.upload_downloaded.__name__)
# Test upload_downloaded
for file in release.download_files:
self.assert_blob_integrity(env.download_bucket, blob_name(file), file)
# extract
env.run_task(telescope.extract.__name__)
# Test extract
self.assertEqual(len(release.extract_files), 1)
# transform
env.run_task(telescope.transform.__name__)
# Test transform
self.assertEqual(len(release.transform_files), 1)
# upload_transformed
env.run_task(telescope.upload_transformed.__name__)
# Test upload_transformed
for file in release.transform_files:
self.assert_blob_integrity(env.transform_bucket, blob_name(file), file)
# bq_load
env.run_task(telescope.bq_load.__name__)
# Test bq_load
# Will only check table exists rather than validate data.
for file in release.transform_files:
table_id, _ = table_ids_from_path(file)
suffix = release.release_date.format("YYYYMMDD")
table_id = f"{self.project_id}.{self.dataset_id}.{table_id}{suffix}"
expected_rows = 48987
self.assert_table_integrity(table_id, expected_rows)
# cleanup
# Test that all telescope data is deleted; capture the folder paths before running the task
download_folder, extract_folder, transform_folder = (
release.download_folder,
release.extract_folder,
release.transform_folder,
)
env.run_task(telescope.cleanup.__name__)
self.assert_cleanup(download_folder, extract_folder, transform_folder)
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,422
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/tests/test_doi_workflow.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: James Diprose
from __future__ import annotations
import os
from datetime import timedelta
from typing import Dict, List
from unittest.mock import patch
import pendulum
from airflow.exceptions import AirflowException
from academic_observatory_workflows.model import (
Institution,
bq_load_observatory_dataset,
make_country_table,
make_doi_table,
make_observatory_dataset,
sort_events,
)
from academic_observatory_workflows.workflows.doi_workflow import (
DoiWorkflow,
make_dataset_transforms,
make_elastic_tables,
)
from observatory.platform.utils.airflow_utils import set_task_state
from observatory.platform.utils.gc_utils import run_bigquery_query
from observatory.platform.utils.test_utils import (
ObservatoryEnvironment,
ObservatoryTestCase,
make_dummy_dag,
module_file_path,
)
class TestDoiWorkflow(ObservatoryTestCase):
"""Tests for the functions used by the Doi workflow"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# GCP settings
self.gcp_project_id: str = os.getenv("TEST_GCP_PROJECT_ID")
self.gcp_bucket_name: str = os.getenv("TEST_GCP_BUCKET_NAME")
self.gcp_data_location: str = os.getenv("TEST_GCP_DATA_LOCATION")
# Institutions
inst_curtin = Institution(
1,
name="Curtin University",
grid_id="grid.1032.0",
ror_id="https://ror.org/02n415q13",
country_code="AUS",
country_code_2="AU",
region="Oceania",
subregion="Australia and New Zealand",
types="Education",
country="Australia",
coordinates="-32.005931, 115.894397",
)
inst_anu = Institution(
2,
name="Australian National University",
grid_id="grid.1001.0",
ror_id="https://ror.org/019wvm592",
country_code="AUS",
country_code_2="AU",
region="Oceania",
subregion="Australia and New Zealand",
types="Education",
country="Australia",
coordinates="-35.2778, 149.1205",
)
inst_akl = Institution(
3,
name="University of Auckland",
grid_id="grid.9654.e",
ror_id="https://ror.org/03b94tp07",
country_code="NZL",
country_code_2="NZ",
region="Oceania",
subregion="Australia and New Zealand",
types="Education",
country="New Zealand",
coordinates="-36.852304, 174.767734",
)
self.institutions = [inst_curtin, inst_anu, inst_akl]
def test_set_task_state(self):
"""Test
:return:
"""
set_task_state(True, "my-task-id")
with self.assertRaises(AirflowException):
set_task_state(False, "my-task-id")
def test_dag_structure(self):
"""Test that the DOI DAG has the correct structure.
:return: None
"""
dag = DoiWorkflow().make_dag()
self.assert_dag_structure(
{
"crossref_metadata_sensor": ["check_dependencies"],
"crossref_fundref_sensor": ["check_dependencies"],
"geonames_sensor": ["check_dependencies"],
"ror_sensor": ["check_dependencies"],
"open_citations_sensor": ["check_dependencies"],
"unpaywall_sensor": ["check_dependencies"],
"orcid_sensor": ["check_dependencies"],
"crossref_events_sensor": ["check_dependencies"],
"check_dependencies": ["create_datasets"],
"create_datasets": [
"create_crossref_events",
"create_crossref_fundref",
"create_ror",
"create_mag",
"create_orcid",
"create_open_citations",
"create_unpaywall",
],
"create_crossref_events": ["create_doi"],
"create_crossref_fundref": ["create_doi"],
"create_ror": ["create_doi"],
"create_mag": ["create_doi"],
"create_orcid": ["create_doi"],
"create_open_citations": ["create_doi"],
"create_unpaywall": ["create_doi"],
"create_doi": [
"create_book",
],
"create_book": [
"create_country",
"create_funder",
"create_group",
"create_institution",
"create_author",
"create_journal",
"create_publisher",
"create_region",
"create_subregion",
],
"create_country": ["copy_to_dashboards"],
"create_funder": ["copy_to_dashboards"],
"create_group": ["copy_to_dashboards"],
"create_institution": ["copy_to_dashboards"],
"create_author": ["copy_to_dashboards"],
"create_journal": ["copy_to_dashboards"],
"create_publisher": ["copy_to_dashboards"],
"create_region": ["copy_to_dashboards"],
"create_subregion": ["copy_to_dashboards"],
"copy_to_dashboards": ["create_dashboard_views"],
"create_dashboard_views": [
"export_country",
"export_funder",
"export_group",
"export_institution",
"export_author",
"export_journal",
"export_publisher",
"export_region",
"export_subregion",
],
"export_country": [],
"export_funder": [],
"export_group": [],
"export_institution": [],
"export_author": [],
"export_journal": [],
"export_publisher": [],
"export_region": [],
"export_subregion": [],
},
dag,
)
def test_dag_load(self):
"""Test that the DOI can be loaded from a DAG bag.
:return: None
"""
env = ObservatoryEnvironment(self.gcp_project_id, self.gcp_data_location)
with env.create():
dag_file = os.path.join(module_file_path("academic_observatory_workflows.dags"), "doi_workflow.py")
self.assert_dag_load("doi", dag_file)
def test_telescope(self):
"""Test the DOI telescope end to end.
:return: None.
"""
# Create datasets
env = ObservatoryEnvironment(
project_id=self.gcp_project_id, data_location=self.gcp_data_location, enable_api=False
)
fake_dataset_id = env.add_dataset(prefix="fake")
intermediate_dataset_id = env.add_dataset(prefix="intermediate")
dashboards_dataset_id = env.add_dataset(prefix="dashboards")
observatory_dataset_id = env.add_dataset(prefix="observatory")
elastic_dataset_id = env.add_dataset(prefix="elastic")
settings_dataset_id = env.add_dataset(prefix="settings")
dataset_transforms = make_dataset_transforms(
dataset_id_crossref_events=fake_dataset_id,
dataset_id_crossref_metadata=fake_dataset_id,
dataset_id_crossref_fundref=fake_dataset_id,
dataset_id_ror=fake_dataset_id,
dataset_id_mag=fake_dataset_id,
dataset_id_orcid=fake_dataset_id,
dataset_id_open_citations=fake_dataset_id,
dataset_id_unpaywall=fake_dataset_id,
dataset_id_settings=settings_dataset_id,
dataset_id_observatory=observatory_dataset_id,
dataset_id_observatory_intermediate=intermediate_dataset_id,
)
transforms, transform_doi, transform_book = dataset_transforms
with env.create(task_logging=True):
# Make dag
start_date = pendulum.datetime(year=2021, month=10, day=10)
workflow = DoiWorkflow(
intermediate_dataset_id=intermediate_dataset_id,
dashboards_dataset_id=dashboards_dataset_id,
observatory_dataset_id=observatory_dataset_id,
elastic_dataset_id=elastic_dataset_id,
transforms=dataset_transforms,
start_date=start_date,
)
# Disable dag check on dag run sensor
for sensor in workflow.operators[0]:
sensor.check_exists = False
sensor.grace_period = timedelta(seconds=1)
doi_dag = workflow.make_dag()
# If there is no dag run for the DAG being monitored, the sensor will pass. This is so we can
# skip waiting on weeks when the DAG being waited on is not scheduled to run.
expected_state = "success"
with env.create_dag_run(doi_dag, start_date):
for task_id in DoiWorkflow.SENSOR_DAG_IDS:
ti = env.run_task(f"{task_id}_sensor")
self.assertEqual(expected_state, ti.state)
# Run Dummy Dags
execution_date = pendulum.datetime(year=2021, month=10, day=17)
release_date = pendulum.datetime(year=2021, month=10, day=23)
release_suffix = release_date.strftime("%Y%m%d")
expected_state = "success"
for dag_id in DoiWorkflow.SENSOR_DAG_IDS:
dag = make_dummy_dag(dag_id, execution_date)
with env.create_dag_run(dag, execution_date):
# Running all of a DAG's tasks sets the DAG run to finished
ti = env.run_task("dummy_task")
self.assertEqual(expected_state, ti.state)
# Run end to end tests for DOI DAG
with env.create_dag_run(doi_dag, execution_date):
# Test that sensors go into 'success' state as the DAGs that they are waiting for have finished
for task_id in DoiWorkflow.SENSOR_DAG_IDS:
ti = env.run_task(f"{task_id}_sensor")
self.assertEqual(expected_state, ti.state)
# Check dependencies
ti = env.run_task("check_dependencies")
self.assertEqual(expected_state, ti.state)
# Create datasets
ti = env.run_task("create_datasets")
self.assertEqual(expected_state, ti.state)
# Generate fake dataset
observatory_dataset = make_observatory_dataset(self.institutions)
bq_load_observatory_dataset(
observatory_dataset,
env.download_bucket,
fake_dataset_id,
settings_dataset_id,
release_date,
self.gcp_data_location,
)
# Test that source dataset transformations run
for transform in transforms:
task_id = f"create_{transform.output_table.table_id}"
ti = env.run_task(task_id)
self.assertEqual(expected_state, ti.state)
# Test create DOI task
ti = env.run_task("create_doi")
self.assertEqual(expected_state, ti.state)
# DOI assert table exists
expected_table_id = f"{self.gcp_project_id}.{observatory_dataset_id}.doi{release_suffix}"
expected_rows = len(observatory_dataset.papers)
self.assert_table_integrity(expected_table_id, expected_rows=expected_rows)
# DOI assert correctness of output
expected_output = make_doi_table(observatory_dataset)
with patch("observatory.platform.utils.gc_utils.bq_query_bytes_daily_limit_check"):
actual_output = self.query_table(observatory_dataset_id, f"doi{release_suffix}", "doi")
self.assert_doi(expected_output, actual_output)
# Test create book
ti = env.run_task("create_book")
self.assertEqual(expected_state, ti.state)
expected_table_id = f"{self.gcp_project_id}.{observatory_dataset_id}.book{release_suffix}"
expected_rows = 0
self.assert_table_integrity(expected_table_id, expected_rows)
# Test aggregations tasks
for agg in DoiWorkflow.AGGREGATIONS:
task_id = f"create_{agg.table_id}"
ti = env.run_task(task_id)
self.assertEqual(expected_state, ti.state)
# Aggregation assert table exists
expected_table_id = f"{self.gcp_project_id}.{observatory_dataset_id}.{agg.table_id}{release_suffix}"
self.assert_table_integrity(expected_table_id)
# Assert country aggregation output
expected_output = make_country_table(observatory_dataset)
with patch("observatory.platform.utils.gc_utils.bq_query_bytes_daily_limit_check"):
actual_output = self.query_table(
observatory_dataset_id, f"country{release_suffix}", "id, time_period"
)
self.assert_aggregate(expected_output, actual_output)
# TODO: test correctness of remaining outputs
# Test copy to dashboards
ti = env.run_task("copy_to_dashboards")
self.assertEqual(expected_state, ti.state)
table_ids = [agg.table_id for agg in DoiWorkflow.AGGREGATIONS] + ["doi"]
for table_id in table_ids:
self.assert_table_integrity(f"{self.gcp_project_id}.{dashboards_dataset_id}.{table_id}")
# Test create dashboard views
ti = env.run_task("create_dashboard_views")
self.assertEqual(expected_state, ti.state)
for table_id in ["country", "funder", "group", "institution", "publisher", "subregion"]:
self.assert_table_integrity(f"{self.gcp_project_id}.{dashboards_dataset_id}.{table_id}_comparison")
# Test create exported tables for Elasticsearch
for agg in DoiWorkflow.AGGREGATIONS:
table_id = agg.table_id
task_id = f"export_{table_id}"
ti = env.run_task(task_id)
self.assertEqual(expected_state, ti.state)
# Check that the correct tables exist for each aggregation
tables = make_elastic_tables(
table_id,
relate_to_institutions=agg.relate_to_institutions,
relate_to_countries=agg.relate_to_countries,
relate_to_groups=agg.relate_to_groups,
relate_to_members=agg.relate_to_members,
relate_to_journals=agg.relate_to_journals,
relate_to_funders=agg.relate_to_funders,
relate_to_publishers=agg.relate_to_publishers,
)
for table in tables:
aggregate = table["aggregate"]
facet = table["facet"]
expected_table_id = (
f"{self.gcp_project_id}.{elastic_dataset_id}.ao_{aggregate}_{facet}{release_suffix}"
)
self.assert_table_integrity(expected_table_id)
def query_table(self, observatory_dataset_id: str, table_id: str, order_by_field: str) -> List[Dict]:
"""Query a BigQuery table, sorting the results and returning results as a list of dicts.
:param observatory_dataset_id: the observatory dataset id.
:param table_id: the table id.
:param order_by_field: what field or fields to order by.
:return: the table rows.
"""
return [
dict(row)
for row in run_bigquery_query(
f"SELECT * from {self.gcp_project_id}.{observatory_dataset_id}.{table_id} ORDER BY {order_by_field} ASC;"
)
]
def assert_aggregate(self, expected: List[Dict], actual: List[Dict]):
"""Assert an aggregate table.
:param expected: the expected rows.
:param actual: the actual rows.
:return: None.
"""
# Check that expected and actual are same length
self.assertEqual(len(expected), len(actual))
# Check that each item matches
for expected_item, actual_item in zip(expected, actual):
# Check that top level fields match
for key in [
"id",
"time_period",
"name",
"country",
"country_code",
"country_code_2",
"region",
"subregion",
"coordinates",
"total_outputs",
]:
self.assertEqual(expected_item[key], actual_item[key])
# Access types
self.assert_sub_fields(
expected_item,
actual_item,
"access_types",
["oa", "green", "gold", "gold_doaj", "hybrid", "bronze", "green_only"],
)
def assert_sub_fields(self, expected: Dict, actual: Dict, field: str, sub_fields: List[str]):
"""Checks that the sub fields in the aggregate match.
:param expected: the expected item.
:param actual: the actual item.
:param field: the field name.
:param sub_fields: the sub field names.
:return: None.
"""
for key in sub_fields:
self.assertEqual(expected[field][key], actual[field][key])
def assert_doi(self, expected: List[Dict], actual: List[Dict]):
"""Assert the DOI table.
:param expected: the expected DOI table rows.
:param actual: the actual DOI table rows.
:return: None.
"""
# Assert DOI output is correct
self.assertEqual(len(expected), len(actual))
for expected_record, actual_record in zip(expected, actual):
# Check that DOIs match
self.assertEqual(expected_record["doi"], actual_record["doi"])
# Check events
self.assert_doi_events(expected_record["events"], actual_record["events"])
# Check affiliations
self.assert_doi_affiliations(expected_record["affiliations"], actual_record["affiliations"])
def assert_doi_events(self, expected: Dict, actual: Dict):
"""Assert the DOI table events field.
:param expected: the expected events field.
:param actual: the actual events field.
:return: None
"""
if expected is None:
# When no events exist assert they are None
self.assertIsNone(actual)
else:
# When events exist check that they are equal
self.assertEqual(expected["doi"], actual["doi"])
sort_events(actual["events"], actual["months"], actual["years"])
event_keys = ["events", "months", "years"]
for key in event_keys:
self.assertEqual(len(expected[key]), len(actual[key]))
for ee, ea in zip(expected[key], actual[key]):
self.assertDictEqual(ee, ea)
def assert_doi_affiliations(self, expected: Dict, actual: Dict):
"""Assert DOI affiliations.
:param expected: the expected DOI affiliation rows.
:param actual: the actual DOI affiliation rows.
:return: None.
"""
# DOI
self.assertEqual(expected["doi"], actual["doi"])
# Subfields
fields = ["institutions", "countries", "subregions", "regions", "journals", "publishers", "funders"]
for field in fields:
self.assert_doi_affiliation(expected, actual, field)
def assert_doi_affiliation(self, expected: Dict, actual: Dict, key: str):
"""Assert a DOI affiliation row.
:param expected: the expected DOI affiliation row.
:param actual: the actual DOI affiliation row.
:param key: the affiliation field to check, e.g. "institutions".
:return: None.
"""
items_expected_ = expected[key]
items_actual_ = actual[key]
self.assertEqual(len(items_expected_), len(items_actual_))
items_actual_.sort(key=lambda x: x["identifier"])
for item_ in items_actual_:
item_["members"].sort()
self.assertListEqual(items_expected_, items_actual_)
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,423
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/mag_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: James Diprose
import logging
import os
import re
import shutil
import subprocess
from concurrent.futures import ThreadPoolExecutor, as_completed
from multiprocessing import cpu_count
from pathlib import Path, PosixPath
from subprocess import Popen
from typing import List
import pendulum
from academic_observatory_workflows.config import schema_folder
from airflow.exceptions import AirflowException
from airflow.hooks.base import BaseHook
from airflow.models.taskinstance import TaskInstance
from airflow.models.variable import Variable
from google.cloud import storage
from google.cloud.bigquery import SourceFormat
from google.cloud.storage import Blob
from natsort import natsorted
from observatory.platform.utils.airflow_utils import (
AirflowConns,
AirflowVars,
check_connections,
check_variables,
)
from observatory.platform.utils.config_utils import find_schema
from observatory.platform.utils.gc_utils import (
azure_to_google_cloud_storage_transfer,
bigquery_sharded_table_id,
bigquery_table_exists,
create_bigquery_dataset,
download_blobs_from_cloud_storage,
load_bigquery_table,
table_name_from_blob,
upload_files_to_cloud_storage,
)
from observatory.platform.utils.proc_utils import wait_for_process
from observatory.platform.utils.workflow_utils import (
SubFolder,
delete_old_xcoms,
workflow_path,
)
MAG_GCP_BUCKET_PATH = "telescopes/mag"
def pull_release_dates(ti: TaskInstance) -> List[pendulum.Date]:
"""Pull a list of MAG release dates instances with xcom.
:param ti: the Apache Airflow task instance.
:return: the list of MAG release dates.
"""
release_dates = ti.xcom_pull(
key=MagTelescope.RELEASES_TOPIC_NAME, task_ids=MagTelescope.TASK_ID_LIST, include_prior_dates=False
)
release_dates = [pendulum.parse(release_date) for release_date in release_dates]
return release_dates
def list_mag_release_files(release_path: str) -> List[PosixPath]:
"""List the MAG release file paths in a particular folder. Excludes the samples directory.
:param release_path: the path to the MAG release.
:return: a list of PosixPath files.
"""
release_folder = os.path.basename(os.path.abspath(release_path))
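# Match .txt files, optionally split into numbered parts (e.g. Papers.txt.1), at the release root
# or in the advanced, mag or nlp subfolders; anything else, such as the samples folder, is excluded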
include_regex = fr"^.*/{release_folder}(/advanced|/mag|/nlp)?/\w+\.txt(\.[0-9]+)?$"
types = ["*.txt", "*.txt.[0-9]"]
files = []
for file_type in types:
paths = list(Path(release_path).rglob(file_type))
for path in paths:
path_string = str(path.resolve())
if re.match(include_regex, path_string) is not None:
files.append(path)
files = natsorted(files, key=lambda x: str(x))
return files
def transform_mag_file(input_file_path: str, output_file_path: str) -> bool:
r"""Transform MAG file, removing the \x0 and \r characters. \r is the ^M windows character.
:param input_file_path: the path of the file to transform.
:param output_file_path: where to save the transformed file.
:return: whether the transformation was successful or not.
"""
# TODO: see if we can get rid of shell=True
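# One option, since the shell is only needed for the > redirection, would be to pass sed its
# arguments as a list and write stdout to the output file directly, e.g.
# with open(output_file_path, "wb") as f:
#     subprocess.run(["sed", r"s/\r//g; s/\x0//g", str(input_file_path)], stdout=f)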
bash_command = fr"sed 's/\r//g; s/\x0//g' {str(input_file_path)} > {output_file_path}"
logging.info(f"transform_mag_file bash command: {bash_command}")
proc: Popen = subprocess.Popen(bash_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
output, error = wait_for_process(proc)
logging.debug(output)
success = proc.returncode == 0
if success:
logging.info(f"transform_mag_file success: {input_file_path}")
else:
logging.error(f"transform_mag_file error: {input_file_path}")
logging.error(error)
return success
def transform_mag_release(input_release_path: str, output_release_path: str, max_workers: int = cpu_count()) -> bool:
"""Transform a MAG release into a form that can be loaded into BigQuery.
:param input_release_path: the path to the folder containing the files for the MAG release.
:param output_release_path: the path where the transformed files will be saved.
:param max_workers: the number of processes to use when transforming files (one process per file).
:return: whether the transformation was successful or not.
"""
# Transform each file in parallel
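# A thread pool is sufficient here despite max_workers defaulting to cpu_count: each worker
# just waits on an external sed process, so the work is not limited by the GIL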
with ThreadPoolExecutor(max_workers=max_workers) as executor:
# Create tasks
futures = []
futures_msgs = {}
paths = list_mag_release_files(input_release_path)
for path in paths:
# Make path to save file
os.makedirs(output_release_path, exist_ok=True)
output_path = os.path.join(output_release_path, path.name)
msg = f"input_file_path={path}, output_file_path={output_path}"
logging.info(f"transform_mag_release: {msg}")
future = executor.submit(transform_mag_file, path, output_path)
futures.append(future)
futures_msgs[future] = msg
# Wait for completed tasks
results = []
for future in as_completed(futures):
success = future.result()
msg = futures_msgs[future]
results.append(success)
if success:
logging.info(f"transform_mag_release success: {msg}")
else:
logging.error(f"transform_mag_release failed: {msg}")
return all(results)
def list_mag_release_dates(
*,
project_id: str,
bucket_name: str,
prefix: str = MAG_GCP_BUCKET_PATH,
mag_dataset_id: str = "mag",
mag_table_name: str = "Affiliations",
) -> List[pendulum.Date]:
"""List all MAG release dates that have not been loaded into BigQuery.
:param project_id: the Google Cloud project id.
:param bucket_name: the Google Cloud bucket name.
:param prefix: the prefix to search on.
:param mag_dataset_id: the MAG BigQuery dataset id.
:param mag_table_name: the table name to use to check whether the MAG dataset has loaded.
:return: a list of release dates.
"""
# Find releases on Google Cloud Storage
release_dates = set()
client = storage.Client()
blobs = client.list_blobs(bucket_name, prefix=prefix)
for blob in blobs:
name = blob.name
dt_str = re.search(r"\d{4}-\d{2}-\d{2}", name)
if dt_str is not None:
dt = pendulum.from_format(dt_str.group(), "YYYY-MM-DD")
release_dates.add(dt)
# Include all releases that have not been processed yet
release_dates_out = []
for release_date in release_dates:
table_id = bigquery_sharded_table_id(mag_table_name, release_date)
if not bigquery_table_exists(project_id, mag_dataset_id, table_id):
release_dates_out.append(release_date)
print(f"Discovered release: {release_date.format('YYYY-MM-DD')}")
return release_dates_out
def make_release_name(release_date: pendulum.Date) -> str:
"""Make a release name for a MAG release.
:param release_date: release date.
:return: the release name.
"""
return release_date.format("YYYY-MM-DD")
class MagTelescope:
"""A container for holding the constants and static functions for the Microsoft Academic Graph (MAG) telescope.
Requires the following connection to be added to Airflow:
mag_container: the Azure Storage Account name (login) and the sas token (password) for the
Azure storage blob container that contains the MAG releases.
"""
DAG_ID = "mag"
DATASET_ID = "mag"
QUEUE = "remote_queue"
DESCRIPTION = (
"The Microsoft Academic Graph (MAG) dataset: https://www.microsoft.com/en-us/research/project/"
"microsoft-academic-graph/"
)
RELEASES_TOPIC_NAME = "releases"
MAX_PROCESSES = cpu_count()
MAX_CONNECTIONS = cpu_count()
RETRIES = 3
MAG_CONTAINER = "mag_container"
TASK_ID_CHECK_DEPENDENCIES = "check_dependencies"
TASK_ID_LIST = "list_releases"
TASK_ID_TRANSFER = "transfer"
TASK_ID_DOWNLOAD = "download"
TASK_ID_TRANSFORM = "transform"
TASK_ID_UPLOAD_TRANSFORMED = "upload_transformed"
TASK_ID_BQ_LOAD = "bq_load"
TASK_ID_CLEANUP = "cleanup"
@staticmethod
def check_dependencies(**kwargs):
"""Check that all variables and connections exist that are required to run the DAG.
:param kwargs: the context passed from the PythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html
for a list of the keyword arguments that are passed to this argument.
:return: None.
"""
vars_valid = check_variables(
AirflowVars.DATA_PATH,
AirflowVars.PROJECT_ID,
AirflowVars.DATA_LOCATION,
AirflowVars.DOWNLOAD_BUCKET,
AirflowVars.TRANSFORM_BUCKET,
)
conns_valid = check_connections(MagTelescope.MAG_CONTAINER)
if not vars_valid or not conns_valid:
raise AirflowException("Required variables or connections are missing")
@staticmethod
def transfer(**kwargs):
"""Task to transfer MAG releases from Azure to Google Cloud Storage.
Requires the following connection to be added to Airflow:
mag_container: the Azure Storage Account name (login) and the sas token (password) for the
Azure storage blob container that contains the MAG releases.
:param kwargs: the context passed from the PythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html
for a list of the keyword arguments that are passed to this argument.
:return: None.
"""
# Get variables
gcp_project_id = Variable.get(AirflowVars.PROJECT_ID)
gcp_bucket_name = Variable.get(AirflowVars.DOWNLOAD_BUCKET)
gcp_bucket_path = "telescopes"
# Get Azure connection information
connection = BaseHook.get_connection(MagTelescope.MAG_CONTAINER)
azure_container = "mag"
azure_account_name = connection.login
azure_sas_token = connection.password
# Download and extract each release posted this month
description = "Transfer MAG Releases"
logging.info(description)
success = azure_to_google_cloud_storage_transfer(
azure_storage_account_name=azure_account_name,
azure_sas_token=azure_sas_token,
azure_container=azure_container,
include_prefixes=["mag"],
gc_project_id=gcp_project_id,
gc_bucket=gcp_bucket_name,
gc_bucket_path=gcp_bucket_path,
description=description,
)
if success:
logging.info("Success transferring MAG releases")
else:
logging.error("Error transferring MAG release")
exit(os.EX_DATAERR)
@staticmethod
def list_releases(**kwargs):
"""Task to list all MAG releases for a given month.
Pushes the following xcom:
a list of MAG release date strings, formatted as YYYY-MM-DD, for releases not yet loaded into BigQuery.
:param kwargs: the context passed from the BranchPythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html
for a list of the keyword arguments that are passed to this argument.
:return: a truthy value when there are releases to process, signalling that the DAG should continue.
"""
execution_date = kwargs["execution_date"]
project_id = Variable.get(AirflowVars.PROJECT_ID)
gcp_bucket_name = Variable.get(AirflowVars.DOWNLOAD_BUCKET)
release_dates = list_mag_release_dates(project_id=project_id, bucket_name=gcp_bucket_name)
release_dates_out = [release_date.format("YYYY-MM-DD") for release_date in release_dates]
continue_dag = len(release_dates_out)
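# A non-zero length is truthy, which tells the operator running this task to continue the DAG;
# zero (no unprocessed releases) skips the downstream tasks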
if continue_dag:
# Push messages
ti: TaskInstance = kwargs["ti"]
ti.xcom_push(MagTelescope.RELEASES_TOPIC_NAME, release_dates_out, execution_date)
return continue_dag
@staticmethod
def download(**kwargs):
"""Downloads the MAG release from Google Cloud Storage.
:param kwargs: the context passed from the PythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html
for a list of the keyword arguments that are passed to this argument.
:return: None.
"""
# Get variables
bucket_name = Variable.get(AirflowVars.DOWNLOAD_BUCKET)
# Get MAG releases
ti: TaskInstance = kwargs["ti"]
release_dates = pull_release_dates(ti)
# Download each release to the extracted folder path (since they are already extracted)
extracted_path = workflow_path(SubFolder.extracted, MagTelescope.DAG_ID)
for release_date in release_dates:
release_name = make_release_name(release_date)
release_path = f"{MAG_GCP_BUCKET_PATH}/{release_name}"
logging.info(f"Downloading release: {release_name}")
destination_path = os.path.join(extracted_path, release_name)
success = download_blobs_from_cloud_storage(
bucket_name,
release_path,
destination_path,
max_processes=MagTelescope.MAX_PROCESSES,
max_connections=MagTelescope.MAX_CONNECTIONS,
retries=MagTelescope.RETRIES,
)
if success:
logging.info(f"Success downloading MAG release: {release_name}")
else:
logging.error(f"Error downloading MAG release: {release_name}")
exit(os.EX_DATAERR)
@staticmethod
def transform(**kwargs):
"""Transforms the MAG release into a form that can be uploaded to BigQuery.
:param kwargs: the context passed from the PythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html
for a list of the keyword arguments that are passed to this argument.
:return: None.
"""
# Get MAG releases
ti: TaskInstance = kwargs["ti"]
release_dates = pull_release_dates(ti)
# For each release and folder to include, transform the files with sed and save into the transformed directory
for release_date in release_dates:
release_name = make_release_name(release_date)
logging.info(f"Transforming MAG release: {release_name}")
release_extracted_path = os.path.join(workflow_path(SubFolder.extracted, MagTelescope.DAG_ID), release_name)
release_transformed_path = os.path.join(
workflow_path(SubFolder.transformed, MagTelescope.DAG_ID), release_name
)
success = transform_mag_release(
release_extracted_path, release_transformed_path, max_workers=MagTelescope.MAX_PROCESSES
)
if success:
logging.info(f"Success transforming MAG release: {release_name}")
else:
logging.error(f"Error transforming MAG release: {release_name}")
exit(os.EX_DATAERR)
@staticmethod
def upload_transformed(**kwargs):
"""Uploads the transformed MAG release files to Google Cloud Storage for loading into BigQuery.
:param kwargs: the context passed from the PythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html
for a list of the keyword arguments that are passed to this argument.
:return: None.
"""
# Get variables
bucket_name = Variable.get(AirflowVars.TRANSFORM_BUCKET)
# Get MAG releases
ti: TaskInstance = kwargs["ti"]
release_dates = pull_release_dates(ti)
# Upload files to cloud storage
for release_date in release_dates:
release_name = make_release_name(release_date)
logging.info(f"Uploading MAG release to cloud storage: {release_name}")
release_transformed_path = os.path.join(
workflow_path(SubFolder.transformed, MagTelescope.DAG_ID), release_name
)
posix_paths = list_mag_release_files(release_transformed_path)
paths = [str(path) for path in posix_paths]
blob_names = [f"telescopes/{MagTelescope.DAG_ID}/{release_name}/{path.name}" for path in posix_paths]
success = upload_files_to_cloud_storage(
bucket_name,
blob_names,
paths,
max_processes=MagTelescope.MAX_PROCESSES,
max_connections=MagTelescope.MAX_CONNECTIONS,
retries=MagTelescope.RETRIES,
)
if success:
logging.info(f"Success uploading MAG release to cloud storage: {release_name}")
else:
logging.error(f"Error uploading MAG release to cloud storage: {release_name}")
exit(os.EX_DATAERR)
@staticmethod
def bq_load(**kwargs):
"""Loads a MAG release into BigQuery.
:param kwargs: the context passed from the PythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html
for a list of the keyword arguments that are passed to this argument.
:return: None.
"""
# Get MAG releases
ti: TaskInstance = kwargs["ti"]
release_dates = pull_release_dates(ti)
# Get config variables
project_id = Variable.get(AirflowVars.PROJECT_ID)
data_location = Variable.get(AirflowVars.DATA_LOCATION)
bucket_name = Variable.get(AirflowVars.TRANSFORM_BUCKET)
# For each release, load into BigQuery
for release_date in release_dates:
release_name = make_release_name(release_date)
release_path = f"telescopes/{MagTelescope.DAG_ID}/{release_name}"
success = db_load_mag_release(project_id, bucket_name, data_location, release_path, release_date)
if success:
logging.info(f"Success loading MAG release: {release_name}")
else:
logging.error(f"Error loading MAG release: {release_name}")
exit(os.EX_DATAERR)
@staticmethod
def cleanup(**kwargs):
"""Delete files of downloaded, extracted and transformed releases.
:param kwargs: the context passed from the PythonOperator. See
https://airflow.apache.org/docs/stable/macros-ref.html
for a list of the keyword arguments that are passed to this argument.
:return: None.
"""
# Pull releases
ti: TaskInstance = kwargs["ti"]
release_dates = pull_release_dates(ti)
for release_date in release_dates:
release_name = make_release_name(release_date)
# Remove all extracted files
release_extracted_path = os.path.join(workflow_path(SubFolder.extracted, MagTelescope.DAG_ID), release_name)
try:
shutil.rmtree(release_extracted_path)
except FileNotFoundError as e:
logging.warning(f"No such file or directory {release_extracted_path}: {e}")
# Remove all transformed files
release_transformed_path = os.path.join(
workflow_path(SubFolder.transformed, MagTelescope.DAG_ID), release_name
)
try:
shutil.rmtree(release_transformed_path)
except FileNotFoundError as e:
logging.warning(f"No such file or directory {release_transformed_path}: {e}")
execution_date = kwargs["execution_date"]
delete_old_xcoms(dag_id=MagTelescope.DAG_ID, execution_date=execution_date)
def db_load_mag_release(
project_id: str,
bucket_name: str,
data_location: str,
release_path: str,
release_date: pendulum.DateTime,
dataset_id: str = MagTelescope.DAG_ID,
) -> bool:
"""Load a MAG release into BigQuery.
:param project_id: the Google Cloud project id.
:param bucket_name: the Google Cloud bucket name where the transformed files are stored.
:param data_location: the location where the BigQuery dataset will be created.
:param release_path: the path on the Google Cloud storage bucket where the particular MAG release is located.
:param release_date: the release date of the MAG release.
:param dataset_id: the identifier of the dataset.
:return: whether the MAG release was loaded into BigQuery successfully.
"""
settings = {
"Authors": {"quote": "", "allow_quoted_newlines": True},
"FieldsOfStudy": {"quote": "", "allow_quoted_newlines": False},
"PaperAuthorAffiliations": {"quote": "", "allow_quoted_newlines": False},
"PaperCitationContexts": {"quote": "", "allow_quoted_newlines": True},
"PaperExtendedAttributes": {"quote": "", "allow_quoted_newlines": False},
"Papers": {"quote": "", "allow_quoted_newlines": True},
}
# Create dataset
create_bigquery_dataset(project_id, dataset_id, data_location, MagTelescope.DESCRIPTION)
# Get bucket
storage_client = storage.Client()
bucket = storage_client.get_bucket(bucket_name)
# List release blobs
blobs: List[Blob] = list(bucket.list_blobs(prefix=release_path))
# Guard against an empty release listing: ThreadPoolExecutor requires max_workers >= 1
max_workers = max(1, len(blobs))
with ThreadPoolExecutor(max_workers=max_workers) as executor:
# Create tasks
futures = []
futures_msgs = {}
analysis_schema_path = schema_folder()
prefix = "Mag"
file_extension = ".txt"
# De-duplicate blobs, i.e. for tables where there are more than one file:
# e.g. PaperAbstractsInvertedIndex.txt.1 and PaperAbstractsInvertedIndex.txt.2 become
# PaperAbstractsInvertedIndex.txt.* so that both are loaded into the same table.
blob_names = set()
for blob in blobs:
blob_name = blob.name
if not blob_name.endswith(file_extension):
blob_name_sans_index = re.match(r"^.+?(?=([0-9]+)?$)", blob_name).group(0)
blob_name_with_wildcard = f"{blob_name_sans_index}*"
blob_names.add(blob_name_with_wildcard)
else:
blob_names.add(blob_name)
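# For example (an illustration of the de-duplication above, paths shortened):
#   ".../PaperAbstractsInvertedIndex.txt.1" -> ".../PaperAbstractsInvertedIndex.txt.*"
#   ".../PaperAbstractsInvertedIndex.txt.2" -> ".../PaperAbstractsInvertedIndex.txt.*"  (same set entry)
#   ".../Authors.txt" is kept unchanged, since it already ends with the file extension.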
for blob_name in blob_names:
# Make table name and id
table_name = table_name_from_blob(blob_name, file_extension)
table_id = bigquery_sharded_table_id(table_name, release_date)
# Get schema for table
schema_file_path = find_schema(analysis_schema_path, table_name, release_date, prefix=prefix)
if schema_file_path is None:
logging.error(
f"No schema found with search parameters: analysis_schema_path={analysis_schema_path}, "
f"table_name={table_name}, release_date={release_date}, prefix={prefix}"
)
exit(os.EX_CONFIG)
uri = f"gs://{bucket_name}/{blob_name}"
msg = f"uri={uri}, table_id={table_id}, schema_file_path={schema_file_path}"
logging.info(f"db_load_mag_release: {msg}")
if table_name in settings:
csv_quote_character = settings[table_name]["quote"]
csv_allow_quoted_newlines = settings[table_name]["allow_quoted_newlines"]
else:
csv_quote_character = '"'
csv_allow_quoted_newlines = False
future = executor.submit(
load_bigquery_table,
uri,
dataset_id,
data_location,
table_id,
schema_file_path,
SourceFormat.CSV,
csv_field_delimiter="\t",
csv_quote_character=csv_quote_character,
csv_allow_quoted_newlines=csv_allow_quoted_newlines,
)
futures_msgs[future] = msg
futures.append(future)
# Wait for completed tasks
results = []
for future in as_completed(futures):
success = future.result()
msg = futures_msgs[future]
results.append(success)
if success:
logging.info(f"db_load_mag_release success: {msg}")
else:
logging.error(f"db_load_mag_release failed: {msg}")
return all(results)
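# A minimal, self-contained sketch (illustrative names, not part of the telescope)
# of the fan-out/fan-in pattern that db_load_mag_release uses above: submit one
# task per input, keep a handle per future so failures can be traced back to
# their input, then gather the results with as_completed and AND them together.
def run_all_tasks(task, inputs) -> bool:
    from concurrent.futures import ThreadPoolExecutor, as_completed

    results = []
    with ThreadPoolExecutor(max_workers=max(1, len(inputs))) as executor:
        # Map each future back to the input it was created for
        futures = {executor.submit(task, value): value for value in inputs}
        for future in as_completed(futures):
            results.append(future.result())
    return all(results)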
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,424
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Aniek Roelofs
import gzip
import io
import os
from datetime import timedelta
from types import SimpleNamespace
from unittest.mock import ANY, patch
import pendulum
from academic_observatory_workflows.config import test_fixtures_folder
from academic_observatory_workflows.workflows.orcid_telescope import (
OrcidRelease,
OrcidTelescope,
transform_single_file,
)
from airflow.exceptions import AirflowException, AirflowSkipException
from airflow.models.connection import Connection
from airflow.models.variable import Variable
from botocore.response import StreamingBody
from click.testing import CliRunner
from observatory.platform.utils.airflow_utils import AirflowConns, AirflowVars, BaseHook
from observatory.platform.utils.gc_utils import (
upload_file_to_cloud_storage,
upload_files_to_cloud_storage,
)
from observatory.platform.utils.test_utils import (
ObservatoryEnvironment,
ObservatoryTestCase,
module_file_path,
)
from observatory.platform.utils.workflow_utils import blob_name
class TestOrcidTelescope(ObservatoryTestCase):
"""Tests for the ORCID telescope"""
def __init__(self, *args, **kwargs):
"""Constructor which sets up variables used by tests.
:param args: arguments.
:param kwargs: keyword arguments.
"""
super(TestOrcidTelescope, self).__init__(*args, **kwargs)
self.project_id = os.getenv("TEST_GCP_PROJECT_ID")
self.data_location = os.getenv("TEST_GCP_DATA_LOCATION")
self.first_execution_date = pendulum.datetime(year=2021, month=2, day=1)
self.second_execution_date = pendulum.datetime(year=2021, month=3, day=1)
# orcid records
self.records = {}
for file in ["0000-0002-9227-8610.xml", "0000-0002-9228-8514.xml", "0000-0002-9229-8514.xml"]:
self.records[file] = {
"blob": f"{file[-7:-4]}/{file}",
"path": test_fixtures_folder("orcid", file),
}
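# Note: file[-7:-4] takes the three digits before ".xml", so
# "0000-0002-9227-8610.xml" is stored under blob "610/0000-0002-9227-8610.xml".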
# last modified file
self.last_modified_path = test_fixtures_folder("orcid", "last_modified.csv.tar")
# release used for method tests
self.release = OrcidRelease(
OrcidTelescope.DAG_ID, pendulum.datetime(2020, 1, 1), pendulum.datetime(2020, 2, 1), False, max_processes=1
)
def test_dag_structure(self):
"""Test that the ORCID DAG has the correct structure.
:return: None
"""
dag = OrcidTelescope().make_dag()
self.assert_dag_structure(
{
"check_dependencies": ["transfer"],
"transfer": ["download_transferred"],
"download_transferred": ["transform"],
"transform": ["upload_transformed"],
"upload_transformed": ["bq_load_partition"],
"bq_load_partition": ["bq_delete_old"],
"bq_delete_old": ["bq_append_new"],
"bq_append_new": ["cleanup"],
"cleanup": [],
},
dag,
)
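# A hedged, standalone sketch of the check that assert_dag_structure performs
# above: the expected mapping is {task_id: [downstream task_ids]}, so a simple
# validator compares that mapping against the DAG's actual task relations.
# (dag_matches is illustrative, not part of the test utilities.)
def dag_matches(dag, expected) -> bool:
    actual = {t.task_id: sorted(d.task_id for d in t.downstream_list) for t in dag.tasks}
    return actual == {k: sorted(v) for k, v in expected.items()}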
def test_dag_load(self):
"""Test that the ORCID DAG can be loaded from a DAG bag.
:return: None
"""
with ObservatoryEnvironment().create():
dag_file = os.path.join(module_file_path("academic_observatory_workflows.dags"), "orcid_telescope.py")
self.assert_dag_load("orcid", dag_file)
@patch("academic_observatory_workflows.workflows.orcid_telescope.aws_to_google_cloud_storage_transfer")
@patch("academic_observatory_workflows.workflows.orcid_telescope.boto3.client")
def test_telescope(self, mock_client, mock_transfer):
"""Test the ORCID telescope end to end.
:param mock_client: Mock the boto3 client constructor
:param mock_transfer: Mock the AWS to Google Cloud Storage transfer function
:return: None.
"""
# Setup Observatory environment
env = ObservatoryEnvironment(self.project_id, self.data_location)
dataset_id = env.add_dataset()
# Set up google cloud sync bucket
orcid_bucket = env.add_bucket()
# Setup Telescope
telescope = OrcidTelescope(dataset_id=dataset_id)
dag = telescope.make_dag()
# Create the Observatory environment and run tests
with env.create():
# Add connection
conn = Connection(conn_id=AirflowConns.ORCID, uri="aws://UWLA41aAhdja:AJLD91saAJSKAL0AjAhkaka@")
env.add_connection(conn)
# Add variable
var = Variable(key=AirflowVars.ORCID_BUCKET, val=orcid_bucket) # type: ignore
env.add_variable(var)
# first run
with env.create_dag_run(dag, self.first_execution_date) as dag_run:
# Test that all dependencies are specified: no error should be thrown
env.run_task(telescope.check_dependencies.__name__)
start_date, end_date, first_release = telescope.get_release_info(
execution_date=self.first_execution_date,
dag=dag,
dag_run=dag_run,
next_execution_date=pendulum.datetime(2021, 2, 7),
)
# Check the release info for the first run
self.assertEqual(start_date, dag.default_args["start_date"])
self.assertEqual(end_date, pendulum.today("UTC") - timedelta(days=1))
self.assertTrue(first_release)
# use release info for other tasks
release = OrcidRelease(
telescope.dag_id,
start_date,
end_date,
first_release,
max_processes=1,
)
# Test transfer task
mock_transfer.return_value = True, 2
env.run_task(telescope.transfer.__name__)
mock_transfer.assert_called_once()
try:
self.assertTupleEqual(mock_transfer.call_args[0], (conn.login, conn.password))
except AssertionError:
raise AssertionError("AWS key id and secret not passed correctly to transfer function")
self.assertDictEqual(
mock_transfer.call_args[1],
{
"aws_bucket": OrcidTelescope.SUMMARIES_BUCKET,
"include_prefixes": [],
"gc_project_id": self.project_id,
"gc_bucket": orcid_bucket,
"description": "Transfer ORCID data from airflow " "telescope",
"last_modified_since": None,
},
)
# Upload files to bucket, to mock transfer
record1 = self.records["0000-0002-9227-8610.xml"]
record2 = self.records["0000-0002-9228-8514.xml"]
upload_files_to_cloud_storage(
orcid_bucket, [record1["blob"], record2["blob"]], [record1["path"], record2["path"]]
)
self.assert_blob_integrity(orcid_bucket, record1["blob"], record1["path"])
self.assert_blob_integrity(orcid_bucket, record2["blob"], record2["path"])
# Test that file was downloaded
env.run_task(telescope.download_transferred.__name__)
downloaded_hashes = {
"0000-0002-9227-8610.xml": "31d17a63cd04cbd5733cafe7f3561cb7",
"0000-0002-9228-8514.xml": "0e3426db67d221c9cc53737478ea968c",
}
self.assertEqual(2, len(release.download_files))
for file in release.download_files:
self.assert_file_integrity(file, downloaded_hashes[os.path.basename(file)], "md5")
# Test that files transformed
env.run_task(telescope.transform.__name__)
transform_hashes = {"610.jsonl.gz": "33f64619", "514.jsonl.gz": "f1179546"}
self.assertEqual(2, len(release.transform_files))
# Sort lines so that gzip crc is always the same
for file in release.transform_files:
with gzip.open(file, "rb") as f_in:
lines = sorted(f_in.readlines())
with gzip.open(file, "wb") as f_out:
f_out.writelines(lines)
self.assert_file_integrity(file, transform_hashes[os.path.basename(file)], "gzip_crc")
# Test that transformed file uploaded
env.run_task(telescope.upload_transformed.__name__)
for file in release.transform_files:
self.assert_blob_integrity(env.transform_bucket, blob_name(file), file)
# Test that load partition task is skipped for the first release
ti = env.run_task(telescope.bq_load_partition.__name__)
self.assertEqual(ti.state, "skipped")
# Test delete old task is skipped for the first release
with patch("observatory.platform.utils.gc_utils.bq_query_bytes_daily_limit_check"):
ti = env.run_task(telescope.bq_delete_old.__name__)
self.assertEqual(ti.state, "skipped")
# Test append new creates table
env.run_task(telescope.bq_append_new.__name__)
main_table_id, partition_table_id = release.dag_id, f"{release.dag_id}_partitions"
table_id = f"{self.project_id}.{telescope.dataset_id}.{main_table_id}"
expected_rows = 2
self.assert_table_integrity(table_id, expected_rows)
# Test that all telescope data deleted
download_folder, extract_folder, transform_folder = (
release.download_folder,
release.extract_folder,
release.transform_folder,
)
env.run_task(telescope.cleanup.__name__)
self.assert_cleanup(download_folder, extract_folder, transform_folder)
# second run
with env.create_dag_run(dag, self.second_execution_date) as dag_run:
# Test that all dependencies are specified: no error should be thrown
env.run_task(telescope.check_dependencies.__name__)
start_date, end_date, first_release = telescope.get_release_info(
execution_date=self.second_execution_date,
dag=dag,
dag_run=dag_run,
next_execution_date=pendulum.datetime(2021, 3, 7),
)
self.assertEqual(release.end_date + timedelta(days=1), start_date)
self.assertEqual(pendulum.today("UTC") - timedelta(days=1), end_date)
self.assertFalse(first_release)
# Use release info for other tasks
release = OrcidRelease(
telescope.dag_id,
start_date,
end_date,
first_release,
max_processes=1,
)
# Test transfer task
mock_transfer.return_value = True, 2
mock_transfer.reset_mock()
env.run_task(telescope.transfer.__name__)
mock_transfer.assert_called_once()
try:
self.assertTupleEqual(mock_transfer.call_args[0], (conn.login, conn.password))
except AssertionError:
raise AssertionError("AWS key id and secret not passed correctly to transfer function")
self.assertDictEqual(
mock_transfer.call_args[1],
{
"aws_bucket": OrcidTelescope.SUMMARIES_BUCKET,
"include_prefixes": [],
"gc_project_id": self.project_id,
"gc_bucket": orcid_bucket,
"description": "Transfer ORCID data from airflow " "telescope",
"last_modified_since": release.start_date,
},
)
# Upload files to bucket, to mock transfer
record3 = self.records["0000-0002-9229-8514.xml"]
upload_file_to_cloud_storage(orcid_bucket, record3["blob"], record3["path"])
self.assert_blob_integrity(orcid_bucket, record3["blob"], record3["path"])
# Mock response of get_object on last_modified file, mocking lambda file
with open(self.last_modified_path, "rb") as f:
file_bytes = f.read()
mock_client().get_object.return_value = {
"Body": StreamingBody(io.BytesIO(file_bytes), len(file_bytes))
}
# Test that file was downloaded
env.run_task(telescope.download_transferred.__name__)
self.assertEqual(2, len(release.download_files))
for file in release.download_files:
downloaded_hashes = {
"0000-0002-9228-8514.xml": "0e3426db67d221c9cc53737478ea968c",
"0000-0002-9229-8514.xml": "38472bea0cc72cbefa54f0bf5f98d95f",
}
self.assert_file_integrity(file, downloaded_hashes[os.path.basename(file)], "md5")
# Test that files transformed
env.run_task(telescope.transform.__name__)
self.assertEqual(1, len(release.transform_files))
transform_path = release.transform_files[0]
with gzip.open(transform_path, "rb") as f_in:
lines = sorted(f_in.readlines())
with gzip.open(transform_path, "wb") as f_out:
f_out.writelines(lines)
expected_file_hash = "aab89332"
self.assert_file_integrity(transform_path, expected_file_hash, "gzip_crc")
# Test that transformed file uploaded
env.run_task(telescope.upload_transformed.__name__)
self.assert_blob_integrity(env.transform_bucket, blob_name(transform_path), transform_path)
# Test that load partition task creates partition
env.run_task(telescope.bq_load_partition.__name__)
main_table_id, partition_table_id = release.dag_id, f"{release.dag_id}_partitions"
table_id = f"{self.project_id}.{telescope.dataset_id}.{partition_table_id}"
expected_rows = 2
self.assert_table_integrity(table_id, expected_rows)
# Test task deleted rows from main table
with patch("observatory.platform.utils.gc_utils.bq_query_bytes_daily_limit_check"):
env.run_task(telescope.bq_delete_old.__name__)
table_id = f"{self.project_id}.{telescope.dataset_id}.{main_table_id}"
expected_rows = 1
self.assert_table_integrity(table_id, expected_rows)
# Test append new adds rows to table
env.run_task(telescope.bq_append_new.__name__)
table_id = f"{self.project_id}.{telescope.dataset_id}.{main_table_id}"
expected_rows = 3
self.assert_table_integrity(table_id, expected_rows)
# Test that all telescope data deleted
download_folder, extract_folder, transform_folder = (
release.download_folder,
release.extract_folder,
release.transform_folder,
)
env.run_task(telescope.cleanup.__name__)
self.assert_cleanup(download_folder, extract_folder, transform_folder)
@patch("academic_observatory_workflows.workflows.orcid_telescope.aws_to_google_cloud_storage_transfer")
@patch("academic_observatory_workflows.workflows.orcid_telescope.get_aws_conn_info")
@patch("academic_observatory_workflows.workflows.orcid_telescope.Variable.get")
def test_transfer(self, mock_variable_get, mock_aws_info, mock_transfer):
"""Test transfer method of the ORCID release.
:param mock_variable_get: Mock Airflow Variable get() method
:param mock_aws_info: Mock getting AWS info
:param mock_transfer: Mock the transfer function called inside release.transfer()
:return: None.
"""
mock_variable_get.side_effect = lambda x: {"orcid_bucket": "bucket", "project_id": "project_id"}[x]
mock_aws_info.return_value = "key_id", "secret_key"
max_retries = 3
mock_transfer.return_value = True, 3
# Test transfer in case of first release
self.release.first_release = False
self.release.transfer(max_retries)
mock_transfer.assert_called_once_with(
"key_id",
"secret_key",
aws_bucket=OrcidTelescope.SUMMARIES_BUCKET,
include_prefixes=[],
gc_project_id="project_id",
gc_bucket="bucket",
description="Transfer ORCID data from airflow telescope",
last_modified_since=self.release.start_date,
)
mock_transfer.reset_mock()
# Test transfer in case of later release
self.release.first_release = True
self.release.transfer(max_retries)
mock_transfer.assert_called_once_with(
"key_id",
"secret_key",
aws_bucket=OrcidTelescope.SUMMARIES_BUCKET,
include_prefixes=[],
gc_project_id="project_id",
gc_bucket="bucket",
description="Transfer ORCID data from airflow telescope",
last_modified_since=None,
)
# Test failed transfer
mock_transfer.return_value = False, 4
with self.assertRaises(AirflowException):
self.release.transfer(max_retries)
# Test successful transfer, but no objects were transferred
mock_transfer.return_value = True, 0
with self.assertRaises(AirflowSkipException):
self.release.transfer(max_retries)
@patch("academic_observatory_workflows.workflows.orcid_telescope.subprocess.Popen")
@patch("academic_observatory_workflows.workflows.orcid_telescope.get_aws_conn_info")
@patch("academic_observatory_workflows.workflows.orcid_telescope.write_modified_record_blobs")
@patch("academic_observatory_workflows.workflows.orcid_telescope.Variable.get")
@patch.dict(os.environ, {"GOOGLE_APPLICATION_CREDENTIALS": "credentials.json"}, clear=True)
def test_download_transferred(self, mock_variable_get, mock_write_blobs, mock_aws_info, mock_subprocess):
"""Test the download_transferred method of the ORCID release.
:param mock_variable_get: Mock Airflow Variable get() method
:param mock_write_blobs: Mock the function that writes modified record blobs
:param mock_aws_info: Mock getting AWS info
:param mock_subprocess: Mock the subprocess returncode and communicate method
:return: None.
"""
mock_variable_get.return_value = "orcid_bucket"
mock_aws_info.return_value = "key_id", "secret_key"
mock_subprocess.return_value.returncode = 0
mock_subprocess.return_value.communicate.return_value = "stdout".encode(), "stderr".encode()
# Test download in case of first release
self.release.first_release = True
self.release.download_transferred()
mock_write_blobs.assert_not_called()
self.assertEqual(2, mock_subprocess.call_count)
mock_subprocess.assert_any_call(
[
"gcloud",
"auth",
"activate-service-account",
f"--key-file=credentials.json",
],
stdout=-1,
stderr=-1,
env=dict({"GOOGLE_APPLICATION_CREDENTIALS": "credentials.json"}, CLOUDSDK_PYTHON="python3"),
)
mock_subprocess.assert_called_with(
[
"gsutil",
"-m",
"-q",
"cp",
"-L",
os.path.join(self.release.download_folder, "cp.log"),
"-r",
"gs://orcid_bucket",
self.release.download_folder,
],
stdout=-1,
stderr=-1,
)
# Test download in case of second release, using modified records file
self.release.first_release = False
mock_subprocess.reset_mock()
with CliRunner().isolated_filesystem():
with open(self.release.modified_records_path, "w") as f:
f.write("unit test")
self.release.download_transferred()
mock_write_blobs.assert_called_once_with(
self.release.start_date,
self.release.end_date,
"key_id",
"secret_key",
"orcid_bucket",
self.release.modified_records_path,
)
self.assertEqual(2, mock_subprocess.call_count)
mock_subprocess.assert_any_call(
[
"gcloud",
"auth",
"activate-service-account",
f"--key-file=credentials.json",
],
stdout=-1,
stderr=-1,
env=dict({"GOOGLE_APPLICATION_CREDENTIALS": "credentials.json"}, CLOUDSDK_PYTHON="python3"),
)
mock_subprocess.assert_called_with(
[
"gsutil",
"-m",
"-q",
"cp",
"-L",
os.path.join(self.release.download_folder, "cp.log"),
"-I",
self.release.download_folder,
],
stdin=ANY,
stdout=-1,
stderr=-1,
)
self.assertEqual(self.release.modified_records_path, mock_subprocess.call_args[1]["stdin"].name)
# Test download when first subprocess fails
mock_subprocess.return_value.returncode = -1
mock_subprocess.return_value.communicate.return_value = "stdout".encode(), "stderr".encode()
with self.assertRaises(AirflowException):
self.release.download_transferred()
# Test download when second subprocess fails
def communicate():
return "stdout".encode(), "stderr".encode()
mock_subprocess.side_effect = [
SimpleNamespace(communicate=communicate, returncode=0),
SimpleNamespace(communicate=communicate, returncode=-1),
]
with self.assertRaises(AirflowException):
self.release.download_transferred()
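# A hedged sketch (paths illustrative) of the "gsutil -m cp -I" pattern that the
# second download case above mocks: the blobs to copy are streamed on stdin, one
# URL per line, rather than passed as command-line arguments.
def gsutil_copy_from_list(list_path: str, dest_dir: str) -> int:
    import subprocess

    with open(list_path, "rb") as stdin_file:
        proc = subprocess.Popen(
            ["gsutil", "-m", "-q", "cp", "-I", dest_dir],
            stdin=stdin_file,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        proc.communicate()
    return proc.returncode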
@patch("academic_observatory_workflows.workflows.orcid_telescope.Variable.get")
def test_transform_single_file(self, mock_variable_get):
"""Test the transform_single_file method.
:param mock_variable_get: Mock Airflow Variable get() method
:return: None.
"""
with CliRunner().isolated_filesystem() as t:
mock_variable_get.return_value = os.path.join(t, "data")
file_name = "0000-0002-9228-8514.xml"
transform_folder = self.release.transform_folder
file_dir = os.path.join(self.release.transform_folder, file_name[-7:-4])
transform_path = os.path.join(file_dir, os.path.splitext(file_name)[0] + ".jsonl")
# Test standard record
with open(file_name, "w") as f:
f.write(
'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>'
'<record:record path="/0000-0002-9227-8514">'
"<common:orcid-identifier>"
"<common:path>0000-0002-9227-8514</common:path>"
"</common:orcid-identifier>"
"</record:record>"
)
transform_single_file(file_name, transform_folder)
self.assert_file_integrity(transform_path, "6d7dbc0fc69db96025b82c018b3d6305", "md5")
# Test transform standard record is skipped, because file already exists
transform_single_file(file_name, transform_folder)
self.assert_file_integrity(transform_path, "6d7dbc0fc69db96025b82c018b3d6305", "md5")
os.remove(transform_path)
# Test record with error
with open(file_name, "w") as f:
f.write(
'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>'
'<error:error path="/0000-0002-9227-8514">'
"<common:orcid-identifier>"
"<common:path>0000-0002-9227-8514</common:path>"
"</common:orcid-identifier>"
"</error:error>"
)
transform_single_file(file_name, transform_folder)
self.assert_file_integrity(transform_path, "6d7dbc0fc69db96025b82c018b3d6305", "md5")
os.remove(transform_path)
# Test invalid record
with open(file_name, "w") as f:
f.write(
'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>'
'<invalid:invalid> test="test">'
"</invalid:invalid>"
)
with self.assertRaises(AirflowException):
transform_single_file(file_name, transform_folder)
@patch("academic_observatory_workflows.workflows.orcid_telescope.Variable.get")
@patch.object(BaseHook, "get_connection")
@patch("academic_observatory_workflows.workflows.orcid_telescope.storage_bucket_exists")
def test_check_dependencies(self, mock_bucket_exists, mock_conn_get, mock_variable_get):
"""Test the check_dependencies task
:param mock_bucket_exists: Mock output of storage_bucket_exists function
:param mock_conn_get: Mock Airflow get_connection method
:param mock_variable_get: Mock Airflow Variable get() method
:return: None.
"""
mock_variable_get.return_value = "orcid_bucket"
mock_conn_get.return_value = "orcid"
# Test that all dependencies are specified: no error should be thrown
mock_bucket_exists.return_value = True
OrcidTelescope().check_dependencies()
# Test that dependency is missing, no existing storage bucket
mock_bucket_exists.return_value = False
with self.assertRaises(AirflowException):
OrcidTelescope().check_dependencies()
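# A minimal sketch (helper names assumed) of the dependency check that
# test_check_dependencies exercises above: a missing storage bucket should
# surface as an AirflowException rather than a silent failure.
def check_bucket(bucket_name: str, bucket_exists) -> None:
    from airflow.exceptions import AirflowException

    if not bucket_exists(bucket_name):
        raise AirflowException(f"Required storage bucket does not exist: {bucket_name}")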
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,425
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py
|
# Copyright 2020 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Tuan Chien
import json
import logging
import os
import unittest
from collections import OrderedDict
from unittest.mock import MagicMock, call, patch
import observatory.api.server.orm as orm
import pendulum
from academic_observatory_workflows.config import test_fixtures_folder
from academic_observatory_workflows.workflows.web_of_science_telescope import (
WebOfScienceRelease,
WebOfScienceTelescope,
WosJsonParser,
WosNameAttributes,
WosUtilConst,
WosUtility,
)
from airflow.exceptions import AirflowException
from airflow.models import Connection
from airflow.utils.state import State
from click.testing import CliRunner
from observatory.platform.utils.airflow_utils import AirflowConns, AirflowVars
from observatory.platform.utils.api import make_observatory_api
from observatory.platform.utils.gc_utils import run_bigquery_query
from observatory.platform.utils.test_utils import (
HttpServer,
ObservatoryEnvironment,
ObservatoryTestCase,
module_file_path,
)
from observatory.platform.utils.workflow_utils import (
bigquery_sharded_table_id,
blob_name,
make_dag_id,
)
class TestWosUtility(unittest.TestCase):
"""Test WosUtility."""
def __init__(self, *args, **kwargs):
"""Constructor which sets up variables used by tests.
:param args: arguments.
:param kwargs: keyword arguments.
"""
super(TestWosUtility, self).__init__(*args, **kwargs)
def test_build_query(self):
institution_ids = ["test1", "test2"]
start_date = pendulum.datetime(2021, 1, 1)
end_date = pendulum.datetime(2021, 2, 1)
period = pendulum.period(start_date, end_date)
query = WosUtility.build_query(institution_ids=institution_ids, period=period)
expected_query = OrderedDict(
[
("query", "OG=(test1 OR test2)"),
("count", WosUtilConst.RESULT_LIMIT),
("offset", 1),
("timeSpan", {"begin": start_date.isoformat(), "end": end_date.isoformat()}),
]
)
self.assertEqual(query, expected_query)
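# For example, the query built above for institution_ids=["test1", "test2"] over
# January 2021 is (values reproduced from the assertions in this test):
#   OrderedDict([("query", "OG=(test1 OR test2)"),
#                ("count", WosUtilConst.RESULT_LIMIT),
#                ("offset", 1),
#                ("timeSpan", {"begin": "2021-01-01T00:00:00+00:00",
#                              "end": "2021-02-01T00:00:00+00:00"})])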
@patch("academic_observatory_workflows.workflows.web_of_science_telescope.xmltodict.parse")
def test_parse_query_none(self, m_xmlparse):
expected_schema_version = "schema version"
m_xmlparse.return_value = {"records": {"@xmlns": expected_schema_version, "REC": []}}
records, schema_ver = WosUtility.parse_query(None)
self.assertEqual(records, [])
self.assertEqual(schema_ver, expected_schema_version)
@patch("academic_observatory_workflows.workflows.web_of_science_telescope.xmltodict.parse")
def test_parse_query(self, m_xmlparse):
expected_schema_version = "schema version"
m_xmlparse.return_value = {"records": {"@xmlns": expected_schema_version, "REC": [1, 2, 3]}}
records, schema_ver = WosUtility.parse_query(None)
self.assertEqual(records, [1, 2, 3])
self.assertEqual(schema_ver, expected_schema_version)
def test_search(self):
institution_ids = ["test1"]
start_date = pendulum.datetime(2021, 1, 1)
end_date = pendulum.datetime(2021, 2, 1)
period = pendulum.period(start_date, end_date)
query = WosUtility.build_query(institution_ids=institution_ids, period=period)
client = MagicMock()
WosUtility.search(client=client, query=query)
expected_call = call.search(
query="OG=(test1)",
count=100,
offset=1,
timeSpan={"begin": "2021-01-01T00:00:00+00:00", "end": "2021-02-01T00:00:00+00:00"},
)
self.assertEqual(client.method_calls[0], expected_call)
@patch("academic_observatory_workflows.workflows.web_of_science_telescope.WosUtility.search")
def test_make_query_not_limit(self, m_search):
results = MagicMock()
results.recordsFound = 2
results.records = ""
m_search.return_value = results
client = MagicMock()
institution_ids = ["test1"]
start_date = pendulum.datetime(2021, 1, 1)
end_date = pendulum.datetime(2021, 1, 31)
period = pendulum.period(start_date, end_date)
query = WosUtility.build_query(institution_ids=institution_ids, period=period)
records = WosUtility.make_query(client=client, query=query)
self.assertEqual(records, [""])
@patch("academic_observatory_workflows.workflows.web_of_science_telescope.WosUtility.search")
def test_make_query_over_limit(self, m_search):
results = MagicMock()
results.recordsFound = 200
results.records = ""
m_search.return_value = results
client = MagicMock()
institution_ids = ["test1"]
start_date = pendulum.datetime(2021, 1, 1)
end_date = pendulum.datetime(2021, 1, 31)
period = pendulum.period(start_date, end_date)
query = WosUtility.build_query(institution_ids=institution_ids, period=period)
records = WosUtility.make_query(client=client, query=query)
self.assertEqual(records, ["", ""])
@patch("academic_observatory_workflows.workflows.web_of_science_telescope.write_to_file")
@patch("academic_observatory_workflows.workflows.web_of_science_telescope.WosUtility.search")
def test_download_wos_period(self, m_search, m_write_file):
results = MagicMock()
results.recordsFound = 100
results.records = ""
m_search.return_value = results
client = MagicMock()
conn = ""
start_date = pendulum.datetime(2021, 1, 1)
end_date = pendulum.datetime(2021, 1, 31)
period = pendulum.period(start_date.date(), end_date.date())
with CliRunner().isolated_filesystem() as tmpdir:
WosUtility.download_wos_period(
client=client, conn=conn, period=period, institution_ids=[""], download_dir=tmpdir
)
self.assertEqual(m_write_file.call_count, 1)
args, _ = m_write_file.call_args
self.assertEqual(args[0], "")
@patch("academic_observatory_workflows.workflows.web_of_science_telescope.write_to_file")
@patch("academic_observatory_workflows.workflows.web_of_science_telescope.WosUtility.search")
@patch("academic_observatory_workflows.workflows.web_of_science_telescope.WosClient")
def test_download_wos_batch(self, m_client, m_search, m_write_file):
m_client.return_value.__enter__.return_value.name = MagicMock()
results = MagicMock()
results.recordsFound = 100
results.records = ""
m_search.return_value = results
batch = [
pendulum.period(pendulum.datetime(2021, 1, 1).date(), pendulum.datetime(2021, 1, 31).date()),
pendulum.period(pendulum.datetime(2021, 2, 1).date(), pendulum.datetime(2021, 2, 28).date()),
]
with CliRunner().isolated_filesystem() as tmpdir:
WosUtility.download_wos_batch(
login="login", password="pass", batch=batch, conn="conn", institution_ids=[""], download_dir=tmpdir
)
self.assertEqual(m_write_file.call_count, 2)
self.assertEqual(m_write_file.call_args_list[0][0][0], "")
self.assertEqual(m_write_file.call_args_list[1][0][0], "")
@patch("academic_observatory_workflows.workflows.web_of_science_telescope.WosUtility.download_wos_batch")
def test_download_wos_parallel_single_session(self, m_download):
schedule = [1, 2, 3, 4]
WosUtility.download_wos_parallel(
login="", password="", schedule=schedule, conn="", institution_ids=[""], download_dir=""
)
self.assertEqual(m_download.call_count, 1)
self.assertEqual(m_download.call_args_list[0][1]["batch"], schedule)
@patch("academic_observatory_workflows.workflows.web_of_science_telescope.WosUtility.download_wos_batch")
def test_download_wos_parallel_multi_session(self, m_download):
schedule = [1, 2, 3, 4, 5, 6]
WosUtility.download_wos_parallel(
login="", password="", schedule=schedule, conn="", institution_ids=[""], download_dir=""
)
self.assertEqual(m_download.call_count, 5)
self.assertEqual(m_download.call_args_list[0][1]["batch"], [1, 6])
self.assertEqual(m_download.call_args_list[1][1]["batch"], [2])
self.assertEqual(m_download.call_args_list[2][1]["batch"], [3])
self.assertEqual(m_download.call_args_list[3][1]["batch"], [4])
self.assertEqual(m_download.call_args_list[4][1]["batch"], [5])
@patch("academic_observatory_workflows.workflows.web_of_science_telescope.WosUtility.download_wos_batch")
def test_download_wos_parallel_multi_session_no_remainder(self, m_download):
schedule = [1, 2, 3, 4, 5]
WosUtility.download_wos_parallel(
login="", password="", schedule=schedule, conn="", institution_ids=[""], download_dir=""
)
self.assertEqual(m_download.call_count, 5)
self.assertEqual(m_download.call_args_list[0][1]["batch"], [1])
self.assertEqual(m_download.call_args_list[1][1]["batch"], [2])
self.assertEqual(m_download.call_args_list[2][1]["batch"], [3])
self.assertEqual(m_download.call_args_list[3][1]["batch"], [4])
self.assertEqual(m_download.call_args_list[4][1]["batch"], [5])
@patch("academic_observatory_workflows.workflows.web_of_science_telescope.WosUtility.download_wos_batch")
def test_download_wos_sequential(self, m_download):
schedule = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
WosUtility.download_wos_sequential(
login="", password="", schedule=schedule, conn="", institution_ids=[""], download_dir=""
)
self.assertEqual(m_download.call_count, 1)
self.assertEqual(m_download.call_args_list[0][1]["batch"], schedule)
class TestWosNameAttributes(unittest.TestCase):
"""Test the WosNameAttributes class."""
def __init__(self, *args, **kwargs):
"""Constructor which sets up variables used by tests.
:param args: arguments.
:param kwargs: keyword arguments.
"""
super(TestWosNameAttributes, self).__init__(*args, **kwargs)
def test_get_contribs_blank(self):
data = {}
wna = WosNameAttributes(data)
self.assertEqual(wna._contribs, {})
data = {"static_data": {}}
wna = WosNameAttributes(data)
self.assertEqual(wna._contribs, {})
def test_no_name(self):
data = {}
wna = WosNameAttributes(data)
orcid = wna.get_orcid("no name")
self.assertEqual(orcid, None)
rid = wna.get_r_id("no name")
self.assertEqual(rid, None)
data = {
"static_data": {
"contributors": {
"contributor": [
{"name": {"first_name": "first", "last_name": "last", "@r_id": "rid", "@orcid_id": "orcid"}}
]
}
}
}
wna = WosNameAttributes(data)
orcid = wna.get_orcid("no name")
self.assertEqual(orcid, None)
rid = wna.get_r_id("no name")
self.assertEqual(rid, None)
def test_no_orcid_no_rid(self):
data = {
"static_data": {
"contributors": {
"contributor": [
{
"name": {
"first_name": "first",
"last_name": "last",
}
}
]
}
}
}
wna = WosNameAttributes(data)
self.assertEqual(wna._contribs, {"first last": {}})
def test_orcid_rid(self):
data = {
"static_data": {
"contributors": {
"contributor": [
{"name": {"first_name": "first", "last_name": "last", "@r_id": "rid", "@orcid_id": "orcid"}}
]
}
}
}
wna = WosNameAttributes(data)
self.assertEqual(wna._contribs, {"first last": {"r_id": "rid", "orcid": "orcid"}})
orcid = wna.get_orcid("first last")
self.assertEqual(orcid, "orcid")
rid = wna.get_r_id("first last")
self.assertEqual(rid, "rid")
class TestWosParse(unittest.TestCase):
"""Test web of science response parsing."""
def __init__(self, *args, **kwargs):
"""Constructor which sets up variables used by tests.
:param args: arguments.
:param kwargs: keyword arguments.
"""
super(TestWosParse, self).__init__(*args, **kwargs)
self.fixtures_dir = test_fixtures_folder("web_of_science")
self.fixture_file = "wos-2020-10-01.json"
self.wos_2020_10_01_json_path = os.path.join(self.fixtures_dir, self.fixture_file)
with open(self.wos_2020_10_01_json_path, "r") as f:
self.data = json.load(f)
self.harvest_datetime = pendulum.now().isoformat()
self.release_date = pendulum.date(2020, 10, 1).isoformat()
def test_get_identifiers(self):
"""Extract identifiers"""
data = self.data[0]
identifiers = WosJsonParser.get_identifiers(data)
self.assertEqual(len(identifiers), 10)
self.assertEqual(identifiers["uid"], "WOS:000000000000000")
self.assertEqual(identifiers["issn"], "0000-0000")
self.assertEqual(identifiers["eissn"], "0000-0000")
self.assertEqual(identifiers["doi"], "10.0000/j.gaz.2020.01.001")
data = {"UID": "ID"}
identifiers = WosJsonParser.get_identifiers(data)
expected_ids = {
"parent_book_doi": None,
"isbn": None,
"art_no": None,
"doi": None,
"issn": None,
"eissn": None,
"eisbn": None,
"meeting_abs": None,
"xref_doi": None,
"uid": "ID",
}
self.assertEqual(expected_ids, identifiers)
def test_get_identifiers_types(self):
data = {
"UID": "ID",
"dynamic_data": {
"cluster_related": {
"identifiers": {
"identifier": [
{"@type": "bad_type", "@value": "something"},
{"@type": "isbn", "@value": "isbn"},
]
}
}
},
}
identifiers = WosJsonParser.get_identifiers(data)
expected_ids = {
"parent_book_doi": None,
"isbn": "isbn",
"art_no": None,
"doi": None,
"issn": None,
"eissn": None,
"eisbn": None,
"meeting_abs": None,
"xref_doi": None,
"uid": "ID",
}
self.assertEqual(expected_ids, identifiers)
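# A hedged sketch (normalise_identifiers is illustrative) of the behaviour the
# two identifier tests above pin down: every known key defaults to None, "uid"
# comes from data["UID"], and only identifiers whose @type matches a known key
# are copied over; unknown types such as "bad_type" are ignored.
def normalise_identifiers(data) -> dict:
    fields = ["parent_book_doi", "isbn", "art_no", "doi", "issn", "eissn",
              "eisbn", "meeting_abs", "xref_doi"]
    ids = {field: None for field in fields}
    ids["uid"] = data.get("UID")
    entries = (data.get("dynamic_data", {}).get("cluster_related", {})
               .get("identifiers", {}).get("identifier", []))
    for entry in entries:
        if entry.get("@type") in fields:
            ids[entry["@type"]] = entry.get("@value")
    return ids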
def test_get_pub_info(self):
"""Extract publication info"""
data = self.data[0]
pub_info = WosJsonParser.get_pub_info(data)
self.assertEqual(pub_info["sort_date"], "2020-01-01")
self.assertEqual(pub_info["pub_type"], "Journal")
self.assertEqual(pub_info["page_count"], 2)
self.assertEqual(pub_info["source"], "JUPITER GAZETTE")
self.assertEqual(pub_info["doc_type"], "Article")
self.assertEqual(pub_info["publisher"], "JUPITER PUBLISHING LTD")
self.assertEqual(pub_info["publisher_city"], "SPRINGFIELD")
def test_get_pub_info_no_fields(self):
expected_pub_info = {
"sort_date": None,
"pub_type": None,
"page_count": None,
"source": None,
"doc_type": None,
"publisher": None,
"publisher_city": None,
}
data = {}
pub_info = WosJsonParser.get_pub_info(data)
self.assertEqual(expected_pub_info, pub_info)
data = {"static_data": {"summary": {}}}
pub_info = WosJsonParser.get_pub_info(data)
self.assertEqual(expected_pub_info, pub_info)
def test_get_pub_info_no_title(self):
expected_pub_info = {
"sort_date": None,
"pub_type": None,
"page_count": None,
"source": None,
"doc_type": None,
"publisher": None,
"publisher_city": None,
}
data = {"static_data": {"summary": {"titles": {"title": []}}}}
pub_info = WosJsonParser.get_pub_info(data)
self.assertEqual(expected_pub_info, pub_info)
def test_get_pub_info_non_source_title(self):
expected_pub_info = {
"sort_date": None,
"pub_type": None,
"page_count": None,
"source": None,
"doc_type": None,
"publisher": None,
"publisher_city": None,
}
data = {"static_data": {"summary": {"titles": {"title": [{"@type": "notsource"}]}}}}
pub_info = WosJsonParser.get_pub_info(data)
self.assertEqual(expected_pub_info, pub_info)
def test_get_title(self):
"""Extract title"""
data = self.data[0]
title = WosJsonParser.get_title(data)
truth = (
"The habitats of endangered hypnotoads on the southern oceans of Europa: a Ophiophagus hannah perspective"
)
self.assertEqual(title, truth)
def test_get_title_key_error(self):
data = {}
title = WosJsonParser.get_title(data)
self.assertEqual(title, None)
def test_get_title_no_titles(self):
data = {"static_data": {"summary": {"titles": {"title": []}}}}
title = WosJsonParser.get_title(data)
self.assertEqual(title, None)
def test_get_names(self):
"""Extract name information, e.g. authors"""
data = self.data[0]
names = WosJsonParser.get_names(data)
self.assertEqual(len(names), 3)
entry = names[0]
self.assertEqual(entry["seq_no"], 1)
self.assertEqual(entry["role"], "author")
self.assertEqual(entry["first_name"], "Big Eaty")
self.assertEqual(entry["last_name"], "Snake")
self.assertEqual(entry["wos_standard"], "Snake, BE")
self.assertEqual(entry["daisng_id"], "101010")
self.assertEqual(entry["full_name"], "Snake, Big Eaty")
self.assertEqual(entry["orcid"], "0000-0000-0000-0001")
self.assertEqual(entry["r_id"], "D-0000-2000")
entry = names[1]
self.assertEqual(entry["seq_no"], 2)
self.assertEqual(entry["role"], "author")
self.assertEqual(entry["first_name"], "Hypno")
self.assertEqual(entry["last_name"], "Toad")
self.assertEqual(entry["wos_standard"], "Toad, H")
self.assertEqual(entry["daisng_id"], "100000")
self.assertEqual(entry["full_name"], "Toad, Hypno")
self.assertEqual(entry["orcid"], "0000-0000-0000-0002")
self.assertEqual(entry["r_id"], "H-0000-2001")
entry = names[2]
self.assertEqual(entry["seq_no"], 3)
self.assertEqual(entry["role"], "author")
self.assertEqual(entry["first_name"], "Great")
self.assertEqual(entry["last_name"], "Historian")
self.assertEqual(entry["wos_standard"], "Historian, G")
self.assertEqual(entry["daisng_id"], "200000")
self.assertEqual(entry["full_name"], "Historian, Great")
self.assertEqual(entry["orcid"], "0000-0000-0000-0003")
self.assertEqual(entry["r_id"], None)
def test_get_names_no_fields(self):
"""Extract name information, e.g. authors"""
data = {}
names = WosJsonParser.get_names(data)
self.assertEqual(names, [])
def test_get_languages(self):
"""Extract language information"""
data = self.data[0]
languages = WosJsonParser.get_languages(data)
self.assertEqual(len(languages), 1)
self.assertEqual(languages[0]["type"], "primary")
self.assertEqual(languages[0]["name"], "Mindwaves")
def test_get_languages_no_field(self):
data = {}
languages = WosJsonParser.get_languages(data)
self.assertEqual(languages, [])
def test_get_refcount(self):
"""Extract reference count"""
data = self.data[0]
refs = WosJsonParser.get_refcount(data)
self.assertEqual(refs, 10000)
def test_get_refcount_no_field(self):
data = {}
refs = WosJsonParser.get_refcount(data)
self.assertEqual(refs, None)
def test_get_abstract(self):
"""Extract the abstract"""
data = self.data[0]
abstract = WosJsonParser.get_abstract(data)
self.assertEqual(len(abstract), 1)
head = abstract[0][0:38]
truth = "Jupiter hypnotoads lead mysterious liv"
self.assertEqual(head, truth)
self.assertEqual(len(abstract[0]), 169)
def test_get_abstract_no_field(self):
data = {}
abstract = WosJsonParser.get_abstract(data)
self.assertEqual(abstract, [])
def test_get_keyword(self):
"""Extract keywords and keywords plus if available"""
data = self.data[0]
keywords = WosJsonParser.get_keyword(data)
self.assertEqual(len(keywords), 15)
word_list = [
"Jupiter",
"Toads",
"Snakes",
"JPT",
"JPS",
"WORD1",
"WORD2",
"WORD3",
"WORD4",
"WORD5",
"WORD6",
"WORD7",
"WORD8",
"WORD9",
"WORD0",
]
self.assertListEqual(keywords, word_list)
def test_get_keyword_no_field(self):
data = {}
keywords = WosJsonParser.get_keyword(data)
self.assertEqual(keywords, [])
def test_get_keyword_no_keyword_plus(self):
data = {"static_data": {"fullrecord_metadata": {"keywords": {"keyword": []}}}}
keywords = WosJsonParser.get_keyword(data)
self.assertEqual(keywords, [])
def test_get_conference(self):
"""Extract conference name"""
data = self.data[0]
conf = WosJsonParser.get_conference(data)
name = "Annual Jupiter Meeting of the Minds"
self.assertEqual(len(conf), 1)
self.assertEqual(conf[0]["name"], name)
self.assertEqual(conf[0]["id"], 12345)
def test_get_conference_no_field(self):
data = {}
conf = WosJsonParser.get_conference(data)
self.assertEqual(conf, [])
def test_get_conference_no_confid(self):
data = {"static_data": {"summary": {"conferences": {"conference": [{}]}}}}
conf = WosJsonParser.get_conference(data)
self.assertEqual(conf, [{"id": None, "name": None}])
def test_get_fund_ack(self):
"""Extract funding information"""
data = self.data[0]
fund_ack = WosJsonParser.get_fund_ack(data)
truth = "The authors would like to thank all life in the universe for not making us extinct yet."
self.assertEqual(len(fund_ack["text"]), 1)
self.assertEqual(fund_ack["text"][0], truth)
self.assertEqual(len(fund_ack["grants"]), 1)
self.assertEqual(fund_ack["grants"][0]["agency"], "Jupiter research council")
self.assertEqual(len(fund_ack["grants"][0]["ids"]), 1)
self.assertEqual(fund_ack["grants"][0]["ids"][0], "JP00000000HT1")
def test_get_fund_ack_no_fund_text(self):
data = {"static_data": {"fullrecord_metadata": {"fund_ack": {}}}}
fund_ack = WosJsonParser.get_fund_ack(data)
self.assertEqual(fund_ack, {"grants": [], "text": []})
def test_get_fund_ack_no_fund_ack(self):
data = {"static_data": {"fullrecord_metadata": {}}}
fund_ack = WosJsonParser.get_fund_ack(data)
self.assertEqual(fund_ack, {"grants": [], "text": []})
def test_get_fund_ack_no_grantid(self):
data = {
"static_data": {"fullrecord_metadata": {"fund_ack": {"grants": {"grant": [{"grant_agency": "agency"}]}}}}
}
fund_ack = WosJsonParser.get_fund_ack(data)
self.assertEqual(fund_ack, {"grants": [{"agency": "agency", "ids": []}], "text": []})
def test_get_categories(self):
"""Extract WoS categories"""
data = self.data[0]
categories = WosJsonParser.get_categories(data)
self.assertEqual(len(categories["headings"]), 1)
self.assertEqual(len(categories["subheadings"]), 1)
self.assertEqual(len(categories["subjects"]), 3)
self.assertEqual(categories["headings"][0], "Hynology")
self.assertEqual(categories["subheadings"][0], "Zoology")
self.assertDictEqual(
categories["subjects"][0], {"ascatype": "traditional", "code": "XX", "text": "Jupiter Toads"}
)
self.assertDictEqual(
categories["subjects"][1], {"ascatype": "traditional", "code": "X", "text": "Jupiter life"}
)
self.assertDictEqual(
categories["subjects"][2], {"ascatype": "extended", "code": None, "text": "Jupiter Science"}
)
def test_get_categories_no_fields(self):
data = {}
categories = WosJsonParser.get_categories(data)
self.assertEqual(categories, {})
def test_get_orgs(self):
"""Extract Wos organisations"""
data = self.data[0]
orgs = WosJsonParser.get_orgs(data)
self.assertEqual(len(orgs), 1)
self.assertEqual(orgs[0]["city"], "Springfield")
self.assertEqual(orgs[0]["state"], "SF")
self.assertEqual(orgs[0]["country"], "Jupiter")
self.assertEqual(orgs[0]["org_name"], "Generic University")
self.assertEqual(len(orgs[0]["suborgs"]), 2)
self.assertEqual(orgs[0]["suborgs"][0], "Centre of Excellence for Extraterrestrial Telepathic Studies")
self.assertEqual(orgs[0]["suborgs"][1], "Zoology")
self.assertEqual(len(orgs[0]["names"]), 3)
self.assertEqual(orgs[0]["names"][0]["first_name"], "Big Eaty")
self.assertEqual(orgs[0]["names"][0]["last_name"], "Snake")
self.assertEqual(orgs[0]["names"][0]["daisng_id"], "101010")
self.assertEqual(orgs[0]["names"][0]["full_name"], "Snake, Big Eaty")
self.assertEqual(orgs[0]["names"][0]["wos_standard"], "Snake, BE")
self.assertEqual(orgs[0]["names"][1]["first_name"], "Hypno")
self.assertEqual(orgs[0]["names"][1]["last_name"], "Toad")
self.assertEqual(orgs[0]["names"][1]["daisng_id"], "100000")
self.assertEqual(orgs[0]["names"][1]["full_name"], "Toad, Hypno")
self.assertEqual(orgs[0]["names"][1]["wos_standard"], "Toad, H")
self.assertEqual(orgs[0]["names"][2]["first_name"], "Great")
self.assertEqual(orgs[0]["names"][2]["last_name"], "Historian")
self.assertEqual(orgs[0]["names"][2]["daisng_id"], "200000")
self.assertEqual(orgs[0]["names"][2]["full_name"], "Historian, Great")
self.assertEqual(orgs[0]["names"][2]["wos_standard"], "Historian, G")
def test_get_orgs_no_addr(self):
data = {"static_data": {"fullrecord_metadata": {"addresses": {"address_name": [{"address_spec": {}}]}}}}
orgs = WosJsonParser.get_orgs(data)
self.assertEqual(orgs, [{"city": None, "country": None, "state": None}])
def test_get_orgs_no_field(self):
data = {"static_data": {}}
orgs = WosJsonParser.get_orgs(data)
self.assertEqual(orgs, [])
def test_get_orgs_no_orgs(self):
data = {
"static_data": {
"fullrecord_metadata": {
"addresses": {"address_name": [{"address_spec": {"organizations": {"organization": []}}}]}
}
}
}
orgs = WosJsonParser.get_orgs(data)
self.assertEqual(orgs, [{"city": None, "country": None, "org_name": None, "state": None}])
def test_parse_json(self):
"""Test whether the json file can be parsed into fields correctly."""
self.assertEqual(len(self.data), 1)
entry = self.data[0]
wos_inst_id = ["Generic University"]
entry = WosJsonParser.parse_json(
data=entry,
harvest_datetime=self.harvest_datetime,
release_date=self.release_date,
institution_ids=wos_inst_id,
)
self.assertEqual(entry["harvest_datetime"], self.harvest_datetime)
self.assertEqual(entry["release_date"], self.release_date)
self.assertEqual(entry["identifiers"]["uid"], "WOS:000000000000000")
self.assertEqual(entry["pub_info"]["pub_type"], "Journal")
self.assertEqual(
entry["title"],
"The habitats of endangered hypnotoads on the southern oceans of Europa: a Ophiophagus hannah perspective",
)
self.assertEqual(entry["names"][0]["first_name"], "Big Eaty")
self.assertEqual(entry["languages"][0]["name"], "Mindwaves")
self.assertEqual(entry["ref_count"], 10000)
self.assertEqual(len(entry["abstract"][0]), 169)
self.assertEqual(len(entry["keywords"]), 15)
self.assertEqual(len(entry["conferences"]), 1)
self.assertEqual(entry["fund_ack"]["grants"][0]["ids"][0], "JP00000000HT1")
self.assertEqual(entry["categories"]["headings"][0], "Hynology")
self.assertEqual(len(entry["orgs"]), 1)
class MockApiResponse:
def __init__(self, file):
fixture_dir = test_fixtures_folder("web_of_science")
api_response_file = os.path.join(fixture_dir, file)
with open(api_response_file, "r") as f:
self.records = f.read()
self.recordsFound = "1"
class TestWebOfScienceTelescope(ObservatoryTestCase):
"""Test the WebOfScienceTelescope."""
def __init__(self, *args, **kwargs):
"""Constructor which sets up variables used by tests.
:param args: arguments.
:param kwargs: keyword arguments.
"""
super(TestWebOfScienceTelescope, self).__init__(*args, **kwargs)
self.project_id = os.getenv("TEST_GCP_PROJECT_ID")
self.host = "localhost"
self.api_port = 5000
self.data_location = "us"
self.org_name = "Curtin University"
self.conn_id = "web_of_science_curtin_university"
self.earliest_date = pendulum.datetime(2021, 1, 1).isoformat()
def setup_api(self, env, extra=None):
dt = pendulum.now("UTC")
if extra is None:
extra = {
"airflow_connections": [self.conn_id],
"institution_ids": ["Curtin University"],
"earliest_date": self.earliest_date,
}
name = "Web of Science Telescope"
telescope_type = orm.TelescopeType(name=name, type_id=WebOfScienceTelescope.DAG_ID, created=dt, modified=dt)
env.api_session.add(telescope_type)
organisation = orm.Organisation(
name=self.org_name,
created=dt,
modified=dt,
gcp_project_id=self.project_id,
gcp_download_bucket=env.download_bucket,
gcp_transform_bucket=env.transform_bucket,
)
env.api_session.add(organisation)
telescope = orm.Telescope(
name=name,
telescope_type=telescope_type,
organisation=organisation,
modified=dt,
created=dt,
extra=extra,
)
env.api_session.add(telescope)
env.api_session.commit()
def setup_connections(self, env):
# Add Observatory API connection
conn = Connection(conn_id=AirflowConns.OBSERVATORY_API, uri=f"http://:password@{self.host}:{self.api_port}")
env.add_connection(conn)
# Add login/pass connection
conn = Connection(conn_id=self.conn_id, uri="http://login:password@localhost")
env.add_connection(conn)
def get_telescope(self, dataset_id):
api = make_observatory_api()
telescope_type = api.get_telescope_type(type_id=WebOfScienceTelescope.DAG_ID)
telescopes = api.get_telescopes(telescope_type_id=telescope_type.id, limit=1000)
self.assertEqual(len(telescopes), 1)
dag_id = make_dag_id(WebOfScienceTelescope.DAG_ID, telescopes[0].organisation.name)
airflow_conns = telescopes[0].extra.get("airflow_connections")
institution_ids = telescopes[0].extra.get("institution_ids")
earliest_date_str = telescopes[0].extra.get("earliest_date")
earliest_date = pendulum.parse(earliest_date_str)
airflow_vars = [
AirflowVars.DATA_PATH,
AirflowVars.DATA_LOCATION,
]
telescope = WebOfScienceTelescope(
dag_id=dag_id,
dataset_id=dataset_id,
airflow_conns=airflow_conns,
airflow_vars=airflow_vars,
institution_ids=institution_ids,
earliest_date=earliest_date,
)
return telescope
def test_ctor(self):
self.assertRaises(
AirflowException,
WebOfScienceTelescope,
dag_id="dag",
dataset_id="dataset",
airflow_conns=[],
airflow_vars=[],
institution_ids=[],
)
self.assertRaises(
AirflowException,
WebOfScienceTelescope,
dag_id="dag",
dataset_id="dataset",
airflow_conns=["conn"],
airflow_vars=[],
institution_ids=[],
)
def test_dag_structure(self):
"""Test that the Crossref Events DAG has the correct structure."""
telescope = WebOfScienceTelescope(
dag_id="web_of_science", airflow_conns=["conn"], airflow_vars=[], institution_ids=["123"]
)
dag = telescope.make_dag()
self.assert_dag_structure(
{
"check_dependencies": ["download"],
"download": ["upload_downloaded"],
"upload_downloaded": ["transform"],
"transform": ["upload_transformed"],
"upload_transformed": ["bq_load"],
"bq_load": ["cleanup"],
"cleanup": [],
},
dag,
)
def test_dag_load(self):
"""Test that the DAG can be loaded from a DAG bag."""
env = ObservatoryEnvironment(self.project_id, self.data_location, api_host=self.host, api_port=self.api_port)
with env.create():
self.setup_connections(env)
self.setup_api(env)
dag_file = os.path.join(
module_file_path("academic_observatory_workflows.dags"), "web_of_science_telescope.py"
)
dag_id = make_dag_id(WebOfScienceTelescope.DAG_ID, self.org_name)
self.assert_dag_load(dag_id, dag_file)
def test_dag_load_missing_params(self):
"""Make sure an exception is thrown if essential parameters are missing."""
env = ObservatoryEnvironment(self.project_id, self.data_location, api_host=self.host, api_port=self.api_port)
with env.create():
self.setup_connections(env)
extra = {"airflow_connections": [self.conn_id]}
self.setup_api(env, extra=extra)
dag_file = os.path.join(
module_file_path("academic_observatory_workflows.dags"), "web_of_science_telescope.py"
)
dag_id = make_dag_id(WebOfScienceTelescope.DAG_ID, self.org_name)
self.assertRaises(AssertionError, self.assert_dag_load, dag_id, dag_file)
def test_telescope_bad_schema(self):
env = ObservatoryEnvironment(self.project_id, self.data_location, api_host=self.host, api_port=self.api_port)
bad_api_response = MockApiResponse("api_response_diff_schema.xml")
with env.create(task_logging=True):
self.setup_connections(env)
self.setup_api(env)
dataset_id = env.add_dataset()
execution_date = pendulum.datetime(2021, 1, 1)
telescope = self.get_telescope(dataset_id)
dag = telescope.make_dag()
release_date = pendulum.datetime(2021, 2, 1)
release = WebOfScienceRelease(
dag_id=make_dag_id(WebOfScienceTelescope.DAG_ID, self.org_name),
release_date=release_date,
login="login",
password="pass",
institution_ids=["Curtin University"],
earliest_date=pendulum.datetime(2021, 1, 1),
)
with env.create_dag_run(dag, execution_date):
# check dependencies
ti = env.run_task(telescope.check_dependencies.__name__)
self.assertEqual(ti.state, State.SUCCESS)
# download
with patch(
"academic_observatory_workflows.workflows.web_of_science_telescope.WosUtility.search"
) as m_search:
with patch(
"academic_observatory_workflows.workflows.web_of_science_telescope.WosClient"
) as m_client:
m_client.return_value.__enter__.return_value.name = MagicMock()
m_search.return_value = bad_api_response
ti = env.run_task(telescope.download.__name__)
self.assertEqual(ti.state, State.SUCCESS)
self.assertEqual(len(release.download_files), 2)
# upload_downloaded
ti = env.run_task(telescope.upload_downloaded.__name__)
self.assertEqual(ti.state, State.SUCCESS)
self.assert_blob_integrity(
env.download_bucket, blob_name(release.download_files[0]), release.download_files[0]
)
# transform
self.assertRaises(AirflowException, env.run_task, telescope.transform.__name__)
def test_telescope(self):
env = ObservatoryEnvironment(self.project_id, self.data_location, api_host=self.host, api_port=self.api_port)
api_response = MockApiResponse("api_response.xml")
with env.create():
self.setup_connections(env)
self.setup_api(env)
dataset_id = env.add_dataset()
execution_date = pendulum.datetime(2021, 1, 1)
telescope = self.get_telescope(dataset_id)
dag = telescope.make_dag()
release_date = pendulum.datetime(2021, 2, 1)
release = WebOfScienceRelease(
dag_id=make_dag_id(WebOfScienceTelescope.DAG_ID, self.org_name),
release_date=release_date,
login="login",
password="pass",
institution_ids=["Curtin University"],
earliest_date=pendulum.datetime(2021, 1, 1),
)
with env.create_dag_run(dag, execution_date):
# check dependencies
ti = env.run_task(telescope.check_dependencies.__name__)
self.assertEqual(ti.state, State.SUCCESS)
# download
with patch(
"academic_observatory_workflows.workflows.web_of_science_telescope.WosUtility.search"
) as m_search:
with patch(
"academic_observatory_workflows.workflows.web_of_science_telescope.WosClient"
) as m_client:
m_client.return_value.__enter__.return_value.name = MagicMock()
m_search.return_value = api_response
ti = env.run_task(telescope.download.__name__)
self.assertEqual(ti.state, State.SUCCESS)
self.assertEqual(len(release.download_files), 2)
# upload_downloaded
ti = env.run_task(telescope.upload_downloaded.__name__)
self.assertEqual(ti.state, State.SUCCESS)
self.assert_blob_integrity(
env.download_bucket, blob_name(release.download_files[0]), release.download_files[0]
)
# transform
ti = env.run_task(telescope.transform.__name__)
self.assertEqual(ti.state, State.SUCCESS)
# upload_transformed
ti = env.run_task(telescope.upload_transformed.__name__)
self.assertEqual(ti.state, State.SUCCESS)
for file in release.transform_files:
self.assert_blob_integrity(env.transform_bucket, blob_name(file), file)
# bq_load
ti = env.run_task(telescope.bq_load.__name__)
self.assertEqual(ti.state, State.SUCCESS)
table_id = (
f"{self.project_id}.{dataset_id}."
f"{bigquery_sharded_table_id(WebOfScienceTelescope.DAG_ID, release.release_date)}"
)
expected_rows = 2
self.assert_table_integrity(table_id, expected_rows)
# Sample some fields to check in the first row
sql = f"SELECT * FROM {self.project_id}.{dataset_id}.web_of_science20210201"
with patch("observatory.platform.utils.gc_utils.bq_query_bytes_daily_limit_check"):
records = list(run_bigquery_query(sql))
self.assertEqual(records[0]["abstract"], [])
self.assertEqual(records[0]["ref_count"], 1)
self.assertEqual(records[0]["harvest_datetime"].strftime("%Y%m%d"), "20210201")
self.assertEqual(records[0]["title"], "Fake title")
self.assertEqual(records[0]["keywords"], [])
self.assertEqual(records[0]["release_date"].strftime("%Y%m%d"), "20210201")
self.assertEqual(records[0]["institution_ids"], ["Curtin University"])
# cleanup
download_folder, extract_folder, transform_folder = (
release.download_folder,
release.extract_folder,
release.transform_folder,
)
env.run_task(telescope.cleanup.__name__)
self.assertEqual(ti.state, State.SUCCESS)
self.assert_cleanup(download_folder, extract_folder, transform_folder)
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,260,426
|
The-Academic-Observatory/academic-observatory-workflows
|
refs/heads/main
|
/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py
|
# Copyright 2022 Curtin University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Aniek Roelofs
import gzip
import io
import json
import os
from datetime import timedelta
from subprocess import Popen
from unittest.mock import Mock, call, patch
import pendulum
from airflow.exceptions import AirflowException, AirflowSkipException
from airflow.models.connection import Connection
from botocore.response import StreamingBody
from click.testing import CliRunner
from observatory.platform.utils.gc_utils import (
upload_file_to_cloud_storage,
)
from observatory.platform.utils.jinja2_utils import render_template
from observatory.platform.utils.test_utils import (
ObservatoryEnvironment,
ObservatoryTestCase,
module_file_path,
)
from academic_observatory_workflows.config import test_fixtures_folder
from academic_observatory_workflows.workflows.openalex_telescope import (
OpenAlexRelease,
OpenAlexTelescope,
run_subprocess_cmd,
transform_file,
transform_object,
)
class TestOpenAlexTelescope(ObservatoryTestCase):
"""Tests for the OpenAlex telescope"""
def __init__(self, *args, **kwargs):
"""Constructor which sets up variables used by tests.
:param args: arguments.
:param kwargs: keyword arguments.
"""
super(TestOpenAlexTelescope, self).__init__(*args, **kwargs)
self.project_id = os.getenv("TEST_GCP_PROJECT_ID")
self.data_location = os.getenv("TEST_GCP_DATA_LOCATION")
self.manifest_obj_path = test_fixtures_folder("openalex", "manifest_object.json.jinja2")
self.entities = {
"authors": {
"download_path": test_fixtures_folder("openalex", "authors.jsonl"),
"bucket": "transform_bucket",
},
"concepts": {
"download_path": test_fixtures_folder("openalex", "concepts.jsonl"),
"bucket": "download_bucket",
"download_hash": "14bd0919",
"transform_hash": "4bb6fe07",
},
"institutions": {
"download_path": test_fixtures_folder("openalex", "institutions.jsonl"),
"bucket": "download_bucket",
"download_hash": "b23bb91c",
"transform_hash": "a9cfff73",
},
"venues": {
"download_path": test_fixtures_folder("openalex", "venues.jsonl"),
"bucket": "transform_bucket",
},
"works": {
"download_path": test_fixtures_folder("openalex", "works.jsonl"),
"bucket": "download_bucket",
"download_hash": "806d7995",
"transform_hash": "0a783ffc",
},
}
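# concepts, institutions and works carry download/transform hashes and land in the
# download bucket because they are transformed locally; authors and venues are
# expected to be transferred straight to the transform bucket unchanged.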
self.table_bytes = {
"Author": 3965,
"Author_partitions": 3965,
"Concept": 3947,
"Concept_partitions": 3947,
"Institution": 3259,
"Institution_partitions": 3259,
"Venue": 2108,
"Venue_partitions": 2108,
"Work": 11804,
"Work_partitions": 11804,
}
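# Expected fixtures for two consecutive DAG runs: the manifest date selects the
# updated_date=... prefixes to transfer, and the MD5 hashes pin the transfer-manifest
# files for the first (full) and second (incremental) releases.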
self.first_run = {
"execution_date": pendulum.datetime(year=2022, month=1, day=1),
"manifest_date": "2021-12-17",
"manifest_download_hash": "9ab1f7c9eb0adbdaf07baaf8b97a110e",
"manifest_transform_hash": "6400ca22b963599af6bad9db030fe11a",
}
self.second_run = {
"execution_date": pendulum.datetime(year=2022, month=2, day=1),
"manifest_date": "2022-01-17",
"manifest_download_hash": "f4cea919d06caa0811ad5976bf98986a",
"manifest_transform_hash": "50e2eff06007a32c4394df8df7f5e907",
}
def test_dag_structure(self):
"""Test that the OpenAlex DAG has the correct structure.
:return: None
"""
dag = OpenAlexTelescope().make_dag()
self.assert_dag_structure(
{
"check_dependencies": ["write_transfer_manifest"],
"write_transfer_manifest": ["transfer"],
"transfer": ["download_transferred"],
"download_transferred": ["transform"],
"transform": ["upload_transformed"],
"upload_transformed": ["bq_load_partition"],
"bq_load_partition": ["bq_delete_old"],
"bq_delete_old": ["bq_append_new"],
"bq_append_new": ["cleanup"],
"cleanup": [],
},
dag,
)
def test_dag_load(self):
"""Test that the OpenAlex DAG can be loaded from a DAG bag.
:return: None
"""
with ObservatoryEnvironment().create():
dag_file = os.path.join(module_file_path("academic_observatory_workflows.dags"), "openalex_telescope.py")
self.assert_dag_load("openalex", dag_file)
@patch("academic_observatory_workflows.workflows.openalex_telescope.aws_to_google_cloud_storage_transfer")
@patch("academic_observatory_workflows.workflows.openalex_telescope.boto3.client")
def test_telescope(self, mock_client, mock_transfer):
"""Test the OpenAlex telescope end to end.
:return: None.
"""
# Setup Observatory environment
env = ObservatoryEnvironment(self.project_id, self.data_location)
dataset_id = env.add_dataset()
# Setup Telescope
telescope = OpenAlexTelescope(dataset_id=dataset_id)
dag = telescope.make_dag()
# Create the Observatory environment and run tests
with env.create():
# Add connection
conn = Connection(
conn_id=OpenAlexTelescope.AIRFLOW_CONN_AWS, uri="aws://UWLA41aAhdja:AJLD91saAJSKAL0AjAhkaka@"
)
env.add_connection(conn)
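# The aws:// URI stores the AWS key id and secret as the connection's login and
# password; the transfer assertions below verify they are forwarded unchanged.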
run = self.first_run
with env.create_dag_run(dag, run["execution_date"]) as dag_run:
# Test that all dependencies are specified: no error should be thrown
env.run_task(telescope.check_dependencies.__name__)
start_date, end_date, first_release = telescope.get_release_info(
next_execution_date=pendulum.today("UTC"),
dag=dag,
dag_run=dag_run,
)
self.assertEqual(dag.default_args["start_date"], start_date)
self.assertEqual(pendulum.today("UTC") - timedelta(days=1), end_date)
self.assertTrue(first_release)
# Use release info for other tasks
release = OpenAlexRelease(
telescope.dag_id,
start_date,
end_date,
first_release,
max_processes=1,
)
# Mock the response of get_object on the last_modified file, mocking the lambda file
side_effect = []
for entity in self.entities:
manifest_content = render_template(
self.manifest_obj_path, entity=entity, date=run["manifest_date"]
).encode()
side_effect.append({"Body": StreamingBody(io.BytesIO(manifest_content), len(manifest_content))})
mock_client().get_object.side_effect = side_effect
# Test write transfer manifest task
env.run_task(telescope.write_transfer_manifest.__name__)
self.assert_file_integrity(
release.transfer_manifest_path_download, run["manifest_download_hash"], "md5"
)
self.assert_file_integrity(
release.transfer_manifest_path_transform, run["manifest_transform_hash"], "md5"
)
# Test transfer task
mock_transfer.reset_mock()
mock_transfer.return_value = True, 2
env.run_task(telescope.transfer.__name__)
self.assertEqual(2, mock_transfer.call_count)
try:
self.assertTupleEqual(mock_transfer.call_args_list[0][0], (conn.login, conn.password))
self.assertTupleEqual(mock_transfer.call_args_list[1][0], (conn.login, conn.password))
except AssertionError:
raise AssertionError("AWS key id and secret not passed correctly to transfer function")
self.assertDictEqual(
mock_transfer.call_args_list[0][1],
{
"aws_bucket": OpenAlexTelescope.AWS_BUCKET,
"include_prefixes": [
f"data/concepts/updated_date={run['manifest_date']}/0000_part_00.gz",
f"data/institutions/updated_date={run['manifest_date']}/0000_part_00.gz",
f"data/works/updated_date={run['manifest_date']}/0000_part_00.gz",
],
"gc_project_id": self.project_id,
"gc_bucket": release.download_bucket,
"gc_bucket_path": f"telescopes/{release.dag_id}/{release.release_id}/",
"description": f"Transfer OpenAlex data from Airflow telescope to {release.download_bucket}",
},
)
self.assertDictEqual(
mock_transfer.call_args_list[1][1],
{
"aws_bucket": OpenAlexTelescope.AWS_BUCKET,
"include_prefixes": [
f"data/authors/updated_date={run['manifest_date']}/0000_part_00.gz",
f"data/venues/updated_date={run['manifest_date']}/0000_part_00.gz",
],
"gc_project_id": self.project_id,
"gc_bucket": release.transform_bucket,
"gc_bucket_path": f"telescopes/{release.dag_id}/{release.release_id}/",
"description": f"Transfer OpenAlex data from Airflow telescope to {release.transform_bucket}",
},
)
# Upload files to bucket, to mock transfer
for entity, info in self.entities.items():
blob = f"telescopes/{release.dag_id}/{release.release_id}/data/{entity}/updated_date={run['manifest_date']}/0000_part_00.gz"
gzip_path = f"{entity}.jsonl.gz"
with open(info["download_path"], "rb") as f_in, gzip.open(gzip_path, "wb") as f_out:
f_out.writelines(f_in)
upload_file_to_cloud_storage(getattr(release, info["bucket"]), blob, gzip_path)
# Test that file was downloaded
env.run_task(telescope.download_transferred.__name__)
self.assertEqual(3, len(release.download_files))
for file in release.download_files:
entity = file.split("/")[-3]
self.assert_file_integrity(file, self.entities[entity]["download_hash"], "gzip_crc")
# Test that files transformed
env.run_task(telescope.transform.__name__)
self.assertEqual(3, len(release.transform_files))
# Sort lines so that gzip crc is always the same
for file in release.transform_files:
entity = file.split("/")[-3]
with gzip.open(file, "rb") as f_in:
lines = sorted(f_in.readlines())
with gzip.open(file, "wb") as f_out:
f_out.writelines(lines)
self.assert_file_integrity(file, self.entities[entity]["transform_hash"], "gzip_crc")
# Test that transformed files uploaded
env.run_task(telescope.upload_transformed.__name__)
for entity, info in self.entities.items():
if entity in ["concepts", "institutions", "works"]:
file = [file for file in release.transform_files if entity in file][0]
else:
file = f"{entity}.jsonl.gz"
blob = f"telescopes/{release.dag_id}/{release.release_id}/data/{entity}/updated_date={run['manifest_date']}/0000_part_00.gz"
self.assert_blob_integrity(env.transform_bucket, blob, file)
# Get bq load info for BQ tasks
bq_load_info = telescope.get_bq_load_info(release)
# Test that load partition task is skipped for the first release
ti = env.run_task(telescope.bq_load_partition.__name__)
self.assertEqual(ti.state, "skipped")
# Test delete old task is skipped for the first release
with patch("observatory.platform.utils.gc_utils.bq_query_bytes_daily_limit_check"):
ti = env.run_task(telescope.bq_delete_old.__name__)
self.assertEqual(ti.state, "skipped")
# Test append new creates table
env.run_task(telescope.bq_append_new.__name__)
for _, main_table_id, _ in bq_load_info:
table_id = f"{self.project_id}.{telescope.dataset_id}.{main_table_id}"
expected_bytes = self.table_bytes[main_table_id]
self.assert_table_bytes(table_id, expected_bytes)
# Test that all telescope data deleted
download_folder, extract_folder, transform_folder = (
release.download_folder,
release.extract_folder,
release.transform_folder,
)
env.run_task(telescope.cleanup.__name__)
self.assert_cleanup(download_folder, extract_folder, transform_folder)
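# Second run: an incremental release (first_release is False), so bq_load_partition
# and bq_delete_old are expected to run instead of being skipped.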
run = self.second_run
with env.create_dag_run(dag, run["execution_date"]) as dag_run:
# Test that all dependencies are specified: no error should be thrown
env.run_task(telescope.check_dependencies.__name__)
start_date, end_date, first_release = telescope.get_release_info(
next_execution_date=pendulum.today("UTC"),
dag=dag,
dag_run=dag_run,
)
self.assertEqual(release.end_date + timedelta(days=1), start_date)
self.assertEqual(pendulum.today("UTC") - timedelta(days=1), end_date)
self.assertFalse(first_release)
# Use release info for other tasks
release = OpenAlexRelease(
telescope.dag_id,
start_date,
end_date,
first_release,
max_processes=1,
)
# Mock the response of get_object on the last_modified file, mocking the lambda file
side_effect = []
for entity in self.entities:
manifest_content = render_template(
self.manifest_obj_path, entity=entity, date=run["manifest_date"]
).encode()
side_effect.append({"Body": StreamingBody(io.BytesIO(manifest_content), len(manifest_content))})
mock_client().get_object.side_effect = side_effect
# Test write transfer manifest task
env.run_task(telescope.write_transfer_manifest.__name__)
self.assert_file_integrity(
release.transfer_manifest_path_download, run["manifest_download_hash"], "md5"
)
self.assert_file_integrity(
release.transfer_manifest_path_transform, run["manifest_transform_hash"], "md5"
)
# Test transfer task
mock_transfer.reset_mock()
mock_transfer.return_value = True, 2
env.run_task(telescope.transfer.__name__)
self.assertEqual(2, mock_transfer.call_count)
try:
self.assertTupleEqual(mock_transfer.call_args_list[0][0], (conn.login, conn.password))
self.assertTupleEqual(mock_transfer.call_args_list[1][0], (conn.login, conn.password))
except AssertionError:
raise AssertionError("AWS key id and secret not passed correctly to transfer function")
self.assertDictEqual(
mock_transfer.call_args_list[0][1],
{
"aws_bucket": OpenAlexTelescope.AWS_BUCKET,
"include_prefixes": [
f"data/concepts/updated_date={run['manifest_date']}/0000_part_00.gz",
f"data/institutions/updated_date={run['manifest_date']}/0000_part_00.gz",
f"data/works/updated_date={run['manifest_date']}/0000_part_00.gz",
],
"gc_project_id": self.project_id,
"gc_bucket": release.download_bucket,
"gc_bucket_path": f"telescopes/{release.dag_id}/{release.release_id}/",
"description": f"Transfer OpenAlex data from Airflow telescope to {release.download_bucket}",
},
)
self.assertDictEqual(
mock_transfer.call_args_list[1][1],
{
"aws_bucket": OpenAlexTelescope.AWS_BUCKET,
"include_prefixes": [
f"data/authors/updated_date={run['manifest_date']}/0000_part_00.gz",
f"data/venues/updated_date={run['manifest_date']}/0000_part_00.gz",
],
"gc_project_id": self.project_id,
"gc_bucket": release.transform_bucket,
"gc_bucket_path": f"telescopes/{release.dag_id}/{release.release_id}/",
"description": f"Transfer OpenAlex data from Airflow telescope to {release.transform_bucket}",
},
)
# Upload files to bucket, to mock transfer
for entity, info in self.entities.items():
blob = f"telescopes/{release.dag_id}/{release.release_id}/data/{entity}/updated_date={run['manifest_date']}/0000_part_00.gz"
gzip_path = f"{entity}.jsonl.gz"
with open(info["download_path"], "rb") as f_in, gzip.open(gzip_path, "wb") as f_out:
f_out.writelines(f_in)
upload_file_to_cloud_storage(getattr(release, info["bucket"]), blob, gzip_path)
# Test that file was downloaded
env.run_task(telescope.download_transferred.__name__)
self.assertEqual(3, len(release.download_files))
for file in release.download_files:
entity = file.split("/")[-3]
self.assert_file_integrity(file, self.entities[entity]["download_hash"], "gzip_crc")
# Test that files transformed
env.run_task(telescope.transform.__name__)
self.assertEqual(3, len(release.transform_files))
# Sort lines so that gzip crc is always the same
for file in release.transform_files:
entity = file.split("/")[-3]
with gzip.open(file, "rb") as f_in:
lines = sorted(f_in.readlines())
with gzip.open(file, "wb") as f_out:
f_out.writelines(lines)
self.assert_file_integrity(file, self.entities[entity]["transform_hash"], "gzip_crc")
# Test that transformed files uploaded
env.run_task(telescope.upload_transformed.__name__)
for entity, info in self.entities.items():
if entity in ["concepts", "institutions", "works"]:
file = [file for file in release.transform_files if entity in file][0]
else:
file = f"{entity}.jsonl.gz"
blob = f"telescopes/{release.dag_id}/{release.release_id}/data/{entity}/updated_date={run['manifest_date']}/0000_part_00.gz"
self.assert_blob_integrity(env.transform_bucket, blob, file)
# Get bq load info for BQ tasks
bq_load_info = telescope.get_bq_load_info(release)
# Test that partition is loaded
ti = env.run_task(telescope.bq_load_partition.__name__)
for _, _, partition_table_id in bq_load_info:
table_id = f"{self.project_id}.{telescope.dataset_id}.{partition_table_id}"
expected_bytes = self.table_bytes[partition_table_id]
self.assert_table_bytes(table_id, expected_bytes)
# Test that partition is deleted from main table
with patch("observatory.platform.utils.gc_utils.bq_query_bytes_daily_limit_check"):
ti = env.run_task(telescope.bq_delete_old.__name__)
for _, main_table_id, _ in bq_load_info:
table_id = f"{self.project_id}.{telescope.dataset_id}.{main_table_id}"
expected_bytes = 0
self.assert_table_bytes(table_id, expected_bytes)
# Test append new creates table
env.run_task(telescope.bq_append_new.__name__)
for _, main_table_id, _ in bq_load_info:
table_id = f"{self.project_id}.{telescope.dataset_id}.{main_table_id}"
expected_bytes = self.table_bytes[main_table_id]
self.assert_table_bytes(table_id, expected_bytes)
# Test that all telescope data deleted
download_folder, extract_folder, transform_folder = (
release.download_folder,
release.extract_folder,
release.transform_folder,
)
env.run_task(telescope.cleanup.__name__)
self.assert_cleanup(download_folder, extract_folder, transform_folder)
@patch("academic_observatory_workflows.workflows.openalex_telescope.boto3.client")
@patch("academic_observatory_workflows.workflows.openalex_telescope.get_aws_conn_info")
@patch("academic_observatory_workflows.workflows.openalex_telescope.Variable.get")
def test_write_transfer_manifest(self, mock_variable_get, mock_aws_info, mock_boto3):
"""Test write_transfer_manifest method of the OpenAlex release.
:param mock_variable_get: Mock Airflow Variable get() method
:param mock_boto3: Mock the boto3 client
:return: None.
"""
mock_variable_get.return_value = "data"
mock_aws_info.return_value = "key_id", "secret_key"
# Mock the response of get_object on the last_modified file, mocking the lambda file
side_effect = []
for _ in range(2):
for entity in self.entities:
manifest_content = render_template(self.manifest_obj_path, entity=entity, date="2022-01-01").encode()
side_effect.append({"Body": StreamingBody(io.BytesIO(manifest_content), len(manifest_content))})
mock_boto3().get_object.side_effect = side_effect
with CliRunner().isolated_filesystem():
# Test with entries in manifest objects that are after start date
start_date = pendulum.DateTime(2022, 1, 1, tzinfo=pendulum.tz.UTC)
end_date = pendulum.DateTime(2022, 2, 1, tzinfo=pendulum.tz.UTC)
release = OpenAlexRelease("dag_id", start_date, end_date, False, 1)
release.write_transfer_manifest()
self.assert_file_integrity(
release.transfer_manifest_path_download, "42fb45119bd34709001fd6c90a6ef60e", "md5"
)
self.assert_file_integrity(
release.transfer_manifest_path_transform, "fe8442cd31fec1c335379033afebc1ea", "md5"
)
# Test with entries in manifest objects that are before start date
start_date = pendulum.DateTime(2022, 3, 1, tzinfo=pendulum.tz.UTC)
end_date = pendulum.DateTime(2022, 4, 1, tzinfo=pendulum.tz.UTC)
release = OpenAlexRelease("dag_id", start_date, end_date, False, 1)
with self.assertRaises(AirflowSkipException):
release.write_transfer_manifest()
@patch("academic_observatory_workflows.workflows.openalex_telescope.aws_to_google_cloud_storage_transfer")
@patch("academic_observatory_workflows.workflows.openalex_telescope.get_aws_conn_info")
@patch("academic_observatory_workflows.workflows.openalex_telescope.Variable.get")
def test_transfer(self, mock_variable_get, mock_aws_info, mock_transfer):
"""Test transfer method of the OpenAlex release.
:param mock_variable_get: Mock Airflow Variable get() method
:param mock_aws_info: Mock getting AWS info
:param mock_transfer: Mock the transfer function called inside release.transfer()
:return: None.
"""
mock_variable_get.side_effect = lambda x: {
"download_bucket": "download-bucket",
"transform_bucket": "transform-bucket",
"project_id": "project_id",
"data_path": "data",
}[x]
mock_aws_info.return_value = "key_id", "secret_key"
mock_transfer.return_value = True, 3
with CliRunner().isolated_filesystem():
# Create release
start_date = pendulum.DateTime(2022, 1, 1)
end_date = pendulum.DateTime(2022, 2, 1)
release = OpenAlexRelease("dag_id", start_date, end_date, False, 1)
# Create transfer manifest files
with open(release.transfer_manifest_path_download, "w") as f:
f.write('"prefix1"\n"prefix2"\n')
with open(release.transfer_manifest_path_transform, "w") as f:
f.write("")
# Test successful transfer with prefixes for download, no prefixes for transform
release.transfer(max_retries=1)
mock_transfer.assert_called_once_with(
"key_id",
"secret_key",
aws_bucket=OpenAlexTelescope.AWS_BUCKET,
include_prefixes=["prefix1", "prefix2"],
gc_project_id="project_id",
gc_bucket="download-bucket",
gc_bucket_path="telescopes/dag_id/2022_01_01-2022_02_01/",
description="Transfer OpenAlex data from Airflow telescope to download-bucket",
)
mock_transfer.reset_mock()
# Test failed transfer
mock_transfer.return_value = False, 4
with self.assertRaises(AirflowException):
release.transfer(1)
@patch("academic_observatory_workflows.workflows.openalex_telescope.wait_for_process")
@patch("academic_observatory_workflows.workflows.openalex_telescope.logging.info")
def test_run_subprocess_cmd(self, mock_logging, mock_wait_for_proc):
"""Test the run_subprocess_cmd function.
:return: None.
"""
# Mock logging
mock_wait_for_proc.return_value = ("out", "err")
# Set up parameters
args = ["run", "unittest"]
proc = Mock(spec=Popen)
# Test when return code is 0
proc.returncode = 0
run_subprocess_cmd(proc, args)
expected_logs = ["Executing bash command: run unittest", "out", "err", "Finished cmd successfully"]
self.assertListEqual([call(log) for log in expected_logs], mock_logging.call_args_list)
# Test when return code is 1
proc.returncode = 1
with self.assertRaises(AirflowException):
run_subprocess_cmd(proc, args)
@patch("academic_observatory_workflows.workflows.openalex_telescope.transform_object")
def test_transform_file(self, mock_transform_object):
"""Test the transform_file function.
:return: None.
"""
mock_transform_object.return_value = {}
with CliRunner().isolated_filesystem():
transform_path = "transform/out.jsonl.gz"
# Create works entity file
works = {"works": "content"}
works_download_path = "works.jsonl.gz"
with gzip.open(works_download_path, "wt", encoding="ascii") as f_out:
json.dump(works, f_out)
# Create other entity file (concepts or institution)
concepts = {"concepts": "content"}
concepts_download_path = "concepts.jsonl.gz"
with gzip.open(concepts_download_path, "wt", encoding="ascii") as f_out:
json.dump(concepts, f_out)
# Test when dir of transform path does not exist yet, using 'works' entity
self.assertFalse(os.path.isdir(os.path.dirname(transform_path)))
transform_file(works_download_path, transform_path)
mock_transform_object.assert_called_once_with(works, "abstract_inverted_index")
mock_transform_object.reset_mock()
os.remove(transform_path)
# Test when dir of transform path does exist, using 'works' entity
self.assertTrue(os.path.isdir(os.path.dirname(transform_path)))
transform_file(works_download_path, transform_path)
self.assert_file_integrity(transform_path, "682a6d42", "gzip_crc")
mock_transform_object.assert_called_once_with(works, "abstract_inverted_index")
mock_transform_object.reset_mock()
os.remove(transform_path)
# Test for "concepts" and "institution" entities
transform_file(concepts_download_path, transform_path)
self.assert_file_integrity(transform_path, "d8cafe16", "gzip_crc")
mock_transform_object.assert_called_once_with(concepts, "international")
def test_transform_object(self):
"""Test the transform_object function.
:return: None.
"""
# Test object with nested "international" fields
obj1 = {
"international": {
"display_name": {
"af": "Dokumentbestuurstelsel",
"fr": "type de logiciel",
"ro": "colecție organizată a documentelor",
}
}
}
transform_object(obj1, "international")
self.assertDictEqual(
{
"international": {
"display_name": {
"keys": ["af", "fr", "ro"],
"values": [
"Dokumentbestuurstelsel",
"type de logiciel",
"colecție organizată " "a documentelor",
],
}
}
},
obj1,
)
# Test object with nested "international" none
obj2 = {"international": {"display_name": None}}
transform_object(obj2, "international")
self.assertDictEqual({"international": {"display_name": None}}, obj2)
# Test object with nested "abstract_inverted_index" fields
obj3 = {
"abstract_inverted_index": {
"Malignant": [0],
"hyperthermia": [1],
"susceptibility": [2],
"(MHS)": [3],
"is": [4, 6],
"primarily": [5],
}
}
transform_object(obj3, "abstract_inverted_index")
self.assertDictEqual(
{
"abstract_inverted_index": {
"keys": ["Malignant", "hyperthermia", "susceptibility", "(MHS)", "is", "primarily"],
"values": ["0", "1", "2", "3", "4, 6", "5"],
}
},
obj3,
)
# Test object with nested "abstract_inverted_index" none
obj4 = {"abstract_inverted_index": None}
transform_object(obj4, "abstract_inverted_index")
self.assertDictEqual({"abstract_inverted_index": None}, obj4)
|
{"/academic_observatory_workflows/workflows/ror_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_geonames_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/geonames_telescope.py"], "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/tests/test_clearbit.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_oa_web_workflow.py": ["/academic_observatory_workflows/workflows/oa_web_workflow.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/tests/test_zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_open_citations_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/open_citations_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_openalex_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/openalex_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_fundref_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_fundref_telescope.py"], "/academic_observatory_workflows/workflows/pubmed_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_web_of_science_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_events_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_events_telescope.py"], "/academic_observatory_workflows/workflows/web_of_science_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_github.py": ["/academic_observatory_workflows/github.py"], "/academic_observatory_workflows/workflows/tests/test_pubmed_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/pubmed_telescope.py"], "/academic_observatory_workflows/workflows/geonames_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/open_citations_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_doi_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/model.py", "/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/tests/test_wikipedia.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/wikipedia.py"], "/docs/test_generate_csv.py": ["/docs/generate_schema_csv.py"], "/academic_observatory_workflows/workflows/scopus_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/doi_workflow.py": 
["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_crossref_metadata_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/crossref_metadata_telescope.py"], "/academic_observatory_workflows/workflows/crossref_events_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/oa_web_workflow.py": ["/academic_observatory_workflows/clearbit.py", "/academic_observatory_workflows/config.py", "/academic_observatory_workflows/github.py", "/academic_observatory_workflows/wikipedia.py", "/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/workflows/tests/test_ror_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/ror_telescope.py"], "/academic_observatory_workflows/workflows/openalex_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/tests/test_zenodo.py": ["/academic_observatory_workflows/zenodo.py"], "/academic_observatory_workflows/model.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_scopus_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/scopus_telescope.py"], "/academic_observatory_workflows/dags/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py"], "/academic_observatory_workflows/workflows/tests/test_unpaywall_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/unpaywall_telescope.py"], "/academic_observatory_workflows/workflows/unpaywall_snapshot_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_elastic_import_workflow.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/dags/elastic_import_workflow.py"], "/academic_observatory_workflows/dags/doi_workflow.py": ["/academic_observatory_workflows/workflows/doi_workflow.py"], "/academic_observatory_workflows/workflows/orcid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/dags/mag_telescope.py": ["/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/workflows/grid_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_mag_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/mag_telescope.py"], "/academic_observatory_workflows/dags/web_of_science_telescope.py": ["/academic_observatory_workflows/workflows/web_of_science_telescope.py"], "/academic_observatory_workflows/dags/elastic_import_workflow.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_grid_telescope.py": ["/academic_observatory_workflows/config.py", "/academic_observatory_workflows/workflows/grid_telescope.py"], "/academic_observatory_workflows/workflows/mag_telescope.py": ["/academic_observatory_workflows/config.py"], "/academic_observatory_workflows/workflows/tests/test_orcid_telescope.py": ["/academic_observatory_workflows/config.py", 
"/academic_observatory_workflows/workflows/orcid_telescope.py"]}
|
28,289,505
|
carolinaesteves/INF1301-Modular
|
refs/heads/master
|
/dice.py
|
# Versioning table for the Dice module
# Module developers: Victor Fróes, Ana Carolina Esteves, João Pedro Botelho
#
# Table based on the git log of the module's local repository
#
# Authors Victor Fróes, Ana Carolina Esteves, João Pedro Botelho on 25/09:
# created rollDice and showDice
import pygame
import random
def rollDice():
# randrange(1, 7) draws a uniform integer in [1, 6], i.e. one face of a six-sided die
dice = random.randrange(1, 7)
return dice
def showDice(dice):
print(dice)
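# Illustrative usage only (not part of the module): roll one die and print it
# showDice(rollDice())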
# load graphics
|
{"/main.py": ["/menu.py"], "/menu.py": ["/Jogador.py"], "/testes.py": ["/dice.py"]}
|
28,289,506
|
carolinaesteves/INF1301-Modular
|
refs/heads/master
|
/menu.py
|
import pygame
from pygame import mixer
import Jogador
def executaMenu():
# initialize the pygame library
pygame.init()
branco = (255, 255, 255)
verde = (0, 253, 0)
preto = (0, 0, 0)
AxL = (800, 600)
lY = -300
uY = -300
dY = -300
oY = -300
vol = 0.4
lisJogadores = []
# set up the initial screen
tela = pygame.display.set_mode(AxL)
# set the game's name in the window title
pygame.display.set_caption("Ludo")
# load images
l = pygame.image.load('logo-l.png')
u = pygame.image.load('logo-u.png')
d = pygame.image.load('logo-d.png')
o = pygame.image.load('logo-o.png')
ays = pygame.image.load('w-ays.png')
eb = pygame.image.load('eb....png')
sImg = pygame.image.load('sound.png')
nsImg = pygame.image.load('no-sound.png')
# load background music
mixer.music.load('gold-saucer-8bit.wav')
mixer.music.set_volume(vol)
# defines a generic button
class button():
def __init__(self, x, y, width, height, text=''):
self.x = x
self.y = y
self.width = width
self.height = height
self.text = text
# method that draws the button on the screen
def draw(self, win):
Img = pygame.image.load(self.text)
win.blit(Img, (self.x, self.y))
# method that detects whether the mouse is positioned over the button
def isOver(self, pos):
if pos[0] > self.x and pos[0] < self.x + self.width:
if pos[1] > self.y and pos[1] < self.y + self.height:
return True
return False
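# e.g. button(0, 0, 100, 50).isOver((10, 10)) is True, while
# button(0, 0, 100, 50).isOver((150, 10)) is False: a plain
# point-in-rectangle test against the mouse position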
def mov_l(l, x, y):
tela.blit(l, (x, y))
def willQuit():
active = True
while active:
tela.blit(ays, (240, 250))
botaoy.draw(tela)
botaon.draw(tela)
for event in pygame.event.get():
pos = pygame.mouse.get_pos()
# closes the game when the X is clicked
if event.type == pygame.QUIT:
pygame.quit()
exit()
if event.type == pygame.MOUSEBUTTONDOWN:
if botaoy.isOver(pos):
pygame.quit()
exit()
if botaon.isOver(pos):
active = False
tela.fill(preto)
show_logo()
if event.type == pygame.MOUSEMOTION:
if botaoy.isOver(pos):
botaoy.text = 'b-yes-m.png'
else:
botaoy.text = 'b-yes.png'
if botaon.isOver(pos):
botaon.text = 'b-no-m.png'
else:
botaon.text = 'b-no.png'
pygame.display.update()
def show_logo():
tela.blit(l, (110, lY))
tela.blit(u, (195, uY))
tela.blit(d, (270, dY))
tela.blit(o, (340, oY))
# creating the buttons
botaoP = button(330, 300, 130, 55, 'b-play.png')
botaoSc = button(330, 405, 130, 55, 'b-score.png')
botaoQ = button(330, 510, 130, 55, 'b-quit.png')
botaoy = button(260, 380, 130, 55, 'b-yes.png')
botaon = button(420, 380, 130, 55, 'b-no.png')
botaoSo = button(730, 540, 40, 40, 'sound.png')
# loops
intro = True
running = True
while intro:
Lisdown = False
Uisdown = False
Disdown = False
Oisdown = False
timer = pygame.time.get_ticks()
tela.fill(preto)
if lY < 40:
mov_l(l, 110, lY)
lY += 0.8
else:
Lisdown = True
if timer > 1300:
if uY < 40:
mov_l(u, 195, uY)
uY += 0.9
else:
Uisdown = True
if timer > 2100:
if dY < 50:
mov_l(d, 270, dY)
dY += 1.2
else:
Disdown = True
if timer > 3100:
if oY < 40:
mov_l(o, 340, oY)
oY += 1.4
else:
Oisdown = True
if Lisdown:
mov_l(l, 110, 40)
if Uisdown:
mov_l(u, 195, 40)
if Disdown:
mov_l(d, 270, 50)
if Oisdown:
mov_l(o, 340, 40)
for event in pygame.event.get():
# closes the game when the X is clicked
if event.type == pygame.QUIT:
pygame.quit()
exit()
pygame.display.update()
if timer > 4500:
intro = False
mixer.music.play(-1)
soundOn = True
while running:
botaoP.draw(tela)
botaoSc.draw(tela)
botaoQ.draw(tela)
botaoSo.draw(tela)
for event in pygame.event.get():
pos = pygame.mouse.get_pos()
# closes the game when the X is clicked
if event.type == pygame.QUIT:
pygame.quit()
exit()
if event.type == pygame.MOUSEBUTTONDOWN:
if botaoP.isOver(pos):
running = False
if botaoSc.isOver(pos):
tela.blit(eb, (475, 320))
if botaoQ.isOver(pos):
willQuit()
if botaoSo.isOver(pos):
if vol == 0.4:
vol = 0
mixer.music.set_volume(vol)
else:
vol = 0.4
mixer.music.set_volume(vol)
if event.type == pygame.MOUSEMOTION:
if botaoP.isOver(pos):
botaoP.text = 'b-play-m.png'
else:
botaoP.text = 'b-play.png'
if botaoSc.isOver(pos):
botaoSc.text = 'b-score-m.png'
else:
botaoSc.text = 'b-score.png'
if botaoQ.isOver(pos):
botaoQ.text = 'b-quit-m.png'
else:
botaoQ.text = 'b-quit.png'
pygame.display.update()
lisJogadores = Jogador.retorna_jogadores(tela)
return lisJogadores
|
{"/main.py": ["/menu.py"], "/menu.py": ["/Jogador.py"], "/testes.py": ["/dice.py"]}
|
28,289,507
|
carolinaesteves/INF1301-Modular
|
refs/heads/master
|
/Jogador.py
|
# Version table for the Jogador (Player) module
# Module developer: João Pedro Botelho
#
# Table based on the git log of the module's local repository
#
# Author João Pedro Botelho, on 01/10:
# creates initial input version with tkinter
#
# Author João Pedro Botelho, on 10/10:
# creates first version with pygame
#
# Author João Pedro Botelho, on 15/10:
# fixes select_number
#
# Author João Pedro Botelho, on 17/10:
# finishes input_name with graphical interface
import pygame
from pygame import font
def retorna_jogadores(screen):
pygame.init()
COLOR_INACTIVE = pygame.Color(26,35,126)
COLOR_ACTIVE = pygame.Color(255,17,17)
FONT = pygame.font.Font("8bit2.TTF", 26)
FONTnum = pygame.font.Font("8bit.TTF", 50)
FONTletras = pygame.font.Font("8bit2.TTF", 32)
branco = (255,255,255)
madeira=(165,128,100)
verde = (0, 255, 0)
trigo = (216,216,191)
def select_number():
done = False
screen.fill((0, 0, 0))
# Writing the title question on the screen
text = FONTletras.render('Qual o numero de jogadores?', True, verde)
screen.blit(text, (100, 100))
# Drawing the number 2 and its border on the screen to act as a button
rect2 = pygame.Rect(250, 300, 40, 40)
text2 = '2'
txt_surface2 = FONTnum.render(text2, True, trigo)
screen.blit(txt_surface2, (260, 305))
pygame.draw.rect(screen, trigo, rect2, 2)
# Drawing the number 3 and its border on the screen to act as a button
rect3 = pygame.Rect(350, 300, 40, 40)
text3 = '3'
txt_surface3 = FONTnum.render(text3, True, trigo)
screen.blit(txt_surface3, (360, 305))
pygame.draw.rect(screen, trigo, rect3, 2)
# Drawing the number 4 and its border on the screen to act as a button
rect4 = pygame.Rect(450, 300, 40, 40)
text4 = '4'
txt_surface4 = FONTnum.render(text4, True, trigo)
screen.blit(txt_surface4, (460, 305))
pygame.draw.rect(screen, trigo, rect4, 2)
pygame.display.flip()
# Event-handling section: a click inside one of the borders acts as a button press, and closing the window quits the game
while not done:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
exit()
if event.type == pygame.MOUSEBUTTONDOWN:
if rect2.collidepoint(event.pos):
return int(text2)
elif rect3.collidepoint(event.pos):
return int(text3)
elif rect4.collidepoint(event.pos):
return int(text4)
# Class used for the players' name input box
class InputBox:
# InputBox initialization
def __init__(self, x, y, w, h, text=''):
self.rect = pygame.Rect(x, y, w, h)
self.color = COLOR_INACTIVE
self.text = text
self.txt_surface = FONT.render(text, True, self.color)
self.active = False
# InputBox event handling
def eventobox(self, event):
if event.type == pygame.MOUSEBUTTONDOWN:
# Clicking on the InputBox toggles its active state
if self.rect.collidepoint(event.pos):
self.active = not self.active
else:
self.active = False
# Color change depending on whether it is active or inactive
self.color = COLOR_ACTIVE if self.active else COLOR_INACTIVE
if event.type == pygame.KEYDOWN:
if self.active:
if event.key == pygame.K_BACKSPACE:
self.text = self.text[:-1]
else:
self.text += event.unicode
# Re-render the entered text
self.txt_surface = FONT.render(self.text, True, branco)
def update(self):
# Widen the input box if the name does not fit
width = max(300, self.txt_surface.get_width()+10)
self.rect.w = width
def draw(self, screen):
# Draw the text on the screen
screen.blit(self.txt_surface, (self.rect.x+5, self.rect.y+5))
# Draw the border on the screen
pygame.draw.rect(screen, self.color, self.rect, 2)
# Clears the input box after submit
def limpar(self):
self.text = ''
self.txt_surface = FONT.render(self.text, True, self.color)
self.active = False
def input_name():
# Call the function to find out how many players there are
quant = select_number()
# Declaring the input box
input_box1 = InputBox(100, 150, 300, 35)
input_boxes = [input_box1]
# Black screen
screen.fill((0, 0, 0))
# Defining and drawing the Submit button
rect1 = pygame.Rect(100, 250, 140, 35)
text1 = 'Submit'
txt_surface1 = FONTletras.render(text1, True, madeira)
screen.blit(txt_surface1, (107, 255))
pygame.draw.rect(screen, COLOR_INACTIVE, rect1, 2)
# Defining the list that will hold the player count and the names; the count goes in first
lista = [quant]
# Counter
i = 1
# Loop once per required player so every player's name can be registered.
while quant > 0:
# Start by clearing the input box and the screen
for box in input_boxes:
box.limpar()
screen.fill((0, 0, 0))
# Define the title asking for a name, updating the player number from the counter
text2 = "Digite o nome do jogador numero"
txt_surface2 = FONTletras.render(text2, True, branco)
text3 = "{jogador}".format(jogador=i)
txt_surface3 = FONTnum.render(text3, True, branco)
done = False
# Loop driving the input box and the Submit button: the user types the name here and presses Submit
while not done:
for event in pygame.event.get():
if event.type == pygame.QUIT:
done = True
quant = 0
if event.type == pygame.MOUSEBUTTONDOWN:
if rect1.collidepoint(event.pos):
for box in input_boxes:
if box.text != '':
done = True
for box in input_boxes:
box.eventobox(event)
for box in input_boxes:
box.update()
screen.fill((0, 0, 0))
screen.blit(txt_surface1, (107, 255))
pygame.draw.rect(screen, COLOR_INACTIVE, rect1, 2)
txt_surface2 = FONTletras.render(text2, True, branco)
screen.blit(txt_surface2, (50, 50))
screen.blit(txt_surface3, (720, 50))
for box in input_boxes:
box.draw(screen)
pygame.display.flip()
# After the Submit button is pressed, save the name to our list
for box in input_boxes:
lista.append(box.text)
# Player counter for the loop, tracking how many names remain to be submitted
quant -= 1
# Counter used in the title
i += 1
# Return the finished list with the player count and their respective names.
return lista
return input_name()
|
{"/main.py": ["/menu.py"], "/menu.py": ["/Jogador.py"], "/testes.py": ["/dice.py"]}
|
28,289,508
|
carolinaesteves/INF1301-Modular
|
refs/heads/master
|
/testes.py
|
# Version table for the Testes (Tests) module
# Module developers: Victor Fróes, Ana Carolina Esteves, João Pedro Botelho
#
# Table based on the git log of the module's local repository
#
# Authors Victor Fróes, Ana Carolina Esteves, João Pedro Botelho, on 15/10:
# develop tests
#
import unittest
from unittest import mock
import dice
class testa_num_jogadores(unittest.TestCase):
def define_teste(self):
m = mock.Mock()
assert isinstance(m.campo,mock.Mock)
assert isinstance(m(),mock.Mock)
def teste_atribui(self):
m = mock.Mock()
m.num = 2
self.assertEqual(m.num,2)
def teste_jogador(self):
m = mock.Mock()
m.retornaJogador.return_value = 2
self.assertEqual(m.retornaJogador(),2)
class test_dado(unittest.TestCase):
def teste_roda_dado_ok(self):
teste = dice.rollDice()
print('Test case - dice roll returns a value between 1 and 6')
self.assertTrue(1 <= teste <= 6)
unittest.main()
|
{"/main.py": ["/menu.py"], "/menu.py": ["/Jogador.py"], "/testes.py": ["/dice.py"]}
|
28,289,509
|
carolinaesteves/INF1301-Modular
|
refs/heads/master
|
/main.py
|
# Version table for the Main module
# Module developer: Ana Carolina Esteves
#
# Table based on the git log of the module's local repository
#
# Author Ana Carolina Esteves, on 17/10:
# integrates main with menu
#
# Author Ana Carolina Esteves, on 17/10:
# integrates main with menu and jogador
#
# Author Ana Carolina Esteves, on 19/10:
# single entry point for execution through main
import pygame
import menu
def startGame():
# goes to the Partida (match) module, where the game starts
return
pygame.init()
lis = menu.executaMenu()
|
{"/main.py": ["/menu.py"], "/menu.py": ["/Jogador.py"], "/testes.py": ["/dice.py"]}
|
28,467,087
|
sudo-install-MW/vgg16
|
refs/heads/master
|
/basic_graphs/z_equals_wx+b.py
|
import tensorflow as tf
# create graph
g = tf.Graph()
# set g as default graph and construct the graph
with g.as_default():
x = tf.placeholder(dtype=tf.float32, shape=None, name='x')
w = tf.Variable(2.0, name='weight')
b = tf.Variable(1.0, name='bias')
z = w*x + b
init = tf.global_variables_initializer()
# execute the graph session for graph g
with tf.Session(graph=g) as sess:
sess.run(init)
for t in [1, 2, 3, 4, 5]:
print(sess.run(z, feed_dict={x: t}))
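# Worked trace: with w = 2.0 and b = 1.0, z = 2*t + 1, so the loop prints
# 3.0, 5.0, 7.0, 9.0, 11.0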
|
{"/model/training.py": ["/model/model_fn.py", "/model/input_fn.py", "/model/evaluation.py"], "/tensorflow_softmax.py": ["/utils.py"], "/train_model.py": ["/utils.py", "/network.py"]}
|
28,467,088
|
sudo-install-MW/vgg16
|
refs/heads/master
|
/simple_models/linear_regression.py
|
import numpy as np
import tensorflow as tf
epoch=100000
x_data = np.random.randn(2000, 3)
w_real = [0.3, 0.5, 0.1]
b_real = -0.2
noise = np.random.randn(1, 2000) * 0.1
y_data = np.matmul(w_real, x_data.T) + b_real + noise
# step 1 Prep inputs
x_train = tf.placeholder(tf.float32, shape=[None, 3])
w_train = tf.Variable(dtype=tf.float32, initial_value=[[3,1,2]])
b_train = tf.Variable(0, dtype=tf.float32)
# step 2 Model
# Linear regression
y_train = tf.matmul(x_train, tf.transpose(w_train)) + b_train
y_train = tf.transpose(y_train)
# step 3 Loss function
loss = tf.losses.mean_squared_error(y_data, y_train)
# step 4 Optimization
optimizer = tf.train.GradientDescentOptimizer(learning_rate = .0001)
train = optimizer.minimize(loss)
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
for i in range(epoch):
sess.run(train, feed_dict={x_train: x_data})
if i % 1000 == 0:  # log the learned parameters periodically
print(sess.run([w_train, b_train]))
|
{"/model/training.py": ["/model/model_fn.py", "/model/input_fn.py", "/model/evaluation.py"], "/tensorflow_softmax.py": ["/utils.py"], "/train_model.py": ["/utils.py", "/network.py"]}
|
28,467,089
|
sudo-install-MW/vgg16
|
refs/heads/master
|
/basic_graphs/practice_graph_a.py
|
import tensorflow as tf
b = tf.constant(4)
a = tf.constant(5)
d = tf.add(a, b)
c = tf.multiply(a, b)
f = tf.add(c, d)
e = tf.subtract(c, d)
g = tf.divide(f, e)
sess = tf.Session()
print(sess.run(g))
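# Worked trace: d = 5+4 = 9, c = 5*4 = 20, f = 20+9 = 29, e = 20-9 = 11,
# so the script prints g = 29/11 ≈ 2.636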
|
{"/model/training.py": ["/model/model_fn.py", "/model/input_fn.py", "/model/evaluation.py"], "/tensorflow_softmax.py": ["/utils.py"], "/train_model.py": ["/utils.py", "/network.py"]}
|
28,467,090
|
sudo-install-MW/vgg16
|
refs/heads/master
|
/network.py
|
from keras.layers import Dense
from keras.models import Sequential
# script to hold the CNN network
class network():
def __init__(self):
pass
def fc_network(self, input_img, input_label, test_img, test_label):
input_shape = input_img.shape[1]
print("Input shape is", input_shape)
# as first layer in a sequential model:
model = Sequential()
# layer 1
model.add(Dense(784, input_dim=input_shape, activation='relu'))
# layer 2
model.add(Dense(784, activation='relu'))
# layer 3: output layer with softmax so the 10 class scores form a
# probability distribution, as categorical_crossentropy expects
model.add(Dense(10, activation='softmax'))
model.compile(loss='categorical_crossentropy',
optimizer='adam',
metrics=['accuracy'])
model.fit(input_img, input_label, batch_size=4, epochs=1, verbose=1)
scores = model.evaluate(test_img, test_label)
print("model accuracy in test set is :", scores)
|
{"/model/training.py": ["/model/model_fn.py", "/model/input_fn.py", "/model/evaluation.py"], "/tensorflow_softmax.py": ["/utils.py"], "/train_model.py": ["/utils.py", "/network.py"]}
|
28,467,091
|
sudo-install-MW/vgg16
|
refs/heads/master
|
/utils.py
|
# script for preprocessing and fetching image data
from tensorflow.examples.tutorials.mnist import input_data
class MNIST():
def __init__(self):
pass
def train_set(self):
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
#print("Type of the train images ",type(mnist.train.images))
#print("Dimention of the train images", mnist.train.images.shape)
return mnist.train.images, mnist.train.labels
def test_set(self):
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
return mnist.test.images, mnist.test.labels
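# Minimal usage sketch (assumes the MNIST_data/ download succeeds; the
# standard TF tutorial split has 55000 training images):
#   mnist = MNIST()
#   images, labels = mnist.train_set()  # images: (55000, 784), labels: (55000, 10) one-hot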
|
{"/model/training.py": ["/model/model_fn.py", "/model/input_fn.py", "/model/evaluation.py"], "/tensorflow_softmax.py": ["/utils.py"], "/train_model.py": ["/utils.py", "/network.py"]}
|
28,467,092
|
sudo-install-MW/vgg16
|
refs/heads/master
|
/basic_graphs/practice_graph_b.py
|
import tensorflow as tf
b = tf.constant(2)
a = tf.constant(90)
c = tf.multiply(b, a)
c = tf.cast(c, dtype=tf.float32)
d = tf.sin(c)
d = tf.cast(d, dtype=tf.int32)
e = tf.div(d, b)
sess = tf.Session()
print(sess.run(e))
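# Worked trace: c = 2*90 = 180, sin(180.0 radians) ≈ -0.801, casting to int32
# truncates to 0, and tf.div(0, 2) = 0, so the script prints 0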
|
{"/model/training.py": ["/model/model_fn.py", "/model/input_fn.py", "/model/evaluation.py"], "/tensorflow_softmax.py": ["/utils.py"], "/train_model.py": ["/utils.py", "/network.py"]}
|
28,467,093
|
sudo-install-MW/vgg16
|
refs/heads/master
|
/tensorflow_softmax.py
|
import tensorflow as tf
from utils import MNIST
data_dir = '/MNIST_data'
num_steps = 1000
mini_batch = 100
mnist = MNIST()
X_train, X_label = mnist.train_set()
X_test, X_test_label = mnist.test_set()
g = tf.Graph()
with g.as_default():
with tf.name_scope(name="Inputs") as scope:
x = tf.placeholder(tf.float32, [None, 784])
W = tf.Variable(tf.zeros([784, 10]))
y_true = tf.placeholder(tf.float32, [None, 10], name="output_layer")
with tf.name_scope(name="Training") as scope:
y_pred = tf.matmul(x, W, name="output")
with tf.name_scope(name="Loss_function") as scope:
cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=y_pred, labels=y_true))
with tf.name_scope(name="optimizer") as scope:
gd_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
with tf.name_scope(name="Inference") as scope:
correct_mask = tf.equal(tf.argmax(y_pred, 1), tf.argmax(y_true, 1))
accuracy = tf.reduce_mean(tf.cast(correct_mask, tf.float32))
init = tf.global_variables_initializer()
with tf.Session() as sess:
sess.run(init)
writer = tf.summary.FileWriter('./logs', sess.graph)
for i in range(num_steps):
#batch_xs, batch_ys = data.train.next_batch(mini_batch)
sess.run(gd_step, feed_dict={x:X_train, y_true: X_label})
print("training batch {}".format(i))
test_accuracy = sess.run(accuracy, feed_dict={x: X_test, y_true: X_test_label})
print("Accuracy of the model is :", test_accuracy)
|
{"/model/training.py": ["/model/model_fn.py", "/model/input_fn.py", "/model/evaluation.py"], "/tensorflow_softmax.py": ["/utils.py"], "/train_model.py": ["/utils.py", "/network.py"]}
|
28,467,094
|
sudo-install-MW/vgg16
|
refs/heads/master
|
/train_model.py
|
# script to train model
from utils import MNIST
from network import network
#score = model.evaluate(X_test, Y_test, verbose=0)
def train():
mnist = MNIST()
X_train, X_label = mnist.train_set()
Y_test, Y_label = mnist.test_set()
train_network = network()
train_network.fc_network(X_train, X_label, Y_test, Y_label)
train()
|
{"/model/training.py": ["/model/model_fn.py", "/model/input_fn.py", "/model/evaluation.py"], "/tensorflow_softmax.py": ["/utils.py"], "/train_model.py": ["/utils.py", "/network.py"]}
|
28,467,095
|
sudo-install-MW/vgg16
|
refs/heads/master
|
/basic_graphs/basic_graph.py
|
import tensorflow as tf
a = tf.constant(5)
b = tf.constant(2)
c = tf.constant(3)
e = tf.add(c, b)
d = tf.multiply(a, b)
f = tf.subtract(d, e)
sess = tf.Session()
print(sess.run(f))
sess.close()
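# Worked trace: e = 3+2 = 5, d = 5*2 = 10, f = 10-5 = 5, so the script prints 5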
|
{"/model/training.py": ["/model/model_fn.py", "/model/input_fn.py", "/model/evaluation.py"], "/tensorflow_softmax.py": ["/utils.py"], "/train_model.py": ["/utils.py", "/network.py"]}
|
28,588,579
|
nikrus333/guard_camera_anapa
|
refs/heads/main
|
/api_cam.py
|
from PIL import Image, ImageTk
import tkinter as tk
import cv2
import test
import algoritm
import create_json
import map
class Application():
def __init__(self):
""" Initialize application which uses OpenCV + Tkinter. It displays
a video stream in a Tkinter window and stores current snapshot on disk """
##self.cam = test.Cam()
self.algoritm = algoritm.CoordAlgoritm()
self.create_file = create_json.CreteJsonFile()
##self.vs = self.cam.cap # capture video frames, 0 is your default video camera
self.vs = cv2.VideoCapture(0)
self.current_image = None # current image from the camera
self.root = tk.Tk() # initialize root window
self.root.title("dron.exe") # set window title
# self.destructor function gets fired when the window is closed
self.root.protocol('WM_DELETE_WINDOW', self.destructor)
self.root.geometry('1920x1080')
self.panel = tk.Label(self.root) # initialize image panel
self.panel.pack(padx=10, pady=10)
self.panel.bind('<Motion>', self.motion)
name_label = tk.Label(text="Введите имя:")
#name_label.grid(row=0, column=0, sticky="w")
btn = tk.Button(self.root, text="Start dron", command=self.take_snapshot)
self.panel.bind("<Button-2>",self.fun)
btn.pack(fill="both", expand=True, padx=10, pady=10)
self.coord_actual = [None, None]
self.cord_final_mouse = [None, None]
self.video_loop()
def video_loop(self):
""" Get frame from the video stream and show it in Tkinter """
ok, frame = self.vs.read() # read frame from video stream
height = 1600
length = 900
if ok: # frame captured without any errors
frame = cv2.resize(frame, (height, length))
frame = cv2.circle(frame,(height // 2, length // 2), 5, (255, 0, 0), 2, 8, 0)
cv2image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGBA) # convert colors from BGR to RGBA
self.current_image = Image.fromarray(cv2image) # convert image for PIL
imgtk = ImageTk.PhotoImage(image=self.current_image) # convert image for tkinter
self.panel.imgtk = imgtk  # keep a reference so the image is not garbage-collected
self.panel.config(image=imgtk) # show the image
self.root.after(4, self.video_loop)  # schedule the next frame after 4 milliseconds
#print(self.coord)
def motion(self, event):
self.coord_actual[0], self.coord_actual[1] = event.x, event.y
#print('{}, {}'.format(self.coord_actual[0], self.coord_actual[1]))
def take_snapshot(self):
print('_____________')
#print(self.cam.read_data())
print('_____________')
##_ = self.algoritm.solution(self.cam.read_data(), self.cord_final_mouse) # method solution_mouse or solution
_ = None
self.map = Image.open("1.PNG")
self.maps = ImageTk.PhotoImage(self.map)
label1 = tk.Label(image = self.maps)
label1.image_names = 'dzen'
label1.place(x = 1450, y = 550)
print(_, "solution")
self.create_file.create(_)
def fun(self, event):
self.cord_final_mouse = self.coord_actual
# def coord(self, coorde = 0):
# print(coorde)
# self.root.after(2000, self.coord)
def destructor(self):
""" Destroy the root object and release all resources """
print("[INFO] closing...")
self.root.destroy()
self.vs.release() # release web camera
cv2.destroyAllWindows() # it is not mandatory in this application
# construct the argument parse and parse the arguments
# start the app
print("[INFO] starting...")
pba = Application()
pba.root.mainloop()
|
{"/api_cam.py": ["/settings.py", "/algoritm.py", "/map.py", "/server_data.py", "/test.py", "/create_json.py"]}
|
28,600,145
|
evan176/rl
|
refs/heads/master
|
/rl/mlp.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import tensorflow as tf
from .utils import summarize_variable
def weight_variable(shape, name=None):
"""
Create weight variable with "tf.Variable"
Args:
shape (list): shape of weight
name (str): name of variable
Returns:
tf.Variable: weight variable
Usage:
>>> weight_variable([100, 100])
"""
return tf.Variable(tf.truncated_normal(shape, stddev=0.001), name=name)
def bias_variable(shape, name=None):
"""
Create bias variable with "tf.Variable"
Args:
shape (list): shape of bias
name (str): name of variable
Returns:
tf.Variable: bias variable
Usage:
>>> bias_variable([100])
"""
return tf.Variable(tf.constant(0.1, shape=shape), name=name)
def normalize_weight(w, dim, name=None, epsilon=1e-12):
"""
Create weight normalization op
Args:
w (tf.Variable): weight variable
dim (int): dimension for reduce_sum
name (str): name scope for the op
epsilon (float): lower bound guarding against division by zero
Returns:
norm_w (tf.Tensor): normalized weight
Usage:
>>> norm_w = normalize_weight(w, 0)
"""
with tf.name_scope(name) as scope:
square_sum = tf.reduce_sum(tf.square(w), dim, keep_dims=True)
inv_norm = tf.rsqrt(tf.maximum(square_sum, epsilon))
norm_w = tf.multiply(w, inv_norm)
return norm_w
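# In effect, with dim=0 each column of w is rescaled to (at most) unit L2 norm:
#     norm_w[:, j] = w[:, j] / max(||w[:, j]||_2, sqrt(epsilon))
# a sketch of the math implemented above; epsilon guards against division by zero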
def LeakyReLU(x, alpha, name=None):
with tf.name_scope(name) as scope:
return tf.maximum(alpha * x, x, name=name)
def multilayer_perceptron(dimensions, alpha=1e-3):
"""
Create multilayer perceptron
Args:
dimensions (list): dimensions of each layer, including
input & final layer
alpha (float): slope of LeakyReLU (default: 1e-3)
Returns:
network (tf.Tensor): network operation, output shape is same as last element in dimensions
input_x (tf.Tensor): input placeholder for network
variables (dict): a dictionary contains all weight and bias variables
Usage:
# Create multilayer perceptron with 2 hidden layers (20, 20)
>>> network, input_x, variables = multilayer_perceptron([30, 20, 20 , 1])
>>> print(network)
Tensor("neuron_2:0", shape=(?, 1), dtype=float32)
>>> print(input_x)
Tensor("input_x:0", shape=(?, 30), dtype=float32)
>>> print(variables)
{'b_2': <tensorflow...>, 'b_1': <tensorflow...>, 'w_1': <tensorflow...>,
'w_2': <tensorflow...>, 'b_0': <tensorflow...>, 'w_0': <tensorflow...>}
"""
variables = {}
input_x = tf.placeholder(tf.float32, [None, dimensions[0]], name="input_x")
x = input_x
for i in range(len(dimensions) - 1):
w_name = "w_{}".format(i)
b_name = "b_{}".format(i)
y_name = "y_{}".format(i)
act_y_name = "activate_{}".format(y_name)
w = weight_variable([dimensions[i], dimensions[i + 1]], name=w_name)
b = bias_variable([dimensions[i + 1]], name=b_name)
variables[w_name] = w
variables[b_name] = b
summarize_variable(w, w_name)
summarize_variable(b, b_name)
norm_w = normalize_weight(w, 0, "norm_{}".format(i))
with tf.name_scope(y_name) as scope:
y = tf.add(tf.matmul(x, norm_w), b, name=y_name)
x = LeakyReLU(y, alpha, name=act_y_name)
network = x
return network, input_x, variables
|
{"/tests/test_pool.py": ["/rl/pool.py"], "/rl/cnn.py": ["/rl/mlp.py", "/rl/utils.py"], "/rl/__init__.py": ["/rl/pool.py", "/rl/mlp.py", "/rl/cnn.py", "/rl/qnetwork.py"], "/rl/policy.py": ["/rl/agent.py", "/rl/mlp.py", "/rl/cnn.py"], "/tests/test_cnn.py": ["/rl/cnn.py"], "/tests/test_qnetwork.py": ["/rl/qnetwork.py"], "/rl/qnetwork.py": ["/rl/agent.py", "/rl/mlp.py", "/rl/cnn.py", "/rl/utils.py"], "/tests/test_mlp.py": ["/rl/mlp.py"], "/rl/mlp.py": ["/rl/utils.py"]}
|
28,600,146
|
evan176/rl
|
refs/heads/master
|
/tests/test_cnn.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from copy import copy
from types import GeneratorType
from unittest import TestCase
try:
from unittest import mock
except:
import mock
import numpy
import tensorflow as tf
from rl.cnn import conv_net
class CNNTest(TestCase):
def test_2d(self):
# Init test
channels = [3, 20, 30, 40]
filters = [[5, 5], [4, 4], [3, 3]]
poolings = [[4, 4], [3, 3], [2, 2]]
width = 1000
height = 1000
net, input_x, variables = conv_net(
channels, filters, poolings, width, height
)
# Check type and dimension of net
self.assertIsInstance(net, tf.Tensor)
self.assertEqual([x.value for x in net.get_shape().dims], [None, 1024])
# Check type and dimension of input_x
self.assertIsInstance(input_x, tf.Tensor)
dims = [x.value for x in input_x.get_shape().dims]
expected = [None, height, width, 3]
self.assertEqual(dims, expected)
# Check dimension of each variables
for i in range(len(filters)):
w = variables['conv_w_{}'.format(i)]
b = variables['conv_b_{}'.format(i)]
expected = [
filters[i][0], filters[i][1], channels[i], channels[i + 1]
]
self.assertEqual(w.get_shape(), expected)
self.assertEqual(b.get_shape(), [channels[i + 1]])
def test_3d(self):
channels = [10, 20, 30, 40]
filters = [[1, 5, 5], [1, 4, 4], [1, 3, 3]]
poolings = [[1, 4, 4], [1, 3, 3], [1, 2, 2]]
width = 1000
height = 1000
depth = 20
net, input_x, variables = conv_net(
channels, filters, poolings, width, height, depth
)
# Check type and dimension of net
self.assertIsInstance(net, tf.Tensor)
self.assertEqual([x.value for x in net.get_shape().dims], [None, 1024])
# Check type and dimension of input_x
self.assertIsInstance(input_x, tf.Tensor)
dims = [x.value for x in input_x.get_shape().dims]
expected = [None, depth, height, width, 10]
self.assertEqual(dims, expected)
# Check dimension of each variables
for i in range(len(filters)):
w = variables['conv_w_{}'.format(i)]
b = variables['conv_b_{}'.format(i)]
expected = [
filters[i][0], filters[i][1], filters[i][2],
channels[i], channels[i + 1]
]
self.assertEqual(w.get_shape(), expected)
self.assertEqual(b.get_shape(), [channels[i + 1]])
|
{"/tests/test_pool.py": ["/rl/pool.py"], "/rl/cnn.py": ["/rl/mlp.py", "/rl/utils.py"], "/rl/__init__.py": ["/rl/pool.py", "/rl/mlp.py", "/rl/cnn.py", "/rl/qnetwork.py"], "/rl/policy.py": ["/rl/agent.py", "/rl/mlp.py", "/rl/cnn.py"], "/tests/test_cnn.py": ["/rl/cnn.py"], "/tests/test_qnetwork.py": ["/rl/qnetwork.py"], "/rl/qnetwork.py": ["/rl/agent.py", "/rl/mlp.py", "/rl/cnn.py", "/rl/utils.py"], "/tests/test_mlp.py": ["/rl/mlp.py"], "/rl/mlp.py": ["/rl/utils.py"]}
|
28,600,147
|
evan176/rl
|
refs/heads/master
|
/tests/test_mlp.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from copy import copy
from types import GeneratorType
from unittest import TestCase
try:
from unittest import mock
except:
import mock
import numpy
import tensorflow as tf
from rl.mlp import weight_variable, bias_variable, multilayer_perceptron
class MLPTest(TestCase):
def setUp(self):
pass
def tearDown(self):
pass
@mock.patch("tensorflow.truncated_normal")
def test_weight(self, mock_truncated_normal):
mock_truncated_normal.side_effect = lambda x, **kargs: numpy.ones(x)
w = weight_variable([3, 3])
self.assertIsInstance(w, tf.Variable)
self.assertEqual(w.get_shape(), [3, 3])
@mock.patch("tensorflow.constant")
def test_bias(self, mock_constant):
mock_constant.side_effect = lambda x, **kargs: numpy.ones(kargs['shape'])
b = bias_variable([3, 3])
self.assertIsInstance(b, tf.Variable)
self.assertEqual(b.get_shape(), [3, 3])
def test_mlp(self):
test_dimensions = [5, 5, 5, 2]
net, input_x, variables = multilayer_perceptron(test_dimensions)
self.assertIsInstance(net, tf.Tensor)
self.assertEqual([x.value for x in net.get_shape().dims], [None, test_dimensions[-1]])
self.assertIsInstance(input_x, tf.Tensor)
self.assertEqual([x.value for x in input_x.get_shape().dims], [None, test_dimensions[0]])
for i in range(len(test_dimensions) - 1):
w = variables['w_{}'.format(i)]
b = variables['b_{}'.format(i)]
self.assertEqual(w.get_shape(), [test_dimensions[i], test_dimensions[i + 1]])
self.assertEqual(b.get_shape(), [test_dimensions[i + 1]])
|
{"/tests/test_pool.py": ["/rl/pool.py"], "/rl/cnn.py": ["/rl/mlp.py", "/rl/utils.py"], "/rl/__init__.py": ["/rl/pool.py", "/rl/mlp.py", "/rl/cnn.py", "/rl/qnetwork.py"], "/rl/policy.py": ["/rl/agent.py", "/rl/mlp.py", "/rl/cnn.py"], "/tests/test_cnn.py": ["/rl/cnn.py"], "/tests/test_qnetwork.py": ["/rl/qnetwork.py"], "/rl/qnetwork.py": ["/rl/agent.py", "/rl/mlp.py", "/rl/cnn.py", "/rl/utils.py"], "/tests/test_mlp.py": ["/rl/mlp.py"], "/rl/mlp.py": ["/rl/utils.py"]}
|
28,600,148
|
evan176/rl
|
refs/heads/master
|
/rl/pool.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from abc import ABCMeta, abstractmethod
from bson.binary import Binary
import math
import pickle
import numpy
import six
@six.add_metaclass(ABCMeta)
class PoolInterface():
"""
Pool interface definition
"""
@abstractmethod
def add(self, state, action, reward, next_state, done,
next_actions=None, priority=1):
"""
Pool is used to store experience data, like:
(state, action, reward, next_state). It must contain 4 things:
state: state of environment
action: executed action with given state
reward: feedback of action with given state
next_state: next state of environment after executing action
These 2 things are optional:
next_actions: available actions of next state
priority: priority (loss) of this record
"""
pass
@abstractmethod
def remove(self, record_id):
"""
Remove record from pool with record_id
"""
pass
@abstractmethod
def sample(self, size):
"""
Sample records from pool with given size.
"""
pass
@abstractmethod
def update(self, priorities):
"""
Update each records' priority
"""
pass
@abstractmethod
def size(self):
"""
Get size of current pool
"""
pass
@abstractmethod
def amount(self):
"""
Get number of records in pool
"""
pass
@abstractmethod
def all(self):
"""
Get all experiences of pool by generator
"""
pass
class MemoryPool(PoolInterface):
"""
MemoryPool uses `dict` to store experience data. Data can be sampled
after it add to pool. Sample method is biased random sampling with
priority.
Args:
pool_size (int): specify size of memory pool. `0` for unlimited
(default: 0)
Returns:
MemoryPool object
Examples:
# Init pool
>>> mpool = MemoryPool(3000)
# Add data to pool
>>> mpool.add(
state=[1, 2, 3], action=3, reward=100, next_state=[4, 5, 6]
)
# Sample data for training
>>> records = mpool.sample(30)
>>> print(len(records))
30
# Update priority of data
>>> priorities = [(key1, 10), (key2, 0), (key3, 9), ...]
>>> mpool.update(priorities)
"""
def __init__(self, pool_size=0):
if not isinstance(pool_size, int) or pool_size < 0:
raise TypeError("Pool size should be a non-negative integer")
self._size = pool_size
self._experiences = {}
self._q_front = 0
def add(self, state, action, reward, next_state, done,
next_actions=None, priority=1, info=None):
"""
Add new data to experience pool.
Args:
state: any type as long as it can describe the state of environment
action: any type as long as it can represent executed action with
above state
reward: also free type for action feedback
next_state: Like state but it is for describing next state
next_actions: available actions of the next state (default: None)
priority: specifies the priority of the record (default: 1)
Returns:
amount: record number in pool
Examples:
>>> mpool.add(
state=[0, 0, 1], action=1,
reward=100, next_state=[1, 0, 0]
)
>>> mpool.add(
state={'a': 1, 'b': 0}, action=3,
reward=-1, next_state={'a': -1, 'b': 1},
next_actions=[3, 1, 0], priority=3.5
)
"""
if numpy.isnan(priority):
priority = 1e-3
elif priority < 1e-3:
priority = 1e-3
elif priority > 1e+3:
priority = 1e+3
if self._q_front > six.MAXSIZE:
self._q_front = 0
while self._q_front in self._experiences:
self._q_front += 1
self._experiences[self._q_front] = {
'state': state,
'action': action,
'reward': reward,
'next_state': next_state,
'next_actions': next_actions,
'done': done,
'priority': priority,
'info': info,
}
if self.amount() > self.size() > 0:
min_p = 1e+9
min_key = 0
for key, record in self._experiences.items():
if record['priority'] < min_p:
min_p = record['priority']
min_key = key
self.remove(min_key)
return self.amount()
def remove(self, key):
"""
Remove record from pool with key.
Args:
key: the key of record in dictionary
Returns:
None
Examples:
# Remove 100th data in pool
>>> pool.remove(100)
"""
return self._experiences.pop(key)
def sample(self, size):
"""
Sample records from pool with given size.
Args:
size (int): sampling size
Returns:
samples (list):
[
(index, {'state': ..., 'action': ..., 'reward': ...,
'next_state': ..., 'next_actions': ..., 'priority': ...,}),
(...),
]
Examples:
# Biased random sampling 100 records
>>> pool.sample(100)
"""
dist = []
keys = []
for k, record in self._experiences.items():
dist.append(record['priority'])
keys.append(k)
sum_d = float(sum(dist))
prob = [item / sum_d for item in dist]
if size > 0:
if size > self.amount():
size = self.amount()
keys = numpy.random.choice(
keys, size=size, p=prob, replace=False
)
else:
keys = []
for k in keys:
yield k, self._experiences[k]
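# Sampling note: each record k is drawn without replacement with probability
# priority_k / sum(all priorities), so higher-priority experiences are
# replayed proportionally more often (a simple prioritized-replay scheme).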
def update(self, priorities):
"""
Update each records' priority
Args:
priorities (list):
[
(index, priority),
(...),
...
]
Returns:
None
Examples:
>>> pool.update([
(0, 10),
(1, 0),
(2, 3)
])
"""
for key, priority in priorities:
if numpy.isnan(priority):
p = 1e-3
elif priority < 1e-3:
p = 1e-3
elif priority > 1e+3:
p = 1e+3
else:
p = priority
try:
self._experiences[key]['priority'] = p
except:
pass
def size(self):
"""
Get size of current pool
Args:
None
Returns:
pool_size (int): limited size of pool
Examples:
>>> mpool = MemoryPool(300)
>>> print(mpool.size())
300
"""
return self._size
def amount(self):
"""
Get number of records in pool
Args:
None
Returns:
number (int): number of records
Examples:
>>> mpool = MemoryPool(300)
>>> print(mpool.size())
300
>>> print(mpool.amount())
0
>>> mpool.add(1, 2, 3, 4)
>>> print(mpool.amount())
1
"""
return len(self._experiences)
def all(self):
"""
Get all experiences of pool by generator
Args:
None
Returns:
record (dict): experience record
Examples:
>>> mpool = MemoryPool(300)
>>> for item in mpool.all():
print(item)
{'state': ...}
{'state': ...}
...
"""
for key, record in self._experiences.items():
yield key, record
class MongoPool(PoolInterface):
"""
MongoPool store experience data to db. Data must be numpy array.
Args:
collection (pymongo.collection.Collection): Specific collection for
storing experience data
pool_size (int): specify size of pool. `0` for unlimited
(default: 0)
Returns:
MongoPool object
Examples:
# Init pool
>>> client = MongoClient()
>>> mpool = MongoPool(client['DB']['Collection'])
# Add data to pool
>>> mpool.add(
state=[1, 2, 3], action=3, reward=100, next_state=[4, 5, 6]
)
# Sample data for training
>>> records = mpool.sample(30)
>>> print(len(records))
30
# Update priority of data
>>> priorities = [(id1, 10), (id2, 0), (id3, 9), ...]
>>> mpool.update(priorities)
"""
def __init__(self, collection, pool_size=0):
if not isinstance(pool_size, int) or pool_size < 0:
raise TypeError("Pool size should be a non-negative integer")
self._size = pool_size
self._collection = collection
def add(self, state, action, reward, next_state, done,
next_actions=None, priority=1, info=None):
"""
Add new data to experience pool.
Args:
state: any type as long as it can describe the state of environment
action: any type as long as it can represent executed action with
above state
reward: also free type for action feedback
next_state: Like state but it is for describing next state
next_actions: available actions of the next state (default: None)
priority: specifies the priority of the record (default: 1)
Returns:
amount: record number in pool
Examples:
>>> mpool.add(
state=[0, 0, 1], action=1,
reward=100, next_state=[1, 0, 0]
)
>>> mpool.add(
state={'a': 1, 'b': 0}, action=3,
reward=-1, next_state={'a': -1, 'b': 1},
next_actions=[3, 1, 0], priority=3.5
)
"""
if numpy.isnan(priority):
priority = 1e-3
elif priority < 1e-3:
priority = 1e-3
elif priority > 1e+3:
priority = 1e+3
last_record = self._get_last()
if last_record:
index = last_record['index'] + 1
else:
index = 0
data = {
'index': index,
'state': Binary(pickle.dumps(state)),
'action': Binary(pickle.dumps(action)),
'reward': Binary(pickle.dumps(reward)),
'next_state': Binary(pickle.dumps(next_state)),
'next_actions': Binary(pickle.dumps(next_actions)),
'done': done,
'priority': priority,
'info': info,
}
self._collection.insert_one(data)
if self.amount() > self.size() > 0:
# evict the lowest-priority record once the pool exceeds its size limit
worst = self._collection.find_one({}, sort=[('priority', 1)])
if worst is not None:
self.remove(worst['index'])
return self.amount()
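# Storage note (sketch): state/action/reward/next_state/next_actions may hold
# arbitrary Python or numpy objects, so they are serialized with
# Binary(pickle.dumps(...)) on write and restored with pickle.loads(...) in
# sample()/all(); MongoDB only ever sees opaque bytes for those fields.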
def remove(self, record_id):
"""
Remove record from pool with record_id.
Args:
record_id: the index of data
Returns:
None
Examples:
# Remove 100th data in pool
>>> pool.remove(100)
"""
self._collection.remove({'index': record_id})
def sample(self, size):
"""
Sample records from pool with given size.
Args:
size (int): sampling size
Returns:
samples (list):
[
(index, {'state': ..., 'action': ..., 'reward': ...,
'next_state': ..., 'next_actions': ..., 'priority': ...,}),
(...),
]
Examples:
# Biased random sampling 100 records
>>> pool.sample(100)
"""
dist = []
indexes = []
for record in self._collection.find({}, {'index': 1, 'priority': 1}):
dist.append(record['priority'])
indexes.append(record['index'])
sum_d = float(sum(dist))
prob = [item / sum_d for item in dist]
if size > 0 and indexes:
if size > len(indexes):
size = len(indexes)
indexes = numpy.random.choice(
indexes,
size=size, p=prob, replace=False
)
indexes = indexes.tolist()
else:
indexes = []
samples = list()
for record in self._collection.find({'index': {'$in': indexes}}):
record['state'] = pickle.loads(record['state'])
record['action'] = pickle.loads(record['action'])
record['reward'] = pickle.loads(record['reward'])
record['next_state'] = pickle.loads(record['next_state'])
record['next_actions'] = pickle.loads(record['next_actions'])
samples.append((record['index'], record))
return samples
def update(self, priorities):
"""
Update each records' priority
Args:
priorities (list):
[
(index, priority),
(...),
...
]
Returns:
None
Examples:
>>> pool.update([
(0, 10),
(1, 0),
(2, 3)
])
"""
for index, priority in priorities:
if numpy.isnan(priority):
p = 1e-3
elif priority < 1e-3:
p = 1e-3
elif priority > 1e+3:
p = 1e+3
else:
p = priority
self._collection.update_one(
{"index": index}, {"$set": {"priority": p}}
)
def size(self):
"""
Get size of current pool
Args:
None
Returns:
pool_size (int): limited size of pool
Examples:
>>> mpool = MemoryPool(300)
>>> print(mpool.size())
300
"""
return self._size
def amount(self):
"""
Get number of records in pool
Args:
None
Returns:
number (int): number of records
Examples:
>>> mpool = MemoryPool(300)
>>> print(mpool.size())
300
>>> print(mpool.amount())
0
>>> mpool.add(1, 2, 3, 4)
>>> print(mpool.amount())
1
"""
return self._collection.count()
def all(self):
"""
Get all experiences of pool by generator
Args:
None
Returns:
record (dict): experience record
Examples:
>>> mpool = MemoryPool(300)
>>> for item in mpool.all():
print(item)
{'state': ...}
{'state': ...}
...
"""
condition = {"$query": {}, "$orderby": {"id": 1}}
for record in self._collection.find(condition):
record['state'] = pickle.loads(record['state'])
record['action'] = pickle.loads(record['action'])
record['reward'] = pickle.loads(record['reward'])
record['next_state'] = pickle.loads(record['next_state'])
record['next_actions'] = pickle.loads(record['next_actions'])
yield record
def _get_last(self):
condition = {"$query": {}, "$orderby": {"index": -1}}
return self._collection.find_one(condition)
|
{"/tests/test_pool.py": ["/rl/pool.py"], "/rl/cnn.py": ["/rl/mlp.py", "/rl/utils.py"], "/rl/__init__.py": ["/rl/pool.py", "/rl/mlp.py", "/rl/cnn.py", "/rl/qnetwork.py"], "/rl/policy.py": ["/rl/agent.py", "/rl/mlp.py", "/rl/cnn.py"], "/tests/test_cnn.py": ["/rl/cnn.py"], "/tests/test_qnetwork.py": ["/rl/qnetwork.py"], "/rl/qnetwork.py": ["/rl/agent.py", "/rl/mlp.py", "/rl/cnn.py", "/rl/utils.py"], "/tests/test_mlp.py": ["/rl/mlp.py"], "/rl/mlp.py": ["/rl/utils.py"]}
|
28,600,149
|
evan176/rl
|
refs/heads/master
|
/tests/test_pool.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from copy import copy
from types import GeneratorType
from unittest import TestCase
try:
from unittest import mock
except:
import mock
from rl.pool import MemoryPool
class MemoryPoolTest(TestCase):
def setUp(self):
self.pool = MemoryPool(5000)
self.test_data = {
0: {
'state': 1, 'action': 3, 'reward': 100,
'next_state': 6, 'priority': 1
},
1: {
'state': 2, 'action': 3, 'reward': 0,
'next_state': 7, 'priority': 2
},
-1: {
'state': 3, 'action': 3, 'reward': -100,
'next_state': 8, 'priority': 3
},
2: {
'state': 4, 'action': 3, 'reward': 0,
'next_state': 9, 'priority': 4
},
4: {
'state': 5, 'action': 3, 'reward': 100,
'next_state': 10, 'priority': 5
},
}
self.pool._experiences = copy(self.test_data)
def tearDown(self):
self.pool = None
@mock.patch('rl.pool.MemoryPool.amount')
def test_add(self, mock_amount):
# Handle mock amount
mock_amount.side_effect = lambda *args: len(self.pool._experiences)
return_value = self.pool.add(6, '456', -100, None, [1213, 'a'], 1)
self.assertEqual(self.pool._q_front, 3)
self.assertEqual(return_value, 6)
def test_add_negative(self):
self.pool.add(6, '456', -100, None, [1213, 'a'], True, -1)
self.assertEqual(self.pool._q_front, 3)
self.assertEqual(
self.pool._experiences[self.pool._q_front]['priority'], 1e-3
)
@mock.patch('rl.pool.MemoryPool.amount')
def test_remove(self, mock_amount):
# Handle mock amount
mock_amount.return_value = len(self.pool._experiences)
return_value = self.pool.remove(4)
self.assertNotIn(4, self.pool._experiences)
self.assertEqual(return_value, self.test_data[4])
def test_sample(self):
for key, record in self.pool.sample(5):
self.assertEqual(record, self.test_data[key])
def test_sample_greater(self):
for key, record in self.pool.sample(10):
self.assertEqual(record, self.test_data[key])
def test_update(self):
data = [(0, 5), (1, 4), (-1, 3), (2, 2), (4, 1)]
self.pool.update(data)
for i in range(len(data)):
self.assertEqual(self.pool._experiences[data[i][0]]['priority'], data[i][1])
def test_update_negative(self):
self.pool.update([(0, 1), (1, -1), (2, 0), (3, 1), (4, 0)])
self.assertEqual(self.pool._experiences[1]['priority'], 1e-3)
def test_size(self):
self.assertEqual(self.pool.size(), 5000)
def test_amount(self):
self.assertEqual(self.pool.amount(), 5)
def test_all(self):
self.assertIsInstance(self.pool.all(), GeneratorType)
result_dict = {}
for key, record in self.pool.all():
result_dict[key] = record
for key, record in self.test_data.items():
self.assertEqual(record, self.test_data[key])
|
{"/tests/test_pool.py": ["/rl/pool.py"], "/rl/cnn.py": ["/rl/mlp.py", "/rl/utils.py"], "/rl/__init__.py": ["/rl/pool.py", "/rl/mlp.py", "/rl/cnn.py", "/rl/qnetwork.py"], "/rl/policy.py": ["/rl/agent.py", "/rl/mlp.py", "/rl/cnn.py"], "/tests/test_cnn.py": ["/rl/cnn.py"], "/tests/test_qnetwork.py": ["/rl/qnetwork.py"], "/rl/qnetwork.py": ["/rl/agent.py", "/rl/mlp.py", "/rl/cnn.py", "/rl/utils.py"], "/tests/test_mlp.py": ["/rl/mlp.py"], "/rl/mlp.py": ["/rl/utils.py"]}
|
28,619,742
|
QianrXU/Projects
|
refs/heads/master
|
/mysite/mysite/urls.py
|
from django.contrib import admin
from django.urls import path
from limin import views
urlpatterns = [
path('admin/', admin.site.urls),
path('', views.products, name='products'),
path('equipments/', views.equipments, name='equipments'),
path('company/', views.company, name='company'),
path('purchase/', views.purchase, name='purchase'),
path('contact/', views.contact, name='contact'),
path('product1/', views.product1, name="product1"),
path('product2/', views.product2, name="product2"),
path('product3/', views.product3, name="product3"),
path('product4/', views.product4, name="product4"),
path('product5/', views.product5, name="product5"),
path('product6/', views.product6, name="product6"),
path('product7/', views.product7, name="product7"),
path('product8/', views.product8, name="product8"),
path('product9/', views.product9, name="product9"),
path('company/topics/<pk>', views.topics, name='topics'),
]
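# A possible consolidation (sketch): the nine near-identical product routes
# could collapse into a single parameterized path, assuming a hypothetical
# views.product(request, num) view existed:
#   path('product<int:num>/', views.product, name='product'),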
|
{"/mysite/limin/views.py": ["/mysite/limin/models.py"]}
|