from io import StringIO
from unittest.mock import Mock, call, patch

from django.test import TestCase

from jarbas.chamber_of_deputies.management.commands.receipts_text import Command
from jarbas.chamber_of_deputies.models import Reimbursement


class TestCommand(TestCase):

    def setUp(self):
        self.command = Command()


class TestSerializer(TestCommand):

    def test_serializer(self):
        expected = {
            'document_id': 42,
            'receipt_text': 'lorem ipsum',
        }
        input = {
            'document_id': '42',
            'text': 'lorem ipsum'
        }
        self.assertEqual(self.command.serialize(input), expected)

    def test_serializer_without_text(self):
        expected = {
            'document_id': 42,
            'receipt_text': None
        }
        input = {
            'document_id': '42',
        }
        self.assertEqual(self.command.serialize(input), expected)


class TestCustomMethods(TestCommand):

    @patch('jarbas.chamber_of_deputies.management.commands.receipts_text.Command.receipts')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts_text.Command.schedule_update')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts_text.Command.update')
    def test_main(self, update, schedule_update, receipts):
        receipts.return_value = (range(21), range(21, 43))
        self.command.main()
        update.assert_has_calls([call()] * 2)
        schedule_update.assert_has_calls(call(i) for i in range(42))

    @patch.object(Reimbursement.objects, 'get')
    def test_schedule_update_existing_record(self, get):
        reimbursement = Reimbursement()
        get.return_value = reimbursement
        content = {
            'document_id': 42,
            'receipt_text': 'lorem ipsum'
        }
        self.command.queue = []
        self.command.schedule_update(content)
        get.assert_called_once_with(document_id=42)
        self.assertEqual(content['receipt_text'], reimbursement.receipt_text)

    @patch.object(Reimbursement.objects, 'get')
    def test_schedule_update_non_existing_record(self, get):
        get.side_effect = Reimbursement.DoesNotExist
        content = {'document_id': 42}
        self.command.queue = []
        self.command.schedule_update(content)
        get.assert_called_once_with(document_id=42)
        self.assertEqual([], self.command.queue)

    @patch('jarbas.chamber_of_deputies.management.commands.receipts_text.bulk_update')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts_text.print')
    def test_update(self, print_, bulk_update):
        self.command.count = 40
        self.command.queue = list(range(2))
        self.command.update()
        fields = ['receipt_text']
        bulk_update.assert_called_with([0, 1], update_fields=fields)
        print_.assert_called_with('42 reimbursements updated.', end='\r')
        self.assertEqual(42, self.command.count)


class TestConventionMethods(TestCommand):

    @patch('jarbas.chamber_of_deputies.management.commands.receipts_text.Command.receipts')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts_text.Command.main')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts_text.os.path.exists')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts_text.print')
    def test_handler_with_options(self, print_, exists, main, receipts):
        self.command.handle(dataset='receipts-texts.xz', batch_size=42)
        main.assert_called_once_with()
        print_.assert_called_once_with('0 reimbursements updated.')
        self.assertEqual(self.command.path, 'receipts-texts.xz')
        self.assertEqual(self.command.batch_size, 42)

    @patch('jarbas.chamber_of_deputies.management.commands.receipts_text.Command.receipts')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts_text.Command.main')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts_text.os.path.exists')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts_text.print')
    def test_handler_without_options(self, print_, exists, main, receipts):
        self.command.handle(dataset='receipts-texts.xz', batch_size=4096)
        main.assert_called_once_with()
        print_.assert_called_once_with('0 reimbursements updated.')
        self.assertEqual(self.command.path, 'receipts-texts.xz')
        self.assertEqual(self.command.batch_size, 4096)

    @patch('jarbas.chamber_of_deputies.management.commands.receipts_text.Command.receipts')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts_text.Command.main')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts_text.os.path.exists')
    def test_handler_with_non_existing_file(self, exists, update, receipts):
        exists.return_value = False
        with self.assertRaises(FileNotFoundError):
            self.command.handle(dataset='receipts-text.xz', batch_size=4096)
        update.assert_not_called()


class TestFileLoader(TestCommand):

    @patch('jarbas.chamber_of_deputies.management.commands.receipts_text.print')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts_text.lzma')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts_text.csv.DictReader')
    @patch('jarbas.chamber_of_deputies.management.commands.receipts_text.Command.serialize')
    def test_receipts(self, serialize, rows, lzma, print_):
        serialize.return_value = '.'
        lzma.return_value = StringIO()
        rows.return_value = range(42)
        self.command.batch_size = 10
        self.command.path = 'receipts-text.xz'
        expected = [['.'] * 10, ['.'] * 10, ['.'] * 10, ['.'] * 10, ['.'] * 2]
        self.assertEqual(expected, list(self.command.receipts()))
        self.assertEqual(42, serialize.call_count)


class TestAddArguments(TestCase):

    def test_add_arguments(self):
        mock = Mock()
        Command().add_arguments(mock)
        self.assertEqual(2, mock.add_argument.call_count)
{ "content_hash": "a1ca4f535fb613cb1f240e372705f1ba", "timestamp": "", "source": "github", "line_count": 143, "max_line_length": 98, "avg_line_length": 42.06293706293706, "alnum_prop": 0.6801330008312552, "repo_name": "marcusrehm/serenata-de-amor", "id": "c588360b1ad557596a0c972ab95be7148a687a5a", "size": "6015", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "jarbas/chamber_of_deputies/tests/test_receipts_text_command.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "301" }, { "name": "Elm", "bytes": "131019" }, { "name": "HTML", "bytes": "4527" }, { "name": "JavaScript", "bytes": "1468" }, { "name": "Python", "bytes": "425718" }, { "name": "Shell", "bytes": "145" } ], "symlink_target": "" }
from django.views.generic import TemplateView


class HomepageView(TemplateView):
    template_name = 'index.html'
{ "content_hash": "0eea2478ab4b44c2aa39781aacba1f24", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 45, "avg_line_length": 22.8, "alnum_prop": 0.7894736842105263, "repo_name": "andremrsantos/s-score-view", "id": "5314d8692c76ff1c94f5822f627b0cf822d8494b", "size": "114", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "sscore/views.py", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "6594" }, { "name": "Python", "bytes": "14640" } ], "symlink_target": "" }
from rapidsms.apps.base import AppBase


class PingPong(AppBase):

    def handle(self, msg):
        if msg.text == 'ping':
            # when the received msg's text is 'ping', send an answer to the
            # sending number with the text 'pong'
            msg.respond('pong')
            return True
        return False
{ "content_hash": "218f9ed81ac877c0213252bbe91144a2", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 110, "avg_line_length": 33.55555555555556, "alnum_prop": 0.6059602649006622, "repo_name": "dragGH102/rapidsms-example-with-rest-services", "id": "6eedd36551cdf0d17c68df4774678549eade700e", "size": "359", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tut/app.py", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "242" }, { "name": "Python", "bytes": "18069" } ], "symlink_target": "" }
from django.conf.urls import patterns, include, url

from django.contrib import admin
admin.autodiscover()

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'lci.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),

    url(r'^admin/', include(admin.site.urls)),

    # user accounts
    url(r'^accounts/', include('userena.urls')),
)
{ "content_hash": "5df7df523d0d8664ff216ee9b3803d70", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 51, "avg_line_length": 24.466666666666665, "alnum_prop": 0.6321525885558583, "repo_name": "b3ngmann/gratia-sensei", "id": "50cdf6b6fa04815d91e81eef1813963946ea14ad", "size": "367", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lci/urls.py", "mode": "33188", "license": "mit", "language": [ { "name": "Perl", "bytes": "23" }, { "name": "Python", "bytes": "9499" } ], "symlink_target": "" }
""" urlresolver XBMC Addon Copyright (C) 2011 t0mm0 This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ from lib import helpers from urlresolver9 import common from urlresolver9.resolver import UrlResolver, ResolverError class YourUploadResolver(UrlResolver): name = "yourupload.com" domains = ["yourupload.com", "yucache.net"] pattern = '(?://|\.)(yourupload\.com|yucache\.net)/(?:watch|embed)?/?([0-9A-Za-z]+)' def __init__(self): self.net = common.Net() def get_media_url(self, host, media_id): web_url = self.get_url(host, media_id) html = self.net.http_GET(web_url).content url = re.findall('file\s*:\s*(?:\'|\")(.+?)(?:\'|\")', html) if not url: raise ResolverError('No video found') headers = {'User-Agent': common.FF_USER_AGENT, 'Referer': web_url} url = urlparse.urljoin(web_url, url[0]) url = self.net.http_HEAD(url, headers=headers).get_url() url = url + helpers.append_headers(headers) return url raise ResolverError('No video found') def get_url(self, host, media_id): return 'http://www.yourupload.com/embed/%s' % media_id
{ "content_hash": "cf37a60e20fe276781066532f83434c2", "timestamp": "", "source": "github", "line_count": 51, "max_line_length": 88, "avg_line_length": 35.23529411764706, "alnum_prop": 0.6555370061213133, "repo_name": "mrknow/filmkodi", "id": "1fb316d7464ef232b6e27df164a2e33be675437a", "size": "1797", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "script.mrknow.urlresolver/lib/urlresolver9/plugins/yourupload.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "JavaScript", "bytes": "7510" }, { "name": "Python", "bytes": "8058464" }, { "name": "Shell", "bytes": "18531" } ], "symlink_target": "" }
""" Tests For Base Scheduler """ import mock from oslo_config import cfg from oslo_utils import timeutils from manila import context from manila import db from manila.scheduler.drivers import base from manila import test from manila import utils CONF = cfg.CONF class SchedulerTestCase(test.TestCase): """Test case for base scheduler driver class.""" # So we can subclass this test and re-use tests if we need. driver_cls = base.Scheduler def setUp(self): super(SchedulerTestCase, self).setUp() self.driver = self.driver_cls() self.context = context.RequestContext('fake_user', 'fake_project') self.topic = 'fake_topic' def test_update_service_capabilities(self): service_name = 'fake_service' host = 'fake_host' capabilities = {'fake_capability': 'fake_value'} with mock.patch.object(self.driver.host_manager, 'update_service_capabilities', mock.Mock()): self.driver.update_service_capabilities( service_name, host, capabilities) self.driver.host_manager.update_service_capabilities.\ assert_called_once_with(service_name, host, capabilities) def test_hosts_up(self): service1 = {'host': 'host1'} service2 = {'host': 'host2'} services = [service1, service2] def fake_service_is_up(*args, **kwargs): if args[0]['host'] == 'host1': return False return True with mock.patch.object(db, 'service_get_all_by_topic', mock.Mock(return_value=services)): with mock.patch.object(utils, 'service_is_up', mock.Mock(side_effect=fake_service_is_up)): result = self.driver.hosts_up(self.context, self.topic) self.assertEqual(['host2'], result) db.service_get_all_by_topic.assert_called_once_with( self.context, self.topic) class SchedulerDriverBaseTestCase(SchedulerTestCase): """Test cases for base scheduler driver class methods. These can't fail if the driver is changed. """ def test_unimplemented_schedule(self): fake_args = (1, 2, 3) fake_kwargs = {'cat': 'meow'} self.assertRaises(NotImplementedError, self.driver.schedule, self.context, self.topic, 'schedule_something', *fake_args, **fake_kwargs) class SchedulerDriverModuleTestCase(test.TestCase): """Test case for scheduler driver module methods.""" def setUp(self): super(SchedulerDriverModuleTestCase, self).setUp() self.context = context.RequestContext('fake_user', 'fake_project') @mock.patch.object(db, 'share_update', mock.Mock()) def test_share_host_update_db(self): with mock.patch.object(timeutils, 'utcnow', mock.Mock(return_value='fake-now')): base.share_update_db(self.context, 31337, 'fake_host') db.share_update.assert_called_once_with( self.context, 31337, {'host': 'fake_host', 'scheduled_at': 'fake-now'})
{ "content_hash": "31356c06b1729b4ce907fceb704bf63d", "timestamp": "", "source": "github", "line_count": 90, "max_line_length": 78, "avg_line_length": 35.65555555555556, "alnum_prop": 0.6076659395450296, "repo_name": "NetApp/manila", "id": "c0f48b48098bae50f8fd27c79003cd1abb740f30", "size": "3940", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "manila/tests/scheduler/drivers/test_base.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Mako", "bytes": "953" }, { "name": "Python", "bytes": "8111068" }, { "name": "Shell", "bytes": "91643" } ], "symlink_target": "" }
'''
Created on Dec 15, 2011

@author: uqtdettr
'''
import urllib2

from django.conf import settings
from django.contrib.auth.models import User

from tardis.tardis_portal.auth.interfaces import AuthProvider
from tardis.tardis_portal.auth.utils import configure_user


class HttpBasicEndpointAuth(AuthProvider):
    '''
    This class provides authentication against a HTTP resource protected
    by HTTP Basic authentication. Access is granted based on the user
    credentials being valid against that resource.
    '''

    class SimplePasswordMgr(urllib2.HTTPPasswordMgr):
        '''
        Simple password manager which provides the same credentials, no
        matter the realm or the uri.
        '''

        def __init__(self):
            self.clear()

        def add_password(self, realm, uri, username, password):
            self.credentials = (username, password)

        def find_user_password(self, realm, authuri):
            return self.credentials

        def clear(self):
            self.credentials = (None, None)

    def __init__(self, openerDirector=urllib2.build_opener(), endpoint=None):
        self.passman = self.SimplePasswordMgr()
        openerDirector.add_handler(urllib2.HTTPBasicAuthHandler(self.passman))
        self.openerDirector = openerDirector
        self.endpoint = endpoint or settings.HTTP_BASIC_AUTH_ENDPOINT

    def authenticate(self, request):
        """
        Authenticate a user, expecting the user will be using form-based
        auth and the *username* and *password* will be passed in
        url-encoded form **POST** variables.

        :param request: a HTTP Request instance
        :type request: :class:`django.http.HttpRequest`
        """
        username = request.POST['username']
        password = request.POST['password']
        if self._openEndpointWithCredentials(username, password) is None:
            return None
        try:
            user = User.objects.get(username=username)
        except User.DoesNotExist:
            user = User.objects.create_user(username, '')
            user.save()
            configure_user(user)
        # We don't want a localdb user created, so don't use a dict
        return user

    def get_user(self, user_id):
        try:
            user = User.objects.get(username=user_id)
        except User.DoesNotExist:
            user = None
        return user

    def _set_password(self, username, password):
        self.passman.clear()
        self.passman.add_password(None, None, username, password)

    def _openEndpointWithCredentials(self, username, password):
        self._set_password(username, password)
        try:
            result = self.openerDirector.open(self.endpoint)
            return result
        except urllib2.HTTPError:
            return None
{ "content_hash": "891052d66035e3b3fd37115a7d8bab05", "timestamp": "", "source": "github", "line_count": 84, "max_line_length": 78, "avg_line_length": 33.19047619047619, "alnum_prop": 0.648493543758967, "repo_name": "pansapiens/mytardis", "id": "6974a2e46ffe050df4d0ced11cabc1a4709619c3", "size": "2788", "binary": false, "copies": "3", "ref": "refs/heads/develop", "path": "tardis/tardis_portal/auth/httpbasicendpoint_auth.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "54456" }, { "name": "HTML", "bytes": "335457" }, { "name": "JavaScript", "bytes": "356177" }, { "name": "Python", "bytes": "2082865" }, { "name": "Shell", "bytes": "2971" } ], "symlink_target": "" }
import time

import cherrypy

from girder.api.rest import getCurrentToken
from girder.utility.model_importer import ModelImporter

LoadModelCache = {}
LoadModelCacheMaxEntries = 100
LoadModelCacheExpiryDuration = 300  # seconds


def invalidateLoadModelCache(*args, **kwargs):
    """
    Empty the LoadModelCache.
    """
    LoadModelCache.clear()


def loadModel(resource, model, plugin='_core', id=None, allowCookie=False,
              level=None):
    """
    Load a model based on id using the current cherrypy token parameter for
    authentication, caching the results.  This must be called in a cherrypy
    context.

    :param resource: the resource class instance calling the function.  Used
        for access to the current user and model importer.
    :param model: the model name, e.g., 'item'.
    :param plugin: the plugin name when loading a plugin model.
    :param id: a string id of the model to load.
    :param allowCookie: true if the cookie authentication method is allowed.
    :param level: access level desired.
    :returns: the loaded model.
    """
    key = tokenStr = None
    if 'token' in cherrypy.request.params:  # Token as a parameter
        tokenStr = cherrypy.request.params.get('token')
    elif 'Girder-Token' in cherrypy.request.headers:
        tokenStr = cherrypy.request.headers['Girder-Token']
    elif 'girderToken' in cherrypy.request.cookie and allowCookie:
        tokenStr = cherrypy.request.cookie['girderToken'].value
    key = (model, tokenStr, id)
    cacheEntry = LoadModelCache.get(key)
    if cacheEntry and cacheEntry['expiry'] > time.time():
        entry = cacheEntry['result']
        cacheEntry['hits'] += 1
    else:
        # we have to get the token separately from the user if we are using
        # cookies.
        if allowCookie:
            getCurrentToken(allowCookie)
            cherrypy.request.girderAllowCookie = True
        entry = ModelImporter.model(model, plugin).load(
            id=id, level=level, user=resource.getCurrentUser())
        # If the cache becomes too large, just dump it -- this is simpler
        # than dropping the oldest values and avoids having to add locking.
        if len(LoadModelCache) > LoadModelCacheMaxEntries:
            LoadModelCache.clear()
        LoadModelCache[key] = {
            'id': id,
            'model': model,
            'tokenId': tokenStr,
            'expiry': time.time() + LoadModelCacheExpiryDuration,
            'result': entry,
            'hits': 0
        }
    return entry
{ "content_hash": "c365614b253133f9d2de1dcf49228444", "timestamp": "", "source": "github", "line_count": 68, "max_line_length": 76, "avg_line_length": 37.10294117647059, "alnum_prop": 0.6611177170035671, "repo_name": "girder/large_image", "id": "fbb94db018723a1a655fa72c7ea723608507ccf5", "size": "3261", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "girder/girder_large_image/loadmodelcache.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "7114" }, { "name": "JavaScript", "bytes": "307859" }, { "name": "Pug", "bytes": "21406" }, { "name": "Python", "bytes": "1371949" }, { "name": "Shell", "bytes": "5500" }, { "name": "Stylus", "bytes": "4261" } ], "symlink_target": "" }
"""mod_steps -- behavior steps for Mod features. """ from behave import given, when, then @when(u'I subclass Mod') def step(context): class OfDoom(context.blueprint.Mod): name = context.blueprint.FormatTemplate('{meta.source.name} of DOOM') value = lambda _: _.meta.source.value * 20 damage = lambda _: _.meta.source.damage * 20 context.OfDoom = OfDoom class MagicalItemPrefix(context.blueprint.Mod): prefix = context.blueprint.PickOne( 'Gnarled', 'Inscribed', 'Magnificent', ) name = context.blueprint.depends_on('prefix')( context.blueprint.FormatTemplate('{parent.prefix} {meta.source.name}')) context.MagicalItemPrefix = MagicalItemPrefix @then(u'I can mod a Club to create a modified Club of DOOM') def step(context): club = context.OfDoom(context.Club) assert isinstance(club, context.Club) assert club.name == 'Big Club of DOOM' assert club.damage >= 200, club.damage
{ "content_hash": "327cd914d4cb7443acb1570bfcedaab9", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 83, "avg_line_length": 32.67741935483871, "alnum_prop": 0.6495557749259625, "repo_name": "0ion9/blueprint", "id": "504047024b407630de676e289a7aa60103a7e5d6", "size": "1037", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "features/steps/mod_steps.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "63685" } ], "symlink_target": "" }
from iptest.assert_util import *

import _random

#getrandbits
def test_getrandbits():
    #the argument is a random int value
    rand = _random.Random()
    for i1 in xrange(1, 1984, 6):
        Assert(rand.getrandbits(i1) < (2**i1))
        Assert(rand.getrandbits(i1) < (2**i1))
        Assert(rand.getrandbits(i1+1) < (2**(i1+1)))
        Assert(rand.getrandbits(i1+1) < (2**(i1+1)))

    temp_list = [63,   #maxvalue
                 32,   #bits less than 32
                 50,   #bits greater than 32 and less than 64
                 100,  #bits greater than 64
                ]
    for x in temp_list:
        Assert(rand.getrandbits(x) < (2**x))

    rand = _random.Random()
    AssertError(ValueError, rand.getrandbits, 0)
    AssertError(ValueError, rand.getrandbits, -50)

    # might raise OverflowError, might not, but shouldn't raise anything else.
    try:
        rand.getrandbits(2147483647)
    except OverflowError:
        pass

#jumpahead
def test_jumpahead():
    rand = _random.Random()
    old_state = rand.getstate()
    rand.jumpahead(100)
    #CodePlex Work Item 8294
    #Assert(old_state != rand.getstate())

#random
def test_random():
    rand = _random.Random()
    result = rand.random()
    flag = result < 1.0 and result >= 0.0
    Assert(flag,
           "Result is not the value as expected, expected the result between 0.0 to 1.0, but the actual is not")

#setstate
def test_setstate():
    # state is object which
    random = _random.Random()
    state1 = random.getstate()
    random.setstate(state1)
    state2 = random.getstate()
    AreEqual(state1, state2)

    random.jumpahead(1)
    #CodePlex Work Item 8294
    #Assert(state1 != random.getstate())
    random.setstate(state1)
    AreEqual(state1, random.getstate())

    #state is a int object
    a = 1
    AssertError(Exception, random.setstate, a)

    #state is a string object
    b = "stete"
    AssertError(Exception, random.setstate, b)

    #state is a random object
    c = _random.Random()
    AssertError(Exception, random.setstate, c)

#getstate
def test_getstate():
    random = _random.Random()
    a = random.getstate()
    AreEqual(a, random.getstate())

    i = 2
    random = _random.Random(i)
    b = random.getstate()
    AreEqual(b, random.getstate())

    str = "state"
    random = _random.Random(str)
    c = random.getstate()
    AreEqual(c, random.getstate())

#seed
def test_seed():
    i = 2
    random = _random.Random(i)
    a = random.getstate()

    # parameter is None
    random.seed()
    b = random.getstate()
    if a == b:
        Fail("seed() method can't change the current internal state of the generator.")

    # parameter is int
    x = 1
    random.seed(x)
    c = random.getstate()
    if b == c or a == c:
        Fail("seed(x) method can't change the current internal state of the generator when x is \
            int type.")

    # parameter is string
    x = "seed"
    random.seed(x)
    d = random.getstate()
    if d == c or b == d or a == d:
        Fail("seed(x) method can't change the current internal state of the generator when x is \
            string type.")

run_test(__name__)
{ "content_hash": "fbe4bfa6b4b1346abac0a1374cf1559a", "timestamp": "", "source": "github", "line_count": 128, "max_line_length": 110, "avg_line_length": 25.171875, "alnum_prop": 0.5943513345747983, "repo_name": "paweljasinski/ironpython3", "id": "6bb04e7944df1184732aa421bba6cb2455eb6ecf", "size": "3948", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "Tests/modules/misc/_random_test.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "11099" }, { "name": "C#", "bytes": "12284108" }, { "name": "CSS", "bytes": "96" }, { "name": "Groff", "bytes": "21080" }, { "name": "HTML", "bytes": "13117230" }, { "name": "Makefile", "bytes": "662" }, { "name": "PLSQL", "bytes": "22886" }, { "name": "PowerShell", "bytes": "62360" }, { "name": "Python", "bytes": "27267678" }, { "name": "R", "bytes": "4949" }, { "name": "Ruby", "bytes": "19" }, { "name": "Shell", "bytes": "5147" }, { "name": "Visual Basic", "bytes": "481" } ], "symlink_target": "" }
from twisted.conch.ssh import keys, userauth
from twisted.internet import defer


class AuthClient(userauth.SSHUserAuthClient):

    def __init__(self, options, *args):
        userauth.SSHUserAuthClient.__init__(self, str(options["user"]), *args)
        self.options = options

    def serviceStarted(self):
        userauth.SSHUserAuthClient.serviceStarted(self)

    def serviceStopped(self):
        pass

    def getPassword(self, prompt=None):
        return defer.succeed(str(self.options["password"]))

    def getPublicKey(self):
        return defer.succeed(keys.Key.fromString(self.options["privkey"]))

    def signData(self, publicKey, signData):
        return userauth.SSHUserAuthClient.signData(self, publicKey, signData)

    def getPrivateKey(self):
        return defer.succeed(keys.Key.fromString(self.options["privkey"]))
{ "content_hash": "f02a666e177ccf8228246ea35171da2d", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 78, "avg_line_length": 32.5, "alnum_prop": 0.7005917159763314, "repo_name": "kradalby/webmux", "id": "3ab2d878cc4f46451a0716849c9c716c38b73970", "size": "845", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "webmux/auth.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "3654" }, { "name": "HTML", "bytes": "20174" }, { "name": "JavaScript", "bytes": "344586" }, { "name": "Python", "bytes": "35209" } ], "symlink_target": "" }
import logging

import simplejson as json

from django import forms
from django.core.serializers.json import DjangoJSONEncoder
from django.db import models
from django.utils.functional import curry
from django.utils.translation import ugettext_lazy as _

import plata

try:
    json.dumps([42], use_decimal=True)
except TypeError:
    raise Exception('simplejson>=2.1 with support for use_decimal required.')


#: Field offering all defined currencies
CurrencyField = curry(models.CharField, _('currency'), max_length=3,
    choices=zip(plata.settings.CURRENCIES, plata.settings.CURRENCIES))


class JSONFormField(forms.fields.CharField):
    def clean(self, value, *args, **kwargs):
        if value:
            try:
                # Run the value through JSON so we can normalize formatting
                # and at least learn about malformed data:
                value = json.dumps(json.loads(value, use_decimal=True),
                    cls=DjangoJSONEncoder, use_decimal=True)
            except ValueError:
                raise forms.ValidationError("Invalid JSON data!")

        return super(JSONFormField, self).clean(value, *args, **kwargs)


class JSONField(models.TextField):
    """
    TextField which transparently serializes/unserializes JSON objects

    See:
    http://www.djangosnippets.org/snippets/1478/
    """

    # Used so to_python() is called
    __metaclass__ = models.SubfieldBase

    formfield = JSONFormField

    def to_python(self, value):
        """Convert our string value to JSON after we load it from the DB"""
        if isinstance(value, dict):
            return value
        elif isinstance(value, basestring):
            # Avoid asking the JSON decoder to handle empty values:
            if not value:
                return {}

            try:
                return json.loads(value, use_decimal=True)
            except ValueError:
                logging.getLogger("plata.fields").exception(
                    "Unable to deserialize store JSONField data: %s", value)
                return {}
        else:
            assert value is None
            return {}

    def get_prep_value(self, value):
        """Convert our JSON object to a string before we save"""
        return self._flatten_value(value)

    def value_to_string(self, obj):
        """Extract our value from the passed object and return it in string form"""
        if hasattr(obj, self.attname):
            value = getattr(obj, self.attname)
        else:
            assert isinstance(obj, dict)
            value = obj.get(self.attname, "")

        return self._flatten_value(value)

    def _flatten_value(self, value):
        """Return either a string, JSON-encoding dict()s as necessary"""
        if not value:
            return ""

        if isinstance(value, dict):
            value = json.dumps(value, cls=DjangoJSONEncoder, use_decimal=True)

        assert isinstance(value, basestring)
        return value

    def value_from_object(self, obj):
        return json.dumps(super(JSONField, self).value_from_object(obj),
            cls=DjangoJSONEncoder, use_decimal=True)


try:
    from south.modelsinspector import add_introspection_rules

    JSONField_introspection_rule = (
        (JSONField,), [], {},
    )

    add_introspection_rules(rules=[JSONField_introspection_rule],
        patterns=["^plata\.fields"])
except ImportError:
    pass
{ "content_hash": "f1dd4f8dad962a67bafa79f8f7cd53d2", "timestamp": "", "source": "github", "line_count": 108, "max_line_length": 116, "avg_line_length": 30.953703703703702, "alnum_prop": 0.6422375112174693, "repo_name": "allink/plata", "id": "6c9d857c66396249803707c45a8b97d098cd8233", "size": "3343", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "plata/fields.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "383270" } ], "symlink_target": "" }
from django.conf.urls import patterns, url
{ "content_hash": "09eb5f5f7aa005a123b12276060f113a", "timestamp": "", "source": "github", "line_count": 1, "max_line_length": 42, "avg_line_length": 43, "alnum_prop": 0.813953488372093, "repo_name": "lfalvarez/votai-theme", "id": "025ff92653a0a4e22895332e4444f3c60ed986fc", "size": "43", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "votai_theme/urls.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "23137" }, { "name": "HTML", "bytes": "113221" }, { "name": "JavaScript", "bytes": "1557" }, { "name": "Makefile", "bytes": "1243" }, { "name": "Python", "bytes": "4433" } ], "symlink_target": "" }
""" Write a function that takes minlen and a list of words, and returns only the words longer than minlen """ def longer_than(minlen, *args): return [word for word in args if len(word) > minlen] print longer_than(4, "foo", "bar", "fantastic", "python", "abc")
{ "content_hash": "b512fc3757adcab7fa0b0c621dd11a25", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 64, "avg_line_length": 22.333333333333332, "alnum_prop": 0.6828358208955224, "repo_name": "nonZero/python-examples", "id": "18a7b9dce2d4398f5978d147dce331d6047192e8", "size": "268", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "14_functions_lab/04.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "39479" } ], "symlink_target": "" }
import random

from django.db import models
from django import forms

from staging.generators import BaseGenerator


class ValueFromListForm(forms.Form):
    lines = forms.CharField(widget=forms.Textarea)


class NotInitialized():
    pass


class Generator(BaseGenerator):
    name = 'Value from list'
    slug = 'value-from-list-int'
    for_fields = [models.BigIntegerField, models.DecimalField,
                  models.IntegerField, models.PositiveIntegerField,
                  models.PositiveSmallIntegerField, models.SmallIntegerField]
    options_form = ValueFromListForm

    def __init__(self):
        self.lines_left = NotInitialized

    def save(self, obj, field, form_data):
        if field.unique:
            if self.lines_left == NotInitialized:
                self.lines_left = form_data.get('lines').split('\n')
            setattr(obj, field.name, self._generate_unique())
        else:
            setattr(obj, field.name, self._generate(form_data.get('lines')))

    def _generate(self, text):
        lines = text.split('\n')
        return int(random.choice(lines))

    def _generate_unique(self):
        if self.lines_left:
            choice = random.choice(self.lines_left)
            # Filter on the chosen string before converting to int, so the
            # comparison actually matches and the value is not drawn again.
            self.lines_left = [x for x in self.lines_left if x != choice]
            return int(choice)
{ "content_hash": "e0a21585924f801df6a95b5391370a86", "timestamp": "", "source": "github", "line_count": 41, "max_line_length": 112, "avg_line_length": 31.463414634146343, "alnum_prop": 0.6496124031007752, "repo_name": "vparitskiy/django-staging", "id": "3c5edd36a732b075f5e397be6a1ac08cc0e9656f", "size": "1290", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "staging/generators/value_from_list_int.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "130" }, { "name": "HTML", "bytes": "7937" }, { "name": "JavaScript", "bytes": "14926" }, { "name": "Python", "bytes": "47690" } ], "symlink_target": "" }
import sys, time, math
import datetime

sys.path.append('/var/lib/virtdc/framework')

from VM_Info_Updater import addOrUpdateDictionaryOfVM
from Guest import Guest
from VM_migrateGuest import vm_migrate_guest
from VM_cpuScaling import vm_cpu_scaling
from VM_memoryScaling import vm_memory_scaling, vm_max_memory_scaling
from VM_decisionMaker import NodeFinder
from virtdc_command_line_utility import get_host_name, get_domain_object
from Host_Info_Tracker import GetNodeDict

#API for VM placement manager

#==============================================================================
# Variables
#==============================================================================

# Some descriptive variables
#name = "virtdc"
#version = "0.1.0"
#long_description = """virtdc is a set of API's/tools written to create virtual machines for cloud users efficiently."""
#url = "https://github.com/dineshappavoo/virtdc"
#license = ""

#==============================================================================

#Global variables

#Memory threshold values
mem_scale_up_threshold = '10240'     # 10 MB
mem_scale_down_threshold = '102400'  # 100 MB
time_threshold = '300'               # 5 minutes
_base_mem_size = 2097152             # 2 GB (This includes OS memory)


def initiateLiveMigration(vmid, sourcenode, destnode):
    a = 0  #Not used


def makeMemScalingDecision(hostName, guest, memoryUsage, time):
    memoryAlloted = guest.current_memory
    maxMemory = ''
    # if(memoryUsage>memoryAlloted):
    #     if(memoryUsage<maxMemory):
    #         initiateMemScaleUpOrDown(hostName, guest.vmid, memoryUsage)
    #     else:
    #         requiredExtraMemory=float(memoryUsage)-float(maxMemory)
    #         if(requiredExtraMemory>memScaleUpThreshold):
    #             #report user through email
    #             #if needed do a memory scale up and charge more based on SLA
    # else:
    #     initiateMemScaleUpOrDown(hostName, guest.vmid, memoryUsage)


def report_usage_to_placement_manager(vmid, cpu_usage, mem_usage, io_usage):
    try:
        host = get_host_name(vmid)
        domain_object = get_domain_object(vmid)
        #print domain_object
        process_action_on_current_usage(host, vmid, domain_object,
                                        float(cpu_usage), float(mem_usage),
                                        float(io_usage))
    except Exception as e:
        print e


#Process current usage and take action based on SLA
def process_action_on_current_usage(host, vmid, value, cpu_usage, mem_usage, io_usage):
    node_dict = GetNodeDict()
    #for key, value in node_dict.iteritems():
    #    print key, value.hostname, value.ip_address, value.max_cpu, value.max_memory, value.max_io, value.avail_cpu, value.avail_memory, value.avail_io

    #Log activity
    manager_activity_log = open('/var/lib/virtdc/logs/activity_logs/manager.log', 'a+')
    manager_activity_log.write(str(datetime.datetime.now())+'::PLACEMENT MANAGER::MEMORY::'+host+' :: '+vmid+' :: Alotted Memory '+str(value.current_memory)+' :: Current Memory '+str(mem_usage)+'\n')
    manager_activity_log.write(str(datetime.datetime.now())+'::PLACEMENT MANAGER::CPU::'+host+' :: '+vmid+' :: Alotted CPU '+str(value.current_cpu)+' :: Current CPU '+str(cpu_usage)+'\n')

    obj = NodeFinder()

    max_cpu = value.max_cpu
    #print 'Max CPU '+str(max_cpu)
    allotted_cpu = float(value.current_cpu)
    #print 'Allocate CPU : '+str(allotted_cpu)

    #Base OS should not go below the minimum memory
    mem_usage = float(mem_usage) + float(_base_mem_size)
    allotted_memory = float(value.current_memory)
    max_memory = float(value.max_memory)

    #Check CPU usage, regarding a 0.1 margin as eligible to scale up
    if (cpu_usage + 0.1 > allotted_cpu) and (cpu_usage < max_cpu):
        if obj.is_cpu_available_on_host(host, 1):
            #print 'Test 2'
            new_cpu_value = value.current_cpu + 1
            print "New CPU: %s" % new_cpu_value
            vm_cpu_scaling(host, vmid, value.vmip, new_cpu_value)
            manager_activity_log.write(str(datetime.datetime.now())+'::PLACEMENT MANAGER::CPU::Scaling ::'+host+' :: '+vmid+' :: CPU scaled from '+str(value.current_cpu)+' to '+str(cpu_usage)+'\n')
        else:
            print 'Test 3'
            new_host = obj.is_space_available_for_vm(cpu_usage, mem_usage, io_usage)
            if new_host is None:
                print "Cant migrate guest"
            else:
                print 'Dest Node : '+new_host
                #Initiate vm migration
                migrate_flag = vm_migrate_guest(host, new_host, vmid)
                if migrate_flag:
                    manager_activity_log.write(str(datetime.datetime.now())+'::PLACEMENT MANAGER::CPU::Migration ::'+host+' :: '+vmid+' :: Domain migrated from '+str(host)+' to '+str(new_host)+' for CPU Scaling from'+str(value.current_cpu)+' to '+str(cpu_usage)+'\n')
                else:
                    manager_activity_log.write(str(datetime.datetime.now())+'::PLACEMENT MANAGER::CPU::Migration ::'+host+' :: '+vmid+' :: Cannot migrate from '+str(host)+' to '+str(new_host)+' for CPU Scaling from'+str(value.current_cpu)+' to '+str(cpu_usage)+'\n')

    #if( (cpu_usage>current_cpu) and (cpu_usage<max_cpu) ): -- CPU scaling down is not implemented

    #Check Memory Usage - Memory scale up will be initiated when usage is greater than usage+scaleup_threshold
    #print "MEM USAGE: " + str(mem_usage)
    #print "Alloc mem: " + str(allotted_memory)
    #print "threadhold: " + str(float(mem_scale_up_threshold))
    #print "Max mem: " + str(max_memory)
    if (mem_usage > (allotted_memory + float(mem_scale_up_threshold))) and (mem_usage < max_memory):
        required_extra_memory = mem_usage - allotted_memory
        print "Required Mem: " + str(required_extra_memory)
        if obj.is_mem_available_on_host(host, required_extra_memory):
            vm_memory_scaling(host, vmid, float(mem_usage))
            manager_activity_log.write(str(datetime.datetime.now())+'::PLACEMENT MANAGER::MEMORY::Scaling ::'+str(host)+' :: '+str(vmid)+' :: Memory scaled from '+str(allotted_memory)+' to '+str(mem_usage)+'\n')
        else:
            new_host = obj.is_space_available_for_vm(cpu_usage, mem_usage, io_usage)
            if new_host is None:
                print "Cant migrate Guest"
            else:
                #Initiate vm migration
                migrate_flag = vm_migrate_guest(host, new_host, vmid)
                if migrate_flag:
                    manager_activity_log.write(str(datetime.datetime.now())+'::PLACEMENT MANAGER::MEMORY::Migration ::'+host+' :: '+vmid+' :: Domain migrated from '+str(host)+' to '+str(new_host)+' for Memory Scaling from'+str(value.current_memory)+' to '+str(mem_usage)+'\n')
                else:
                    manager_activity_log.write(str(datetime.datetime.now())+'::PLACEMENT MANAGER::MEMORY::Migration ::'+host+' :: '+vmid+' :: Cannot migrate from '+str(host)+' to '+str(new_host)+' for Memory Scaling from'+str(value.current_memory)+' to '+str(mem_usage)+'\n')

    #To scale down memory - Memory scale down will be initiated when usage is lower than usage-scaledown_threshold
    elif (mem_usage < (allotted_memory - float(mem_scale_down_threshold))) and (mem_usage < max_memory):
        vm_memory_scaling(host, vmid, float(mem_usage))
        manager_activity_log.write(str(datetime.datetime.now())+'::PLACEMENT MANAGER::MEMORY::Scaling ::'+str(host)+' :: '+str(vmid)+' :: Memory scaled from '+str(allotted_memory)+' to '+str(mem_usage)+'\n')


def initiateNodeLoadBalacing():
    #Initiate the module to do the node load balancing
    b = 0


def initiateVMConsolidation():
    #Initiate the module for VM consolidation
    c = 0


def decisionManager():
    d = 0


def reactOnHotSpot():
    e = 0


if __name__ == "__main__":
    # stuff only to run when not called via 'import' here
    report_usage_to_placement_manager('VM_Task_11', '2.0', '15382', '0.3')
    #process_action_on_current_usage('node1', 'VM_Task_1', Guest("192.168.1.14", "Task1", float(1), float(3), float(42424345353), float(424242), float(1), time.time()), '1.0', '424242', '42424345353')
{ "content_hash": "345ed54c10dca47fbd46eec9872a34d0", "timestamp": "", "source": "github", "line_count": 163, "max_line_length": 261, "avg_line_length": 46.423312883435585, "alnum_prop": 0.6739791198625611, "repo_name": "OnePaaS/virtdc", "id": "04d651494cda9236cc83e60a518bad2e54c1e645", "size": "7586", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "manager/VM_PlacementManager.py", "mode": "33261", "license": "mit", "language": [ { "name": "C", "bytes": "3877" }, { "name": "Groff", "bytes": "3148" }, { "name": "Java", "bytes": "29095" }, { "name": "Python", "bytes": "176090" }, { "name": "Shell", "bytes": "11958" } ], "symlink_target": "" }
from tryhaskell import TryHaskell

TryHaskell.repl()
{ "content_hash": "f8012492abc505f9337b9b07551c888e", "timestamp": "", "source": "github", "line_count": 3, "max_line_length": 33, "avg_line_length": 17.666666666666668, "alnum_prop": 0.8301886792452831, "repo_name": "carymrobbins/py-tryhaskell", "id": "94f3defbcb6501992f83388fff5754117fe2bfd3", "size": "53", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tryhaskell/__main__.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "7205" } ], "symlink_target": "" }
import exceptions
import hmac
import random
import socket
import struct

from perfrunner.lib.mc_bin_client import memcacheConstants
from perfrunner.lib.mc_bin_client.memcacheConstants import (
    INCRDECR_RES_FMT,
    MIN_RECV_PACKET,
    REQ_MAGIC_BYTE,
    REQ_PKT_FMT,
    RES_MAGIC_BYTE,
    RES_PKT_FMT,
    SET_PKT_FMT,
)


class MemcachedError(exceptions.Exception):
    """Error raised when a command fails."""

    def __init__(self, status, msg):
        supermsg = 'Memcached error #' + repr(status)
        if msg:
            supermsg += ": " + msg
        exceptions.Exception.__init__(self, supermsg)
        self.status = status
        self.msg = msg

    def __repr__(self):
        return "<MemcachedError #%d ``%s''>" % (self.status, self.msg)


class MemcachedClient(object):
    """Simple memcached client."""

    vbucketId = 0

    def __init__(self, host='127.0.0.1', port=11211, family=socket.AF_INET):
        self.host = host
        self.port = port
        self.s = socket.socket(family, socket.SOCK_STREAM)
        if hasattr(socket, 'AF_UNIX') and family == socket.AF_UNIX:
            self.s.connect_ex(host)
        else:
            self.s.connect_ex((host, port))
        self.r = random.Random()

    def close(self):
        self.s.close()

    def __del__(self):
        self.close()

    def _sendCmd(self, cmd, key, val, opaque, extraHeader='', cas=0):
        self._sendMsg(cmd, key, val, opaque, extraHeader=extraHeader,
                      cas=cas, vbucketId=self.vbucketId)

    def _sendMsg(self, cmd, key, val, opaque, extraHeader='', cas=0,
                 dtype=0, vbucketId=0, fmt=REQ_PKT_FMT, magic=REQ_MAGIC_BYTE):
        msg = struct.pack(fmt, magic, cmd, len(key), len(extraHeader), dtype,
                          vbucketId, len(key) + len(extraHeader) + len(val),
                          opaque, cas)
        self.s.send(msg + extraHeader + key + val)

    def _recvMsg(self):
        response = ""
        while len(response) < MIN_RECV_PACKET:
            data = self.s.recv(MIN_RECV_PACKET - len(response))
            if data == '':
                raise exceptions.EOFError("Got empty data (remote died?).")
            response += data
        assert len(response) == MIN_RECV_PACKET
        magic, cmd, keylen, extralen, dtype, errcode, remaining, opaque, cas =\
            struct.unpack(RES_PKT_FMT, response)

        rv = ""
        while remaining > 0:
            data = self.s.recv(remaining)
            if data == '':
                raise exceptions.EOFError("Got empty data (remote died?).")
            rv += data
            remaining -= len(data)

        assert (magic in (RES_MAGIC_BYTE, REQ_MAGIC_BYTE)),\
            "Got magic: %d" % magic
        return cmd, errcode, opaque, cas, keylen, extralen, rv

    def _handleKeyedResponse(self, myopaque):
        cmd, errcode, opaque, cas, keylen, extralen, rv = self._recvMsg()
        assert myopaque is None or opaque == myopaque, \
            "expected opaque %x, got %x" % (myopaque, opaque)
        if errcode != 0:
            raise MemcachedError(errcode, rv)
        return cmd, opaque, cas, keylen, extralen, rv

    def _handleSingleResponse(self, myopaque):
        cmd, opaque, cas, keylen, extralen, data = \
            self._handleKeyedResponse(myopaque)
        return opaque, cas, data

    def _doCmd(self, cmd, key, val, extraHeader='', cas=0):
        """Send a command and await its response."""
        opaque = self.r.randint(0, 2 ** 32)
        self._sendCmd(cmd, key, val, opaque, extraHeader, cas)
        return self._handleSingleResponse(opaque)

    def _mutate(self, cmd, key, exp, flags, cas, val):
        return self._doCmd(cmd, key, val,
                           struct.pack(SET_PKT_FMT, flags, exp), cas)

    def _cat(self, cmd, key, cas, val):
        return self._doCmd(cmd, key, val, '', cas)

    def append(self, key, value, cas=0):
        return self._cat(memcacheConstants.CMD_APPEND, key, cas, value)

    def prepend(self, key, value, cas=0):
        return self._cat(memcacheConstants.CMD_PREPEND, key, cas, value)

    def __incrdecr(self, cmd, key, amt, init, exp):
        something, cas, val = self._doCmd(
            cmd, key, '',
            struct.pack(memcacheConstants.INCRDECR_PKT_FMT, amt, init, exp)
        )
        return struct.unpack(INCRDECR_RES_FMT, val)[0], cas

    def incr(self, key, amt=1, init=0, exp=0):
        """Increment or create the named counter."""
        return self.__incrdecr(memcacheConstants.CMD_INCR, key, amt, init, exp)

    def decr(self, key, amt=1, init=0, exp=0):
        """Decrement or create the named counter."""
        return self.__incrdecr(memcacheConstants.CMD_DECR, key, amt, init, exp)

    def _doMetaCmd(self, cmd, key, value, cas, exp, flags, seqno, remote_cas):
        extra = struct.pack('>IIQQ', flags, exp, seqno, remote_cas)
        return self._doCmd(cmd, key, value, extra, cas)

    def _doRevCmd(self, cmd, key, exp, flags, value, rev, cas=0):
        seqno, revid = rev
        meta_data = struct.pack('>I', seqno) + revid
        meta_type = memcacheConstants.META_REVID
        meta = (meta_type, meta_data)
        return self._doMetaCmd(cmd, key, exp, flags, value, meta, cas)

    def set(self, key, exp, flags, val):
        """Set a value in the memcached server."""
        return self._mutate(memcacheConstants.CMD_SET, key, exp, flags, 0, val)

    def setWithMeta(self, key, value, exp, flags, seqno, remote_cas):
        """Set a value and its meta data in the memcached server."""
        return self._doMetaCmd(memcacheConstants.CMD_SET_WITH_META,
                               key, value, 0, exp, flags, seqno, remote_cas)

    def setWithRev(self, key, exp, flags, value, rev):
        """Set a value and its revision in the memcached server."""
        return self._doRevCmd(memcacheConstants.CMD_SET_WITH_META,
                              key, exp, flags, value, rev)

    def add(self, key, exp, flags, val):
        """Add a value in the memcached server iff it doesn't already exist."""
        return self._mutate(memcacheConstants.CMD_ADD, key, exp, flags, 0, val)

    def addWithMeta(self, key, value, exp, flags, seqno, remote_cas):
        return self._doMetaCmd(memcacheConstants.CMD_ADD_WITH_META,
                               key, value, 0, exp, flags, seqno, remote_cas)

    def addWithRev(self, key, exp, flags, value, rev):
        return self._doRevCmd(memcacheConstants.CMD_ADD_WITH_META,
                              key, exp, flags, value, rev)

    def replace(self, key, exp, flags, val):
        """Replace a value in the memcached server iff it already exists."""
        return self._mutate(memcacheConstants.CMD_REPLACE, key, exp, flags,
                            0, val)

    def observe(self, key, vbucket):
        """Observe a key for persistence and replication."""
        value = struct.pack('>HH', vbucket, len(key)) + key
        opaque, cas, data = self._doCmd(memcacheConstants.CMD_OBSERVE,
                                        '', value)
        rep_time = (cas & 0xFFFFFFFF)
        persist_time = (cas >> 32) & 0xFFFFFFFF
        persisted = struct.unpack('>B', data[4 + len(key)])[0]
        return opaque, rep_time, persist_time, persisted

    def __parseGet(self, data, klen=0):
        flags = struct.unpack(memcacheConstants.GET_RES_FMT, data[-1][:4])[0]
        return flags, data[1], data[-1][4 + klen:]

    def get(self, key):
        """Get the value for a given key within the memcached server."""
        parts = self._doCmd(memcacheConstants.CMD_GET, key, '')
        return self.__parseGet(parts)

    def __parseMeta(self, data):
        flags = struct.unpack('I', data[-1][0:4])[0]
        meta_type = struct.unpack('B', data[-1][4])[0]
        length = struct.unpack('B', data[-1][5])[0]
        meta = data[-1][6:6 + length]
        return (meta_type, flags, meta)

    def getMeta(self, key):
        """Get the metadata for a given key within the memcached server."""
        parts = self._doCmd(memcacheConstants.CMD_GET_META, key, '')
        return self.__parseMeta(parts)

    def getRev(self, key):
        """Get the revision for a given key within the memcached server."""
        (meta_type, flags, meta_data) = self.getMeta(key)
        if meta_type != memcacheConstants.META_REVID:
            raise ValueError("Invalid meta type %x" % meta_type)
        seqno = struct.unpack('>Q', meta_data[:8])[0]
        revid = meta_data[4:]
        return (seqno, revid)

    def getl(self, key, exp=15):
        """Get the value for a given key within the memcached server."""
        parts = self._doCmd(memcacheConstants.CMD_GET_LOCKED, key, '',
                            struct.pack(memcacheConstants.GETL_PKT_FMT, exp))
        return self.__parseGet(parts)

    def cas(self, key, exp, flags, oldVal, val):
        """CAS in a new value for the given key and comparison value."""
        self._mutate(memcacheConstants.CMD_SET, key, exp, flags, oldVal, val)

    def touch(self, key, exp):
        """Touch a key in the memcached server."""
        return self._doCmd(memcacheConstants.CMD_TOUCH, key, '',
                           struct.pack(memcacheConstants.TOUCH_PKT_FMT, exp))

    def gat(self, key, exp):
        """Get the value for a given key and touch it within the memcached
        server."""
        parts = self._doCmd(memcacheConstants.CMD_GAT, key, '',
                            struct.pack(memcacheConstants.GAT_PKT_FMT, exp))
        return self.__parseGet(parts)

    def getr(self, key):
        """Get the value for a given key in a replica vbucket within the
        memcached server."""
        parts = self._doCmd(memcacheConstants.CMD_GET_REPLICA, key, '')
        return self.__parseGet(parts, len(key))

    def version(self):
        """Get the value for a given key within the memcached server."""
        return self._doCmd(memcacheConstants.CMD_VERSION, '', '')

    def verbose(self, level):
        """Set the verbosity level."""
        return self._doCmd(memcacheConstants.CMD_VERBOSE, '', '',
                           extraHeader=struct.pack(">I", level))

    def sasl_mechanisms(self):
        """Get the supported SASL methods."""
        return set(self._doCmd(memcacheConstants.CMD_SASL_LIST_MECHS,
                               '', '')[2].split(' '))

    def sasl_auth_start(self, mech, data):
        """Start a sasl auth session."""
        return self._doCmd(memcacheConstants.CMD_SASL_AUTH, mech, data)

    def sasl_auth_plain(self, user, password, foruser=''):
        """Perform plain auth."""
        return self.sasl_auth_start('PLAIN',
                                    '\0'.join([foruser, user, password]))

    def sasl_auth_cram_md5(self, user, password):
        """Start a plan auth session."""
        try:
            self.sasl_auth_start('CRAM-MD5', '')
        except MemcachedError, e:
            if e.status != memcacheConstants.ERR_AUTH_CONTINUE:
                raise
            challenge = e.msg

        dig = hmac.HMAC(password, challenge).hexdigest()
        return self._doCmd(memcacheConstants.CMD_SASL_STEP, 'CRAM-MD5',
                           user + ' ' + dig)

    def stop_persistence(self):
        return self._doCmd(memcacheConstants.CMD_STOP_PERSISTENCE, '', '')

    def start_persistence(self):
        return self._doCmd(memcacheConstants.CMD_START_PERSISTENCE, '', '')

    def set_param(self, key, val, type):
        type = struct.pack(memcacheConstants.SET_PARAM_FMT, type)
        return self._doCmd(memcacheConstants.CMD_SET_PARAM, key, val, type)

    def set_vbucket_state(self, vbucket, stateName):
        assert isinstance(vbucket, int)
        self.vbucketId = vbucket
        state = struct.pack(memcacheConstants.VB_SET_PKT_FMT,
                            memcacheConstants.VB_STATE_NAMES[stateName])
        return self._doCmd(memcacheConstants.CMD_SET_VBUCKET_STATE, '', '',
                           state)

    def get_vbucket_state(self, vbucket):
        assert isinstance(vbucket, int)
        self.vbucketId = vbucket
        return self._doCmd(memcacheConstants.CMD_GET_VBUCKET_STATE, '', '')

    def delete_vbucket(self, vbucket):
        assert isinstance(vbucket, int)
        self.vbucketId = vbucket
        return self._doCmd(memcacheConstants.CMD_DELETE_VBUCKET, '', '')

    def evict_key(self, key):
        return self._doCmd(memcacheConstants.CMD_EVICT_KEY, key, '')

    def getMulti(self, keys):
        """Get values for any available keys in the given iterable.

        Returns a dict of matched keys to their values."""
        opaqued = dict(enumerate(keys))
        terminal = len(opaqued) + 10
        # Send all of the keys in quiet
        for k, v in opaqued.iteritems():
            self._sendCmd(memcacheConstants.CMD_GETQ, v, '', k)

        self._sendCmd(memcacheConstants.CMD_NOOP, '', '', terminal)

        # Handle the response
        rv = {}
        done = False
        while not done:
            opaque, cas, data = self._handleSingleResponse(None)
            if opaque != terminal:
                rv[opaqued[opaque]] = self.__parseGet((opaque, cas, data))
            else:
                done = True
        return rv

    def setMulti(self, exp, flags, items):
        """Multi-set (using setq).

        Give me (key, value) pairs."""
        # If this is a dict, convert it to a pair generator
        if hasattr(items, 'iteritems'):
            items = items.iteritems()

        opaqued = dict(enumerate(items))
        terminal = len(opaqued) + 10
        extra = struct.pack(SET_PKT_FMT, flags, exp)

        # Send all of the keys in quiet
        for opaque, kv in opaqued.iteritems():
            self._sendCmd(memcacheConstants.CMD_SETQ, kv[0], kv[1], opaque,
                          extra)

        self._sendCmd(memcacheConstants.CMD_NOOP, '', '', terminal)

        # Handle the response
        failed = []
        done = False
        while not done:
            try:
                opaque, cas, data = self._handleSingleResponse(None)
                done = opaque == terminal
            except MemcachedError, e:
                failed.append(e)
        return failed

    def delMulti(self, items):
        """Multi-delete (using delq).

        Give me a collection of keys."""
        opaqued = dict(enumerate(items))
        terminal = len(opaqued) + 10
        extra = ''

        # Send all of the keys in quiet
        for opaque, k in opaqued.iteritems():
            self._sendCmd(memcacheConstants.CMD_DELETEQ, k, '', opaque, extra)

        self._sendCmd(memcacheConstants.CMD_NOOP, '', '', terminal)

        # Handle the response
        failed = []
        done = False
        while not done:
            try:
                opaque, cas, data = self._handleSingleResponse(None)
                done = opaque == terminal
            except MemcachedError, e:
                failed.append(e)
        return failed

    def stats(self, sub=''):
        """Get stats."""
        opaque = self.r.randint(0, 2 ** 32)
        self._sendCmd(memcacheConstants.CMD_STAT, sub, '', opaque)
        done = False
        rv = {}
        while not done:
            cmd, opaque, cas, klen, extralen, data = \
                self._handleKeyedResponse(None)
            if klen:
                rv[data[0:klen]] = data[klen:]
            else:
                done = True
        return rv

    def noop(self):
        """Send a noop command."""
        return self._doCmd(memcacheConstants.CMD_NOOP, '', '')

    def delete(self, key, cas=0):
        """Delete the value for a given key within the memcached server."""
        return self._doCmd(memcacheConstants.CMD_DELETE, key, '', '', cas)

    def flush(self, timebomb=0):
        """Flush all storage in a memcached instance."""
        return self._doCmd(
            memcacheConstants.CMD_FLUSH, '', '',
            struct.pack(memcacheConstants.FLUSH_PKT_FMT, timebomb)
        )

    def bucket_select(self, name):
        return self._doCmd(memcacheConstants.CMD_SELECT_BUCKET, name, '')

    def restore_file(self, filename):
        """Initiate restore of a given file."""
        return self._doCmd(memcacheConstants.CMD_RESTORE_FILE, filename,
                           '', '', 0)

    def restore_complete(self):
        """Notify the server that we're done restoring."""
        return self._doCmd(memcacheConstants.CMD_RESTORE_COMPLETE, '', '',
                           '', 0)

    def deregister_tap_client(self, tap_name):
        """Deregister the TAP client with a given name."""
        return self._doCmd(memcacheConstants.CMD_DEREGISTER_TAP_CLIENT,
                           tap_name, '', '', 0)

    def reset_replication_chain(self):
        """Reset the replication chain."""
        return self._doCmd(memcacheConstants.CMD_RESET_REPLICATION_CHAIN,
                           '', '', '', 0)
{ "content_hash": "4ec9d1eca6e3ebaceb6e4ddd854c8f7a", "timestamp": "", "source": "github", "line_count": 452, "max_line_length": 79, "avg_line_length": 37.5353982300885, "alnum_prop": 0.5757986561358011, "repo_name": "EricACooper/perfrunner", "id": "d32f10d34fbd636ebdfef93f6eb271490e15ca60", "size": "16966", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "perfrunner/lib/mc_bin_client/__init__.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "1262" }, { "name": "Go", "bytes": "3718" }, { "name": "Inno Setup", "bytes": "20478" }, { "name": "Makefile", "bytes": "518" }, { "name": "Python", "bytes": "837748" }, { "name": "Shell", "bytes": "13179" } ], "symlink_target": "" }
from __future__ import absolute_import

from PySide import QtGui, QtCore
from .worker import Worker
import os
import sys
import time
import importlib
from .. import compat

__all__ = ['Application', 'Dialog', 'MutexLocker']

TOP_SECTION = '<b>%s</b>'
SECTION = '<br><b>%s</b>'


class Dialog(QtGui.QDialog):

    def __init__(self, *args, **kwargs):
        super(Dialog, self).__init__(*args, **kwargs)
        self.setWindowFlags(self.windowFlags() ^
                            QtCore.Qt.WindowContextHelpButtonHint)
        self._headers = _Headers()

    @property
    def headers(self):
        return self._headers

    def section(self, title):
        return self._headers.section(title)


class _Headers(object):

    def __init__(self):
        self._first = True

    def section(self, title):
        if self._first:
            self._first = False
            section = TOP_SECTION % title
        else:
            section = SECTION % title
        return QtGui.QLabel(section)


class _MainWindow(QtGui.QMainWindow):

    def __init__(self):
        super(_MainWindow, self).__init__()
        self._widget = None

    def customEvent(self, event):
        event.callback()
        event.accept()


class Application(QtGui.QApplication):
    _quit = False

    def __init__(self, m=None, version=None):
        super(Application, self).__init__(sys.argv)

        self._determine_basedir()
        self._read_package_version(version)

        self.window = _MainWindow()

        if m:
            for key in dir(m):
                if (isinstance(key, compat.string_types) and
                        not key.startswith('_')):
                    setattr(m, key, self.tr(getattr(m, key)))

        self.worker = Worker(self.window, m)

    def _determine_basedir(self):
        if getattr(sys, 'frozen', False):
            # we are running in a PyInstaller bundle
            self.basedir = sys._MEIPASS
        else:
            # we are running in a normal Python environment
            top_module_str = __package__.split('.')[0]
            top_module = importlib.import_module(top_module_str)
            self.basedir = os.path.dirname(top_module.__file__)

    def _read_package_version(self, version):
        if version is None:
            return

        pversion_fn = os.path.join(self.basedir, 'package_version.txt')
        try:
            with open(pversion_fn, 'r') as f:
                pversion = int(f.read().strip())
        except:
            pversion = 0

        if pversion > 0:
            version += '.%d' % pversion
        self.version = version

    def ensure_singleton(self, name=None):
        if not name:
            name = self.applicationName()
        from PySide import QtNetwork
        self._l_socket = QtNetwork.QLocalSocket()
        self._l_socket.connectToServer(name, QtCore.QIODevice.WriteOnly)
        if self._l_socket.waitForConnected():
            self.worker.thread().quit()
            self.deleteLater()
            time.sleep(0.01)  # Without this the process sometimes stalls.
            sys.exit(0)
        else:
            self._l_server = QtNetwork.QLocalServer()
            if not self._l_server.listen(name):
                QtNetwork.QLocalServer.removeServer(name)
                self._l_server.listen(name)
            self._l_server.newConnection.connect(self._show_window)

    def _show_window(self):
        self.window.show()
        self.window.activateWindow()

    def quit(self):
        super(Application, self).quit()
        self._quit = True

    def exec_(self):
        if not self._quit:
            status = super(Application, self).exec_()
        else:
            status = 0
        self.worker.thread().quit()
        self.deleteLater()
        time.sleep(0.01)  # Without this the process sometimes stalls.
        return status


class MutexLocker(object):
    """Drop-in replacement for QMutexLocker that can start unlocked."""

    def __init__(self, mutex, lock=True):
        self._mutex = mutex
        self._locked = False
        if lock:
            self.relock()

    def lock(self, try_lock=False):
        if try_lock:
            self._locked = self._mutex.tryLock()
        else:
            self._mutex.lock()
            self._locked = True
        return self._locked and self or None

    def relock(self):
        self.lock()

    def unlock(self):
        if self._locked:
            self._mutex.unlock()

    def __del__(self):
        self.unlock()
{ "content_hash": "9d7a0c3a4b4af33c89f40b20e7ba78d0", "timestamp": "", "source": "github", "line_count": 167, "max_line_length": 74, "avg_line_length": 26.850299401197606, "alnum_prop": 0.5628902765388046, "repo_name": "moreati/python-yubicommon", "id": "5ebaf42ea16a559f1c6534bbc3f0a88f4a011f28", "size": "5709", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "yubicommon/qt/classes.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Python", "bytes": "62415" } ], "symlink_target": "" }
from eight_mile.version import __version__
{ "content_hash": "7f7204e553e60af88d06edc83b892d8d", "timestamp": "", "source": "github", "line_count": 1, "max_line_length": 42, "avg_line_length": 43, "alnum_prop": 0.7674418604651163, "repo_name": "dpressel/baseline", "id": "7002eda13b0ef1d5e9accbc30674d7ab6fc0382d", "size": "43", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "layers/eight_mile/__init__.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C++", "bytes": "9649" }, { "name": "CMake", "bytes": "430" }, { "name": "HCL", "bytes": "923" }, { "name": "Perl", "bytes": "17554" }, { "name": "Python", "bytes": "1281602" }, { "name": "Roff", "bytes": "24" }, { "name": "Shell", "bytes": "10168" } ], "symlink_target": "" }
from tcga_encoder.models.vae.batcher_ABC import *


class TCGABatcher(TCGABatcherABC):
    pass
{ "content_hash": "ae875d6281de3cf2ace9ac42c3c6ba93", "timestamp": "", "source": "github", "line_count": 8, "max_line_length": 49, "avg_line_length": 16, "alnum_prop": 0.578125, "repo_name": "tedmeeds/tcga_encoder", "id": "615a2ddbad2c29ba18695c2c80f631f7e265eeda", "size": "128", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tcga_encoder/models/vae/batcher_dna_out.py", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "2115" }, { "name": "Python", "bytes": "2472857" }, { "name": "Shell", "bytes": "1714" } ], "symlink_target": "" }
from pylatex import Document, Section, Subsection, Command, Math, TikZ, Axis, \
    Plot, Figure, LongTabu, Tabu, Head, PageStyle
from pylatex.utils import italic, NoEscape, bold
import os
import csv
import numpy as np
import matplotlib.pyplot as plt

# get the absolute path of the testing directory instead of hard coding
# split it by '/'
# grab the wanted part
# In this scenario absolute path is '/home/gwthamy/projects/streamperf/git/siddhi-benchmarks/filter-4.0.0-M20/ReportGeneration'
absolute_path = [os.path.abspath(name) for name in os.listdir(".") if os.path.isdir(name)]
for line in absolute_path:
    path = line.split("/")[7]  # to grab filter version

dir = '/home/gwthamy/projects/streamperf/git/siddhi-benchmarks/' + path + '/filtered-results-' + path

# directory to save image
throughput_image_dir = "/var/tmp/" + path + "-throughputChart.png"
latency_image_dir = "/var/tmp/" + path + "-latencyChart.png"

# array to save throughput total
throughput = []
# array to save latency total
latency = []

# to calculate the overall throughput and latency of this version
final_throughput_avg = 0
final_latency_avg = 0


# loop through each csv file in the specific folder
def get_all_files(directory):
    dir_list = os.listdir(directory)
    csv_files = []
    for e in dir_list:
        if e.endswith('.csv'):
            csv_files.append(e)
    return csv_files


def sum_from_csv(csv_file):
    cr = open(csv_file, 'r')
    # cr.next()
    file_content = cr.readlines()
    # initialize throughput total as zero
    throughput_total = 0
    # initialize total latency as zero
    latency_total = 0
    # array to save throughput in every iteration
    throughput_dataset = []
    # array to save latency in every iteration
    latency_dataset = []
    for line in file_content:
        line = line.strip()
        throughput_data = line.split(",")[1]
        float_throughput_data = float(throughput_data)
        throughput_total += float_throughput_data
        throughput_dataset.append(float_throughput_data)
        latency_data = line.split(",")[4]
        float_latency_data = float(latency_data)
        latency_total += float_latency_data
        latency_dataset.append(float_latency_data)
    # to calculate number of dataset
    throughput_dataset_length = len(throughput_dataset)
    throughput_average = throughput_total / throughput_dataset_length
    million_throughput_average = throughput_average / 1000000
    throughput.append(million_throughput_average)
    # to calculate number of dataset
    latency_dataset_length = len(latency_dataset)
    # convert it into microseconds
    latency_average = (latency_total / latency_dataset_length) * 1000
    latency.append(latency_average)
    print "Throughput-total is", throughput_total
    print "Average is", million_throughput_average
    print "latency average", latency_average


for each in get_all_files(dir):
    sum_from_csv(os.path.join(dir, each))

# count the number of files in the specific folder
no_of_file = []
for x in range(0, len(throughput)):
    no_of_file.append(x)

# To calculate the overall average throughput of this version
final_throughput_total = 0
for avg in throughput:
    final_throughput_total += avg
final_throughput_avg = final_throughput_total / len(throughput)
print final_throughput_avg

# To calculate the overall average latency of this version
final_latency_total = 0
for lat_avg in latency:
    final_latency_total += lat_avg
final_latency_avg = final_latency_total / len(latency)
print final_latency_avg

# Generating bar charts (Average Throughput vs Siddhi version & Average Latency vs Siddhi version)
# Bar chart for Average Throughput vs Siddhi version
N = len(throughput)
print N
ind = np.arange(N)
width = 0.45
plt.figure(1)
rects1 = plt.bar(ind, throughput, width, color='red')
plt.xticks(ind + width / 2, (no_of_file))
plt.ylabel('Average Throughput(million events/second)')
plt.xlabel('runs')
plt.savefig(throughput_image_dir)

# Bar chart for Average latency vs Siddhi version
plt.figure(2)
rects2 = plt.bar(ind, latency, width, color='blue')
plt.xticks(ind + width / 2, (no_of_file))
plt.ylabel('Average Latency(microseconds)')
plt.xlabel('runs')
plt.savefig(latency_image_dir)

# Report Generation
if __name__ == '__main__':
    # basic document
    doc = Document()
    doc.preamble.append(Command('title', 'Report'))
    doc.preamble.append(Command('author', 'Stream Processor Performance Testing-' + path))
    doc.preamble.append(Command('date', NoEscape(r'\today')))
    doc.append(NoEscape(r'\maketitle'))
    doc.append(NoEscape(r'\newpage'))

    # Generating summary results chart for each run
    # throughput summary table and chart
    with doc.create(
            Section('Average Throughput During 60-180 seconds time period since the start of the benchmarking experiment')):
        doc.append(NoEscape(r'\hfill \break'))
        fmt = "X[r] X[r]"
        with doc.create(Subsection('Summary Table')):
            with doc.create(LongTabu(fmt, spread="0pt")) as data_table:
                header_row = ['Experiment runs', 'Average Throughput(million events/second)']
                data_table.add_row(header_row, mapper=[bold])
                data_table.add_hline()
                data_table.add_empty_row()
                data_table.end_table_header()
                # Iterates through the no_of_file and throughput arrays and appends the respective run and throughput to the row
                for x, y in np.c_[no_of_file, throughput]:
                    row = [x, y]
                    data_table.add_row(row)
        doc.append(NoEscape(r'\newpage'))
        # append graph into the pdf report
        with doc.create(Subsection("Graph")):
            with doc.create(Figure(position='h!')) as throughput_chart:
                throughput_chart.add_image(throughput_image_dir, width='450px')
        with doc.create(Subsection("results")):
            doc.append("Over all throughput average (million events/seconds) of")
            doc.append(NoEscape(r'\space'))
            doc.append(path)
            doc.append(NoEscape(r'\space'))
            doc.append("is")
            doc.append(NoEscape(r'\space'))
            doc.append(final_throughput_avg)

    doc.append(NoEscape(r'\newpage'))

    # latency summary table and chart for latency
    with doc.create(
            Section('Average Latency During 60-180 seconds time period since the start of the benchmarking experiment')):
        doc.append(NoEscape(r'\hfill \break'))
        fmt = "X[r] X[r]"
        with doc.create(Subsection('Summary Table')):
            with doc.create(LongTabu(fmt, spread="0pt")) as data_table:
                header_row = ['Experiment runs', 'Average Latency (micro seconds)']
                data_table.add_row(header_row, mapper=[bold])
                data_table.add_hline()
                data_table.add_empty_row()
                data_table.end_table_header()
                # Iterates through the no_of_file and latency arrays and appends the respective run and latency to the row
                for x, y in np.c_[no_of_file, latency]:
                    row = [x, y]
                    data_table.add_row(row)
        doc.append(NoEscape(r'\newpage'))
        # append graph into the pdf report
        with doc.create(Subsection("Graph")):
            with doc.create(Figure(position='h!')) as throughput_chart:
                throughput_chart.add_image(latency_image_dir, width='450px')
        with doc.create(Subsection("results")):
            doc.append("Over all latency average of")
            doc.append(NoEscape(r'\space'))
            doc.append(path)
            doc.append(NoEscape(r'\space'))
            doc.append("is")
            doc.append(NoEscape(r'\space'))
            doc.append(final_latency_avg)

    doc.generate_pdf('Report', clean_tex=False)
{ "content_hash": "78fe6e5515dccfb869c244c6f595124e", "timestamp": "", "source": "github", "line_count": 213, "max_line_length": 127, "avg_line_length": 35.72769953051643, "alnum_prop": 0.6779237844940867, "repo_name": "miyurud/siddhi-benchmarks", "id": "8c585908244062736d7ec549b4c1204e43e7668f", "size": "7610", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "ReportGeneration/reportGeneration.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "256087" }, { "name": "Python", "bytes": "28759" }, { "name": "Shell", "bytes": "19430" }, { "name": "TeX", "bytes": "218640" } ], "symlink_target": "" }
from tempest.services.volume.json import volumes_client


class VolumesV2ClientJSON(volumes_client.BaseVolumesClientJSON):
    """
    Client class to send CRUD Volume V2 API requests to a Cinder endpoint
    """

    def __init__(self, auth_provider):
        super(VolumesV2ClientJSON, self).__init__(auth_provider)
        self.api_version = "v2"
{ "content_hash": "03dafc0d8811d3fbc22ea1dea072b137", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 73, "avg_line_length": 29.25, "alnum_prop": 0.7008547008547008, "repo_name": "Mirantis/tempest", "id": "1f16eadc34b4976f05b0b652921877fd04969027", "size": "987", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tempest/services/volume/v2/json/volumes_client.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "3297127" }, { "name": "Shell", "bytes": "8663" } ], "symlink_target": "" }
from operator import itemgetter
from typing import List

import itertools

from Src.BioAnalyzer.Analysis.GenePrioritization.Utils.Utils import Utils
from Src.BioAnalyzer.CrossCutting.DTOs.GenePrioritization.GeneRankingDto import GeneRankingDto
from Src.BioAnalyzer.CrossCutting.DTOs.GenePrioritization.GeneRankingItemDto import GeneRankingItemDto
from Src.BioAnalyzer.CrossCutting.Filters.GenePrioritization.FeSingleNetwork import FeSingleNetwork
from Src.BioAnalyzer.Managers.GenePrioritization.GeneRankingManager import GeneRankingManager
from Src.BioAnalyzer.Managers.GenePrioritization.NetworkManager import NetworkManager
from Src.Core.Entity import ProcessInfo


class GeneRanker():
    def __init__(self):
        self.__geneRankingManager = GeneRankingManager()
        self.__networkManager = NetworkManager()

    def execute(self, data_types: List[str], network_type: str) -> ProcessInfo:
        integrated_data_types = Utils.get_integrated_data_types(data_types)

        node_measurements = self.__get_node_measurements(integrated_data_types, network_type)

        partial_gene_ranking = dict(
            [(data_type,
              dict([self.__calculate_partial_score(id_entrez,
                                                   measurements['node_control'][id_entrez],
                                                   measurements['node_case'][id_entrez],
                                                   len(data_type.split('_')))
                    for id_entrez in measurements['genes']]))
             for data_type, measurements in node_measurements.items()])

        gene_ranking = self.__calculate_final_score(partial_gene_ranking)

        self.__save('_'.join(data_types), gene_ranking)

    def __get_node_measurements(self, data_types, network_type):
        node_measurements = {}

        for integrated_data_type in data_types:
            fe_network_control = self.__networkManager.get_one(
                FeSingleNetwork(data_type=integrated_data_type,
                                network_type=network_type,
                                conditional='control'))
            fe_network_case = self.__networkManager.get_one(
                FeSingleNetwork(data_type=integrated_data_type,
                                network_type=network_type,
                                conditional='case'))

            if not fe_network_control.result or not fe_network_control.result.nodes or \
                    not fe_network_case.result or not fe_network_case.result.nodes:
                continue

            node_control_list, \
                centrality_control_list = zip(*[(n.node, (n.node, n.centrality_value))
                                                for n in fe_network_control.result.nodes])

            node_case_list, \
                centrality_case_list = zip(*[(n.node, (n.node, n.centrality_value))
                                             for n in fe_network_case.result.nodes])

            node_measurements[integrated_data_type] = {
                # each entry keyed by the network its centralities came from
                'node_case': dict(centrality_case_list),
                'node_control': dict(centrality_control_list),
                'genes': list(set(node_case_list).intersection(
                    set(node_control_list)))
            }

        return node_measurements

    def __calculate_partial_score(self, id_entrez, score_control, score_case, weight):
        devider = score_case + score_control

        if devider == 0:
            return (id_entrez, 0)

        return (id_entrez, abs((score_case - score_control) / devider) * weight)

    def __calculate_final_score(self, partial_gene_ranking):
        gene_scores = [(id_entrez, score)
                       for ranking in partial_gene_ranking.values()
                       for id_entrez, score in ranking.items()]

        # groupby only groups consecutive equal keys, so sort by gene id first
        # to make sure the scores from every data type are summed together.
        gene_scores = sorted(gene_scores, key=itemgetter(0))

        partial_gene_ranking['final'] = dict(
            [(id_entrez, sum(score for _, score in ranking))
             for id_entrez, ranking in itertools.groupby(gene_scores,
                                                         lambda gene_score: gene_score[0])])

        return partial_gene_ranking

    def __save(self, data_type, ranking):
        for type, scores in ranking.items():
            gene_ranking_dto = GeneRankingDto(
                data_type=data_type,
                type=type,
                scores=sorted([GeneRankingItemDto(id_entrez=id_entrez, score=score)
                               for id_entrez, score in scores.items()],
                              key=lambda item: item.score))

            self.__geneRankingManager.add_one(gene_ranking_dto)
{ "content_hash": "ec3e0c66829287036a9ed155922b7a7c", "timestamp": "", "source": "github", "line_count": 98, "max_line_length": 117, "avg_line_length": 52.36734693877551, "alnum_prop": 0.5257209664848013, "repo_name": "cemarchi/biosphere", "id": "8078ce10c329f31eb717d3494a7d022824f019e1", "size": "5132", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Src/BioAnalyzer/Analysis/GenePrioritization/Steps/Ranking/GeneRanker.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "822707" } ], "symlink_target": "" }
from sqlalchemy import create_engine
import vmcatcher.databaseDefinition as model
import os
import logging
import optparse
from smimeX509validation import TrustStore, LoadDirChainOfTrust, smimeX509validation, smimeX509ValidationError
import sys
from vmcatcher.__version__ import version
from sqlalchemy.orm import sessionmaker
from sqlalchemy.exc import IntegrityError, DatabaseError, ProgrammingError
import vmcatcher
import urllib2
import urllib
import hashlib
import datetime
from hepixvmitrust.vmitrustlib import VMimageListDecoder as VMimageListDecoder
from hepixvmitrust.vmitrustlib import time_format_definition as time_format_definition

try:
    import json
except:
    import simplejson
from vmcatcher.listutils import pairsNnot
import vmcatcher.outputfacard

# User interface
import vmcatcher.queryby


class db_controler:
    def __init__(self, dboptions, dblog=False):
        self.log = logging.getLogger("db_controler")
        self.engine = create_engine(dboptions, echo=dblog)
        model.init(self.engine)
        self.SessionFactory = sessionmaker(bind=self.engine)
        self.anchor = None
        self.factory_selector = None
        self.selectors_available = ["endorser_uuid"]
        self.selector_curent = None
        self._outputter = vmcatcher.outputfacard.outputFacade()

    def set_selector(self, selector_string):
        self.selector_curent = None
        if not selector_string in self.selectors_available:
            self.log.warning("Invalid selector string set:%s" % (selector_string))
            return False
        if selector_string == 'endorser_uuid':
            self.selector_curent = vmcatcher.queryby.query_endorser_by_identifier
        elif selector_string == 'sub_uri':
            self.selector_curent = vmcatcher.queryby.query_subscriptions_by_uri
        return True

    def setup_trust_anchor(self, directory):
        self.anchor = LoadDirChainOfTrust(directory)

    def setup_view_format(self, format):
        self._outputter.format = format

    def endosers_list(self):
        Session = self.SessionFactory()
        self._outputter.fpOutput = sys.stdout
        self._outputter.saSession = Session
        self._outputter.x509anchor = self.anchor
        self._outputter.list_vmcatcher_endorser_cred()
        return True

    def links_list(self):
        Session = self.SessionFactory()
        self._outputter.fpOutput = sys.stdout
        self._outputter.saSession = Session
        self._outputter.x509anchor = self.anchor
        self._outputter.list_vmcatcher_endorser_link()
        return True

    def link(self, endorsers_selected, subscriptions_selected):
        pairs, extra_selectors, extra_paths = pairsNnot(endorsers_selected, subscriptions_selected)
        Session = self.SessionFactory()
        for pair in pairs:
            endorser = pair[0]
            subscription = pair[1]
            subauth_list = Session.query(model.Endorser, model.Subscription, model.SubscriptionAuth).\
                filter(model.Endorser.identifier == endorser).\
                filter(model.Subscription.identifier == subscription).\
                filter(model.SubscriptionAuth.endorser == model.Endorser.id).\
                filter(model.SubscriptionAuth.subscription == model.Subscription.id)
            if subauth_list.count() == 0:
                endorser_list = Session.query(model.Endorser).\
                    filter(model.Endorser.identifier == endorser)
                if endorser_list.count() == 0:
                    self.log.warning("endorser not available.")
                    continue
                sub_list = Session.query(model.Subscription).\
                    filter(model.Subscription.identifier == subscription)
                if sub_list.count() == 0:
                    self.log.warning("subscription not available.")
                    continue
                db_endorser = endorser_list.one()
                db_sub = sub_list.one()
                newsubauth = model.SubscriptionAuth(db_sub.id, db_endorser.id, True)
                Session.add(newsubauth)
                Session.commit()
            else:
                self.log.warning("endorser and subscription already linked.")

    def unlink(self, endorsers_selected, subscriptions_selected):
        Session = self.SessionFactory()
        pairs, extra_selectors, extra_paths = pairsNnot(endorsers_selected, subscriptions_selected)
        for pair in pairs:
            endorser = pair[0]
            subscription = pair[1]
            subauth_list = Session.query(model.Endorser, model.Subscription, model.SubscriptionAuth).\
                filter(model.Endorser.identifier == endorser).\
                filter(model.Subscription.identifier == subscription).\
                filter(model.SubscriptionAuth.endorser == model.Endorser.id).\
                filter(model.SubscriptionAuth.subscription == model.Subscription.id)
            if subauth_list.count() == 0:
                self.log.warning("endorser and subscription are not linked.")
            else:
                for query_row in subauth_list:
                    db_endorser = query_row[0]
                    db_sub = query_row[1]
                    db_subAuthEnd = query_row[2]
                    Session.delete(db_subAuthEnd)
                Session.commit()

    def endorsers_info(self, selected):
        errorhappened = False
        Session = self.SessionFactory()
        for selector_filter in selected:
            output_fileptr = sys.stdout
            query_endorser = self.selector_curent(Session, selector_filter)
            if query_endorser.count() == 0:
                self.log.error("endorser '%s' not found" % (selector_filter))
                continue
            self._outputter.fpOutput = sys.stdout
            self._outputter.saSession = Session
            self._outputter.x509anchor = self.anchor
            for endorser in query_endorser:
                self._outputter.display_endorser(endorser)
                princible_query = Session.query(model.EndorserPrincible).\
                    filter(model.EndorserPrincible.endorser == endorser.id)
                if princible_query.count() == 0:
                    self.log.warning("endorser '%s' has no princibles" % (selector_filter))
                else:
                    pass
        return True

    def endorser_create(self, endorser, subjects, issuers):
        # Check input parameters.
        pairs, extra_subs, extra_issuers = pairsNnot(subjects, issuers)
        if len(extra_subs) > 0:
            if len(issuers) > 1:
                self.log.warning("Unsure how to add subjects credentials without issuer credentials.")
                return False
            else:
                if len(issuers) > 0:
                    thisissuer = issuers[0]
                    for this_sub in extra_subs:
                        pairs.append([this_sub, thisissuer])
                else:
                    self.log.warning("Cant add subjects credentials without issuer credentials.")
                    return False
        if len(extra_issuers) > 0:
            self.log.warning("Cant add issuer credentials, without a subject.")
            return False
        if len(pairs) == 0:
            return True
        # Now we process requests
        error = False
        deleteOnError = False
        Session = self.SessionFactory()
        endorserQuery = Session.query(model.Endorser).\
            filter(model.Endorser.identifier == endorser)
        endorserObj = None
        if endorserQuery.count() == 0:
            endorserObj = model.Endorser({'dc:identifier': str(endorser)})
            Session.add(endorserObj)
            Session.commit()
            deleteOnError = True
            endorserQuery = Session.query(model.Endorser).\
                filter(model.Endorser.identifier == endorser)
        endorserObj = endorserQuery.one()
        endorserObjId = int(endorserObj.id)
        for pair in pairs:
            dn = pair[0]
            issuer = pair[1]
            cred = model.EndorserPrincible(endorserObjId, {u'hv:dn': dn, u'hv:ca': issuer})
            Session.add(cred)
            try:
                Session.commit()
            except IntegrityError as expt:
                self.log.error("Database integrity error while adding '%s' credentials to '%s'." % (dn, endorser))
                self.log.debug(expt.params)
                Session.rollback()
                error = True
                break
        if error and deleteOnError:
            endorserQuery = Session.query(model.Endorser).\
                filter(model.Endorser.identifier == endorser)
            if endorserQuery.count() > 0:
                EndorserToDel = endorserQuery.one()
                Session.delete(EndorserToDel)
                Session.commit()
            return False
        return True

    def endorser_delete(self, endorsers):
        # Check input parameters.
        Session = self.SessionFactory()
        for endorser in endorsers:
            endorserQuery = Session.query(model.Endorser).\
                filter(model.Endorser.identifier == endorser)
            if endorserQuery.count() == 0:
                self.log.warning("Failed to find endorser '%s'." % (endorser))
                continue
            for obj in endorserQuery:
                Session.delete(obj)
                self.log.info("Deleting endorser '%s'." % (endorser))
            Session.commit()
{ "content_hash": "8f3caf9e2a44e2d493d8c252e50b96d8", "timestamp": "", "source": "github", "line_count": 221, "max_line_length": 114, "avg_line_length": 42.73303167420814, "alnum_prop": 0.6060991105463787, "repo_name": "hepix-virtualisation/vmcatcher", "id": "7017271df3b6d085384e60ef7c960e997a03db6c", "size": "9444", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "vmcatcher/vmcatcher_endorser/controler.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "227503" } ], "symlink_target": "" }
import abc
import collections
import threading

import fasteners
import futurist
from oslo_utils import reflection
from oslo_utils import timeutils
import six

from taskflow.engines.action_engine import executor
from taskflow import exceptions as excp
from taskflow import logging
from taskflow.types import failure as ft
from taskflow.utils import schema_utils as su

# NOTE(skudriashev): This is protocol states and events, which are not
# related to task states.
WAITING = 'WAITING'
PENDING = 'PENDING'
RUNNING = 'RUNNING'
SUCCESS = 'SUCCESS'
FAILURE = 'FAILURE'
EVENT = 'EVENT'

# During these states the expiry is active (once out of these states the
# expiry no longer matters, since we have no way of knowing how long a task
# will run for).
WAITING_STATES = (WAITING, PENDING)

_ALL_STATES = (WAITING, PENDING, RUNNING, SUCCESS, FAILURE, EVENT)
_STOP_TIMER_STATES = (RUNNING, SUCCESS, FAILURE)

# Transitions that a request state can go through.
_ALLOWED_TRANSITIONS = (
    # Used when an executor starts to publish a request to a selected worker.
    (WAITING, PENDING),

    # When a request expires (isn't able to be processed by any worker).
    (WAITING, FAILURE),

    # Worker has started executing a request.
    (PENDING, RUNNING),

    # Worker failed to construct/process a request to run (either the worker
    # did not transition to RUNNING in the given timeout or the worker itself
    # had some type of failure before RUNNING started).
    #
    # Also used by the executor if the request was attempted to be published
    # but the publishing process did not work out.
    (PENDING, FAILURE),

    # Execution failed due to some type of remote failure.
    (RUNNING, FAILURE),

    # Execution succeeded & has completed.
    (RUNNING, SUCCESS),
)

# Remote task actions.
EXECUTE = 'execute'
REVERT = 'revert'

# Remote task action to event map.
ACTION_TO_EVENT = {
    EXECUTE: executor.EXECUTED,
    REVERT: executor.REVERTED
}

# NOTE(skudriashev): A timeout which specifies request expiration period.
REQUEST_TIMEOUT = 60

# NOTE(skudriashev): A timeout which controls for how long a queue can be
# unused before it is automatically deleted. Unused means the queue has no
# consumers, the queue has not been redeclared, the `queue.get` has not been
# invoked for a duration of at least the expiration period. In our case this
# period is equal to the request timeout, once request is expired - queue is
# no longer needed.
QUEUE_EXPIRE_TIMEOUT = REQUEST_TIMEOUT

# Workers notify period.
NOTIFY_PERIOD = 5

# Message types.
NOTIFY = 'NOTIFY'
REQUEST = 'REQUEST'
RESPONSE = 'RESPONSE'

LOG = logging.getLogger(__name__)


@six.add_metaclass(abc.ABCMeta)
class Message(object):
    """Base class for all message types."""

    def __repr__(self):
        return ("<%s object at 0x%x with contents %s>"
                % (reflection.get_class_name(self, fully_qualified=False),
                   id(self), self.to_dict()))

    @abc.abstractmethod
    def to_dict(self):
        """Return json-serializable message representation."""


class Notify(Message):
    """Represents notify message type."""

    #: String constant representing this message type.
    TYPE = NOTIFY

    # NOTE(harlowja): the executor (the entity who initially requests a worker
    # to send back a notification response) schema is different than the
    # worker response schema (that's why there are two schemas here).

    #: Expected notify *response* message schema (in json schema format).
    RESPONSE_SCHEMA = {
        "type": "object",
        'properties': {
            'topic': {
                "type": "string",
            },
            'tasks': {
                "type": "array",
                "items": {
                    "type": "string",
                },
            }
        },
        "required": ["topic", 'tasks'],
        "additionalProperties": False,
    }

    #: Expected *sender* request message schema (in json schema format).
    SENDER_SCHEMA = {
        "type": "object",
        "additionalProperties": False,
    }

    def __init__(self, **data):
        self._data = data

    @property
    def topic(self):
        return self._data.get('topic')

    @property
    def tasks(self):
        return self._data.get('tasks')

    def to_dict(self):
        return self._data

    @classmethod
    def validate(cls, data, response):
        if response:
            schema = cls.RESPONSE_SCHEMA
        else:
            schema = cls.SENDER_SCHEMA
        try:
            su.schema_validate(data, schema)
        except su.ValidationError as e:
            cls_name = reflection.get_class_name(cls, fully_qualified=False)
            if response:
                excp.raise_with_cause(excp.InvalidFormat,
                                      "%s message response data not of the"
                                      " expected format: %s" % (cls_name,
                                                                e.message),
                                      cause=e)
            else:
                excp.raise_with_cause(excp.InvalidFormat,
                                      "%s message sender data not of the"
                                      " expected format: %s" % (cls_name,
                                                                e.message),
                                      cause=e)


_WorkUnit = collections.namedtuple('_WorkUnit', ['task_cls', 'task_name',
                                                 'action', 'arguments'])


class Request(Message):
    """Represents request with execution results.

    Every request is created in the WAITING state and is expired within the
    given timeout if it does not transition out of the (WAITING, PENDING)
    states.
    """

    #: String constant representing this message type.
    TYPE = REQUEST

    #: Expected message schema (in json schema format).
    SCHEMA = {
        "type": "object",
        'properties': {
            # These two are typically only sent on revert actions (that is
            # why we are not including them in the required section).
            'result': {},
            'failures': {
                "type": "object",
            },
            'task_cls': {
                'type': 'string',
            },
            'task_name': {
                'type': 'string',
            },
            'task_version': {
                "oneOf": [
                    {
                        "type": "string",
                    },
                    {
                        "type": "array",
                    },
                ],
            },
            'action': {
                "type": "string",
                "enum": list(six.iterkeys(ACTION_TO_EVENT)),
            },
            # Keyword arguments that end up in the revert() or execute()
            # method of the remote task.
            'arguments': {
                "type": "object",
            },
        },
        'required': ['task_cls', 'task_name', 'task_version', 'action'],
    }

    def __init__(self, task, uuid, action, arguments, timeout, **kwargs):
        self._task = task
        self._uuid = uuid
        self._action = action
        self._event = ACTION_TO_EVENT[action]
        self._arguments = arguments
        self._kwargs = kwargs
        self._watch = timeutils.StopWatch(duration=timeout).start()
        self._state = WAITING
        self._lock = threading.Lock()
        self._created_on = timeutils.utcnow()
        self._result = futurist.Future()
        self._result.atom = task
        self._notifier = task.notifier

    @property
    def result(self):
        return self._result

    @property
    def notifier(self):
        return self._notifier

    @property
    def uuid(self):
        return self._uuid

    @property
    def task(self):
        return self._task

    @property
    def state(self):
        return self._state

    @property
    def created_on(self):
        return self._created_on

    @property
    def expired(self):
        """Check if request has expired.

        When a new request is created its state is set to WAITING, the
        creation time is stored and a timeout is given via constructor
        arguments.

        Request is considered to be expired when it is in the WAITING/PENDING
        state for more than the given timeout (it is not considered to be
        expired in any other state).
        """
        if self._state in WAITING_STATES:
            return self._watch.expired()
        return False

    def to_dict(self):
        """Return json-serializable request.

        To convert requests that have failed due to some exception this will
        convert all `failure.Failure` objects into dictionaries (which will
        then be reconstituted by the receiver).
        """
        request = {
            'task_cls': reflection.get_class_name(self._task),
            'task_name': self._task.name,
            'task_version': self._task.version,
            'action': self._action,
            'arguments': self._arguments,
        }
        if 'result' in self._kwargs:
            result = self._kwargs['result']
            if isinstance(result, ft.Failure):
                request['result'] = ('failure', result.to_dict())
            else:
                request['result'] = ('success', result)
        if 'failures' in self._kwargs:
            failures = self._kwargs['failures']
            request['failures'] = {}
            for task, failure in six.iteritems(failures):
                request['failures'][task] = failure.to_dict()
        return request

    def set_result(self, result):
        self.result.set_result((self._event, result))

    def transition_and_log_error(self, new_state, logger=None):
        """Transitions *and* logs an error if that transitioning raises.

        This overlays the transition function and performs nearly the same
        functionality but instead of raising if the transition was not valid
        it logs a warning to the provided logger and returns False to
        indicate that the transition was not performed (note that this is
        *different* from the transition function where False means ignored).
        """
        if logger is None:
            logger = LOG
        moved = False
        try:
            moved = self.transition(new_state)
        except excp.InvalidState:
            logger.warn("Failed to transition '%s' to %s state.", self,
                        new_state, exc_info=True)
        return moved

    @fasteners.locked
    def transition(self, new_state):
        """Transitions the request to a new state.

        If the transition was performed, it returns True. If the transition
        was ignored, it returns False. If the transition was not valid (and
        will not be performed), it raises an InvalidState exception.
        """
        old_state = self._state
        if old_state == new_state:
            return False
        pair = (old_state, new_state)
        if pair not in _ALLOWED_TRANSITIONS:
            raise excp.InvalidState("Request transition from %s to %s is"
                                    " not allowed" % pair)
        if new_state in _STOP_TIMER_STATES:
            self._watch.stop()
        self._state = new_state
        LOG.debug("Transitioned '%s' from %s state to %s state", self,
                  old_state, new_state)
        return True

    @classmethod
    def validate(cls, data):
        try:
            su.schema_validate(data, cls.SCHEMA)
        except su.ValidationError as e:
            cls_name = reflection.get_class_name(cls, fully_qualified=False)
            excp.raise_with_cause(excp.InvalidFormat,
                                  "%s message response data not of the"
                                  " expected format: %s" % (cls_name,
                                                            e.message),
                                  cause=e)
        else:
            # Validate all failure dictionaries that *may* be present...
            failures = []
            if 'failures' in data:
                failures.extend(six.itervalues(data['failures']))
            result = data.get('result')
            if result is not None:
                result_data_type, result_data = result
                if result_data_type == 'failure':
                    failures.append(result_data)
            for fail_data in failures:
                ft.Failure.validate(fail_data)

    @staticmethod
    def from_dict(data, task_uuid=None):
        """Parses **validated** data into a work unit.

        All :py:class:`~taskflow.types.failure.Failure` objects that have
        been converted to dict(s) on the remote side will now be converted
        back to py:class:`~taskflow.types.failure.Failure` objects.
        """
        task_cls = data['task_cls']
        task_name = data['task_name']
        action = data['action']
        arguments = data.get('arguments', {})
        result = data.get('result')
        failures = data.get('failures')
        # These arguments will eventually be given to the task executor
        # so they need to be in a format it will accept (and using keyword
        # argument names that it accepts)...
        arguments = {
            'arguments': arguments,
        }
        if task_uuid is not None:
            arguments['task_uuid'] = task_uuid
        if result is not None:
            result_data_type, result_data = result
            if result_data_type == 'failure':
                arguments['result'] = ft.Failure.from_dict(result_data)
            else:
                arguments['result'] = result_data
        if failures is not None:
            arguments['failures'] = {}
            for task, fail_data in six.iteritems(failures):
                arguments['failures'][task] = ft.Failure.from_dict(fail_data)
        return _WorkUnit(task_cls, task_name, action, arguments)


class Response(Message):
    """Represents response message type."""

    #: String constant representing this message type.
    TYPE = RESPONSE

    #: Expected message schema (in json schema format).
    SCHEMA = {
        "type": "object",
        'properties': {
            'state': {
                "type": "string",
                "enum": list(_ALL_STATES),
            },
            'data': {
                "anyOf": [
                    {
                        "$ref": "#/definitions/event",
                    },
                    {
                        "$ref": "#/definitions/completion",
                    },
                    {
                        "$ref": "#/definitions/empty",
                    },
                ],
            },
        },
        "required": ["state", 'data'],
        "additionalProperties": False,
        "definitions": {
            "event": {
                "type": "object",
                "properties": {
                    'event_type': {
                        'type': 'string',
                    },
                    'details': {
                        'type': 'object',
                    },
                },
                "required": ["event_type", 'details'],
                "additionalProperties": False,
            },
            # Used when sending *only* request state changes (and no data is
            # expected).
            "empty": {
                "type": "object",
                "additionalProperties": False,
            },
            "completion": {
                "type": "object",
                "properties": {
                    # This can be any arbitrary type that a task returns, so
                    # thats why we can't be strict about what type it is since
                    # any of the json serializable types are allowed.
                    "result": {},
                },
                "required": ["result"],
                "additionalProperties": False,
            },
        },
    }

    def __init__(self, state, **data):
        self._state = state
        self._data = data

    @classmethod
    def from_dict(cls, data):
        state = data['state']
        data = data['data']
        if state == FAILURE and 'result' in data:
            data['result'] = ft.Failure.from_dict(data['result'])
        return cls(state, **data)

    @property
    def state(self):
        return self._state

    @property
    def data(self):
        return self._data

    def to_dict(self):
        return dict(state=self._state, data=self._data)

    @classmethod
    def validate(cls, data):
        try:
            su.schema_validate(data, cls.SCHEMA)
        except su.ValidationError as e:
            cls_name = reflection.get_class_name(cls, fully_qualified=False)
            excp.raise_with_cause(excp.InvalidFormat,
                                  "%s message response data not of the"
                                  " expected format: %s" % (cls_name,
                                                            e.message),
                                  cause=e)
        else:
            state = data['state']
            if state == FAILURE and 'result' in data:
                ft.Failure.validate(data['result'])
{ "content_hash": "10efa1ae92db2bdfb94315ddea072bef", "timestamp": "", "source": "github", "line_count": 514, "max_line_length": 79, "avg_line_length": 33.511673151750976, "alnum_prop": 0.5394484760522497, "repo_name": "pombredanne/taskflow-1", "id": "63556c259342a7b30507cbc4b36072e8c27ec06a", "size": "17882", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "taskflow/engines/worker_based/protocol.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Mako", "bytes": "412" }, { "name": "Python", "bytes": "1484277" }, { "name": "Shell", "bytes": "1988" } ], "symlink_target": "" }
"""Tests for discriminator.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow.compat.v1 as tf from tensorflow_gan.examples.cyclegan import discriminator class DiscriminatorTest(tf.test.TestCase): def _layer_output_size(self, input_size, kernel_size=4, stride=2, pad=2): return (input_size + pad * 2 - kernel_size) // stride + 1 def test_four_layers(self): batch_size = 2 input_size = 256 output_size = self._layer_output_size(input_size) output_size = self._layer_output_size(output_size) output_size = self._layer_output_size(output_size) output_size = self._layer_output_size(output_size, stride=1) output_size = self._layer_output_size(output_size, stride=1) images = tf.ones((batch_size, input_size, input_size, 3)) logits, end_points = discriminator.pix2pix_discriminator( images, num_filters=[64, 128, 256, 512]) self.assertListEqual([batch_size, output_size, output_size, 1], logits.shape.as_list()) self.assertListEqual([batch_size, output_size, output_size, 1], end_points['predictions'].shape.as_list()) def test_four_layers_no_padding(self): batch_size = 2 input_size = 256 output_size = self._layer_output_size(input_size, pad=0) output_size = self._layer_output_size(output_size, pad=0) output_size = self._layer_output_size(output_size, pad=0) output_size = self._layer_output_size(output_size, stride=1, pad=0) output_size = self._layer_output_size(output_size, stride=1, pad=0) images = tf.ones((batch_size, input_size, input_size, 3)) logits, end_points = discriminator.pix2pix_discriminator( images, num_filters=[64, 128, 256, 512], padding=0) self.assertListEqual([batch_size, output_size, output_size, 1], logits.shape.as_list()) self.assertListEqual([batch_size, output_size, output_size, 1], end_points['predictions'].shape.as_list()) def test_four_layers_wrong_paddig(self): batch_size = 2 input_size = 256 images = tf.ones((batch_size, input_size, input_size, 3)) with self.assertRaises(TypeError): discriminator.pix2pix_discriminator( images, num_filters=[64, 128, 256, 512], padding=1.5) def test_four_layers_negative_padding(self): batch_size = 2 input_size = 256 images = tf.ones((batch_size, input_size, input_size, 3)) if tf.executing_eagerly(): exception_type = tf.errors.InvalidArgumentError else: exception_type = ValueError with self.assertRaises(exception_type): discriminator.pix2pix_discriminator( images, num_filters=[64, 128, 256, 512], padding=-1) if __name__ == '__main__': tf.test.main()
{ "content_hash": "d643754caa1cb5666bbdc5abd1305673", "timestamp": "", "source": "github", "line_count": 75, "max_line_length": 75, "avg_line_length": 37.733333333333334, "alnum_prop": 0.6604240282685513, "repo_name": "tensorflow/gan", "id": "fe2310566ccc22c581ff25ec8f735827514a8970", "size": "3437", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tensorflow_gan/examples/cyclegan/discriminator_test.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Jupyter Notebook", "bytes": "1515604" }, { "name": "Python", "bytes": "1263660" }, { "name": "Shell", "bytes": "8407" } ], "symlink_target": "" }
from pytest import mark, raises

from voting.models import Vote
from voting.tests.fixtures.vote import get_random_vote


def _submit_vote(client, submission, **kwargs):
    value_index = kwargs.get("value_index", get_random_vote())
    defaults = {"value": value_index, "submission": submission.hashid}
    variables = {**defaults, **kwargs}
    return (
        client.query(
            """mutation($submission: ID!, $value: Int!) {
                sendVote(input: {
                    submission: $submission,
                    value: $value
                }) {
                    __typename

                    ... on VoteType {
                        id
                        value
                    }

                    ... on SendVoteErrors {
                        validationSubmission: submission
                        validationValue: value
                        nonFieldErrors
                    }
                }
            }""",
            variables=variables,
        ),
        {**variables, "value_index": value_index},
    )


@mark.django_db
@mark.parametrize("score_index", [1, 2, 3, 4])
def test_submit_vote(
    graphql_client, user, conference_factory, submission_factory, score_index
):
    graphql_client.force_login(user)

    conference = conference_factory(active_voting=True)
    submission = submission_factory(conference=conference)

    resp, variables = _submit_vote(graphql_client, submission, value_index=score_index)

    assert resp["data"]["sendVote"]["__typename"] == "VoteType"

    vote = Vote.objects.get(id=resp["data"]["sendVote"]["id"])
    assert vote.value == score_index
    assert vote.submission.hashid == variables["submission"]
    assert vote.user == user


def test_reject_vote_when_voting_is_not_open(
    graphql_client, user, conference_factory, submission_factory
):
    graphql_client.force_login(user)

    conference = conference_factory()
    submission = submission_factory(conference=conference)

    resp, variables = _submit_vote(graphql_client, submission)

    assert resp["data"]["sendVote"]["__typename"] == "SendVoteErrors"
    assert resp["data"]["sendVote"]["nonFieldErrors"] == [
        "The voting session is not open!"
    ]


def test_user_can_vote_different_submissions(
    graphql_client, user, conference_factory, submission_factory
):
    graphql_client.force_login(user)

    conference = conference_factory(active_voting=True)

    submission1 = submission_factory(conference=conference, id=1)
    resp1, variables1 = _submit_vote(graphql_client, submission1)

    assert resp1["data"]["sendVote"]["__typename"] == "VoteType"
    vote1 = Vote.objects.get(user=user, submission=submission1)
    assert vote1.value == variables1["value_index"]

    submission2 = submission_factory(conference=conference)
    resp2, variables2 = _submit_vote(graphql_client, submission2)

    assert resp2["data"]["sendVote"]["__typename"] == "VoteType"
    vote2 = Vote.objects.get(user=user, submission=submission2)
    assert vote2.value == variables2["value_index"]

    assert Vote.objects.all().count() == 2


def test_updating_vote_when_user_votes_the_same_submission(
    graphql_client, user, conference_factory, submission_factory
):
    graphql_client.force_login(user)

    conference = conference_factory(active_voting=True)
    submission = submission_factory(conference=conference, id=1)

    resp, variables = _submit_vote(graphql_client, submission, value_index=1)

    assert resp["data"]["sendVote"]["__typename"] == "VoteType"
    vote1 = Vote.objects.get(user=user, submission=submission)
    assert vote1.value == variables["value_index"]

    resp, variables = _submit_vote(graphql_client, submission, value_index=3)

    assert resp["data"]["sendVote"]["__typename"] == "VoteType"
    vote1 = Vote.objects.get(user=user, submission=submission)
    assert vote1.value == variables["value_index"]


def test_cannot_vote_without_a_ticket(
    graphql_client, user, conference_factory, mocker, submission_factory
):
    graphql_client.force_login(user)

    submission = submission_factory(conference__active_voting=True)

    admission_ticket_mock = mocker.patch(
        "users.models.user_has_admission_ticket", return_value=False
    )

    resp, _ = _submit_vote(graphql_client, submission, value_index=3)

    assert not resp.get("errors")
    assert resp["data"]["sendVote"]["__typename"] == "SendVoteErrors"
    assert resp["data"]["sendVote"]["nonFieldErrors"] == [
        "You cannot vote without a ticket"
    ]

    admission_ticket_mock.assert_called()

    with raises(Vote.DoesNotExist):
        Vote.objects.get(user=user, submission=submission)


@mark.django_db
def test_only_authenticated_users_can_vote(graphql_client, submission):
    resp, _ = _submit_vote(graphql_client, submission, value_index=3)

    assert resp["errors"][0]["message"] == "User not logged in"


@mark.django_db
@mark.parametrize("score_index", [0, -1, 6])
def test_cannot_vote_values_outside_the_range(
    graphql_client, user, score_index, submission
):
    graphql_client.force_login(user)

    resp, _ = _submit_vote(graphql_client, submission, value_index=score_index)

    assert resp["data"]["sendVote"]["__typename"] == "SendVoteErrors"
    assert resp["data"]["sendVote"]["validationValue"] == [
        f"Value {score_index} is not a valid choice."
    ]
{ "content_hash": "553dda1d6c4fbbdfc86b187c6cc40cd6", "timestamp": "", "source": "github", "line_count": 172, "max_line_length": 87, "avg_line_length": 31.261627906976745, "alnum_prop": 0.648688859959085, "repo_name": "patrick91/pycon", "id": "1484075353bd41bf998ac730c0360817c870abc3", "size": "5377", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "backend/voting/tests/test_send_vote.py", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "1456" }, { "name": "Python", "bytes": "13911" } ], "symlink_target": "" }
""" comment export DJANGO_SETTINGS_MODULE="opentrain.settings" """ import os import sys sys.path.append(os.getcwd()) sys.path.append(os.path.dirname(os.getcwd())) os.environ['DJANGO_SETTINGS_MODULE'] = 'opentrain.settings' #/home/oferb/docs/train_project/OpenTrains/webserver import timetable.services import analysis.models import numpy as np from scipy import spatial from alg_logger import logger try: import matplotlib.pyplot as plt except ImportError: pass import simplekml import config import itertools import datetime from unittest import TestCase import unittest import time from display_utils import * from export_utils import * import shapes from train_tracker import add_report, get_trusted_trips, get_train_tracker_trip_delays_ids_list_of_lists_key import stops from common.mock_reports_generator import generate_mock_reports from analysis.models import SingleWifiReport from redis_intf.client import (get_redis_pipeline, get_redis_client, load_by_key, save_by_key) import stop_detector_test import stop_detector import trip_matcher import trip_ground_truth from alg_logger import MessageExcludeFilter def track_device(device_id, do_print=False, do_preload_reports=True, set_reports_to_same_weekday_last_week=False, report_limit=10000000): #device_coords, device_timestamps, device_accuracies_in_meters, device_accuracies_in_coords = get_location_info_from_device_id(device_id) now = ot_utils.get_localtime_now() reports_queryset = stop_detector_test.get_device_id_reports(device_id) assert reports_queryset.count() > 0, 'No device reports in db' tracker_id = device_id fps_period_start = time.clock() fps_period_length = 100 if do_preload_reports: reports_queryset = list(reports_queryset) count = len(reports_queryset) if isinstance(reports_queryset, list) else reports_queryset.count() for i in xrange(count): if i > report_limit: break; if i % fps_period_length == 0: elapsed = (time.clock() - fps_period_start) if elapsed > 0: logger.debug('%d\t%.1f qps' % (i, fps_period_length/elapsed)) else: logger.debug('Elapsed time should be positive but is %d' % (elapsed)) fps_period_start = time.clock() report = reports_queryset[i] if set_reports_to_same_weekday_last_week: # fix finding same weekday last week by http://stackoverflow.com/questions/6172782/find-the-friday-of-previous-last-week-in-python day_fix = (now.weekday() - report.timestamp.weekday()) % 7 day = now + datetime.timedelta(days=-day_fix) # move day and correct for DST (daylight savings time) dst_before = report.get_timestamp_israel_time().dst() report.timestamp = report.timestamp.replace(year=day.year, month=day.month, day=day.day) dst_after = report.get_timestamp_israel_time().dst() report.timestamp -= dst_after-dst_before add_report(report) #tracker.print_tracked_stop_times() #tracker.print_possible_trips() trip_delays_ids_list_of_lists = load_by_key(get_train_tracker_trip_delays_ids_list_of_lists_key(tracker_id)) trips = get_trusted_trips(trip_delays_ids_list_of_lists) return tracker_id, trips class train_tracker_test(TestCase): def track_mock_reports(self, reports, tracker_id): for i, report in enumerate(reports): add_report(report) trip_delays_ids_list_of_lists = load_by_key(get_train_tracker_trip_delays_ids_list_of_lists_key(tracker_id)) trips = get_trusted_trips(trip_delays_ids_list_of_lists) return trips def test_tracker_on_mock_device_multiple_trips(self, device_id = 'fake_device_2', trip_ids = ['010714_00115', '010714_00283'], remove_some_locations=True): self.test_tracker_on_mock_device(device_id, trip_ids, remove_some_locations) def 
test_tracker_on_mock_device(self, device_id = 'fake_device_1', trip_ids = ['010714_00115'], remove_some_locations=True): if not isinstance(trip_ids, list): trip_ids = [trip_ids] tracker_id = device_id stop_detector_test.remove_from_redis(tracker_id) reports = [] for trip_id in trip_ids: day = datetime.datetime.strptime(trip_id.split('_')[0], '%d%m%y') now = ot_utils.get_localtime_now() # we want to get the correct timezone so we take it from get_localtime_now() day = now.replace(year=day.year, month=day.month, day=day.day) trip_reports = generate_mock_reports(device_id=device_id, trip_id=trip_id, nostop_percent=0.05, day=day) reports += trip_reports if remove_some_locations: for report in reports[::2]: del report.my_loc_mock logger.addFilter(MessageExcludeFilter('skipped because it has not location data')) matched_trips = self.track_mock_reports(reports, tracker_id) for matched_trip in matched_trips: timetable.services.get_trip(matched_trip).print_stoptimes() self.assertEquals(len(matched_trips), len(trip_ids)) self.assertEquals(sorted(matched_trips), sorted(trip_ids)) stop_detector_test.remove_from_redis(tracker_id) def test_tracker_on_real_devices(self): device_ids = [] trip_suffixes_list = [] device_ids.append('ofer_995357870c491cad') device_ids.append('ofer_207fabab5f381476') for device_id in device_ids: suffix_list = [x[x.index('_'):] for x in trip_ground_truth.data[device_id]] trip_suffixes_list.append(suffix_list) stop_detector_test.remove_from_redis(device_ids) for i in xrange(len(device_ids)): device_id = device_ids[i] trip_suffixes = trip_suffixes_list[i] tracker_id, trips = track_device(device_id, do_preload_reports=True) for trip_id in trips: timetable.services.print_trip_stop_times(trip_id) stop_detector.print_tracked_stop_times(device_id) self.assertEquals(len(trips), len(trip_suffixes)) for trip_suffix in trip_suffixes: self.assertTrue(self.is_trip_in_list(trips, trip_suffix)) stop_detector_test.remove_from_redis(device_ids) def is_trip_in_list(self, trips, trip_id_end): return len([x for x in trips if x.endswith(trip_id_end)]) > 0 if __name__ == '__main__': unittest.main()
{ "content_hash": "8cad33f9dd9b9ffbc6dd9567e201c19c", "timestamp": "", "source": "github", "line_count": 159, "max_line_length": 159, "avg_line_length": 42.660377358490564, "alnum_prop": 0.6429308565531475, "repo_name": "hasadna/OpenTrain", "id": "c4904d3901bbd923151dc0db976bb494bdc81b65", "size": "6783", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "webserver/opentrain/algorithm/train_tracker_test.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C++", "bytes": "857" }, { "name": "CSS", "bytes": "22895" }, { "name": "JavaScript", "bytes": "276346" }, { "name": "Python", "bytes": "398492" }, { "name": "Shell", "bytes": "671" } ], "symlink_target": "" }
"""functools.py - Tools for working with functions and callable objects """ # Python module wrapper for _functools C module # to allow utilities written in Python to be added # to the functools module. # Written by Nick Coghlan <ncoghlan at gmail.com> # Copyright (C) 2006 Python Software Foundation. # See C source code for _functools credits/copyright from _functools import partial, reduce # update_wrapper() and wraps() are tools to help write # wrapper functions that can handle naive introspection WRAPPER_ASSIGNMENTS = ('__module__', '__name__', '__doc__') WRAPPER_UPDATES = ('__dict__',) def update_wrapper(wrapper, wrapped, assigned = WRAPPER_ASSIGNMENTS, updated = WRAPPER_UPDATES): """Update a wrapper function to look like the wrapped function wrapper is the function to be updated wrapped is the original function assigned is a tuple naming the attributes assigned directly from the wrapped function to the wrapper function (defaults to functools.WRAPPER_ASSIGNMENTS) updated is a tuple naming the attributes of the wrapper that are updated with the corresponding attribute from the wrapped function (defaults to functools.WRAPPER_UPDATES) """ for attr in assigned: setattr(wrapper, attr, getattr(wrapped, attr)) for attr in updated: getattr(wrapper, attr).update(getattr(wrapped, attr, {})) # Return the wrapper so this can be used as a decorator via partial() return wrapper def wraps(wrapped, assigned = WRAPPER_ASSIGNMENTS, updated = WRAPPER_UPDATES): """Decorator factory to apply update_wrapper() to a wrapper function Returns a decorator that invokes update_wrapper() with the decorated function as the wrapper argument and the arguments to wraps() as the remaining arguments. Default arguments are as for update_wrapper(). This is a convenience function to simplify applying partial() to update_wrapper(). """ return partial(update_wrapper, wrapped=wrapped, assigned=assigned, updated=updated)
{ "content_hash": "c9f45493e7d1afbd1b65a02bf62ea4aa", "timestamp": "", "source": "github", "line_count": 51, "max_line_length": 75, "avg_line_length": 43.3921568627451, "alnum_prop": 0.6764572977858111, "repo_name": "babyliynfg/cross", "id": "539c476b944c817f5d2f2bb518950a9b5d80d0e3", "size": "2213", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "tools/project-creator/Python2.6.6/Lib/functools.py", "mode": "33261", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "36722" }, { "name": "C", "bytes": "6345646" }, { "name": "C++", "bytes": "15980000" }, { "name": "CMake", "bytes": "1238" }, { "name": "GLSL", "bytes": "64406" }, { "name": "HTML", "bytes": "147661" }, { "name": "Java", "bytes": "574078" }, { "name": "JavaScript", "bytes": "503327" }, { "name": "Makefile", "bytes": "18778" }, { "name": "Objective-C", "bytes": "396703" }, { "name": "Objective-C++", "bytes": "378740" }, { "name": "PLSQL", "bytes": "22886" }, { "name": "Python", "bytes": "15265548" }, { "name": "Roff", "bytes": "23" }, { "name": "Shell", "bytes": "61021" }, { "name": "Visual Basic", "bytes": "19200" } ], "symlink_target": "" }
from __future__ import absolute_import

import logging
from random import choice
import time

try:
    import simplejson as json
    _ = json  # pyflakes
except ImportError:
    import json

from pyleus.storm import Spout

log = logging.getLogger('requests_generator')

FIRST = (
    "www.ninjacorp.com",
    "www.ninjacorp.com",
    "www.ninjacorp.com",
    "www.ninjacorp.com",
    "www.ninjacorp.it",
    "www.ninjacorp.jp",)

SECOND = ("hidden", "hidden", "deadly", "stale", "cute",)

THIRD = (
    "ninja-cat.php",
    "ninja-cat.php",
    "ninja-cat.php",
    "ninja-cat.php",
    "evil-dog.php",
    "evil-dog.php",
    "wafel-shuriken.html",)

PROTOCOLS = ("HTTP", "HTTP", "HTTP", "HTTP", "FTP")


class RequestsGeneratorSpout(Spout):

    OUTPUT_FIELDS = ["request"]

    def next_tuple(self):
        time.sleep(0.001)
        request = {
            "timestamp": time.time(),
            "request": {
                "protocol": choice(PROTOCOLS),
                "url": "{0}/{1}-{2}".format(
                    choice(FIRST), choice(SECOND), choice(THIRD))
            }
        }
        log.debug(request)
        self.emit((json.dumps(request),))


if __name__ == '__main__':
    logging.basicConfig(
        level=logging.DEBUG,
        filename='/tmp/top_urls_requests_generator.log',
        filemode='a',
    )
    RequestsGeneratorSpout().run()
{ "content_hash": "095a9e16614319689afebac65ce51055", "timestamp": "", "source": "github", "line_count": 65, "max_line_length": 65, "avg_line_length": 21.03076923076923, "alnum_prop": 0.5683979517190929, "repo_name": "imcom/pyleus", "id": "c4decda5303bf7216cbb00e5c7becebdeae23b04", "size": "1367", "binary": false, "copies": "9", "ref": "refs/heads/develop", "path": "examples/top_urls/top_urls/requests_generator.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "30739" }, { "name": "Makefile", "bytes": "602" }, { "name": "Python", "bytes": "127742" }, { "name": "Shell", "bytes": "95" } ], "symlink_target": "" }
""" This is a web-server which integrates with the twisted.internet infrastructure. """ # System Imports import warnings import string import types import copy import os from urllib import quote from zope.interface import implements from urllib import unquote #some useful constants NOT_DONE_YET = 1 # Twisted Imports from twisted.spread import pb from twisted.internet import address, task from twisted.web import iweb, http from twisted.python import log, reflect, failure, components from twisted import copyright from twisted.web import util as webutil, resource from twisted.web.error import UnsupportedMethod from twisted.web.microdom import escape from twisted.python.versions import Version from twisted.python.deprecate import deprecatedModuleAttribute __all__ = [ 'supportedMethods', 'Request', 'Session', 'Site', 'version', 'NOT_DONE_YET' ] # backwards compatability deprecatedModuleAttribute( Version("Twisted", 12, 1, 0), "Please use twisted.web.http.datetimeToString instead", "twisted.web.server", "date_time_string") deprecatedModuleAttribute( Version("Twisted", 12, 1, 0), "Please use twisted.web.http.stringToDatetime instead", "twisted.web.server", "string_date_time") date_time_string = http.datetimeToString string_date_time = http.stringToDatetime # Support for other methods may be implemented on a per-resource basis. supportedMethods = ('GET', 'HEAD', 'POST') def _addressToTuple(addr): if isinstance(addr, address.IPv4Address): return ('INET', addr.host, addr.port) elif isinstance(addr, address.UNIXAddress): return ('UNIX', addr.name) else: return tuple(addr) class Request(pb.Copyable, http.Request, components.Componentized): """ An HTTP request. @ivar defaultContentType: A C{str} giving the default I{Content-Type} value to send in responses if no other value is set. C{None} disables the default. """ implements(iweb.IRequest) defaultContentType = "text/html" site = None appRootURL = None __pychecker__ = 'unusednames=issuer' _inFakeHead = False def __init__(self, *args, **kw): http.Request.__init__(self, *args, **kw) components.Componentized.__init__(self) def getStateToCopyFor(self, issuer): x = self.__dict__.copy() del x['transport'] # XXX refactor this attribute out; it's from protocol # del x['server'] del x['channel'] del x['content'] del x['site'] self.content.seek(0, 0) x['content_data'] = self.content.read() x['remote'] = pb.ViewPoint(issuer, self) # Address objects aren't jellyable x['host'] = _addressToTuple(x['host']) x['client'] = _addressToTuple(x['client']) # Header objects also aren't jellyable. x['requestHeaders'] = list(x['requestHeaders'].getAllRawHeaders()) return x # HTML generation helpers def sibLink(self, name): "Return the text that links to a sibling of the requested resource." if self.postpath: return (len(self.postpath)*"../") + name else: return name def childLink(self, name): "Return the text that links to a child of the requested resource." lpp = len(self.postpath) if lpp > 1: return ((lpp-1)*"../") + name elif lpp == 1: return name else: # lpp == 0 if len(self.prepath) and self.prepath[-1]: return self.prepath[-1] + '/' + name else: return name def process(self): "Process a request." 
# get site from channel self.site = self.channel.site # set various default headers self.setHeader('server', version) self.setHeader('date', http.datetimeToString()) # Resource Identification self.prepath = [] self.postpath = map(unquote, string.split(self.path[1:], '/')) try: resrc = self.site.getResourceFor(self) self.render(resrc) except: self.processingFailed(failure.Failure()) def write(self, data): """ Write data to the transport (if not responding to a HEAD request). @param data: A string to write to the response. """ if not self.startedWriting: # Before doing the first write, check to see if a default # Content-Type header should be supplied. modified = self.code != http.NOT_MODIFIED contentType = self.responseHeaders.getRawHeaders('content-type') if modified and contentType is None and self.defaultContentType is not None: self.responseHeaders.setRawHeaders( 'content-type', [self.defaultContentType]) # Only let the write happen if we're not generating a HEAD response by # faking out the request method. Note, if we are doing that, # startedWriting will never be true, and the above logic may run # multiple times. It will only actually change the responseHeaders once # though, so it's still okay. if not self._inFakeHead: http.Request.write(self, data) def render(self, resrc): """ Ask a resource to render itself. @param resrc: a L{twisted.web.resource.IResource}. """ try: body = resrc.render(self) except UnsupportedMethod, e: allowedMethods = e.allowedMethods if (self.method == "HEAD") and ("GET" in allowedMethods): # We must support HEAD (RFC 2616, 5.1.1). If the # resource doesn't, fake it by giving the resource # a 'GET' request and then return only the headers, # not the body. log.msg("Using GET to fake a HEAD request for %s" % (resrc,)) self.method = "GET" self._inFakeHead = True body = resrc.render(self) if body is NOT_DONE_YET: log.msg("Tried to fake a HEAD request for %s, but " "it got away from me." % resrc) # Oh well, I guess we won't include the content length. else: self.setHeader('content-length', str(len(body))) self._inFakeHead = False self.method = "HEAD" self.write('') self.finish() return if self.method in (supportedMethods): # We MUST include an Allow header # (RFC 2616, 10.4.6 and 14.7) self.setHeader('Allow', ', '.join(allowedMethods)) s = ('''Your browser approached me (at %(URI)s) with''' ''' the method "%(method)s". I only allow''' ''' the method%(plural)s %(allowed)s here.''' % { 'URI': escape(self.uri), 'method': self.method, 'plural': ((len(allowedMethods) > 1) and 's') or '', 'allowed': string.join(allowedMethods, ', ') }) epage = resource.ErrorPage(http.NOT_ALLOWED, "Method Not Allowed", s) body = epage.render(self) else: epage = resource.ErrorPage( http.NOT_IMPLEMENTED, "Huh?", "I don't know how to treat a %s request." % (escape(self.method),)) body = epage.render(self) # end except UnsupportedMethod if body == NOT_DONE_YET: return if type(body) is not types.StringType: body = resource.ErrorPage( http.INTERNAL_SERVER_ERROR, "Request did not return a string", "Request: " + html.PRE(reflect.safe_repr(self)) + "<br />" + "Resource: " + html.PRE(reflect.safe_repr(resrc)) + "<br />" + "Value: " + html.PRE(reflect.safe_repr(body))).render(self) if self.method == "HEAD": if len(body) > 0: # This is a Bad Thing (RFC 2616, 9.4) log.msg("Warning: HEAD request %s for resource %s is" " returning a message body." " I think I'll eat it." 
% (self, resrc)) self.setHeader('content-length', str(len(body))) self.write('') else: self.setHeader('content-length', str(len(body))) self.write(body) self.finish() def processingFailed(self, reason): log.err(reason) if self.site.displayTracebacks: body = ("<html><head><title>web.Server Traceback (most recent call last)</title></head>" "<body><b>web.Server Traceback (most recent call last):</b>\n\n" "%s\n\n</body></html>\n" % webutil.formatFailure(reason)) else: body = ("<html><head><title>Processing Failed</title></head><body>" "<b>Processing Failed</b></body></html>") self.setResponseCode(http.INTERNAL_SERVER_ERROR) self.setHeader('content-type',"text/html") self.setHeader('content-length', str(len(body))) self.write(body) self.finish() return reason def view_write(self, issuer, data): """Remote version of write; same interface. """ self.write(data) def view_finish(self, issuer): """Remote version of finish; same interface. """ self.finish() def view_addCookie(self, issuer, k, v, **kwargs): """Remote version of addCookie; same interface. """ self.addCookie(k, v, **kwargs) def view_setHeader(self, issuer, k, v): """Remote version of setHeader; same interface. """ self.setHeader(k, v) def view_setLastModified(self, issuer, when): """Remote version of setLastModified; same interface. """ self.setLastModified(when) def view_setETag(self, issuer, tag): """Remote version of setETag; same interface. """ self.setETag(tag) def view_setResponseCode(self, issuer, code, message=None): """ Remote version of setResponseCode; same interface. """ self.setResponseCode(code, message) def view_registerProducer(self, issuer, producer, streaming): """Remote version of registerProducer; same interface. (requires a remote producer.) """ self.registerProducer(_RemoteProducerWrapper(producer), streaming) def view_unregisterProducer(self, issuer): self.unregisterProducer() ### these calls remain local session = None def getSession(self, sessionInterface = None): # Session management if not self.session: cookiename = string.join(['TWISTED_SESSION'] + self.sitepath, "_") sessionCookie = self.getCookie(cookiename) if sessionCookie: try: self.session = self.site.getSession(sessionCookie) except KeyError: pass # if it still hasn't been set, fix it up. if not self.session: self.session = self.site.makeSession() self.addCookie(cookiename, self.session.uid, path='/') self.session.touch() if sessionInterface: return self.session.getComponent(sessionInterface) return self.session def _prePathURL(self, prepath): port = self.getHost().port if self.isSecure(): default = 443 else: default = 80 if port == default: hostport = '' else: hostport = ':%d' % port return 'http%s://%s%s/%s' % ( self.isSecure() and 's' or '', self.getRequestHostname(), hostport, '/'.join([quote(segment, safe='') for segment in prepath])) def prePathURL(self): return self._prePathURL(self.prepath) def URLPath(self): from twisted.python import urlpath return urlpath.URLPath.fromRequest(self) def rememberRootURL(self): """ Remember the currently-processed part of the URL for later recalling. """ url = self._prePathURL(self.prepath[:-1]) self.appRootURL = url def getRootURL(self): """ Get a previously-remembered URL. """ return self.appRootURL class _RemoteProducerWrapper: def __init__(self, remote): self.resumeProducing = remote.remoteMethod("resumeProducing") self.pauseProducing = remote.remoteMethod("pauseProducing") self.stopProducing = remote.remoteMethod("stopProducing") class Session(components.Componentized): """ A user's session with a system. 
    This utility class contains no functionality, but is used to
    represent a session.

    @ivar _reactor: An object providing L{IReactorTime} to use for scheduling
        expiration.
    @ivar sessionTimeout: timeout of a session, in seconds.
    @ivar loopFactory: Deprecated in Twisted 9.0.  Does nothing.  Do not use.
    """
    sessionTimeout = 900
    loopFactory = task.LoopingCall

    _expireCall = None

    def __init__(self, site, uid, reactor=None):
        """
        Initialize a session with a unique ID for that session.
        """
        components.Componentized.__init__(self)

        if reactor is None:
            from twisted.internet import reactor
        self._reactor = reactor

        self.site = site
        self.uid = uid
        self.expireCallbacks = []
        self.touch()
        self.sessionNamespaces = {}

    def startCheckingExpiration(self, lifetime=None):
        """
        Start expiration tracking.

        @param lifetime: Ignored; deprecated.

        @return: C{None}
        """
        if lifetime is not None:
            warnings.warn(
                "The lifetime parameter to startCheckingExpiration is "
                "deprecated since Twisted 9.0.  See Session.sessionTimeout "
                "instead.",
                DeprecationWarning,
                stacklevel=2)
        self._expireCall = self._reactor.callLater(
            self.sessionTimeout, self.expire)

    def notifyOnExpire(self, callback):
        """
        Call this callback when the session expires or logs out.
        """
        self.expireCallbacks.append(callback)

    def expire(self):
        """
        Expire/logout of the session.
        """
        del self.site.sessions[self.uid]
        for c in self.expireCallbacks:
            c()
        self.expireCallbacks = []
        if self._expireCall and self._expireCall.active():
            self._expireCall.cancel()
            # Break reference cycle.
            self._expireCall = None

    def touch(self):
        """
        Notify session modification.
        """
        self.lastModified = self._reactor.seconds()
        if self._expireCall is not None:
            self._expireCall.reset(self.sessionTimeout)

    def checkExpired(self):
        """
        Deprecated; does nothing.
        """
        warnings.warn(
            "Session.checkExpired is deprecated since Twisted 9.0; sessions "
            "check themselves now, you don't need to.",
            stacklevel=2,
            category=DeprecationWarning)


version = "TwistedWeb/%s" % copyright.version


class Site(http.HTTPFactory):
    """
    A web site: manage log, sessions, and resources.

    @ivar counter: counter used for generating unique session IDs.
    @ivar requestFactory: factory creating request objects. Defaults to
        L{Request}.
    @ivar displayTracebacks: if set, Twisted internal errors are displayed on
        rendered pages. Defaults to C{True}.
    @ivar sessionFactory: factory for session objects. Defaults to L{Session}.
    @ivar sessionCheckTime: Deprecated.  See L{Session.sessionTimeout} instead.
    """
    counter = 0
    requestFactory = Request
    displayTracebacks = True
    sessionFactory = Session
    sessionCheckTime = 1800

    def __init__(self, resource, logPath=None, timeout=60*60*12):
        """
        Initialize.
        """
        http.HTTPFactory.__init__(self, logPath=logPath, timeout=timeout)
        self.sessions = {}
        self.resource = resource

    def _openLogFile(self, path):
        from twisted.python import logfile
        return logfile.LogFile(os.path.basename(path), os.path.dirname(path))

    def __getstate__(self):
        d = self.__dict__.copy()
        d['sessions'] = {}
        return d

    def _mkuid(self):
        """
        (internal) Generate an opaque, unique ID for a user's session.
        """
        from twisted.python.hashlib import md5
        import random
        self.counter = self.counter + 1
        return md5("%s_%s" % (str(random.random()), str(self.counter))).hexdigest()

    def makeSession(self):
        """
        Generate a new Session instance, and store it for future reference.
""" uid = self._mkuid() session = self.sessions[uid] = self.sessionFactory(self, uid) session.startCheckingExpiration() return session def getSession(self, uid): """ Get a previously generated session, by its unique ID. This raises a KeyError if the session is not found. """ return self.sessions[uid] def buildProtocol(self, addr): """ Generate a channel attached to this site. """ channel = http.HTTPFactory.buildProtocol(self, addr) channel.requestFactory = self.requestFactory channel.site = self return channel isLeaf = 0 def render(self, request): """ Redirect because a Site is always a directory. """ request.redirect(request.prePathURL() + '/') request.finish() def getChildWithDefault(self, pathEl, request): """ Emulate a resource's getChild method. """ request.site = self return self.resource.getChildWithDefault(pathEl, request) def getResourceFor(self, request): """ Get a resource for a request. This iterates through the resource heirarchy, calling getChildWithDefault on each resource it finds for a path element, stopping when it hits an element where isLeaf is true. """ request.site = self # Sitepath is used to determine cookie names between distributed # servers and disconnected sites. request.sitepath = copy.copy(request.prepath) return resource.getChildForRequest(self.resource, request) import html
{ "content_hash": "7d4e9af9712bc9b24daf6d5f8d886636", "timestamp": "", "source": "github", "line_count": 587, "max_line_length": 100, "avg_line_length": 32.335604770017035, "alnum_prop": 0.5826352668457931, "repo_name": "Varriount/Colliberation", "id": "d03bec66d62b616a01f8f4deecb93eba3c0f1892", "size": "19107", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "libs/twisted/web/server.py", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "509005" }, { "name": "D", "bytes": "29" }, { "name": "GAP", "bytes": "14120" }, { "name": "Objective-C", "bytes": "1291" }, { "name": "Python", "bytes": "10503398" }, { "name": "Shell", "bytes": "1512" } ], "symlink_target": "" }
import os import sys sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), "python-astrocoinrpc")) import json import shutil import subprocess import tempfile import traceback from astrocoinrpc.authproxy import AuthServiceProxy, JSONRPCException from util import * def check_array_result(object_array, to_match, expected): """ Pass in array of JSON objects, a dictionary with key/value pairs to match against, and another dictionary with expected key/value pairs. """ num_matched = 0 for item in object_array: all_match = True for key,value in to_match.items(): if item[key] != value: all_match = False if not all_match: continue for key,value in expected.items(): if item[key] != value: raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value))) num_matched = num_matched+1 if num_matched == 0: raise AssertionError("No objects matched %s"%(str(to_match))) def run_test(nodes): # Simple send, 0 to 1: txid = nodes[0].sendtoaddress(nodes[1].getnewaddress(), 0.1) sync_mempools(nodes) check_array_result(nodes[0].listtransactions(), {"txid":txid}, {"category":"send","account":"","amount":Decimal("-0.1"),"confirmations":0}) check_array_result(nodes[1].listtransactions(), {"txid":txid}, {"category":"receive","account":"","amount":Decimal("0.1"),"confirmations":0}) # mine a block, confirmations should change: nodes[0].setgenerate(True, 1) sync_blocks(nodes) check_array_result(nodes[0].listtransactions(), {"txid":txid}, {"category":"send","account":"","amount":Decimal("-0.1"),"confirmations":1}) check_array_result(nodes[1].listtransactions(), {"txid":txid}, {"category":"receive","account":"","amount":Decimal("0.1"),"confirmations":1}) # send-to-self: txid = nodes[0].sendtoaddress(nodes[0].getnewaddress(), 0.2) check_array_result(nodes[0].listtransactions(), {"txid":txid, "category":"send"}, {"amount":Decimal("-0.2")}) check_array_result(nodes[0].listtransactions(), {"txid":txid, "category":"receive"}, {"amount":Decimal("0.2")}) # sendmany from node1: twice to self, twice to node2: send_to = { nodes[0].getnewaddress() : 0.11, nodes[1].getnewaddress() : 0.22, nodes[0].getaccountaddress("from1") : 0.33, nodes[1].getaccountaddress("toself") : 0.44 } txid = nodes[1].sendmany("", send_to) sync_mempools(nodes) check_array_result(nodes[1].listtransactions(), {"category":"send","amount":Decimal("-0.11")}, {"txid":txid} ) check_array_result(nodes[0].listtransactions(), {"category":"receive","amount":Decimal("0.11")}, {"txid":txid} ) check_array_result(nodes[1].listtransactions(), {"category":"send","amount":Decimal("-0.22")}, {"txid":txid} ) check_array_result(nodes[1].listtransactions(), {"category":"receive","amount":Decimal("0.22")}, {"txid":txid} ) check_array_result(nodes[1].listtransactions(), {"category":"send","amount":Decimal("-0.33")}, {"txid":txid} ) check_array_result(nodes[0].listtransactions(), {"category":"receive","amount":Decimal("0.33")}, {"txid":txid, "account" : "from1"} ) check_array_result(nodes[1].listtransactions(), {"category":"send","amount":Decimal("-0.44")}, {"txid":txid, "account" : ""} ) check_array_result(nodes[1].listtransactions(), {"category":"receive","amount":Decimal("0.44")}, {"txid":txid, "account" : "toself"} ) def main(): import optparse parser = optparse.OptionParser(usage="%prog [options]") parser.add_option("--nocleanup", dest="nocleanup", default=False, action="store_true", help="Leave astrocoinds and test.* datadir on exit or error") parser.add_option("--srcdir", dest="srcdir", default="../../src", help="Source directory containing 
astrocoind/astrocoin-cli (default: %default%)") parser.add_option("--tmpdir", dest="tmpdir", default=tempfile.mkdtemp(prefix="test"), help="Root directory for datadirs") (options, args) = parser.parse_args() os.environ['PATH'] = options.srcdir+":"+os.environ['PATH'] check_json_precision() success = False try: print("Initializing test directory "+options.tmpdir) if not os.path.isdir(options.tmpdir): os.makedirs(options.tmpdir) initialize_chain(options.tmpdir) nodes = start_nodes(2, options.tmpdir) connect_nodes(nodes[1], 0) sync_blocks(nodes) run_test(nodes) success = True except AssertionError as e: print("Assertion failed: "+e.message) except Exception as e: print("Unexpected exception caught during testing: "+str(e)) stack = traceback.extract_tb(sys.exc_info()[2]) print(stack[-1]) if not options.nocleanup: print("Cleaning up") stop_nodes() shutil.rmtree(options.tmpdir) if success: print("Tests successful") sys.exit(0) else: print("Failed") sys.exit(1) if __name__ == '__main__': main()
{ "content_hash": "ece6cc8ff54b41a89b3bd89d6157b34c", "timestamp": "", "source": "github", "line_count": 146, "max_line_length": 105, "avg_line_length": 39.60958904109589, "alnum_prop": 0.5593982362095798, "repo_name": "corefork/astrocoincore", "id": "0e83630cb33b3377a3d0ddd6629b59ebca4844bc", "size": "5892", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "qa/rpc-tests/listtransactions.py", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "179035" }, { "name": "C++", "bytes": "2889037" }, { "name": "CSS", "bytes": "1127" }, { "name": "Objective-C++", "bytes": "6266" }, { "name": "Python", "bytes": "62715" }, { "name": "Shell", "bytes": "33189" }, { "name": "TypeScript", "bytes": "10322934" } ], "symlink_target": "" }
import sys import os import string import rfidiot try: card= rfidiot.card except: print "Couldn't open reader!" os._exit(True) card.info('isotype v0.1n') typed= 0 if card.readertype == card.READER_ACG: for command, cardtype in card.ISOTags.iteritems(): if not card.settagtype(command): print 'Could not test for card type: ' + cardtype continue if card.select(): print ' ID: ' + card.uid print " Tag is " + cardtype typed= True if command == card.ISO15693: print ' Manufacturer:', try: print card.ISO7816Manufacturer[card.uid[2:4]] except: print 'Unknown (%s)' % card.uid[2:4] for command, cardtype in card.ISOTagsA.iteritems(): if not card.settagtype(command): print 'Could not reset reader to ' + cardtype + '!' os._exit(True) if card.readertype == card.READER_PCSC: if card.select(): print ' ID: ' + card.uid print " Tag is " + card.tagtype if string.find(card.tagtype,"ISO 15693") >= 0: print ' Manufacturer:', try: print card.ISO7816Manufacturer[card.uid[2:4]] except: print 'Unknown (%s)' % card.uid[2:4] typed= True print print if not card.readersubtype == card.READER_ACS: card.PCSCPrintATR(card.pcsc_atr) else: print card.ISO7816ErrorCodes[card.errorcode] os._exit(True) if card.readertype == card.READER_LIBNFC: if card.select('A'): print ' ID: ' + card.uid if card.atr: print ' ATS: ' + card.atr print " Tag is ISO 14443A" typed= True if card.select('B'): print ' PUPI: ' + card.pupi print ' APP: ' + card.appdata print ' PROTO: ' + card.protocol print ' CID: ' + card.cid print " Tag is ISO 14443B" typed= True if card.select('JEWEL'): print 'SENSRES: ' + card.btsensres print ' ID: ' + card.btid print " Tag is JEWEL" typed= True if not typed: print "Could not determine type" os._exit(True) os._exit(False)
{ "content_hash": "aea2754c04bf6b2f3cfcb1760cc077e3", "timestamp": "", "source": "github", "line_count": 77, "max_line_length": 54, "avg_line_length": 25.31168831168831, "alnum_prop": 0.6275012827090816, "repo_name": "kaosbeat/datakamp", "id": "786dfbde0b68848e4fc35c0c0b08c8dcbe194c5e", "size": "2769", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "RFIDIOt-master/isotype.py", "mode": "33261", "license": "mit", "language": [ { "name": "Java", "bytes": "3267" }, { "name": "Makefile", "bytes": "4668" }, { "name": "Python", "bytes": "417540" }, { "name": "Roff", "bytes": "119" }, { "name": "Shell", "bytes": "9968" } ], "symlink_target": "" }
from unittest import TestCase from biicode.common.model.bii_type import BiiType, PYTHON from biicode.common.model.brl.block_cell_name import BlockCellName from biicode.common.model.declare.python_declaration import PythonDeclaration, PythonImport from biicode.common.dev.system_resource_validator import getSystemNameValidatorFor from mock import Mock class TestPythonDeclaration(TestCase): def test_from_import_block(self): sut = PythonDeclaration("from biicode.common.edition import Parser") self.assertEquals("biicode/common", sut.block()) self.assertEquals([('Parser', None)], sut.python_import().names) def test_general_import_block(self): sut = PythonDeclaration("import biicode.common.edition") self.assertEquals("biicode/common", sut.block()) self.assertEquals(None, sut.python_import().names) def test_from_import_all(self): sut = PythonDeclaration("from biicode.common.edition import *") self.assertEquals("biicode/common", sut.block()) self.assertEquals([("*", None)], sut.python_import().names) def test_aliased_general_import_block(self): sut = PythonDeclaration("import biicode.common.edition as pepe") self.assertEquals("biicode/common", sut.block()) self.assertEquals(None, sut.python_import().names) def test_aliased_from_import_block(self): sut = PythonDeclaration("from biicode.common.edition import Parser as pepe") self.assertEquals("biicode/common", sut.block()) self.assertEquals([("Parser", "pepe")], sut.python_import().names) def test_multi_aliased_from_import_block(self): sut = PythonDeclaration("from biicode.common.edition import Parser as pepe, Counter as cont") self.assertEquals("biicode/common", sut.block()) self.assertEquals([("Parser", "pepe"), ("Counter", "cont")], sut.python_import().names) def test_from_import_error(self): sut = PythonDeclaration("from biicode.common.edition impor Parser") self.assertIsNone(sut.block()) def test_simple_match(self): sut = PythonDeclaration("import biicode.common.edition") block_cell_names = set([BlockCellName("biicode/common/edition.py"), BlockCellName("biicode/common/jarl.py")]) self.assertEquals(set([BlockCellName("biicode/common/edition.py")]), sut.match(block_cell_names)) def test_init_match(self): sut = PythonDeclaration("import biicode.common.edition") block_cell_names = set([BlockCellName("biicode/common/__init__.py"), BlockCellName("biicode/common/edition.py")]) self.assertEquals(block_cell_names, sut.match(block_cell_names)) def test_multi_init_match(self): sut = PythonDeclaration("import biicode.common.edition") block_cell_names = set([BlockCellName("biicode/common/__init__.py"), BlockCellName("biicode/common/test/__init__.py"), BlockCellName("biicode/common/edition.py")]) expected = set([BlockCellName("biicode/common/__init__.py"), BlockCellName("biicode/common/edition.py")]) self.assertEquals(expected, sut.match(block_cell_names)) def test_multi_init_levels_match(self): sut = PythonDeclaration("import biicode.common.test.name") block_cell_names = set([BlockCellName("biicode/common/__init__.py"), BlockCellName("biicode/common/test/__init__.py"), BlockCellName("biicode/common/test/name.py"), BlockCellName("biicode/common/edition.py")]) expected = set([BlockCellName("biicode/common/__init__.py"), BlockCellName("biicode/common/test/__init__.py"), BlockCellName("biicode/common/test/name.py")]) self.assertEquals(expected, sut.match(block_cell_names)) def test_multi_init_levels_match_relative_import(self): sut = PythonDeclaration("import name") block_cell_names = set([BlockCellName("biicode/common/__init__.py"), 
BlockCellName("biicode/common/test/__init__.py"), BlockCellName("biicode/common/test/name.py"), BlockCellName("biicode/common/edition.py")]) expected = set([BlockCellName("biicode/common/__init__.py"), BlockCellName("biicode/common/test/__init__.py"), BlockCellName("biicode/common/test/name.py")]) self.assertEquals(expected, sut.match(block_cell_names, BlockCellName("biicode/common/test/name.py"))) def test_simple_match_relative(self): sut = PythonDeclaration("from edition import *") block_cell_names = set([BlockCellName("biicode/common/edition.py"), BlockCellName("biicode/common/jarl.py")]) from_block_cell_name = BlockCellName("biicode/common/main.py") self.assertEquals(set([BlockCellName("biicode/common/edition.py")]), sut.match(block_cell_names, from_block_cell_name)) def test_simple_match_relative_in_different_block(self): sut = PythonDeclaration("from edition import *") block_cell_names = set([BlockCellName("biicode/parsing/edition.py"), BlockCellName("biicode/parsing/jarl.py")]) from_block_cell_name = BlockCellName("biicode/common/main.py") self.assertEquals(set(), sut.match(block_cell_names, from_block_cell_name)) def test_match_system(self): sut = PythonDeclaration("import re") validator = self.generate_python_sys_libs() self.assertEquals(set(["re.py"]), sut.match_system(validator)) def test_match_composed_system(self): sut = PythonDeclaration("import xml.dom.domreg") validator = self.generate_python_sys_libs() self.assertEquals(set(["xml/dom/domreg.py"]), sut.match_system(validator)) def test_match_system_inexistent(self): sut = PythonDeclaration("import reee") validator = self.generate_python_sys_libs() self.assertEquals(set([]), sut.match_system(validator)) def generate_python_sys_libs(self): cell_mock = Mock() cell_mock.type = BiiType(PYTHON) validator = getSystemNameValidatorFor(cell_mock).names() return validator def test_normalize(self): sut = PythonDeclaration("import myblock") result = sut.normalize(['testuser/block/__init__.py', 'testuser/block/myblock.py']) self.assertEquals(PythonDeclaration("import testuser.block.myblock as myblock"), result) def test_normalize_multi_dir(self): sut = PythonDeclaration("import biipyc") result = sut.normalize(['testuser1/pyc/pythondynlibs/biipyc.py']) self.assertEquals(PythonDeclaration("import testuser1.pyc.pythondynlibs.biipyc as biipyc"), result) def test_normalize_import_with_alias(self): sut = PythonDeclaration("import myblock as block") result = sut.normalize(['testuser/block/__init__.py', 'testuser/block/myblock.py']) self.assertEquals(PythonDeclaration("import testuser.block.myblock as block"), result) def test_complex_normalize(self): sut = PythonDeclaration("from myblock import Parser, Runner") result = sut.normalize(['testuser/block/__init__.py', 'testuser/block/myblock.py']) self.assertEquals(PythonDeclaration("from testuser.block.myblock import Parser, Runner"), result) def test_complex_normalize_with_alias(self): sut = PythonDeclaration("from myblock import Parser as Hell") result = sut.normalize(['testuser/block/__init__.py', 'testuser/block/myblock.py']) self.assertEquals(PythonDeclaration("from testuser.block.myblock import Parser as Hell"), result) def test_normalize_composed_import(self): sut = PythonDeclaration("import biicode.common.edition.parsing") result = sut.normalize(['biicode/common/edition/parsing.py']) self.assertEquals(PythonDeclaration("import biicode.common.edition.parsing"), result) def test_python_import(self): from_import = "from biicode.common.edition import Parser" parsed_import = 
PythonImport.parse(from_import) self.assertEquals(PythonImport(module="biicode.common.edition", names=[("Parser", None)]), parsed_import) self.assertEquals(from_import, parsed_import.to_python_statement())
{ "content_hash": "fef32a9180e0a35c777ae32ebb79cf82", "timestamp": "", "source": "github", "line_count": 173, "max_line_length": 101, "avg_line_length": 50.786127167630056, "alnum_prop": 0.6406783519235146, "repo_name": "franramirez688/common", "id": "5a6f0c0450388d43c8cd4c03945f47b41a6407ea", "size": "8786", "binary": false, "copies": "5", "ref": "refs/heads/develop", "path": "test/model/declare/python_declaration_test.py", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "3157300" }, { "name": "C++", "bytes": "4667113" }, { "name": "CMake", "bytes": "25379" }, { "name": "FORTRAN", "bytes": "3691" }, { "name": "Java", "bytes": "4201" }, { "name": "JavaScript", "bytes": "172849" }, { "name": "Makefile", "bytes": "6333" }, { "name": "Objective-C", "bytes": "826" }, { "name": "Python", "bytes": "714678" }, { "name": "Shell", "bytes": "645" } ], "symlink_target": "" }
import os import subprocess import tempfile from q2cli.core.usage import CLIUsage from qiime2.core.testing.util import get_dummy_plugin import pytest def _rt_labeler(val): if hasattr(val, 'id'): return val.id return val @pytest.fixture def dummy_plugin(monkeypatch): monkeypatch.setenv('QIIMETEST', '') return get_dummy_plugin() def get_templated_tests(): return [ ('concatenate_ints', """\ # This example demonstrates basic usage. qiime dummy-plugin concatenate-ints \\ --i-ints1 ints-a.qza \\ --i-ints2 ints-b.qza \\ --i-ints3 ints-c.qza \\ --p-int1 4 \\ --p-int2 2 \\ --o-concatenated-ints ints-d.qza # This example demonstrates chained usage (pt 1). qiime dummy-plugin concatenate-ints \\ --i-ints1 ints-a.qza \\ --i-ints2 ints-b.qza \\ --i-ints3 ints-c.qza \\ --p-int1 4 \\ --p-int2 2 \\ --o-concatenated-ints ints-d.qza # This example demonstrates chained usage (pt 2). qiime dummy-plugin concatenate-ints \\ --i-ints1 ints-d.qza \\ --i-ints2 ints-b.qza \\ --i-ints3 ints-c.qza \\ --p-int1 41 \\ --p-int2 0 \\ --o-concatenated-ints concatenated-ints.qza # comment 1 # comment 2 # comment 1 # comment 2"""), ('identity_with_metadata', """\ qiime dummy-plugin identity-with-metadata \\ --i-ints ints.qza \\ --m-metadata-file md.tsv \\ --o-out out.qza qiime dummy-plugin identity-with-metadata \\ --i-ints ints.qza \\ --m-metadata-file md1.tsv md2.tsv \\ --o-out out.qza"""), ('identity_with_metadata_column', """\ qiime dummy-plugin identity-with-metadata-column \\ --i-ints ints.qza \\ --m-metadata-file md.tsv \\ --m-metadata-column a \\ --o-out out.qza"""), ('typical_pipeline', """\ qiime dummy-plugin typical-pipeline \\ --i-int-sequence ints.qza \\ --i-mapping mapper.qza \\ --p-do-extra-thing \\ --o-out-map out-map.qza \\ --o-left left.qza \\ --o-right right.qza \\ --o-left-viz left-viz.qzv \\ --o-right-viz right-viz.qzv qiime dummy-plugin typical-pipeline \\ --i-int-sequence ints1.qza \\ --i-mapping mapper1.qza \\ --p-do-extra-thing \\ --o-out-map out-map1.qza \\ --o-left left1.qza \\ --o-right right1.qza \\ --o-left-viz left-viz1.qzv \\ --o-right-viz right-viz1.qzv qiime dummy-plugin typical-pipeline \\ --i-int-sequence left1.qza \\ --i-mapping out-map1.qza \\ --p-no-do-extra-thing \\ --o-out-map out-map2.qza \\ --o-left left2.qza \\ --o-right right2.qza \\ --o-left-viz left-viz2.qzv \\ --o-right-viz right-viz2.qzv qiime dev assert-result-data right2.qza \\ --zip-data-path ints.txt \\ --expression 1 qiime dev assert-result-type right2.qza \\ --qiime-type IntSequence1 qiime dev assert-result-type out-map1.qza \\ --qiime-type Mapping"""), ('optional_artifacts_method', """\ qiime dummy-plugin optional-artifacts-method \\ --i-ints ints.qza \\ --p-num1 1 \\ --o-output output1.qza qiime dummy-plugin optional-artifacts-method \\ --i-ints ints.qza \\ --p-num1 1 \\ --p-num2 2 \\ --o-output output2.qza qiime dummy-plugin optional-artifacts-method \\ --i-ints ints.qza \\ --p-num1 1 \\ --o-output output3.qza qiime dummy-plugin optional-artifacts-method \\ --i-ints ints.qza \\ --i-optional1 output3.qza \\ --p-num1 3 \\ --p-num2 4 \\ --o-output output4.qza"""), ('variadic_input_method', """\ qiime dummy-plugin variadic-input-method \\ --i-ints ints-a.qza ints-b.qza \\ --i-int-set single-int1.qza single-int2.qza \\ --p-nums 7 8 9 \\ --o-output out.qza"""), ] _templ_ids = [x[0] for x in get_templated_tests()] @pytest.mark.parametrize('action,exp', get_templated_tests(), ids=_templ_ids) def test_templated(dummy_plugin, action, exp): action = dummy_plugin.actions[action] obs = '' for example_f in action.examples.values(): 
use = CLIUsage(enable_assertions=True) example_f(use) obs += use.render() obs += '\n' # trim final newline obs = obs[:-1] assert exp == obs def get_rt_tests(): tests = [] try: plugin = get_dummy_plugin() except RuntimeError: return tests for action in plugin.actions.values(): for name in action.examples: tests.append((action, name)) return tests @pytest.mark.parametrize('action,example', get_rt_tests(), ids=_rt_labeler) def test_round_trip(action, example): example_f = action.examples[example] use = CLIUsage(enable_assertions=True) example_f(use) rendered = use.render() with tempfile.TemporaryDirectory() as tmpdir: for ref, data in use.get_example_data(): data.save(os.path.join(tmpdir, ref)) subprocess.run([rendered], shell=True, check=True, cwd=tmpdir, env={**os.environ})
{ "content_hash": "63ff01793a45385f67b153677fe7f38b", "timestamp": "", "source": "github", "line_count": 186, "max_line_length": 77, "avg_line_length": 26.338709677419356, "alnum_prop": 0.6148193508879363, "repo_name": "qiime2/q2cli", "id": "34d7c195841fccd3bc561855508ca7dae052644c", "size": "5249", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "q2cli/tests/test_usage.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Makefile", "bytes": "638" }, { "name": "Python", "bytes": "309385" }, { "name": "Shell", "bytes": "1946" } ], "symlink_target": "" }
"""Provides device automations for Cover.""" from __future__ import annotations import voluptuous as vol from homeassistant.components.automation import AutomationActionType from homeassistant.components.device_automation import TRIGGER_BASE_SCHEMA from homeassistant.components.homeassistant.triggers import ( numeric_state as numeric_state_trigger, state as state_trigger, ) from homeassistant.const import ( ATTR_SUPPORTED_FEATURES, CONF_ABOVE, CONF_BELOW, CONF_DEVICE_ID, CONF_DOMAIN, CONF_ENTITY_ID, CONF_FOR, CONF_PLATFORM, CONF_TYPE, CONF_VALUE_TEMPLATE, STATE_CLOSED, STATE_CLOSING, STATE_OPEN, STATE_OPENING, ) from homeassistant.core import CALLBACK_TYPE, HomeAssistant from homeassistant.helpers import config_validation as cv, entity_registry from homeassistant.helpers.typing import ConfigType from . import ( DOMAIN, SUPPORT_CLOSE, SUPPORT_OPEN, SUPPORT_SET_POSITION, SUPPORT_SET_TILT_POSITION, ) POSITION_TRIGGER_TYPES = {"position", "tilt_position"} STATE_TRIGGER_TYPES = {"opened", "closed", "opening", "closing"} POSITION_TRIGGER_SCHEMA = vol.All( TRIGGER_BASE_SCHEMA.extend( { vol.Required(CONF_ENTITY_ID): cv.entity_id, vol.Required(CONF_TYPE): vol.In(POSITION_TRIGGER_TYPES), vol.Optional(CONF_ABOVE): vol.All( vol.Coerce(int), vol.Range(min=0, max=100) ), vol.Optional(CONF_BELOW): vol.All( vol.Coerce(int), vol.Range(min=0, max=100) ), } ), cv.has_at_least_one_key(CONF_BELOW, CONF_ABOVE), ) STATE_TRIGGER_SCHEMA = TRIGGER_BASE_SCHEMA.extend( { vol.Required(CONF_ENTITY_ID): cv.entity_id, vol.Required(CONF_TYPE): vol.In(STATE_TRIGGER_TYPES), vol.Optional(CONF_FOR): cv.positive_time_period_dict, } ) TRIGGER_SCHEMA = vol.Any(POSITION_TRIGGER_SCHEMA, STATE_TRIGGER_SCHEMA) async def async_get_triggers(hass: HomeAssistant, device_id: str) -> list[dict]: """List device triggers for Cover devices.""" registry = await entity_registry.async_get_registry(hass) triggers = [] # Get all the integrations entities for this device for entry in entity_registry.async_entries_for_device(registry, device_id): if entry.domain != DOMAIN: continue state = hass.states.get(entry.entity_id) if not state or ATTR_SUPPORTED_FEATURES not in state.attributes: continue supported_features = state.attributes[ATTR_SUPPORTED_FEATURES] supports_open_close = supported_features & (SUPPORT_OPEN | SUPPORT_CLOSE) # Add triggers for each entity that belongs to this integration base_trigger = { CONF_PLATFORM: "device", CONF_DEVICE_ID: device_id, CONF_DOMAIN: DOMAIN, CONF_ENTITY_ID: entry.entity_id, } if supports_open_close: triggers += [ { **base_trigger, CONF_TYPE: trigger, } for trigger in STATE_TRIGGER_TYPES ] if supported_features & SUPPORT_SET_POSITION: triggers.append( { **base_trigger, CONF_TYPE: "position", } ) if supported_features & SUPPORT_SET_TILT_POSITION: triggers.append( { **base_trigger, CONF_TYPE: "tilt_position", } ) return triggers async def async_get_trigger_capabilities(hass: HomeAssistant, config: dict) -> dict: """List trigger capabilities.""" if config[CONF_TYPE] not in POSITION_TRIGGER_TYPES: return { "extra_fields": vol.Schema( {vol.Optional(CONF_FOR): cv.positive_time_period_dict} ) } return { "extra_fields": vol.Schema( { vol.Optional(CONF_ABOVE, default=0): vol.All( vol.Coerce(int), vol.Range(min=0, max=100) ), vol.Optional(CONF_BELOW, default=100): vol.All( vol.Coerce(int), vol.Range(min=0, max=100) ), } ) } async def async_attach_trigger( hass: HomeAssistant, config: ConfigType, action: AutomationActionType, automation_info: dict, ) -> CALLBACK_TYPE: """Attach a trigger.""" if config[CONF_TYPE] in 
STATE_TRIGGER_TYPES: if config[CONF_TYPE] == "opened": to_state = STATE_OPEN elif config[CONF_TYPE] == "closed": to_state = STATE_CLOSED elif config[CONF_TYPE] == "opening": to_state = STATE_OPENING elif config[CONF_TYPE] == "closing": to_state = STATE_CLOSING state_config = { CONF_PLATFORM: "state", CONF_ENTITY_ID: config[CONF_ENTITY_ID], state_trigger.CONF_TO: to_state, } if CONF_FOR in config: state_config[CONF_FOR] = config[CONF_FOR] state_config = state_trigger.TRIGGER_SCHEMA(state_config) return await state_trigger.async_attach_trigger( hass, state_config, action, automation_info, platform_type="device" ) if config[CONF_TYPE] == "position": position = "current_position" if config[CONF_TYPE] == "tilt_position": position = "current_tilt_position" min_pos = config.get(CONF_ABOVE, -1) max_pos = config.get(CONF_BELOW, 101) value_template = f"{{{{ state.attributes.{position} }}}}" numeric_state_config = { CONF_PLATFORM: "numeric_state", CONF_ENTITY_ID: config[CONF_ENTITY_ID], CONF_BELOW: max_pos, CONF_ABOVE: min_pos, CONF_VALUE_TEMPLATE: value_template, } numeric_state_config = numeric_state_trigger.TRIGGER_SCHEMA(numeric_state_config) return await numeric_state_trigger.async_attach_trigger( hass, numeric_state_config, action, automation_info, platform_type="device" )
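

# Illustrative examples (an addition for illustration, not part of the
# original module; the device and entity IDs are hypothetical): minimal
# configs accepted by the two trigger schemas defined above.
_EXAMPLE_STATE_TRIGGER = {
    CONF_PLATFORM: "device",
    CONF_DOMAIN: DOMAIN,
    CONF_DEVICE_ID: "0123456789abcdef",  # hypothetical device id
    CONF_ENTITY_ID: "cover.garage_door",  # hypothetical entity id
    CONF_TYPE: "opened",  # any member of STATE_TRIGGER_TYPES
}

_EXAMPLE_POSITION_TRIGGER = {
    CONF_PLATFORM: "device",
    CONF_DOMAIN: DOMAIN,
    CONF_DEVICE_ID: "0123456789abcdef",
    CONF_ENTITY_ID: "cover.garage_door",
    CONF_TYPE: "position",  # or "tilt_position"
    CONF_ABOVE: 50,  # at least one of CONF_ABOVE / CONF_BELOW is required
}

# Both validate cleanly, e.g.:
#     TRIGGER_SCHEMA(_EXAMPLE_POSITION_TRIGGER)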
{ "content_hash": "509f69fc82d0ce6c76b333cdf2c0bdfe", "timestamp": "", "source": "github", "line_count": 191, "max_line_length": 85, "avg_line_length": 32.083769633507856, "alnum_prop": 0.5969321148825065, "repo_name": "kennedyshead/home-assistant", "id": "9b94833bb296412aa235bfb71535291d9b1adb05", "size": "6128", "binary": false, "copies": "2", "ref": "refs/heads/dev", "path": "homeassistant/components/cover/device_trigger.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "1795" }, { "name": "Python", "bytes": "33970989" }, { "name": "Shell", "bytes": "4900" } ], "symlink_target": "" }
"""Test HTML utils""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import ctypes import os import nose.tools as nt from traitlets.tests.utils import check_help_all_output from notebook.utils import url_escape, url_unescape, is_hidden, is_file_hidden from ipython_genutils.py3compat import cast_unicode from ipython_genutils.tempdir import TemporaryDirectory from ipython_genutils.testing.decorators import skip_if_not_win32 def test_help_output(): """jupyter notebook --help-all works""" # FIXME: will be notebook check_help_all_output('notebook') def test_url_escape(): # changes path or notebook name with special characters to url encoding # these tests specifically encode paths with spaces path = url_escape('/this is a test/for spaces/') nt.assert_equal(path, '/this%20is%20a%20test/for%20spaces/') path = url_escape('notebook with space.ipynb') nt.assert_equal(path, 'notebook%20with%20space.ipynb') path = url_escape('/path with a/notebook and space.ipynb') nt.assert_equal(path, '/path%20with%20a/notebook%20and%20space.ipynb') path = url_escape('/ !@$#%^&* / test %^ notebook @#$ name.ipynb') nt.assert_equal(path, '/%20%21%40%24%23%25%5E%26%2A%20/%20test%20%25%5E%20notebook%20%40%23%24%20name.ipynb') def test_url_unescape(): # decodes a url string to a plain string # these tests decode paths with spaces path = url_unescape('/this%20is%20a%20test/for%20spaces/') nt.assert_equal(path, '/this is a test/for spaces/') path = url_unescape('notebook%20with%20space.ipynb') nt.assert_equal(path, 'notebook with space.ipynb') path = url_unescape('/path%20with%20a/notebook%20and%20space.ipynb') nt.assert_equal(path, '/path with a/notebook and space.ipynb') path = url_unescape( '/%20%21%40%24%23%25%5E%26%2A%20/%20test%20%25%5E%20notebook%20%40%23%24%20name.ipynb') nt.assert_equal(path, '/ !@$#%^&* / test %^ notebook @#$ name.ipynb') def test_is_hidden(): with TemporaryDirectory() as root: subdir1 = os.path.join(root, 'subdir') os.makedirs(subdir1) nt.assert_equal(is_hidden(subdir1, root), False) nt.assert_equal(is_file_hidden(subdir1), False) subdir2 = os.path.join(root, '.subdir2') os.makedirs(subdir2) nt.assert_equal(is_hidden(subdir2, root), True) nt.assert_equal(is_file_hidden(subdir2), True) subdir34 = os.path.join(root, 'subdir3', '.subdir4') os.makedirs(subdir34) nt.assert_equal(is_hidden(subdir34, root), True) nt.assert_equal(is_hidden(subdir34), True) subdir56 = os.path.join(root, '.subdir5', 'subdir6') os.makedirs(subdir56) nt.assert_equal(is_hidden(subdir56, root), True) nt.assert_equal(is_hidden(subdir56), True) nt.assert_equal(is_file_hidden(subdir56), False) nt.assert_equal(is_file_hidden(subdir56, os.stat(subdir56)), False) @skip_if_not_win32 def test_is_hidden_win32(): with TemporaryDirectory() as root: root = cast_unicode(root) subdir1 = os.path.join(root, u'subdir') os.makedirs(subdir1) assert not is_hidden(subdir1, root) r = ctypes.windll.kernel32.SetFileAttributesW(subdir1, 0x02) print(r) assert is_hidden(subdir1, root) assert is_file_hidden(subdir1)
{ "content_hash": "0459c0cc6ecbe7336cbe60f6f194b4fe", "timestamp": "", "source": "github", "line_count": 91, "max_line_length": 95, "avg_line_length": 37.472527472527474, "alnum_prop": 0.6730205278592375, "repo_name": "ammarkhann/FinalSeniorCode", "id": "8922beb38968077812768921bec9f58970ed3a3d", "size": "3410", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "lib/python2.7/site-packages/notebook/tests/test_utils.py", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "229289" }, { "name": "C++", "bytes": "171536" }, { "name": "CSS", "bytes": "928345" }, { "name": "Fortran", "bytes": "14107" }, { "name": "HTML", "bytes": "853239" }, { "name": "JavaScript", "bytes": "4838516" }, { "name": "Jupyter Notebook", "bytes": "518186" }, { "name": "Makefile", "bytes": "214" }, { "name": "Matlab", "bytes": "4346" }, { "name": "Python", "bytes": "81804894" }, { "name": "Roff", "bytes": "6673" }, { "name": "Shell", "bytes": "3409" }, { "name": "Smarty", "bytes": "28408" }, { "name": "TeX", "bytes": "1527" }, { "name": "XSLT", "bytes": "366202" } ], "symlink_target": "" }
from __future__ import unicode_literals

import os
from collections import namedtuple, defaultdict

from ansible.executor.task_queue_manager import TaskQueueManager
from ansible.vars import VariableManager
from ansible.parsing.dataloader import DataLoader
from ansible.executor.playbook_executor import PlaybookExecutor
from ansible.playbook.play import Play
import ansible.constants as C
from ansible.utils.vars import load_extra_vars
from ansible.utils.vars import load_options_vars
from ansible.errors import AnsibleError

from inventory import JMSInventory
from callback import AdHocResultCallback, PlaybookResultCallBack, \
    CommandResultCallback

__all__ = ["AdHocRunner", "PlayBookRunner"]

C.HOST_KEY_CHECKING = False


# Jumpserver does not use playbooks
class PlayBookRunner(object):
    """
    An interface for executing Ansible playbooks that simplifies the use
    of the Playbook object.
    """
    Options = namedtuple('Options', [
        'listtags', 'listtasks', 'listhosts', 'syntax', 'connection',
        'module_path', 'forks', 'remote_user', 'private_key_file', 'timeout',
        'ssh_common_args', 'ssh_extra_args', 'sftp_extra_args',
        'scp_extra_args', 'become', 'become_method', 'become_user',
        'verbosity', 'check', 'extra_vars'])

    def __init__(self,
                 hosts=None,
                 playbook_path=None,
                 forks=C.DEFAULT_FORKS,
                 listtags=False,
                 listtasks=False,
                 listhosts=False,
                 syntax=False,
                 module_path=None,
                 remote_user='root',
                 timeout=C.DEFAULT_TIMEOUT,
                 ssh_common_args=None,
                 ssh_extra_args=None,
                 sftp_extra_args=None,
                 scp_extra_args=None,
                 become=True,
                 become_method=None,
                 become_user="root",
                 verbosity=None,
                 extra_vars=None,
                 connection_type="smart",
                 passwords=None,
                 private_key_file=None,
                 check=False):

        C.RETRY_FILES_ENABLED = False
        self.callbackmodule = PlaybookResultCallBack()
        if playbook_path is None or not os.path.exists(playbook_path):
            raise AnsibleError(
                "Playbook file not found: %s."
                % playbook_path)
        self.playbook_path = playbook_path
        self.loader = DataLoader()
        self.variable_manager = VariableManager()
        self.passwords = passwords or {}
        self.inventory = JMSInventory(hosts)

        self.options = self.Options(
            listtags=listtags,
            listtasks=listtasks,
            listhosts=listhosts,
            syntax=syntax,
            timeout=timeout,
            connection=connection_type,
            module_path=module_path,
            forks=forks,
            remote_user=remote_user,
            private_key_file=private_key_file,
            ssh_common_args=ssh_common_args or "",
            ssh_extra_args=ssh_extra_args or "",
            sftp_extra_args=sftp_extra_args,
            scp_extra_args=scp_extra_args,
            become=become,
            become_method=become_method,
            become_user=become_user,
            verbosity=verbosity,
            extra_vars=extra_vars or [],
            check=check
        )

        self.variable_manager.extra_vars = load_extra_vars(loader=self.loader,
                                                           options=self.options)
        self.variable_manager.options_vars = load_options_vars(self.options)
        self.variable_manager.set_inventory(self.inventory)

        # Initialize the playbook executor
        self.runner = PlaybookExecutor(
            playbooks=[self.playbook_path],
            inventory=self.inventory,
            variable_manager=self.variable_manager,
            loader=self.loader,
            options=self.options,
            passwords=self.passwords)
        if self.runner._tqm:
            self.runner._tqm._stdout_callback = self.callbackmodule

    def run(self):
        if not self.inventory.list_hosts('all'):
            raise AnsibleError('Inventory is empty')
        self.runner.run()
        self.runner._tqm.cleanup()
        return self.callbackmodule.output


class AdHocRunner(object):
    """
    An interface for running ad-hoc Ansible tasks.
    """
    Options = namedtuple("Options", [
        'connection', 'module_path', 'private_key_file', "remote_user",
        'timeout', 'forks', 'become', 'become_method', 'become_user',
        'check', 'extra_vars',
        ]
    )

    results_callback_class = AdHocResultCallback

    def __init__(self,
                 hosts=C.DEFAULT_HOST_LIST,
                 forks=C.DEFAULT_FORKS,  # 5
                 timeout=C.DEFAULT_TIMEOUT,  # SSH timeout = 10s
                 remote_user=C.DEFAULT_REMOTE_USER,  # root
                 module_path=None,  # dirs of custom modules
                 connection_type="smart",
                 become=None,
                 become_method=None,
                 become_user=None,
                 check=False,
                 passwords=None,
                 extra_vars=None,
                 private_key_file=None,
                 gather_facts='no'):
        self.pattern = ''
        self.variable_manager = VariableManager()
        self.loader = DataLoader()
        self.gather_facts = gather_facts
        self.results_callback = AdHocRunner.results_callback_class()
        self.options = self.Options(
            connection=connection_type,
            timeout=timeout,
            module_path=module_path,
            forks=forks,
            become=become,
            become_method=become_method,
            become_user=become_user,
            check=check,
            remote_user=remote_user,
            extra_vars=extra_vars or [],
            private_key_file=private_key_file,
        )

        self.variable_manager.extra_vars = load_extra_vars(self.loader,
                                                           options=self.options)
        self.variable_manager.options_vars = load_options_vars(self.options)
        self.passwords = passwords or {}
        self.inventory = JMSInventory(hosts)
        self.variable_manager.set_inventory(self.inventory)
        self.tasks = []
        self.play_source = None
        self.play = None
        self.runner = None

    @staticmethod
    def check_module_args(module_name, module_args=''):
        if module_name in C.MODULE_REQUIRE_ARGS and not module_args:
            err = "No argument passed to '%s' module." % module_name
            print(err)
            return False
        return True

    def run(self, task_tuple, pattern='all', task_name='Ansible Ad-hoc'):
        """
        :param task_tuple: e.g. (('shell', 'ls'), ('ping', ''))
        :param pattern: host pattern the play should target
        :param task_name: display name of the generated play
        :return: the callback's result queue, or None on failure
        """
        for module, args in task_tuple:
            if not self.check_module_args(module, args):
                return
            self.tasks.append(
                dict(action=dict(
                    module=module,
                    args=args,
                ))
            )

        # Record the pattern so the host check below matches the play's target
        self.pattern = pattern

        self.play_source = dict(
            name=task_name,
            hosts=pattern,
            gather_facts=self.gather_facts,
            tasks=self.tasks
        )

        self.play = Play().load(
            self.play_source,
            variable_manager=self.variable_manager,
            loader=self.loader,
        )

        self.runner = TaskQueueManager(
            inventory=self.inventory,
            variable_manager=self.variable_manager,
            loader=self.loader,
            options=self.options,
            passwords=self.passwords,
            stdout_callback=self.results_callback,
        )

        if not self.inventory.list_hosts("all"):
            raise AnsibleError("Inventory is empty.")

        if not self.inventory.list_hosts(self.pattern):
            raise AnsibleError(
                "pattern: %s does not match any hosts." % self.pattern)

        try:
            self.runner.run(self.play)
        except Exception as e:
            print(e)
        else:
            return self.results_callback.result_q
        finally:
            if self.runner:
                self.runner.cleanup()
            if self.loader:
                self.loader.cleanup_all_tmp_files()

    def clean_result(self):
        """
        :return: {
            "success": ['hostname', ],
            "failed": [('hostname', 'msg'), {}],
        }
        """
        result = {'success': [], 'failed': []}
        for host in self.results_callback.result_q['contacted']:
            result['success'].append(host)

        for host, msgs in self.results_callback.result_q['dark'].items():
            msg = '\n'.join(['{} {}: {}'.format(
                msg.get('module_stdout', ''),
                msg.get('invocation', {}).get('module_name'),
                msg.get('msg', '')) for msg in msgs])
            result['failed'].append((host, msg))
        return result


def test_run():
    assets = [
        {
            # "hostname": "10.33.5.186",
            "ip": "10.33.5.186",
            "port": 22,
            "username": "ryan",
            "password": "ryan",
            "become": {
                "pass": "ryan",
            }
        },
        {
            # "hostname": "10.33.5.186",
            "ip": "192.168.6.79",
            "port": 22,
            "username": "ryan",
            "password": "ryan",
            "become": {
                "pass": "ryan",
            }
        },
    ]
    # task_tuple = (('shell', 'ls'),)
    # hoc = AdHocRunner(hosts=assets)
    # hoc.results_callback = CommandResultCallback()
    # ret = hoc.run(task_tuple)
    # print(ret)
    play = PlayBookRunner(
        assets,
        playbook_path='./application/utils/ansibleApi/nginx-cache.yml')
    a = play.run()
    print(a)


if __name__ == "__main__":
    test_run()
{ "content_hash": "778d0dee0b3dff32d95287b95b841623", "timestamp": "", "source": "github", "line_count": 308, "max_line_length": 97, "avg_line_length": 32.782467532467535, "alnum_prop": 0.535010399128454, "repo_name": "chenvista/flask-demos", "id": "7be9e947d1ec32fff58e07f6efab682b7b4b57bb", "size": "10161", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "application/utils/ansibleApi/runner.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "110412" }, { "name": "Python", "bytes": "1984435" }, { "name": "Shell", "bytes": "1862" } ], "symlink_target": "" }
r"""Wrapper script to set up import paths for endpointscfg. The actual implementation is in _endpointscfg_impl, but we have to set up import paths properly before we can import that module. See the docstring for endpoints._endpointscfg_impl for more information about this script's capabilities. """ import sys import _endpointscfg_setup # pylint: disable=unused-import from endpoints._endpointscfg_impl import main if __name__ == '__main__': main(sys.argv)
{ "content_hash": "812a03381769f797382871c85a6e2ff7", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 70, "avg_line_length": 29.125, "alnum_prop": 0.7682403433476395, "repo_name": "inklesspen/endpoints-python", "id": "1557cb74cd3a67ab93c31bee4d1eb757dc9431e7", "size": "1081", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "endpoints/endpointscfg.py", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "952" }, { "name": "Python", "bytes": "720251" } ], "symlink_target": "" }
"Base Cache class." from __future__ import unicode_literals import time import warnings from django.core.exceptions import DjangoRuntimeWarning, ImproperlyConfigured from django.utils.module_loading import import_string class InvalidCacheBackendError(ImproperlyConfigured): pass class CacheKeyWarning(DjangoRuntimeWarning): pass # Stub class to ensure not passing in a `timeout` argument results in # the default timeout DEFAULT_TIMEOUT = object() # Memcached does not accept keys longer than this. MEMCACHE_MAX_KEY_LENGTH = 250 def default_key_func(key, key_prefix, version): """ Default function to generate keys. Constructs the key used by all other methods. By default it prepends the `key_prefix'. KEY_FUNCTION can be used to specify an alternate function with custom key making behavior. """ return '%s:%s:%s' % (key_prefix, version, key) def get_key_func(key_func): """ Function to decide which key function to use. Defaults to ``default_key_func``. """ if key_func is not None: if callable(key_func): return key_func else: return import_string(key_func) return default_key_func class BaseCache(object): def __init__(self, params): timeout = params.get('timeout', params.get('TIMEOUT', 300)) if timeout is not None: try: timeout = int(timeout) except (ValueError, TypeError): timeout = 300 self.default_timeout = timeout options = params.get('OPTIONS', {}) max_entries = params.get('max_entries', options.get('MAX_ENTRIES', 300)) try: self._max_entries = int(max_entries) except (ValueError, TypeError): self._max_entries = 300 cull_frequency = params.get('cull_frequency', options.get('CULL_FREQUENCY', 3)) try: self._cull_frequency = int(cull_frequency) except (ValueError, TypeError): self._cull_frequency = 3 self.key_prefix = params.get('KEY_PREFIX', '') self.version = params.get('VERSION', 1) self.key_func = get_key_func(params.get('KEY_FUNCTION', None)) def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT): """ Returns the timeout value usable by this backend based upon the provided timeout. """ if timeout == DEFAULT_TIMEOUT: timeout = self.default_timeout elif timeout == 0: # ticket 21147 - avoid time.time() related precision issues timeout = -1 return None if timeout is None else time.time() + timeout def make_key(self, key, version=None): """Constructs the key used by all other methods. By default it uses the key_func to generate a key (which, by default, prepends the `key_prefix' and 'version'). An different key function can be provided at the time of cache construction; alternatively, you can subclass the cache backend to provide custom key making behavior. """ if version is None: version = self.version new_key = self.key_func(key, self.key_prefix, version) return new_key def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): """ Set a value in the cache if the key does not already exist. If timeout is given, that timeout will be used for the key; otherwise the default cache timeout will be used. Returns True if the value was stored, False otherwise. """ raise NotImplementedError('subclasses of BaseCache must provide an add() method') def get(self, key, default=None, version=None): """ Fetch a given key from the cache. If the key does not exist, return default, which itself defaults to None. """ raise NotImplementedError('subclasses of BaseCache must provide a get() method') def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None): """ Set a value in the cache. If timeout is given, that timeout will be used for the key; otherwise the default cache timeout will be used. 
""" raise NotImplementedError('subclasses of BaseCache must provide a set() method') def delete(self, key, version=None): """ Delete a key from the cache, failing silently. """ raise NotImplementedError('subclasses of BaseCache must provide a delete() method') def get_many(self, keys, version=None): """ Fetch a bunch of keys from the cache. For certain backends (memcached, pgsql) this can be *much* faster when fetching multiple values. Returns a dict mapping each key in keys to its value. If the given key is missing, it will be missing from the response dict. """ d = {} for k in keys: val = self.get(k, version=version) if val is not None: d[k] = val return d def get_or_set(self, key, default=None, timeout=DEFAULT_TIMEOUT, version=None): """ Fetch a given key from the cache. If the key does not exist, the key is added and set to the default value. The default value can also be any callable. If timeout is given, that timeout will be used for the key; otherwise the default cache timeout will be used. Returns the value of the key stored or retrieved on success, False on error. """ if default is None: raise ValueError('You need to specify a value.') val = self.get(key, version=version) if val is None: if callable(default): default = default() val = self.add(key, default, timeout=timeout, version=version) if val: return self.get(key, version=version) return val def has_key(self, key, version=None): """ Returns True if the key is in the cache and has not expired. """ return self.get(key, version=version) is not None def incr(self, key, delta=1, version=None): """ Add delta to value in the cache. If the key does not exist, raise a ValueError exception. """ value = self.get(key, version=version) if value is None: raise ValueError("Key '%s' not found" % key) new_value = value + delta self.set(key, new_value, version=version) return new_value def decr(self, key, delta=1, version=None): """ Subtract delta from value in the cache. If the key does not exist, raise a ValueError exception. """ return self.incr(key, -delta, version=version) def __contains__(self, key): """ Returns True if the key is in the cache and has not expired. """ # This is a separate method, rather than just a copy of has_key(), # so that it always has the same functionality as has_key(), even # if a subclass overrides it. return self.has_key(key) def set_many(self, data, timeout=DEFAULT_TIMEOUT, version=None): """ Set a bunch of values in the cache at once from a dict of key/value pairs. For certain backends (memcached), this is much more efficient than calling set() multiple times. If timeout is given, that timeout will be used for the key; otherwise the default cache timeout will be used. """ for key, value in data.items(): self.set(key, value, timeout=timeout, version=version) def delete_many(self, keys, version=None): """ Set a bunch of values in the cache at once. For certain backends (memcached), this is much more efficient than calling delete() multiple times. """ for key in keys: self.delete(key, version=version) def clear(self): """Remove *all* values from the cache at once.""" raise NotImplementedError('subclasses of BaseCache must provide a clear() method') def validate_key(self, key): """ Warn about keys that would not be portable to the memcached backend. This encourages (but does not force) writing backend-portable cache code. 
""" if len(key) > MEMCACHE_MAX_KEY_LENGTH: warnings.warn('Cache key will cause errors if used with memcached: ' '%s (longer than %s)' % (key, MEMCACHE_MAX_KEY_LENGTH), CacheKeyWarning) for char in key: if ord(char) < 33 or ord(char) == 127: warnings.warn('Cache key contains characters that will cause ' 'errors if used with memcached: %r' % key, CacheKeyWarning) def incr_version(self, key, delta=1, version=None): """Adds delta to the cache version for the supplied key. Returns the new version. """ if version is None: version = self.version value = self.get(key, version=version) if value is None: raise ValueError("Key '%s' not found" % key) self.set(key, value, version=version + delta) self.delete(key, version=version) return version + delta def decr_version(self, key, delta=1, version=None): """Substracts delta from the cache version for the supplied key. Returns the new version. """ return self.incr_version(key, -delta, version) def close(self, **kwargs): """Close the cache connection""" pass
{ "content_hash": "eb83a7047d641d2d3c41540d8877ede5", "timestamp": "", "source": "github", "line_count": 270, "max_line_length": 91, "avg_line_length": 35.87407407407407, "alnum_prop": 0.6121205864133802, "repo_name": "wetneb/django", "id": "26113aaa31fc09816d44afe31a162e80366ed9ee", "size": "9686", "binary": false, "copies": "15", "ref": "refs/heads/master", "path": "django/core/cache/backends/base.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "43000" }, { "name": "HTML", "bytes": "168986" }, { "name": "JavaScript", "bytes": "105614" }, { "name": "Makefile", "bytes": "125" }, { "name": "Python", "bytes": "10663776" }, { "name": "Shell", "bytes": "3056" }, { "name": "Smarty", "bytes": "130" } ], "symlink_target": "" }
import os
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.pylab as pylab
import matplotlib
import errno
import ffn

# General settings
matplotlib.style.use('seaborn-colorblind')
params = {'legend.fontsize': 'x-large',
          'figure.figsize': (20, 10),
          'figure.facecolor': 'white',
          'figure.edgecolor': 'black',
          'axes.labelsize': 'x-large',
          'axes.titlesize': 'x-large',
          'xtick.labelsize': 'x-large',
          'ytick.labelsize': 'x-large'}
pylab.rcParams.update(params)

# Colors used
colors = ['black', 'dimgrey', 'steelblue', 'lightsteelblue']

#-----------------------#
# Algorithms considered #
#-----------------------#

algorithms = set(['ARAC', 'PGPE', 'NPGPE', 'RSARAC', 'RSPGPE', 'RSNPGPE'])

#-------------------#
# Utility functions #
#-------------------#

def createDirectory(dirPath):
    """ Create directory at a given path (absolute).

    Args:
        dirPath (str): absolute path for new directory.
    """
    if not os.path.exists(os.path.expanduser(dirPath)):
        try:
            os.makedirs(os.path.expanduser(dirPath))
        except OSError as exc:
            if exc.errno != errno.EEXIST:
                raise

#-----------#
# Functions #
#-----------#

def analyzeConvergence(filesList, algorithmName):
    """ Aggregate the convergence information of a series of independent
    experiments of a certain learning algorithm.

    Args:
        filesList (list of str): list of the files of convergence information

    Returns:
        dfReward (pd.DataFrame): dataframe containing the aggregate average reward
        dfStddev (pd.DataFrame): dataframe containing the aggregate standard dev
        dfSharpe (pd.DataFrame): dataframe containing the aggregate Sharpe ratio
    """
    # Initialize output dataframes
    temp = pd.read_csv(os.path.expanduser(filesList[0]), index_col=0)
    dfRewardExp = pd.DataFrame(index=temp.index)
    dfStddevExp = pd.DataFrame(index=temp.index)
    dfSharpeExp = pd.DataFrame(index=temp.index)

    # For all the files
    for f in filesList:
        expName = f[::-1].split('/', 1)[0][::-1][:-4]
        df = pd.read_csv(os.path.expanduser(f), index_col=0)
        dfRewardExp[expName] = df['average']
        dfStddevExp[expName] = df['stdev']
        dfSharpeExp[expName] = df['sharpe']

    # Compute mean and stddev across experiments
    c1 = algorithmName
    c2 = algorithmName + '_delta'
    dfReward = pd.DataFrame(index=temp.index, columns=[c1, c2])
    dfStddev = pd.DataFrame(index=temp.index, columns=[c1, c2])
    dfSharpe = pd.DataFrame(index=temp.index, columns=[c1, c2])
    dfReward[c1] = dfRewardExp.mean(axis=1)
    dfReward[c2] = dfRewardExp.std(axis=1)
    dfStddev[c1] = dfStddevExp.mean(axis=1)
    dfStddev[c2] = dfStddevExp.std(axis=1)
    dfSharpe[c1] = dfSharpeExp.mean(axis=1)
    dfSharpe[c2] = dfSharpeExp.std(axis=1)

    # Return
    return dfReward, dfStddev, dfSharpe


def compareAlgorithmConvergence(debugDir, imagesDir=None):
    """ Compare the convergence properties of several learning algorithms.
    The function produces images and csv summaries of the analysis in the
    given directories.

    Args:
        debugDir (str): debug directory containing the convergence files.
        imagesDir (str): images directory.
""" dfReward = pd.DataFrame() dfStddev = pd.DataFrame() dfSharpe = pd.DataFrame() algorithmsList = [] for subdir, dirs, files in os.walk(debugDir): # Retrieve algorithm name algorithmName = subdir[::-1].split('/', 1)[0][::-1] if algorithmName not in algorithms: continue else: algorithmsList += [algorithmName] # Retrieve debug files for the current algorithm filesList = [os.path.join(subdir, f) for f in files] if len(filesList) > 0: # Compute aggregate convergence statistics for the current algorithm dfRewardAlgo, dfStddevAlgo, dfSharpeAlgo = analyzeConvergence(filesList, algorithmName) # Merge results dfReward = pd.concat([dfReward, dfRewardAlgo], axis=1) dfStddev = pd.concat([dfStddev, dfStddevAlgo], axis=1) dfSharpe = pd.concat([dfSharpe, dfSharpeAlgo], axis=1) algorithmsListDelta = [algo + '_delta' for algo in algorithmsList] colorsList = ['steelblue', 'darkorange', 'seagreen'] fig = plt.figure(figsize=(20,10), facecolor='white', edgecolor='black') # Average reward ax1 = fig.add_subplot(131) (10000.0 * dfReward[algorithmsList]).plot(lw=3, color=colors[1:], ax=ax1) dfRewardUpperBound = pd.DataFrame(1e4 * (dfReward[algorithmsList].values + 2.0 * dfReward[algorithmsListDelta].values), columns=algorithmsList, index=dfReward.index) dfRewardLowerBound = pd.DataFrame(1e4 * (dfReward[algorithmsList].values - 2.0 * dfReward[algorithmsListDelta].values), columns=algorithmsListDelta, index=dfReward.index) dfRewardUpperBound.plot(lw=2, ls='--', color=colors[1:], ax=ax1) dfRewardLowerBound.plot(lw=2, ls='--', color=colors[1:], ax=ax1) ax1.set_ylabel('Daily Average Reward [bps]') ax1.set_xlabel('Training Epoch') ax1.legend(algorithmsList, loc='upper left') plt.grid(True) # Reward standard deviation ax2 = fig.add_subplot(132) (1e4 * dfStddev[algorithmsList]).plot(lw=3, color=colors[1:], legend=False, ax=ax2) dfStddevUpperBound = pd.DataFrame(1e4 * (dfStddev[algorithmsList].values + 2.0 * dfStddev[algorithmsListDelta].values), columns=algorithmsList, index=dfStddev.index) dfStddevLowerBound = pd.DataFrame(1e4 * (dfStddev[algorithmsList].values - 2.0 * dfStddev[algorithmsListDelta].values), columns=algorithmsListDelta, index=dfStddev.index) dfStddevUpperBound.plot(lw=2, ls='--', color=colors[1:], legend=False, ax=ax2) dfStddevLowerBound.plot(lw=2, ls='--', color=colors[1:], legend=False, ax=ax2) ax2.set_title('Convergence of Learning Process', fontsize=18) ax2.set_ylabel('Daily Reward Standard Deviation [bps]') ax2.set_xlabel('Training Epoch') plt.grid(True) # Sharpe ratio ax3 = fig.add_subplot(133) (np.sqrt(252) * dfSharpe[algorithmsList]).plot(lw=3, color=colors[1:], legend=False, ax=ax3) dfSharpeUpperBound = pd.DataFrame(np.sqrt(252) * (dfSharpe[algorithmsList].values + 2.0 * dfSharpe[algorithmsListDelta].values), columns=algorithmsList, index=dfSharpe.index) dfSharpeLowerBound = pd.DataFrame(np.sqrt(252) * (dfSharpe[algorithmsList].values - 2.0 * dfSharpe[algorithmsListDelta].values), columns=algorithmsListDelta, index=dfSharpe.index) dfSharpeUpperBound.plot(lw=2, ls='--', color=colors[1:], legend=False, ax=ax3) dfSharpeLowerBound.plot(lw=2, ls='--', color=colors[1:], legend=False, ax=ax3) ax3.set_ylabel('Annualized Sharpe Ratio') ax3.set_xlabel('Training Epoch') plt.grid(True) plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0) if imagesDir is not None: createDirectory(imagesDir) plt.savefig(imagesDir + 'convergence.eps', format='eps', dpi=1200) else: plt.show() def analyzePerformance(filesList, algorithmName): """ Aggregate the performances of a series of independent experiments of a certain learning 
    algorithm.

    Args:
        filesList (list of str): list of the files of convergence information
        algorithmName (str): name of the learning algorithm

    Returns:
        dfBuyHold (pd.DataFrame): dataframe containing the cumulative returns of the buy & hold strategy
        dfCumProfit (pd.DataFrame): dataframe containing the aggregate cumulative profits
        dfStatistics (pd.DataFrame): dataframe containing the backtest statistics
        reallocationFrequency (float): average frequency of allocation changes
        shortFrequency (float): average frequency of short positions
    """
    # Initialize output dataframes
    temp = pd.read_csv(os.path.expanduser(filesList[0]))
    dfAssetReturn = pd.Series(temp['r_1'], index=np.arange(1, len(temp)+1))
    dfAllocationExp = pd.DataFrame(index=temp.index)
    dfLogReturnExp = pd.DataFrame(index=np.arange(1, len(temp)+1))

    # For all the files
    for f in filesList:
        expName = f[::-1].split('/', 1)[0][::-1][:-4].encode("utf-8")
        df = pd.read_csv(os.path.expanduser(f))
        df.set_index(np.arange(1, len(temp)+1), inplace=True)
        dfAllocationExp[expName] = df['a_1']
        dfLogReturnExp[expName] = df['logReturn']

    # Compute cumulative profits
    dfBuyHold = pd.Series(100.0 * (np.cumprod(dfAssetReturn.values + 1.0) - 1.0),
                          index=dfAssetReturn.index)
    dfCumProfit = pd.DataFrame(100.0 * (np.exp(dfLogReturnExp.cumsum()) - 1.0),
                               index=dfLogReturnExp.index,
                               columns=dfLogReturnExp.columns)
    dfBuyHold.loc[0] = 0.0
    dfCumProfit.loc[0, :] = 0.0
    dfBuyHold.sort_index(inplace=True)
    dfCumProfit.sort_index(inplace=True)

    # Price dataframe for computing strategy statistics
    dfPricesExp = pd.DataFrame(index=pd.date_range(start='01/01/2000', periods=len(dfBuyHold)),
                               columns=dfCumProfit.columns)
    dfPricesExp['Buy and Hold'] = 100.0 * (1.0 + dfBuyHold.values / 100.0)
    dfPricesExp[dfCumProfit.columns] = 100.0 * (1.0 + dfCumProfit.values / 100.0)

    # Compute aggregate information
    dfPerf = pd.DataFrame(index=dfCumProfit.index,
                          columns=[algorithmName, algorithmName + '_delta'])
    dfPerf[algorithmName] = dfCumProfit.mean(axis=1)
    dfPerf[algorithmName + '_delta'] = dfCumProfit.std(axis=1)

    # Compute strategies stats
    dfStatistics = dfPricesExp.calc_stats()

    # Compute frequency of reallocation
    reallocationFrequency = (dfAllocationExp.diff().dropna() != 0).mean(axis=0).mean(axis=0)
    shortFrequency = (dfAllocationExp < 0).mean(axis=0).mean(axis=0)

    return dfBuyHold, dfPerf, dfStatistics, reallocationFrequency, shortFrequency


def compareAlgorithmPerformance(outputDir, imagesDir):
    """ Compare the backtest performances of several learning algorithms.
    The function produces images and csv summaries of the analysis in the
    given directories.

    Args:
        outputDir (str): output directory.
        imagesDir (str): images directory.
""" dfPerf = pd.DataFrame() dfStat = pd.DataFrame(index=['Total Return', 'Daily Sharpe', 'Monthly Sharpe', 'Yearly Sharpe', 'Max Drawdown', 'Avg Drawdown', 'Avg Up Month', 'Avg Down Month', 'Win Year %', 'Win 12m %']) algorithmsList = [] for subdir, dirs, files in os.walk(outputDir): # Retrieve algorithm name algorithmName = subdir[::-1].split('/', 1)[0][::-1] if algorithmName not in algorithms: continue else: algorithmsList += [algorithmName] # Retrieve debug files for the current algorithm filesList = [os.path.join(subdir, f) for f in files] if len(filesList) > 0: # Compute aggregate performance statistics dfBuyHold, dfPerfAlgo, dfStatAlgo, reallocationFreqAlgo, shortFreqAlgo = \ analyzePerformance(filesList, algorithmName) # Merge results dfPerf = pd.concat([dfPerf, dfPerfAlgo], axis=1) # Write backtest statistics to .csv file createDirectory(outputDir + 'Statistics/') dfStatAlgo.to_csv(path = os.path.expanduser(outputDir + 'Statistics/backtest' + algorithmName + '.csv')) # Extract statistics dfStatRed = extractBacktestStatistics(dfStatAlgo) dfStat['Buy and Hold'] = dfStatRed['Buy and Hold'] dfStat.loc['Reallocation Freq', 'Buy and Hold'] = 0.0 dfStat.loc['Short Freq', 'Buy and Hold'] = 0.0 dfStat[algorithmName] = dfStatRed.ix[:, 1:].mean(axis=1) dfStat.loc['Reallocation Freq', algorithmName] = reallocationFreqAlgo dfStat.loc['Short Freq', algorithmName] = shortFreqAlgo # Plot performance algorithmsListDelta = [algo + '_delta' for algo in algorithmsList] colorsList = ['steelblue', 'darkorange', 'seagreen'] plt.figure() ax = dfBuyHold.plot(title='Performance of Learning Algorithms', color=colors[0], lw=3) dfPerf[algorithmsList].plot(title='Performance of Learning Algorithms', color=colors[1:], lw=3, ax=ax) dfPerfLowerBound = pd.DataFrame(dfPerf[algorithmsList].values - 2.0 * dfPerf[algorithmsListDelta].values, columns=algorithmsList, index=dfPerf.index) dfPerfUpperBound = pd.DataFrame(dfPerf[algorithmsList].values + 2.0 * dfPerf[algorithmsListDelta].values, columns=algorithmsList, index=dfPerf.index) dfPerfLowerBound.plot(ax=ax, color=colors[1:], ls='--', lw=2) dfPerfUpperBound.plot(ax=ax, color=colors[1:], ls='--', lw=2) ax.set_xlabel('Time Step') ax.set_ylabel('Cumulative Profit [%]') ax.legend(['Buy and Hold'] + algorithmsList, loc='upper left') plt.grid(True) plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0) if imagesDir is not None: createDirectory(imagesDir) plt.savefig(imagesDir + 'performance.eps', format='eps', dpi=1200) else: plt.show() # Aggregate statistics dfStat.to_csv(outputDir + 'Statistics/backtest.csv') def extractBacktestStatistics(dfStatAlgo): """ Extract backtest statistics from ffn stats object and store them in a pandas DataFrame, for easier aggregation. Args: dfStatAlgo (ffn.stats): ffn stats container. """ dfStatRed = pd.DataFrame() for exp, stat in dfStatAlgo.iteritems(): dfStatRed.loc['Total Return', exp] = stat.total_return dfStatRed.loc['Daily Sharpe', exp] = stat.daily_sharpe dfStatRed.loc['Monthly Sharpe', exp] = stat.monthly_sharpe dfStatRed.loc['Yearly Sharpe', exp] = stat.yearly_sharpe dfStatRed.loc['Max Drawdown', exp] = stat.max_drawdown dfStatRed.loc['Avg Drawdown', exp] = stat.avg_drawdown dfStatRed.loc['Avg Up Month', exp] = stat.avg_up_month dfStatRed.loc['Avg Down Month', exp] = stat.avg_down_month dfStatRed.loc['Win Year %', exp] = stat.win_year_perc dfStatRed.loc['Win 12m %', exp] = stat.twelve_month_win_perc return dfStatRed def postprocessing(debugDir, outputDir): """ Postprocessing wrapper function. 
Args: debugDir (str): path to debug directory. outputDir (str): path to output directory. """ # Compare algorithms convergence compareAlgorithmConvergence(os.path.expanduser(debugDir), os.path.expanduser(outputDir + 'Images/')) # Compare algorithms performances compareAlgorithmPerformance(os.path.expanduser(outputDir), os.path.expanduser(outputDir + 'Images/')) if __name__ == "__main__": postprocessing('~/Documents/University/6_Anno_Poli/7_Thesis/Data/Debug/Single_Synth_RN_P50_F0_S0_N5/', '~/Documents/University/6_Anno_Poli/7_Thesis/Data/Output/Single_Synth_RN_P50_F0_S0_N5/')
{ "content_hash": "e4df3a19e9a90f91724906ebbf5642b1", "timestamp": "", "source": "github", "line_count": 369, "max_line_length": 139, "avg_line_length": 41.040650406504064, "alnum_prop": 0.6492340200739567, "repo_name": "pnecchi/Thesis", "id": "fcc678adbc5b09fdad448a3df46c60a01fddc136", "size": "15565", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Code/Postprocessing/postprocessing.py", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "244518" }, { "name": "CMake", "bytes": "3957" }, { "name": "Makefile", "bytes": "37644" }, { "name": "Python", "bytes": "101949" }, { "name": "TeX", "bytes": "485065" } ], "symlink_target": "" }
from __future__ import unicode_literals, division, absolute_import import logging import csv import re from cgi import parse_header from flexget import plugin from flexget.event import event from flexget.utils import requests from flexget.utils.imdb import make_url from flexget.utils.cached_input import cached from flexget.utils.tools import decode_html from flexget.entry import Entry from flexget.utils.soup import get_soup log = logging.getLogger('imdb_list') USER_ID_RE = r'^ur\d{7,8}$' class ImdbList(object): """"Creates an entry for each movie in your imdb list.""" schema = { 'type': 'object', 'properties': { 'user_id': { 'type': 'string', 'pattern': USER_ID_RE, 'error_pattern': 'user_id must be in the form urXXXXXXX' }, 'username': {'type': 'string'}, 'password': {'type': 'string'}, 'list': {'type': 'string'} }, 'required': ['list'], 'additionalProperties': False } @cached('imdb_list', persist='2 hours') def on_task_input(self, task, config): sess = requests.Session() if config.get('username') and config.get('password'): log.verbose('Logging in ...') # Log in to imdb with our handler params = {'login': config['username'], 'password': config['password']} try: # First get the login page so we can get the hidden input value soup = get_soup(sess.get('https://secure.imdb.com/register-imdb/login').content) # Fix for bs4 bug. see #2313 and github#118 auxsoup = soup.find('div', id='nb20').next_sibling.next_sibling tag = auxsoup.find('input', attrs={'name': '49e6c'}) if tag: params['49e6c'] = tag['value'] else: log.warning('Unable to find required info for imdb login, maybe their login method has changed.') # Now we do the actual login with appropriate parameters r = sess.post('https://secure.imdb.com/register-imdb/login', data=params, raise_status=False) except requests.RequestException as e: raise plugin.PluginError('Unable to login to imdb: %s' % e.message) # IMDb redirects us upon a successful login. # removed - doesn't happen always? # if r.status_code != 302: # log.warning('It appears logging in to IMDb was unsuccessful.') # try to automatically figure out user_id from watchlist redirect url if not 'user_id' in config: log.verbose('Getting user_id ...') try: response = sess.get('http://www.imdb.com/list/watchlist') except requests.RequestException as e: log.error('Error retrieving user ID from imdb: %s' % e.message) user_id = '' else: log.debug('redirected to %s' % response.url) user_id = response.url.split('/')[-2] if re.match(USER_ID_RE, user_id): config['user_id'] = user_id else: raise plugin.PluginError('Couldn\'t figure out user_id, please configure it manually.') if not 'user_id' in config: raise plugin.PluginError('Configuration option `user_id` required.') log.verbose('Retrieving list %s ...' % config['list']) # Get the imdb list in csv format try: url = 'http://www.imdb.com/list/export' params = {'list_id': config['list'], 'author_id': config['user_id']} log.debug('Requesting %s' % url) opener = sess.get(url, params=params) mime_type = parse_header(opener.headers['content-type'])[0] log.debug('mime_type: %s' % mime_type) if mime_type != 'text/csv': raise plugin.PluginError('Didn\'t get CSV export as response. Probably specified list `%s` ' 'does not exist.' 
% config['list']) csv_rows = csv.reader(opener.iter_lines()) except requests.RequestException as e: raise plugin.PluginError('Unable to get imdb list: %s' % e.message) # Create an Entry for each movie in the list entries = [] for row in csv_rows: if not row or row[0] == 'position': # Don't use blank rows or the headings row continue try: title = decode_html(row[5]).decode('utf-8') entries.append(Entry(title=title, url=make_url(row[1]), imdb_id=row[1], imdb_name=title)) except IndexError: log.critical('IndexError! Unable to handle row: %s' % row) return entries @event('plugin.register') def register_plugin(): plugin.register(ImdbList, 'imdb_list', api_ver=2)
{ "content_hash": "8bb87eac3994c65ebbc0d08e5ab05123", "timestamp": "", "source": "github", "line_count": 122, "max_line_length": 117, "avg_line_length": 41.27049180327869, "alnum_prop": 0.5574975173783515, "repo_name": "asm0dey/Flexget", "id": "94ab89329e1e8d21fdf4f0afd6cf1505ddff09be", "size": "5035", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "flexget/plugins/input/imdb_list.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "56725" }, { "name": "HTML", "bytes": "35670" }, { "name": "JavaScript", "bytes": "455222" }, { "name": "Python", "bytes": "1670496" } ], "symlink_target": "" }
"""Utility functions for signurl command.""" import base64 from datetime import datetime import hashlib from gslib.utils.constants import UTF8 import six from six.moves import urllib _CANONICAL_REQUEST_FORMAT = ('{method}\n{resource}\n{query_string}\n{headers}' '\n{signed_headers}\n{hashed_payload}') _SIGNING_ALGO = 'GOOG4-RSA-SHA256' _STRING_TO_SIGN_FORMAT = ('{signing_algo}\n{request_time}\n{credential_scope}' '\n{hashed_request}') _SIGNED_URL_FORMAT = ('https://{host}/{path}?x-goog-signature={sig}&' '{query_string}') _UNSIGNED_PAYLOAD = 'UNSIGNED-PAYLOAD' def _NowUTC(): return datetime.utcnow() def CreatePayload(client_id, method, duration, path, generation, logger, region, signed_headers, billing_project=None, string_to_sign_debug=False): """Create a string that needs to be signed. Args: client_id: Client ID signing this URL. method: The HTTP method to be used with the signed URL. duration: timedelta for which the constructed signed URL should be valid. path: String path to the bucket of object for signing, in the form 'bucket' or 'bucket/object'. generation: If not None, specifies a version of an object for signing. logger: logging.Logger for warning and debug output. region: Geographic region in which the requested resource resides. signed_headers: Dict containing the header info like host content-type etc. billing_project: Specify a user project to be billed for the request. string_to_sign_debug: If true AND logger is enabled for debug level, print string to sign to debug. Used to differentiate user's signed URL from the probing permissions-check signed URL. Returns: A tuple where the 1st element is the string to sign. The second element is the query string. """ signing_time = _NowUTC() canonical_day = signing_time.strftime('%Y%m%d') canonical_time = signing_time.strftime('%Y%m%dT%H%M%SZ') canonical_scope = '{date}/{region}/storage/goog4_request'.format( date=canonical_day, region=region) signed_query_params = { 'x-goog-algorithm': _SIGNING_ALGO, 'x-goog-credential': client_id + '/' + canonical_scope, 'x-goog-date': canonical_time, 'x-goog-signedheaders': ';'.join(sorted(signed_headers.keys())), 'x-goog-expires': '%d' % duration.total_seconds() } if (billing_project is not None): signed_query_params['userProject'] = billing_project if generation is not None: signed_query_params['generation'] = generation canonical_resource = '/{}'.format(path) canonical_query_string = '&'.join([ '{}={}'.format(param, urllib.parse.quote_plus(signed_query_params[param])) for param in sorted(signed_query_params.keys()) ]) canonical_headers = '\n'.join([ '{}:{}'.format(header.lower(), signed_headers[header]) for header in sorted(signed_headers.keys()) ]) + '\n' canonical_signed_headers = ';'.join(sorted(signed_headers.keys())) canonical_request = _CANONICAL_REQUEST_FORMAT.format( method=method, resource=canonical_resource, query_string=canonical_query_string, headers=canonical_headers, signed_headers=canonical_signed_headers, hashed_payload=_UNSIGNED_PAYLOAD) if six.PY3: canonical_request = canonical_request.encode(UTF8) canonical_request_hasher = hashlib.sha256() canonical_request_hasher.update(canonical_request) hashed_canonical_request = base64.b16encode( canonical_request_hasher.digest()).lower().decode(UTF8) string_to_sign = _STRING_TO_SIGN_FORMAT.format( signing_algo=_SIGNING_ALGO, request_time=canonical_time, credential_scope=canonical_scope, hashed_request=hashed_canonical_request) if string_to_sign_debug and logger: logger.debug( 'Canonical request (ignore opening/closing brackets): [[[%s]]]' % 
canonical_request) logger.debug('String to sign (ignore opening/closing brackets): [[[%s]]]' % string_to_sign) return string_to_sign, canonical_query_string def GetFinalUrl(raw_signature, host, path, canonical_query_string): """Get the final signed url.""" signature = base64.b16encode(raw_signature).lower().decode() return _SIGNED_URL_FORMAT.format(host=host, path=path, sig=signature, query_string=canonical_query_string)
{ "content_hash": "f60cdf65ac3eb61d1a45ad4b11e01f3f", "timestamp": "", "source": "github", "line_count": 127, "max_line_length": 80, "avg_line_length": 36.75590551181102, "alnum_prop": 0.6478149100257069, "repo_name": "GoogleCloudPlatform/gsutil", "id": "3aaf64a59d185c0616521d4484a4ad2c2e78230d", "size": "5288", "binary": false, "copies": "12", "ref": "refs/heads/master", "path": "gslib/utils/signurl_helper.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "2175" }, { "name": "PowerShell", "bytes": "10051" }, { "name": "Python", "bytes": "3952149" }, { "name": "Shell", "bytes": "7031" } ], "symlink_target": "" }
import sys
import re

from optparse import OptionParser

from rdkit import Chem
from rdkit.Chem import AllChem


def smiles_to_smarts(smi):
  mol = Chem.MolFromSmiles(smi)

  if (mol is None):
    sys.stderr.write("Can't generate mol for: %s\n" % (smi))
    return None

  # change the isotope to 42
  for atom in mol.GetAtoms():
    atom.SetIsotope(42)

  # print out the smiles - all the atom attributes will be fully specified
  smarts = Chem.MolToSmiles(mol, isomericSmiles=True)
  # remove the 42 isotope labels
  smarts = re.sub(r'\[42', "[", smarts)
  # now have a fully specified SMARTS - simples!
  return smarts


if __name__ == '__main__':

  parser = OptionParser(
    description="Program to apply transformations to a set of input molecules",
    epilog="Example command: mol_transform.py -t TRANSFORM_FILE <SMILES_FILE\t\t"
    "Format of smiles file: SMILES ID <space or comma separated>\t\t\t"
    "Format of transform file: transform <one per line>\t\t\t"
    "Output: SMILES,ID,Transform,Modified_SMILES")
  parser.add_option('-f', '--file', action='store', dest='transform_file', type='string',
                    help='The file containing the transforms to apply to your input SMILES')
  (options, args) = parser.parse_args()

  #print options.transform_file
  if options.transform_file is None:
    print("Please specify the transform file.")
    sys.exit(1)

  smiles = []
  # read the STDIN
  for line in sys.stdin:
    line = line.rstrip()
    smi, id = re.split(r'\s|,', line)
    #print smiles,id
    smiles.append((smi, id))

  # read the transform file
  # all the transforms must come from BioDig to guarantee they have been cansmirk'ed
  infile = open(options.transform_file, 'r')

  print("Input_SMILES,ID,RG-Transform,RG-transformedSMILES")

  for transform in infile:
    transform = transform.rstrip()

    # need to convert the smiles to smarts to get rid of any potential issues
    lhs, rhs = transform.split(">>")

    if (lhs == "[*:1][H]"):
      lhs = "[*;!H0:1]"
    else:
      lhs = smiles_to_smarts(lhs)

    if (rhs == "[*:1][H]"):
      rhs = "[*:1]"
    else:
      rhs = smiles_to_smarts(rhs)

    rdkit_transform = "%s>>%s" % (lhs, rhs)
    rxn = AllChem.ReactionFromSmarts(rdkit_transform)
    #rxn = AllChem.ReactionFromSmarts(transform)

    for x in smiles:
      mol = Chem.MolFromSmiles(x[0])
      ps = rxn.RunReactants([mol])

      products = set()
      for y in range(len(ps)):
        for z in range(len(ps[y])):
          p = ps[y][z]
          Chem.SanitizeMol(p)
          products.add(Chem.MolToSmiles(p, isomericSmiles=True))

      for p in products:
        print("%s,%s,%s,%s" % (x[0], x[1], transform, p))
{ "content_hash": "332b9aa58f5f785d381b79be8e86a8b1", "timestamp": "", "source": "github", "line_count": 91, "max_line_length": 92, "avg_line_length": 29.32967032967033, "alnum_prop": 0.6421880854252529, "repo_name": "bp-kelley/rdkit", "id": "95ec2c4d6a52002acb8809dd433a2fde8f987382", "size": "4351", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "Contrib/mmpa/mol_transform.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "1593408" }, { "name": "C#", "bytes": "10167" }, { "name": "C++", "bytes": "13831236" }, { "name": "CMake", "bytes": "761688" }, { "name": "Dockerfile", "bytes": "2590" }, { "name": "Fortran", "bytes": "7590" }, { "name": "HTML", "bytes": "43059702" }, { "name": "Java", "bytes": "369342" }, { "name": "JavaScript", "bytes": "52043" }, { "name": "Jupyter Notebook", "bytes": "498341" }, { "name": "LLVM", "bytes": "40048" }, { "name": "Lex", "bytes": "4508" }, { "name": "Makefile", "bytes": "10862" }, { "name": "Python", "bytes": "4156873" }, { "name": "QMake", "bytes": "389" }, { "name": "SMT", "bytes": "3010" }, { "name": "SWIG", "bytes": "342569" }, { "name": "Shell", "bytes": "3822" }, { "name": "Smarty", "bytes": "5864" }, { "name": "Yacc", "bytes": "61432" } ], "symlink_target": "" }
import matplotlib as mpl from mpl_toolkits.mplot3d import Axes3D import numpy as np import matplotlib.pyplot as plt import Procrustes def load_curve_data(filename): with open(filename) as f: line = f.readline() data = line.split(",") version = data[0] num_points = data[1] data = data[2:] points = [tuple((float(data[i+j]) for j in range(3))) for i in range(0,len(data),3)] return points def draw_curves(curves_to_draw): mpl.rcParams['legend.fontsize'] = 10 fig = plt.figure() ax = fig.gca(projection='3d') for curve in curves_to_draw: x,y,z = [[point[i] for point in curve] for i in range(3)] ax.plot(x,y,zs=z, label="curve") ax.legend() plt.show() def visualise(filenames): curves_to_draw = [load_curve_data(filename) for filename in filenames] draw_curves(curves_to_draw) curves = [load_curve_data(f) for f in ["curves/13 1","curves/ref_1.crv"]] curves = Procrustes.superposition(*[np.array(c) for c in curves]) print("distance",Procrustes.min_distance(*curves)) draw_curves(curves)
{ "content_hash": "eaaaa71151da24e86c8df5eafedb0218", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 92, "avg_line_length": 32.529411764705884, "alnum_prop": 0.64376130198915, "repo_name": "steveryb/pyprocrustes", "id": "240783e69a058377674aecd9f3bdd8eadffdad2f", "size": "1106", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "visualise.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Python", "bytes": "17662" } ], "symlink_target": "" }
from test_framework.test_framework import BitcoinTestFramework from test_framework.util import * class WalletTest (BitcoinTestFramework): def check_fee_amount(self, curr_balance, balance_with_fee, fee_per_byte, tx_size): """Return curr_balance after asserting the fee was in range""" fee = balance_with_fee - curr_balance target_fee = fee_per_byte * tx_size if fee < target_fee: raise AssertionError("Fee of %s EGI too low! (Should be %s EGI)"%(str(fee), str(target_fee))) # allow the node's estimation to be at most 2 bytes off if fee > fee_per_byte * (tx_size + 2): raise AssertionError("Fee of %s EGI too high! (Should be %s EGI)"%(str(fee), str(target_fee))) return curr_balance def setup_chain(self): print("Initializing test directory "+self.options.tmpdir) initialize_chain_clean(self.options.tmpdir, 4) def setup_network(self, split=False): self.nodes = start_nodes(3, self.options.tmpdir) connect_nodes_bi(self.nodes,0,1) connect_nodes_bi(self.nodes,1,2) connect_nodes_bi(self.nodes,0,2) self.is_network_split=False self.sync_all() def run_test (self): # Check that there's no UTXO on none of the nodes assert_equal(len(self.nodes[0].listunspent()), 0) assert_equal(len(self.nodes[1].listunspent()), 0) assert_equal(len(self.nodes[2].listunspent()), 0) print "Mining blocks..." self.nodes[0].generate(1) walletinfo = self.nodes[0].getwalletinfo() assert_equal(walletinfo['immature_balance'], 500) assert_equal(walletinfo['balance'], 0) self.sync_all() self.nodes[1].generate(101) self.sync_all() assert_equal(self.nodes[0].getbalance(), 500) assert_equal(self.nodes[1].getbalance(), 500) assert_equal(self.nodes[2].getbalance(), 0) # Check that only first and second nodes have UTXOs assert_equal(len(self.nodes[0].listunspent()), 1) assert_equal(len(self.nodes[1].listunspent()), 1) assert_equal(len(self.nodes[2].listunspent()), 0) # Send 210 EGI from 0 to 2 using sendtoaddress call. # Second transaction will be child of first, and will require a fee self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 110) self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 100) walletinfo = self.nodes[0].getwalletinfo() assert_equal(walletinfo['immature_balance'], 0) # Have node0 mine a block, thus it will collect its own fee. self.nodes[0].generate(1) self.sync_all() # Exercise locking of unspent outputs unspent_0 = self.nodes[2].listunspent()[0] unspent_0 = {"txid": unspent_0["txid"], "vout": unspent_0["vout"]} self.nodes[2].lockunspent(False, [unspent_0]) assert_raises(JSONRPCException, self.nodes[2].sendtoaddress, self.nodes[2].getnewaddress(), 200) assert_equal([unspent_0], self.nodes[2].listlockunspent()) self.nodes[2].lockunspent(True, [unspent_0]) assert_equal(len(self.nodes[2].listlockunspent()), 0) # Have node1 generate 100 blocks (so node0 can recover the fee) self.nodes[1].generate(100) self.sync_all() # node0 should end up with 1000 EGI in block rewards plus fees, but # minus the 210 plus fees sent to node2 assert_equal(self.nodes[0].getbalance(), 1000-210) assert_equal(self.nodes[2].getbalance(), 210) # Node0 should have two unspent outputs. 
# Create a couple of transactions to send them to node2, submit them through # node1, and make sure both node0 and node2 pick them up properly: node0utxos = self.nodes[0].listunspent(1) assert_equal(len(node0utxos), 2) # create both transactions txns_to_send = [] for utxo in node0utxos: inputs = [] outputs = {} inputs.append({ "txid" : utxo["txid"], "vout" : utxo["vout"]}) outputs[self.nodes[2].getnewaddress("from1")] = utxo["amount"] raw_tx = self.nodes[0].createrawtransaction(inputs, outputs) txns_to_send.append(self.nodes[0].signrawtransaction(raw_tx)) # Have node 1 (miner) send the transactions self.nodes[1].sendrawtransaction(txns_to_send[0]["hex"], True) self.nodes[1].sendrawtransaction(txns_to_send[1]["hex"], True) # Have node1 mine a block to confirm transactions: self.nodes[1].generate(1) self.sync_all() assert_equal(self.nodes[0].getbalance(), 0) assert_equal(self.nodes[2].getbalance(), 1000) assert_equal(self.nodes[2].getbalance("from1"), 1000-210) # Send 100 EGI normal address = self.nodes[0].getnewaddress("test") fee_per_byte = Decimal('0.001') / 1000 self.nodes[2].settxfee(fee_per_byte * 1000) txid = self.nodes[2].sendtoaddress(address, 100, "", "", False) self.nodes[2].generate(1) self.sync_all() node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), Decimal('900'), fee_per_byte, count_bytes(self.nodes[2].getrawtransaction(txid))) assert_equal(self.nodes[0].getbalance(), Decimal('100')) # Send 100 EGI with subtract fee from amount txid = self.nodes[2].sendtoaddress(address, 100, "", "", True) self.nodes[2].generate(1) self.sync_all() node_2_bal -= Decimal('100') assert_equal(self.nodes[2].getbalance(), node_2_bal) node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), Decimal('200'), fee_per_byte, count_bytes(self.nodes[2].getrawtransaction(txid))) # Sendmany 100 EGI txid = self.nodes[2].sendmany('from1', {address: 100}, 0, "", []) self.nodes[2].generate(1) self.sync_all() node_0_bal += Decimal('100') node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), node_2_bal - Decimal('100'), fee_per_byte, count_bytes(self.nodes[2].getrawtransaction(txid))) assert_equal(self.nodes[0].getbalance(), node_0_bal) # Sendmany 100 EGI with subtract fee from amount txid = self.nodes[2].sendmany('from1', {address: 100}, 0, "", [address]) self.nodes[2].generate(1) self.sync_all() node_2_bal -= Decimal('100') assert_equal(self.nodes[2].getbalance(), node_2_bal) node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), node_0_bal + Decimal('100'), fee_per_byte, count_bytes(self.nodes[2].getrawtransaction(txid))) # Test ResendWalletTransactions: # Create a couple of transactions, then start up a fourth # node (nodes[3]) and ask nodes[0] to rebroadcast. # EXPECT: nodes[3] should have those transactions in its mempool. txid1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1) txid2 = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1) sync_mempools(self.nodes) self.nodes.append(start_node(3, self.options.tmpdir)) connect_nodes_bi(self.nodes, 0, 3) sync_blocks(self.nodes) relayed = self.nodes[0].resendwallettransactions() assert_equal(set(relayed), {txid1, txid2}) sync_mempools(self.nodes) assert(txid1 in self.nodes[3].getrawmempool()) # Exercise balance rpcs assert_equal(self.nodes[0].getwalletinfo()["unconfirmed_balance"], 1) assert_equal(self.nodes[0].getunconfirmedbalance(), 1) #check if we can list zero value tx as available coins #1. create rawtx #2. hex-changed one output to 0.0 #3. sign and send #4. 
check if recipient (node0) can list the zero value tx usp = self.nodes[1].listunspent() inputs = [{"txid":usp[0]['txid'], "vout":usp[0]['vout']}] outputs = {self.nodes[1].getnewaddress(): 499.998, self.nodes[0].getnewaddress(): 11.11} rawTx = self.nodes[1].createrawtransaction(inputs, outputs).replace("c0833842", "00000000") #replace 11.11 with 0.0 (int32) decRawTx = self.nodes[1].decoderawtransaction(rawTx) signedRawTx = self.nodes[1].signrawtransaction(rawTx) decRawTx = self.nodes[1].decoderawtransaction(signedRawTx['hex']) zeroValueTxid= decRawTx['txid'] sendResp = self.nodes[1].sendrawtransaction(signedRawTx['hex']) self.sync_all() self.nodes[1].generate(1) #mine a block self.sync_all() unspentTxs = self.nodes[0].listunspent() #zero value tx must be in listunspents output found = False for uTx in unspentTxs: if uTx['txid'] == zeroValueTxid: found = True assert_equal(uTx['amount'], Decimal('0')) assert(found) #do some -walletbroadcast tests stop_nodes(self.nodes) wait_bitcoinds() self.nodes = start_nodes(3, self.options.tmpdir, [["-walletbroadcast=0"],["-walletbroadcast=0"],["-walletbroadcast=0"]]) connect_nodes_bi(self.nodes,0,1) connect_nodes_bi(self.nodes,1,2) connect_nodes_bi(self.nodes,0,2) self.sync_all() txIdNotBroadcasted = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2) txObjNotBroadcasted = self.nodes[0].gettransaction(txIdNotBroadcasted) self.nodes[1].generate(1) #mine a block, tx should not be in there self.sync_all() assert_equal(self.nodes[2].getbalance(), node_2_bal) #should not be changed because tx was not broadcasted #now broadcast from another node, mine a block, sync, and check the balance self.nodes[1].sendrawtransaction(txObjNotBroadcasted['hex']) self.nodes[1].generate(1) self.sync_all() node_2_bal += 2 txObjNotBroadcasted = self.nodes[0].gettransaction(txIdNotBroadcasted) assert_equal(self.nodes[2].getbalance(), node_2_bal) #create another tx txIdNotBroadcasted = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2) #restart the nodes with -walletbroadcast=1 stop_nodes(self.nodes) wait_bitcoinds() self.nodes = start_nodes(3, self.options.tmpdir) connect_nodes_bi(self.nodes,0,1) connect_nodes_bi(self.nodes,1,2) connect_nodes_bi(self.nodes,0,2) sync_blocks(self.nodes) self.nodes[0].generate(1) sync_blocks(self.nodes) node_2_bal += 2 #tx should be added to balance because after restarting the nodes tx should be broadcastet assert_equal(self.nodes[2].getbalance(), node_2_bal) #send a tx with value in a string (PR#6380 +) txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "2") txObj = self.nodes[0].gettransaction(txId) assert_equal(txObj['amount'], Decimal('-2')) txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "0.0001") txObj = self.nodes[0].gettransaction(txId) assert_equal(txObj['amount'], Decimal('-0.0001')) #check if JSON parser can handle scientific notation in strings txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "1e-4") txObj = self.nodes[0].gettransaction(txId) assert_equal(txObj['amount'], Decimal('-0.0001')) try: txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "1f-4") except JSONRPCException as e: assert("Invalid amount" in e.error['message']) else: raise AssertionError("Must not parse invalid amounts") try: self.nodes[0].generate("2") raise AssertionError("Must not accept strings as numeric") except JSONRPCException as e: assert("not an integer" in e.error['message']) # Import address and private key to check correct behavior of spendable unspents # 1. 
Send some coins to generate new UTXO address_to_import = self.nodes[2].getnewaddress() txid = self.nodes[0].sendtoaddress(address_to_import, 1) self.nodes[0].generate(1) self.sync_all() # 2. Import address from node2 to node1 self.nodes[1].importaddress(address_to_import) # 3. Validate that the imported address is watch-only on node1 assert(self.nodes[1].validateaddress(address_to_import)["iswatchonly"]) # 4. Check that the unspents after import are not spendable assert_array_result(self.nodes[1].listunspent(), {"address": address_to_import}, {"spendable": False}) # 5. Import private key of the previously imported address on node1 priv_key = self.nodes[2].dumpprivkey(address_to_import) self.nodes[1].importprivkey(priv_key) # 6. Check that the unspents are now spendable on node1 assert_array_result(self.nodes[1].listunspent(), {"address": address_to_import}, {"spendable": True}) #check if wallet or blochchain maintenance changes the balance self.sync_all() blocks = self.nodes[0].generate(2) self.sync_all() balance_nodes = [self.nodes[i].getbalance() for i in range(3)] block_count = self.nodes[0].getblockcount() # Check modes: # - True: unicode escaped as \u.... # - False: unicode directly as UTF-8 for mode in [True, False]: self.nodes[0].ensure_ascii = mode # unicode check: Basic Multilingual Plane, Supplementary Plane respectively for s in [u'рыба', u'𝅘𝅥𝅯']: addr = self.nodes[0].getaccountaddress(s) label = self.nodes[0].getaccount(addr) assert_equal(label.encode('utf-8'), s.encode('utf-8')) # TODO remove encode(...) when supporting only Python3 assert(s in self.nodes[0].listaccounts().keys()) self.nodes[0].ensure_ascii = True # restore to default # maintenance tests maintenance = [ '-rescan', '-reindex', '-zapwallettxes=1', '-zapwallettxes=2', '-salvagewallet', ] for m in maintenance: print "check " + m stop_nodes(self.nodes) wait_bitcoinds() self.nodes = start_nodes(3, self.options.tmpdir, [[m]] * 3) while m == '-reindex' and [block_count] * 3 != [self.nodes[i].getblockcount() for i in range(3)]: # reindex will leave rpc warm up "early"; Wait for it to finish time.sleep(0.1) assert_equal(balance_nodes, [self.nodes[i].getbalance() for i in range(3)]) # Exercise listsinceblock with the last two blocks coinbase_tx_1 = self.nodes[0].listsinceblock(blocks[0]) assert_equal(coinbase_tx_1["lastblock"], blocks[1]) assert_equal(len(coinbase_tx_1["transactions"]), 1) assert_equal(coinbase_tx_1["transactions"][0]["blockhash"], blocks[1]) assert_equal(len(self.nodes[0].listsinceblock(blocks[1])["transactions"]), 0) if __name__ == '__main__': WalletTest ().main ()
{ "content_hash": "b1c590cb54439ff7ed029fdd92181859", "timestamp": "", "source": "github", "line_count": 342, "max_line_length": 165, "avg_line_length": 44.646198830409354, "alnum_prop": 0.6169362761150042, "repo_name": "rsdevgun16e/energi", "id": "d9d775f035d9b4a13ac4e510ca9469d2993ac825", "size": "15562", "binary": false, "copies": "2", "ref": "refs/heads/energi_v0", "path": "qa/rpc-tests/wallet.py", "mode": "33261", "license": "mit", "language": [ { "name": "C", "bytes": "1345744" }, { "name": "C++", "bytes": "5650465" }, { "name": "CSS", "bytes": "124291" }, { "name": "HTML", "bytes": "50621" }, { "name": "Java", "bytes": "2100" }, { "name": "M4", "bytes": "164843" }, { "name": "Makefile", "bytes": "98216" }, { "name": "Objective-C", "bytes": "4947" }, { "name": "Objective-C++", "bytes": "7228" }, { "name": "Protocol Buffer", "bytes": "2308" }, { "name": "Python", "bytes": "715296" }, { "name": "QMake", "bytes": "2057" }, { "name": "Roff", "bytes": "3766" }, { "name": "Shell", "bytes": "35733" } ], "symlink_target": "" }
{ ' (leave empty to detach account)': ' (leave empty to detach account)', ' Module is the main communications hub of the Sahana system. It is used to send alerts and/or messages using SMS & Email to various groups and individuals before, during and after a disaster.': ' Module is the main communications hub of the Sahana system. It is used to send alerts and/or messages using SMS & Email to various groups and individuals before, during and after a disaster.', ' by ': ' by ', ' is envisioned to be composed of several sub-modules that work together to provide complex functionality for the management of relief and project items by an organization. This includes an intake system, a warehouse management system, commodity tracking, supply chain management, fleet management, procurement, financial tracking and other asset and resource management capabilities.': ' is envisioned to be composed of several sub-modules that work together to provide complex functionality for the management of relief and project items by an organization. This includes an intake system, a warehouse management system, commodity tracking, supply chain management, fleet management, procurement, financial tracking and other asset and resource management capabilities.', ' on ': ' on ', '"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN', '# of International Staff': 'αριθμός προσωπικού από άλλες χώρες', '# of National Staff': '# of National Staff', '# of People Affected': 'Αριθμός ατόμων που πλήττονται', '# of People Deceased': '# of People Deceased', '# of People Injured': '# of People Injured', '# of Vehicles': '# of Vehicles', '%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S', '%s rows deleted': '%s rows deleted', '%s rows updated': '%s rows updated', '(Constraints Only)': '(Constraints Only)', ') & then click on the map below to adjust the Lat/Lon fields:': ') & then click on the map below to adjust the Lat/Lon fields:', '* Required Fields': '* Required Fields', '0-15 minutes': '0-15 minutes', '1 Assessment': '1 Assessment', '1 location, shorter time, can contain multiple Tasks': '1 location, shorter time, can contain multiple Tasks', '1-3 days': '1-3 days', '1. Fill the necessary fields in BLOCK letters.': '1. Fill the necessary fields in BLOCK letters.', '15-30 minutes': '15-30 minutes', '2 different options are provided here currently:': '2 different options are provided here currently:', '2. Always use one box per letter and leave one box space to seperate words.': '2. Always use one box per letter and leave one box space to seperate words.', '2x4 Car': '2x4 Car', '30-60 minutes': '30-60 minutes', '4-7 days': '4-7 days', '4x4 Car': '4x4 Car', '8-14 days': '8-14 ημέρες', 'A Reference Document such as a file, URL or contact person to verify this data. You can type the 1st few characters of the document name to link to an existing document.': 'Ένα έγγραφο αναφοράς, όπως το πρόσωπο αρχείο, URL ή άτομο επικοινωνίας για την επαλήθευση αυτών των δεδομένων. Μπορείτε να πληκτρολογήσετε μερικούς από τους πρώτους χαρακτήρες του ονόματος του εγγράφου για τη σύνδεση με ένα υπάρχον έγγραφο.', 'A Warehouse is a physical place to store items.': 'A Warehouse is a physical place to store items.', 'A Warehouse/Site is a physical location with an address and GIS data where Items are Stored. 
It can be a Building, a particular area in a city or anything similar.': 'A Warehouse/Site is a physical location with an address and GIS data where Items are Stored. It can be a Building, a particular area in a city or anything similar.', 'A brief description of the group (optional)': 'A brief description of the group (optional)', 'A file downloaded from a GPS containing a series of geographic points in XML format.': 'A file downloaded from a GPS containing a series of geographic points in XML format.', 'A file in GPX format taken from a GPS whose timestamps can be correlated with the timestamps on the photos to locate them on the map.': 'A file in GPX format taken from a GPS whose timestamps can be correlated with the timestamps on the photos to locate them on the map.', 'A library of digital resources, such as photos, documents and reports': 'A library of digital resources, such as photos, documents and reports', 'A place within a Site like a Shelf, room, bin number etc.': 'A place within a Site like a Shelf, room, bin number etc.', 'A snapshot of the bin or additional documents that contain supplementary information about it can be uploaded here.': 'A snapshot of the bin or additional documents that contain supplementary information about it can be uploaded here.', 'A snapshot of the location or additional documents that contain supplementary information about the Site Location can be uploaded here.': 'A snapshot of the location or additional documents that contain supplementary information about the Site Location can be uploaded here.', 'A snapshot of the location or additional documents that contain supplementary information about the Site can be uploaded here.': 'Μία "εικόνα" της περιοχής ή επιπρόσθετα έγγγραφα που περιέχουν συμπληρωματικές πληροφορίες για την περιοχή μπορούν να "μεταφορτοθούν" εδώ. ', 'A survey series with id %s does not exist. Please go back and create one.': 'A survey series with id %s does not exist. Please go back and create one.', 'ABOUT': 'ABOUT', 'ABOUT THIS MODULE': 'ΣΧΕΤΙΚΑ ΜΕ ΑΥΤΟ ΤΟ ΥΠΟΠΡΟΓΡΑΜΜΑ', 'ACCESS DATA': 'ACCESS DATA', 'ANY': 'ANY', 'API is documented here': 'API is documented here', 'Ability to Fill Out Surveys': 'Ability to Fill Out Surveys', 'Ability to customize the list of details tracked at a Shelter': 'Ability to customize the list of details tracked at a Shelter', 'Ability to customize the list of human resource tracked at a Shelter': 'Δυνατότητα παραμετροποίησης των ανθρώπων που εντοπίστηκαν σε ένα καταφύγιο', 'Ability to customize the list of important facilities needed at a Shelter': 'Δυνατότητα για παραμετροποίηση του καταλόγου σημαντικών υποδομών που είνια απαραίτητα σε ένα καταφήγιο', 'Ability to track partial fulfillment of the request': 'Δυνατότητα εντοπισμού-καταγραφή μερικής ικανοποίησης του αιτήματος', 'Ability to view Results of Completed and/or partially filled out Surveys': 'Ability to view Results of Completed and/or partially filled out Surveys', 'About': 'About', 'About Sahana': 'About Sahana', 'About Sahana Eden': 'About Sahana Eden', 'About this module': 'About this module', 'Access denied': 'Access denied', 'Accessibility of Affected Location': 'Accessibility of Affected Location', 'Account registered, however registration is still pending approval - please wait until confirmation received.': 'Account registered, however registration is still pending approval - please wait until confirmation received.', 'Acronym': 'Acronym', "Acronym of the organization's name, eg. IFRC.": "Acronym of the organization's name, eg. 
IFRC.", 'Actionable by all targeted recipients': 'Actionable by all targeted recipients', 'Actionable only by designated exercise participants; exercise identifier SHOULD appear in <note>': 'Actionable only by designated exercise participants; exercise identifier SHOULD appear in <note>', 'Actioned?': 'Actioned?', 'Active Problems': 'Active Problems', 'Activities': 'Activities', 'Activities matching Assessments:': 'Activities matching Assessments:', 'Activities of boys 13-17yrs before disaster': 'Activities of boys 13-17yrs before disaster', 'Activities of boys 13-17yrs now': 'Activities of boys 13-17yrs now', 'Activities of boys <12yrs before disaster': 'Activities of boys <12yrs before disaster', 'Activities of boys <12yrs now': 'Activities of boys <12yrs now', 'Activities of girls 13-17yrs before disaster': 'Activities of girls 13-17yrs before disaster', 'Activities of girls 13-17yrs now': 'Activities of girls 13-17yrs now', 'Activities of girls <12yrs before disaster': 'Activities of girls <12yrs before disaster', 'Activities of girls <12yrs now': 'Activities of girls <12yrs now', 'Activities:': 'Activities:', 'Activity': 'Activity', 'Activity Added': 'Activity Added', 'Activity Deleted': 'Activity Deleted', 'Activity Details': 'Λεπτομέρειες Δραστηριότητας', 'Activity Report': 'Activity Report', 'Activity Reports': 'Activity Reports', 'Activity Type': 'Activity Type', 'Activity Updated': 'Activity Updated', 'Add': 'Add', 'Add Activity': 'Add Activity', 'Add Activity Report': 'Add Activity Report', 'Add Activity Type': 'Add Activity Type', 'Add Address': 'Add Address', 'Add Assessment': 'Add Assessment', 'Add Assessment Summary': 'Add Assessment Summary', 'Add Baseline': 'Προσθήκη Αρχικής κατάστασης', 'Add Baseline Type': 'Add Baseline Type', 'Add Bed Type': 'Add Bed Type', 'Add Bin Type': 'Add Bin Type', 'Add Bins': 'Add Bins', 'Add Budget': 'Add Budget', 'Add Bundle': 'Add Bundle', 'Add Catalog': 'Προσθήκη καταλόγου', 'Add Catalog Item': 'Add Catalog Item', 'Add Catalog.': 'Add Catalog.', 'Add Category': 'Add Category', 'Add Category<>Sub-Category<>Catalog Relation': 'Add Category<>Sub-Category<>Catalog Relation', 'Add Cholera Treatment Capability Information': 'Add Cholera Treatment Capability Information', 'Add Cluster Subsector': 'Add Cluster Subsector', 'Add Config': 'Add Config', 'Add Contact': 'Προσθήκη επαφής', 'Add Contact Information': 'Add Contact Information', 'Add Disaster Victims': 'Add Disaster Victims', 'Add Distribution': 'Add Distribution', 'Add Distribution.': 'Add Distribution.', 'Add Donor': 'Add Donor', 'Add Feature Class': 'Add Feature Class', 'Add Feature Layer': 'Add Feature Layer', 'Add Flood Report': 'Add Flood Report', 'Add Group': 'Add Group', 'Add Group Member': 'Add Group Member', 'Add Hospital': 'Add Hospital', 'Add Identification Report': 'Add Identification Report', 'Add Identity': 'Add Identity', 'Add Image': 'Προσθήκη εικόνας', 'Add Impact': 'Add Impact', 'Add Impact Type': 'Add Impact Type', 'Add Incident': 'Add Incident', 'Add Incident Report': 'Προσθήκη αναφοράς συμβάντος', 'Add Item': 'Add Item', 'Add Item (s)': 'Add Item (s)', 'Add Item Catalog': 'Add Item Catalog', 'Add Item Catalog ': 'Add Item Catalog ', 'Add Item Catalog Category ': 'Προσθήκη κατηγορίας καταλόγου αντικειμένων', 'Add Item Category': 'Add Item Category', 'Add Item Packet': 'Add Item Packet', 'Add Item Sub-Category': 'Add Item Sub-Category', 'Add Item to Shipment': 'Add Item to Shipment', 'Add Key': 'Add Key', 'Add Kit': 'Προσθήκη Kit', 'Add Layer': 'Add Layer', 'Add Location': 
'Add Location', 'Add Locations': 'Add Locations', 'Add Log Entry': 'Add Log Entry', 'Add Member': 'Add Member', 'Add Membership': 'Add Membership', 'Add Message': 'Προσθήκη μηνύματος', 'Add Need': 'Add Need', 'Add Need Type': 'Add Need Type', 'Add New': 'Add New', 'Add New Activity': 'Add New Activity', 'Add New Address': 'Προσθήκη νέας Διεύθυνσης', 'Add New Assessment': 'Add New Assessment', 'Add New Assessment Summary': 'Πρόσθεσε νέα έκθεση αξιολόγησης', 'Add New Baseline': 'Add New Baseline', 'Add New Baseline Type': 'Add New Baseline Type', 'Add New Bin': 'Add New Bin', 'Add New Bin Type': 'Add New Bin Type', 'Add New Budget': 'Add New Budget', 'Add New Bundle': 'Add New Bundle', 'Add New Catalog Item': 'Add New Catalog Item', 'Add New Cluster Subsector': 'Add New Cluster Subsector', 'Add New Config': 'Add New Config', 'Add New Contact': 'Add New Contact', 'Add New Distribution': 'Add New Distribution', 'Add New Distribution Item': 'Add New Distribution Item', 'Add New Document': 'Add New Document', 'Add New Donor': 'Προσθήκη νέου δωρητή', 'Add New Entry': 'Add New Entry', 'Add New Feature Class': 'Add New Feature Class', 'Add New Feature Layer': 'Add New Feature Layer', 'Add New Flood Report': 'Add New Flood Report', 'Add New Group': 'Add New Group', 'Add New Hospital': 'Add New Hospital', 'Add New Identity': 'Προσθήκη νέας ταυτότητας', 'Add New Image': 'Add New Image', 'Add New Impact': 'Add New Impact', 'Add New Impact Type': 'Add New Impact Type', 'Add New Incident': 'Προσθήκη νέου συμβάντος', 'Add New Incident Report': 'Add New Incident Report', 'Add New Item': 'Add New Item', 'Add New Item Catalog': 'Add New Item Catalog', 'Add New Item Catalog Category': 'Add New Item Catalog Category', 'Add New Item Category': 'Add New Item Category', 'Add New Item Packet': 'Add New Item Packet', 'Add New Item Sub-Category': 'Add New Item Sub-Category', 'Add New Item to Kit': 'Add New Item to Kit', 'Add New Key': 'Προσθήκη νέου κλειδιού', 'Add New Kit': 'Προσθήκη νέου Kit', 'Add New Layer': 'Add New Layer', 'Add New Location': 'Add New Location', 'Add New Log Entry': 'Add New Log Entry', 'Add New Marker': 'Add New Marker', 'Add New Member': 'Add New Member', 'Add New Membership': 'Add New Membership', 'Add New Metadata': 'Add New Metadata', 'Add New Need': 'Προσθήκη Νέων Αναγκών', 'Add New Need Type': 'Add New Need Type', 'Add New Note': 'Add New Note', 'Add New Office': 'Προσθήκη νέου Φορέα-γραφείου', 'Add New Organization': 'Add New Organization', 'Add New Photo': 'Add New Photo', 'Add New Position': 'Add New Position', 'Add New Problem': 'Add New Problem', 'Add New Project': 'Add New Project', 'Add New Projection': 'Add New Projection', 'Add New Rapid Assessment': 'Add New Rapid Assessment', 'Add New Received Item': 'Add New Received Item', 'Add New Record': 'Add New Record', 'Add New Report': 'Προσθήκη νέας Αναφοράς', 'Add New Request': 'Add New Request', 'Add New Request Item': 'Add New Request Item', 'Add New Resource': 'Add New Resource', 'Add New Response': 'Προσθήκη νέας ανταπόκρισης', 'Add New River': 'Add New River', 'Add New Role': 'Add New Role', 'Add New Role to User': 'Add New Role to User', 'Add New Sector': 'Προσθήκη νέου τομέα', 'Add New Sent Item': 'Add New Sent Item', 'Add New Setting': 'Add New Setting', 'Add New Shelter': 'Προσθήκη νέου Καταφυγίου', 'Add New Shelter Service': 'Προσθήκη Νέας Υπηρεσίας Καταφυγίου', 'Add New Shelter Type': 'Add New Shelter Type', 'Add New Shipment to Send': 'Add New Shipment to Send', 'Add New Site': 'Add New Site', 'Add New Skill': 'Προσθήκη 
νέου προσόντος', 'Add New Skill Type': 'Add New Skill Type', 'Add New Solution': 'Add New Solution', 'Add New Staff': 'Add New Staff', 'Add New Staff Type': 'Add New Staff Type', 'Add New Storage Location': 'Προσθήκη νέας περιοχής αποθήκευσης', 'Add New Survey Answer': 'Add New Survey Answer', 'Add New Survey Question': 'Add New Survey Question', 'Add New Survey Section': 'Add New Survey Section', 'Add New Survey Series': 'Add New Survey Series', 'Add New Survey Template': 'Add New Survey Template', 'Add New Task': 'Add New Task', 'Add New Team': 'Add New Team', 'Add New Theme': 'Add New Theme', 'Add New Ticket': 'Add New Ticket', 'Add New Track': 'Add New Track', 'Add New Unit': 'Add New Unit', 'Add New User': 'Add New User', 'Add New User to Role': 'Add New User to Role', 'Add New Warehouse': 'Προσθήκη Νέας Αποθήκης', 'Add New Warehouse Item': 'Add New Warehouse Item', 'Add Note': 'Add Note', 'Add Office': 'Προσθήκη γραφείου', 'Add Organization': 'Προσθήκη οργανισμού', 'Add Peer': 'Add Peer', 'Add Person': 'Προσθήκη ατόμου', 'Add Personal Effects': 'Add Personal Effects', 'Add Photo': 'Add Photo', 'Add Position': 'Προσθήκη θέσης', 'Add Problem': 'Add Problem', 'Add Project': 'Add Project', 'Add Projection': 'Add Projection', 'Add Question': 'Add Question', 'Add Rapid Assessment': 'Add Rapid Assessment', 'Add Recipient': 'Add Recipient', 'Add Recipient Site': 'Add Recipient Site', 'Add Recipient Site.': 'Add Recipient Site.', 'Add Record': 'Add Record', 'Add Recovery Report': 'Add Recovery Report', 'Add Reference Document': 'Add Reference Document', 'Add Report': 'Add Report', 'Add Request': 'Add Request', 'Add Request Detail': 'Add Request Detail', 'Add Request Item': 'Add Request Item', 'Add Resource': 'Προσθήκη πόρου', 'Add Response': 'Add Response', 'Add River': 'Add River', 'Add Role': 'Add Role', 'Add Section': 'Add Section', 'Add Sector': 'Προσθήκη Τομέα', 'Add Sender Organization': 'Προσθήκη οργανισμού που αποστέλλει', 'Add Sender Site': 'Add Sender Site', 'Add Sender Site.': 'Add Sender Site.', 'Add Service Profile': 'Add Service Profile', 'Add Setting': 'Add Setting', 'Add Shelter': 'Add Shelter', 'Add Shelter Service': 'Add Shelter Service', 'Add Shelter Type': 'Add Shelter Type', 'Add Shipment Transit Log': 'Add Shipment Transit Log', 'Add Shipment/Way Bills': 'Add Shipment/Way Bills', 'Add Site': 'Add Site', 'Add Skill': 'Add Skill', 'Add Skill Type': 'Add Skill Type', 'Add Skill Types': 'Προσθήκη κατηγορίας προσόντων', 'Add Solution': 'Add Solution', 'Add Staff': 'Add Staff', 'Add Staff Type': 'Add Staff Type', 'Add Status': 'Add Status', 'Add Storage Bin ': 'Add Storage Bin ', 'Add Storage Bin Type': 'Add Storage Bin Type', 'Add Storage Location': 'Add Storage Location', 'Add Storage Location ': 'Add Storage Location ', 'Add Sub-Category': 'Add Sub-Category', 'Add Subscription': 'Add Subscription', 'Add Survey Answer': 'Add Survey Answer', 'Add Survey Question': 'Προσθήκη ερώτησης έρευνας', 'Add Survey Section': 'Add Survey Section', 'Add Survey Series': 'Add Survey Series', 'Add Survey Template': 'Add Survey Template', 'Add Task': 'Add Task', 'Add Team': 'Add Team', 'Add Theme': 'Add Theme', 'Add Ticket': 'Add Ticket', 'Add Unit': 'Add Unit', 'Add User': 'Add User', 'Add Volunteer': 'Add Volunteer', 'Add Volunteer Registration': 'Add Volunteer Registration', 'Add Warehouse': 'Add Warehouse', 'Add Warehouse Item': 'Προσθήκη αντικειμένου αποθήκης', 'Add a Person': 'Add a Person', 'Add a Reference Document such as a file, URL or contact person to verify this data. 
If you do not enter a Reference Document, your email will be displayed instead.': 'Add a Reference Document such as a file, URL or contact person to verify this data. If you do not enter a Reference Document, your email will be displayed instead.', 'Add a Volunteer': 'Add a Volunteer', 'Add a new Site from where the Item is being sent.': 'Προσθήκη νέας τοποθεσίας από όπου το αντικείμενο αποστέλλεται', 'Add a new Site where the Item is being sent to.': 'Add a new Site where the Item is being sent to.', 'Add an Photo.': 'Add a Photo.', 'Add location': 'Add location', 'Add main Item Category.': 'Add main Item Category.', 'Add main Item Sub-Category.': 'Add main Item Sub-Category.', 'Add new Group': 'Add new Group', 'Add new Individual': 'Add new Individual', 'Add new position.': 'Add new position.', 'Add new project.': 'Add new project.', 'Add new staff role.': 'Add new staff role.', 'Add the Storage Bin Type.': 'Add the Storage Bin Type.', 'Add the Storage Location where this bin is located.': 'Add the Storage Location where this bin is located.', 'Add the Storage Location where this this Bin belongs to.': 'Προσθέστε τον αποθηκευτικό χώρο όπου το "καλάθι" ανήκει.', 'Add the main Warehouse/Site information where this Bin belongs to.': 'Add the main Warehouse/Site information where this Bin belongs to.', 'Add the main Warehouse/Site information where this Item is to be added.': 'Add the main Warehouse/Site information where this Item is to be added.', 'Add the main Warehouse/Site information where this Storage location is.': 'Add the main Warehouse/Site information where this Storage location is.', 'Add the unit of measure if it doesnt exists already.': "Add the unit of measure if it doesn't exist already.", 'Add to Bundle': 'Add to Bundle', 'Add to Catalog': 'Add to Catalog', 'Add to budget': 'Add to budget', 'Add/Edit/Remove Layers': 'Add/Edit/Remove Layers', 'Additional Beds / 24hrs': 'Additional Beds / 24hrs', 'Additional Comments': 'Additional Comments', "Additional quantity quantifier – e.g. '4x5'.": "Additional quantity quantifier – e.g. 
'4x5'.", 'Address': 'Address', 'Address Details': 'Address Details', 'Address Type': 'Τύπος διεύθυνσης', 'Address added': 'Address added', 'Address deleted': 'Διαγραφή Διεύθυνσης', 'Address updated': 'Address updated', 'Addresses': 'Addresses', 'Adequate': 'Adequate', 'Adequate food and water available': 'Adequate food and water available', 'Adjust Item(s) Quantity': 'Adjust Item(s) Quantity', 'Adjust Items due to Theft/Loss': 'Adjust Items due to Theft/Loss', 'Admin': 'Admin', 'Admin Email': 'Admin Email', 'Admin Name': 'Admin Name', 'Admin Tel': 'Admin Tel', 'Administration': 'Administration', 'Administrator': 'Administrator', 'Admissions/24hrs': 'Admissions/24hrs', 'Adolescent (12-20)': 'Adolescent (12-20)', 'Adolescent participating in coping activities': 'Adolescent participating in coping activities', 'Adult (21-50)': 'Adult (21-50)', 'Adult ICU': 'Adult ICU', 'Adult Psychiatric': 'Ψυχιατρικό ενηλίκων', 'Adult female': 'Ενήλικας Γυναίκα ', 'Adult male': 'Adult male', 'Adults in prisons': 'Adults in prisons', 'Advanced Bin Search': 'Advanced Bin Search', 'Advanced Catalog Search': 'Advanced Catalog Search', 'Advanced Category Search': 'Advanced Category Search', 'Advanced Item Search': 'Advanced Item Search', 'Advanced Location Search': 'Προηγμένη αναζήτηση θέσης', 'Advanced Site Search': 'Advanced Site Search', 'Advanced Sub-Category Search': 'Advanced Sub-Category Search', 'Advanced Unit Search': 'Advanced Unit Search', 'Advanced:': 'Advanced:', 'Advisory': 'Advisory', 'After clicking on the button, a set of paired items will be shown one by one. Please select the one solution from each pair that you prefer over the other.': 'After clicking on the button, a set of paired items will be shown one by one. Please select the one solution from each pair that you prefer over the other.', 'Age Group': 'Age Group', 'Age group': 'Age group', 'Age group does not match actual age.': 'Ηλικιακή ομάδα δεν αντιστοιχεί στην πραγματική ηλικία.', 'Aggravating factors': 'Aggravating factors', 'Aggregate Items': 'Aggregate Items', 'Agriculture': 'Agriculture', 'Air Transport Service': 'Air Transport Service', 'Air tajin': 'Air tajin', 'Aircraft Crash': 'Aircraft Crash', 'Aircraft Hijacking': 'Aircraft Hijacking', 'Airport Closure': 'Airport Closure', 'Airspace Closure': 'Airspace Closure', 'Alcohol': 'Alcohol', 'Alert': 'Alert', 'All': 'All', 'All Inbound & Outbound Messages are stored here': 'All Inbound & Outbound Messages are stored here', 'All Locations': 'All Locations', 'All Requested Items': 'Όλα τα ζητηθέντα αντικείμενα', 'All Resources': 'All Resources', 'All data provided by the Sahana Software Foundation from this site is licenced under a Creative Commons Attribution licence. However, not all data originates here. Please consult the source field of each entry.': 'All data provided by the Sahana Software Foundation from this site is licenced under a Creative Commons Attribution licence. However, not all data originates here. 
Please consult the source field of each entry.', 'Allowed to push': 'Επιτρέπεται το push', 'Allows a Budget to be drawn up': 'Allows a Budget to be drawn up', 'Allows authorized users to control which layers are available to the situation map.': 'Επιτρέπει σε εξουσιοδοτημένους χρήστες να ελέγχουν ποια χαρτογραφικά επίπεδα είναι διαθέσιμα στο χάρτη απεικόνισης της κατάστασης', 'Alternative infant nutrition in use': 'Alternative infant nutrition in use', 'Alternative places for studying': 'Alternative places for studying', 'Alternative places for studying available': 'Alternative places for studying available', 'Ambulance Service': 'Ambulance Service', 'An intake system, a warehouse management system, commodity tracking, supply chain management, procurement and other asset and resource management capabilities.': 'An intake system, a warehouse management system, commodity tracking, supply chain management, procurement and other asset and resource management capabilities.', 'Analysis of Completed Surveys': 'Analysis of Completed Surveys', 'Animal Die Off': 'Animal Die Off', 'Animal Feed': 'Animal Feed', 'Animals': 'Animals', 'Answer Choices (One Per Line)': 'Επιλογές απαντήσεων (Μία ανά γραμμή)', 'Anthropolgy': 'Anthropology', 'Antibiotics available': 'Antibiotics available', 'Antibiotics needed per 24h': 'Antibiotics needed per 24h', 'Any available Metadata in the files will be read automatically, such as Timestamp, Author, Latitude & Longitude.': 'Οποιαδήποτε διαθέσιμα μεταδεδομένα στα αρχεία θα διαβαστούν αυτόματα, όπως Χρονοσφραγίδα, Συγγραφέας, Γεωγραφικό μήκος & πλάτος', 'Apparent Age': 'Apparent Age', 'Apparent Gender': 'Apparent Gender', 'Appropriate clothing available': 'Appropriate clothing available', 'Appropriate cooking equipment/materials in HH': 'Appropriate cooking equipment/materials in HH', 'Approx. number of cases/48h': 'Approx. 
number of cases/48h', 'Approximately how many children under 5 with diarrhea in the past 48 hours?': 'Approximately how many children under 5 with diarrhea in the past 48 hours?', 'Archive not Delete': 'Archive not Delete', 'Arctic Outflow': 'Arctic Outflow', 'Are basic medical supplies available for health services since the disaster?': 'Are basic medical supplies available for health services since the disaster?', 'Are breast milk substitutes being used here since the disaster?': 'Are breast milk substitutes being used here since the disaster?', 'Are the areas that children, older people, and people with disabilities live in, play in and walk through on a daily basis physically safe?': 'Are the areas that children, older people, and people with disabilities live in, play in and walk through on a daily basis physically safe?', 'Are the chronically ill receiving sufficient care and assistance?': 'Are the chronically ill receiving sufficient care and assistance?', 'Are there adults living in prisons in this area?': 'Are there adults living in prisons in this area?', 'Are there alternative places for studying?': 'Are there alternative places for studying?', 'Are there cases of diarrhea among children under the age of 5?': 'Are there cases of diarrhea among children under the age of 5?', 'Are there children living in adult prisons in this area?': 'Are there children living in adult prisons in this area?', 'Are there children living in boarding schools in this area?': 'Are there children living in boarding schools in this area?', 'Are there children living in homes for disabled children in this area?': 'Are there children living in homes for disabled children in this area?', 'Are there children living in juvenile detention in this area?': 'Are there children living in juvenile detention in this area?', 'Are there children living in orphanages in this area?': 'Are there children living in orphanages in this area?', 'Are there children with chronical illnesses in your community?': 'Are there children with chronical illnesses in your community?', 'Are there health services functioning for the community since the disaster?': 'Are there health services functioning for the community since the disaster?', 'Are there older people living in care homes in this area?': 'Are there older people living in care homes in this area?', 'Are there older people with chronical illnesses in your community?': 'Are there older people with chronical illnesses in your community?', 'Are there people with chronical illnesses in your community?': 'Are there people with chronical illnesses in your community?', 'Are there separate latrines for women and men available?': 'Are there separate latrines for women and men available?', 'Are there staff present and caring for the residents in these institutions?': 'Are there staff present and caring for the residents in these institutions?', 'Area': 'Area', 'Assessment': 'Εκτίμηση - Αξιολόγηση', 'Assessment Details': 'Assessment Details', 'Assessment Reported': 'Assessment Reported', 'Assessment Summaries': 'Assessment Summaries', 'Assessment Summary Details': 'Λεπτομέρειες της Έκθεσης Εκτίμησης', 'Assessment Summary added': 'Assessment Summary added', 'Assessment Summary deleted': 'Assessment Summary deleted', 'Assessment Summary updated': 'Assessment Summary updated', 'Assessment Type': 'Assessment Type', 'Assessment added': 'Assessment added', 'Assessment deleted': 'Assessment deleted', 'Assessment updated': 'Η αξιολόγηση ενημερώθηκε', 'Assessments': 'Assessments', 'Assessments 
Needs vs. Activities': 'Assessments Needs vs. Activities', 'Assessments and Activities': 'Assessments and Activities', 'Assessments:': 'Assessments:', 'Assessor': 'Assessor', 'Assign Storage Location': 'Assign Storage Location', 'Assign to Org.': 'Ανάθεση στον Οργανισμό.', 'Assigned': 'Assigned', 'Assigned To': 'Assigned To', 'Assigned to': 'Assigned to', 'Assistance for immediate repair/reconstruction of houses': 'Assistance for immediate repair/reconstruction of houses', 'Assistant': 'Assistant', 'At/Visited Location (not virtual)': 'Στην/Επισκεπτόμενη Θέση (μη εικονική)', 'Attend to information sources as described in <instruction>': 'Attend to information sources as described in <instruction>', 'Attribution': 'Attribution', 'Audit Read': 'Audit Read', 'Audit Write': 'Audit Write', "Authenticate system's Twitter account": "Authenticate system's Twitter account", 'Author': 'Author', 'Automotive': 'Automotive', 'Availability': 'Availability', 'Available Beds': 'Available Beds', 'Available Messages': 'Available Messages', 'Available Records': 'Available Records', 'Available databases and tables': 'Available databases and tables', 'Available from': 'Available from', 'Available in Viewer?': 'Διαθέσιμο στην απεικόνιση;', 'Available until': 'Available until', 'Avalanche': 'Avalanche', 'Avoid the subject event as per the <instruction>': 'Avoid the subject event as per the <instruction>', 'Babies who are not being breastfed, what are they being fed on?': 'Babies who are not being breastfed, what are they being fed on?', 'Baby And Child Care': 'Φροντίδα μωρού και παιδιού', 'Background Colour': 'Background Colour', 'Background Colour for Text blocks': 'Background Colour for Text blocks', 'Bahai': 'Bahai', 'Baldness': 'Baldness', 'Balochi': 'Balochi', 'Banana': 'Banana', 'Bank/micro finance': 'Bank/micro finance', 'Base Layer?': 'Base Layer?', 'Base Unit': 'Μονάδα Βάσης', 'Baseline Number of Beds': 'Baseline Number of Beds', 'Baseline Type': 'Baseline Type', 'Baseline Type Details': 'Baseline Type Details', 'Baseline Type added': 'Baseline Type added', 'Baseline Type deleted': 'Baseline Type deleted', 'Baseline Type updated': 'Baseline Type updated', 'Baseline Types': 'Τύποι Αρχικοποίησης', 'Baseline added': 'Baseline added', 'Baseline deleted': 'Baseline deleted', 'Baseline number of beds of that type in this unit.': 'Baseline number of beds of that type in this unit.', 'Baseline updated': 'Baseline updated', 'Baselines': 'Baselines', 'Baselines Details': 'Baselines Details', 'Basic': 'Βασικό', 'Basic Assess.': 'Basic Assess.', 'Basic Assessment': 'Basic Assessment', 'Basic Assessment Reported': 'Basic Assessment Reported', 'Basic Details': 'Basic Details', 'Basic information on the requests and donations, such as category, the units, contact details and the status.': 'Βασικές πληροφορίες για τα αιτήματα και τις δωρεές, όπως π.χ., κατηγορία, μονάδες, στοιχεία επικοινωνίας και κατάσταση', 'Basic medical supplies available prior to disaster': 'Basic medical supplies available prior to disaster', 'Basic medical supplies available since disaster': 'Basic medical supplies available since disaster', 'Basic reports on the Shelter and drill-down by region': 'Basic reports on the Shelter and drill-down by region', 'Baud': 'Ρυθμός μετάδοσης (baud)', 'Baud rate to use for your modem - The default is safe for most cases': 'Baud rate to use for your modem - The default is safe for most cases', 'Bed Capacity': 'Bed Capacity', 'Bed Capacity per Unit': 'Bed Capacity per Unit', 'Bed Type': 'Bed Type', 'Bed type 
already registered': 'Bed type already registered', 'Bedding materials available': 'Bedding materials available', 'Beneficiary Type': 'Beneficiary Type', 'Biological Hazard': 'Biological Hazard', 'Biscuits': 'Biscuits', 'Blizzard': 'Blizzard', 'Blood Type (AB0)': 'Blood Type (AB0)', 'Blowing Snow': 'Χιονοθύελλα', 'Boat': 'Boat', 'Bodies found': 'Bodies found', 'Bodies recovered': 'Bodies recovered', 'Body': 'Body', 'Body Recovery Reports': 'Body Recovery Reports', 'Body Recovery Request': 'Body Recovery Request', 'Body Recovery Requests': 'Body Recovery Requests', 'Bomb': 'Bomb', 'Bomb Explosion': 'Έκρηξη βόμβας', 'Bomb Threat': 'Bomb Threat', 'Border Colour for Text blocks': 'Χρώμα περιγράμματος για κείμενο ', 'Bounding Box Insets': 'Bounding Box Insets', 'Bounding Box Size': 'Bounding Box Size', 'Boys 13-18 yrs in affected area': 'Boys 13-18 yrs in affected area', 'Boys 13-18 yrs not attending school': 'Boys 13-18 yrs not attending school', 'Boys 6-12 yrs in affected area': 'Boys 6-12 yrs in affected area', 'Boys 6-12 yrs not attending school': 'Boys 6-12 yrs not attending school', 'Breast milk substitutes in use since disaster': 'Breast milk substitutes in use since disaster', 'Breast milk substitutes used prior to disaster': 'Breast milk substitutes used prior to disaster', 'Bricks': 'Bricks', 'Bridge Closed': 'Bridge Closed', 'Bucket': 'Bucket', 'Buddhist': 'Βουδιστής', 'Budget': 'Προϋπολογισμός', 'Budget Details': 'Budget Details', 'Budget Updated': 'Budget Updated', 'Budget added': 'Budget added', 'Budget deleted': 'Budget deleted', 'Budget updated': 'Budget updated', 'Budgeting Module': 'Budgeting Module', 'Budgets': 'Budgets', 'Buffer': 'Buffer', 'Building Aide': 'Οικοδομική βοήθεια', 'Building Collapsed': 'Κατάρρευση Κτιρίου', 'Built using the Template agreed by a group of NGOs working together as the': 'Built using the Template agreed by a group of NGOs working together as the', 'Bulk Uploader': 'Bulk Uploader', 'Bundle': 'Bundle', 'Bundle Contents': 'Bundle Contents', 'Bundle Details': 'Bundle Details', 'Bundle Updated': 'Το πακέτο ενημερώθηκε', 'Bundle added': 'Bundle added', 'Bundle deleted': 'Bundle deleted', 'Bundle updated': 'Bundle updated', 'Bundles': 'Bundles', 'Burn': 'Burn', 'Burn ICU': 'Burn ICU', 'Burned/charred': 'Burned/charred', 'Business damaged': 'Business damaged', 'By Warehouse': 'By Warehouse', 'CBA Women': 'CBA Women', 'CSS file %s not writable - unable to apply theme!': 'Το αρχείο CSS %s δεν είναι εγγράψιμο - αδύνατον να εφαρμοστεί το θέμα!', 'Calculate': 'Υπολογισμός', 'Camp': 'Camp', 'Camp Coordination/Management': 'Camp Coordination/Management', 'Can users register themselves for authenticated login access?': 'Can users register themselves for authenticated login access?', "Can't import tweepy": "Can't import tweepy", 'Cancel': 'Cancel', 'Cancelled': 'Cancelled', 'Candidate Matches for Body %s': 'Candidate Matches for Body %s', 'Canned Fish': 'Canned Fish', 'Cannot be empty': 'Cannot be empty', 'Capacity (Max Persons)': 'Capacity (Max Persons)', 'Capacity (W x D X H)': 'Capacity (W x D X H)', 'Capture Information on Disaster Victim groups (Tourists, Passengers, Families, etc.)': 'Συλλογή δεδομένων σε ομάδες θυμάτων (Τουρίστες, Επιβάτες, Οικογένειες, κλπ)', 'Capture Information on each disaster victim': 'Capture Information on each disaster victim', 'Capturing organizational information of a relief organization and all the projects they have in the region': 'Capturing organizational information of a relief organization and all the projects they have in 
the region', 'Capturing the essential services each Volunteer is providing and where': 'Capturing the essential services each Volunteer is providing and where', 'Capturing the projects each organization is providing and where': 'Capturing the projects each organization is providing and where', 'Cardiology': 'Cardiology', 'Cash available to restart business': 'Cash available to restart business', 'Cassava': 'Cassava', 'Casual Labor': 'Περιστασιακή εργασία', 'Catalog': 'Catalog', 'Catalog Item': 'Catalog Item', 'Catalog Item added': 'Catalog Item added', 'Catalog Item deleted': 'Catalog Item deleted', 'Catalog Item updated': 'Catalog Item updated', 'Catalog Items': 'Catalog Items', 'Catalog Name': 'Catalog Name', 'Category': 'Category', 'Category<>Sub-Category<>Catalog Relation': 'Category<>Sub-Category<>Catalog Relation', 'Category<>Sub-Category<>Catalog Relation added': 'Category<>Sub-Category<>Catalog Relation added', 'Category<>Sub-Category<>Catalog Relation deleted': 'Category<>Sub-Category<>Catalog Relation deleted', 'Category<>Sub-Category<>Catalog Relation updated': 'Category<>Sub-Category<>Catalog Relation updated', 'Central point to record details on People': 'Central point to record details on People', 'Change Password': 'Change Password', 'Check for errors in the URL, maybe the address was mistyped.': 'Check for errors in the URL, maybe the address was mistyped.', 'Check if the URL is pointing to a directory instead of a webpage.': 'Ελέγξτε εάν η URL δείχνει προς φάκελο αρχείων αντί ιστοσελίδας', 'Check outbox for the message status': 'Check outbox for the message status', 'Check to delete': 'Check to delete', 'Check to delete:': 'Check to delete:', 'Check-In': 'Check-In', 'Check-Out': 'Check-Out', 'Check-in': 'Check-in', 'Check-out': 'Check-out', 'Checklist': 'Checklist', 'Checklist created': 'Checklist created', 'Checklist deleted': 'Checklist deleted', 'Checklist of Operations': 'Κατάλογος Ενεργειών', 'Checklist updated': 'Checklist updated', 'Chemical Hazard': 'Chemical Hazard', 'Chemical, Biological, Radiological, Nuclear or High-Yield Explosive threat or attack': 'Chemical, Biological, Radiological, Nuclear or High-Yield Explosive threat or attack', 'Chicken': 'Chicken', 'Child': 'Child', 'Child (2-11)': 'Child (2-11)', 'Child (< 18 yrs)': 'Child (< 18 yrs)', 'Child Abduction Emergency': 'Child Abduction Emergency', 'Child headed households (<18 yrs)': 'Νοικοκυριά με αρχηγό παιδί (<18 ετών)', 'Children (2-5 years)': 'Children (2-5 years)', 'Children (5-15 years)': 'Children (5-15 years)', 'Children (< 2 years)': 'Παιδιά (< 2 ετών)', 'Children in adult prisons': 'Children in adult prisons', 'Children in boarding schools': 'Children in boarding schools', 'Children in homes for disabled children': 'Children in homes for disabled children', 'Children in juvenile detention': 'Children in juvenile detention', 'Children in orphanages': 'Children in orphanages', 'Children living on their own (without adults)': 'Children living on their own (without adults)', 'Children not enrolled in new school': 'Children not enrolled in new school', 'Children orphaned by the disaster': 'Children orphaned by the disaster', 'Children separated from their parents/caregivers': 'Children separated from their parents/caregivers', 'Children that have been sent to safe places': 'Children that have been sent to safe places', 'Children who have disappeared since the disaster': 'Children who have disappeared since the disaster', 'Children with chronical illnesses': 'Children with chronical illnesses', 'Chinese 
(Taiwan)': 'Chinese (Taiwan)', 'Cholera Treatment': 'Cholera Treatment', 'Cholera Treatment Capability': 'Cholera Treatment Capability', 'Cholera Treatment Center': 'Cholera Treatment Center', 'Cholera-Treatment-Center': 'Cholera-Treatment-Center', 'Choosing Skill and Resources of Volunteers': 'Choosing Skill and Resources of Volunteers', 'Christian': 'Christian', 'Church': 'Church', 'Circumstances of disappearance, other victims/witnesses who last saw the missing person alive.': 'Circumstances of disappearance, other victims/witnesses who last saw the missing person alive.', 'City': 'City', 'Civil Emergency': 'Civil Emergency', 'Clear Selection': 'Clear Selection', 'Click on the link ': 'Click on the link ', 'Client IP': 'Client IP', 'Clinical Laboratory': 'Clinical Laboratory', 'Clinical Operations': 'Clinical Operations', 'Clinical Status': 'Clinical Status', 'Close map': 'Close map', 'Closed': 'Κλειστό', 'Closure': 'Closure', 'Clothing': 'Clothing', 'Cluster Distance': 'Cluster Distance', 'Cluster Subsector': 'Cluster Subsector', 'Cluster Subsector Details': 'Cluster Subsector Details', 'Cluster Subsector added': 'Cluster Subsector added', 'Cluster Subsector deleted': 'Υπο-τομέας cluster διαγράφηκε', 'Cluster Subsector updated': 'Cluster Subsector updated', 'Cluster Subsectors': 'Cluster Subsectors', 'Cluster Threshold': 'Cluster Threshold', 'Cluster(s)': 'Cluster(s)', 'Code': 'Κωδικός', 'Cold Wave': 'Cold Wave', 'Collective center': 'Collective center', 'Colour for Underline of Subheadings': 'Colour for Underline of Subheadings', 'Colour of Buttons when hovering': 'Colour of Buttons when hovering', 'Colour of bottom of Buttons when not pressed': 'Colour of bottom of Buttons when not pressed', 'Colour of bottom of Buttons when pressed': 'Colour of bottom of Buttons when pressed', 'Colour of dropdown menus': 'Colour of dropdown menus', 'Colour of selected Input fields': 'Colour of selected Input fields', 'Colour of selected menu items': 'Colour of selected menu items', 'Column Choices (One Per Line': 'Επιλογές Στήλης (μία σε κάθε γραμμή', 'Combined Method': 'Combined Method', 'Come back later.': 'Come back later.', 'Come back later. Everyone visiting this site is probably experiencing the same problem as you.': 'Come back later. Everyone visiting this site is probably experiencing the same problem as you.', 'Comments': 'Σχόλια', 'Commiting a changed spreadsheet to the database': 'Committing a changed spreadsheet to the database', 'Communication problems': 'Communication problems', 'Community Centre': 'Community Centre', 'Community Health Center': 'Community Health Center', 'Community Member': 'Μέλος κοινότητας', 'Complete Unit Label for e.g. 
meter for m.': 'Πλήρης περιγραφή (ετικέτα) για μονάδα, για παράδειγμα μέτρα αντί του m.', 'Completed': 'Completed', 'Complexion': 'Complexion', 'Compose': 'Compose', 'Compromised': 'Compromised', 'Config': 'Καθορισμός (config)', 'Config added': 'Config added', 'Config deleted': 'Config deleted', 'Config updated': 'Οι ρυθμίσεις (config) ανανεώθηκαν', 'Configs': 'Configs', 'Configure Run-time Settings': 'Configure Run-time Settings', 'Confirmed': 'Confirmed', 'Confirmed Incidents': 'Confirmed Incidents', 'Conflict Details': 'Λεπτομέρειες σύγκρουσης - διαμάχης', 'Conflict Resolution': 'Conflict Resolution', 'Consumable': 'Αναλώσιμο', 'Contact': 'Contact', 'Contact Data': 'Δεδομένα επικοινωνίας', 'Contact Details': 'Contact Details', 'Contact Information': 'Contact Information', 'Contact Method': 'Contact Method', 'Contact Person': 'Contact Person', 'Contact details': 'Contact details', 'Contact information added': 'Contact information added', 'Contact information deleted': 'Contact information deleted', 'Contact information updated': 'Contact information updated', 'Contact person in case of news or further questions (if different from reporting person). Include telephone number, address and email as available.': 'Contact person in case of news or further questions (if different from reporting person). Include telephone number, address and email as available.', 'Contact person(s) in case of news or further questions (if different from reporting person). Include telephone number, address and email as available.': 'Contact person(s) in case of news or further questions (if different from reporting person). Include telephone number, address and email as available.', 'Contact us': 'Contact us', 'Contacts': 'Contacts', 'Contents': 'Contents', 'Contributor': 'Contributor', 'Conversion Tool': 'Conversion Tool', 'Cooking NFIs': 'Cooking NFIs', 'Cooking Oil': 'Cooking Oil', 'Coordinate Conversion': 'Coordinate Conversion', 'Copy': 'Copy', 'Copy any data from the one to be deleted into the one to keep': 'Copy any data from the one to be deleted into the one to keep', 'Corn': 'Corn', 'Cost Type': 'Cost Type', 'Cost per Megabyte': 'Κόστος ανά Megabyte', 'Cost per Minute': 'Cost per Minute', "Couldn't import tweepy library": "Couldn't import tweepy library", 'Country': 'Country', 'Country of Residence': 'Country of Residence', 'Create & manage Distribution groups to receive Alerts': 'Create & manage Distribution groups to receive Alerts', 'Create Checklist': 'Create Checklist', 'Create Group Entry': 'Create Group Entry', 'Create Impact Assessment': 'Create Impact Assessment', 'Create Import Job': 'Create Import Job', 'Create Mobile Impact Assessment': 'Create Mobile Impact Assessment', 'Create New Import Job': 'Create New Import Job', 'Create Rapid Assessment': 'Create Rapid Assessment', 'Create Request': 'Υποβολή Αιτήματος', 'Create Task': 'Create Task', 'Create a group entry in the registry.': 'Create a group entry in the registry.', 'Create, enter, and manage surveys.': 'Create, enter, and manage surveys.', 'Creation of Surveys': 'Creation of Surveys', 'Crime': 'Έγκλημα', 'Criteria': 'Criteria', 'Currency': 'Currency', 'Current Group Members': 'Τρέχοντα μέλη ομάδος', 'Current Identities': 'Current Identities', 'Current Location': 'Current Location', 'Current Log Entries': 'Current Log Entries', 'Current Memberships': 'Τρέχοντα μέλη', 'Current Notes': 'Current Notes', 'Current Registrations': 'Current Registrations', 'Current Status': 'Current Status', 'Current Team Members': 'Current Team Members', 
'Current Twitter account': 'Τρέχων λογαριασμός Twitter ', 'Current greatest needs of vulnerable groups': 'Current greatest needs of vulnerable groups', 'Current main income sources': 'Current main income sources', 'Current major expenses': 'Current major expenses', 'Current number of patients': 'Current number of patients', 'Current problems, categories': 'Current problems, categories', 'Current problems, details': 'Current problems, details', 'Current request': 'Current request', 'Current response': 'Current response', 'Current session': 'Current session', 'Current type of health problems, adults': 'Current type of health problems, adults', 'Current type of health problems, children': 'Current type of health problems, children', 'Current type of source for drinking water': 'Current type of source for drinking water', 'Current type of source for sanitary water': 'Current type of source for sanitary water', 'Custom Database Resource (e.g., anything defined as a resource in Sahana)': 'Custom Database Resource (e.g., anything defined as a resource in Sahana)', 'Customisable category of aid': 'Παραμετροποιήσιμη κατηγορία βοήθειας.', 'DECISION': 'ΑΠΟΦΑΣΗ', 'DNA Profile': 'DNA Profile', 'DNA Profiling': 'DNA Profiling', 'Dam Overflow': 'Dam Overflow', 'Dangerous Person': 'Επικίνδυνο Άτομο', 'Data uploaded': 'Δεδομένα μεταφορτώθηκαν', 'Database': 'Database', 'Date': 'Date', 'Date & Time': 'Date & Time', 'Date Requested': 'Date Requested', 'Date Required': 'Date Required', 'Date and Time': 'Date and Time', 'Date and Time of Goods receipt. By default shows the current time but can be modified by editing in the drop down list.': 'Ημερομηνία και Ώρα παραλαβής αγαθών. Εξ ορισμού δείχνει την τρέχουσα ώρα και μπορεί να τροποποιηθεί από την drop down λίστα ', 'Date and time this report relates to.': 'Date and time this report relates to.', 'Date of Birth': 'Date of Birth', 'Date of Latest Information on Beneficiaries Reached': 'Date of Latest Information on Beneficiaries Reached', 'Date of Report': 'Date of Report', 'Date/Time': 'Ημερομηνία/Ώρα', 'Date/Time of Find': 'Ημερομηνία / Ώρα Ανεύρεσης', 'Date/Time of disappearance': 'Date/Time of disappearance', 'De-duplicator': 'De-duplicator', 'Dead Body Details': 'Dead Body Details', 'Dead Body Reports': 'Αναφορές νεκρών', 'Deaths in the past 24h': 'Deaths in the past 24h', 'Deaths/24hrs': 'Απώλειες ανά 24ώρο', 'Debug': 'Debug', 'Deceased': 'Deceased', 'Decimal Degrees': 'Δεκαδικοί βαθμοί', 'Decomposed': 'Decomposed', 'Default Height of the map window. In Window layout the map maximises to fill the window, so no need to set a large value here.': 'Default Height of the map window. In Window layout the map maximises to fill the window, so no need to set a large value here.', 'Default Marker': 'Default Marker', 'Default Width of the map window. In Window layout the map maximises to fill the window, so no need to set a large value here.': 'Default Width of the map window. In Window layout the map maximises to fill the window, so no need to set a large value here.', 'Default synchronization policy': 'Default synchronization policy', 'Defaults': 'Defaults', 'Defaults updated': 'Προκαθορισμένες ρυθμίσεις ενημερώθηκαν', 'Defecation area for animals': 'Defecation area for animals', 'Defines the icon used for display of features on handheld GPS.': 'Defines the icon used for display of features on handheld GPS.', 'Defines the icon used for display of features on interactive map & KML exports. 
A Marker assigned to an individual Location is set if there is a need to override the Marker assigned to the Feature Class. If neither are defined, then the Default Marker is used.': 'Defines the icon used for display of features on interactive map & KML exports. A Marker assigned to an individual Location is set if there is a need to override the Marker assigned to the Feature Class. If neither are defined, then the Default Marker is used.', 'Defines the marker used for display & the attributes visible in the popup.': 'Defines the marker used for display & the attributes visible in the popup.', 'Degrees must be a number between -180 and 180': 'Degrees must be a number between -180 and 180', 'Dehydration': 'Dehydration', 'Delete': 'Delete', 'Delete Assessment': 'Delete Assessment', 'Delete Assessment Summary': 'Delete Assessment Summary', 'Delete Baseline': 'Delete Baseline', 'Delete Baseline Type': 'Delete Baseline Type', 'Delete Budget': 'Delete Budget', 'Delete Bundle': 'Delete Bundle', 'Delete Catalog Item': 'Delete Catalog Item', 'Delete Cluster Subsector': 'Delete Cluster Subsector', 'Delete Config': 'Delete Config', 'Delete Distribution': 'Delete Distribution', 'Delete Distribution Item': 'Delete Distribution Item', 'Delete Document': 'Delete Document', 'Delete Donor': 'Delete Donor', 'Delete Entry': 'Delete Entry', 'Delete Feature Class': 'Delete Feature Class', 'Delete Feature Layer': 'Delete Feature Layer', 'Delete Group': 'Delete Group', 'Delete Hospital': 'Delete Hospital', 'Delete Image': 'Delete Image', 'Delete Impact': 'Delete Impact', 'Delete Impact Type': 'Delete Impact Type', 'Delete Incident': 'Delete Incident', 'Delete Incident Report': 'Delete Incident Report', 'Delete Item': 'Διαγραφή αντικειμένου', 'Delete Item Category': 'Delete Item Category', 'Delete Item Packet': 'Delete Item Packet', 'Delete Key': 'Delete Key', 'Delete Kit': 'Delete Kit', 'Delete Layer': 'Διαγραφή επιπέδου', 'Delete Location': 'Διαγραφή τοποθεσίας', 'Delete Marker': 'Delete Marker', 'Delete Membership': 'Delete Membership', 'Delete Message': 'Delete Message', 'Delete Metadata': 'Delete Metadata', 'Delete Need': 'Διαγράψτε την ανάγκη', 'Delete Need Type': 'Διαγραφή τύπων αναγκών', 'Delete Office': 'Delete Office', 'Delete Old': 'Delete Old', 'Delete Organization': 'Delete Organization', 'Delete Peer': 'Delete Peer', 'Delete Person': 'Delete Person', 'Delete Photo': 'Διαγραφή Φωτογραφίας', 'Delete Project': 'Delete Project', 'Delete Projection': 'Delete Projection', 'Delete Rapid Assessment': 'Delete Rapid Assessment', 'Delete Received Item': 'Delete Received Item', 'Delete Received Shipment': 'Delete Received Shipment', 'Delete Record': 'Delete Record', 'Delete Recovery Report': 'Delete Recovery Report', 'Delete Report': 'Delete Report', 'Delete Request': 'Delete Request', 'Delete Request Item': 'Delete Request Item', 'Delete Resource': 'Delete Resource', 'Delete Section': 'Delete Section', 'Delete Sector': 'Delete Sector', 'Delete Sent Item': 'Delete Sent Item', 'Delete Sent Shipment': 'Delete Sent Shipment', 'Delete Service Profile': 'Delete Service Profile', 'Delete Setting': 'Delete Setting', 'Delete Skill': 'Delete Skill', 'Delete Skill Type': 'Delete Skill Type', 'Delete Staff Type': 'Delete Staff Type', 'Delete Status': 'Delete Status', 'Delete Subscription': 'Delete Subscription', 'Delete Survey Answer': 'Delete Survey Answer', 'Delete Survey Question': 'Διαγραφή ερώτησης έρευνας', 'Delete Survey Section': 'Delete Survey Section', 'Delete Survey Series': 'Delete Survey Series', 'Delete 
Survey Template': 'Διαγραφή Προτύπου Αναζήτησης-Έρευνας', 'Delete Unit': 'Διαγραφή μονάδας', 'Delete User': 'Delete User', 'Delete Volunteer': 'Delete Volunteer', 'Delete Warehouse': 'Διαγραφή Αποθήκης', 'Delete Warehouse Item': 'Delete Warehouse Item', 'Delete from Server?': 'Delete from Server?', 'Delivered': 'Delivered', 'Delphi Decision Maker': 'Delphi Decision Maker', 'Demographic': 'Demographic', 'Demonstrations': 'Demonstrations', 'Dental Examination': 'Dental Examination', 'Dental Profile': 'Dental Profile', 'Department/Unit Name': 'Department/Unit Name', 'Deployment': 'Deployment', 'Describe the condition of the roads to your hospital.': 'Describe the condition of the roads to your hospital.', 'Describe the procedure which this record relates to (e.g. "medical examination")': 'Describe the procedure which this record relates to (e.g. "medical examination")', 'Description': 'Περιγραφή', 'Description of Bin Type': 'Description of Bin Type', 'Description of Contacts': 'Description of Contacts', 'Description of defecation area': 'Description of defecation area', 'Description of drinking water source': 'Description of drinking water source', 'Description of sanitary water source': 'Description of sanitary water source', 'Description of water source before the disaster': 'Description of water source before the disaster', 'Descriptive Text (e.g., Prose, etc)': 'Descriptive Text (e.g., Prose, etc)', 'Designated for': 'Designated for', 'Desire to remain with family': 'Desire to remain with family', 'Destination': 'Προορισμός', "Detailed address of the site for informational/logistics purpose. Please note that you can add GIS/Mapping data about this site in the 'Location' field mentioned below.": "Detailed address of the site for informational/logistics purpose. Please note that you can add GIS/Mapping data about this site in the 'Location' field mentioned below.", 'Details': 'Details', 'Dialysis': 'Dialysis', 'Diarrhea': 'Diarrhea', 'Diarrhea among children under 5': 'Diarrhea among children under 5', 'Dignitary Visit': 'Dignitary Visit', 'Dimensions of the storage bin. Input in the following format 1 x 2 x 3 for width x depth x height followed by choosing the unit from the drop down list.': 'Dimensions of the storage bin. Input in the following format 1 x 2 x 3 for width x depth x height followed by choosing the unit from the drop down list.', 'Dimensions of the storage location. Input in the following format 1 x 2 x 3 for width x depth x height followed by choosing the unit from the drop down list.': 'Dimensions of the storage location. 
Input in the following format 1 x 2 x 3 for width x depth x height followed by choosing the unit from the drop down list.', 'Direction': 'Κατεύθυνση', 'Disabled': 'Disabled', 'Disabled participating in coping activities': 'Disabled participating in coping activities', 'Disabled?': 'Disabled?', 'Disaster Victim Identification': 'Disaster Victim Identification', 'Disaster Victim Registry': 'Disaster Victim Registry', 'Disaster clean-up/repairs': 'Disaster clean-up/repairs', 'Discharge (cusecs)': 'Discharge (cusecs)', 'Discharges/24hrs': 'Discharges/24hrs', 'Discussion Forum': 'Discussion Forum', 'Discussion Forum on item': 'Discussion Forum on item', 'Disease vectors': 'Φορείς ασθενειών', 'Dispatch': 'Διαβίβαση', 'Dispatch Items': 'Dispatch Items', 'Dispensary': 'Ιατρείο', 'Displaced': 'Displaced', 'Displaced Populations': 'Displaced Populations', 'Display Polygons?': 'Display Polygons?', 'Display Routes?': 'Display Routes?', 'Display Tracks?': 'Display Tracks?', 'Display Waypoints?': 'Display Waypoints?', 'Dispose': 'Dispose', 'Dispose Expired/Unusable Items': 'Dispose Expired/Unusable Items', 'Distance between defecation area and water source': 'Distance between defecation area and water source', 'Distance between latrines and temporary shelter in meters': 'Distance between latrines and temporary shelter in meters', 'Distance between shelter and latrines': 'Distance between shelter and latrines', 'Distance(Kms)': 'Distance(Kms)', 'Distribution': 'Distribution', 'Distribution Details': 'Distribution Details', 'Distribution Item': 'Αντικείμενο για διανομή', 'Distribution Item Details': 'Distribution Item Details', 'Distribution Item added': 'Distribution Item added', 'Distribution Item deleted': 'Distribution Item deleted', 'Distribution Item updated': 'Distribution Item updated', 'Distribution Items': 'Distribution Items', 'Distribution added': 'Distribution added', 'Distribution deleted': 'Distribution deleted', 'Distribution groups': 'Distribution groups', 'Distribution updated': 'Distribution updated', 'Distributions': 'Distributions', 'District': 'Περιοχή', 'Do adolescent and youth in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)': 'Do adolescent and youth in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)', 'Do households each have at least 2 containers (10-20 litres each) to hold water?': 'Do households each have at least 2 containers (10-20 litres each) to hold water?', 'Do households have appropriate equipment and materials to cook their food (stove, pots, dished plates, and a mug/drinking vessel, etc)?': 'Do households have appropriate equipment and materials to cook their food (stove, pots, dished plates, and a mug/drinking vessel, etc)?', 'Do households have bedding materials available (tarps, plastic mats, blankets)?': 'Do households have bedding materials available (tarps, plastic mats, blankets)?', 'Do households have household water storage containers?': 'Do households have household water storage containers?', 'Do minority members in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)': 'Do minority members in your community participate in activities that help them cope with the disaster? (ex. 
meetings, religious activities, volunteer in the community clean-up, etc)', 'Do older people in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)': 'Do older people in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)', 'Do people have at least 2 full sets of clothing (shirts, pants/sarong, underwear)?': 'Do people have at least 2 full sets of clothing (shirts, pants/sarong, underwear)?', 'Do people have reliable access to sufficient sanitation/hygiene items (bathing soap, laundry soap, shampoo, toothpaste and toothbrush)?': 'Do people have reliable access to sufficient sanitation/hygiene items (bathing soap, laundry soap, shampoo, toothpaste and toothbrush)?', 'Do people with disabilities in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)': 'Do people with disabilities in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)', 'Do women and girls have easy access to sanitary materials?': 'Do women and girls have easy access to sanitary materials?', 'Do women in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)': 'Do women in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)', 'Do you have access to cash to restart your business?': 'Do you have access to cash to restart your business?', 'Do you know of any incidents of violence?': 'Do you know of any incidents of violence?', 'Do you know of children living on their own (without adults)?': 'Do you know of children living on their own (without adults)?', 'Do you know of children separated from their parents or caregivers?': 'Do you know of children separated from their parents or caregivers?', 'Do you know of children that have been orphaned by the disaster?': 'Do you know of children that have been orphaned by the disaster?', 'Do you know of children that have been sent to safe places?': 'Do you know of children that have been sent to safe places?', 'Do you know of children that have disappeared without explanation in the period since the disaster?': 'Do you know of children that have disappeared without explanation in the period since the disaster?', 'Do you know of older people who are primary caregivers of children?': 'Do you know of older people who are primary caregivers of children?', 'Do you know of parents/caregivers missing children?': 'Do you know of parents/caregivers missing children?', 'Do you really want to delete these records?': 'Do you really want to delete these records?', 'Do you want to over-write the file metadata with new default values?': 'Θέλετε να αντικαταστήσετε τα μεταδεδομένα του αρχείου με τις νέες προεπιλεγμένες τιμές;', 'Do you want to receive this shipment?': 'Do you want to receive this shipment?', 'Do you want to send this shipment?': 'Do you want to send this shipment?', 'Document': 'Document', 'Document Details': 'Λεπτομέρειες Εγγράφου', 'Document Scan': 'Document Scan', 'Document added': 'Document added', 'Document deleted': 'Document deleted', 'Document updated': 
'Document updated', 'Documents': 'Documents', 'Documents and Photos': 'Documents and Photos', 'Does this facility provide a cholera treatment center?': 'Does this facility provide a cholera treatment center?', 'Doing nothing (no structured activity)': 'Καμία ενέργεια (μη δομημένη ενέργεια)', 'Dollars': 'Dollars', 'Domestic chores': '"Οικιακές" μικροεργασίες', 'Donation Phone #': 'Donation Phone #', 'Donor': 'Donor', 'Donor Details': 'Donor Details', 'Donor added': 'Ο δωρητής προστέθηκε', 'Donor deleted': 'Donor deleted', 'Donor updated': 'Ο δωρητής ενημερώθηκε', 'Donors': 'Donors', 'Donors Report': 'Αναφορά Δωρητών', 'Door frame': 'Door frame', 'Draft': 'Πρόχειρο', 'Drainage': 'Drainage', 'Drawing up a Budget for Staff & Equipment across various Locations.': 'Drawing up a Budget for Staff & Equipment across various Locations.', 'Drill Down by Group': 'Drill Down by Group', 'Drill Down by Incident': 'Drill Down by Incident', 'Drill Down by Shelter': 'Drill Down by Shelter', 'Driving License': 'Driving License', 'Drought': 'Drought', 'Drugs': 'Drugs', 'Dug Well': 'Dug Well', 'Duplicate?': 'Duplicate?', 'Duration': 'Διάρκεια', 'Dust Storm': 'Dust Storm', 'Dwellings': 'Dwellings', 'E-mail': 'E-mail', 'EMS Reason': 'EMS Reason', 'EMS Status': 'Κατάσταση EMS', 'ER Status': 'ER Status', 'ER Status Reason': 'ER Status Reason', 'Early Recovery': 'Έγκαιρη Αποκατάσταση', 'Earthquake': 'Earthquake', 'Easy access to sanitation items for women/girls': 'Easy access to sanitation items for women/girls', 'Edit': 'Edit', 'Edit Activity': 'Edit Activity', 'Edit Address': 'Edit Address', 'Edit Application': 'Edit Application', 'Edit Assessment': 'Edit Assessment', 'Edit Assessment Summary': 'Edit Assessment Summary', 'Edit Baseline': 'Edit Baseline', 'Edit Baseline Type': 'Edit Baseline Type', 'Edit Budget': 'Edit Budget', 'Edit Bundle': 'Edit Bundle', 'Edit Catalog Item': 'Edit Catalog Item', 'Edit Category<>Sub-Category<>Catalog Relation': 'Edit Category<>Sub-Category<>Catalog Relation', 'Edit Cluster Subsector': 'Edit Cluster Subsector', 'Edit Config': 'Edit Config', 'Edit Contact': 'Edit Contact', 'Edit Contact Information': 'Edit Contact Information', 'Edit Contents': 'Edit Contents', 'Edit Defaults': 'Edit Defaults', 'Edit Description': 'Edit Description', 'Edit Details': 'Επεξεργασία λεπτομερειών', 'Edit Disaster Victims': 'Edit Disaster Victims', 'Edit Distribution': 'Επεξεργασία Διανομής', 'Edit Distribution Item': 'Edit Distribution Item', 'Edit Document': 'Επεξεργασία εγγράφου', 'Edit Donor': 'Edit Donor', 'Edit Email Settings': 'Edit Email Settings', 'Edit Feature Class': 'Edit Feature Class', 'Edit Feature Layer': 'Edit Feature Layer', 'Edit Flood Report': 'Edit Flood Report', 'Edit Gateway Settings': 'Edit Gateway Settings', 'Edit Group': 'Edit Group', 'Edit Hospital': 'Edit Hospital', 'Edit Identification Report': 'Επεξεργασία Έκθεσης Ταυτοποίησης', 'Edit Identity': 'Edit Identity', 'Edit Image': 'Edit Image', 'Edit Image Details': 'Edit Image Details', 'Edit Impact': 'Επεξεργασία επιπτώσεων', 'Edit Impact Type': 'Edit Impact Type', 'Edit Incident': 'Επεξεργασία Συμβάντος', 'Edit Incident Report': 'Edit Incident Report', 'Edit Item': 'Edit Item', 'Edit Item Catalog': 'Edit Item Catalog', 'Edit Item Catalog Categories': 'Επεξεργασία κατηγοριών καταλόγου αντικειμένων', 'Edit Item Category': 'Edit Item Category', 'Edit Item Packet': 'Edit Item Packet', 'Edit Item Sub-Categories': 'Edit Item Sub-Categories', 'Edit Key': 'Επεξεργασία κλειδιού', 'Edit Kit': 'Edit Kit', 'Edit Layer': 'Edit Layer', 'Edit 
Location': 'Edit Location', 'Edit Log Entry': 'Edit Log Entry', 'Edit Map Services': 'Edit Map Services', 'Edit Marker': 'Edit Marker', 'Edit Membership': 'Edit Membership', 'Edit Message': 'Edit Message', 'Edit Messaging Settings': 'Edit Messaging Settings', 'Edit Metadata': 'Επεξεργασία μεταδεδομένων', 'Edit Modem Settings': 'Edit Modem Settings', 'Edit Need': 'Edit Need', 'Edit Need Type': 'Edit Need Type', 'Edit Note': 'Edit Note', 'Edit Office': 'Edit Office', 'Edit Options': 'Edit Options', 'Edit Organization': 'Edit Organization', 'Edit Parameters': 'Edit Parameters', 'Edit Peer Details': 'Επεξεργασία λεπτομερειών του Peer', 'Edit Person Details': 'Edit Person Details', 'Edit Personal Effects Details': 'Edit Personal Effects Details', 'Edit Photo': 'Edit Photo', 'Edit Position': 'Edit Position', 'Edit Problem': 'Επεξεργασία Προβλήματος', 'Edit Project': 'Edit Project', 'Edit Projection': 'Edit Projection', 'Edit Rapid Assessment': 'Edit Rapid Assessment', 'Edit Received Item': 'Edit Received Item', 'Edit Received Shipment': 'Edit Received Shipment', 'Edit Record': 'Edit Record', 'Edit Recovery Details': 'Επεξεργασία Λεπτομερειών Ανάκτησης', 'Edit Registration': 'Edit Registration', 'Edit Registration Details': 'Edit Registration Details', 'Edit Report': 'Επεξεργασία Αναφοράς', 'Edit Request': 'Επεξεργασία Αίτησης', 'Edit Request Item': 'Edit Request Item', 'Edit Resource': 'Edit Resource', 'Edit Response': 'Επεξεργασία Απάντησης-Ανταπόκρισης', 'Edit River': 'Edit River', 'Edit Role': 'Edit Role', 'Edit Sector': 'Edit Sector', 'Edit Sent Item': 'Edit Sent Item', 'Edit Sent Shipment': 'Edit Sent Shipment', 'Edit Setting': 'Επεξεργασία Ρύθμισης', 'Edit Settings': 'Edit Settings', 'Edit Shelter': 'Επεξεργασία καταλύματος', 'Edit Shelter Service': 'Επεξεργασία Υπηρεσιών Καταυλισμών', 'Edit Shelter Type': 'Edit Shelter Type', 'Edit Shipment Transit Log': 'Edit Shipment Transit Log', 'Edit Shipment to Send': 'Edit Shipment to Send', 'Edit Shipment/Way Bills': 'Edit Shipment/Way Bills', 'Edit Shipment<>Item Relation': 'Edit Shipment<>Item Relation', 'Edit Site': 'Edit Site', 'Edit Skill': 'Επεξεργασία προσόντων', 'Edit Skill Type': 'Edit Skill Type', 'Edit Solution': 'Edit Solution', 'Edit Staff': 'Edit Staff', 'Edit Staff Type': 'Edit Staff Type', 'Edit Storage Bin Type(s)': 'Edit Storage Bin Type(s)', 'Edit Storage Bins': 'Edit Storage Bins', 'Edit Storage Location': 'Edit Storage Location', 'Edit Subscription': 'Edit Subscription', 'Edit Survey Answer': 'Επεξεργασία απαντήσεων έρευνας', 'Edit Survey Question': 'Edit Survey Question', 'Edit Survey Section': 'Edit Survey Section', 'Edit Survey Series': 'Edit Survey Series', 'Edit Survey Template': 'Edit Survey Template', 'Edit Task': 'Edit Task', 'Edit Team': 'Edit Team', 'Edit Theme': 'Edit Theme', 'Edit Themes': 'Edit Themes', 'Edit Ticket': 'Edit Ticket', 'Edit Track': 'Edit Track', 'Edit Tropo Settings': 'Edit Tropo Settings', 'Edit Unit': 'Edit Unit', 'Edit User': 'Edit User', 'Edit Volunteer Details': 'Edit Volunteer Details', 'Edit Volunteer Registration': 'Edit Volunteer Registration', 'Edit Warehouse': 'Edit Warehouse', 'Edit Warehouse Item': 'Edit Warehouse Item', 'Edit current record': 'Επεξεργασία τρέχουσας εγγραφής', 'Edit message': 'Edit message', 'Edit the Application': 'Επεξεργασία της εφαρμογής', 'Editable?': 'Editable?', 'Education': 'Education', 'Education materials received': 'Λήφθησαν εκπαιδευτικά υλικά', 'Education materials, source': 'Εκπαιδευτικά υλικά, από πού προέρχονται', 'Effects Inventory': 'Effects Inventory', 
'Eggs': 'Eggs', 'Either a shelter or a location must be specified': 'Either a shelter or a location must be specified', 'Either file upload or document URL required.': 'Either file upload or document URL required.', 'Either file upload or image URL required.': 'Απαιτείται είτε μεταφόρτωση αρχείου είτε URL εικόνας', 'Elderly person headed households (>60 yrs)': 'Elderly person headed households (>60 yrs)', 'Electrical': 'Electrical', 'Elevated': 'Αυξημένο', 'Email': 'Email', 'Email Settings': 'Email Settings', 'Email address verified, however registration is still pending approval - please wait until confirmation received.': 'Email address verified, however registration is still pending approval - please wait until confirmation received.', 'Email settings updated': 'Email settings updated', 'Embalming': 'Βαλσάμωμα', 'Embassy': 'Embassy', 'Emergency Capacity Building project': 'Emergency Capacity Building project', 'Emergency Department': 'Τμήμα Πρώτων Βοηθειών', 'Emergency Shelter': 'Καταφύγιο Εκτάκτου Ανάγκης', 'Emergency Support Facility': 'Emergency Support Facility', 'Emergency Support Service': 'Emergency Support Service', 'Emergency Telecommunications': 'Emergency Telecommunications', 'Enable/Disable Layers': 'Ενεργοποίηση/Απενεργοποίηση επιπέδων', 'Enabled': 'Enabled', 'End date': 'Ημερομηνία Τέλους', 'End date should be after start date': 'End date should be after start date', 'End of Period': 'End of Period', 'English': 'English', 'Enter Coordinates:': 'Εισάγετε συντεταγμένες:', 'Enter a GPS Coord': 'Enter a GPS Coord', 'Enter a date before': 'Enter a date before', 'Enter a location': 'Enter a location', 'Enter a name for the spreadsheet you are uploading (mandatory).': 'Enter a name for the spreadsheet you are uploading (mandatory).', 'Enter a new support request.': 'Enter a new support request.', 'Enter a summary of the request here.': 'Εισάγετε περίληψη του αιτήματος εδώ.', 'Enter a unique label!': 'Enter a unique label!', 'Enter a valid email': 'Enter a valid email', 'Enter tags separated by commas.': 'Enter tags separated by commas.', 'Enter the same password as above': 'Enter the same password as above', 'Enter your firstname': 'Enter your firstname', 'Entering a phone number is optional, but doing so allows you to subscribe to receive SMS messages.': 'Η εισαγωγή ενός τηλεφωνικού αριθμού είναι προαιρετική, αλλά αυτό σας επιτρέπει να εγγραφείτε για να λαμβάνετε μηνύματα SMS.', 'Entry deleted': 'Entry deleted', 'Equipment': 'Equipment', 'Error encountered while applying the theme.': 'Error encountered while applying the theme.', 'Error in message': 'Error in message', 'Error logs for "%(app)s"': 'Error logs for "%(app)s"', 'Errors': 'Σφάλματα', 'Estimated # of households who are affected by the emergency': 'Εκτιμώμενος αριθμός των νοικοκυριών που πλήττονται από την κατάσταση έκτακτης ανάγκης', 'Estimated # of people who are affected by the emergency': 'Estimated # of people who are affected by the emergency', 'Estimated total number of people in institutions': 'Estimated total number of people in institutions', 'Euros': 'Ευρώ', 'Evacuating': 'Evacuating', 'Evaluate the information in this message. (This value SHOULD NOT be used in public warning applications.)': 'Αξιολογήστε τις πληροφορίες σε αυτό το μήνυμα. 
(Η τιμή/αξιολόγηση δεν πρέπει να χρησιμοποιηθέι σε εφαρμογές δημόσιας προειδοποίησης)', 'Event Time': 'Event Time', 'Event Type': 'Τύπος Συμβάντος', 'Event type': 'Event type', 'Example': 'Example', 'Exceeded': 'Exceeded', 'Excreta disposal': 'Excreta disposal', 'Execute a pre-planned activity identified in <instruction>': 'Execute a pre-planned activity identified in <instruction>', 'Existing food stocks, main dishes': 'Existing food stocks, main dishes', 'Existing food stocks, side dishes': 'Existing food stocks, side dishes', 'Expected In': 'Expected In', 'Expected Out': 'Ανεμένται να είναι εκτός', 'Expiry Time': 'Expiry Time', 'Explosive Hazard': 'Explosive Hazard', 'Export': 'Export', 'Export Data': 'Export Data', 'Export Database as CSV': 'Export Database as CSV', 'Export in GPX format': 'Export in GPX format', 'Export in KML format': 'Export in KML format', 'Export in OSM format': 'Export in OSM format', 'Export in PDF format': 'Export in PDF format', 'Export in RSS format': 'Export in RSS format', 'Export in XLS format': 'Export in XLS format', 'Eye Color': 'Χρώμα ματιών', 'Facebook': 'Facebook', 'Facial hair, color': 'Facial hair, color', 'Facial hair, type': 'Facial hair, type', 'Facial hear, length': 'Facial hear, length', 'Facility Operations': 'Facility Operations', 'Facility Status': 'Facility Status', 'Facility Type': 'Facility Type', 'Factors affecting school attendance': 'Factors affecting school attendance', 'Failed!': 'Failed!', 'Falling Object Hazard': 'Falling Object Hazard', 'Families/HH': 'Families/HH', 'Family': 'Family', 'Family tarpaulins received': 'Family tarpaulins received', 'Family tarpaulins, source': 'Family tarpaulins, source', 'Family/friends': 'Family/friends', 'Farmland/fishing material assistance, Rank': 'Farmland/fishing material assistance, Rank', 'Fax': 'Fax', 'Feature Class': 'Feature Class', 'Feature Class Details': 'Feature Class Details', 'Feature Class added': 'Feature Class added', 'Feature Class deleted': 'Feature Class deleted', 'Feature Class updated': 'Feature Class updated', 'Feature Classes': 'Κλάσεις Χαρακτηριστικών', 'Feature Classes are collections of Locations (Features) of the same type': 'Οι κλάσεις χαρακτηριστικών είναι σύνολα θέσεων (Χαρακτριστικών) του ίδιου τύπου', 'Feature Layer Details': 'Feature Layer Details', 'Feature Layer added': 'Feature Layer added', 'Feature Layer deleted': 'Feature Layer deleted', 'Feature Layer updated': 'Επίπεδο χαρακτηριστικών αναβαθμίστηκε', 'Feature Layers': 'Feature Layers', 'Feature Namespace': 'Feature Namespace', 'Feature Type': 'Τύπος Χαρακτηριστικού', 'Features Include': 'Features Include', 'Female': 'Female', 'Female headed households': 'Female headed households', 'Few': 'Ελάχιστα', 'Field Hospital': 'Field Hospital', 'File': 'File', 'Fill in Latitude': 'Fill in Latitude', 'Fill in Longitude': 'Fill in Longitude', 'Filter': 'Filter', 'Filter Field': 'Filter Field', 'Filter Value': 'Filter Value', 'Filtered search of aid pledges and requests': 'Filtered search of aid pledges and requests', 'Find': 'Βρες / Αναζήτησε', 'Find Dead Body Report': 'Find Dead Body Report', 'Find Recovery Report': 'Find Recovery Report', 'Find Volunteers': 'Find Volunteers', 'Find by Name': 'Find by Name', 'Finder': 'Finder', 'Fingerprint': 'Fingerprint', 'Fingerprinting': 'Δακτυλικά αποτυπώματα', 'Fingerprints': 'Fingerprints', 'Finish': 'Finish', 'Finished Jobs': 'Finished Jobs', 'Fire': 'Fire', 'Fire suppression and rescue': 'Fire suppression and rescue', 'First Name': 'First Name', 'First name': 'Κυρίως 
όνομα', 'Fishing': 'Fishing', 'Flash Flood': 'Flash Flood', 'Flash Freeze': 'Flash Freeze', 'Fleet Management': 'Fleet Management', 'Flexible Impact Assessments': 'Flexible Impact Assessments', 'Flood': 'Πλημμύρα', 'Flood Alerts': 'Flood Alerts', 'Flood Alerts show water levels in various parts of the country': 'Flood Alerts show water levels in various parts of the country', 'Flood Report': 'Flood Report', 'Flood Report Details': 'Λεπτομέρειες αναφοράς πλυμμήρας', 'Flood Report added': 'Flood Report added', 'Flood Report deleted': 'Flood Report deleted', 'Flood Report updated': 'Flood Report updated', 'Flood Reports': 'Flood Reports', 'Flow Status': 'Flow Status', 'Focal Point': 'Σημείο Εστίασης', 'Fog': 'Fog', 'Food': 'Food', 'Food Supply': 'Προμήθεια τροφίμων', 'Food assistance available/expected': 'Food assistance available/expected', 'Footer': 'Footer', 'Footer file %s missing!': 'Footer file %s missing!', 'For POP-3 this is usually 110 (995 for SSL), for IMAP this is usually 143 (993 for IMAP).': 'For POP-3 this is usually 110 (995 for SSL), for IMAP this is usually 143 (993 for IMAP).', 'For a country this would be the ISO2 code, for a Town, it would be the Airport Locode.': 'For a country this would be the ISO2 code, for a Town, it would be the Airport Locode.', 'For each sync partner, there is a default sync job that runs after a specified interval of time. You can also set up more sync jobs which could be customized on your needs. Click the link on the right to get started.': 'Για κάθε συγχρονισμό μεταξύ συνεργατών, υπάρχει μια προεπιλεγμένη διεργασία συγχρονισμού που τρέχει μετά από ένα ορισμένο χρονικό διάστημα. Μπορείτε επίσης να δημιουργήσετε περισσότερες διεργασίες συγχρονισμού η οποίες θα μπορούσαν να προσαρμοστούν στις ανάγκες σας. 
Κάντε κλικ στο σύνδεσμο δεξιά για να ξεκινήσετε.', 'For enhanced security, you are recommended to enter a username and password, and notify administrators of other machines in your organization to add this username and password against your UUID in Synchronization -> Sync Partners': 'For enhanced security, you are recommended to enter a username and password, and notify administrators of other machines in your organization to add this username and password against your UUID in Synchronization -> Sync Partners', 'For live help from the Sahana community on using this application, go to': 'For live help from the Sahana community on using this application, go to', 'For messages that support alert network internal functions': 'For messages that support alert network internal functions', 'For more details on the Sahana Eden system, see the': 'For more details on the Sahana Eden system, see the', 'For more information, see ': 'For more information, see ', 'For:': 'For:', 'Forest Fire': 'Forest Fire', 'Formal camp': 'Κανονικό Στρατόπεδο', 'Format': 'Μορφότυπος - Δομή', 'Forms': 'Forms', 'Found': 'Found', 'Freezing Drizzle': 'Freezing Drizzle', 'Freezing Rain': 'Freezing Rain', 'Freezing Spray': 'Freezing Spray', 'French': 'French', 'Friday': 'Friday', 'From Location': 'From Location', 'From Warehouse': 'From Warehouse', 'Frost': 'Frost', 'Full': 'Full', 'Full beard': 'Γενειοφόρος', 'Fullscreen Map': 'Fullscreen Map', 'Functional Tests': 'Functional Tests', 'Functions available': 'Functions available', 'Funding Organization': 'Funding Organization', 'Funeral': 'Funeral', 'GIS Reports of Shelter': 'GIS Reports of Shelter', 'GIS integration to view location details of the Shelter': 'GIS integration to view location details of the Shelter', 'GPS': 'GPS', 'GPS Marker': 'Δείκτης GPS', 'GPS Track': 'GPS Track', 'GPS Track File': 'GPS Track File', 'GPX Track': 'GPX Track', 'Gale Wind': 'Gale Wind', 'Gap Analysis': 'Gap Analysis', 'Gap Analysis Map': 'Gap Analysis Map', 'Gap Analysis Report': 'Gap Analysis Report', 'Gap Map': 'Gap Map', 'Gap Report': 'Gap Report', 'Gateway Settings': 'Gateway Settings', 'Gateway settings updated': 'Gateway settings updated', 'Gender': 'Gender', 'General Medical/Surgical': 'General Medical/Surgical', 'General emergency and public safety': 'General emergency and public safety', 'Generator': 'Δημιουργός', 'Geocoder Selection': 'Geocoder Selection', 'Geometry Name': 'Geometry Name', 'Geophysical (inc. landslide)': 'Geophysical (inc. landslide)', 'Geraldo module not available within the running Python - this needs installing for PDF output!': 'Geraldo module not available within the running Python - this needs installing for PDF output!', 'Girls 13-18 yrs in affected area': 'Girls 13-18 yrs in affected area', 'Girls 13-18 yrs not attending school': 'Girls 13-18 yrs not attending school', 'Girls 6-12 yrs in affected area': 'Girls 6-12 yrs in affected area', 'Girls 6-12 yrs not attending school': 'Girls 6-12 yrs not attending school', 'Give a brief description of the image, e.g. what can be seen where on the picture (optional).': 'Give a brief description of the image, e.g. 
what can be seen where on the picture (optional).', 'Give information about where and when you have seen the person': 'Give information about where and when you have seen the person', 'Give information about where and when you have seen them': 'Give information about where and when you have seen them', 'Global Messaging Settings': 'Γενικές Ρυθμίσεις Μηνυμάτων', 'Goatee': 'Goatee', 'Government': 'Government', 'Government UID': 'Government UID', 'Government building': 'Government building', 'Grade': 'Grade', 'Greek': 'Greek', 'Group': 'Group', 'Group Details': 'Group Details', 'Group ID': 'Group ID', 'Group Member added': 'Group Member added', 'Group Members': 'Group Members', 'Group Memberships': 'Group Memberships', 'Group Title': 'Group Title', 'Group Type': 'Τύπος ομάδας', 'Group added': 'Ομάδα προστέθηκε', 'Group deleted': 'Group deleted', 'Group description': 'Group description', 'Group name': 'Group name', 'Group type': 'Τύπος Ομάδας', 'Group updated': 'Group updated', 'Groups': 'Groups', 'Groups removed': 'Groups removed', 'Guest': 'Guest', 'Hail': 'Hail', 'Hair Color': 'Hair Color', 'Hair Length': 'Hair Length', 'Hair Style': 'Τύπος μαλλιών', 'Has data from this Reference Document been entered into Sahana?': 'Has data from this Reference Document been entered into Sahana?', 'Has the safety and security of women and children in your community changed since the emergency?': 'Has the safety and security of women and children in your community changed since the emergency?', 'Has your business been damaged in the course of the disaster?': 'Has your business been damaged in the course of the disaster?', 'Have households received any shelter/NFI assistance or is assistance expected in the coming days?': 'Have households received any shelter/NFI assistance or is assistance expected in the coming days?', 'Have normal food sources been disrupted?': 'Have normal food sources been disrupted?', 'Have schools received or are expecting to receive any assistance?': 'Have schools received or are expecting to receive any assistance?', 'Have the people received or are you expecting any medical or food assistance in the coming days?': 'Have the people received or are you expecting any medical or food assistance in the coming days?', 'Hazard Pay': 'Hazard Pay', 'Hazardous Material': 'Hazardous Material', 'Hazardous Road Conditions': 'Hazardous Road Conditions', 'Header Background': 'Header Background', 'Header background file %s missing!': 'Header background file %s missing!', 'Headquarters': 'Headquarters', 'Health': 'Health', 'Health care assistance, Rank': 'Health care assistance, Rank', 'Health center': 'Κέντρο Υγείας', 'Health center with beds': 'Health center with beds', 'Health center without beds': 'Health center without beds', 'Health services functioning prior to disaster': 'Health services functioning prior to disaster', 'Health services functioning since disaster': 'Health services functioning since disaster', 'Healthcare Worker': 'Healthcare Worker', 'Heat Wave': 'Heat Wave', 'Heat and Humidity': 'Heat and Humidity', 'Height': 'Height', 'Height (cm)': 'Height (cm)', 'Help': 'Help', 'Helps to monitor status of hospitals': 'Helps to monitor status of hospitals', 'Helps to report and search for Missing Persons': 'Helps to report and search for Missing Persons', 'Here are the solution items related to the problem.': 'Here are the solution items related to the problem.', 'High': 'High', 'High Water': 'High Water', 'Hindu': 'Hindu', 'History': 'History', 'Hit the back button on your browser to try again.': 'Πατήστε το κουμπί "Πίσω" στον browser σας για να προσπαθήσετε ξανά.',
'Holiday Address': 'Holiday Address', 'Home': 'Home', 'Home Address': 'Home Address', 'Home Country': 'Home Country', 'Home Crime': 'Home Crime', 'Hospital': 'Νοσοκομείο', 'Hospital Details': 'Λεπτομέρειες Νοσοκομείου', 'Hospital Status Report': 'Hospital Status Report', 'Hospital information added': 'Προστέθηκαν πληροφορίες Νοσοκομείων', 'Hospital information deleted': 'Hospital information deleted', 'Hospital information updated': 'Hospital information updated', 'Hospital status assessment.': 'Hospital status assessment.', 'Hospitals': 'Hospitals', 'Hot Spot': 'Θερμό Σημείο', 'Household kits received': 'Household kits received', 'Household kits, source': 'Household kits, source', 'How did boys 13-17yrs spend most of their time prior to the disaster?': 'How did boys 13-17yrs spend most of their time prior to the disaster?', 'How did boys <12yrs spend most of their time prior to the disaster?': 'How did boys <12yrs spend most of their time prior to the disaster?', 'How did boys girls 13-17yrs spend most of their time prior to the disaster?': 'How did girls 13-17yrs spend most of their time prior to the disaster?', 'How did girls <12yrs spend most of their time prior to the disaster?': 'How did girls <12yrs spend most of their time prior to the disaster?', 'How do boys 13-17yrs spend most of their time now?': 'How do boys 13-17yrs spend most of their time now?', 'How do boys <12yrs spend most of their time now?': 'How do boys <12yrs spend most of their time now?', 'How do girls 13-17yrs spend most of their time now?': 'How do girls 13-17yrs spend most of their time now?', 'How do girls <12yrs spend most of their time now?': 'How do girls <12yrs spend most of their time now?', 'How does it work?': 'How does it work?', 'How is this person affected by the disaster? (Select all that apply)': 'How is this person affected by the disaster? (Select all that apply)', 'How long does it take you to reach the available water resources? Specify the time required to go there and back, including queuing time, by foot.': 'How long does it take you to reach the available water resources?
Specify the time required to go there and back, including queuing time, by foot.', 'How long does it take you to walk to the health service?': 'How long does it take you to walk to the health service?', 'How long will the food last?': 'How long will the food last?', 'How long will this water resource last?': 'How long will this water resource last?', 'How many Boys (0-17 yrs) are Dead due to the crisis': 'Πόσα αγόρια (0-17 ετών) είναι νεκρά εξαιτίας της κρίσης', 'How many Boys (0-17 yrs) are Injured due to the crisis': 'How many Boys (0-17 yrs) are Injured due to the crisis', 'How many Boys (0-17 yrs) are Missing due to the crisis': 'How many Boys (0-17 yrs) are Missing due to the crisis', 'How many Girls (0-17 yrs) are Dead due to the crisis': 'How many Girls (0-17 yrs) are Dead due to the crisis', 'How many Girls (0-17 yrs) are Injured due to the crisis': 'How many Girls (0-17 yrs) are Injured due to the crisis', 'How many Girls (0-17 yrs) are Missing due to the crisis': 'How many Girls (0-17 yrs) are Missing due to the crisis', 'How many Men (18 yrs+) are Dead due to the crisis': 'How many Men (18 yrs+) are Dead due to the crisis', 'How many Men (18 yrs+) are Injured due to the crisis': 'How many Men (18 yrs+) are Injured due to the crisis', 'How many Men (18 yrs+) are Missing due to the crisis': 'How many Men (18 yrs+) are Missing due to the crisis', 'How many Women (18 yrs+) are Dead due to the crisis': 'How many Women (18 yrs+) are Dead due to the crisis', 'How many Women (18 yrs+) are Injured due to the crisis': 'How many Women (18 yrs+) are Injured due to the crisis', 'How many Women (18 yrs+) are Missing due to the crisis': 'How many Women (18 yrs+) are Missing due to the crisis', 'How many days will the supplies last?': 'How many days will the supplies last?', 'How many doctors in the health centers are still actively working?': 'How many doctors in the health centers are still actively working?', 'How many houses are uninhabitable (uninhabitable = foundation and structure destroyed)?': 'How many houses are uninhabitable (uninhabitable = foundation and structure destroyed)?', 'How many houses suffered damage but remain usable (usable = windows broken, cracks in walls, roof slightly damaged)?': 'How many houses suffered damage but remain usable (usable = windows broken, cracks in walls, roof slightly damaged)?', 'How many latrines are available in the village/IDP centre/Camp?': 'How many latrines are available in the village/IDP centre/Camp?', 'How many midwives in the health centers are still actively working?': 'How many midwives in the health centers are still actively working?', 'How many new cases have been admitted to this facility in the past 24h?': 'How many new cases have been admitted to this facility in the past 24h?', 'How many nurses in the health centers are still actively working?': 'How many nurses in the health centers are still actively working?', 'How many of the patients with the disease died in the past 24h at this facility?': 'How many of the patients with the disease died in the past 24h at this facility?', 'How many of the primary school age boys (6-12) in the area are not attending school?': 'How many of the primary school age boys (6-12) in the area are not attending school?', 'How many of the primary school age girls (6-12) in the area are not attending school?': 'How many of the primary school age girls (6-12) in the area are not attending school?', 'How many of the primary/secondary schools are now open and running a regular schedule of class?': 'How 
many of the primary/secondary schools are now open and running a regular schedule of class?', 'How many of the secondary school age boys (13-18) in the area are not attending school?': 'How many of the secondary school age boys (13-18) in the area are not attending school?', 'How many of the secondary school age girls (13-18) in the area are not attending school?': 'How many of the secondary school age girls (13-18) in the area are not attending school?', 'How many patients with the disease are currently hospitalized at this facility?': 'How many patients with the disease are currently hospitalized at this facility?', 'How many primary school age boys (6-12) are in the affected area?': 'How many primary school age boys (6-12) are in the affected area?', 'How many primary school age girls (6-12) are in the affected area?': 'How many primary school age girls (6-12) are in the affected area?', 'How many primary/secondary schools were opening prior to the disaster?': 'How many primary/secondary schools were opening prior to the disaster?', 'How many secondary school age boys (13-18) are in the affected area?': 'How many secondary school age boys (13-18) are in the affected area?', 'How many secondary school age girls (13-18) are in the affected area?': 'How many secondary school age girls (13-18) are in the affected area?', 'How many teachers have been affected by the disaster (affected = unable to work)?': 'How many teachers have been affected by the disaster (affected = unable to work)?', 'How many teachers worked in the schools prior to the disaster?': 'How many teachers worked in the schools prior to the disaster?', 'How much detail is seen. A high Zoom level means lot of detail, but not a wide area. A low Zoom level means seeing a wide area, but not a high level of detail.': 'How much detail is seen. A high Zoom level means lot of detail, but not a wide area. A low Zoom level means seeing a wide area, but not a high level of detail.', 'Humanitarian NGO': 'Humanitarian NGO', 'Hurricane': 'Hurricane', 'Hurricane Force Wind': 'Hurricane Force Wind', 'Hygiene': 'Hygiene', 'Hygiene NFIs': 'Hygiene NFIs', 'Hygiene kits received': 'Hygiene kits received', 'Hygiene kits, source': 'Κιτ προσωπικής υγιεινής, προμηθευτής', 'Hygiene practice': 'Hygiene practice', 'Hygiene problems': 'Hygiene problems', 'ID Label': 'ID Label', 'ID Tag': 'ID Tag', 'ID Tag Number': 'ID Tag Number', 'ID type': 'ID type', 'Ice Pressure': 'Ice Pressure', 'Iceberg': 'Iceberg', 'Identification': 'Identification', 'Identification Report': 'Identification Report', 'Identification Reports': 'Identification Reports', 'Identification Status': 'Identification Status', 'Identification label of the Storage bin.': 'Καρτέλα αναγνώρισης-ταύτισης στο καλάθι αποθήκευσης', 'Identified as': 'Identified as', 'Identified by': 'Identified by', 'Identity': 'Identity', 'Identity Details': 'Identity Details', 'Identity added': 'Identity added', 'Identity deleted': 'Identity deleted', 'Identity updated': 'Identity updated', 'If Unit = m, Base Unit = Km, then multiplicator is 0.0001 since 1m = 0.001 km.': 'If Unit = m, Base Unit = Km, then multiplicator is 0.001 since 1m = 0.001 km.', 'If enabled then a log is maintained of all records a user accesses. If disabled then it can still be enabled on a per-module basis.': 'If enabled then a log is maintained of all records a user accesses. If disabled then it can still be enabled on a per-module basis.', 'If enabled then a log is maintained of all records a user edits.
If disabled then it can still be enabled on a per-module basis.': 'If enabled then a log is maintained of all records a user edits. If disabled then it can still be enabled on a per-module basis.', 'If no marker defined then the system default marker is used': 'If no marker defined then the system default marker is used', 'If no, specify why': 'If no, specify why', 'If the location is a geographic area, then state at what level here.': 'If the location is a geographic area, then state at what level here.', 'If this is set to True then mails will be deleted from the server after downloading.': 'If this is set to True then mails will be deleted from the server after downloading.', 'If this record should be restricted then select which role is required to access the record here.': 'If this record should be restricted then select which role is required to access the record here.', 'If this record should be restricted then select which role(s) are permitted to access the record here.': 'If this record should be restricted then select which role(s) are permitted to access the record here.', "If this setting is enabled then all deleted records are just flagged as deleted instead of being really deleted. They will appear in the raw database access but won't be visible to normal users.": "If this setting is enabled then all deleted records are just flagged as deleted instead of being really deleted. They will appear in the raw database access but won't be visible to normal users.", 'If yes, specify what and by whom': 'If yes, specify what and by whom', 'If yes, which and how': 'If yes, which and how', "If you cannot find the person you want to register as a volunteer, you can add them by clicking 'Add Person' below:": "If you cannot find the person you want to register as a volunteer, you can add them by clicking 'Add Person' below:", "If you cannot find the person you want to report missing, you can add them by clicking 'Add Person' below:": "If you cannot find the person you want to report missing, you can add them by clicking 'Add Person' below:", "If you cannot find the record of the person you want to report missing, you can add it by clicking 'Add Person' below:": "If you cannot find the record of the person you want to report missing, you can add it by clicking 'Add Person' below:", 'If you do not enter a Reference Document, your email will be displayed to allow this data to be verified.': 'If you do not enter a Reference Document, your email will be displayed to allow this data to be verified.', 'If you know what the Geonames ID of this location is then you can enter it here.': 'If you know what the Geonames ID of this location is then you can enter it here.', 'If you know what the OSM ID of this location is then you can enter it here.': 'If you know what the OSM ID of this location is then you can enter it here.', 'If you need to add a new document then you can click here to attach one.': 'If you need to add a new document then you can click here to attach one.', 'If you would like to help, then please': 'If you would like to help, then please', 'Illegal Immigrant': 'Illegal Immigrant', 'Image': 'Image', 'Image Details': 'Image Details', 'Image Tags': 'Image Tags', 'Image Type': 'Image Type', 'Image Upload': 'Image Upload', 'Image added': 'Image added', 'Image deleted': 'Image deleted', 'Image updated': 'Image updated', 'Image/Attachment': 'Image/Attachment', 'Image/Other Attachment': 'Image/Other Attachment', 'Imagery': 'Imagery', 'Images': 'Images', 'Immediate reconstruction assistance, 
Rank': 'Immediate reconstruction assistance, Rank', 'Impact Assessments': 'Impact Assessments', 'Impact Details': 'Λεπτομέρειες Επιπτώσεων', 'Impact Type': 'Impact Type', 'Impact Type Details': 'Impact Type Details', 'Impact Type added': 'Impact Type added', 'Impact Type deleted': 'Impact Type deleted', 'Impact Type updated': 'Impact Type updated', 'Impact Types': 'Impact Types', 'Impact added': 'Impact added', 'Impact deleted': 'Impact deleted', 'Impact updated': 'Ενημέρωση Επιπτώσεων', 'Impacts': 'Impacts', 'Import': 'Import', 'Import & Export Data': 'Εισαγωγή και εξαγωγή δεδομένων', 'Import Data': 'Import Data', 'Import Job': 'Import Job', 'Import Jobs': 'Εργασίες εισαγωγής', 'Import and Export': 'Import and Export', 'Import from Ushahidi Instance': 'Import from Ushahidi Instance', 'Import if Master': 'Εισαγωγή, εάν είστε κύριος.', 'Import job created': 'Import job created', 'Import multiple tables as CSV': 'Εισαγωγή πολλαπλών πινάκων σαν CSV', 'Import/Export': 'Import/Export', 'Important': 'Important', 'Importantly where there are no aid services being provided': 'Importantly where there are no aid services being provided', 'Imported': 'Imported', 'Importing data from spreadsheets': 'Importing data from spreadsheets', 'Improper decontamination': 'Improper decontamination', 'Improper handling of dead bodies': 'Improper handling of dead bodies', 'In GeoServer, this is the Layer Name. Within the WFS getCapabilities, this is the FeatureType Name part after the colon(:).': 'Στον GeoServer, αυτό είναι το όνομα του επιπέδου. Μέσα στο WFS getCapabilities, αυτό είναι το τμήμα FeatureType Name μετά την άνω-κάτω τελεία (:).', 'In GeoServer, this is the Workspace Name. Within the WFS getCapabilities, this is the FeatureType Name part before the colon(:).': 'In GeoServer, this is the Workspace Name.
Within the WFS getCapabilities, this is the FeatureType Name part before the colon(:).', 'In Inventories': 'In Inventories', 'In Process': 'In Process', 'In Progress': 'In Progress', 'In Transit': 'In Transit', 'In general, what are the greatest needs of older people, people with disabilities, children, youth and women in your community?': 'In general, what are the greatest needs of older people, people with disabilities, children, youth and women in your community?', 'Inbound Mail Settings': 'Inbound Mail Settings', 'Incident': 'Συμβάν', 'Incident Categories': 'Incident Categories', 'Incident Details': 'Incident Details', 'Incident Report': 'Incident Report', 'Incident Report Details': 'Incident Report Details', 'Incident Report added': 'Incident Report added', 'Incident Report deleted': 'Incident Report deleted', 'Incident Report updated': 'Incident Report updated', 'Incident Reporting': 'Incident Reporting', 'Incident Reporting System': 'Incident Reporting System', 'Incident Reports': 'Incident Reports', 'Incident added': 'Incident added', 'Incident deleted': 'Incident deleted', 'Incident updated': 'Incident updated', 'Incidents': 'Incidents', 'Incoming': 'Incoming', 'Incomplete': 'Incomplete', 'Individuals': 'Individuals', 'Industrial Crime': 'Industrial Crime', 'Industry Fire': 'Industry Fire', 'Industry close to village/camp': 'Industry close to village/camp', 'Infant (0-1)': 'Infant (0-1)', 'Infectious Disease': 'Infectious Disease', 'Infectious Diseases': 'Infectious Diseases', 'Infestation': 'Infestation', 'Informal Leader': 'Informal Leader', 'Informal camp': 'Informal camp', 'Information gaps': 'Κενά πληροφοριοδότησης', 'Infusion catheters available': 'Infusion catheters available', 'Infusion catheters need per 24h': 'Infusion catheters need per 24h', 'Infusion catheters needed per 24h': 'Καθετήρες έγχυσης που απαιτούνται ανά 24ώρο', 'Infusions available': 'Εγχύσεις / Ενέσεις Διαθέσιμες', 'Infusions needed per 24h': 'Infusions needed per 24h', 'Input Job': 'Input Job', 'Instant Porridge': 'Instant Porridge', "Instead of automatically syncing from other peers over the network, you can also sync from files, which is necessary where there's no network. You can use this page to import sync data from files and also export data to sync files. Click the link on the right to go to this page.": "Instead of automatically syncing from other peers over the network, you can also sync from files, which is necessary where there's no network. You can use this page to import sync data from files and also export data to sync files. 
Click the link on the right to go to this page.", 'Institution': 'Institution', 'Insufficient': 'Insufficient', 'Insufficient vars: Need module, resource, jresource, instance': 'Insufficient vars: Need module, resource, jresource, instance', 'Intake Items': 'Intake Items', 'Intergovernmental Organisation': 'Intergovernmental Organisation', 'Internal State': 'Internal State', 'International NGO': 'International NGO', 'International Organization': 'International Organization', 'Intervention': 'Intervention', 'Interview taking place at': 'Interview taking place at', 'Invalid': 'Invalid', 'Invalid Query': 'Μη έγκυρη ερώτηση / αναζήτηση', 'Invalid email': 'Invalid email', 'Invalid request!': 'Invalid request!', 'Invalid ticket': 'Μη έγκυρο εισιτήριο', 'Inventories with Item': 'Inventories with Item', 'Inventory Management': 'Inventory Management', 'Inventory Store': 'Inventory Store', 'Inventory of Effects': 'Κατάλογος προσωπικών αντικειμένων', 'Inventory/Ledger': 'Inventory/Ledger', 'Is adequate food and water available for these institutions?': 'Is adequate food and water available for these institutions?', 'Is it safe to collect water?': 'Is it safe to collect water?', 'Is there any industrial or agro-chemical production close to the affected area/village?': 'Is there any industrial or agro-chemical production close to the affected area/village?', 'Issuing Authority': 'Issuing Authority', 'Item': 'Item', 'Item Added to Shipment': 'Item Added to Shipment', 'Item Catalog Categories': 'Item Catalog Categories', 'Item Catalog Category': 'Item Catalog Category', 'Item Catalog Category Details': 'Item Catalog Category Details', 'Item Catalog Category added': 'Item Catalog Category added', 'Item Catalog Category deleted': 'Item Catalog Category deleted', 'Item Catalog Category updated': 'Item Catalog Category updated', 'Item Catalog Details': 'Item Catalog Details', 'Item Catalog added': 'Item Catalog added', 'Item Catalog deleted': 'Κατάλογος Αντικειμένων Διαγράφηκε', 'Item Catalog updated': 'Item Catalog updated', 'Item Catalogs': 'Item Catalogs', 'Item Categories': 'Item Categories', 'Item Category': 'Item Category', 'Item Category Details': 'Item Category Details', 'Item Category added': 'Item Category added', 'Item Category deleted': 'Item Category deleted', 'Item Category updated': 'Item Category updated', 'Item Details': 'Item Details', 'Item Packet Details': 'Item Packet Details', 'Item Packet added': 'Item Packet added', 'Item Packet deleted': 'Item Packet deleted', 'Item Packet updated': 'Item Packet updated', 'Item Packets': 'Item Packets', 'Item Sub-Categories': 'Item Sub-Categories', 'Item Sub-Category': 'Item Sub-Category', 'Item Sub-Category Details': 'Item Sub-Category Details', 'Item Sub-Category added': 'Item Sub-Category added', 'Item Sub-Category deleted': 'Item Sub-Category deleted', 'Item Sub-Category updated': 'Υπο-κατηγορία Αντικειμένου ενημερώθηκε', 'Item added': 'Item added', 'Item already in Bundle!': 'Αντικείμενο ήδη σε πακέτο (συσκευασμένο)', 'Item already in Kit!': 'Item already in Kit!', 'Item already in budget!': 'Item already in budget!', 'Item deleted': 'Item deleted', 'Item updated': 'Item updated', 'Items': 'Είδη', 'Items Sent from Warehouse': 'Items Sent from Warehouse', 'Japanese': 'Japanese', 'Jerry can': 'Jerry can', 'Jew': 'Jew', 'Job Title': 'Job Title', 'Jobs': 'Jobs', 'KPIs': 'KPIs', 'Key': 'Κλειδί (key)', 'Key Details': 'Key Details', 'Key added': 'Key added', 'Key deleted': 'Key deleted', 'Key updated': 'Key updated', 'Keys': 'Keys', 'Kit': 'Kit', 'Kit Contents': 'Kit Contents',
'Kit Details': 'Kit Details', 'Kit Updated': 'Kit Updated', 'Kit added': 'Kit added', 'Kit deleted': 'Kit deleted', 'Kit updated': 'Kit updated', 'Kits': 'Kits', 'Known Identities': 'Known Identities', 'Known incidents of violence against women/girls': 'Known incidents of violence against women/girls', 'Known incidents of violence since disaster': 'Known incidents of violence since disaster', 'LICENCE': 'Άδεια Χρήσης', 'LICENSE': 'LICENSE', 'LMS Administration': 'Διαχείριση LMS', 'Label': 'Ετικέτα', 'Lack of material': 'Lack of material', 'Lack of school uniform': 'Lack of school uniform', 'Lack of supplies at school': 'Lack of supplies at school', 'Lack of transport to school': 'Lack of transport to school', 'Lactating women': 'Lactating women', 'Lahar': 'Lahar', 'Landslide': 'Landslide', 'Language': 'Language', 'Last Name': 'Last Name', 'Last known location': 'Last known location', 'Last name': 'Last name', 'Last synchronization time': 'Last synchronization time', 'Last updated by': 'Last updated by', 'Last updated on': 'Τελευταία ενημέρωση στις', 'Latitude': 'Latitude', 'Latitude & Longitude': 'Latitude & Longitude', 'Latitude is North-South (Up-Down). Latitude is zero on the equator and positive in the northern hemisphere and negative in the southern hemisphere.': 'Το Γεωγραφικό Πλάτος είναι από το Βορρά προς το Νότο. Το Γεωγραφικό πλάτος είναι μηδέν στον ισημερινό, θετικό στο Βόρειο ημισφαίριο και αρνητικό στο Νότιο ημισφαίριο', 'Latitude should be between': 'Latitude should be between', 'Law enforcement, military, homeland and local/private security': 'Law enforcement, military, homeland and local/private security', 'Layer': 'Layer', 'Layer Details': 'Layer Details', 'Layer added': 'Layer added', 'Layer deleted': 'Layer deleted', 'Layer updated': 'Layer updated', 'Layers': 'Layers', 'Layers updated': 'Επίπεδα ενημερώθηκαν', 'Layout': 'Layout', 'Legend Format': 'Legend Format', 'Length': 'Μήκος', 'Level': 'Level', "Level is higher than parent's": "Level is higher than parent's", 'Library support not available for OpenID': 'Library support not available for OpenID', 'Line': 'Line', 'Link Item & Shipment': 'Link Item & Shipment', 'Link an Item & Shipment': 'Link an Item & Shipment', 'Linked Records': 'Linked Records', 'Linked records': 'Linked records', 'List': 'List', 'List / Add Baseline Types': 'Λίστα / Προσθήκη Βασικών Τύπων', 'List / Add Impact Types': 'List / Add Impact Types', 'List / Add Services': 'List / Add Services', 'List / Add Types': 'List / Add Types', 'List Activities': 'List Activities', 'List All': 'List All', 'List All Entries': 'List All Entries', 'List All Memberships': 'Κατάλογος όλων των μελών', 'List Assessment Summaries': 'Κατάλογος περιλήψεων εκτιμήσεων', 'List Assessments': 'Κατάλογος Αξιολογήσεων', 'List Baseline Types': 'List Baseline Types', 'List Baselines': 'List Baselines', 'List Budgets': 'List Budgets', 'List Bundles': 'List Bundles', 'List Catalog Items': 'List Catalog Items', 'List Category<>Sub-Category<>Catalog Relation': 'List Category<>Sub-Category<>Catalog Relation', 'List Checklists': 'Κατάλογος Λιστών Ελέγχου', 'List Cluster Subsectors': 'List Cluster Subsectors', 'List Configs': 'List Configs', 'List Conflicts': 'List Conflicts', 'List Contacts': 'List Contacts', 'List Distribution Items': 'List Distribution Items', 'List Distributions': 'List Distributions', 'List Documents': 'List Documents', 'List Donors': 'List Donors', 'List Feature Classes': 'List Feature Classes', 'List Feature Layers': 'List Feature Layers', 'List Flood Reports': 'List Flood Reports',
'List Groups': 'Κατάλογος Ομάδων', 'List Groups/View Members': 'List Groups/View Members', 'List Hospitals': 'List Hospitals', 'List Identities': 'List Identities', 'List Images': 'List Images', 'List Impact Assessments': 'List Impact Assessments', 'List Impact Types': 'List Impact Types', 'List Impacts': 'List Impacts', 'List Incident Reports': 'List Incident Reports', 'List Incidents': 'List Incidents', 'List Item Catalog Categories': 'List Item Catalog Categories', 'List Item Catalogs': 'List Item Catalogs', 'List Item Categories': 'List Item Categories', 'List Item Packets': 'List Item Packets', 'List Item Sub-Categories': 'List Item Sub-Categories', 'List Items': 'List Items', 'List Keys': 'List Keys', 'List Kits': 'Κατάλογος Kits', 'List Layers': 'List Layers', 'List Locations': 'Κατάλογος τοποθεσιών', 'List Log Entries': 'List Log Entries', 'List Markers': 'List Markers', 'List Members': 'List Members', 'List Memberships': 'Κατάλογος μελών', 'List Messages': 'Κατάλογος Μηνυμάτων', 'List Metadata': 'Κατάλογος Μετα-δεδομένων', 'List Missing Persons': 'List Missing Persons', 'List Need Types': 'List Need Types', 'List Needs': 'List Needs', 'List Notes': 'List Notes', 'List Offices': 'List Offices', 'List Organizations': 'List Organizations', 'List Peers': 'List Peers', 'List Personal Effects': 'List Personal Effects', 'List Persons': 'List Persons', 'List Photos': 'List Photos', 'List Positions': 'List Positions', 'List Problems': 'List Problems', 'List Projections': 'List Projections', 'List Projects': 'List Projects', 'List Rapid Assessments': 'List Rapid Assessments', 'List Received Items': 'List Received Items', 'List Received Shipments': 'List Received Shipments', 'List Records': 'List Records', 'List Registrations': 'List Registrations', 'List Reports': 'List Reports', 'List Request Items': 'List Request Items', 'List Requests': 'List Requests', 'List Resources': 'Λίστα Πόρων', 'List Responses': 'List Responses', 'List Rivers': 'List Rivers', 'List Roles': 'Κατάλογος Ρόλων', 'List Sections': 'List Sections', 'List Sector': 'List Sector', 'List Sent Items': 'List Sent Items', 'List Sent Shipments': 'List Sent Shipments', 'List Service Profiles': 'List Service Profiles', 'List Settings': 'List Settings', 'List Shelter Services': 'List Shelter Services', 'List Shelter Types': 'List Shelter Types', 'List Shelters': 'List Shelters', 'List Shipment Transit Logs': 'List Shipment Transit Logs', 'List Shipment/Way Bills': 'Κατάσταση αποστολών αντικειμένων / Τιμολόγια-Λογαριασμοί', 'List Shipment<>Item Relation': 'List Shipment<>Item Relation', 'List Shipments': 'List Shipments', 'List Sites': 'List Sites', 'List Skill Types': 'List Skill Types', 'List Skills': 'Κατάλογος δεξιοτήτων-προσόντων', 'List Solutions': 'List Solutions', 'List Staff': 'Κατάλογος Προσωπικού', 'List Staff Types': 'List Staff Types', 'List Status': 'List Status', 'List Storage Bin Type(s)': 'List Storage Bin Type(s)', 'List Storage Bins': 'List Storage Bins', 'List Storage Location': 'Κατάλογος θέσεων Αποθηκών', 'List Subscriptions': 'Κατάλογος εγγραφών', 'List Survey Answers': 'List Survey Answers', 'List Survey Questions': 'List Survey Questions', 'List Survey Sections': 'List Survey Sections', 'List Survey Series': 'List Survey Series', 'List Survey Templates': 'List Survey Templates', 'List Tasks': 'List Tasks', 'List Teams': 'List Teams', 'List Themes': 'List Themes', 'List Tickets': 'Κατάλογος "εισιτηρίων"', 'List Tracks': 'List Tracks', 'List Units': 'Κατάλογος Μονάδων',
'List Users': 'Κατάλογος Χρηστών', 'List Volunteers': 'List Volunteers', 'List Warehouse Items': 'List Warehouse Items', 'List Warehouses': 'List Warehouses', 'List all': 'List all', 'List of Items': 'List of Items', 'List of Missing Persons': 'List of Missing Persons', 'List of Peers': 'List of Peers', 'List of Reports': 'List of Reports', 'List of Requests': 'List of Requests', 'List of Spreadsheets': 'List of Spreadsheets', 'List of Spreadsheets uploaded': 'List of Spreadsheets uploaded', 'List of Volunteers for this skills set': 'List of Volunteers for this skills set', 'List of addresses': 'List of addresses', 'List unidentified': 'Κατάλογος μη αναγνωρισμένων', 'List/Add': 'Κατάλογος/Προσθήκη', 'Lists "who is doing what & where". Allows relief agencies to coordinate their activities': 'Κατάλογοι "Ποιος κάνει τι και πού". Επιτρέπει στους εμπλεκόμενους φορείς να συντονίζουν τις ενέργειές τους.', 'Live Help': 'Ζωντανή Βοήθεια', 'Livelihood': 'Livelihood', 'Load Cleaned Data into Database': 'Load Cleaned Data into Database', 'Load Details': 'Load Details', 'Load Raw File into Grid': 'Load Raw File into Grid', 'Load the details to help decide which is the best one to keep out of the 2.': 'Load the details to help decide which is the best one to keep out of the 2.', 'Loading': 'Loading', 'Loading Locations...': 'Loading Locations...', 'Local Name': 'Τοπικό Όνομα', 'Local Names': 'Local Names', 'Location': 'Location', 'Location 1': 'Location 1', 'Location 2': 'Location 2', 'Location De-duplicated': 'Location De-duplicated', 'Location Details': 'Location Details', 'Location added': 'Location added', 'Location deleted': 'Τοποθεσία διαγράφηκε', 'Location details': 'Location details', 'Location updated': 'Location updated', 'Location: ': 'Location: ', 'Locations': 'Locations', 'Locations De-duplicator': 'Locations De-duplicator', 'Locations of this level need to have a parent of level': 'Locations of this level need to have a parent of level', 'Locations should be different!': 'Locations should be different!', 'Lockdown': 'Lockdown', 'Log': 'Log', 'Log Entry Details': 'Log Entry Details', 'Log entry added': 'Log entry added', 'Log entry deleted': 'Καταγραφή (Log) διαγράφηκε', 'Log entry updated': 'Ανανεώθηκε εισαγωγή καταγραφής', 'Login': 'Σύνδεση', 'Logistics': 'Logistics', 'Logistics Management': 'Logistics Management', 'Logistics Management System': 'Logistics Management System', 'Logo': 'Λογότυπο', 'Logo file %s missing!': 'Logo file %s missing!', 'Logout': 'Logout', 'Long Text': 'Long Text', 'Longitude': 'Longitude', 'Longitude is West - East (sideways). Latitude is North-South (Up-Down). Latitude is zero on the equator and positive in the northern hemisphere and negative in the southern hemisphere. Longitude is zero on the prime meridian (Greenwich Mean Time) and is positive to the east, across Europe and Asia. Longitude is negative to the west, across the Atlantic and the Americas. These need to be added in Decimal Degrees.': 'Longitude is West - East (sideways). Latitude is North-South (Up-Down). Latitude is zero on the equator and positive in the northern hemisphere and negative in the southern hemisphere. Longitude is zero on the prime meridian (Greenwich Mean Time) and is positive to the east, across Europe and Asia. Longitude is negative to the west, across the Atlantic and the Americas. These need to be added in Decimal Degrees.', 'Longitude is West - East (sideways). Longitude is zero on the prime meridian (Greenwich Mean Time) and is positive to the east, across Europe and Asia. Longitude is negative to the west, across the Atlantic and the Americas.': 'Το Μήκος είναι από Δυτικά προς Ανατολικά. Το Μήκος είναι 0 στον πρώτο μεσημβρινό (Μεσημβρινός του Greenwich) και είναι θετικό προς τα ανατολικά κατά μήκος της Ευρώπης και της Ασίας. Το Μήκος είναι αρνητικό προς τα Δυτικά κατά μήκος του Ατλαντικού και στην Αμερική.',
'Longitude should be between': 'Longitude should be between', 'Looting': 'Looting', 'Lost': 'Lost', 'Lost Password': 'Lost Password', 'Low': 'Low', 'Magnetic Storm': 'Magnetic Storm', 'Main cash source': 'Main cash source', 'Main income sources before disaster': 'Main income sources before disaster', 'Major outward damage': 'Κύρια εξωτερική ζημιά', 'Make Pledge': 'Make Pledge', 'Make Request': 'Make Request', 'Make a Request': 'Make a Request', 'Make a Request for Aid': 'Make a Request for Aid', 'Make preparations per the <instruction>': 'Make preparations per the <instruction>', 'Male': 'Male', 'Malnutrition present prior to disaster': 'Malnutrition present prior to disaster', 'Manage': 'Manage', 'Manage Category': 'Manage Category', 'Manage Item catalog': 'Manage Item catalog', 'Manage Items Catalog': 'Manage Items Catalog', 'Manage Kits': 'Manage Kits', 'Manage Relief Item Catalogue': 'Manage Relief Item Catalogue', 'Manage Sub-Category': 'Manage Sub-Category', 'Manage Users & Roles': 'Manage Users & Roles', 'Manage Warehouses': 'Manage Warehouses', 'Manage Warehouses/Sites': 'Manage Warehouses/Sites', 'Manage requests of hospitals for assistance.': 'Manage requests of hospitals for assistance.', 'Manage volunteers by capturing their skills, availability and allocation': 'Διαχείριση εθελοντών με καταγραφή ικανοτήτων, διαθεσιμότητας & θέσης', 'Manager': 'Manager', 'Managing Office': 'Managing Office', 'Managing, Storing and Distributing Catalog Items.': 'Managing, Storing and Distributing Catalog Items.', 'Managing, Storing and Distributing Items.': 'Managing, Storing and Distributing Items.', 'Managing, Storing and Distributing Relief Items': 'Διαχείριση, Αποθήκευση και διανομή υλικού βοήθειας', 'Mandatory. In GeoServer, this is the Layer Name. Within the WFS getCapabilities, this is the FeatureType Name part after the colon(:).': 'Mandatory. In GeoServer, this is the Layer Name. Within the WFS getCapabilities, this is the FeatureType Name part after the colon(:).', 'Mandatory. The URL to access the service.': 'Mandatory. The URL to access the service.',
'Manual': 'Manual', 'Manual Synchronization': 'Manual Synchronization', 'Many': 'Many', 'Map': 'Map', 'Map Height': 'Map Height', 'Map Service Catalogue': 'Κατάλογος Χαρτογραφικών Υπηρεσιών', 'Map Settings': 'Map Settings', 'Map Viewing Client': 'Map Viewing Client', 'Map Width': 'Πλάτος Χάρτη', 'Map of Hospitals': 'Map of Hospitals', 'Mapping': 'Mapping', 'Marine Security': 'Marine Security', 'Marital Status': 'Οικογενειακή κατάσταση', 'Marker': 'Marker', 'Marker Details': 'Λεπτομέρειες Marker', 'Marker added': 'Marker added', 'Marker deleted': 'Marker deleted', 'Marker updated': 'Marker updated', 'Markers': 'Markers', 'Master Message Log': 'Κύρια καταγραφή μηνυμάτων (Master log)', 'Master Message Log to process incoming reports & requests': 'Συνολική Καταγραφή μηνυμάτων (Log) για την επεξεργασία εισερχομένων αναφορών και αιτημάτων', 'Match Percentage': 'Match Percentage', 'Match percentage indicates the % match between these two records': 'Match percentage indicates the % match between these two records', 'Matching Records': 'Matching Records', 'Matrix of Choices (Multiple Answers)': 'Matrix of Choices (Multiple Answers)', 'Matrix of Choices (Only one answer)': 'Matrix of Choices (Only one answer)', 'Matrix of Text Fields': 'Matrix of Text Fields', 'Max Persons per Dwelling': 'Max Persons per Dwelling', 'Maximum Weight': 'Maximum Weight', 'Maximum weight capacity of the Storage Location followed by choosing the unit from the drop down list.': 'Maximum weight capacity of the Storage Location followed by choosing the unit from the drop down list.', 'Maximum weight capacity of the items the storage bin can contain. followed by choosing the unit from the drop down list.': 'Maximum weight capacity of the items the storage bin can contain. followed by choosing the unit from the drop down list.', 'Medical and public health': 'Medical and public health', 'Medium': 'Medium', 'Megabytes per Month': 'Megabytes per Month', 'Members': 'Members', 'Membership': 'Membership', 'Membership Details': 'Membership Details', 'Membership added': 'Χρήστης - Μέλος προστέθηκε', 'Membership deleted': 'Membership deleted', 'Membership updated': 'Ενημέρωση Συνδρομής Μέλους', 'Memberships': 'Memberships', 'Message': 'Message', 'Message Details': 'Message Details', 'Message Variable': 'Message Variable', 'Message added': 'Το μήνυμα προστέθηκε', 'Message deleted': 'Message deleted', 'Message updated': 'Message updated', 'Message variable': 'Message variable', 'Messages': 'Messages', 'Messaging': 'Messaging', 'Messaging settings updated': 'Messaging settings updated', 'Metadata': 'Metadata', 'Metadata Details': 'Metadata Details', 'Metadata added': 'Μεταδεδομένα προστέθηκαν', 'Metadata can be supplied here to be applied to all uploaded photos, if desired.': 'Metadata can be supplied here to be applied to all uploaded photos, if desired.', 'Metadata deleted': 'Metadata deleted', 'Metadata updated': 'Metadata updated', 'Meteorite': 'Meteorite', 'Meteorological (inc.
flood)', 'Method used': 'Method used', 'Micronutrient malnutrition prior to disaster': 'Micronutrient malnutrition prior to disaster', 'Middle Name': 'Middle Name', 'Migrants or ethnic minorities': 'Μετανάστες ή Εθνικές μειονότητες', 'Military': 'Military', 'Minimum Bounding Box': 'Minimum Bounding Box', 'Minorities participating in coping activities': 'Minorities participating in coping activities', 'Minutes must be a number between 0 and 60': 'Minutes must be a number between 0 and 60', 'Minutes per Month': 'Minutes per Month', 'Minutes should be a number greater than 0 and less than 60': 'Minutes should be a number greater than 0 and less than 60', 'Miscellaneous': 'Miscellaneous', 'Missing': 'Missing', 'Missing Person': 'Missing Person', 'Missing Person Details': 'Missing Person Details', 'Missing Person Reports': 'Missing Person Reports', 'Missing Persons': 'Missing Persons', 'Missing Persons Registry': 'Missing Persons Registry', 'Missing Persons Report': 'Missing Persons Report', 'Missing Report': 'Missing Report', 'Missing Senior Citizen': 'Missing Senior Citizen', 'Missing Vulnerable Person': 'Missing Vulnerable Person', 'Mobile': 'Mobile', 'Mobile Assess': 'Mobile Assess', 'Mobile Assess.': 'Mobile Assess.', 'Mobile Basic': 'Mobile Basic', 'Mobile Basic Assessment': 'Mobile Basic Assessment', 'Mobile Phone': 'Mobile Phone', 'Mode': 'Mode', 'Modem Settings': 'Modem Settings', 'Modem settings updated': 'Modem settings updated', 'Moderator': 'Moderator', 'Modify Information on groups and individuals': 'Modify Information on groups and individuals', 'Modifying data in spreadsheet before importing it to the database': 'Modifying data in spreadsheet before importing it to the database', 'Module Administration': 'Module Administration', 'Module disabled!': 'Module disabled!', 'Module provides access to information on current Flood Levels.': 'Module provides access to information on current Flood Levels.', 'Monday': 'Monday', 'Monthly Cost': 'Monthly Cost', 'Monthly Salary': 'Monthly Salary', 'Months': 'Months', 'Morgue Status': 'Morgue Status', 'Morgue Units Available': 'Morgue Units Available', 'Mosque': 'Mosque', 'Motorcycle': 'Motorcycle', 'Moustache': 'Moustache', 'Movements (Filter In/Out/Lost)': 'Movements (Filter In/Out/Lost)', 'MultiPolygon': 'MultiPolygon', 'Multiple': 'Multiple', 'Multiple Choice (Multiple Answers)': 'Multiple Choice (Multiple Answers)', 'Multiple Choice (Only One Answer)': 'Multiple Choice (Only One Answer)', 'Multiple Text Fields': 'Multiple Text Fields', 'Multiplicator': 'Πολλαπλασιαστής', 'Muslim': 'Muslim', 'My Tasks': 'My Tasks', 'N/A': 'Μη Εφαρμόσιμο (Μ/Ε)', 'Name': 'Name', 'Name and/or ID': 'Name and/or ID', 'Name and/or ID Label': 'Name and/or ID Label', 'Name of Storage Bin Type.': 'Name of Storage Bin Type.', 'Name of the file (& optional sub-path) located in static which should be used for the background of the header.': 'Name of the file (& optional sub-path) located in static which should be used for the background of the header.', 'Name of the file (& optional sub-path) located in static which should be used for the top-left image.': 'Name of the file (& optional sub-path) located in static which should be used for the top-left image.', 'Name of the file (& optional sub-path) located in views which should be used for footer.': 'Name of the file (& optional sub-path) located in views which should be used for footer.', 'Name of the person in local language and script (optional).': 'Name of the person in local language and script (optional).', 
'Name of the unit or department this report refers to. Leave empty if your hospital has no subdivisions.': 'Name of the unit or department this report refers to. Leave empty if your hospital has no subdivisions.', 'Names can be added in multiple languages': 'Names can be added in multiple languages', 'National ID Card': 'National ID Card', 'National NGO': 'National NGO', 'Nationality': 'Nationality', 'Nationality of the person.': 'Nationality of the person.', 'Nautical Accident': 'Nautical Accident', 'Nautical Hijacking': 'Nautical Hijacking', 'Need Type': 'Need Type', 'Need Type Details': 'Need Type Details', 'Need Type added': 'Τύπος ανάγκης προστέθηκε', 'Need Type deleted': 'Need Type deleted', 'Need Type updated': 'Need Type updated', 'Need Types': 'Need Types', "Need a 'url' argument!": "Need a 'url' argument!", 'Need added': 'Need added', 'Need deleted': 'Need deleted', 'Need to configure Twitter Authentication': 'Need to configure Twitter Authentication', 'Need to select 2 Locations': 'Need to select 2 Locations', 'Need to specify a Budget!': 'Need to specify a Budget!', 'Need to specify a Kit!': 'Need to specify a Kit!', 'Need to specify a Resource!': 'Need to specify a Resource!', 'Need to specify a bundle!': 'Need to specify a bundle!', 'Need to specify a group!': 'Need to specify a group!', 'Need to specify a location to search for.': 'Need to specify a location to search for.', 'Need to specify a role!': 'Πρέπει να καθορίσετε ένα ρόλο!', 'Need to specify a table!': 'Need to specify a table!', 'Need to specify a user!': 'Need to specify a user!', 'Need updated': 'Need updated', 'Needs': 'Needs', 'Needs Details': 'Needs Details', 'Needs elaboration!!!': 'Needs elaboration!!!', 'Needs to reduce vulnerability to violence': 'Needs to reduce vulnerability to violence', 'Negative Flow Isolation': 'Negative Flow Isolation', 'Neighbourhood': 'Neighbourhood', 'Neonatal ICU': 'Neonatal ICU', 'Neonatology': 'Neonatology', 'Network': 'Network', 'Neurology': 'Neurology', 'New': 'New', 'New Assessment reported from': 'New Assessment reported from', 'New Checklist': 'Νέος κατάλογος ελέγχου', 'New Peer': 'New Peer', 'New Record': 'New Record', 'New Report': 'New Report', 'New Request': 'New Request', 'New Solution Choice': 'New Solution Choice', 'New Synchronization Peer': 'New Synchronization Peer', 'New cases in the past 24h': 'New cases in the past 24h', 'News': 'News', 'Next': 'Next', 'No': 'No', 'No Activities Found': 'No Activities Found', 'No Addresses currently registered': 'Δεν έχει καταγραφεί ακόμη Διεύθυνση', 'No Assessment Summaries currently registered': 'No Assessment Summaries currently registered', 'No Assessments currently registered': 'Δεν έχουν εγγραφεί ακόμη Εκτιμήσεις', 'No Baseline Types currently registered': 'No Baseline Types currently registered', 'No Baselines currently registered': 'No Baselines currently registered', 'No Budgets currently registered': 'No Budgets currently registered', 'No Bundles currently registered': 'No Bundles currently registered', 'No Catalog Items currently registered': 'No Catalog Items currently registered', 'No Category<>Sub-Category<>Catalog Relation currently registered': 'Χωρίς Κατηγορία<>Υπο-Κατηγορία<>Υπάρχει εγγεραμένη σχέση των καταλόγων', 'No Checklist available': 'No Checklist available', 'No Cluster Subsectors currently registered': 'Δεν έχει καταγραφεί ακόμη ομάδα υποκατηγοριών', 'No Configs currently defined': 'No Configs currently defined', 'No Details currently registered': 'No Details currently registered', 'No Distribution 
Items currently registered': 'Δεν έχουν εγγραφεί ακόμη αντικείμενα για διανομή', 'No Distributions currently registered': 'No Distributions currently registered', 'No Documents found': 'No Documents found', 'No Donors currently registered': 'No Donors currently registered', 'No Feature Classes currently defined': 'No Feature Classes currently defined', 'No Feature Layers currently defined': 'No Feature Layers currently defined', 'No Flood Reports currently registered': 'No Flood Reports currently registered', 'No Groups currently defined': 'Δεν έχουν οριστεί ακόμη Ομάδες', 'No Groups currently registered': 'No Groups currently registered', 'No Hospitals currently registered': 'Δεν υπάρχουν Νοσκομεία Καταγεγραμένα', 'No Identification Report Available': 'No Identification Report Available', 'No Identities currently registered': 'No Identities currently registered', 'No Image': 'No Image', 'No Images currently registered': 'Δεν έχουν καταχωρηθεί εικόνες', 'No Impact Types currently registered': 'Δεν είναι ακόμη καταχωρημένοι ακόμη τύποι Επιπτώσεων', 'No Impacts currently registered': 'No Impacts currently registered', 'No Incident Reports currently registered': 'No Incident Reports currently registered', 'No Incidents currently registered': 'Δεν έχουν καταγραφεί περιστατικά ακόμη', 'No Item Catalog Category currently registered': 'No Item Catalog Category currently registered', 'No Item Catalog currently registered': 'No Item Catalog currently registered', 'No Item Categories currently registered': 'No Item Categories currently registered', 'No Item Packets currently registered': 'No Item Packets currently registered', 'No Item Sub-Category currently registered': 'No Item Sub-Category currently registered', 'No Item currently registered': 'No Item currently registered', 'No Items currently registered': 'No Items currently registered', 'No Items currently requested': 'No Items currently requested', 'No Keys currently defined': 'No Keys currently defined', 'No Kits currently registered': 'No Kits currently registered', 'No Locations currently available': 'No Locations currently available', 'No Locations currently registered': 'No Locations currently registered', 'No Markers currently available': 'δεν υπάρχουν διαθέσιμοι δείκτες (μαρκαδόροι - markers)', 'No Members currently registered': 'Δεν έχουν εγγραφεί ακόμη μέλη', 'No Memberships currently defined': 'Δεν έχουν ορισθεί ακόμη συμμετοχές μελών', 'No Memberships currently registered': 'Κανένα μέλος δεν έχει ακόμη εγγραφεί', 'No Messages currently in Outbox': 'No Messages currently in Outbox', 'No Metadata currently defined': 'No Metadata currently defined', 'No Need Types currently registered': 'No Need Types currently registered', 'No Needs currently registered': 'No Needs currently registered', 'No Offices currently registered': 'No Offices currently registered', 'No Offices found!': 'No Offices found!', 'No Organizations currently registered': 'No Organizations currently registered', 'No Packets for Item': 'No Packets for Item', 'No People currently registered in this shelter': 'No People currently registered in this shelter', 'No Persons currently registered': 'No Persons currently registered', 'No Persons currently reported missing': 'No Persons currently reported missing', 'No Persons found': 'No Persons found', 'No Photos found': 'No Photos found', 'No Presence Log Entries currently registered': 'No Presence Log Entries currently registered', 'No Problems currently defined': 'No Problems currently defined', 'No Projections currently 
defined': 'Δεν έχουν ορισθεί προβολικά συστήματα', 'No Projects currently registered': 'No Projects currently registered', 'No Rapid Assessments currently registered': 'No Rapid Assessments currently registered', 'No Received Items currently registered': 'No Received Items currently registered', 'No Received Shipments': 'No Received Shipments', 'No Records currently available': 'No Records currently available', 'No Records matching the query': 'No Records matching the query', 'No Request Items currently registered': 'No Request Items currently registered', 'No Request Shipments': 'No Request Shipments', 'No Requests have been made yet': 'No Requests have been made yet', 'No Requests match this criteria': 'No Requests match this criteria', 'No Responses currently registered': 'No Responses currently registered', 'No Rivers currently registered': 'No Rivers currently registered', 'No Roles currently defined': 'No Roles currently defined', 'No Sections currently registered': 'No Sections currently registered', 'No Sectors currently registered': 'No Sectors currently registered', 'No Sent Items currently registered': 'No Sent Items currently registered', 'No Sent Shipments': 'No Sent Shipments', 'No Settings currently defined': 'No Settings currently defined', 'No Shelter Services currently registered': 'No Shelter Services currently registered', 'No Shelter Types currently registered': 'No Shelter Types currently registered', 'No Shelters currently registered': 'No Shelters currently registered', 'No Shipment Transit Logs currently registered': 'Δεν έχουν εγγραφεί ακόμη Δελτία αποστολών', 'No Shipment/Way Bills currently registered': 'No Shipment/Way Bills currently registered', 'No Shipment<>Item Relation currently registered': 'No Shipment<>Item Relation currently registered', 'No Sites currently registered': 'No Sites currently registered', 'No Skill Types currently set': 'Δεν έχουν ορισθεί τύποι προσόντων - ικανοτήτων', 'No Solutions currently defined': 'No Solutions currently defined', 'No Staff Types currently registered': 'No Staff Types currently registered', 'No Staff currently registered': 'No Staff currently registered', 'No Storage Bin Type currently registered': 'Δεν έχει καταγραφεί ακόμη τύπος αποθηκευτικού χώρου (Storage Bin)', 'No Storage Bins currently registered': 'No Storage Bins currently registered', 'No Storage Locations currently registered': 'No Storage Locations currently registered', 'No Subscription available': 'No Subscription available', 'No Survey Answers currently registered': 'No Survey Answers currently registered', 'No Survey Questions currently registered': 'No Survey Questions currently registered', 'No Survey Sections currently registered': 'No Survey Sections currently registered', 'No Survey Series currently registered': 'No Survey Series currently registered', 'No Survey Template currently registered': 'No Survey Template currently registered', 'No Tasks with Location Data': 'No Tasks with Location Data', 'No Tasks with Location Data!': 'No Tasks with Location Data!', 'No Themes currently defined': 'No Themes currently defined', 'No Tickets currently registered': 'Δεν έχουν εγγραφεί ακόμη εισιτήρια', 'No Tracks currently available': 'No Tracks currently available', 'No Units currently registered': 'Δεν έχουν εγγραφεί ακόμη μονάδες', 'No Users currently registered': 'Δεν έχουν εγγραφεί ακόμη χρήστες', 'No Volunteers currently registered': 'No Volunteers currently registered', 'No Warehouse Items currently registered': 'No Warehouse Items currently 
registered', 'No Warehouses currently registered': 'Δεν έχουν καταχωρηθεί προς το παρόν αποθήκες', 'No Warehouses match this criteria': 'No Warehouses match this criteria', 'No access at all': 'Δεν υπάρχει καθόλου πρόσβαση', 'No access to this record!': 'No access to this record!', 'No action recommended': 'No action recommended', 'No conflicts logged': 'No conflicts logged', 'No contact information available': 'No contact information available', 'No contacts currently registered': 'Δεν έχουν ορισθεί ακόμη σημεία επαφών', 'No data in this table - cannot create PDF!': 'Δεν υπάρχουν δεδομένα στον Πίνακα - Αδύνατη η δημιουργία PDF!', 'No databases in this application': 'No databases in this application', 'No entries found': 'No entries found', 'No entries matching the query': 'No entries matching the query', 'No import jobs': 'No import jobs', 'No linked records': 'Δεν υπάρχουν συνδεδεμένα αρχεία - εγγραφές', 'No location found': 'No location found', 'No location known for this person': 'No location known for this person', 'No location known for this team': 'No location known for this team', 'No locations found for members of this team': 'No locations found for members of this team', 'No locations registered at this level': 'No locations registered at this level', 'No log entries matching the query': 'No log entries matching the query', 'No matching records found.': 'No matching records found.', 'No messages in the system': 'No messages in the system', 'No notes available': 'No notes available', 'No peers currently registered': 'No peers currently registered', 'No pending registrations found': 'Δεν βρέθηκαν εγγραφές σε αναμονή (pending)', 'No pending registrations matching the query': 'No pending registrations matching the query', 'No person record found for current user.': 'No person record found for current user.', 'No positions currently registered': 'No positions currently registered', 'No problem group defined yet': 'No problem group defined yet', 'No records matching the query': 'No records matching the query', 'No recovery reports available': 'No recovery reports available', 'No report available.': 'Δεν υπάρχει διαθέσιμη αναφορά.', 'No reports available.': 'No reports available.', 'No reports currently available': 'No reports currently available', 'No requests found': 'No requests found', 'No resources currently registered': 'No resources currently registered', 'No resources currently reported': 'No resources currently reported', 'No service profile available': 'No service profile available', 'No skills currently set': 'No skills currently set', 'No status information available': 'No status information available', 'No synchronization': 'Χωρίς συγχρονισμό', 'No tasks currently registered': 'No tasks currently registered', 'No template found!': 'Δεν βρέθηκε πρότυπο!', 'No units currently registered': 'No units currently registered', 'No volunteer information registered': 'Δεν έχουν εγγραφεί πληροφορίες εθελοντών', 'None': 'None', 'None (no such record)': 'None (no such record)', 'Noodles': 'Ζυμαρικά Noodles', 'Normal': 'Normal', 'Normal food sources disrupted': 'Normal food sources disrupted', 'Not Applicable': 'Not Applicable', 'Not Authorised!': 'Μη εξουσιοδοτημένος!', 'Not Possible': 'Not Possible', 'Not Set': 'Not Set', 'Not authorised!': 'Not authorised!', 'Not installed or incorrectly configured.': 'Δεν έχει εγκατασταθεί ή είναι λανθασμένα ρυθμισμένο.', 'Note': 'Note', 'Note Details': 'Note Details', 'Note Status': 'Note Status', 'Note Type': 'Note Type', 'Note added': 'Note added', 
'Note deleted': 'Note deleted', 'Note that this list only shows active volunteers. To see all people registered in the system, do a search from the home screen instead.': 'Note that this list only shows active volunteers. To see all people registered in the system, do a search from the home screen instead.', 'Note updated': 'Note updated', 'Notes': 'Notes', 'Notice to Airmen': 'Ανακοίνωση προς τα Εναέρια Μέσα', 'Number': 'Number', 'Number of Columns': 'Number of Columns', 'Number of Patients': 'Number of Patients', 'Number of Rows': 'Number of Rows', 'Number of additional beds of that type expected to become available in this unit within the next 24 hours.': 'Number of additional beds of that type expected to become available in this unit within the next 24 hours.', 'Number of alternative places for studying': 'Αριθμός εναλλακτικών περιοχών για μελέτη', 'Number of available/vacant beds of that type in this unit at the time of reporting.': 'Number of available/vacant beds of that type in this unit at the time of reporting.', 'Number of deaths during the past 24 hours.': 'Number of deaths during the past 24 hours.', 'Number of discharged patients during the past 24 hours.': 'Number of discharged patients during the past 24 hours.', 'Number of doctors': 'Number of doctors', 'Number of doctors actively working': 'Number of doctors actively working', 'Number of houses damaged, but usable': 'Number of houses damaged, but usable', 'Number of houses destroyed/uninhabitable': 'Number of houses destroyed/uninhabitable', 'Number of in-patients at the time of reporting.': 'Number of in-patients at the time of reporting.', 'Number of latrines': 'Number of latrines', 'Number of midwives actively working': 'Number of midwives actively working', 'Number of newly admitted patients during the past 24 hours.': 'Number of newly admitted patients during the past 24 hours.', 'Number of non-medical staff': 'Number of non-medical staff', 'Number of nurses': 'Number of nurses', 'Number of nurses actively working': 'Number of nurses actively working', 'Number of private schools': 'Αριθμός Ιδιωτικών Σχολείων', 'Number of public schools': 'Number of public schools', 'Number of religious schools': 'Number of religious schools', 'Number of schools damaged but usable': 'Number of schools damaged but usable', 'Number of schools destroyed/uninhabitable': 'Number of schools destroyed/uninhabitable', 'Number of schools open before disaster': 'Number of schools open before disaster', 'Number of schools open now': 'Number of schools open now', 'Number of teachers affected by disaster': 'Number of teachers affected by disaster', 'Number of teachers before disaster': 'Number of teachers before disaster', 'Number of vacant/available beds in this hospital. Automatically updated from daily reports.': 'Number of vacant/available beds in this hospital. 
Automatically updated from daily reports.', 'Number of vacant/available units to which victims can be transported immediately.': 'Number of vacant/available units to which victims can be transported immediately.', 'Number or Label on the identification tag this person is wearing (if any).': 'Number or Label on the identification tag this person is wearing (if any).', 'Number/Percentage of affected population that is Female & Aged 0-5': 'Number/Percentage of affected population that is Female & Aged 0-5', 'Number/Percentage of affected population that is Female & Aged 13-17': 'Number/Percentage of affected population that is Female & Aged 13-17', 'Number/Percentage of affected population that is Female & Aged 18-25': 'Number/Percentage of affected population that is Female & Aged 18-25', 'Number/Percentage of affected population that is Female & Aged 26-60': 'Number/Percentage of affected population that is Female & Aged 26-60', 'Number/Percentage of affected population that is Female & Aged 6-12': 'Number/Percentage of affected population that is Female & Aged 6-12', 'Number/Percentage of affected population that is Female & Aged 61+': 'Number/Percentage of affected population that is Female & Aged 61+', 'Number/Percentage of affected population that is Male & Aged 0-5': 'Αριθμός / Ποσοστό του πληγέντος πληθυσμού που είναι άρρεν & Ηλικίας 0-5', 'Number/Percentage of affected population that is Male & Aged 13-17': 'Number/Percentage of affected population that is Male & Aged 13-17', 'Number/Percentage of affected population that is Male & Aged 18-25': 'Number/Percentage of affected population that is Male & Aged 18-25', 'Number/Percentage of affected population that is Male & Aged 26-60': 'Αριθμός / Ποσοστό του πληγέντος πληθυσμού που είναι άρρεν & Ηλικίας 26-60', 'Number/Percentage of affected population that is Male & Aged 6-12': 'Number/Percentage of affected population that is Male & Aged 6-12', 'Number/Percentage of affected population that is Male & Aged 61+': 'Number/Percentage of affected population that is Male & Aged 61+', 'Nursery Beds': 'Nursery Beds', 'Nutrition': 'Διατροφή', 'OK': 'OK', 'OR Reason': 'OR Reason', 'OR Status': 'OR Status', 'OR Status Reason': 'OR Status Reason', 'Observer': 'Observer', 'Obstetrics/Gynecology': 'Μαιευτικό/Γυναικολογικό', 'Office': 'Office', 'Office Address': 'Office Address', 'Office Details': 'Office Details', 'Office added': 'Προστέθηκε γραφείο', 'Office deleted': 'Γραφείο Διαγράφηκε', 'Office updated': 'Office updated', 'Offices': 'Offices', 'Offline Sync': 'Offline Sync', 'Offline Sync (from USB/File Backup)': 'Offline Sync (from USB/File Backup)', 'Old': 'Old', 'Older people as primary caregivers of children': 'Older people as primary caregivers of children', 'Older people in care homes': 'Older people in care homes', 'Older people participating in coping activities': 'Older people participating in coping activities', 'Older people with chronical illnesses': 'Older people with chronical illnesses', 'Older person (>60 yrs)': 'Older person (>60 yrs)', 'On by default?': "Ενεργό εξ ορισμού;", 'On by default? (only applicable to Overlays)': 'On by default? (only applicable to Overlays)', 'One Time Cost': 'One Time Cost', 'One time cost': 'One time cost', 'One-time': 'One-time', 'One-time costs': 'One-time costs', 'Oops! Something went wrong...': 'Oops! Something went wrong...', 'Oops! something went wrong on our side.': 'Oops! 
something went wrong on our side.', 'Open': 'Open', 'Open Assessment': 'Open Assessment', 'Open area': 'Open area', 'Open recent': 'Open recent', 'Operating Rooms': 'Operating Rooms', 'Optional link to an Incident which this Assessment was triggered by.': 'Optional link to an Incident which this Assessment was triggered by.', 'Optional. In GeoServer, this is the Workspace Namespace URI. Within the WFS getCapabilities, this is the FeatureType Name part before the colon(:).': 'Optional. In GeoServer, this is the Workspace Namespace URI. Within the WFS getCapabilities, this is the FeatureType Name part before the colon(:).', "Optional. The name of the geometry column. In PostGIS this defaults to 'the_geom'.": "Optional. The name of the geometry column. In PostGIS this defaults to 'the_geom'.", 'Options': 'Options', 'Organisation': 'Οργανισμός', 'Organization': 'Οργανισμός', 'Organization Details': 'Λεπτομέρειες Οργανισμού', 'Organization Registry': 'Organization Registry', 'Organization added': 'Organization added', 'Organization deleted': 'Organization deleted', 'Organization updated': 'Organization updated', 'Organizations': 'Organizations', 'Origin': 'Origin', 'Origin of the separated children': 'Origin of the separated children', 'Other': 'Other', 'Other (describe)': 'Other (describe)', 'Other (specify)': 'Other (specify)', 'Other Evidence': 'Λοιπά αποδεικτικά στοιχεία', 'Other Faucet/Piped Water': 'Άλλο πόσιμο νερό (από βρύση-παροχή)', 'Other Isolation': 'Άλλη απομόνωση', 'Other Name': 'Other Name', 'Other activities of boys 13-17yrs': 'Άλλες δραστηριότητες αγοριών 13 έως 17 ετών', 'Other activities of boys 13-17yrs before disaster': 'Other activities of boys 13-17yrs before disaster', 'Other activities of boys <12yrs': 'Other activities of boys <12yrs', 'Other activities of boys <12yrs before disaster': 'Other activities of boys <12yrs before disaster', 'Other activities of girls 13-17yrs': 'Other activities of girls 13-17yrs', 'Other activities of girls 13-17yrs before disaster': 'Other activities of girls 13-17yrs before disaster', 'Other activities of girls<12yrs': 'Other activities of girls<12yrs', 'Other activities of girls<12yrs before disaster': 'Other activities of girls<12yrs before disaster', 'Other alternative infant nutrition in use': 'Other alternative infant nutrition in use', 'Other alternative places for study': 'Άλλες εναλλακτικές περιοχές για μελέτη (διάβασμα)', 'Other assistance needed': 'Other assistance needed', 'Other assistance, Rank': 'Άλλη βοήθεια, Ιεραρχήστε', 'Other current health problems, adults': 'Other current health problems, adults', 'Other current health problems, children': 'Other current health problems, children', 'Other events': 'Other events', 'Other factors affecting school attendance': 'Άλλοι παράγοντες που επηρεάζουν την παρακολούθηση στο σχολείο', 'Other major expenses': 'Other major expenses', 'Other school assistance received': 'Other school assistance received', 'Other school assistance, details': 'Other school assistance, details', 'Other school assistance, source': 'Other school assistance, source', 'Other side dishes in stock': 'Άλλα δευτερεύοντα πιάτα σε απόθεμα', 'Other types of water storage containers': 'Other types of water storage containers', 'Other ways to obtain food': 'Other ways to obtain food', 'Outbound Mail settings are configured in models/000_config.py.': 'Outbound Mail settings are configured in models/000_config.py.', 'Outbox': 'Outbox', 'Outgoing SMS Handler': 'Outgoing SMS Handler', 'Outgoing SMS handler': 'Outgoing SMS 
handler', 'Overland Flow Flood': 'Overland Flow Flood', 'Owned Resources': 'Owned Resources', 'PDAM': 'PDAM', 'PIN': 'PIN', 'PIN number ': 'PIN number ', 'PL Women': 'PL Women', 'Packet': 'Packet', 'Parameters': 'Parameters', 'Parent': 'Parent', 'Parent Office': 'Parent Office', "Parent level should be higher than this record's level. Parent level is": "Parent level should be higher than this record's level. Parent level is", 'Parent needs to be of the correct level': 'Parent needs to be of the correct level', 'Parent needs to be set': 'Parent needs to be set', 'Parent needs to be set for locations of level': 'Parent needs to be set for locations of level', 'Parents/Caregivers missing children': 'Parents/Caregivers missing children', 'Participant': 'Participant', 'Pashto': 'Pashto', 'Passport': 'Passport', 'Password': 'Password', "Password fields don't match": "Password fields don't match", 'Pathology': 'Pathology', 'Patients': 'Ασθενείς', 'Pediatric ICU': 'Pediatric ICU', 'Pediatric Psychiatric': 'Pediatric Psychiatric', 'Pediatrics': 'Παιδιατρική', 'Peer': 'Peer', 'Peer Details': 'Peer Details', 'Peer Registration': 'Peer Registration', 'Peer Registration Details': 'Peer Registration Details', 'Peer Registration Request': 'Αίτημα Εγγραφής Peer', 'Peer Type': 'Peer Type', 'Peer UID': 'Peer UID', 'Peer added': 'Peer added', 'Peer deleted': 'Peer deleted', 'Peer not allowed to push': 'Peer not allowed to push', 'Peer registration request added': 'Το αίτημα εγγραφής peer προστέθηκε', 'Peer registration request deleted': 'Peer registration request deleted', 'Peer registration request updated': 'Peer registration request updated', 'Peer updated': 'Peer updated', 'Peers': 'Peers', 'Pending Requests': 'Pending Requests', 'People': 'People', 'People Needing Food': 'People Needing Food', 'People Needing Shelter': 'People Needing Shelter', 'People Needing Water': 'People Needing Water', 'People Trapped': 'People Trapped', 'People with chronical illnesses': 'People with chronical illnesses', 'Person': 'Άτομο', 'Person 1': 'Person 1', 'Person 1, Person 2 are the potentially duplicate records': 'Person 1, Person 2 are the potentially duplicate records', 'Person 2': 'Person 2', 'Person Data': 'Person Data', 'Person De-duplicator': 'Person De-duplicator', 'Person Details': 'Λεπτομέρειες Ατόμου', 'Person Finder': 'Person Finder', 'Person Registry': 'Person Registry', 'Person added': 'Person added', 'Person deleted': 'Person deleted', 'Person details updated': 'Person details updated', 'Person interviewed': 'Person interviewed', 'Person missing': 'Person missing', 'Person reporting': 'Person reporting', 'Person who has actually seen the person/group.': 'Person who has actually seen the person/group.', 'Person who is reporting about the presence.': 'Person who is reporting about the presence.', 'Person who observed the presence (if different from reporter).': 'Άτομο το οποίο παρατήρησε την παρουσία (εάν είναι διαφορετικό από αυτόν που αναφέρει).', 'Person/Group': 'Person/Group', 'Personal Data': 'Personal Data', 'Personal Effects': 'Personal Effects', 'Personal Effects Details': 'Personal Effects Details', 'Personal impact of disaster': 'Personal impact of disaster', 'Persons': 'Άτομα', 'Persons with disability (mental)': 'Άτομα με (διανοητική) ανικανότητα', 'Persons with disability (physical)': 'Άτομα με (φυσικές) ανικανότητες', 'Phone': 'Phone', 'Phone 1': 'Phone 1', 'Phone 2': 'Phone 2', "Phone number to donate to this organization's relief efforts.": 'Αριθμός τηλεφώνου για δωρεές για 
τις προσπάθειες ανακούφισης που προσφέρονται από τον οργανισμό.', 'Phone/Business': 'Phone/Business', 'Phone/Emergency': 'Phone/Emergency', 'Phone/Exchange': 'Phone/Exchange', 'Photo': 'Photo', 'Photo Details': 'Λεπτομέρειες Φωτογραφίας', 'Photo added': 'Photo added', 'Photo deleted': 'Photo deleted', 'Photo updated': 'Photo updated', 'Photograph': 'Photograph', 'Photos': 'Photos', 'Physical Description': 'Physical Description', 'Picture upload and finger print upload facility': 'Picture upload and finger print upload facility', 'Place for solid waste disposal': 'Place for solid waste disposal', 'Place of Recovery': 'Place of Recovery', 'Places the children have been sent to': 'Places the children have been sent to', 'Playing': 'Playing', "Please come back after sometime if that doesn't help.": "Please come back after sometime if that doesn't help.", 'Please correct all errors.': 'Please correct all errors.', 'Please enter a First Name': 'Please enter a First Name', 'Please enter a valid email address': 'Please enter a valid email address', 'Please enter the first few letters of the Person/Group for the autocomplete.': 'Please enter the first few letters of the Person/Group for the autocomplete.', 'Please enter the recipient': 'Please enter the recipient', 'Please fill this!': 'Please fill this!', 'Please report here where you are:': 'Please report here where you are:', 'Please select another level': 'Please select another level', 'Please specify any problems and obstacles with the proper handling of the disease, in detail (in numbers, where appropriate). You may also add suggestions the situation could be improved.': 'Please specify any problems and obstacles with the proper handling of the disease, in detail (in numbers, where appropriate). You may also add suggestions the situation could be improved.', 'Please use this field to record any additional information, including a history of the record if it is updated.': 'Please use this field to record any additional information, including a history of the record if it is updated.', 'Please use this field to record any additional information, such as Ushahidi instance IDs. Include a history of the record if it is updated.': 'Please use this field to record any additional information, such as Ushahidi instance IDs. 
Include a history of the record if it is updated.', 'Pledge Aid': 'Pledge Aid', 'Pledge Aid to match these Requests': 'Pledge Aid to match these Requests', 'Pledge Support': 'Pledge Support', 'Pledged': 'Pledged', 'Pledges': 'Pledges', 'Point': 'Point', 'Poisoning': 'Poisoning', 'Poisonous Gas': 'Poisonous Gas', 'Police': 'Police', 'Pollution and other environmental': 'Pollution and other environmental', 'Polygon': 'Polygon', 'Porridge': 'Πουρές - Κουάκερ', 'Port': 'Port', 'Port Closure': 'Port Closure', 'Position Details': 'Position Details', 'Position added': 'Position added', 'Position deleted': 'Position deleted', 'Position type': 'Position type', 'Position updated': 'Position updated', 'Positions': 'Positions', 'Postcode': 'Postcode', 'Poultry': 'Poultry', 'Poultry restocking, Rank': 'Poultry restocking, Rank', 'Pounds': 'Pounds', 'Power Failure': 'Power Failure', 'Powered by Sahana Eden': 'Powered by Sahana Eden', 'Preferred Name': 'Preferred Name', 'Pregnant women': 'Pregnant women', 'Preliminary': 'Preliminary', 'Presence': 'Presence', 'Presence Condition': 'Presence Condition', 'Presence Log': 'Presence Log', "Press the 'Delete Old' button to have all records which reference this one be repointed at the new one & then the old record will be deleted.": "Press the 'Delete Old' button to have all records which reference this one be repointed at the new one & then the old record will be deleted.", 'Previous': 'Previous', 'Primary Name': 'Primary Name', 'Priority': 'Προτεραιότητα', 'Priority Level': 'Priority Level', 'Private': 'Private', 'Problem': 'Problem', 'Problem Administration': 'Problem Administration', 'Problem Details': 'Problem Details', 'Problem Group': 'Problem Group', 'Problem Title': 'Problem Title', 'Problem added': 'Problem added', 'Problem deleted': 'Problem deleted', 'Problem updated': 'Το πρόβλημα ενημερώθηκε', 'Problems': 'Προβλήματα', 'Procedure': 'Procedure', 'Procurements': 'Procurements', 'Product Description': 'Product Description', 'Product Name': 'Product Name', 'Profile': 'Profile', 'Project': 'Project', 'Project Details': 'Project Details', 'Project Status': 'Project Status', 'Project Tracking': 'Project Tracking', 'Project added': 'Project added', 'Project deleted': 'Project deleted', 'Project has no Lat/Lon': 'Project has no Lat/Lon', 'Project updated': 'Το έργο ενημερώθηκε', 'Projection': 'Projection', 'Projection Details': 'Projection Details', 'Projection added': 'Projection added', 'Projection deleted': 'Projection deleted', 'Projection updated': 'Προβολή ανανεώθηκε', 'Projections': 'Projections', 'Projects': 'Projects', 'Protected resource': 'Protected resource', 'Protection': 'Protection', 'Provide Metadata for your media files': 'Provide Metadata for your media files', 'Provide a password': 'Provide a password', 'Province': 'Province', 'Proxy-server': 'Proxy-server', 'Psychiatrics/Adult': 'Psychiatrics/Adult', 'Psychiatrics/Pediatric': 'Psychiatrics/Pediatric', 'Public': 'Public', 'Public Event': 'Public Event', 'Public and private transportation': 'Δημόσια και Ιδιωτικά μέσα μεταφοράς', 'Pull tickets from external feed': 'Pull tickets from external feed', 'Punjabi': 'Punjabi', 'Push tickets to external system': 'Push tickets to external system', 'Put a choice in the box': 'Put a choice in the box', 'Pyroclastic Flow': 'Pyroclastic Flow', 'Pyroclastic Surge': 'Πυροκλαστικό κύμα', 'Python Serial module not available within the running Python - this needs installing to activate the Modem': 'Python Serial module not available within the running Python 
- this needs installing to activate the Modem', 'Quantity': 'Quantity', 'Quarantine': 'Quarantine', 'Queries': 'Queries', 'Query': 'Query', 'Queryable?': 'Queryable?', 'RECORD A': 'RECORD A', 'RECORD B': 'RECORD B', 'RESPONSE': 'RESPONSE', 'Race': 'Race', 'Radiological Hazard': 'Radiological Hazard', 'Radiology': 'Radiology', 'Railway Accident': 'Railway Accident', 'Railway Hijacking': 'Railway Hijacking', 'Rain Fall': 'Rain Fall', 'Rapid Assessment': 'Rapid Assessment', 'Rapid Assessment Details': 'Rapid Assessment Details', 'Rapid Assessment added': 'Άμεση εκτίμηση προστέθηκε', 'Rapid Assessment deleted': 'Rapid Assessment deleted', 'Rapid Assessment updated': 'Rapid Assessment updated', 'Rapid Assessments': 'Rapid Assessments', 'Rapid Assessments & Flexible Impact Assessments': 'Rapid Assessments & Flexible Impact Assessments', 'Rapid Close Lead': 'Rapid Close Lead', 'Rating Scale': 'Rating Scale', 'Raw Database access': 'Raw Database access', 'Real World Arbitrary Units': 'Αυθαίρετες μονάδες που αναφέρονται στον πραγματικό κόσμο', 'Receive': 'Receive', 'Receive Items': 'Receive Items', 'Receive Shipment': 'Receive Shipment', 'Received': 'Received', 'Received By': 'Received By', 'Received Item Details': 'Received Item Details', 'Received Item added': 'Received Item added', 'Received Item deleted': 'Received Item deleted', 'Received Item updated': 'Received Item updated', 'Received Items': 'Received Items', 'Received Items added to Warehouse Items': 'Received Items added to Warehouse Items', 'Received Shipment Details': 'Received Shipment Details', 'Received Shipment canceled': 'Received Shipment canceled', 'Received Shipment updated': 'Received Shipment updated', 'Received Shipments': 'Received Shipments', 'Recipient': 'Recipient', 'Recipients': 'Παραλήπτες', 'Record Details': 'Record Details', 'Record ID': 'Record ID', 'Record Saved': 'Record Saved', 'Record added': 'Record added', 'Record deleted': 'Record deleted', 'Record last updated': 'Τελευταία ενημέρωση εγγραφής', 'Record not found!': 'Record not found!', 'Record updated': 'Record updated', 'Records': 'Records', 'Recovery': 'Recovery', 'Recovery Request': 'Recovery Request', 'Recovery Request added': 'Recovery Request added', 'Recovery Request deleted': 'Recovery Request deleted', 'Recovery Request updated': 'Recovery Request updated', 'Recovery Requests': 'Αιτήματα για ανάκτηση-αναζήτηση', 'Recovery report added': 'Recovery report added', 'Recovery report deleted': 'Recovery report deleted', 'Recovery report updated': 'Recovery report updated', 'Recurring': 'Επαναλαμβανόμενο', 'Recurring Cost': 'Recurring Cost', 'Recurring cost': 'Recurring cost', 'Recurring costs': 'Επαναλαμβανόμενα έξοδα', 'Reference Document': 'Reference Document', 'Regional': 'Περιφερειακή', 'Register': 'Register', 'Register Person': 'Register Person', 'Register Person into this Shelter': 'Register Person into this Shelter', 'Register them as a volunteer': 'Register them as a volunteer', 'Registered People': 'Registered People', 'Registered users can': 'Οι εγγεγραμμένοι χρήστες μπορούν', 'Registering ad-hoc volunteers willing to contribute': 'Registering ad-hoc volunteers willing to contribute', 'Registration': 'Registration', 'Registration Details': 'Registration Details', 'Registration added': 'Registration added', 'Registration entry deleted': 'Registration entry deleted', 'Registration key': 'Registration key', 'Registration updated': 'Registration updated', 'Registry keeps track of all the relief organizations working in the disaster region. 
It captures not only the places where they are active, but also captures information on the range of projects they are providing in each area.': 'Registry keeps track of all the relief organizations working in the disaster region. It captures not only the places where they are active, but also captures information on the range of projects they are providing in each area.', 'Rehabilitation/Long Term Care': 'Rehabilitation/Long Term Care', 'Reliable access to sanitation/hygiene items': 'Reliable access to sanitation/hygiene items', 'Relief': 'Relief', 'Relief Item Catalog': 'Relief Item Catalog', 'Relief Team': 'Relief Team', 'Religion': 'Religion', 'Religious Leader': 'Religious Leader', 'Relocate as instructed in the <instruction>': 'Relocate as instructed in the <instruction>', 'Remove': 'Remove', 'Repeat your password': 'Repeat your password', 'Replace': 'Replace', 'Replace if Master': 'Replace if Master', 'Replace if Newer': 'Αντικατάσταση σε περίπτωση νεότερου', 'Report': 'Report', 'Report Another Assessment...': 'Αναφορά Άλλης Αξιολόγησης...', 'Report Details': 'Report Details', 'Report Resource': 'Report Resource', 'Report Type': 'Τύπος Αναφοράς', 'Report Types Include': 'Report Types Include', 'Report a Problem with the Software': 'Αναφορά προβλήματος του λογισμικού', 'Report added': 'Report added', 'Report deleted': 'Αναφορά Διαγράφηκε', 'Report my location': 'Report my location', 'Report that person missing': 'Report that person missing', 'Report the contributing factors for the current EMS status.': 'Αναφορά των παραγόντων που συμμετέχουν στην παρούσα κατάσταση ανάγκης', 'Report the contributing factors for the current OR status.': 'Report the contributing factors for the current OR status.', 'Report the person as found': 'Report the person as found', 'Report them as found': 'Report them as found', 'Report them missing': 'Report them missing', 'Report updated': 'Report updated', 'ReportLab module not available within the running Python - this needs installing for PDF output!': 'ReportLab module not available within the running Python - this needs installing for PDF output!', 'Reporter': 'Reporter', 'Reporter Name': 'Reporter Name', 'Reporting on the projects in the region': 'Reporting on the projects in the region', 'Reports': 'Reports', 'Request': 'Request', 'Request Added': 'Request Added', 'Request Canceled': 'Request Canceled', 'Request Details': 'Request Details', 'Request Item': 'Request Item', 'Request Item Details': 'Request Item Details', 'Request Item added': 'Request Item added', 'Request Item deleted': 'Το αντικείμενο αιτήματος διαγράφηκε', 'Request Item updated': 'Request Item updated', 'Request Items': 'Request Items', 'Request Type': 'Request Type', 'Request Updated': 'Request Updated', 'Request added': 'Request added', 'Request deleted': 'Request deleted', 'Request for Role Upgrade': 'Αίτημα για αναβάθμιση ρόλου', 'Request updated': 'Request updated', 'Request, Response & Session': 'Request, Response & Session', 'Requested': 'Requested', 'Requested By Location': 'Requested By Location', 'Requested From Warehouse': 'Requested From Warehouse', 'Requested by': 'Requested by', 'Requested on': 'Requested on', 'Requester': 'Requester', 'Requestor': 'Requestor', 'Requests': 'Requests', 'Requests From': 'Requests From', 'Requests for Item': 'Requests for Item', 'Requires Login!': 'Requires Login!', 'Requires login': 'Requires login', 'Rescue and recovery': 'Rescue and recovery', 'Reset': 'Reset', 'Reset Password': 'Reset Password', 'Resolve': 'Resolve', 'Resolve Conflict': 
'Resolve Conflict', 'Resolve link brings up a new screen which helps to resolve these duplicate records and update the database.': 'Resolve link brings up a new screen which helps to resolve these duplicate records and update the database.', 'Resource': 'Resource', 'Resource Details': 'Resource Details', 'Resource added': 'Resource added', 'Resource deleted': 'Resource deleted', 'Resource updated': 'Resource updated', 'Resources': 'Πόροι', 'Respiratory Infections': 'Respiratory Infections', 'Response': 'Response', 'Response Details': 'Response Details', 'Response added': 'Response added', 'Response deleted': 'Η απάντηση-ανταπόκριση διαγράφηκε', 'Response updated': 'Response updated', 'Responses': 'Responses', 'Restricted Access': 'Restricted Access', 'Restrictions': 'Restrictions', 'Results': 'Results', 'Retail Crime': 'Retail Crime', 'Retrieve Password': 'Retrieve Password', 'Rice': 'Rice', 'Riot': 'Riot', 'River': 'River', 'River Details': 'Λεπτομέρειες Ποταμού', 'River added': 'River added', 'River deleted': 'River deleted', 'River updated': 'River updated', 'Rivers': 'Rivers', 'Road Accident': 'Αυτοκινητιστικό Ατύχημα', 'Road Closed': 'Road Closed', 'Road Conditions': 'Road Conditions', 'Road Delay': 'Road Delay', 'Road Hijacking': 'Road Hijacking', 'Road Usage Condition': 'Κατάσταση οδικού δικτύου', 'Role': 'Role', 'Role Details': 'Role Details', 'Role Manager': 'Role Manager', 'Role Required': 'Role Required', 'Role Updated': 'Role Updated', 'Role added': 'Role added', 'Role deleted': 'Role deleted', 'Role updated': 'Role updated', 'Role-based': 'Role-based', 'Roles': 'Roles', 'Roles Permitted': 'Roles Permitted', 'Roof tile': 'Roof tile', 'Row Choices (One Per Line)': 'Row Choices (One Per Line)', 'Rows in table': 'Rows in table', 'Rows selected': 'Rows selected', 'Run Functional Tests': 'Εκτέλεση λειτουργικών δοκιμών', 'Run Interval': 'Run Interval', 'Running Cost': 'Running Cost', 'SITUATION': 'SITUATION', 'Safe environment for vulnerable groups': 'Safe environment for vulnerable groups', 'Safety of children and women affected by disaster': 'Safety of children and women affected by disaster', 'Sahana Administrator': 'Sahana Administrator', 'Sahana Agasti': 'Sahana Agasti', 'Sahana Blue': 'Sahana Blue', 'Sahana Community Chat': 'Sahana Community Chat', 'Sahana Eden': 'Sahana Eden', 'Sahana Eden <=> Other': 'Sahana Eden <=> Other', 'Sahana Eden <=> Sahana Eden': 'Sahana Eden <=> Sahana Eden', 'Sahana Eden Disaster Management Platform': 'Sahana Eden Disaster Management Platform', 'Sahana Eden Open Source Disaster Management Platform': 'Πλατφόρμα Διαχείρισης Καταστροφών Ανοικτού Κώδικα Sahana Eden', 'Sahana Eden Website': 'Διαδικτυακός τόπος Sahana Eden', 'Sahana Green': 'Sahana Green', 'Sahana Login Approval Pending': 'Εκκρεμεί η έγκριση Σύνδεσης στο σύστημα Sahana', 'Sahana Steel': 'Sahana Steel', 'Sahana access granted': 'Sahana access granted', 'Sahana: new request has been made. Please login to see if you can fulfil the request.': 'Sahana: έχει υποβληθεί νέο αίτημα. 
Παρακαλώ συνδεθείτε για να δείτε αν μπορείτε να ικανοποιήσετε το αίτημα.', 'Salted Fish': 'Salted Fish', 'Salvage material usable from destroyed houses': 'Salvage material usable from destroyed houses', 'Salvage material usable from destroyed schools': 'Salvage material usable from destroyed schools', 'Sanitation problems': 'Sanitation problems', 'Satellite': 'Δορυφόρος', 'Satellite Office': 'Satellite Office', 'Saturday': 'Saturday', 'Save': 'Save', 'Save any Changes in the one you wish to keep': 'Αποθηκεύστε οποιεσδήποτε αλλαγές σε αυτό που επιθυμείτε να κρατήσετε', 'Saved.': 'Saved.', 'Saving...': 'Saving...', 'Scale of Results': 'Κλίμακα Αποτελεσμάτων', 'Schedule': 'Schedule', 'School': 'School', 'School Closure': 'School Closure', 'School Lockdown': 'School Lockdown', 'School Teacher': 'School Teacher', 'School assistance received/expected': 'School assistance received/expected', 'School destroyed': 'School destroyed', 'School heavily damaged': 'School heavily damaged', 'School tents received': 'School tents received', 'School tents, source': 'School tents, source', 'School used for other purpose': 'School used for other purpose', 'School/studying': 'School/studying', 'Schools': 'Schools', 'Search': 'Search', 'Search & List Bin Types': 'Search & List Bin Types', 'Search & List Bins': 'Search & List Bins', 'Search & List Catalog': 'Search & List Catalog', 'Search & List Category': 'Search & List Category', 'Search & List Items': 'Αναζήτησε και πρόβαλε αντικείμενα', 'Search & List Locations': 'Search & List Locations', 'Search & List Site': 'Search & List Site', 'Search & List Sub-Category': 'Search & List Sub-Category', 'Search & List Unit': 'Search & List Unit', 'Search Activities': 'Search Activities', 'Search Activity Report': 'Search Activity Report', 'Search Addresses': 'Search Addresses', 'Search Assessment Summaries': 'Αναζήτηση περιλήψεων αξιολόγησης', 'Search Assessments': 'Search Assessments', 'Search Baseline Type': 'Αναζήτηση Τύπου Βάσης Αναφοράς', 'Search Baselines': 'Search Baselines', 'Search Budgets': 'Search Budgets', 'Search Bundles': 'Search Bundles', 'Search Catalog Items': 'Αναζήτηση αντικειμένων καταλόγου', 'Search Category<>Sub-Category<>Catalog Relation': 'Search Category<>Sub-Category<>Catalog Relation', 'Search Checklists': 'Search Checklists', 'Search Cluster Subsectors': 'Search Cluster Subsectors', 'Search Configs': 'Search Configs', 'Search Contact Information': 'Search Contact Information', 'Search Contacts': 'Search Contacts', 'Search Distribution Items': 'Search Distribution Items', 'Search Distributions': 'Αναζήτηση Διανομών', 'Search Documents': 'Search Documents', 'Search Donors': 'Search Donors', 'Search Feature Class': 'Search Feature Class', 'Search Feature Layers': 'Αναζήτηση στα επίπεδα χαρακτηριστικών', 'Search Flood Reports': 'Search Flood Reports', 'Search Groups': 'Search Groups', 'Search Hospitals': 'Search Hospitals', 'Search Identity': 'Αναζήτηση ταυτότητας', 'Search Images': 'Search Images', 'Search Impact Type': 'Αναζήτηση Τύπου Επιπτώσεων', 'Search Impacts': 'Search Impacts', 'Search Incident Reports': 'Search Incident Reports', 'Search Incidents': 'Search Incidents', 'Search Item Catalog Category(s)': 'Search Item Catalog Category(s)', 'Search Item Catalog(s)': 'Αναζήτηση Καταλόγου(-ων) Αντικειμένων', 'Search Item Categories': 'Search Item Categories', 'Search Item Packets': 'Search Item Packets', 'Search Item Sub-Category(s)': 'Search Item Sub-Category(s)', 'Search Items': 'Search Items', 'Search Keys': 'Κλειδιά αναζήτησης', 'Search Kits': 'Search 
Kits', 'Search Layers': 'Search Layers', 'Search Locations': 'Search Locations', 'Search Log Entry': 'Search Log Entry', 'Search Markers': 'Search Markers', 'Search Member': 'Search Member', 'Search Membership': 'Αναζήτηση συμμετοχής', 'Search Memberships': 'Αναζήτηση μελών', 'Search Metadata': 'Αναζήτηση μεταδεδομένων', 'Search Need Type': 'Αναζήτηση τύπου αναγκών', 'Search Needs': 'Αναζήτηση αναγκών', 'Search Notes': 'Search Notes', 'Search Offices': 'Search Offices', 'Search Organizations': 'Search Organizations', 'Search Peer': 'Search Peer', 'Search Personal Effects': 'Search Personal Effects', 'Search Persons': 'Αναζήτηση Ατόμων', 'Search Photos': 'Search Photos', 'Search Positions': 'Search Positions', 'Search Problems': 'Search Problems', 'Search Projections': 'Search Projections', 'Search Projects': 'Search Projects', 'Search Rapid Assessments': 'Search Rapid Assessments', 'Search Received Items': 'Search Received Items', 'Search Received Shipments': 'Search Received Shipments', 'Search Records': 'Search Records', 'Search Recovery Reports': 'Search Recovery Reports', 'Search Registations': 'Search Registrations', 'Search Registration Request': 'Αναζήτηση αιτήματος εγγραφής', 'Search Report': 'Search Report', 'Search Reports': 'Search Reports', 'Search Request': 'Search Request', 'Search Request Items': 'Search Request Items', 'Search Requests': 'Search Requests', 'Search Resources': 'Search Resources', 'Search Responses': 'Search Responses', 'Search Rivers': 'Search Rivers', 'Search Roles': 'Search Roles', 'Search Sections': 'Search Sections', 'Search Sectors': 'Search Sectors', 'Search Sent Items': 'Search Sent Items', 'Search Sent Shipments': 'Search Sent Shipments', 'Search Service Profiles': 'Search Service Profiles', 'Search Settings': 'Search Settings', 'Search Shelter Services': 'Search Shelter Services', 'Search Shelter Types': 'Search Shelter Types', 'Search Shelters': 'Search Shelters', 'Search Shipment Transit Logs': 'Search Shipment Transit Logs', 'Search Shipment/Way Bills': 'Search Shipment/Way Bills', 'Search Shipment<>Item Relation': 'Αναζήτηση Σχέσης Αποστολής<>Αντικειμένου', 'Search Site(s)': 'Search Site(s)', 'Search Skill Types': 'Search Skill Types', 'Search Skills': 'Search Skills', 'Search Solutions': 'Search Solutions', 'Search Staff': 'Search Staff', 'Search Staff Types': 'Search Staff Types', 'Search Status': 'Search Status', 'Search Storage Bin Type(s)': 'Search Storage Bin Type(s)', 'Search Storage Bin(s)': 'Search Storage Bin(s)', 'Search Storage Location(s)': 'Αναζήτηση στις θέσεις Αποθήκευσης', 'Search Subscriptions': 'Αναζήτηση Συνδρομών', 'Search Tasks': 'Search Tasks', 'Search Teams': 'Search Teams', 'Search Themes': 'Αναζήτηση Θεμάτων', 'Search Tickets': 'Search Tickets', 'Search Tracks': 'Αναζήτηση Πορειών (γραμμών)', 'Search Twitter Tags': 'Ψάξε τα tags (ετικέτες) του Twitter', 'Search Units': 'Search Units', 'Search Users': 'Search Users', 'Search Volunteer Registrations': 'Search Volunteer Registrations', 'Search Volunteers': 'Search Volunteers', 'Search Warehouse Items': 'Search Warehouse Items', 'Search Warehouses': 'Search Warehouses', 'Search and Edit Group': 'Search and Edit Group', 'Search and Edit Individual': 'Αναζήτηση και επεξεργασία στοιχείων ατόμου', 'Search by ID Tag': 'Search by ID Tag', 'Search by Skill Types': 'Search by Skill Types', 'Search for Items': 'Search for Items', 'Search for a Hospital': 'Search for a Hospital', 'Search for a Person': 'Search for a Person', 'Search for a Project': 'Search for a Project', 'Search for a 
Request': 'Search for a Request', 'Search here for a person in order to:': 'Search here for a person in order to:', "Search here for a person's record in order to:": "Search here for a person's record in order to:", 'Search messages': 'Search messages', 'Searching for different groups and individuals': 'Searching for different groups and individuals', 'Secondary Server (Optional)': 'Secondary Server (Optional)', 'Seconds must be a number between 0 and 60': 'Τα δευτερόλεπτα πρέπει να είναι ένας αριθμός μεταξύ 0 και 60', 'Section Details': 'Λεπτομέρειες Τμήματος', 'Section deleted': 'Section deleted', 'Section updated': 'Section updated', 'Sections': 'Sections', 'Sector': 'Sector', 'Sector Details': 'Sector Details', 'Sector added': 'Sector added', 'Sector deleted': 'Sector deleted', 'Sector updated': 'Sector updated', 'Sectors': 'Sectors', 'Security Policy': 'Πολιτική Ασφαλείας', 'Security Status': 'Security Status', 'Security problems': 'Προβλήματα ασφαλείας', 'Seen': 'Seen', 'Select 2 potential locations from the dropdowns.': 'Select 2 potential locations from the dropdowns.', 'Select Items from this Warehouse': 'Select Items from this Warehouse', 'Select Photos': 'Select Photos', 'Select a location': 'Select a location', "Select a person in charge for status 'assigned'": "Select a person in charge for status 'assigned'", 'Select a question from the list': 'Επιλογή ερώτησης από λίστα', 'Select all that apply': 'Select all that apply', 'Select an Organization to see a list of offices': 'Select an Organization to see a list of offices', 'Select the overlays for Assessments and Activities relating to each Need to identify the gap.': 'Select the overlays for Assessments and Activities relating to each Need to identify the gap.', 'Select the person assigned to this role for this project.': 'Select the person assigned to this role for this project.', 'Select the person associated with this scenario.': 'Select the person associated with this scenario.', 'Selects whether to use a Modem, Tropo or other Gateway for sending out SMS': 'Selects whether to use a Modem, Tropo or other Gateway for sending out SMS', 'Self Registration': 'Self Registration', 'Self-registration': 'Self-registration', 'Send': 'Send', 'Send Alerts using Email &/or SMS': 'Send Alerts using Email &/or SMS', 'Send Mail': 'Send Mail', 'Send Notification': 'Send Notification', 'Send Shipment': 'Send Shipment', 'Send message': 'Send message', 'Send new message': 'Send new message', 'Sends & Receives Alerts via Email & SMS': 'Sends & Receives Alerts via Email & SMS', 'Senior (50+)': 'Senior (50+)', 'Sensitivity': 'Ευαισθησία', 'Sent': 'Sent', 'Sent Item': 'Sent Item', 'Sent Item Details': 'Sent Item Details', 'Sent Item added': 'Sent Item added', 'Sent Item deleted': 'Sent Item deleted', 'Sent Item updated': 'Sent Item updated', 'Sent Items': 'Sent Items', 'Sent Shipment Details': 'Sent Shipment Details', 'Sent Shipment canceled': 'Sent Shipment canceled', 'Sent Shipment updated': 'Sent Shipment updated', 'Sent Shipments': 'Sent Shipments', 'Separate latrines for women and men': 'Separate latrines for women and men', 'Seraiki': 'Seraiki', 'Series': 'Σειρά', 'Server': 'Server', 'Service': 'Service', 'Service Catalogue': 'Service Catalogue', 'Service or Facility': 'Υπηρεσία ή Εγκατάσταση', 'Service profile added': 'Προφίλ Υπηρεσίας προστέθηκε', 'Service profile deleted': 'Service profile deleted', 'Service profile updated': 'Service profile updated', 'Services': 'Services', 'Services Available': 'Διαθέσιμες Υπηρεσίες', 'Setting 
Details': 'Setting Details', 'Setting added': 'Ρύθμιση προστέθηκε', 'Setting deleted': 'Setting deleted', 'Setting updated': 'Setting updated', 'Settings': 'Ρυθμίσεις', 'Settings updated': 'Settings updated', 'Settings were reset because authenticating with Twitter failed': 'Settings were reset because authenticating with Twitter failed', 'Severity': 'Severity', 'Severity:': 'Severity:', 'Share a common Marker (unless over-ridden at the Feature level)': 'Μοιράζονται ένα κοινό Marker (εκτός αν παρακάμπτεται σε επίπεδο Feature)', 'Shelter': 'Κατάλυμα', 'Shelter & Essential NFIs': 'Shelter & Essential NFIs', 'Shelter Details': 'Shelter Details', 'Shelter Name': 'Shelter Name', 'Shelter Registry': 'Καταγραφή Καταλύματος', 'Shelter Service': 'Shelter Service', 'Shelter Service Details': 'Λεπτομέρειες υπηρεσιών καταφυγίου', 'Shelter Service added': 'Shelter Service added', 'Shelter Service deleted': 'Shelter Service deleted', 'Shelter Service updated': 'Shelter Service updated', 'Shelter Services': 'Υπηρεσίες Καταφυγίων', 'Shelter Type': 'Shelter Type', 'Shelter Type Details': 'Shelter Type Details', 'Shelter Type added': 'Shelter Type added', 'Shelter Type deleted': 'Shelter Type deleted', 'Shelter Type updated': 'Shelter Type updated', 'Shelter Types': 'Shelter Types', 'Shelter Types and Services': 'Shelter Types and Services', 'Shelter added': 'Shelter added', 'Shelter deleted': 'Shelter deleted', 'Shelter updated': 'Shelter updated', 'Shelter/NFI assistance received/expected': 'Shelter/NFI assistance received/expected', 'Shelters': 'Shelters', 'Shipment Created': 'Shipment Created', 'Shipment Details': 'Shipment Details', 'Shipment Items': 'Shipment Items', 'Shipment Received': 'Shipment Received', 'Shipment Sent': 'Shipment Sent', 'Shipment Transit Log Details': 'Shipment Transit Log Details', 'Shipment Transit Log added': 'Shipment Transit Log added', 'Shipment Transit Log deleted': 'Shipment Transit Log deleted', 'Shipment Transit Log updated': 'Shipment Transit Log updated', 'Shipment Transit Logs': 'Shipment Transit Logs', 'Shipment/Way Bill added': 'Shipment/Way Bill added', 'Shipment/Way Bills': 'Shipment/Way Bills', 'Shipment/Way Bills Details': 'Shipment/Way Bills Details', 'Shipment/Way Bills deleted': 'Shipment/Way Bills deleted', 'Shipment/Way Bills updated': 'Shipment/Way Bills updated', 'Shipment<>Item Relation added': 'Shipment<>Item Relation added', 'Shipment<>Item Relation deleted': 'Shipment<>Item Relation deleted', 'Shipment<>Item Relation updated': 'Shipment<>Item Relation updated', 'Shipment<>Item Relations': 'Shipment<>Item Relations', 'Shipment<>Item Relations Details': 'Shipment<>Item Relations Details', 'Shipments': 'Shipments', 'Shipments To': 'Αποστολή προς', 'Shooting': 'Shooting', 'Short Assessment': 'Short Assessment', 'Short Description': 'Short Description', 'Show Checklist': 'Show Checklist', 'Show on map': 'Εμφάνιση στο χάρτη', 'Sindhi': 'Sindhi', 'Site': 'Site', 'Site Address': 'Site Address', 'Site Administration': 'Site Administration', 'Site Description': 'Site Description', 'Site Details': 'Site Details', 'Site ID': 'Site ID', 'Site Location Description': 'Site Location Description', 'Site Location Name': 'Site Location Name', 'Site Manager': 'Site Manager', 'Site Name': 'Site Name', 'Site added': 'Site added', 'Site deleted': 'Περιοχή διαγράφηκε', 'Site updated': 'Site updated', 'Site/Warehouse': 'Site/Warehouse', 'Sites': 'Sites', 'Situation Awareness & Geospatial Analysis': 'Situation Awareness & Geospatial Analysis', 'Sketch': 'Sketch', 'Skill': 
'Skill', 'Skill Details': 'Skill Details', 'Skill Status': 'Skill Status', 'Skill Type Details': 'Skill Type Details', 'Skill Type added': 'Τύπος προσόντων προστέθηκε', 'Skill Type deleted': 'Skill Type deleted', 'Skill Type updated': 'Skill Type updated', 'Skill Types': 'Skill Types', 'Skill added': 'Skill added', 'Skill deleted': 'Skill deleted', 'Skill updated': 'Skill updated', 'Skills': 'Skills', 'Skype ID': 'Skype ID', 'Small Trade': 'Small Trade', 'Smoke': 'Smoke', 'Snow Fall': 'Snow Fall', 'Snow Squall': 'Snow Squall', 'Solid waste': 'Στερεά απόβλητα', 'Solution': 'Solution', 'Solution Details': 'Solution Details', 'Solution Item': 'Solution Item', 'Solution added': 'Solution added', 'Solution deleted': 'Solution deleted', 'Solution updated': 'Solution updated', 'Solutions': 'Solutions', 'Some': 'Some', 'Sorry - the server has a problem, please try again later.': 'Sorry - the server has a problem, please try again later.', 'Sorry that location appears to be outside the area of the Parent.': 'Sorry that location appears to be outside the area of the Parent.', 'Sorry that location appears to be outside the area supported by this deployment.': 'Sorry that location appears to be outside the area supported by this deployment.', 'Sorry, I could not understand your request': 'Sorry, I could not understand your request', 'Sorry, only users with the MapAdmin role are allowed to edit these locations': 'Sorry, only users with the MapAdmin role are allowed to edit these locations', 'Sorry, something went wrong.': 'Sorry, something went wrong.', 'Sorry, that page is forbidden for some reason.': 'Sorry, that page is forbidden for some reason.', 'Sorry, that service is temporary unavailable.': 'Sorry, that service is temporarily unavailable.', 'Sorry, there are no addresses to display': 'Συγγνώμη, δεν υπάρχουν διευθύνσεις για προβολή.', "Sorry, things didn't get done on time.": "Sorry, things didn't get done on time.", "Sorry, we couldn't find that page.": "Sorry, we couldn't find that page.", 'Source': 'Source', 'Source ID': 'ID Πηγής', 'Source Time': 'Source Time', 'Source Type': 'Source Type', 'Space Debris': 'Space Debris', 'Spanish': 'Spanish', 'Special Ice': 'Special Ice', 'Special Marine': 'Special Marine', 'Special needs': 'Ειδικές ανάγκες', 'Specialized Hospital': 'Specialized Hospital', 'Specific Area (e.g. Building/Room) within the Location that this Person/Group is seen.': 'Specific Area (e.g. 
Building/Room) within the Location that this Person/Group is seen.', 'Specific locations need to have a parent of level': 'Specific locations need to have a parent of level', 'Specify a descriptive title for the image.': 'Specify a descriptive title for the image.', 'Specify the bed type of this unit.': 'Specify the bed type of this unit.', 'Specify the minimum sustainability in weeks or days.': 'Specify the minimum sustainability in weeks or days.', 'Specify the number of available sets': 'Specify the number of available sets', 'Specify the number of available units (adult doses)': 'Specify the number of available units (adult doses)', 'Specify the number of available units (litres) of Ringer-Lactate or equivalent solutions': 'Specify the number of available units (litres) of Ringer-Lactate or equivalent solutions', 'Specify the number of sets needed per 24h': 'Specify the number of sets needed per 24h', 'Specify the number of units (adult doses) needed per 24h': 'Specify the number of units (adult doses) needed per 24h', 'Specify the number of units (litres) of Ringer-Lactate or equivalent solutions needed per 24h': 'Specify the number of units (litres) of Ringer-Lactate or equivalent solutions needed per 24h', 'Spherical Mercator?': 'Spherical Mercator?', 'Spreadsheet Importer': 'Spreadsheet Importer', 'Spreadsheet uploaded': 'Spreadsheet uploaded', 'Spring': 'Spring', 'Squall': 'Squall', 'Staff': 'Staff', 'Staff 2': 'Staff 2', 'Staff Details': 'Staff Details', 'Staff Type Details': 'Staff Type Details', 'Staff Type added': 'Staff Type added', 'Staff Type deleted': 'Τύπος προσωπικού διαγράφηκε', 'Staff Type updated': 'Staff Type updated', 'Staff Types': 'Staff Types', 'Staff added': 'Staff added', 'Staff deleted': 'Προσωπικό διαγράφηκε', 'Staff present and caring for residents': 'Staff present and caring for residents', 'Staff updated': 'Staff updated', 'Staffing': 'Staffing', 'Start date': 'Ημερομηνία Έναρξης', 'Start of Period': 'Start of Period', 'State': 'State', 'Stationery': 'Stationery', 'Status': 'Status', 'Status Report': 'Status Report', 'Status added': 'Status added', 'Status deleted': 'Status deleted', 'Status of clinical operation of the facility.': 'Status of clinical operation of the facility.', 'Status of general operation of the facility.': 'Status of general operation of the facility.', 'Status of morgue capacity.': 'Status of morgue capacity.', 'Status of operations of the emergency department of this hospital.': 'Επιχειρησιακή κατάσταση του τμήματος επειγόντων περιστατικών του Νοσοκομείου', 'Status of security procedures/access restrictions in the hospital.': 'Status of security procedures/access restrictions in the hospital.', 'Status of the operating rooms of this hospital.': 'Status of the operating rooms of this hospital.', 'Status updated': 'Status updated', 'Storage Bin': 'Storage Bin', 'Storage Bin Details': 'Storage Bin Details', 'Storage Bin Number': 'Storage Bin Number', 'Storage Bin Type': 'Storage Bin Type', 'Storage Bin Type Details': 'Storage Bin Type Details', 'Storage Bin Type added': 'Storage Bin Type added', 'Storage Bin Type deleted': 'Storage Bin Type deleted', 'Storage Bin Type updated': 'Storage Bin Type updated', 'Storage Bin Types': 'Storage Bin Types', 'Storage Bin added': 'Storage Bin added', 'Storage Bin deleted': 'Storage Bin deleted', 'Storage Bin updated': 'Storage Bin updated', 'Storage Bins': 'Storage Bins', 'Storage Location': 'Storage Location', 'Storage Location Details': 'Storage Location Details', 'Storage Location ID': 'Storage 
Location ID', 'Storage Location Name': 'Storage Location Name', 'Storage Location added': 'Storage Location added', 'Storage Location deleted': 'Storage Location deleted', 'Storage Location updated': 'Storage Location updated', 'Storage Locations': 'Storage Locations', 'Store spreadsheets in the Eden database': 'Store spreadsheets in the Eden database', 'Storm Force Wind': 'Άνεμοι καταιγίδας', 'Storm Surge': 'Storm Surge', 'Stowaway': 'Stowaway', 'Street': 'Street', 'Street (continued)': 'Street (continued)', 'Street Address': 'Street Address', 'Strong Wind': 'Strong Wind', 'Sub Category': 'Sub Category', 'Sub-type': 'Sub-type', 'Subject': 'Subject', 'Submission successful - please wait': 'Submission successful - please wait', 'Submission successful - please wait...': 'Submission successful - please wait...', 'Submit': 'Submit', 'Subscription Details': 'Subscription Details', 'Subscription added': 'Subscription added', 'Subscription deleted': 'Subscription deleted', 'Subscription updated': 'Subscription updated', 'Subscriptions': 'Εγγραφές - Συνδρομές', 'Subsistence Cost': 'Κόστος Διαβίωσης', 'Sufficient care/assistance for chronically ill': 'Sufficient care/assistance for chronically ill', 'Suggest not changing this field unless you know what you are doing.': 'Μην αλλάζετε αυτό το πεδίο εκτός αν γνωρίζετε επακριβώς τι κάνετε.', 'Summary': 'Summary', 'Sunday': 'Sunday', 'Support Request': 'Αίτημα (αναζήτηση) υποστήριξης', 'Supports the decision making of large groups of Crisis Management Experts by helping the groups create ranked list.': 'Supports the decision making of large groups of Crisis Management Experts by helping the groups create ranked list.', 'Sure you want to delete this object?': 'Sure you want to delete this object?', 'Surgery': 'Surgery', 'Survey Answer': 'Survey Answer', 'Survey Answer Details': 'Survey Answer Details', 'Survey Answer added': 'Survey Answer added', 'Survey Answer deleted': 'Survey Answer deleted', 'Survey Answer updated': 'Survey Answer updated', 'Survey Module': 'Survey Module', 'Survey Name': 'Όνομα έρευνας', 'Survey Question': 'Survey Question', 'Survey Question Details': 'Survey Question Details', 'Survey Question Display Name': 'Survey Question Display Name', 'Survey Question added': 'Survey Question added', 'Survey Question deleted': 'Survey Question deleted', 'Survey Question updated': 'Survey Question updated', 'Survey Section': 'Survey Section', 'Survey Section Details': 'Survey Section Details', 'Survey Section Display Name': 'Survey Section Display Name', 'Survey Section added': 'Survey Section added', 'Survey Section deleted': 'Survey Section deleted', 'Survey Section updated': 'Το Τμήμα Έρευνας ενημερώθηκε', 'Survey Series': 'Survey Series', 'Survey Series Details': 'Survey Series Details', 'Survey Series Name': 'Survey Series Name', 'Survey Series added': 'Survey Series added', 'Survey Series deleted': 'Survey Series deleted', 'Survey Series updated': 'Σειρά Ερευνών ανανεώθηκε', 'Survey Template': 'Survey Template', 'Survey Template Details': 'Survey Template Details', 'Survey Template added': 'Προστέθηκε πρότυπο έρευνας', 'Survey Template deleted': 'Survey Template deleted', 'Survey Template updated': 'Survey Template updated', 'Survey Templates': 'Survey Templates', 'Switch this on to use individual CSS/Javascript files for diagnostics during development.': 'Switch this on to use individual CSS/Javascript files for diagnostics during development.', 'Symbology': 'Symbology', 'Sync Conflicts': 'Sync Conflicts', 'Sync History': 
'Sync History', 'Sync Now': 'Συγχρονίστε τώρα.', 'Sync Partners': 'Sync Partners', 'Sync Partners are instances or peers (SahanaEden, SahanaAgasti, Ushahidi, etc.) that you want to sync information with. Click on the link on the right to go the page where you can add sync partners, search for sync partners and modify them.': 'Οι συγχρονιζόμενοι συνεργάτες είναι στιγμιότυπα ή peers (SahanaEden, SahanaAgasti, Ushahidi, etc.) με τους οποίους θέλεις να συγχρονίσεις πληροφορίες. Πατήστε στο σύνδεσμο στα δεξιά για να πάτε στη σελίδα όπου μπορείτε να προσθέσετε συγχρονιζόμενους συνεργάτες, να αναζητήσετε συγχρονιζόμενους συνεργάτες και να τους τροποποιήσετε.', 'Sync Pools': 'Sync Pools', 'Sync Schedule': 'Sync Schedule', 'Sync Settings': 'Sync Settings', 'Sync process already started on ': 'Sync process already started on ', 'Synchronisation': 'Synchronisation', 'Synchronization': 'Synchronization', 'Synchronization Conflicts': 'Synchronization Conflicts', 'Synchronization Details': 'Synchronization Details', 'Synchronization History': 'Ιστορικό Συγχρονισμού', 'Synchronization Peers': 'Synchronization Peers', 'Synchronization Settings': 'Synchronization Settings', 'Synchronization allows you to share data that you have with others and update your own database with latest data from other peers. This page provides you with information about how to use the synchronization features of Sahana Eden': 'Synchronization allows you to share data that you have with others and update your own database with latest data from other peers. This page provides you with information about how to use the synchronization features of Sahana Eden', 'Synchronization not configured.': 'Synchronization not configured.', 'Synchronization settings updated': 'Synchronization settings updated', 'Syncronisation History': 'Ιστορικό συγχρονισμού', 'System allows the General Public to Report Incidents & have these Tracked.': 'System allows the General Public to Report Incidents & have these Tracked.', 'System allows the tracking & discovery of Items stored in Locations.': 'System allows the tracking & discovery of Items stored in Locations.', 'System is a central online repository where all relief organizations, relief workers, government agents and camp sites for displaced personnel can coordinate the supply of aid with their demand. It allows users to allocate the available resources to fulfill the demands effectively and efficiently.': 'System is a central online repository where all relief organizations, relief workers, government agents and camp sites for displaced personnel can coordinate the supply of aid with their demand. It allows users to allocate the available resources to fulfill the demands effectively and efficiently.', 'System keeps track of all Volunteers working in the disaster region. It captures not only the places where they are active, but also captures information on the range of services they are providing in each area.': 'Η εφαρμογή παρακολουθεί τους εθελοντές που επιχειρούν στα πεδία των συμβάντων. 
Εκτός από τις περιοχές των ενεργών συμβάντων καταγράφεται το φάσμα και το είδος των παρερχόμενων υπηρεσιών σε κάθε περιοχή', "System's Twitter account updated": 'Λογαριασμός Twitter του Συστήματος ενημέρωθηκε', 'Table name': 'Table name', 'Tags': 'Tags', 'Take shelter in place or per <instruction>': 'Take shelter in place or per <instruction>', 'Task Details': 'Task Details', 'Task List': 'Task List', 'Task Status': 'Task Status', 'Task added': 'Καθήκον προστέθηκε', 'Task deleted': 'Εργασία Διαγράφηκε', 'Task status': 'Task status', 'Task updated': 'Η εργασία ενημερώθηκε', 'Tasks': 'Tasks', 'Team': 'Team', 'Team Description': 'Team Description', 'Team Details': 'Team Details', 'Team Head': 'Επικεφαλής Ομάδος', 'Team Id': 'Team Id', 'Team Leader': 'Αρχηγός ομάδος', 'Team Member added': 'Team Member added', 'Team Members': 'Team Members', 'Team Name': 'Team Name', 'Team Type': 'Τύπος Ομάδας', 'Team added': 'Team added', 'Team deleted': 'Team deleted', 'Team updated': 'Team updated', 'Teams': 'Teams', 'Technical testing only, all recipients disregard': 'Technical testing only, all recipients disregard', 'Telecommunications': 'Telecommunications', 'Telephone': 'Telephone', 'Telephony': 'Τηλεφωνία', 'Temp folder %s not writable - unable to apply theme!': 'Temp folder %s not writable - unable to apply theme!', 'Template file %s not readable - unable to apply theme!': 'Template file %s not readable - unable to apply theme!', 'Templates': 'Templates', 'Terrorism': 'Terrorism', 'Tertiary Server (Optional)': 'Tertiary Server (Optional)', 'Test Results': 'Test Results', 'Text': 'Κείμενο', 'Text Colour for Text blocks': 'Text Colour for Text blocks', 'Text before each Text Field (One per line)': 'Text before each Text Field (One per line)', 'Text in Message': 'Text in Message', 'Text in Message: ': 'Text in Message: ', 'Thanks for your assistance': 'Thanks for your assistance', 'The': 'The', 'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1 == db.table2.field2" results in a SQL JOIN.': 'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1 == db.table2.field2" results in a SQL JOIN.', 'The Area which this Site is located within.': 'Η ευρήτερη περιοχή όπου η συγκεκριμένη θέση βρίσκεται.', 'The Assessments module allows field workers to send in assessments.': 'The Assessments module allows field workers to send in assessments.', 'The Author of this Document (optional)': 'The Author of this Document (optional)', 'The Current Location of the Person, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations.': 'The Current Location of the Person, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations.', 'The Current Location of the Person/Group, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations.': 'The Current Location of the Person/Group, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations.', "The Donor(s) for this project. Multiple values can be selected by holding down the 'Control' key.": "The Donor(s) for this project. 
Multiple values can be selected by holding down the 'Control' key.", 'The Group whose members can edit data in this record.': 'The Group whose members can edit data in this record.', 'The Incident Reporting System allows the General Public to Report Incidents & have these Tracked.': 'The Incident Reporting System allows the General Public to Report Incidents & have these Tracked.', 'The Location of this Site, which can be general (for Reporting) or precise (for displaying on a Map).': 'The Location of this Site, which can be general (for Reporting) or precise (for displaying on a Map).', 'The Location the Person has come from, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations.': 'The Location the Person has come from, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations.', 'The Location the Person is going to, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations.': 'The Location the Person is going to, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations.', 'The Media Library provides a catalogue of digital media.': 'The Media Library provides a catalogue of digital media.', 'The Messaging Module is the main communications hub of the Sahana system. It is used to send alerts and/or messages using SMS & Email to various groups and individuals before, during and after a disaster.': 'The Messaging Module is the main communications hub of the Sahana system. It is used to send alerts and/or messages using SMS & Email to various groups and individuals before, during and after a disaster.', 'The Office this record is associated with.': 'The Office this record is associated with.', 'The Organization Registry keeps track of all the relief organizations working in the disaster region. It captures not only the places where they are active, but also captures information on the range of projects they are providing in each area.': 'The Organization Registry keeps track of all the relief organizations working in the disaster region. It captures not only the places where they are active, but also captures information on the range of projects they are providing in each area.', 'The Organization this record is associated with.': 'Ο οργανισμός με τον οποίο αυτή ή εγγραφή είναι συσχετισμένη', 'The Organization which is funding this Activity.': 'The Organization which is funding this Activity.', 'The Project Tracking module allows the creation of Activities to meet Gaps in Needs Assessments.': 'The Project Tracking module allows the creation of Activities to meet Gaps in Needs Assessments.', 'The Request this record is associated with.': 'The Request this record is associated with.', 'The Role this person plays within this Office/Project.': 'The Role this person plays within this Office/Project.', 'The Role this person plays within this hospital.': 'The Role this person plays within this hospital.', 'The Shelter Registry tracks all shelters and stores basic details regarding them. It collaborates with other modules to track people associated with a shelter, the services available etc.': 'The Shelter Registry tracks all shelters and stores basic details regarding them. 
It collaborates with other modules to track people associated with a shelter, the services available etc.', 'The Shelter this Request is from (optional).': 'The Shelter this Request is from (optional).', 'The URL for the GetCapabilities of a WMS Service whose layers you want accessible via the Map.': 'The URL for the GetCapabilities of a WMS Service whose layers you want accessible via the Map.', "The URL of the image file. If you don't upload an image file, then you must specify its location here.": "The URL of the image file. If you don't upload an image file, then you must specify its location here.", 'The URL of your web gateway without the post parameters': 'The URL of your web gateway without the post parameters', 'The URL to access the service.': 'The URL to access the service.', 'The Unique Identifier (UUID) as assigned to this facility by the government.': 'The Unique Identifier (UUID) as assigned to this facility by the government.', 'The attribute within the KML which is used for the title of popups.': 'The attribute within the KML which is used for the title of popups.', 'The attribute(s) within the KML which are used for the body of popups. (Use a space between attributes)': 'The attribute(s) within the KML which are used for the body of popups. (Use a space between attributes)', 'The body height (crown to heel) in cm.': 'Ύψος Σώματος σε εκατοστά', 'The category of the Item.': 'The category of the Item.', 'The contact person for this organization.': 'Άτομο για επικοινωνία για αυτόν τον οργανισμό', 'The country the person usually lives in.': 'The country the person usually lives in.', 'The duplicate record will be deleted': 'The duplicate record will be deleted', 'The entered unit links to this unit. For e.g. if you are entering m for meter then choose kilometer(if it exists) and enter the value 0.001 as multiplicator.': 'The entered unit links to this unit. For e.g. if you are entering m for meter then choose kilometer(if it exists) and enter the value 0.001 as multiplicator.', 'The first or only name of the person (mandatory).': 'The first or only name of the person (mandatory).', 'The hospital this record is associated with.': 'Το νοσοκομείο με το οποίο αυτή η εγγραφή είναι συσχετιμσένη', 'The item is designated to be sent for specific project, population, village or other earmarking of the donation such as a Grant Code.': 'The item is designated to be sent for specific project, population, village or other earmarking of the donation such as a Grant Code.', 'The language to use for notifications.': 'The language to use for notifications.', 'The last known location of the missing person before disappearance.': 'The last known location of the missing person before disappearance.', 'The list of Item categories are maintained by the Administrators.': 'Αυτός ο κατάλογος κατηγοριών αντικειμένων διατηρείται από τους διαχειριστές', 'The name to be used when calling for or directly addressing the person (optional).': 'The name to be used when calling for or directly addressing the person (optional).', 'The next screen will allow you to detail the number of people here & their needs.': 'The next screen will allow you to detail the number of people here & their needs.', 'The next screen will allow you to enter a detailed list of items and quantities, if appropriate...': 'The next screen will allow you to enter a detailed list of items and quantities, if appropriate...', 'The number of tiles around the visible map to download. 
Zero means that the 1st page loads faster, higher numbers mean subsequent panning is faster.': 'Ο αριθμός των αρχείων κοντά στον εμφανιζόμενο χάρτη για μεταφόρτωση. Το μεδέν σημαίνει ότι η πρώτη σελίδα φορτώνεται γρηγορότερα, οι μεγαλύτεροι αριθμοί σημαίνουν ότι η παραπέρα μετακίνηση του χάρτη είναι γρηγορότερη.', 'The person at the location who is reporting this incident (optional)': 'The person at the location who is reporting this incident (optional)', 'The person reporting about the missing person.': 'The person reporting about the missing person.', 'The person reporting the missing person.': 'The person reporting the missing person.', "The person's manager within this Office/Project.": 'Ο διευτθυντής του ατόμου στο γραφείο του (ή σε έργο του)', 'The post variable containing the phone number': 'The post variable containing the phone number', 'The post variable on the URL used for sending messages': 'Η μεταβλητή "post" στο URL που χρησιμοποιείται για την αποστολή μηνυμάτων', 'The post variables other than the ones containing the message and the phone number': 'The post variables other than the ones containing the message and the phone number', 'The serial port at which the modem is connected - /dev/ttyUSB0, etc on linux and com1, com2, etc on Windows': 'The serial port at which the modem is connected - /dev/ttyUSB0, etc on linux and com1, com2, etc on Windows', 'The server did not receive a timely response from another server that it was accessing to fill the request by the browser.': 'The server did not receive a timely response from another server that it was accessing to fill the request by the browser.', 'The server received an incorrect response from another server that it was accessing to fill the request by the browser.': 'The server received an incorrect response from another server that it was accessing to fill the request by the browser.', 'The simple policy allows anonymous users to Read & registered users to Edit. The full security policy allows the administrator to set permissions on individual tables or records - see models/zzz.py.': 'Η "απλή" (simple) πολιτική επιτρέπει σε ανώνυμους χρήστες να διαβάζουν και σε εγγεγραμμένους χρήστες να επεξεργάζονται. 
Η πολιτική πλήρους ασφαλείας επιτρέπει στο διαχειριστή να θέτει "αρμοδιότητες" (permissions) σε συγκεκριμένους πίνακες ή εγγραφές - δείτε models/zzz.py', 'The subject event no longer poses a threat or concern and any follow on action is described in <instruction>': 'The subject event no longer poses a threat or concern and any follow on action is described in <instruction>', 'The title of the WMS Browser panel in the Tools panel.': 'The title of the WMS Browser panel in the Tools panel.', 'The token associated with this application on': 'The token associated with this application on', 'The unique identifier which identifies this instance to other instances.': 'The unique identifier which identifies this instance to other instances.', 'The weight in kg.': 'The weight in kg.', 'Theme': 'Θέμα', 'Theme Details': 'Theme Details', 'Theme added': 'Theme added', 'Theme deleted': 'Theme deleted', 'Theme updated': 'Theme updated', 'Themes': 'Themes', 'There are errors': 'There are errors', 'There are multiple records at this location': 'There are multiple records at this location', 'There are not sufficient items in the store to send this shipment': 'There are not sufficient items in the store to send this shipment', 'There was a problem, sorry, please try again later.': 'There was a problem, sorry, please try again later.', 'These are settings for Inbound Mail.': 'Αυτές είναι οι ρυθμίσεις για εισερχόμενη αλληογγραφία (Mail)', 'These are the Incident Categories visible to normal End-Users': 'These are the Incident Categories visible to normal End-Users', 'These are the default settings for all users. To change settings just for you, click ': 'These are the default settings for all users. To change settings just for you, click ', 'They': 'Αυτοί', 'This appears to be a duplicate of ': 'This appears to be a duplicate of ', 'This file already exists on the server as': 'This file already exists on the server as', 'This form allows the administrator to remove a duplicate location.': 'Αυτή η φόρμα επιτρέπει στο διαχειριστή να διαγράψει διπλή τοποθεσία.', 'This is the way to transfer data between machines as it maintains referential integrity.': 'Αυτός είναι ο τρόπος για τη μεταφορά δεδομένων μεταξύ υπολογσιτών, καθώς διατηρεί τη σχεσιακή ακαιρεαιότητα των δεδομένων', 'This is the way to transfer data between machines as it maintains referential integrity...duplicate data should be removed manually 1st!': 'This is the way to transfer data between machines as it maintains referential integrity...duplicate data should be removed manually 1st!', 'This might be due to a temporary overloading or maintenance of the server.': 'This might be due to a temporary overloading or maintenance of the server.', 'This page shows you logs of past syncs. Click on the link below to go to this page.': 'This page shows you logs of past syncs. 
Click on the link below to go to this page.', 'This screen allows you to upload a collection of photos to the server.': 'Αυτή η οθόνη σου επιτρέπει να μεταφορτώσεις μία συλλογή φωτογραφιών στο server.', 'Thunderstorm': 'Thunderstorm', 'Thursday': 'Thursday', 'Ticket': 'Ticket', 'Ticket Details': 'Λεπτομέρειες "εισητηρίου"', 'Ticket added': 'Ticket added', 'Ticket deleted': 'To εισητήριο διαγράφηκε', 'Ticket updated': 'Ticket updated', 'Ticketing Module': 'Ticketing Module', 'Tickets': 'Tickets', 'Time needed to collect water': 'Time needed to collect water', 'Time of Request': 'Time of Request', 'Timestamp': 'Timestamp', 'Title': 'Title', 'To Location': 'To Location', 'To begin the sync process, click the button on the right => ': 'Για να ξεκινήσετε τη διαδικασία συγχρονισμού πατήστε το κουμπί στα δεξιά =>', 'To begin the sync process, click this button => ': 'To begin the sync process, click this button => ', 'To delete': 'To delete', 'To edit OpenStreetMap, you need to edit the OpenStreetMap settings in models/000_config.py': 'Για να κάνετε αλλαγές στο OpenStreetMap, πρέπει να επεξεργαστείτε το OpenStreetMap settings στο models/000_config.py', "To search for a body, enter the ID label of the body. You may use % as wildcard. Press 'Search' without input to list all bodies.": "To search for a body, enter the ID label of the body. You may use % as wildcard. Press 'Search' without input to list all bodies.", "To search for a body, enter the ID tag number of the body. You may use % as wildcard. Press 'Search' without input to list all bodies.": "To search for a body, enter the ID tag number of the body. You may use % as wildcard. Press 'Search' without input to list all bodies.", "To search for a hospital, enter any of the names or IDs of the hospital, separated by spaces. You may use % as wildcard. Press 'Search' without input to list all hospitals.": "To search for a hospital, enter any of the names or IDs of the hospital, separated by spaces. You may use % as wildcard. Press 'Search' without input to list all hospitals.", "To search for a hospital, enter any part of the name or ID. You may use % as wildcard. Press 'Search' without input to list all hospitals.": "To search for a hospital, enter any part of the name or ID. You may use % as wildcard. Press 'Search' without input to list all hospitals.", "To search for a location, enter the name. You may use % as wildcard. Press 'Search' without input to list all locations.": "To search for a location, enter the name. You may use % as wildcard. Press 'Search' without input to list all locations.", "To search for a person, enter any of the first, middle or last names and/or an ID number of a person, separated by spaces. You may use % as wildcard. Press 'Search' without input to list all persons.": 'Για την αναζήτηση ενός ατόμου, εισάγετε οποιοδήποτε από τα, όνομα, επίθετο, δεύτερο όνομα, αριθμό ταυτότητας, χωρισμένα με κενά. Μπορείτε να χρησιμοποιήσετε παραμέτρους αναζήτησης όπως το %. Πατήστε "Αναζήτηση" χωρίς καμία εισαγωγή για να δείτε όλες τις εγγραφές. ', "To search for a request, enter some of the text that you are looking for. You may use % as wildcard. Press 'Search' without input to list all requests.": "To search for a request, enter some of the text that you are looking for. You may use % as wildcard. 
Press 'Search' without input to list all requests.", 'To submit a new job, use the': 'To submit a new job, use the', 'To variable': 'Σε μεταβλητή', 'Tools': 'Tools', 'Tornado': 'Σίφουνας', 'Total # of Target Beneficiaries': 'Συνολικός αριθμός στοχευμένων δικαιούχων', 'Total # of households of site visited': 'Total # of households of site visited', 'Total Beds': 'Total Beds', 'Total Beneficiaries': 'Total Beneficiaries', 'Total Cost per Megabyte': 'Total Cost per Megabyte', 'Total Cost per Minute': 'Total Cost per Minute', 'Total Monthly': 'Total Monthly', 'Total Monthly Cost': 'Total Monthly Cost', 'Total Monthly Cost: ': 'Total Monthly Cost: ', 'Total One-time Costs': 'Total One-time Costs', 'Total Persons': 'Total Persons', 'Total Recurring Costs': 'Συνολικά κόστη Στρατολόγησης', 'Total Unit Cost': 'Total Unit Cost', 'Total Unit Cost: ': 'Total Unit Cost: ', 'Total Units': 'Total Units', 'Total number of beds in this hospital. Automatically updated from daily reports.': 'Total number of beds in this hospital. Automatically updated from daily reports.', 'Total number of houses in the area': 'Συνολικός αριθμός κατοικιών στη περιοχή', 'Total number of schools in affected area': 'Total number of schools in affected area', 'Total population of site visited': 'Total population of site visited', 'Totals for Budget:': 'Totals for Budget:', 'Totals for Bundle:': 'Totals for Bundle:', 'Totals for Kit:': 'Totals for Kit:', 'Tourist Group': 'Tourist Group', 'Town': 'Town', 'Traces internally displaced people (IDPs) and their needs': 'Traces internally displaced people (IDPs) and their needs', 'Tracing': 'Tracing', 'Track': 'Φορτηγό', 'Track Details': 'Track Details', 'Track deleted': 'Track deleted', 'Track updated': 'Track updated', 'Track uploaded': 'Διαδρομή μεταφορτώθηκε', 'Tracking of Projects, Activities and Tasks': 'Tracking of Projects, Activities and Tasks', 'Tracking of basic information on the location, facilities and size of the Shelters': 'Tracking of basic information on the location, facilities and size of the Shelters', 'Tracks': 'Tracks', 'Tracks requests for aid and matches them against donors who have pledged aid': 'Tracks requests for aid and matches them against donors who have pledged aid', 'Tracks the location, distibution, capacity and breakdown of victims in Shelters': 'Tracks the location, distibution, capacity and breakdown of victims in Shelters', 'Traffic Report': 'Traffic Report', 'Transfer': 'Transfer', 'Transit': 'Transit', 'Transition Effect': 'Transition Effect', 'Transparent?': 'Transparent?', 'Transportation assistance, Rank': 'Transportation assistance, Rank', 'Trauma Center': 'Trauma Center', 'Travel Cost': 'Travel Cost', 'Tree': 'Tree', 'Tropical Storm': 'Τροπική καταιγίδα', 'Tropo Messaging Token': 'Tropo Messaging Token', 'Tropo Settings': 'Tropo Settings', 'Tropo Voice Token': 'Tropo Voice Token', 'Tropo settings updated': 'Αναθεωρήθηκαν οι ρυθμίσεις Καιρικών Συνθηκών', 'Truck': 'Φορτηγό', 'Try checking the URL for errors, maybe it was mistyped.': 'Ελέγξτε την διεύθυνση URL για τυπογραφικά σφάλματα', 'Try hitting refresh/reload button or trying the URL from the address bar again.': 'Προσπάθηστε να κάνετε ανανέωση (refresh) ή ψάξτε την URL από το παράθυροτης διεύθυνσης ξανά', 'Try refreshing the page or hitting the back button on your browser.': 'Try refreshing the page or hitting the back button on your browser.', 'Tsunami': 'Tsunami', 'Tuesday': 'Tuesday', 'Twitter': 'Twitter', 'Twitter ID or #hashtag': 'Twitter ID or #hashtag', 'Twitter Settings': 'Twitter 
Settings', 'Type': 'Type', 'Type of cause': 'Type of cause', 'Type of latrines': 'Type of latrines', 'Type of place for defecation': 'Type of place for defecation', 'Type of water source before the disaster': 'Type of water source before the disaster', 'Types of health services available': 'Types of health services available', 'Types of water storage containers available': 'Types of water storage containers available', 'UID': 'UID', 'URL': 'URL', 'UTC Offset': 'απόκλιση ώρας από την UTC', 'Unable to parse CSV file!': 'Αδύνατο να επεξεργαστώ το αρχείο CSV', 'Understaffed': 'Ανεπαρκώς στελεχωμένη', 'Unidentified': 'Unidentified', 'Unit': 'Unit', 'Unit Bed Capacity': 'Unit Bed Capacity', 'Unit Cost': 'Unit Cost', 'Unit Details': 'Λεπτομέρειες Μονάδος', 'Unit Name': 'Unit Name', 'Unit Set': 'Unit Set', 'Unit Short Code for e.g. m for meter.': 'Unit Short Code for e.g. m for meter.', 'Unit added': 'Μονάδα προστέθηκε', 'Unit deleted': 'Unit deleted', 'Unit updated': 'Unit updated', 'Units': 'Units', 'Units of Measure': 'Units of Measure', 'Unknown': 'Unknown', 'Unknown Peer': 'Unknown Peer', 'Unknown type of facility': 'Unknown type of facility', 'Unresolved Conflicts': 'Unresolved Conflicts', 'Unselect to disable the modem': 'Unselect to disable the modem', 'Unsent': 'Unsent', 'Unsupported data format!': 'Unsupported data format!', 'Unsupported method!': 'Unsupported method!', 'Update': 'Update', 'Update Activity Report': 'Update Activity Report', 'Update Cholera Treatment Capability Information': 'Update Cholera Treatment Capability Information', 'Update Import Job': 'Update Import Job', 'Update Request': 'Update Request', 'Update Service Profile': 'Ανανέωση προφίλ υπηρεσιών', 'Update Task Status': 'Update Task Status', 'Update Unit': 'Update Unit', 'Update if Master': 'Αναθεώρηση εφόσον είστε κύριος', 'Update if Newer': 'Ανανέωση εαν υπάρχει καινουργιο', 'Update your current ordered list': 'Update your current ordered list', 'Upload': 'Upload', 'Upload Photos': 'Upload Photos', 'Upload Spreadsheet': 'Upload Spreadsheet', 'Upload Track': 'Upload Track', 'Upload a Spreadsheet': 'Μεταφόρτωση Λογιστικού Φύλλου', "Upload an image file here. If you don't upload an image file, then you must specify its location in the URL field.": "Upload an image file here. If you don't upload an image file, then you must specify its location in the URL field.", 'Urban Fire': 'Urban Fire', 'Urban area': 'Urban area', 'Urdu': 'Urdu', 'Urgent': 'Urgent', 'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) 
for NOT to build more complex queries.', 'Use default': 'Use default', 'Use these links to download data that is currently in the database.': 'Χρησιμοποιήστε τους συνδέσμους για να μεταφορτώσετε τρέχοντα δεδομένα που βρίσκονται στη βάση.', 'Use this space to add a description about the Bin Type.': 'Χρησιμοποίησε το χώρο αυτό για να περιγράψεις τον τύπο καλαθιού (bin)', 'Use this space to add a description about the site location.': 'Use this space to add a description about the site location.', 'Use this space to add a description about the warehouse/site.': 'Χρησιμοποίησε αυτό το χώρο για να προσθέσεις μία περιγραφή για τις αποθήκες / περιοχή', 'Use this space to add additional comments and notes about the Site/Warehouse.': 'Use this space to add additional comments and notes about the Site/Warehouse.', 'Used to import data from spreadsheets into the database': 'Used to import data from spreadsheets into the database', 'User': 'User', 'User %(id)s Logged-in': 'User %(id)s Logged-in', 'User %(id)s Registered': 'User %(id)s Registered', 'User Details': 'User Details', 'User ID': 'User ID', 'User Management': 'User Management', 'User Profile': 'User Profile', 'User Requests': 'User Requests', 'User Updated': 'User Updated', 'User added': 'User added', 'User already has this role': 'User already has this role', 'User deleted': 'Χρήστης Διαγράφηκε', 'User updated': 'User updated', 'Username': 'Username', 'Users': 'Users', 'Users removed': 'Οι χρήστες αφαιρέθηκαν ', 'Ushahidi': 'Ushahidi', 'Usual food sources in the area': 'Usual food sources in the area', 'Utility, telecommunication, other non-transport infrastructure': 'Utility, telecommunication, other non-transport infrastructure', 'Various Reporting functionalities': 'Various Reporting functionalities', 'Vehicle': 'Vehicle', 'Vehicle Crime': 'Εγκληματικότητα σχετική με το όχημα', 'Vehicle Types': 'Τύποι οχημάτων ', 'Vendor': 'Vendor', 'Verified?': 'Επιβεβαιώθηκε;', 'Verify Password': 'Verify Password', 'Verify password': 'Verify password', 'Version': 'Έκδοση', 'Very High': 'Very High', 'View Alerts received using either Email or SMS': 'Δείτε συναγερμούς που ελήφθησαν είτε με email ή με SMS.', 'View Fullscreen Map': 'View Fullscreen Map', 'View Image': 'View Image', 'View On Map': 'View On Map', 'View Outbox': 'View Outbox', 'View Requests for Aid': 'Δείτε αιτήματα για βοήθεια', 'View Settings': 'View Settings', 'View Tickets': 'View Tickets', "View and/or update details of the person's record": "View and/or update details of the person's record", 'View and/or update their details': 'View and/or update their details', 'View or update the status of a hospital.': 'View or update the status of a hospital.', 'View pending requests and pledge support.': 'View pending requests and pledge support.', 'View the hospitals on a map.': 'View the hospitals on a map.', "View/Edit the Database directly (caution: doesn't respect the framework rules!)": "View/Edit the Database directly (caution: doesn't respect the framework rules!)", 'Village': 'Village', 'Village Leader': 'Village Leader', 'Visible?': 'Visible?', 'Visual Recognition': 'Visual Recognition', 'Volcanic Ash Cloud': 'Σύννεφο ηφαιστειακής τέφρας', 'Volcanic Event': 'Volcanic Event', 'Volume - Fluids': 'Volume - Fluids', 'Volume - Solids': 'Volume - Solids', 'Volume Capacity': 'Volume Capacity', 'Volume/Dimensions': 'Volume/Dimensions', 'Volunteer Data': 'Volunteer Data', 'Volunteer Details': 'Volunteer Details', 'Volunteer Management': 'Volunteer Management', 'Volunteer Project': 'Έργο 
Εθελοντών', 'Volunteer Registration': 'Εγγραφή εθελοντή', 'Volunteer Registrations': 'Volunteer Registrations', 'Volunteer Request': 'Volunteer Request', 'Volunteer added': 'Volunteer added', 'Volunteer deleted': 'Volunteer deleted', 'Volunteer details updated': 'Volunteer details updated', 'Volunteer registration added': 'Volunteer registration added', 'Volunteer registration deleted': 'Volunteer registration deleted', 'Volunteer registration updated': 'Volunteer registration updated', 'Volunteers': 'Volunteers', 'Volunteers were notified!': 'Volunteers were notified!', 'Vote': 'Vote', 'Votes': 'Votes', 'WASH': 'WASH', 'WMS Browser Name': 'WMS Browser Name', 'WMS Browser URL': 'WMS Browser URL', 'Walking Only': 'Walking Only', 'Walking time to the health service': 'Walking time to the health service', 'Warehouse': 'Warehouse', 'Warehouse Details': 'Warehouse Details', 'Warehouse Item': 'Warehouse Item', 'Warehouse Item Details': 'Warehouse Item Details', 'Warehouse Item added': 'Warehouse Item added', 'Warehouse Item deleted': 'Warehouse Item deleted', 'Warehouse Item updated': 'Warehouse Item updated', 'Warehouse Items': 'Warehouse Items', 'Warehouse Management': 'Διαχείριση Αποθήκης', 'Warehouse added': 'Warehouse added', 'Warehouse deleted': 'Warehouse deleted', 'Warehouse updated': 'Warehouse updated', 'Warehouse/Sites Registry': 'Warehouse/Sites Registry', 'Warehouses': 'Warehouses', 'WatSan': 'WatSan', 'Water Sanitation Hygiene': 'Water Sanitation Hygiene', 'Water gallon': 'Νερό γαλόνι', 'Water storage containers available for HH': 'Water storage containers available for HH', 'Water storage containers sufficient per HH': 'Water storage containers sufficient per HH', 'Water supply': 'Water supply', 'Waterspout': 'Waterspout', 'Way Bill(s)': 'Way Bill(s)', 'We have tried': 'We have tried', 'Website': 'Ιστοχώρος', 'Wednesday': 'Wednesday', 'Weight': 'Weight', 'Weight (kg)': 'Βάρος (Χλμ)', 'Welcome to the Sahana Portal at ': 'Welcome to the Sahana Portal at ', 'Well-Known Text': 'Well-Known Text', 'Were basic medical supplies available for health services prior to the disaster?': 'Were basic medical supplies available for health services prior to the disaster?', 'Were breast milk substitutes used prior to the disaster?': 'Were breast milk substitutes used prior to the disaster?', 'Were there cases of malnutrition in this area prior to the disaster?': 'Were there cases of malnutrition in this area prior to the disaster?', 'Were there health services functioning for the community prior to the disaster?': 'Were there health services functioning for the community prior to the disaster?', 'Were there reports or evidence of outbreaks of any micronutrient malnutrition disorders before the emergency?': 'Were there reports or evidence of outbreaks of any micronutrient malnutrition disorders before the emergency?', 'What are the factors affecting school attendance?': 'What are the factors affecting school attendance?', "What are the people's normal ways to obtain food in this area?": "What are the people's normal ways to obtain food in this area?", 'What are your main sources of cash to restart your business?': 'What are your main sources of cash to restart your business?', 'What are your main sources of income now?': 'What are your main sources of income now?', 'What do you spend most of your income on now?': 'What do you spend most of your income on now?', 'What food stocks exist? (main dishes)': 'What food stocks exist? (main dishes)', 'What food stocks exist? 
(side dishes)': 'What food stocks exist? (side dishes)', 'What is the estimated total number of people in all of these institutions?': 'What is the estimated total number of people in all of these institutions?', 'What is your major source of clean water for daily use (ex: washing, cooking, bathing)?': 'What is your major source of clean water for daily use (ex: washing, cooking, bathing)?', 'What is your major source of drinking water?': 'What is your major source of drinking water?', "What should be done to reduce women and children's vulnerability to violence?": "What should be done to reduce women and children's vulnerability to violence?", 'What type of latrines are available in the village/IDP centre/Camp?': 'What type of latrines are available in the village/IDP centre/Camp?', 'What type of salvage material can be used from destroyed houses?': 'What type of salvage material can be used from destroyed houses?', 'What type of salvage material can be used from destroyed schools?': 'What type of salvage material can be used from destroyed schools?', 'What types of health problems do children currently have?': 'What types of health problems do children currently have?', 'What types of health problems do people currently have?': 'What types of health problems do people currently have?', 'What types of health services are still functioning in the affected area?': 'What types of health services are still functioning in the affected area?', 'What types of household water storage containers are available?': 'What types of household water storage containers are available?', 'What were your main sources of income before the disaster?': 'What were your main sources of income before the disaster?', 'Wheat': 'Wheat', 'When a map is displayed that focuses on a collection of points, the map is zoomed to show just the region bounding the points. This value adds a small mount of distance outside the points. Without this, the outermost points would be on the bounding box, and might not be visible.': 'When a map is displayed that focuses on a collection of points, the map is zoomed to show just the region bounding the points. This value adds a small mount of distance outside the points. Without this, the outermost points would be on the bounding box, and might not be visible.', 'When a map is displayed that focuses on a collection of points, the map is zoomed to show just the region bounding the points. This value gives a minimum width and height in degrees for the region shown. Without this, a map showing a single point would not show any extent around that point. After the map is displayed, it can be zoomed as desired.': 'When a map is displayed that focuses on a collection of points, the map is zoomed to show just the region bounding the points. This value gives a minimum width and height in degrees for the region shown. Without this, a map showing a single point would not show any extent around that point. After the map is displayed, it can be zoomed as desired.', "When syncing data with others, conflicts happen in cases when two (or more) parties want to sync information which both of them have modified, i.e. conflicting information. Sync module tries to resolve such conflicts automatically but in some cases it can't. In those cases, it is up to you to resolve those conflicts manually, click on the link on the right to go to this page.": "When syncing data with others, conflicts happen in cases when two (or more) parties want to sync information which both of them have modified, i.e. 
conflicting information. Sync module tries to resolve such conflicts automatically but in some cases it can't. In those cases, it is up to you to resolve those conflicts manually, click on the link on the right to go to this page.", 'Where are the alternative places for studying?': 'Where are the alternative places for studying?', 'Where are the separated children originally from?': 'Where are the separated children originally from?', 'Where do the majority of people defecate?': 'Where do the majority of people defecate?', 'Where have the children been sent?': 'Where have the children been sent?', 'Where is solid waste disposed in the village/camp?': 'Where is solid waste disposed in the village/camp?', 'Whiskers': 'Whiskers', 'Who is doing what and where': 'Who is doing what and where', 'Who usually collects water for the family?': 'Ποιός συνήθως συγκεντρώνει νερό για την οικογένεια;', 'Width': 'Width', 'Wild Fire': 'Wild Fire', 'Wind Chill': 'Wind Chill', 'Window frame': 'Window frame', 'Winter Storm': 'Winter Storm', 'Without mentioning any names or indicating anyone, do you know of any incidents of violence against women or girls occuring since the disaster?': 'Without mentioning any names or indicating anyone, do you know of any incidents of violence against women or girls occuring since the disaster?', 'Women of Child Bearing Age': 'Women of Child Bearing Age', 'Women participating in coping activities': 'Women participating in coping activities', 'Women who are Pregnant or in Labour': 'Women who are Pregnant or in Labour', 'Womens Focus Groups': 'Womens Focus Groups', 'Wooden plank': 'Wooden plank', 'Wooden poles': 'Wooden poles', 'Working hours end': 'Τέλος εργάσιμων ωρών', 'Working hours start': 'Έναρξη ωρών εργασίας (ωραρίου)', 'Working or other to provide money/food': 'Working or other to provide money/food', 'Would you like to display the photos on the map?': 'Would you like to display the photos on the map?', 'X-Ray': 'X-Ray', 'XMPP': 'XMPP', 'Yes': 'Yes', 'You are attempting to delete your own account - are you sure you want to proceed?': 'You are attempting to delete your own account - are you sure you want to proceed?', 'You are currently reported missing!': 'You are currently reported missing!', 'You can change the configuration of synchronization module in the Settings section. This configuration includes your UUID (unique identification number), sync schedules, beacon service and so on. Click the following link to go to the Sync Settings page.': 'You can change the configuration of synchronization module in the Settings section. This configuration includes your UUID (unique identification number), sync schedules, beacon service and so on. Click the following link to go to the Sync Settings page.', 'You can click on the map below to select the Lat/Lon fields:': 'You can click on the map below to select the Lat/Lon fields:', 'You can click on the map to select the Lat/Lon fields. Longitude is West - East (sideways). Latitude is North-South (Up-Down). Latitude is zero on the equator and positive in the northern hemisphere and negative in the southern hemisphere. Longitude is zero on the prime meridian (Greenwich Mean Time) and is positive to the east, across Europe and Asia. Longitude is negative to the west, across the Atlantic and the Americas. This needs to be added in Decimal Degrees.': 'You can click on the map to select the Lat/Lon fields. Longitude is West - East (sideways). Latitude is North-South (Up-Down). 
Latitude is zero on the equator and positive in the northern hemisphere and negative in the southern hemisphere. Longitude is zero on the prime meridian (Greenwich Mean Time) and is positive to the east, across Europe and Asia. Longitude is negative to the west, across the Atlantic and the Americas. This needs to be added in Decimal Degrees.', 'You can select the Draw tool (': 'You can select the Draw tool (', 'You can set the modem settings for SMS here.': 'Μπορείτε να ρυθμίσετε τις επιλογές του modem για SMS εδώ', 'You can use the Conversion Tool to convert from either GPS coordinates or Degrees/Minutes/Seconds.': 'You can use the Conversion Tool to convert from either GPS coordinates or Degrees/Minutes/Seconds.', "You have personalised settings, so changes made here won't be visible to you. To change your personalised settings, click ": "You have personalised settings, so changes made here won't be visible to you. To change your personalised settings, click ", "You have unsaved changes. Click Cancel now, then 'Save' to save them. Click OK now to discard them.": "You have unsaved changes. Click Cancel now, then 'Save' to save them. Click OK now to discard them.", "You haven't made any calculations": "You haven't made any calculations", 'You must be logged in to register volunteers.': 'You must be logged in to register volunteers.', 'You must be logged in to report persons missing or found.': 'You must be logged in to report persons missing or found.', 'You must provide a series id to proceed.': 'Πρέπει να παρέχετε αναγνωριστικό σειράς (series id) για να προχωρήσετε', 'You should edit Twitter settings in models/000_config.py': 'You should edit Twitter settings in models/000_config.py', 'Your action is required. Please approve user %s asap: ': 'Your action is required. Please approve user %s asap: ', 'Your current ordered list of solution items is shown below. You can change it by voting again.': 'Your current ordered list of solution items is shown below. You can change it by voting again.', 'Your post was added successfully.': 'Το κείμενο σας (post) προστέθηκε με επιτυχία', 'Your system has been assigned a unique identification (UUID), which other computers around you can use to identify you. To view your UUID, you may go to Synchronization -> Sync Settings. You can also see other settings on that page.': 'Your system has been assigned a unique identification (UUID), which other computers around you can use to identify you. To view your UUID, you may go to Synchronization -> Sync Settings. 
You can also see other settings on that page.', 'ZIP/Postcode': 'ZIP/Postcode', 'Zinc roof': 'Zinc roof', 'Zoom': 'Zoom', 'Zoom Levels': 'Zoom Levels', 'act': 'act', 'active': 'ενεργό', 'added': 'added', 'all records': 'all records', 'allows a budget to be developed based on staff & equipment costs, including any admin overheads.': 'allows a budget to be developed based on staff & equipment costs, including any admin overheads.', 'allows for creation and management of surveys to assess the damage following a natural disaster.': 'allows for creation and management of surveys to assess the damage following a natural disaster.', 'an individual/team to do in 1-2 days': 'an individual/team to do in 1-2 days', 'approved': 'approved', 'assigned': 'ορίστηκε / ανατέθηκε', 'average': 'μέσος όρος', 'black': 'black', 'blond': 'blond', 'blue': 'blue', 'brown': 'brown', 'c/o Name': 'c/o Name', 'can be used to extract data from spreadsheets and put them into database tables.': 'μπορεί να χρησιμοποιηθεί για να εξάγει δεδομένα από λογιστικά φύλλα xls και να τα τοποθετήσει σε πίνακες βάσεων δεδομένων', 'cancelled': 'cancelled', 'caucasoid': 'caucasoid', 'check all': 'check all', 'click for more details': 'click for more details', 'collateral event': 'collateral event', 'completed': 'completed', 'confirmed': 'confirmed', 'consider': 'consider', 'constraint_id': 'constraint_id', 'criminal intent': 'criminal intent', 'crud': 'crud', 'curly': 'curly', 'currently registered': 'currently registered', 'daily': 'ημερίσια', 'dark': 'dark', 'data uploaded': 'Δεδομένα μεταφορτώθηκαν', 'database': 'database', 'database %s select': 'Βάσεις δεδομένων έχουν επιλεγεί. ', 'db': 'db', 'delete all checked': 'delete all checked', 'deleted': 'deleted', 'denied': 'denied', 'description': 'description', 'design': 'design', 'diseased': 'diseased', 'displaced': 'displaced', 'divorced': 'divorced', 'done!': 'done!', 'edit': 'edit', 'editor': 'Συγγραφέας - Εκδότης', 'embedded': 'embedded', 'enclosed area': 'enclosed area', 'export as csv file': 'εξαγωγή σαν αρχείο csv', 'fat': 'fat', 'feedback': 'feedback', 'female': 'female', 'final report': 'final report', 'flush latrine with septic tank': 'flush latrine with septic tank', 'follow-up assessment': 'follow-up assessment', 'forehead': 'forehead', 'form data': 'form data', 'from Twitter': 'from Twitter', 'from_id': 'from_id', 'full': 'full', 'getting': 'getting', 'green': 'green', 'grey': 'grey', 'here': 'εδώ', 'high': 'high', 'hourly': 'hourly', 'households': 'households', 'human error': 'human error', 'identified': 'identified', 'ignore': 'ignore', 'immediately': 'immediately', 'in Deg Min Sec format': 'in Deg Min Sec format', 'in GPS format': 'σε μορφότυπο GPS', 'inactive': 'inactive', 'initial assessment': 'initial assessment', 'injured': 'injured', 'insert new': 'insert new', 'insert new %s': 'insert new %s', 'invalid': 'invalid', 'invalid request': 'invalid request', 'is a central online repository where information on all the disaster victims and families, especially identified casualties, evacuees and displaced people can be stored. Information like name, age, contact number, identity card number, displaced location, and other details are captured. Picture and finger print details of the people can be uploaded to the system. People can also be captured by group for efficiency and convenience.': 'is a central online repository where information on all the disaster victims and families, especially identified casualties, evacuees and displaced people can be stored. 
Information like name, age, contact number, identity card number, displaced location, and other details are captured. Picture and finger print details of the people can be uploaded to the system. People can also be captured by group for efficiency and convenience.', 'keeps track of all incoming tickets allowing them to be categorised & routed to the appropriate place for actioning.': 'Καταγράφει και ελέγχει όλα τα εισερχόμενα "εισητήρια" επιτρέποντας την κατηγοριοποίηση και τη δρομολόγηση τους για ενέργεια', 'kilogram': 'kilogram', 'kit': 'kit', 'latrines': 'latrines', 'legend URL': 'legend URL', 'light': 'light', 'liter': 'liter', 'login': 'Σύνδεση', 'long': 'long', 'long>12cm': 'long>12cm', 'low': 'low', 'male': 'male', 'manual': 'manual', 'married': 'married', 'maxExtent': 'maxExtent', 'maxResolution': 'Μέγιστη Ανάλυση', 'medium': 'medium', 'medium<12cm': 'μέση<12 εκατοστά', 'menu item': 'menu item', 'message_id': 'message_id', 'meter': 'meter', 'meter cubed': 'meter cubed', 'meters': 'meters', 'module allows the site administrator to configure various options.': 'module allows the site administrator to configure various options.', 'module helps monitoring the status of hospitals.': 'το υποπρόγραμμα βοηθάει στον έλεγχο της κατάστασης των νοσοκομείων', 'module provides a mechanism to collaboratively provide an overview of the developing disaster, using online mapping (GIS).': 'module provides a mechanism to collaboratively provide an overview of the developing disaster, using online mapping (GIS).', 'mongoloid': 'mongoloid', 'more': 'more', 'n/a': 'n/a', 'natural hazard': 'φυσική καταστροφή', 'negroid': 'negroid', 'never': 'ποτέ', 'new': 'νέο', 'new record inserted': 'new record inserted', 'next 100 rows': 'next 100 rows', 'no': 'no', 'none': 'none', 'normal': 'normal', 'not needed': 'not needed', 'not specified': 'δεν έχουν διευκρινισθεί', 'num Zoom Levels': 'num Zoom Levels', 'once': 'once', 'open defecation': 'open defecation', 'operational intent': 'operational intent', 'or import from csv file': 'or import from csv file', 'other': 'other', 'over one hour': 'over one hour', 'pack of 10': 'pack of 10', 'pending': 'pending', 'people': 'άνθρωποι', 'piece': 'piece', 'pit': 'pit', 'pit latrine': 'pit latrine', 'postponed': 'postponed', 'preliminary template or draft, not actionable in its current form': 'preliminary template or draft, not actionable in its current form', 'previous 100 rows': 'previous 100 rows', 'primary incident': 'primary incident', 'problem connecting to twitter.com - please refresh': 'problem connecting to twitter.com - please refresh', 'provides a catalogue of digital media.': 'provides a catalogue of digital media.', 'record does not exist': 'record does not exist', 'record id': 'record id', 'red': 'red', 'reported': 'reported', 'reports successfully imported.': 'οι αναφορές εισήχθησαν με επιτυχία', 'retired': 'retired', 'retry': 'retry', 'river': 'river', 'sack 20kg': 'sack 20kg', 'sack 50kg': 'sack 50kg', 'secondary effect': 'secondary effect', 'see comment': 'see comment', 'selected': 'selected', 'separated': 'separated', 'separated from family': 'Ξεχωρίστηκε από την οικογένεια', 'shaved': 'Ξυρισμένα ', 'shift_end': 'shift_end', 'shift_start': 'shift_start', 'short': 'short', 'short<6cm': 'short<6cm', 'sides': 'sides', 'sign-up now': 'sign-up now', 'simple': 'simple', 'single': 'single', 'slim': 'slim', 'state': 'state', 'straight': 'straight', 'suffered financial losses': 'έχουν υποστεί οικονομικές απώλειες', 'table': 'table', 'table_name': 'table_name', 'tall': 
'Ύψος', 'technical failure': 'technical failure', 'this': 'this', 'times and it is still not working. We give in. Sorry.': 'times and it is still not working. We give in. Sorry.', 'to access the system': 'to access the system', 'to reset your password': 'to reset your password', 'to verify your email': 'to verify your email', 'to_id': 'to_id', 'ton': 'ton', 'tonsure': 'tonsure', 'total': 'total', 'tracks all shelters and stores basic details regarding them. It collaborates with other modules to track people associated with a shelter, the services available etc.': 'tracks all shelters and stores basic details regarding them. It collaborates with other modules to track people associated with a shelter, the services available etc.', 'tweepy module not available within the running Python - this needs installing for non-Tropo Twitter support!': 'tweepy module not available within the running Python - this needs installing for non-Tropo Twitter support!', 'unable to parse csv file': 'unable to parse csv file', 'unapproved': 'unapproved', 'uncheck all': 'uncheck all', 'unidentified': 'unidentified', 'uninhabitable = foundation and structure destroyed': 'uninhabitable = foundation and structure destroyed', 'unknown': 'unknown', 'unspecified': 'unspecified', 'updated': 'Ενημερώθηκε', 'updates only': 'updates only', 'urgent': 'επείγον', 'vm_action': 'vm_action', 'wavy': 'wavy', 'weekly': 'weekly', 'white': 'white', 'wider area, longer term, usually contain multiple Activities': 'wider area, longer term, usually contain multiple Activities', 'widowed': 'widowed', 'window': 'window', 'windows broken, cracks in walls, roof slightly damaged': 'windows broken, cracks in walls, roof slightly damaged', 'within human habitat': 'εντός κατοικήσιμης περιοχής (habitat)', 'xlwt module not available within the running Python - this needs installing for XLS output!': 'xlwt module not available within the running Python - this needs installing for XLS output!', 'yes': 'Ναι', }
{ "content_hash": "62261e813bdc41bddd959165e04197b8", "timestamp": "", "source": "github", "line_count": 4038, "max_line_length": 951, "avg_line_length": 61.80534918276374, "alnum_prop": 0.7370597427575429, "repo_name": "ptressel/sahana-eden-madpub", "id": "966fdfef4fe8dbb5db0078b1a0e3ab7ec74b4979", "size": "265627", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "languages/el.py", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "14896489" }, { "name": "PHP", "bytes": "15220" }, { "name": "Python", "bytes": "14827014" }, { "name": "Shell", "bytes": "1171" } ], "symlink_target": "" }
""" The ios vlans fact class It is in this file the configuration is collected from the device for a given resource, parsed, and the facts tree is populated based on the configuration. """ from __future__ import absolute_import, division, print_function __metaclass__ = type from copy import deepcopy from ansible.module_utils.network.common import utils from ansible.module_utils.network.ios.argspec.vlans.vlans import VlansArgs class VlansFacts(object): """ The ios vlans fact class """ def __init__(self, module, subspec='config', options='options'): self._module = module self.argument_spec = VlansArgs.argument_spec spec = deepcopy(self.argument_spec) if subspec: if options: facts_argument_spec = spec[subspec][options] else: facts_argument_spec = spec[subspec] else: facts_argument_spec = spec self.generated_spec = utils.generate_dict(facts_argument_spec) def populate_facts(self, connection, ansible_facts, data=None): """ Populate the facts for vlans :param connection: the device connection :param ansible_facts: Facts dictionary :param data: previously collected conf :rtype: dictionary :returns: facts """ if connection: pass objs = [] mtu_objs = [] remote_objs = [] final_objs = [] if not data: data = connection.get('show vlan') # operate on a collection of resource x config = data.split('\n') # Get individual vlan configs separately vlan_info = '' for conf in config: if 'Name' in conf: vlan_info = 'Name' elif 'Type' in conf: vlan_info = 'Type' elif 'Remote' in conf: vlan_info = 'Remote' if conf and ' ' not in filter(None, conf.split('-')): obj = self.render_config(self.generated_spec, conf, vlan_info) if 'mtu' in obj: mtu_objs.append(obj) elif 'remote_span' in obj: remote_objs = obj elif obj: objs.append(obj) # Appending MTU value to the retrieved dictionary for o, m in zip(objs, mtu_objs): o.update(m) final_objs.append(o) # Appending Remote Span value to related VLAN: if remote_objs: if remote_objs.get('remote_span'): for each in remote_objs.get('remote_span'): for every in final_objs: if each == every.get('vlan_id'): every.update({'remote_span': True}) break facts = {} if final_objs: facts['vlans'] = [] params = utils.validate_config(self.argument_spec, {'config': objs}) for cfg in params['config']: facts['vlans'].append(utils.remove_empties(cfg)) ansible_facts['ansible_network_resources'].update(facts) return ansible_facts def render_config(self, spec, conf, vlan_info): """ Render config as dictionary structure and delete keys from spec for null values :param spec: The facts tree, generated from the argspec :param conf: The configuration :rtype: dictionary :returns: The generated config """ config = deepcopy(spec) if vlan_info == 'Name' and 'Name' not in conf: conf = filter(None, conf.split(' ')) config['vlan_id'] = int(conf[0]) config['name'] = conf[1] if len(conf[2].split('/')) > 1: if conf[2].split('/')[0] == 'sus': config['state'] = 'suspend' elif conf[2].split('/')[0] == 'act': config['state'] = 'active' config['shutdown'] = 'enabled' else: if conf[2] == 'suspended': config['state'] = 'suspend' elif conf[2] == 'active': config['state'] = 'active' config['shutdown'] = 'disabled' elif vlan_info == 'Type' and 'Type' not in conf: conf = filter(None, conf.split(' ')) config['mtu'] = int(conf[3]) elif vlan_info == 'Remote': if len(conf.split(',')) > 1 or conf.isdigit(): remote_span_vlan = [] if len(conf.split(',')) > 1: remote_span_vlan = conf.split(',') else: remote_span_vlan.append(conf) remote_span = [] for each in remote_span_vlan: remote_span.append(int(each)) config['remote_span'] = 
remote_span return utils.remove_empties(config)
{ "content_hash": "5cbfcf9680870ce4aacff774091e0352", "timestamp": "", "source": "github", "line_count": 139, "max_line_length": 80, "avg_line_length": 35.56115107913669, "alnum_prop": 0.5209387011936071, "repo_name": "thaim/ansible", "id": "0450ad9a398b967374741dac5e303430ff121a16", "size": "5089", "binary": false, "copies": "12", "ref": "refs/heads/fix-broken-link", "path": "lib/ansible/module_utils/network/ios/facts/vlans/vlans.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "7" }, { "name": "Shell", "bytes": "246" } ], "symlink_target": "" }
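A minimal, self-contained sketch of the table-parsing idea the facts class uses: split a `show vlan` row on whitespace and map the columns onto a dict. The sample row and column layout are illustrative assumptions, not device output captured by this module.

SAMPLE_ROW = "10   Engineering    active"


def parse_vlan_row(row):
    # list() keeps the filter result subscriptable on both Python 2 and 3.
    parts = list(filter(None, row.split(' ')))
    return {
        'vlan_id': int(parts[0]),
        'name': parts[1],
        'state': 'suspend' if parts[2] == 'suspended' else parts[2],
    }


assert parse_vlan_row(SAMPLE_ROW) == {
    'vlan_id': 10, 'name': 'Engineering', 'state': 'active'}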
import time

from web3.exceptions import StaleBlockchain

SKIP_STALECHECK_FOR_METHODS = set([
    'eth_getBlockByNumber',
])


def _isfresh(block, allowable_delay):
    return block and time.time() - block['timestamp'] <= allowable_delay


def make_stalecheck_middleware(
        allowable_delay,
        skip_stalecheck_for_methods=SKIP_STALECHECK_FOR_METHODS):
    '''
    Use to require that a function will run only if the blockchain is recently updated.

    This middleware takes an argument, so unlike other middleware,
    you must make the middleware with a method call.
    For example: `make_stalecheck_middleware(60*5)`

    If the latest block in the chain is older than 5 minutes in this example, then
    the middleware will raise a StaleBlockchain exception.
    '''
    if allowable_delay <= 0:
        raise ValueError("You must set a positive allowable_delay in seconds for this middleware")

    def stalecheck_middleware(make_request, web3):
        cache = {'latest': None}

        def middleware(method, params):
            if method not in skip_stalecheck_for_methods:
                if _isfresh(cache['latest'], allowable_delay):
                    pass
                else:
                    latest = web3.eth.getBlock('latest')
                    if _isfresh(latest, allowable_delay):
                        cache['latest'] = latest
                    else:
                        raise StaleBlockchain(latest, allowable_delay)
            return make_request(method, params)
        return middleware
    return stalecheck_middleware
{ "content_hash": "4e42aed2a60a464c1f3a02e4c3b105a0", "timestamp": "", "source": "github", "line_count": 46, "max_line_length": 98, "avg_line_length": 34.21739130434783, "alnum_prop": 0.6378653113087674, "repo_name": "pipermerriam/web3.py", "id": "cd2a9bd3b0787668c72b7579e5b15416cbccda95", "size": "1574", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "web3/middleware/stalecheck.py", "mode": "33188", "license": "mit", "language": [ { "name": "Makefile", "bytes": "999" }, { "name": "Python", "bytes": "619517" } ], "symlink_target": "" }
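A standalone demonstration of the freshness check at the heart of the middleware, using a plain dict in place of a real web3 block. The timestamps are fabricated for illustration; only the `_isfresh` logic itself is taken from the code above.

import time


def isfresh(block, allowable_delay):
    return block and time.time() - block['timestamp'] <= allowable_delay


fresh_block = {'timestamp': time.time() - 10}
stale_block = {'timestamp': time.time() - 600}

assert isfresh(fresh_block, allowable_delay=300)
assert not isfresh(stale_block, allowable_delay=300)
# An empty cache (block is None) also counts as stale.
assert not isfresh(None, allowable_delay=300)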
import pylibmc try: mc = pylibmc.Client(["127.0.0.1"]) mc["some_key"] = "Some value" except pylibmc.ConnectionError: print("Could not connect")
{ "content_hash": "b07310fbd92c0eaaad2188f1cc4ab97d", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 38, "avg_line_length": 17.666666666666668, "alnum_prop": 0.6477987421383647, "repo_name": "jimlindeman/python-buildpack-collectd", "id": "6167d102b199b6d56e132ec3fd6f600394e3210d", "size": "159", "binary": false, "copies": "11", "ref": "refs/heads/master", "path": "cf_spec/fixtures/features/libmemcache.py", "mode": "33188", "license": "mit", "language": [ { "name": "Makefile", "bytes": "176" }, { "name": "Python", "bytes": "1775" }, { "name": "Ruby", "bytes": "5511" }, { "name": "Shell", "bytes": "49306" } ], "symlink_target": "" }
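A small extension of the snippet above, assuming pylibmc's dict-like client also exposes a `get` method that returns None for missing keys (the key name is illustrative): read with a fallback instead of only guarding the connection.

import pylibmc

mc = pylibmc.Client(["127.0.0.1"])
try:
    # Fall back to a default when the key is absent.
    value = mc.get("some_key") or "fallback value"
except pylibmc.ConnectionError:
    value = "fallback value"
print(value)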
"""A module to help add Raspberry Pi IO expander chips. Examples: >>> from rasio import PinControler >>> control = PinControler() >>> control.add_mcp(0x20) >>> control.add_mcp(0x21) >>> control.add_mcp(0x22) """ from functools import partial import time import smbus class PinControler(object): """Turns on and off pins connected through mcp23017 Chips. Add devices and you can set pins with pin numbers. Converts to binary conveniently.""" def __init__(self, output_bits=8, raspberry_pi_rev=2): self.bus = smbus.SMBus(raspberry_pi_rev - 1) self.pin_count = 0 self.pin_to_binary_list = {} self.pin_to_index = {} self.pin_to_bus = {} self.mapped_pins = {} def set_pin(self, pin_num, enable=True, update_device=True): """Turns a pin on or off based on the enable argument.""" pin_num = self.__adjust_pin(pin_num) binary_list = self.pin_to_binary_list[pin_num] index = self.pin_to_index[pin_num] binary_list[index] = unicode(int(enable)) if update_device: self.__refresh_pin_bus(pin_num) def set_pins(self, pin_num_list, enable): """Sets a list of pins to either a value, or a list of values. If enable is a list, it must have the same length as the pin_num_list.""" pin_num_list = [self.__adjust_pin(pin_num) for pin_num in pin_num_list] if isinstance(enable, bool): enable = [enable for e in pin_num_list] if len(enable) != len(pin_num_list): raise IndexError("enable list must be same length as pin_num_list") to_refresh_pins = [] to_refresh_bus = [] for idx, enable_value in enumerate(enable): pin_num = pin_num_list[idx] self.set_pin(pin_num, enable=enable_value, update_device=False) bus_write = self.pin_to_bus[pin_num] if bus_write not in to_refresh_bus: to_refresh_bus.append(bus_write) to_refresh_pins.append(pin_num) for pin in to_refresh_pins: self.__refresh_pin_bus(pin) def toggle(self, pin_num): """Toggles (turns off if on, on if off) a pin.""" pin_num = self.__adjust_pin(pin_num) binary_list = self.pin_to_binary_list[pin_num] index = self.pin_to_index[pin_num] enabled = bool(int(binary_list[index])) enable = enabled is False binary_list[index] = unicode(int(enable)) self.__refresh_pin_bus(pin_num) def turn_off(self): """Sets all pins to an off state.""" for pin_num in self.pin_to_bus: self.set_pin(pin_num, enable=False, update_device=False) for bus_write in set(self.pin_to_bus.values()): bus_write(0) def turn_on(self): """Sets all pins to an on state.""" pins = [pin_num for pin_num in self.pin_to_bus.keys()] self.set_pins(pins, True) def add_channels(self, device_address, io_address, olat_address, output_bits=8): """Adds an 8 channel bus with 8 pins.""" self.bus.write_byte_data(device_address, io_address, 0) bus_write = partial(self.bus.write_byte_data, device_address, olat_address) bus_write(0) binary_list = [u"0" for e in xrange(output_bits)] start = self.pin_count + 1 for num, pin_num in enumerate(range(start, start + output_bits)): self.pin_to_bus[pin_num] = bus_write self.pin_to_binary_list[pin_num] = binary_list self.pin_to_index[pin_num] = -(num + 1) self.pin_count += 1 def add_mcp(self, address): """Adds a MCP23017 Device with all pins set as output""" io_adr_a, out_adr_a = 0x00, 0x14 io_adr_b, out_adr_b = 0x01, 0x15 self.add_channels(address, io_adr_a, out_adr_a) self.add_channels(address, io_adr_b, out_adr_b) def map_pin_num(self, actual_pin_num, mapping): """You can set a mapping for a pin sok When you call methods with a pin number it finds a different mapped pin.""" self.mapped_pins[mapping] = actual_pin_num def clear_mapping(self): """Removes all mapped pins.""" self.mapped_pins.clear() def 
test_all(self): """Tests all of the output pins.""" for num in range(len(self.pin_to_binary_list.keys())): num = num + 1 self.set_pin(num) time.sleep(.1) if num % 15 == 0: self.turn_off() time.sleep(1) self.turn_off() def __refresh_pin_bus(self, pin_num): """Writes the binary to the bus""" bus_write = self.pin_to_bus[pin_num] binary_string = u"".join(bit for bit in self.pin_to_binary_list[pin_num]) binary = int(binary_string, 2) bus_write(binary) def __adjust_pin(self, pin_num): """Adjust the pin number if there is a mapping.""" if pin_num in self.mapped_pins: pin_num = self.mapped_pins[pin_num] return pin_num def __del__(self): self.turn_off()
{ "content_hash": "5fa9c28ff6fbfe946b71fb293ca16cc5", "timestamp": "", "source": "github", "line_count": 134, "max_line_length": 84, "avg_line_length": 38.223880597014926, "alnum_prop": 0.5843420538852011, "repo_name": "Digirolamo/RasIO", "id": "94e8c9afd8a48e32730ccdc469559b557a1d8bbf", "size": "5122", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "RasIO/rasio.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "5180" } ], "symlink_target": "" }
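The chip write in `__refresh_pin_bus` boils down to joining a list of bit characters and interpreting the string as base 2. A minimal sketch of that conversion, with an illustrative 8-bit register where pins 1 and 3 are on; the class indexes pins from the right using negative list indices.

bits = [u"0"] * 8
bits[-1] = u"1"  # pin 1 -> least significant bit
bits[-3] = u"1"  # pin 3
register_value = int(u"".join(bits), 2)
assert register_value == 0b00000101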
import asyncio from unittest import mock import pytest from multidict import CIMultiDict from yarl import URL import aiohttp from aiohttp import web from aiohttp.test_utils import TestClient as _TestClient from aiohttp.test_utils import TestServer as _TestServer from aiohttp.test_utils import (AioHTTPTestCase, loop_context, make_mocked_request, setup_test_loop, teardown_test_loop, unittest_run_loop) def _create_example_app(): @asyncio.coroutine def hello(request): return web.Response(body=b"Hello, world") @asyncio.coroutine def websocket_handler(request): ws = web.WebSocketResponse() yield from ws.prepare(request) msg = yield from ws.receive() if msg.type == aiohttp.WSMsgType.TEXT: if msg.data == 'close': yield from ws.close() else: ws.send_str(msg.data + '/answer') return ws @asyncio.coroutine def cookie_handler(request): resp = web.Response(body=b"Hello, world") resp.set_cookie('cookie', 'val') return resp app = web.Application() app.router.add_route('*', '/', hello) app.router.add_route('*', '/websocket', websocket_handler) app.router.add_route('*', '/cookie', cookie_handler) return app def test_full_server_scenario(): with loop_context() as loop: app = _create_example_app() with _TestClient(app, loop=loop) as client: @asyncio.coroutine def test_get_route(): nonlocal client resp = yield from client.request("GET", "/") assert resp.status == 200 text = yield from resp.text() assert "Hello, world" in text loop.run_until_complete(test_get_route()) def test_server_with_create_test_teardown(): with loop_context() as loop: app = _create_example_app() with _TestClient(app, loop=loop) as client: @asyncio.coroutine def test_get_route(): resp = yield from client.request("GET", "/") assert resp.status == 200 text = yield from resp.text() assert "Hello, world" in text loop.run_until_complete(test_get_route()) def test_test_client_close_is_idempotent(): """ a test client, called multiple times, should not attempt to close the server again. 
""" loop = setup_test_loop() app = _create_example_app() client = _TestClient(app, loop=loop) loop.run_until_complete(client.close()) loop.run_until_complete(client.close()) teardown_test_loop(loop) class TestAioHTTPTestCase(AioHTTPTestCase): def get_app(self): return _create_example_app() @unittest_run_loop @asyncio.coroutine def test_example_with_loop(self): request = yield from self.client.request("GET", "/") assert request.status == 200 text = yield from request.text() assert "Hello, world" in text def test_example(self): @asyncio.coroutine def test_get_route(): resp = yield from self.client.request("GET", "/") assert resp.status == 200 text = yield from resp.text() assert "Hello, world" in text self.loop.run_until_complete(test_get_route()) # these exist to test the pytest scenario @pytest.yield_fixture def loop(): with loop_context() as loop: yield loop @pytest.fixture def app(): return _create_example_app() @pytest.yield_fixture def test_client(loop, app): client = _TestClient(app, loop=loop) loop.run_until_complete(client.start_server()) yield client loop.run_until_complete(client.close()) def test_get_route(loop, test_client): @asyncio.coroutine def test_get_route(): resp = yield from test_client.request("GET", "/") assert resp.status == 200 text = yield from resp.text() assert "Hello, world" in text loop.run_until_complete(test_get_route()) @asyncio.coroutine def test_client_websocket(loop, test_client): resp = yield from test_client.ws_connect("/websocket") resp.send_str("foo") msg = yield from resp.receive() assert msg.type == aiohttp.WSMsgType.TEXT assert "foo" in msg.data resp.send_str("close") msg = yield from resp.receive() assert msg.type == aiohttp.WSMsgType.CLOSE @asyncio.coroutine def test_client_cookie(loop, test_client): assert not test_client.session.cookie_jar yield from test_client.get("/cookie") cookies = list(test_client.session.cookie_jar) assert cookies[0].key == 'cookie' assert cookies[0].value == 'val' @asyncio.coroutine @pytest.mark.parametrize("method", [ "get", "post", "options", "post", "put", "patch", "delete" ]) @asyncio.coroutine def test_test_client_methods(method, loop, test_client): resp = yield from getattr(test_client, method)("/") assert resp.status == 200 text = yield from resp.text() assert "Hello, world" in text @asyncio.coroutine def test_test_client_head(loop, test_client): resp = yield from test_client.head("/") assert resp.status == 200 @pytest.mark.parametrize( "headers", [{'token': 'x'}, CIMultiDict({'token': 'x'}), {}]) def test_make_mocked_request(headers): req = make_mocked_request('GET', '/', headers=headers) assert req.method == "GET" assert req.path == "/" assert isinstance(req, web.Request) assert isinstance(req.headers, CIMultiDict) def test_make_mocked_request_sslcontext(): req = make_mocked_request('GET', '/') assert req.transport.get_extra_info('sslcontext') is None def test_make_mocked_request_unknown_extra_info(): req = make_mocked_request('GET', '/') assert req.transport.get_extra_info('unknown_extra_info') is None def test_make_mocked_request_app(): app = mock.Mock() req = make_mocked_request('GET', '/', app=app) assert req.app is app def test_make_mocked_request_content(): payload = mock.Mock() req = make_mocked_request('GET', '/', payload=payload) assert req.content is payload def test_make_mocked_request_transport(): transport = mock.Mock() req = make_mocked_request('GET', '/', transport=transport) assert req.transport is transport def test_test_client_props(loop): app = _create_example_app() client = _TestClient(app, 
loop=loop, host='localhost') assert client.host == 'localhost' assert client.port is None with client: assert isinstance(client.port, int) assert client.server is not None assert client.port is None def test_test_server_context_manager(loop): app = _create_example_app() with _TestServer(app, loop=loop) as server: @asyncio.coroutine def go(): client = aiohttp.ClientSession(loop=loop) resp = yield from client.head(server.make_url('/')) assert resp.status == 200 resp.close() client.close() loop.run_until_complete(go()) def test_client_scheme_mutually_exclusive_with_server(): app = _create_example_app() server = _TestServer(app) with pytest.raises(ValueError): _TestClient(server, scheme='http') def test_client_host_mutually_exclusive_with_server(): app = _create_example_app() server = _TestServer(app) with pytest.raises(ValueError): _TestClient(server, host='127.0.0.1') def test_client_unsupported_arg(): with pytest.raises(TypeError): _TestClient('string') def test_server_make_url_yarl_compatibility(loop): app = _create_example_app() with _TestServer(app, loop=loop) as server: make_url = server.make_url assert make_url(URL('/foo')) == make_url('/foo') with pytest.raises(AssertionError): make_url('http://foo.com') with pytest.raises(AssertionError): make_url(URL('http://foo.com'))
{ "content_hash": "ebe564c56e8e13d1241b19c99e864d6a", "timestamp": "", "source": "github", "line_count": 278, "max_line_length": 70, "avg_line_length": 28.866906474820144, "alnum_prop": 0.6301557632398754, "repo_name": "AraHaanOrg/aiohttp", "id": "8aef4e89db80e3d8ce6db81fb47fe724391968df", "size": "8025", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "tests/test_test_utils.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "838" }, { "name": "Makefile", "bytes": "2817" }, { "name": "Python", "bytes": "1233163" }, { "name": "Shell", "bytes": "1759" } ], "symlink_target": "" }
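A short sketch reusing `make_mocked_request` from aiohttp.test_utils, as exercised in the tests above, to unit-test a handler without a running server. The `echo_path` handler is a hypothetical example, not part of aiohttp.

import asyncio

from aiohttp import web
from aiohttp.test_utils import make_mocked_request


@asyncio.coroutine
def echo_path(request):
    # Echo the request path back in the response body.
    return web.Response(text=request.path)


req = make_mocked_request('GET', '/ping')
loop = asyncio.new_event_loop()
resp = loop.run_until_complete(echo_path(req))
assert resp.text == '/ping'
loop.close()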
import sys try: import unittest2 as unittest except ImportError: import unittest from tests.base import BaseTestCase from pyasn1.compat import octets class OctetsTestCase(BaseTestCase): if sys.version_info[0] > 2: def test_ints2octs(self): assert [1, 2, 3] == list(octets.ints2octs([1, 2, 3])) def test_ints2octs_empty(self): assert not octets.ints2octs([]) def test_int2oct(self): assert [12] == list(octets.int2oct(12)) def test_octs2ints(self): assert [1, 2, 3] == list(octets.octs2ints(bytes([1, 2, 3]))) def test_octs2ints_empty(self): assert not octets.octs2ints(bytes([])) def test_oct2int(self): assert 12 == octets.oct2int(bytes([12]))[0] def test_str2octs(self): assert bytes([1, 2, 3]) == octets.str2octs('\x01\x02\x03') def test_str2octs_empty(self): assert not octets.str2octs('') def test_octs2str(self): assert '\x01\x02\x03' == octets.octs2str(bytes([1, 2, 3])) def test_octs2str_empty(self): assert not octets.octs2str(bytes([])) def test_isOctetsType(self): assert octets.isOctetsType('abc') == False assert octets.isOctetsType(123) == False assert octets.isOctetsType(bytes()) == True def test_isStringType(self): assert octets.isStringType('abc') == True assert octets.isStringType(123) == False assert octets.isStringType(bytes()) == False def test_ensureString(self): assert 'abc'.encode() == octets.ensureString('abc'.encode()) assert bytes([1, 2, 3]) == octets.ensureString([1, 2, 3]) else: def test_ints2octs(self): assert '\x01\x02\x03' == octets.ints2octs([1, 2, 3]) def test_ints2octs_empty(self): assert not octets.ints2octs([]) def test_int2oct(self): assert '\x0c' == octets.int2oct(12) def test_octs2ints(self): assert [1, 2, 3] == octets.octs2ints('\x01\x02\x03') def test_octs2ints_empty(self): assert not octets.octs2ints('') def test_oct2int(self): assert 12 == octets.oct2int('\x0c') def test_str2octs(self): assert '\x01\x02\x03' == octets.str2octs('\x01\x02\x03') def test_str2octs_empty(self): assert not octets.str2octs('') def test_octs2str(self): assert '\x01\x02\x03' == octets.octs2str('\x01\x02\x03') def test_octs2str_empty(self): assert not octets.octs2str('') def test_isOctetsType(self): assert octets.isOctetsType('abc') == True assert octets.isOctetsType(123) == False assert octets.isOctetsType(unicode('abc')) == False def test_isStringType(self): assert octets.isStringType('abc') == True assert octets.isStringType(123) == False assert octets.isStringType(unicode('abc')) == True def test_ensureString(self): assert 'abc' == octets.ensureString('abc') assert '123' == octets.ensureString(123) suite = unittest.TestLoader().loadTestsFromModule(sys.modules[__name__]) if __name__ == '__main__': unittest.TextTestRunner(verbosity=2).run(suite)
{ "content_hash": "1c9ee335552f6f05dc4e2fb7ae3c4876", "timestamp": "", "source": "github", "line_count": 111, "max_line_length": 72, "avg_line_length": 30.56756756756757, "alnum_prop": 0.5797229590333038, "repo_name": "gsutil-mirrors/pyasn1", "id": "b40b1517b42a93475134b60b8995b590495225b1", "size": "3550", "binary": false, "copies": "12", "ref": "refs/heads/master", "path": "tests/compat/test_octets.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Python", "bytes": "571063" } ], "symlink_target": "" }
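The compat layer above papers over the bytes/str split between Python 2 and 3. A minimal illustration of the same conversions using only the standard library on Python 3; the latin-1 round trip mirrors what str2octs/octs2str assert in the tests.

ints = [1, 2, 3]
octs = bytes(ints)                       # ints2octs
assert list(octs) == ints                # octs2ints
text = '\x01\x02\x03'
assert text.encode('latin-1') == octs    # str2octs
assert octs.decode('latin-1') == text    # octs2str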
from __future__ import unicode_literals, division, absolute_import import logging from flexget import options from flexget.event import event from flexget.manager import Session from flexget.utils.tools import console log = logging.getLogger('perftests') TESTS = ['imdb_query'] def cli_perf_test(manager, options): if options.test_name not in TESTS: console('Unknown performance test %s' % options.test_name) return session = Session() try: if options.test_name == 'imdb_query': imdb_query(session) finally: session.close() def imdb_query(session): import time from flexget.plugins.metainfo.imdb_lookup import Movie from flexget.plugins.cli.performance import log_query_count from sqlalchemy.sql.expression import select from progressbar import ProgressBar, Percentage, Bar, ETA from sqlalchemy.orm import joinedload_all imdb_urls = [] log.info('Getting imdb_urls ...') # query so that we avoid loading whole object (maybe cached?) for id, url in session.execute(select([Movie.id, Movie.url])): imdb_urls.append(url) log.info('Got %i urls from database' % len(imdb_urls)) if not imdb_urls: log.info('so .. aborting') return # commence testing widgets = ['Benchmarking - ', ETA(), ' ', Percentage(), ' ', Bar(left='[', right=']')] bar = ProgressBar(widgets=widgets, maxval=len(imdb_urls)).start() log_query_count('test') start_time = time.time() for index, url in enumerate(imdb_urls): bar.update(index) #movie = session.query(Movie).filter(Movie.url == url).first() #movie = session.query(Movie).options(subqueryload(Movie.genres)).filter(Movie.url == url).one() movie = session.query(Movie).\ options(joinedload_all(Movie.genres, Movie.languages, Movie.actors, Movie.directors)).\ filter(Movie.url == url).first() # access it's members so they're loaded var = [x.name for x in movie.genres] var = [x.name for x in movie.directors] var = [x.name for x in movie.actors] var = [x.name for x in movie.languages] log_query_count('test') took = time.time() - start_time log.debug('Took %.2f seconds to query %i movies' % (took, len(imdb_urls))) @event('options.register') def register_parser_arguments(): perf_parser = options.register_command('perf-test', cli_perf_test) perf_parser.add_argument('test_name', metavar='<test name>', choices=TESTS)
{ "content_hash": "6527001566651eafd89f1eacfc807970", "timestamp": "", "source": "github", "line_count": 77, "max_line_length": 104, "avg_line_length": 33.05194805194805, "alnum_prop": 0.6542239685658153, "repo_name": "BrainDamage/Flexget", "id": "f900ebe10978a7a7c83a468ec6bd944b70e12a04", "size": "2545", "binary": false, "copies": "6", "ref": "refs/heads/develop", "path": "flexget/plugins/cli/perf_tests.py", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
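A stripped-down sketch of the benchmarking pattern above: time a batch of queries and report the total, using only the standard library. The `run_query` callable stands in for the real per-movie lookup and the item count is illustrative.

import time


def benchmark(items, run_query):
    start_time = time.time()
    for item in items:
        run_query(item)
    return time.time() - start_time


took = benchmark(range(1000), lambda i: i * i)
print('Took %.2f seconds to query %i movies' % (took, 1000))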
import collections import decimal import json as jsonlib import os import random import re from operator import attrgetter from urlparse import urljoin from django.conf import settings from django.core.exceptions import ObjectDoesNotExist from django.forms import CheckboxInput from django.utils.translation import ( ugettext, trim_whitespace, to_locale, get_language) from django.utils.encoding import force_text from django.utils.html import format_html from django.template import defaultfilters from django.utils.functional import lazy from django.utils.safestring import mark_safe import caching.base as caching import jinja2 import waffle from babel.support import Format from jingo import register, get_env # Needed to make sure our own |f filter overrides jingo's one. from jingo import helpers # noqa from jingo_minify.helpers import ( _build_html, _get_compiled_css_url, get_path, is_external) from olympia import amo from olympia.amo import utils, urlresolvers from olympia.constants.licenses import PERSONA_LICENSES_IDS # Yanking filters from Django. register.filter(defaultfilters.slugify) # Registering some utils as filters: urlparams = register.filter(utils.urlparams) register.filter(utils.epoch) register.filter(utils.isotime) register.function(dict) register.function(utils.randslice) # Mark a lazy marked instance as safe but keep # it lazy mark_safe_lazy = lazy(mark_safe, unicode) @register.function def switch_is_active(switch_name): return waffle.switch_is_active(switch_name) @register.filter def link(item): html = """<a href="%s">%s</a>""" % (item.get_url_path(), jinja2.escape(item.name)) return jinja2.Markup(html) @register.filter def xssafe(value): """ Like |safe but for strings with interpolation. By using |xssafe you assert that you have written tests proving an XSS can't happen here. """ return jinja2.Markup(value) @register.filter def babel_datetime(dt, format='medium'): return _get_format().datetime(dt, format=format) if dt else '' @register.filter def babel_date(date, format='medium'): return _get_format().date(date, format=format) if date else '' @register.function def locale_url(url): """Take a URL and give it the locale prefix.""" prefixer = urlresolvers.get_url_prefix() script = prefixer.request.META['SCRIPT_NAME'] parts = [script, prefixer.locale, url.lstrip('/')] return '/'.join(parts) @register.function def url(viewname, *args, **kwargs): """Helper for Django's ``reverse`` in templates.""" add_prefix = kwargs.pop('add_prefix', True) host = kwargs.pop('host', '') src = kwargs.pop('src', '') url = '%s%s' % (host, urlresolvers.reverse(viewname, args=args, kwargs=kwargs, add_prefix=add_prefix)) if src: url = urlparams(url, src=src) return url @register.function def services_url(viewname, *args, **kwargs): """Helper for ``url`` with host=SERVICES_URL.""" kwargs.update({'host': settings.SERVICES_URL}) return url(viewname, *args, **kwargs) @register.filter def paginator(pager): return Paginator(pager).render() @register.filter def impala_paginator(pager): t = get_env().get_template('amo/impala/paginator.html') return jinja2.Markup(t.render({'pager': pager})) @register.function def sidebar(app): """Populates the sidebar with (categories, types).""" from olympia.addons.models import Category if app is None: return [], [] # Fetch categories... 
qs = Category.objects.filter(application=app.id, weight__gte=0, type=amo.ADDON_EXTENSION) # Now sort them in python according to their name property (which looks up # the translated name using gettext + our constants) categories = sorted(qs, key=attrgetter('weight', 'name')) Type = collections.namedtuple('Type', 'id name url') base = urlresolvers.reverse('home') types = [Type(99, ugettext('Collections'), base + 'collections/')] shown_types = { amo.ADDON_PERSONA: urlresolvers.reverse('browse.personas'), amo.ADDON_DICT: urlresolvers.reverse('browse.language-tools'), amo.ADDON_SEARCH: urlresolvers.reverse('browse.search-tools'), amo.ADDON_THEME: urlresolvers.reverse('browse.themes'), } titles = dict( amo.ADDON_TYPES, **{amo.ADDON_DICT: ugettext('Dictionaries & Language Packs')}) for type_, url in shown_types.items(): if type_ in app.types: types.append(Type(type_, titles[type_], url)) return categories, sorted(types, key=lambda x: x.name) class Paginator(object): def __init__(self, pager): self.pager = pager self.max = 10 self.span = (self.max - 1) / 2 self.page = pager.number self.num_pages = pager.paginator.num_pages self.count = pager.paginator.count pager.page_range = self.range() pager.dotted_upper = self.num_pages not in pager.page_range pager.dotted_lower = 1 not in pager.page_range def range(self): """Return a list of page numbers to show in the paginator.""" page, total, span = self.page, self.num_pages, self.span if total < self.max: lower, upper = 0, total elif page < span + 1: lower, upper = 0, span * 2 elif page > total - span: lower, upper = total - span * 2, total else: lower, upper = page - span, page + span - 1 return range(max(lower + 1, 1), min(total, upper) + 1) def render(self): c = {'pager': self.pager, 'num_pages': self.num_pages, 'count': self.count} t = get_env().get_template('amo/paginator.html').render(c) return jinja2.Markup(t) def _get_format(): lang = get_language() return Format(utils.get_locale_from_lang(lang)) @register.filter def numberfmt(num, format=None): return _get_format().decimal(num, format) @register.filter def currencyfmt(num, currency): if num is None: return '' return _get_format().currency(decimal.Decimal(num), currency) def page_name(app=None): """Determine the correct page name for the given app (or no app).""" if app: return ugettext(u'Add-ons for {0}').format(app.pretty) else: return ugettext('Add-ons') @register.function @jinja2.contextfunction def page_title(context, title): title = force_text(title) base_title = page_name(context['request'].APP) # The following line doesn't use string formatting because we want to # preserve the type of `title` in case it's a jinja2 `Markup` (safe, # escaped) object. return format_html(u'{} :: {}', title, base_title) @register.filter def json(s): return jsonlib.dumps(s) @register.filter def absolutify(url, site=None): """Takes a URL and prepends the SITE_URL""" if url.startswith('http'): return url else: return urljoin(site or settings.SITE_URL, url) @register.filter def strip_controls(s): """ Strips control characters from a string. """ # Translation table of control characters. 
control_trans = dict((n, None) for n in xrange(32) if n not in [10, 13]) rv = unicode(s).translate(control_trans) return jinja2.Markup(rv) if isinstance(s, jinja2.Markup) else rv @register.filter def external_url(url): """Bounce a URL off outgoing.prod.mozaws.net.""" return urlresolvers.get_outgoing_url(unicode(url)) @register.filter def shuffle(sequence): """Shuffle a sequence.""" random.shuffle(sequence) return sequence @register.function def license_link(license): """Link to a code license, including icon where applicable.""" # If passed in an integer, try to look up the License. from olympia.versions.models import License if isinstance(license, (long, int)): if license in PERSONA_LICENSES_IDS: # Grab built-in license. license = PERSONA_LICENSES_IDS[license] else: # Grab custom license. license = License.objects.filter(id=license) if not license.exists(): return '' license = license[0] elif not license: return '' if not getattr(license, 'builtin', True): return ugettext('Custom License') template = get_env().get_template('amo/license_link.html') return jinja2.Markup(template.render({'license': license})) @register.function def field(field, label=None, **attrs): if label is not None: field.label = label # HTML from Django is already escaped. return jinja2.Markup(u'%s<p>%s%s</p>' % (field.errors, field.label_tag(), field.as_widget(attrs=attrs))) @register.inclusion_tag('amo/category-arrow.html') @jinja2.contextfunction def category_arrow(context, key, prefix): d = dict(context.items()) d.update(key=key, prefix=prefix) return d @register.filter def timesince(time): if not time: return u'' ago = defaultfilters.timesince(time) # L10n: relative time in the past, like '4 days ago' return ugettext(u'{0} ago').format(ago) @register.inclusion_tag('amo/recaptcha.html') @jinja2.contextfunction def recaptcha(context, form): d = dict(context.items()) d.update(form=form) return d @register.filter def is_choice_field(value): try: return isinstance(value.field.widget, CheckboxInput) except AttributeError: pass @register.function @jinja2.contextfunction def cache_buster(context, url): if 'BUILD_ID' in context: build = context['BUILD_ID'] else: if url.endswith('.js'): build = context['BUILD_ID_JS'] elif url.endswith('.css'): build = context['BUILD_ID_CSS'] else: build = context['BUILD_ID_IMG'] return utils.urlparams(url, b=build) @register.function @jinja2.contextfunction def media(context, url): """Get a MEDIA_URL link with a cache buster querystring.""" return urljoin(settings.MEDIA_URL, cache_buster(context, url)) @register.function @jinja2.contextfunction def static(context, url): """Get a STATIC_URL link with a cache buster querystring.""" return urljoin(settings.STATIC_URL, cache_buster(context, url)) @register.function @jinja2.evalcontextfunction def attrs(ctx, *args, **kw): return jinja2.filters.do_xmlattr(ctx, dict(*args, **kw)) @register.function @jinja2.contextfunction def side_nav(context, addon_type, category=None): app = context['request'].APP.id cat = str(category.id) if category else 'all' return caching.cached(lambda: _side_nav(context, addon_type, category), 'side-nav-%s-%s-%s' % (app, addon_type, cat)) def _side_nav(context, addon_type, cat): # Prevent helpers generating circular imports. 
from olympia.addons.models import Category, Addon request = context['request'] qs = Category.objects.filter(weight__gte=0) if addon_type != amo.ADDON_PERSONA: qs = qs.filter(application=request.APP.id) sort_key = attrgetter('weight', 'name') categories = sorted(qs.filter(type=addon_type), key=sort_key) if cat: base_url = cat.get_url_path() else: base_url = Addon.get_type_url(addon_type) ctx = dict(request=request, base_url=base_url, categories=categories, addon_type=addon_type, amo=amo) template = get_env().get_template('amo/side_nav.html') return jinja2.Markup(template.render(ctx)) @register.function @jinja2.contextfunction def site_nav(context): app = context['request'].APP.id return caching.cached(lambda: _site_nav(context), 'site-nav-%s' % app) def _site_nav(context): # Prevent helpers from generating circular imports. from olympia.addons.models import Category request = context['request'] def sorted_cats(qs): return sorted(qs, key=attrgetter('weight', 'name')) extensions = Category.objects.filter( application=request.APP.id, weight__gte=0, type=amo.ADDON_EXTENSION) personas = Category.objects.filter(weight__gte=0, type=amo.ADDON_PERSONA) ctx = dict(request=request, amo=amo, extensions=sorted_cats(extensions), personas=sorted_cats(personas)) template = get_env().get_template('amo/site_nav.html') return jinja2.Markup(template.render(ctx)) @register.function def loc(s): """A noop function for strings that are not ready to be localized.""" return trim_whitespace(s) @register.function def site_event_type(type): return amo.SITE_EVENT_CHOICES[type] @register.function @jinja2.contextfunction def remora_url(context, url, lang=None, app=None, prefix=''): """Wrapper for urlresolvers.remora_url""" if lang is None: _lang = context['LANG'] if _lang: lang = to_locale(_lang).replace('_', '-') if app is None: try: app = context['APP'].short except (AttributeError, KeyError): pass return urlresolvers.remora_url(url=url, lang=lang, app=app, prefix=prefix) @register.function @jinja2.contextfunction def hasOneToOne(context, obj, attr): try: getattr(obj, attr) return True except ObjectDoesNotExist: return False @register.function def no_results_amo(): # This prints a "No results found" message. That's all. Carry on. t = get_env().get_template('amo/no_results.html').render() return jinja2.Markup(t) def _relative_to_absolute(url): """ Prepends relative URLs with STATIC_URL to turn those inline-able. This method is intended to be used as a ``replace`` parameter of ``re.sub``. """ url = url.group(1).strip('"\'') if not url.startswith(('data:', 'http:', 'https:', '//')): url = url.replace('../../', settings.STATIC_URL) return 'url(%s)' % url @register.function def inline_css(bundle, media=False, debug=None): """ If we are in debug mode, just output a single style tag for each css file. If we are not in debug mode, return a style that contains bundle-min.css. Forces a regular css() call for external URLs (no inline allowed). Extracted from jingo-minify and re-registered, see: https://github.com/jsocol/jingo-minify/pull/41 Added: turns relative links to absolute ones using STATIC_URL. 
""" if debug is None: debug = getattr(settings, 'DEBUG', False) if debug: items = [_get_compiled_css_url(i) for i in settings.MINIFY_BUNDLES['css'][bundle]] else: items = ['css/%s-min.css' % bundle] if not media: media = getattr(settings, 'CSS_MEDIA_DEFAULT', 'screen,projection,tv') contents = [] for css in items: if is_external(css): return _build_html([css], '<link rel="stylesheet" media="%s" ' 'href="%%s" />' % media) with open(get_path(css), 'r') as f: css_content = f.read() css_parsed = re.sub(r'url\(([^)]*?)\)', _relative_to_absolute, css_content) contents.append(css_parsed) return _build_html(contents, '<style type="text/css" media="%s">%%s' '</style>' % media) # A (temporary?) copy of this is in services/utils.py. See bug 1055654. def user_media_path(what): """Make it possible to override storage paths in settings. By default, all storage paths are in the MEDIA_ROOT. This is backwards compatible. """ default = os.path.join(settings.MEDIA_ROOT, what) key = "{0}_PATH".format(what.upper()) return getattr(settings, key, default) # A (temporary?) copy of this is in services/utils.py. See bug 1055654. def user_media_url(what): """ Generate default media url, and make possible to override it from settings. """ default = '%s%s/' % (settings.MEDIA_URL, what) key = "{0}_URL".format(what.upper().replace('-', '_')) return getattr(settings, key, default) def id_to_path(pk): """ Generate a path from an id, to distribute folders in the file system. 1 => 1/1/1 12 => 2/12/12 123456 => 6/56/123456 """ pk = unicode(pk) path = [pk[-1]] if len(pk) >= 2: path.append(pk[-2:]) else: path.append(pk) path.append(pk) return os.path.join(*path) @register.filter def hidden_field(field): return field.as_widget(attrs={'style': 'display:none'})
{ "content_hash": "8a0d90dbc3432ca3f99d94f8b925916a", "timestamp": "", "source": "github", "line_count": 565, "max_line_length": 78, "avg_line_length": 29.67787610619469, "alnum_prop": 0.642175572519084, "repo_name": "harikishen/addons-server", "id": "0539d7913d1bf707c368cd7077e7747975310646", "size": "16768", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/olympia/amo/helpers.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "822508" }, { "name": "HTML", "bytes": "698554" }, { "name": "JavaScript", "bytes": "1087360" }, { "name": "Makefile", "bytes": "811" }, { "name": "PLSQL", "bytes": "990" }, { "name": "PLpgSQL", "bytes": "2381" }, { "name": "Python", "bytes": "4560536" }, { "name": "SQLPL", "bytes": "559" }, { "name": "Shell", "bytes": "7564" }, { "name": "Smarty", "bytes": "1859" } ], "symlink_target": "" }
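`id_to_path` above shards objects across directories by the trailing digits of their primary key. A quick check of the documented examples, ported to `str` so the sketch runs on Python 3:

import os


def id_to_path(pk):
    pk = str(pk)
    path = [pk[-1]]
    if len(pk) >= 2:
        path.append(pk[-2:])
    else:
        path.append(pk)
    path.append(pk)
    return os.path.join(*path)


assert id_to_path(1) == os.path.join('1', '1', '1')
assert id_to_path(12) == os.path.join('2', '12', '12')
assert id_to_path(123456) == os.path.join('6', '56', '123456')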
import datetime import json import mimetypes import os import re import sys import time from email.header import Header from http.client import responses from urllib.parse import quote, urlparse from django.conf import settings from django.core import signals, signing from django.core.exceptions import DisallowedRedirect from django.core.serializers.json import DjangoJSONEncoder from django.http.cookie import SimpleCookie from django.utils import timezone from django.utils.encoding import iri_to_uri from django.utils.http import http_date from django.utils.regex_helper import _lazy_re_compile _charset_from_content_type_re = _lazy_re_compile(r';\s*charset=(?P<charset>[^\s;]+)', re.I) class BadHeaderError(ValueError): pass class HttpResponseBase: """ An HTTP response base class with dictionary-accessed headers. This class doesn't handle content. It should not be used directly. Use the HttpResponse and StreamingHttpResponse subclasses instead. """ status_code = 200 def __init__(self, content_type=None, status=None, reason=None, charset=None): # _headers is a mapping of the lowercase name to the original case of # the header (required for working with legacy systems) and the header # value. Both the name of the header and its value are ASCII strings. self._headers = {} self._closable_objects = [] # This parameter is set by the handler. It's necessary to preserve the # historical behavior of request_finished. self._handler_class = None self.cookies = SimpleCookie() self.closed = False if status is not None: try: self.status_code = int(status) except (ValueError, TypeError): raise TypeError('HTTP status code must be an integer.') if not 100 <= self.status_code <= 599: raise ValueError('HTTP status code must be an integer from 100 to 599.') self._reason_phrase = reason self._charset = charset if content_type is None: content_type = 'text/html; charset=%s' % self.charset self['Content-Type'] = content_type @property def reason_phrase(self): if self._reason_phrase is not None: return self._reason_phrase # Leave self._reason_phrase unset in order to use the default # reason phrase for status code. return responses.get(self.status_code, 'Unknown Status Code') @reason_phrase.setter def reason_phrase(self, value): self._reason_phrase = value @property def charset(self): if self._charset is not None: return self._charset content_type = self.get('Content-Type', '') matched = _charset_from_content_type_re.search(content_type) if matched: # Extract the charset and strip its double quotes return matched.group('charset').replace('"', '') return settings.DEFAULT_CHARSET @charset.setter def charset(self, value): self._charset = value def serialize_headers(self): """HTTP headers as a bytestring.""" def to_bytes(val, encoding): return val if isinstance(val, bytes) else val.encode(encoding) headers = [ (to_bytes(key, 'ascii') + b': ' + to_bytes(value, 'latin-1')) for key, value in self._headers.values() ] return b'\r\n'.join(headers) __bytes__ = serialize_headers @property def _content_type_for_repr(self): return ', "%s"' % self['Content-Type'] if 'Content-Type' in self else '' def _convert_to_charset(self, value, charset, mime_encode=False): """ Convert headers key/value to ascii/latin-1 native strings. `charset` must be 'ascii' or 'latin-1'. If `mime_encode` is True and `value` can't be represented in the given charset, apply MIME-encoding. 
""" if not isinstance(value, (bytes, str)): value = str(value) if ((isinstance(value, bytes) and (b'\n' in value or b'\r' in value)) or isinstance(value, str) and ('\n' in value or '\r' in value)): raise BadHeaderError("Header values can't contain newlines (got %r)" % value) try: if isinstance(value, str): # Ensure string is valid in given charset value.encode(charset) else: # Convert bytestring using given charset value = value.decode(charset) except UnicodeError as e: if mime_encode: value = Header(value, 'utf-8', maxlinelen=sys.maxsize).encode() else: e.reason += ', HTTP response headers must be in %s format' % charset raise return value def __setitem__(self, header, value): header = self._convert_to_charset(header, 'ascii') value = self._convert_to_charset(value, 'latin-1', mime_encode=True) self._headers[header.lower()] = (header, value) def __delitem__(self, header): self._headers.pop(header.lower(), False) def __getitem__(self, header): return self._headers[header.lower()][1] def has_header(self, header): """Case-insensitive check for a header.""" return header.lower() in self._headers __contains__ = has_header def items(self): return self._headers.values() def get(self, header, alternate=None): return self._headers.get(header.lower(), (None, alternate))[1] def set_cookie(self, key, value='', max_age=None, expires=None, path='/', domain=None, secure=False, httponly=False, samesite=None): """ Set a cookie. ``expires`` can be: - a string in the correct format, - a naive ``datetime.datetime`` object in UTC, - an aware ``datetime.datetime`` object in any time zone. If it is a ``datetime.datetime`` object then calculate ``max_age``. """ self.cookies[key] = value if expires is not None: if isinstance(expires, datetime.datetime): if timezone.is_aware(expires): expires = timezone.make_naive(expires, timezone.utc) delta = expires - expires.utcnow() # Add one second so the date matches exactly (a fraction of # time gets lost between converting to a timedelta and # then the date string). delta = delta + datetime.timedelta(seconds=1) # Just set max_age - the max_age logic will set expires. expires = None max_age = max(0, delta.days * 86400 + delta.seconds) else: self.cookies[key]['expires'] = expires else: self.cookies[key]['expires'] = '' if max_age is not None: self.cookies[key]['max-age'] = max_age # IE requires expires, so set it if hasn't been already. if not expires: self.cookies[key]['expires'] = http_date(time.time() + max_age) if path is not None: self.cookies[key]['path'] = path if domain is not None: self.cookies[key]['domain'] = domain if secure: self.cookies[key]['secure'] = True if httponly: self.cookies[key]['httponly'] = True if samesite: if samesite.lower() not in ('lax', 'strict'): raise ValueError('samesite must be "lax" or "strict".') self.cookies[key]['samesite'] = samesite def setdefault(self, key, value): """Set a header unless it has already been set.""" if key not in self: self[key] = value def set_signed_cookie(self, key, value, salt='', **kwargs): value = signing.get_cookie_signer(salt=key + salt).sign(value) return self.set_cookie(key, value, **kwargs) def delete_cookie(self, key, path='/', domain=None): # Most browsers ignore the Set-Cookie header if the cookie name starts # with __Host- or __Secure- and the cookie doesn't use the secure flag. 
secure = key.startswith(('__Secure-', '__Host-')) self.set_cookie( key, max_age=0, path=path, domain=domain, secure=secure, expires='Thu, 01 Jan 1970 00:00:00 GMT', ) # Common methods used by subclasses def make_bytes(self, value): """Turn a value into a bytestring encoded in the output charset.""" # Per PEP 3333, this response body must be bytes. To avoid returning # an instance of a subclass, this function returns `bytes(value)`. # This doesn't make a copy when `value` already contains bytes. # Handle string types -- we can't rely on force_bytes here because: # - Python attempts str conversion first # - when self._charset != 'utf-8' it re-encodes the content if isinstance(value, (bytes, memoryview)): return bytes(value) if isinstance(value, str): return bytes(value.encode(self.charset)) # Handle non-string types. return str(value).encode(self.charset) # These methods partially implement the file-like object interface. # See https://docs.python.org/library/io.html#io.IOBase # The WSGI server must call this method upon completion of the request. # See http://blog.dscpl.com.au/2012/10/obligations-for-calling-close-on.html # When wsgi.file_wrapper is used, the WSGI server instead calls close() # on the file-like object. Django ensures this method is called in this # case by replacing self.file_to_stream.close() with a wrapped version. def close(self): for closable in self._closable_objects: try: closable.close() except Exception: pass self.closed = True signals.request_finished.send(sender=self._handler_class) def write(self, content): raise OSError('This %s instance is not writable' % self.__class__.__name__) def flush(self): pass def tell(self): raise OSError('This %s instance cannot tell its position' % self.__class__.__name__) # These methods partially implement a stream-like object interface. # See https://docs.python.org/library/io.html#io.IOBase def readable(self): return False def seekable(self): return False def writable(self): return False def writelines(self, lines): raise OSError('This %s instance is not writable' % self.__class__.__name__) class HttpResponse(HttpResponseBase): """ An HTTP response class with a string as content. This content that can be read, appended to, or replaced. """ streaming = False def __init__(self, content=b'', *args, **kwargs): super().__init__(*args, **kwargs) # Content is a bytestring. See the `content` property methods. self.content = content def __repr__(self): return '<%(cls)s status_code=%(status_code)d%(content_type)s>' % { 'cls': self.__class__.__name__, 'status_code': self.status_code, 'content_type': self._content_type_for_repr, } def serialize(self): """Full HTTP message, including headers, as a bytestring.""" return self.serialize_headers() + b'\r\n\r\n' + self.content __bytes__ = serialize @property def content(self): return b''.join(self._container) @content.setter def content(self, value): # Consume iterators upon assignment to allow repeated iteration. if hasattr(value, '__iter__') and not isinstance(value, (bytes, str)): content = b''.join(self.make_bytes(chunk) for chunk in value) if hasattr(value, 'close'): try: value.close() except Exception: pass else: content = self.make_bytes(value) # Create a list of properly encoded bytestrings to support write(). 
self._container = [content] def __iter__(self): return iter(self._container) def write(self, content): self._container.append(self.make_bytes(content)) def tell(self): return len(self.content) def getvalue(self): return self.content def writable(self): return True def writelines(self, lines): for line in lines: self.write(line) class StreamingHttpResponse(HttpResponseBase): """ A streaming HTTP response class with an iterator as content. This should only be iterated once, when the response is streamed to the client. However, it can be appended to or replaced with a new iterator that wraps the original content (or yields entirely new content). """ streaming = True def __init__(self, streaming_content=(), *args, **kwargs): super().__init__(*args, **kwargs) # `streaming_content` should be an iterable of bytestrings. # See the `streaming_content` property methods. self.streaming_content = streaming_content @property def content(self): raise AttributeError( "This %s instance has no `content` attribute. Use " "`streaming_content` instead." % self.__class__.__name__ ) @property def streaming_content(self): return map(self.make_bytes, self._iterator) @streaming_content.setter def streaming_content(self, value): self._set_streaming_content(value) def _set_streaming_content(self, value): # Ensure we can never iterate on "value" more than once. self._iterator = iter(value) if hasattr(value, 'close'): self._closable_objects.append(value) def __iter__(self): return self.streaming_content def getvalue(self): return b''.join(self.streaming_content) class FileResponse(StreamingHttpResponse): """ A streaming HTTP response class optimized for files. """ block_size = 4096 def __init__(self, *args, as_attachment=False, filename='', **kwargs): self.as_attachment = as_attachment self.filename = filename super().__init__(*args, **kwargs) def _wrap_file_to_stream_close(self, filelike): """ Wrap the file-like close() with a version that calls FileResponse.close(). """ closing = False filelike_close = getattr(filelike, 'close', lambda: None) def file_wrapper_close(): nonlocal closing # Prevent an infinite loop since FileResponse.close() tries to # close the objects in self._closable_objects. if closing: return closing = True try: filelike_close() finally: self.close() filelike.close = file_wrapper_close def _set_streaming_content(self, value): if not hasattr(value, 'read'): self.file_to_stream = None return super()._set_streaming_content(value) self.file_to_stream = filelike = value # Add to closable objects before wrapping close(), since the filelike # might not have close(). if hasattr(filelike, 'close'): self._closable_objects.append(filelike) self._wrap_file_to_stream_close(filelike) value = iter(lambda: filelike.read(self.block_size), b'') self.set_headers(filelike) super()._set_streaming_content(value) def set_headers(self, filelike): """ Set some common response headers (Content-Length, Content-Type, and Content-Disposition) based on the `filelike` response content. 
""" encoding_map = { 'bzip2': 'application/x-bzip', 'gzip': 'application/gzip', 'xz': 'application/x-xz', } filename = getattr(filelike, 'name', None) filename = filename if (isinstance(filename, str) and filename) else self.filename if os.path.isabs(filename): self['Content-Length'] = os.path.getsize(filelike.name) elif hasattr(filelike, 'getbuffer'): self['Content-Length'] = filelike.getbuffer().nbytes if self.get('Content-Type', '').startswith('text/html'): if filename: content_type, encoding = mimetypes.guess_type(filename) # Encoding isn't set to prevent browsers from automatically # uncompressing files. content_type = encoding_map.get(encoding, content_type) self['Content-Type'] = content_type or 'application/octet-stream' else: self['Content-Type'] = 'application/octet-stream' filename = self.filename or os.path.basename(filename) if filename: disposition = 'attachment' if self.as_attachment else 'inline' try: filename.encode('ascii') file_expr = 'filename="{}"'.format(filename) except UnicodeEncodeError: file_expr = "filename*=utf-8''{}".format(quote(filename)) self['Content-Disposition'] = '{}; {}'.format(disposition, file_expr) elif self.as_attachment: self['Content-Disposition'] = 'attachment' class HttpResponseRedirectBase(HttpResponse): allowed_schemes = ['http', 'https', 'ftp'] def __init__(self, redirect_to, *args, **kwargs): super().__init__(*args, **kwargs) self['Location'] = iri_to_uri(redirect_to) parsed = urlparse(str(redirect_to)) if parsed.scheme and parsed.scheme not in self.allowed_schemes: raise DisallowedRedirect("Unsafe redirect to URL with protocol '%s'" % parsed.scheme) url = property(lambda self: self['Location']) def __repr__(self): return '<%(cls)s status_code=%(status_code)d%(content_type)s, url="%(url)s">' % { 'cls': self.__class__.__name__, 'status_code': self.status_code, 'content_type': self._content_type_for_repr, 'url': self.url, } class HttpResponseRedirect(HttpResponseRedirectBase): status_code = 302 class HttpResponsePermanentRedirect(HttpResponseRedirectBase): status_code = 301 class HttpResponseNotModified(HttpResponse): status_code = 304 def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) del self['content-type'] @HttpResponse.content.setter def content(self, value): if value: raise AttributeError("You cannot set content to a 304 (Not Modified) response") self._container = [] class HttpResponseBadRequest(HttpResponse): status_code = 400 class HttpResponseNotFound(HttpResponse): status_code = 404 class HttpResponseForbidden(HttpResponse): status_code = 403 class HttpResponseNotAllowed(HttpResponse): status_code = 405 def __init__(self, permitted_methods, *args, **kwargs): super().__init__(*args, **kwargs) self['Allow'] = ', '.join(permitted_methods) def __repr__(self): return '<%(cls)s [%(methods)s] status_code=%(status_code)d%(content_type)s>' % { 'cls': self.__class__.__name__, 'status_code': self.status_code, 'content_type': self._content_type_for_repr, 'methods': self['Allow'], } class HttpResponseGone(HttpResponse): status_code = 410 class HttpResponseServerError(HttpResponse): status_code = 500 class Http404(Exception): pass class JsonResponse(HttpResponse): """ An HTTP response class that consumes data to be serialized to JSON. :param data: Data to be dumped into json. By default only ``dict`` objects are allowed to be passed due to a security flaw before EcmaScript 5. See the ``safe`` parameter for more information. :param encoder: Should be a json encoder class. 
Defaults to ``django.core.serializers.json.DjangoJSONEncoder``. :param safe: Controls if only ``dict`` objects may be serialized. Defaults to ``True``. :param json_dumps_params: A dictionary of kwargs passed to json.dumps(). """ def __init__(self, data, encoder=DjangoJSONEncoder, safe=True, json_dumps_params=None, **kwargs): if safe and not isinstance(data, dict): raise TypeError( 'In order to allow non-dict objects to be serialized set the ' 'safe parameter to False.' ) if json_dumps_params is None: json_dumps_params = {} kwargs.setdefault('content_type', 'application/json') data = json.dumps(data, cls=encoder, **json_dumps_params) super().__init__(content=data, **kwargs)
{ "content_hash": "24d55253238d3f49721e6729aee7c220", "timestamp": "", "source": "github", "line_count": 589, "max_line_length": 97, "avg_line_length": 35.61290322580645, "alnum_prop": 0.6059305873379099, "repo_name": "georgemarshall/django", "id": "269953c0aff3f9b834ab4ea9e715aab8d6fb2c2b", "size": "20976", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "django/http/response.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "53023" }, { "name": "HTML", "bytes": "172977" }, { "name": "JavaScript", "bytes": "448123" }, { "name": "Makefile", "bytes": "125" }, { "name": "Python", "bytes": "12112373" }, { "name": "Shell", "bytes": "809" }, { "name": "Smarty", "bytes": "130" } ], "symlink_target": "" }
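The `safe` flag on JsonResponse, defined above, is easy to trip over: non-dict payloads are rejected unless you opt out. A short usage sketch; running it standalone assumes Django settings are configured first (e.g. via django.conf.settings.configure()).

from django.http import JsonResponse

resp = JsonResponse({'ok': True})            # dicts are always allowed
resp = JsonResponse([1, 2, 3], safe=False)   # lists need safe=False
try:
    JsonResponse([1, 2, 3])                  # raises TypeError
except TypeError as exc:
    print(exc)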
from BinPy.config.constants import *
{ "content_hash": "c0c30cfe83b3f52018ec79431871c952", "timestamp": "", "source": "github", "line_count": 1, "max_line_length": 36, "avg_line_length": 37, "alnum_prop": 0.8108108108108109, "repo_name": "MridulS/BinPy", "id": "13349713789b51979160e0807a0b50f88c1f69ca", "size": "37", "binary": false, "copies": "5", "ref": "refs/heads/develop", "path": "BinPy/config/__init__.py", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
from .profile_edit import *
{ "content_hash": "706c20f3889b133ebfb55d9c3984175e", "timestamp": "", "source": "github", "line_count": 1, "max_line_length": 27, "avg_line_length": 27, "alnum_prop": 0.7777777777777778, "repo_name": "vinoth3v/In", "id": "245f1e9f9c2a1eb6ec3896f2ff053fd0963f824b", "size": "28", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "In/profile/page/__init__.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "JavaScript", "bytes": "33032" }, { "name": "Python", "bytes": "779047" } ], "symlink_target": "" }
import pytest # noqa from django.template.base import Context, Template def test_import(): import apptemplates assert apptemplates.__name__ == 'apptemplates' def test_render(): c = Context() t = Template('{% extends "admin:admin/base.html" %}') t.render(c)
{ "content_hash": "7f0d3db010a0f9ea7a4d66592510f726", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 57, "avg_line_length": 21.692307692307693, "alnum_prop": 0.6631205673758865, "repo_name": "jdotjdot/django-apptemplates", "id": "199bc240c6cb7063fa5b3341fb429d3d1008d175", "size": "282", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "7766" } ], "symlink_target": "" }
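The `admin:` prefix in the test above is django-apptemplates' loader syntax for pulling a template from a specific app. A hypothetical further example, assuming a `blog` app is installed and the apptemplates loader is configured (rendering, as opposed to parsing, needs that setup in place):

from django.template.base import Context, Template

t = Template('{% extends "blog:blog/base.html" %}')
t.render(Context())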
from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='User', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')), ('full_name', models.CharField(max_length=250)), ('email', models.EmailField(max_length=254)), ('is_admin', models.BooleanField(default=False)), ('password', models.CharField(max_length=250)), ], options={ 'abstract': False, }, ), migrations.CreateModel( name='Employee', fields=[ ('user_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='users.User')), ], options={ 'abstract': False, }, bases=('users.user',), ), ]
{ "content_hash": "1da45537b4ac8bcbaf763df97b46b64a", "timestamp": "", "source": "github", "line_count": 39, "max_line_length": 185, "avg_line_length": 32.8974358974359, "alnum_prop": 0.5424785658612626, "repo_name": "tassolom/twq-app", "id": "e034f02cf8d829f79e5b70874af06edbde4741ac", "size": "1356", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "api/users/migrations/0001_initial.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "172" }, { "name": "HTML", "bytes": "204" }, { "name": "JavaScript", "bytes": "18302" }, { "name": "Python", "bytes": "34426" }, { "name": "Shell", "bytes": "170" }, { "name": "Vue", "bytes": "15713" } ], "symlink_target": "" }
import subprocess, uuid

from PIL import Image, ImageDraw, ImageFont
from StringIO import StringIO

from utils import guess_type, create_temporary_file, get_video_duration, resize_image, file_extension_for_type
from constants import MEDIA_TYPE_IMAGE, MEDIA_TYPE_VIDEO, DEFAULT_DURATION, SNAP_IMAGE_DIMENSIONS
from exceptions import UnknownMediaType


class Snap(object):

    @staticmethod
    def from_file(path, query, duration=None):
        media_type = guess_type(path)

        if media_type is MEDIA_TYPE_VIDEO:
            if duration is None:
                duration = get_video_duration(path)
            tmp = create_temporary_file(".snap.mp4")
            output_path = tmp.name
            subprocess.Popen(["ffmpeg", "-y", "-i", path, output_path]).wait()

        elif media_type is MEDIA_TYPE_IMAGE:
            image = Image.open(path)
            draw = ImageDraw.Draw(image)
            font = ImageFont.truetype("aller-font.ttf", 28)
            draw.text((10, 10), 'Name:' + query['title'] + ' Artist:' + query['artist'], (255, 0, 0), font=font)
            del draw
            tmp = create_temporary_file(".jpg")
            output_path = tmp.name
            resize_image(image, output_path)
            if not duration:
                duration = DEFAULT_DURATION

        else:
            raise UnknownMediaType("Could not determine media type of the file")

        return Snap(path=output_path, media_type=media_type, duration=duration)

    @staticmethod
    def from_image(img, duration=DEFAULT_DURATION):
        f = create_temporary_file(".jpg")
        resize_image(img, f.name)
        return Snap(path=f.name, media_type=MEDIA_TYPE_IMAGE, duration=duration)

    def upload(self, bot):
        self.media_id = bot.client.upload(self.file.name)
        self.uploaded = True

    def __init__(self, **opts):
        self.uploaded = False
        self.duration = opts['duration']
        self.media_type = opts['media_type']

        if 'sender' in opts:
            self.sender = opts['sender']
            self.snap_id = opts['snap_id']
            self.from_me = False
        else:
            self.snap_id = uuid.uuid4().hex
            self.from_me = True

        if 'data' in opts:
            self.media_type = opts['media_type']
            suffix = "." + file_extension_for_type(opts['media_type'])
            self.file = create_temporary_file(suffix)
            if self.media_type is MEDIA_TYPE_VIDEO:
                self.file.write(opts['data'])
                self.file.flush()
            else:
                image = Image.open(StringIO(opts['data']))
                resize_image(image, self.file.name)
        else:
            path = opts['path']
            self.file = open(path)
{ "content_hash": "e9b4145b85e84511d1f210414179b193", "timestamp": "", "source": "github", "line_count": 80, "max_line_length": 110, "avg_line_length": 34.1125, "alnum_prop": 0.5851960425064126, "repo_name": "ahoskins/Snapper", "id": "d4e820bc256e80baf71ec95b1995106c3eed1659", "size": "2729", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/snap.py", "mode": "33261", "license": "mit", "language": [ { "name": "Python", "bytes": "11634" } ], "symlink_target": "" }
"""Define a model for favorited referendums.""" from sqlalchemy import ( Column, Integer, UnicodeText, ForeignKey ) from .meta import Base class FavoriteReferendum(Base): """Define a model for favorited referendums.""" __tablename__ = 'favoritereferendums' id = Column(Integer, primary_key=True) title = Column(UnicodeText) brief = Column(UnicodeText) position = Column(UnicodeText) userid = Column(Integer, ForeignKey('users.id'))
{ "content_hash": "9243c02029d2f585c3a7821b562697bd", "timestamp": "", "source": "github", "line_count": 20, "max_line_length": 52, "avg_line_length": 24, "alnum_prop": 0.6895833333333333, "repo_name": "Elections-R-Us/Elections-R-Us", "id": "f637ac3067ac743fa5bcb0be60ff7f4ad748dea8", "size": "480", "binary": false, "copies": "1", "ref": "refs/heads/development", "path": "elections_r_us/models/favoritereferendum.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "580052" }, { "name": "HTML", "bytes": "539" }, { "name": "JavaScript", "bytes": "930231" }, { "name": "Python", "bytes": "119264" }, { "name": "Shell", "bytes": "60" } ], "symlink_target": "" }
import sys
import os

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'gemini'
copyright = u'2014, Adam Petrone'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0.0'
# The full version, including alpha/beta/rc tags.
release = '1.0.0'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []

# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'nature'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'geminidoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    ('index', 'gemini.tex', u'gemini Documentation',
     u'Adam Petrone', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'gemini', u'gemini Documentation',
     [u'Adam Petrone'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    ('index', 'gemini', u'gemini Documentation',
     u'Adam Petrone', 'gemini', 'One line description of project.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
{ "content_hash": "68d1a3808ad23a87ad85189904621c6a", "timestamp": "", "source": "github", "line_count": 244, "max_line_length": 79, "avg_line_length": 31.577868852459016, "alnum_prop": 0.7046073977936405, "repo_name": "apetrone/gemini", "id": "556235a04acd4c1e5930ae76effaaff4b35aee1f", "size": "8124", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "docs/conf.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "C", "bytes": "787784" }, { "name": "C++", "bytes": "2518564" }, { "name": "GLSL", "bytes": "4349" }, { "name": "Java", "bytes": "25276" }, { "name": "Makefile", "bytes": "8546" }, { "name": "Objective-C", "bytes": "20209" }, { "name": "Objective-C++", "bytes": "98077" }, { "name": "Python", "bytes": "52495" }, { "name": "Shell", "bytes": "1153" } ], "symlink_target": "" }
""" WSGI config for sample project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/ """ import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sample.settings") from django.core.wsgi import get_wsgi_application application = get_wsgi_application()
{ "content_hash": "d937594329037db83b15610bb6388dd8", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 78, "avg_line_length": 27.642857142857142, "alnum_prop": 0.772609819121447, "repo_name": "cmheisel/ebdocker-py", "id": "5990b83c6aff841b2dac6acc22c5433f7bf2b751", "size": "387", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "sample/sample/wsgi.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "4903" }, { "name": "Shell", "bytes": "253" } ], "symlink_target": "" }
import requests
import unicodecsv
from io import StringIO

# what country has the most serious earthquakes lately?
url = "http://python-gijc15.s3.eu-central-1.amazonaws.com/all_month.csv"
r = requests.get(url)
text = StringIO(r.text)
reader = unicodecsv.DictReader(text, dialect='excel')

new_collection = []
for row in reader:
    if row['type'] == "earthquake":
        if float(row['mag']) >= 6.0:
            new_collection.append(row)
        else:
            pass
    else:
        pass

for item in new_collection:
    item[u'nearest'] = item['place'].split(',')[-1].strip()

# write the results to a new csv
filename = "serious_quakes.csv"
print new_collection[0].keys()
fieldnames = [u'time', u'latitude', u'longitude', u'mag', u'type', u'id', u'place', u'nearest']
with open(filename, "wb") as f:
    writer = unicodecsv.DictWriter(f, fieldnames=fieldnames, extrasaction='ignore')
    writer.writeheader()
    for item in new_collection:
        writer.writerow(item)
{ "content_hash": "71f04504443424c5299945a46ab546b5", "timestamp": "", "source": "github", "line_count": 39, "max_line_length": 95, "avg_line_length": 25.128205128205128, "alnum_prop": 0.6632653061224489, "repo_name": "tommeagher/pythonGIJC15", "id": "22c41451f136c5ab0a63fa5a75910c13a9b41021", "size": "980", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "scripts/completed/quakes_complete.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "89688" }, { "name": "HTML", "bytes": "7408019" }, { "name": "Python", "bytes": "33992" } ], "symlink_target": "" }
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('boards', '0055_auto_20170311_0321'),
    ]

    operations = [
        migrations.AddField(
            model_name='board',
            name='identicon',
            field=models.ImageField(blank=True, default=None, help_text='Identicon for this board. It is automatically generated and stored.', null=True, upload_to=b'', verbose_name='Identicon'),
        ),
        migrations.AddField(
            model_name='board',
            name='identicon_hash',
            field=models.CharField(blank=True, default='', help_text='Identicon hash used to know when to update it', max_length=256, verbose_name='Identicon hash'),
        ),
    ]
{ "content_hash": "643fdab3280f28a33fd82d133202dc9b", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 195, "avg_line_length": 34.56521739130435, "alnum_prop": 0.6276729559748427, "repo_name": "diegojromerolopez/djanban", "id": "44490e731457d2692bdf2420155b2c70ea95b5db", "size": "866", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/djanban/apps/boards/migrations/0056_auto_20170311_2036.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "79709" }, { "name": "HTML", "bytes": "660275" }, { "name": "JavaScript", "bytes": "634320" }, { "name": "Python", "bytes": "993818" }, { "name": "Shell", "bytes": "1732" }, { "name": "TypeScript", "bytes": "71578" } ], "symlink_target": "" }
""" Generator for C/C++. """ # Serge Guelton: The licensing terms are not set in the source package, but # pypi[1] says the software is under the MIT license, so I reproduce it here # [1] http://pypi.python.org/pypi/cgen # # Copyright (C) 2008 Andreas Kloeckner # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # from __future__ import division __copyright__ = "Copyright (C) 2008 Andreas Kloeckner" class Generable(object): def __str__(self): """Return a single string (possibly containing newlines) representing this code construct.""" return "\n".join(l.rstrip() for l in self.generate()) def generate(self, with_semicolon=True): """Generate (i.e. yield) the lines making up this code construct.""" raise NotImplementedError class Declarator(Generable): def generate(self, with_semicolon=True): tp_lines, tp_decl = self.get_decl_pair() tp_lines = list(tp_lines) for line in tp_lines[:-1]: yield line sc = ";" if not with_semicolon: sc = "" if tp_decl is None: yield "%s%s" % (tp_lines[-1], sc) else: yield "%s %s%s" % (tp_lines[-1], tp_decl, sc) def get_decl_pair(self): """Return a tuple ``(type_lines, rhs)``. *type_lines* is a non-empty list of lines (most often just a single one) describing the type of this declarator. *rhs* is the right- hand side that actually contains the function/array/constness notation making up the bulk of the declarator syntax. 
""" def inline(self, with_semicolon=True): """Return the declarator as a single line.""" tp_lines, tp_decl = self.get_decl_pair() tp_lines = " ".join(tp_lines) if tp_decl is None: return tp_lines else: return "%s %s" % (tp_lines, tp_decl) class Value(Declarator): """A simple declarator: *typename* and *name* are given as strings.""" def __init__(self, typename, name): self.typename = typename self.name = name def get_decl_pair(self): return [self.typename], self.name class NestedDeclarator(Declarator): def __init__(self, subdecl): self.subdecl = subdecl @property def name(self): return self.subdecl.name def get_decl_pair(self): return self.subdecl.get_decl_pair() class DeclSpecifier(NestedDeclarator): def __init__(self, subdecl, spec, sep=' '): NestedDeclarator.__init__(self, subdecl) self.spec = spec self.sep = sep def get_decl_pair(self): def add_spec(sub_it): it = iter(sub_it) try: yield "%s%s%s" % (self.spec, self.sep, it.next()) except StopIteration: pass for line in it: yield line sub_tp, sub_decl = self.subdecl.get_decl_pair() return add_spec(sub_tp), sub_decl class NamespaceQualifier(DeclSpecifier): def __init__(self, namespace, subdecl): DeclSpecifier.__init__(self, subdecl, namespace, '::') class Typedef(DeclSpecifier): def __init__(self, subdecl): DeclSpecifier.__init__(self, subdecl, "typedef") class Static(DeclSpecifier): def __init__(self, subdecl): DeclSpecifier.__init__(self, subdecl, "static") class Const(NestedDeclarator): def get_decl_pair(self): sub_tp, sub_decl = self.subdecl.get_decl_pair() return sub_tp, ("const %s" % sub_decl) class FunctionDeclaration(NestedDeclarator): def __init__(self, subdecl, arg_decls, *attributes): NestedDeclarator.__init__(self, subdecl) self.arg_decls = arg_decls self.attributes = attributes def get_decl_pair(self): sub_tp, sub_decl = self.subdecl.get_decl_pair() return sub_tp, ("%s(%s) %s" % ( sub_decl, ", ".join(ad.inline() for ad in self.arg_decls), " ".join(self.attributes))) class Struct(Declarator): """ A structure declarator. Attributes ---------- tpname : str Name of the structure. (None for unnamed struct) fields : [Declarator] Content of the structure. inherit : str Parent class of current structure. """ def __init__(self, tpname, fields, inherit=None): """Initialize the structure declarator. """ self.tpname = tpname self.fields = fields self.inherit = inherit def get_decl_pair(self): """ See Declarator.get_decl_pair.""" def get_tp(): """ Iterator generating lines for struct definition. 
""" decl = "struct " if self.tpname is not None: decl += self.tpname if self.inherit is not None: decl += " : " + self.inherit yield decl yield "{" for f in self.fields: for f_line in f.generate(): yield " " + f_line yield "} " return get_tp(), "" # template -------------------------------------------------------------------- class Template(NestedDeclarator): def __init__(self, template_spec, subdecl): self.template_spec = template_spec self.subdecl = subdecl def generate(self, with_semicolon=False): yield "template <%s>" % ", ".join(self.template_spec) for i in self.subdecl.generate(with_semicolon): yield i if(not isinstance(self.subdecl, FunctionDeclaration) and not isinstance(self.subdecl, Template)): yield ";" # control flow/statement stuff ------------------------------------------------ class ExceptHandler(Generable): def __init__(self, name, body, alias=None): self.name = name assert isinstance(body, Generable) self.body = body self.alias = alias def generate(self): if self.name is None: yield "catch(...)" else: yield "catch (pythonic::types::%s const& %s)" % (self.name, self.alias or '') for line in self.body.generate(): yield line class TryExcept(Generable): def __init__(self, try_, except_, else_=None): self.try_ = try_ assert isinstance(try_, Generable) self.except_ = except_ def generate(self): yield "try" for line in self.try_.generate(): yield line for exception in self.except_: for line in exception.generate(): yield " " + line class If(Generable): def __init__(self, condition, then_, else_=None): if condition[0] == '(' and condition[-1] == ')': condition = condition[1:-1] self.condition = condition assert isinstance(then_, Generable) if else_ is not None: assert isinstance(else_, Generable) self.then_ = then_ self.else_ = else_ def generate(self): yield "if (%s)" % self.condition for line in self.then_.generate(): yield line if self.else_ is not None: yield "else" for line in self.else_.generate(): yield line class Loop(Generable): def __init__(self, body): self.body = body def generate(self): if self.intro_line() is not None: yield self.intro_line() for line in self.body.generate(): yield line class While(Loop): def __init__(self, condition, body): if condition[0] == '(' and condition[-1] == ')': condition = condition[1:-1] self.condition = condition assert isinstance(body, Generable) self.body = body def intro_line(self): return "while (%s)" % self.condition class For(Loop): def __init__(self, start, condition, update, body): self.start = start self.condition = condition self.update = update assert isinstance(body, Generable) self.body = body def intro_line(self): return "for (%s; %s; %s)" % (self.start, self.condition, self.update) class AutoFor(Loop): def __init__(self, target, iter, body): self.target = target self.iter = iter assert isinstance(body, Generable) self.body = body def intro_line(self): return ("for (typename decltype({1})::iterator::reference " "{0}: {1})".format(self.target, self.iter)) # simple statements ----------------------------------------------------------- class Define(Generable): def __init__(self, symbol, value): self.symbol = symbol self.value = value def generate(self): yield "#define %s %s" % (self.symbol, self.value) class Include(Generable): def __init__(self, filename, system=True): self.filename = filename self.system = system def generate(self): if self.system: yield "#include <%s>" % self.filename else: yield "#include \"%s\"" % self.filename class Statement(Generable): def __init__(self, text): self.text = text def generate(self): 
yield self.text + ";" class AnnotatedStatement(Generable): def __init__(self, stmt, annotations): self.stmt = stmt self.annotations = annotations def generate(self): for directive in self.annotations: pragma = "#pragma " + directive.s yield pragma.format(*directive.deps) for s in self.stmt.generate(): yield s class ReturnStatement(Statement): def generate(self): yield "return " + self.text + ";" class EmptyStatement(Statement): def __init__(self): Statement.__init__(self, "") class Assign(Generable): def __init__(self, lvalue, rvalue): self.lvalue = lvalue self.rvalue = rvalue def generate(self): yield "%s = %s;" % (self.lvalue, self.rvalue) class Line(Generable): def __init__(self, text=""): self.text = text def generate(self): yield self.text # initializers ---------------------------------------------------------------- class FunctionBody(Generable): def __init__(self, fdecl, body): """Initialize a function definition. *fdecl* is expected to be a :class:`FunctionDeclaration` instance, while *body* is a :class:`Block`. """ self.fdecl = fdecl self.body = body def generate(self): for f_line in self.fdecl.generate(with_semicolon=False): yield f_line for b_line in self.body.generate(): yield b_line # block ----------------------------------------------------------------------- class Block(Generable): def __init__(self, contents=[]): self.contents = contents[:] for item in self.contents: assert isinstance(item, Generable), item def generate(self): yield "{" for item in self.contents: for item_line in item.generate(): yield " " + item_line yield "}" class Module(Block): def generate(self): for c in self.contents: for line in c.generate(): yield line class Namespace(Block): def __init__(self, name, contents=[]): Block.__init__(self, contents) self.name = name def generate(self): yield "namespace " + self.name yield "{" for item in self.contents: for item_line in item.generate(): yield " " + item_line yield "}" # copy-pasted from codepy.bpl, which is a real mess... # the original code was under MIT License # cf. http://pypi.python.org/pypi/codepy # so I reproduce it here # # Copyright (C) 2008 Andreas Kloeckner # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # class BoostPythonModule(object): def __init__(self, name="module", max_arity=None): self.name = name self.preamble = [] self.mod_body = [] self.init_body = [] def add_to_init(self, body): """Add the blocks or statements contained in the iterable *body* to the module initialization function. 
""" self.init_body.extend(body) def add_to_preamble(self, pa): self.preamble.extend(pa) def add_function(self, func, name=None): """Add a function to be exposed. *func* is expected to be a :class:`cgen.FunctionBody`. """ if not name: name = func.fdecl.name self.mod_body.append(func) self.init_body.append( Statement("boost::python::def(\"%s\", &%s)" % (name, func.fdecl.name))) def generate(self): """Generate (i.e. yield) the source code of the module line-by-line. """ body = (self.preamble + [Line()] + self.mod_body + [Line(), Line("BOOST_PYTHON_MODULE(%s)" % self.name)] + [Block(self.init_body)]) return Module(body) def __str__(self): return str(self.generate()) class CompilationUnit(object): def __init__(self, body): self.body = body def __str__(self): return '\n'.join('\n'.join(s.generate()) for s in self.body)
{ "content_hash": "cddbd581be580bc522eac3f9e1015ca4", "timestamp": "", "source": "github", "line_count": 522, "max_line_length": 79, "avg_line_length": 29.683908045977013, "alnum_prop": 0.5794127137786382, "repo_name": "artas360/pythran", "id": "3bdbdba87f884604318d2458b15df52e3a585272", "size": "15495", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pythran/cxxgen.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C++", "bytes": "1335689" }, { "name": "Makefile", "bytes": "1185" }, { "name": "Python", "bytes": "1162293" }, { "name": "Shell", "bytes": "250" } ], "symlink_target": "" }
import imp
from setuptools import setup, find_packages

version = imp.load_source('pyluos.version', 'pyluos/version.py')

with open("README.md", "r") as fh:
    long_description = fh.read()

setup(name='pyluos',
      version=version.version,
      author="Luos",
      author_email="hello@luos.io",
      url="https://docs.luos.io/pages/high/pyluos.html",
      description="Python library to set the high level behavior of your device based on Luos embedded system.",
      license='MIT',
      packages=find_packages(),
      install_requires=['future',
                        'websocket-client',
                        'pyserial>3',
                        'SimpleWebSocketServer',
                        'zeroconf',
                        'numpy',
                        'anytree',
                        ],
      extras_require={
          'tests': ['pytest', 'flake8'],
          'jupyter-integration': ['ipywidgets'],
      },
      entry_points={
          'console_scripts': [
              'pyluos-wifi-gate = pyluos.tools.wifi_gate:main',
              'pyluos-usb-gate = pyluos.tools.usb_gate:main',
              'pyluos-usb2ws = pyluos.tools.usb2ws:main',
          ],
      },
      )
{ "content_hash": "3a7bfb1c7b3ca519ff3ba0c67419ef0a", "timestamp": "", "source": "github", "line_count": 37, "max_line_length": 112, "avg_line_length": 32.648648648648646, "alnum_prop": 0.5223509933774835, "repo_name": "pollen/pyrobus", "id": "944c34ddfbd95755e1dd678c45b907bedf30b237", "size": "1231", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "setup.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "40451" } ], "symlink_target": "" }
# Thin deprecation shims that forward to djangospam.akismet.moderator.
from djangospam.akismet import moderator
import warnings


def register(*args, **kwargs):
    warnings.warn("Using 'register' from djangospam.akismet is deprecated. \
Import it from djangospam.akismet.moderator instead.",
        category=DeprecationWarning, stacklevel=2)
    return moderator.register(*args, **kwargs)


class AlreadyModerated(moderator.AlreadyModerated):
    def __init__(self, *args, **kwargs):
        warnings.warn("Using 'AlreadyModerated' from djangospam.akismet is \
deprecated. Import it from djangospam.akismet.moderator instead.",
            category=DeprecationWarning, stacklevel=2)
        super(AlreadyModerated, self).__init__(*args, **kwargs)


class Akismet(moderator.Akismet):
    def __init__(self, *args, **kwargs):
        warnings.warn("Using 'Akismet' from djangospam.akismet is \
deprecated. Import it from djangospam.akismet.moderator instead.",
            category=DeprecationWarning, stacklevel=2)
        super(Akismet, self).__init__(*args, **kwargs)


class AkismetError(moderator.AkismetError):
    def __init__(self, *args, **kwargs):
        warnings.warn("Using 'AkismetError' from djangospam.akismet is \
deprecated. Import it from djangospam.akismet.moderator instead.",
            category=DeprecationWarning, stacklevel=2)
        super(AkismetError, self).__init__(*args, **kwargs)
{ "content_hash": "f811032643e039897045add9bf38daa0", "timestamp": "", "source": "github", "line_count": 29, "max_line_length": 76, "avg_line_length": 46.172413793103445, "alnum_prop": 0.7221807318894697, "repo_name": "leandroarndt/djangospam", "id": "6f591120ae45b2a6cc17ca651f5277b67ecc173a", "size": "1364", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "djangospam/akismet/__init__.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "HTML", "bytes": "1788" }, { "name": "Python", "bytes": "27374" } ], "symlink_target": "" }
""" .. _ex-inverse-source-power: ========================================== Compute source power using DICS beamformer ========================================== Compute a Dynamic Imaging of Coherent Sources (DICS) :footcite:`GrossEtAl2001` filter from single-trial activity to estimate source power across a frequency band. This example demonstrates how to source localize the event-related synchronization (ERS) of beta band activity in the :ref:`somato dataset <somato-dataset>`. """ # Author: Marijn van Vliet <w.m.vanvliet@gmail.com> # Roman Goj <roman.goj@gmail.com> # Denis Engemann <denis.engemann@gmail.com> # Stefan Appelhoff <stefan.appelhoff@mailbox.org> # # License: BSD-3-Clause # %% import numpy as np import mne from mne.datasets import somato from mne.time_frequency import csd_morlet from mne.beamformer import make_dics, apply_dics_csd print(__doc__) # %% # Reading the raw data and creating epochs: data_path = somato.data_path() subject = '01' task = 'somato' raw_fname = (data_path / f'sub-{subject}' / 'meg' / f'sub-{subject}_task-{task}_meg.fif') # Use a shorter segment of raw just for speed here raw = mne.io.read_raw_fif(raw_fname) raw.crop(0, 120) # one minute for speed (looks similar to using all ~800 sec) # Read epochs events = mne.find_events(raw) epochs = mne.Epochs(raw, events, event_id=1, tmin=-1.5, tmax=2, preload=True) del raw # Paths to forward operator and FreeSurfer subject directory fname_fwd = (data_path / 'derivatives' / f'sub-{subject}' / f'sub-{subject}_task-{task}-fwd.fif') subjects_dir = data_path / 'derivatives' / 'freesurfer' / 'subjects' # %% # We are interested in the beta band. Define a range of frequencies, using a # log scale, from 12 to 30 Hz. freqs = np.logspace(np.log10(12), np.log10(30), 9) # %% # Computing the cross-spectral density matrix for the beta frequency band, for # different time intervals. We use a decim value of 20 to speed up the # computation in this example at the loss of accuracy. csd = csd_morlet(epochs, freqs, tmin=-1, tmax=1.5, decim=20) csd_baseline = csd_morlet(epochs, freqs, tmin=-1, tmax=0, decim=20) # ERS activity starts at 0.5 seconds after stimulus onset csd_ers = csd_morlet(epochs, freqs, tmin=0.5, tmax=1.5, decim=20) info = epochs.info del epochs # %% # To compute the source power for a frequency band, rather than each frequency # separately, we average the CSD objects across frequencies. csd = csd.mean() csd_baseline = csd_baseline.mean() csd_ers = csd_ers.mean() # %% # Computing DICS spatial filters using the CSD that was computed on the entire # timecourse. fwd = mne.read_forward_solution(fname_fwd) filters = make_dics(info, fwd, csd, noise_csd=csd_baseline, pick_ori='max-power', reduce_rank=True, real_filter=True) del fwd # %% # Applying DICS spatial filters separately to the CSD computed using the # baseline and the CSD computed during the ERS activity. baseline_source_power, freqs = apply_dics_csd(csd_baseline, filters) beta_source_power, freqs = apply_dics_csd(csd_ers, filters) # %% # Visualizing source power during ERS activity relative to the baseline power. stc = beta_source_power / baseline_source_power message = 'DICS source power in the 12-30 Hz frequency band' brain = stc.plot(hemi='both', views='axial', subjects_dir=subjects_dir, subject=subject, time_label=message) # %% # References # ---------- # .. footbibliography::
{ "content_hash": "d226f3d504380da67641c91a95805bd6", "timestamp": "", "source": "github", "line_count": 102, "max_line_length": 78, "avg_line_length": 34, "alnum_prop": 0.696078431372549, "repo_name": "Teekuningas/mne-python", "id": "a73ee81f90c8a242f40ba34c7b4e4f8c5b8e08d1", "size": "3492", "binary": false, "copies": "11", "ref": "refs/heads/master", "path": "examples/inverse/dics_source_power.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Csound Document", "bytes": "24999" }, { "name": "JavaScript", "bytes": "8008" }, { "name": "Jinja", "bytes": "14962" }, { "name": "Makefile", "bytes": "4612" }, { "name": "Python", "bytes": "10372316" }, { "name": "Sass", "bytes": "257" }, { "name": "Shell", "bytes": "19970" } ], "symlink_target": "" }
from unittest import TestCase

from tapiriik.testing.testtools import TestTools, TapiriikTestCase

from tapiriik.sync import Sync
from tapiriik.services import Service
from tapiriik.services.interchange import Activity, ActivityType, Waypoint, WaypointType

from datetime import datetime, timedelta
import random


class InterchangeTests(TapiriikTestCase):

    def test_round_precise_time(self):
        ''' Some services might return really exact times, while others would round to the second - needs to be accounted for in hash algo '''
        actA = Activity()
        actA.StartTime = datetime(1, 2, 3, 4, 5, 6, 7)
        actB = Activity()
        actB.StartTime = datetime(1, 2, 3, 4, 5, 6, 7) + timedelta(0, 0.1337)

        actA.CalculateUID()
        actB.CalculateUID()

        self.assertEqual(actA.UID, actB.UID)

    def test_constant_representation(self):
        ''' ensures that all services' API clients are consistent through a simulated download->upload cycle '''

        # runkeeper
        rkSvc = Service.FromID("runkeeper")
        act = TestTools.create_random_activity(rkSvc, rkSvc.SupportedActivities[0])
        record = rkSvc._createUploadData(act)
        returnedAct = rkSvc._populateActivity(record)
        act.Name = None  # RK doesn't have a "name" field, so it's fudged into the notes, but not really
        rkSvc._populateActivityWaypoints(record, returnedAct)
        self.assertActivitiesEqual(returnedAct, act)

        # can't test Strava well this way, the upload and download formats are entirely different

        # endomondo - only waypoints at this point, the activity metadata is somewhat out-of-band
        eSvc = Service.FromID("endomondo")
        act = TestTools.create_random_activity(eSvc, eSvc.SupportedActivities[0])
        oldWaypoints = act.Waypoints
        self.assertEqual(oldWaypoints[0].Calories, None)
        record = eSvc._createUploadData(act)
        eSvc._populateActivityFromTrackData(act, record)
        self.assertEqual(oldWaypoints, act.Waypoints)

    def test_duration_calculation(self):
        ''' ensures that true-duration calculation is being reasonable '''
        act = TestTools.create_blank_activity()
        act.StartTime = datetime.now()
        act.EndTime = act.StartTime + timedelta(hours=3)

        # No waypoints
        self.assertRaises(ValueError, act.GetTimerTime)

        # Too few waypoints
        act.Waypoints = [Waypoint(timestamp=act.StartTime),
                         Waypoint(timestamp=act.EndTime)]
        self.assertRaises(ValueError, act.GetTimerTime)

        # straight-up calculation
        act.EndTime = act.StartTime + timedelta(seconds=14)
        act.Waypoints = [Waypoint(timestamp=act.StartTime),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=2)),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=6)),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=10)),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=14))]
        self.assertEqual(act.GetTimerTime(), timedelta(seconds=14))

        # pauses
        act.EndTime = act.StartTime + timedelta(seconds=14)
        act.Waypoints = [Waypoint(timestamp=act.StartTime),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=2)),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=6), ptType=WaypointType.Pause),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=9), ptType=WaypointType.Pause),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=10), ptType=WaypointType.Resume),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=14))]
        self.assertEqual(act.GetTimerTime(), timedelta(seconds=10))

        # laps - NO effect
        act.EndTime = act.StartTime + timedelta(seconds=14)
        act.Waypoints = [Waypoint(timestamp=act.StartTime),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=2)),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=6), ptType=WaypointType.Lap),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=9)),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=10), ptType=WaypointType.Lap),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=14))]
        self.assertEqual(act.GetTimerTime(), timedelta(seconds=14))

        # multiple pauses + ending after pause
        act.EndTime = act.StartTime + timedelta(seconds=20)
        act.Waypoints = [Waypoint(timestamp=act.StartTime),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=2)),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=6), ptType=WaypointType.Pause),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=9), ptType=WaypointType.Pause),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=10), ptType=WaypointType.Resume),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=12)),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=16)),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=17), ptType=WaypointType.Pause),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=20), ptType=WaypointType.End)]
        self.assertEqual(act.GetTimerTime(), timedelta(seconds=13))

        # implicit pauses (>1m5s)
        act.EndTime = act.StartTime + timedelta(seconds=20)
        act.Waypoints = [Waypoint(timestamp=act.StartTime),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=2)),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=6)),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=120)),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=124)),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=130))]
        self.assertEqual(act.GetTimerTime(), timedelta(seconds=16))

        # mixed pauses - would this ever happen?? Either way, the explicit pause should override the implicit one and cause otherwise-ignored time to be counted
        act.EndTime = act.StartTime + timedelta(seconds=23)
        act.Waypoints = [Waypoint(timestamp=act.StartTime),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=2)),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=6)),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=20), ptType=WaypointType.Pause),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=24), ptType=WaypointType.Resume),
                         Waypoint(timestamp=act.StartTime + timedelta(seconds=30))]
        self.assertEqual(act.GetTimerTime(), timedelta(seconds=26))

    def test_activity_specificity_resolution(self):
        # Mountain biking is more specific than just cycling
        self.assertEqual(ActivityType.PickMostSpecific([ActivityType.Cycling, ActivityType.MountainBiking]), ActivityType.MountainBiking)

        # But not once we mix in an unrelated activity - pick the first
        self.assertEqual(ActivityType.PickMostSpecific([ActivityType.Cycling, ActivityType.MountainBiking, ActivityType.Swimming]), ActivityType.Cycling)

        # Duplicates
        self.assertEqual(ActivityType.PickMostSpecific([ActivityType.Cycling, ActivityType.MountainBiking, ActivityType.MountainBiking]), ActivityType.MountainBiking)

        # One
        self.assertEqual(ActivityType.PickMostSpecific([ActivityType.MountainBiking]), ActivityType.MountainBiking)

        # With None
        self.assertEqual(ActivityType.PickMostSpecific([None, ActivityType.MountainBiking]), ActivityType.MountainBiking)

        # All None
        self.assertEqual(ActivityType.PickMostSpecific([None, None]), ActivityType.Other)

        # Never pick 'Other' given a better option
        self.assertEqual(ActivityType.PickMostSpecific([ActivityType.Other, ActivityType.MountainBiking]), ActivityType.MountainBiking)

        # Normal w/ Other + None
        self.assertEqual(ActivityType.PickMostSpecific([ActivityType.Other, ActivityType.Cycling, None, ActivityType.MountainBiking]), ActivityType.MountainBiking)
{ "content_hash": "cf98c8adfbc6695013c8ad9fb8934f38", "timestamp": "", "source": "github", "line_count": 149, "max_line_length": 166, "avg_line_length": 56.84563758389262, "alnum_prop": 0.6665879574970484, "repo_name": "dmschreiber/tapiriik", "id": "b86272b97fd8c3a29bbd0b0dad2c538f4257069f", "size": "8470", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tapiriik/testing/interchange.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "23679" }, { "name": "HTML", "bytes": "66608" }, { "name": "JavaScript", "bytes": "47797" }, { "name": "Python", "bytes": "589883" }, { "name": "Shell", "bytes": "631" } ], "symlink_target": "" }
from .Portals import Portals
{ "content_hash": "7ba83ad876c1c7ad41372000e303ce6b", "timestamp": "", "source": "github", "line_count": 1, "max_line_length": 28, "avg_line_length": 28, "alnum_prop": 0.8571428571428571, "repo_name": "DavidWhittingham/agsadmin", "id": "7a3d33f019604a9d682ba53c279b506fcb078997", "size": "28", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "agsadmin/sharing_admin/portals/__init__.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "PowerShell", "bytes": "442" }, { "name": "Python", "bytes": "173794" } ], "symlink_target": "" }
import re
import json
import urllib.parse

import demistomock as demisto  # noqa: F401
import requests
from CommonServerPython import *  # noqa: F401

# disable insecure warnings
requests.packages.urllib3.disable_warnings()

entryList = []
deviceList = []
deviceEntry = {}

args = demisto.args()
if 'hostname' in args:
    device = args['hostname']
else:
    device = None
if 'ipaddress' in args:
    ip = args['ipaddress']
else:
    ip = None
if 'package' in args:
    package = args['package']
else:
    package = None

SEARCH_DEVICE_USING_IP = "(select (*) (from device (where device ( eq ip_addresses (ip_address '{0}')))))".format(ip)
SEARCH_DEVICE_USING_DEVICE = "(select (*) (from device (where device ( eq name (string {0})))))".format(device)
SEARCH_COMPLIANCE_PACKAGE_DEVICE = """(select ((device (*)) (package (*)))
(from (device package)
(with package
(where package (eq name (pattern '*{0}*')))
(where device (eq name (pattern '{1}')))))
(limit 100))""".format(package, device)
TEST_MODULE = "(select (name) (from device ) (limit 1))"


def is_valid_hostname(hostname):
    if len(hostname) > 15:
        return False
    allowed = re.compile("(?!-)[A-Z\d-]{1,15}(?<!-)$", re.IGNORECASE)
    return all(allowed.match(x) for x in hostname)


def nexthink_request(method, nxql):
    params = demisto.params()
    username = params.get('credentials').get('identifier')
    password = params.get('credentials').get('password')
    base_url = params.get('url')
    port = params.get('port')
    verify_ssl = not params.get('insecure', False)
    proxy = params.get('proxy', False)
    if proxy:
        proxies = handle_proxy()
    else:
        proxies = {
            "http": None,
            "https": None,
        }
    BASE_URL = 'https://{0}:{1}/2/query?platform=windows&format=json&query='.format(base_url, port)
    NXQL = urllib.parse.quote(nxql)
    urlFragment = BASE_URL + NXQL
    try:
        if method == 'POST':
            response = requests.post(urlFragment, auth=(username, password), verify=verify_ssl, proxies=proxies)
        else:
            response = requests.get(urlFragment, auth=(username, password), verify=verify_ssl, proxies=proxies)
        if response.status_code == 200:
            return json.loads(response.content)
        else:
            return str(response.status_code)
    except requests.Timeout:
        raise
    except requests.ConnectionError:
        raise


def nexthink_endpoint_details(device: None, ip: None):
    if not ip and not device:
        return_results('Please provide hostname or ipaddress argument')
        sys.exit(0)
    elif not device:
        if re.match(ipv4Regex, ip):
            data = nexthink_request('GET', SEARCH_DEVICE_USING_IP)
        else:
            return_results('Please enter valid ip address. (e.g. 192.168.1.100)')
            sys.exit(0)
    else:
        if is_valid_hostname(device):
            data = nexthink_request('GET', SEARCH_DEVICE_USING_DEVICE)
        else:
            return_results('Please enter valid hostname. (e.g. AMCE1234)')
            sys.exit(0)

    if len(data) > 0:
        deviceEntry['EndpointName'] = data[0]['name']
        deviceEntry['LastLoggedOnUser'] = data[0]['last_logged_on_user']
        deviceEntry['IPAddress'] = data[0]['ip_addresses'][0]
        deviceEntry['MACAddress'] = data[0]['mac_addresses'][0]
        deviceList.append(deviceEntry)
        dArgs = CommandResults(
            outputs_prefix="Nexthink.Endpoint",
            outputs_key_field="IPAddress",
            outputs=deviceList,
            readable_output=tableToMarkdown('Nexthink Endpoint Details: ', deviceList),
            raw_response=deviceList
        )
        return dArgs
    else:
        if not device:
            return 'No endpoint found with ip "{0}"'.format(ip)
        else:
            return 'No endpoint found with hostname "{0}"'.format(device)


def nexthink_installed_packages(device: None, package: None):
    data = nexthink_request('GET', SEARCH_COMPLIANCE_PACKAGE_DEVICE)
    if len(data) > 0:
        for t in data:
            entries = {}
            entries['PackageName'] = t['package/name']
            entries['PackagePublisher'] = t['package/publisher']
            entries['PackageVersion'] = t['package/version']
            entryList.append(entries)
        deviceEntry['DeviceName'] = data[0]['device/name']
        deviceEntry['LastLogged On User'] = data[0]['device/last_logged_on_user']
        deviceEntry['IPAddress'] = data[0]['device/ip_addresses'][0]
        deviceEntry['MACAddress'] = data[0]['device/mac_addresses'][0]
        deviceList.append(deviceEntry)
        hr = tableToMarkdown('Installed Packages: ', deviceList) + tableToMarkdown('Packages Details: ', entryList)
        dArgs = CommandResults(
            outputs_prefix="Nexthink.Package",
            outputs_key_field="IPAddress",
            outputs=deviceList,
            readable_output=hr,
            raw_response=deviceList
        )
        return dArgs
    else:
        return 'No package "{0}" found on endpoint {1}'.format(package, device)


def nexthink_compliance_check(device: None, ip: None):
    if not device and not ip:
        return_results('Please provide hostname or ipaddress argument')
        sys.exit(0)
    elif not device:
        if re.match(ipv4Regex, ip):
            data = nexthink_request('GET', SEARCH_DEVICE_USING_IP)
        else:
            return_results('Please enter valid ip address. (e.g. 192.168.1.100)')
            sys.exit(0)
    else:
        if is_valid_hostname(device):
            data = nexthink_request('GET', SEARCH_DEVICE_USING_DEVICE)
        else:
            return_results('Please enter valid endpoint hostname. (e.g. AMCE1234)')

    if len(data) > 0:
        for t in data:
            entries = {}
            entries['DeviceAntivirus'] = t['antivirus_name']
            entries['DeviceAntivirus RTP'] = t['antivirus_rtp']
            entries['DeviceAntivirus Updated'] = t['antivirus_up_to_date']
            entries['DeviceAntispyware'] = t['antispyware_name']
            entries['DeviceAntispyware RTP'] = t['antispyware_rtp']
            entries['DeviceAntispyware Updated'] = t['antispyware_up_to_date']
            entryList.append(entries)
        deviceEntry['DeviceName'] = data[0]['name']
        deviceEntry['LastLoggedOnUser'] = data[0]['last_logged_on_user']
        deviceEntry['IPAddress'] = data[0]['ip_addresses'][0]
        deviceEntry['MACAddress'] = data[0]['mac_addresses'][0]
        deviceList.append(deviceEntry)
        hr = tableToMarkdown('Endpoint Details :', deviceList) + tableToMarkdown('Compliance Details: ', entryList)
        dArgs = CommandResults(
            outputs_prefix="Nexthink.Compliance",
            outputs_key_field="IPAddress",
            outputs=deviceList,
            readable_output=hr,
            raw_response=deviceList
        )
        return dArgs
    else:
        if not device:
            return 'No endpoint found with ip "{0}"'.format(ip)
        else:
            return 'No endpoint found with hostname "{0}"'.format(device)


def main():
    if demisto.command() == 'test-module':
        data = nexthink_request('GET', TEST_MODULE)
        if data:
            return_results("ok")
        else:
            return_results(data)
    elif demisto.command() == 'nt-endpoint-details':
        data = nexthink_endpoint_details(device, ip)
        return_results(data)
    elif demisto.command() == 'nt-compliance-check':
        data = nexthink_compliance_check(device, ip)
        return_results(data)
    elif demisto.command() == 'nt-installed-packages':
        data = nexthink_installed_packages(device, package)
        return_results(data)


if __name__ in ['__main__', 'builtin', 'builtins']:
    main()
{ "content_hash": "fbf412bde40baa4e353476eabb493ff0", "timestamp": "", "source": "github", "line_count": 226, "max_line_length": 117, "avg_line_length": 34.47787610619469, "alnum_prop": 0.6083162217659137, "repo_name": "demisto/content", "id": "495ab73ad05a8d192688378827dd7d4456ecb4f0", "size": "7792", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Packs/Nexthink/Integrations/Nexthink/Nexthink.py", "mode": "33188", "license": "mit", "language": [ { "name": "Dockerfile", "bytes": "2146" }, { "name": "HTML", "bytes": "205901" }, { "name": "JavaScript", "bytes": "1584075" }, { "name": "PowerShell", "bytes": "442288" }, { "name": "Python", "bytes": "47881712" }, { "name": "Rich Text Format", "bytes": "480911" }, { "name": "Shell", "bytes": "108066" }, { "name": "YARA", "bytes": "1185" } ], "symlink_target": "" }
#!/usr/bin/env python
# -*- coding: utf-8 -*-

__author__ = 'amryf'

from ctypes import *

import time

from .ic_grabber_dll import IC_GrabberDLL
from .ic_exception import IC_Exception
from .ic_property import IC_Property
from . import ic_structures as structs

from IPython import embed

GrabberHandlePtr = POINTER(structs.GrabberHandle)

# "typedefs"
IMG_FILETYPE = ['FILETYPE_BMP', 'FILETYPE_JPG']
COLOR_FORMAT = ['Y800', 'RGB24', 'RGB32', 'UYVY', 'Y16', 'NONE']

# c function type for frame callback
# outside of class so it can be called by unbound function
C_FRAME_READY_CALLBACK = CFUNCTYPE(None, GrabberHandlePtr, POINTER(c_ubyte),
                                   c_ulong, c_void_p)


class IC_Camera(object):

    @property
    def callback_registered(self):
        return self._callback_registered

    def __init__(self, setting_file=None, show_dialog=False):
        self._handle = IC_GrabberDLL.create_grabber()
        if show_dialog:
            self._handle = IC_GrabberDLL.device_selection_dialog(self._handle)
        if setting_file:
            self._handle = IC_GrabberDLL.load_device_state(self._handle,
                                                           c_char_p(setting_file.encode()))
        if not self._handle:
            raise IC_Exception(-1)

        self._callback_registered = False
        self._frame = {'num': -1,
                       'ready': False}

    def __getattr__(self, attr):
        if attr in IC_Property.get_all_property_names():
            return IC_Property(self._handle, attr)
        else:
            raise AttributeError

    # not needed if we use props directly
    #def __setattr__(self, attr, val):
    #
    #    if attr.startswith('_'):
    #        super(IC_Camera, self).__setattr__(attr, val)
    #
    #    # if it's an actual device property
    #    elif attr in self.get_all_property_names():
    #        IC_Property(self._handle, attr).value = val
    #
    #    # otherwise just set the attribute value as normal
    #    else:
    #        super(IC_Camera, self).__setattr__(attr, val)

    def save_device_state(self, filename):
        """
        Save camera setting to filename

        :param filename string -- file name of the setting
        """
        err = IC_GrabberDLL.save_device_state(self._handle,
                                              c_char_p(filename.encode()))
        if err != 1:
            raise Exception(err)

    def open(self):
        """
        Open the camera device, required for most functions.
        """
        err = IC_GrabberDLL.open_device_by_unique_name(self._handle,
                                                       self._unique_device_name)
        if err != 1:
            raise IC_Exception(err)

    def close(self):
        """
        Close the camera device.
        """
        IC_GrabberDLL.close_device(self._handle)

    ## don't use, returns wrong number..?
    #def get_serial_number(self):
    #
    #    #serial = create_string_buffer(20)
    #    serial = (c_char * 20)()
    #
    #    IC_GrabberDLL.get_serial_number(self._handle,
    #                                    serial)
    #
    #    return serial.value

    def is_open(self):
        """
        Check if the camera device is currently open.

        :returns: boolean -- True if camera is open.
        """
        return bool(IC_GrabberDLL.is_dev_valid(self._handle))

    def show_property_dialog(self):
        """
        Show property dialog for device.
        """
        err = IC_GrabberDLL.show_property_dialog(self._handle)
        if err != 1:
            raise IC_Exception(err)

    def list_property_names(self):
        return IC_Property.get_all_property_names()

    # use props instead, e.g. cam.gain.range
    #def get_property_range(self, property_name):
    #    return IC_Property(self._handle, property_name).range
    #
    #def is_property_available(self, property_name):
    #    return IC_Property(self._handle, property_name).is_available
    #
    #def is_property_auto_available(self, property_name):
    #    return IC_Property(self._handle, property_name).is_auto_available
    #
    #def get_property_type(self, property_name):
    #    return IC_Property(self._handle, property_name).type

    def reset_properties(self):
        """
        Resets all properties to their default values. If a property has
        automation, the automatic will be enabled.
        If the device supports external trigger, the external trigger will
        be disabled.
        """
        return IC_GrabberDLL.reset_properties(self._handle)

    def list_video_formats(self):
        """
        :returns: list -- available video formats.
        """
        vf_list = ((c_char * 80) * 40)()
        num_vfs = IC_GrabberDLL.list_video_formats(self._handle,
                                                   byref(vf_list),
                                                   c_int(80))
        if num_vfs < 0:
            raise IC_Exception(num_vfs)
        return_list = []
        for vf in vf_list:
            if vf.value:
                return_list.append(vf.value)
        return return_list

    def get_video_norm_count(self):
        """
        Get the number of the available video norm formats for the current
        device. A video capture device must have been opened before this call.

        :returns: int -- number of available video norms.
        """
        vn_count = IC_GrabberDLL.get_video_norm_count(self._handle)
        if vn_count < 0:
            raise IC_Exception(vn_count)
        return vn_count

    def get_video_norm(self, norm_index):
        """
        Get a string representation of the video norm specified by norm_index.
        norm_index must be between 0 and get_video_norm_count().

        :returns: string -- name of video norm of specified index.
        """
        # DLL says need to call this first for it to work
        num_vns = self.get_video_norm_count()
        if norm_index >= num_vns:
            raise IC_Exception(-102)
        vn = IC_GrabberDLL.get_video_norm(self._handle, c_int(norm_index))
        if vn is None:
            raise IC_Exception(-104)
        return vn

    def get_video_format_count(self):
        """
        Get the number of the available video formats for the current device.
        A video capture device must have been opened before this call.

        :returns: int -- number of available video formats.
        """
        vf_count = IC_GrabberDLL.get_video_format_count(self._handle)
        if vf_count < 0:
            raise IC_Exception(vf_count)
        return vf_count

    def get_video_format(self, format_index):
        """
        Get a string representation of the video format specified by
        format_index. format_index must be between 0 and
        get_video_format_count().
        """
        # DLL says need to call this first for it to work
        num_vfs = self.get_video_format_count()
        if format_index >= num_vfs:
            raise IC_Exception(-103)
        vf = IC_GrabberDLL.get_video_format(self._handle, c_int(format_index))
        if vf is None:
            raise IC_Exception(-105)
        return vf

    def set_video_format(self, video_format):
        """
        Set a video format for the device. Must be supported.

        :param video_format: string -- video format to use.
        """
        err = IC_GrabberDLL.set_video_format(self._handle,
                                             c_char_p(video_format.encode()))
        if err != 1:
            raise IC_Exception(err)

    def set_video_norm(self, video_norm):
        """
        Sets video norm format, whatever that means.

        :param video_norm: string -- video norm to use.
        """
        err = IC_GrabberDLL.set_video_norm(self._handle,
                                           c_char_p(video_norm.encode()))
        if err != 1:
            raise IC_Exception(err)

    def get_video_format_width(self):
        """
        """
        return IC_GrabberDLL.get_video_format_width(self._handle)

    def get_video_format_height(self):
        """
        """
        return IC_GrabberDLL.get_video_format_height(self._handle)

    def get_format(self):
        """
        """
        return IC_GrabberDLL.get_format(self._handle)

    def set_format(self, color_format):
        """
        """
        err = IC_GrabberDLL.set_format(self._handle, c_int(color_format))
        if err != 1:
            raise IC_Exception(err)

    def remove_overlay(self, enable_overlay=False):
        """
        """
        err = IC_GrabberDLL.remove_overlay(self._handle, c_int(enable_overlay))
        if err != None:
            raise IC_Exception(err)

    def is_triggerable(self):
        """
        """
        return bool(IC_GrabberDLL.is_trigger_available(self._handle))

    def get_frame_rate(self):
        """
        """
        return IC_GrabberDLL.get_frame_rate(self._handle)

    def set_frame_rate(self, frame_rate):
        """
        """
        err = IC_GrabberDLL.set_frame_rate(self._handle, c_float(frame_rate))
        if err != 1:
            raise IC_Exception(err)

    def enable_trigger(self, enable):
        """
        Enable or disable camera triggering.

        :param enable: boolean -- True to enable the trigger, False to disable.
        """
        err = IC_GrabberDLL.enable_trigger(self._handle, c_int(int(enable)))
        if err != 1:
            #raise IC_Exception(err)
            pass  # todo, always raises false error for some reason...?

    def enable_continuous_mode(self, enable):
        """
        Enable or disable continuous mode.

        :param enable: boolean -- True to enable continuous mode, False to
            disable.
        """
        actual = not enable
        #print actual, enable, c_int(int(actual))
        err = IC_GrabberDLL.set_continuous_mode(self._handle, c_int(int(actual)))
        if err != 1:
            #raise IC_Exception(err)
            pass  # todo, always raises false error for some reason...?

    def send_trigger(self):
        """
        Send a software trigger to fire the device when in triggered mode.
        """
        err = IC_GrabberDLL.software_trigger(self._handle)
        if err != 1:
            raise IC_Exception(err)

    def prepare_live(self, show_display=False):
        """
        Prepare the device for live video.
        """
        err = IC_GrabberDLL.prepare_live(self._handle, c_int(int(show_display)))
        if err != 1:
            raise IC_Exception(err)

    def start_live(self, show_display=False):
        """
        Start the live video.
        """
        err = IC_GrabberDLL.start_live(self._handle, c_int(int(show_display)))
        if err != 1:
            raise IC_Exception(-1)

    def suspend_live(self):
        """
        Suspend the live video and put into a prepared state.
        """
        err = IC_GrabberDLL.suspend_live(self._handle)
        if err != 1:
            raise IC_Exception(err)

    def stop_live(self):
        """
        Stop the live video.
        """
        IC_GrabberDLL.stop_live(self._handle)

    def get_image_description(self):
        """
        Get image info.

        :returns: tuple -- (image width, image height, image depth,
            color format).
        """
        img_width = c_long()
        img_height = c_long()
        img_depth = c_int()
        color_format = c_int()
        err = IC_GrabberDLL.get_image_description(self._handle,
                                                  byref(img_width),
                                                  byref(img_height),
                                                  byref(img_depth),
                                                  byref(color_format),
                                                  )
        return (img_width.value, img_height.value, img_depth.value, color_format.value)

    def snap_image(self, timeout=1000):
        """
        Snap an image. Device must be set to live mode and a format must
        be set.

        :param timeout: int -- time out in milliseconds.
        """
        err = IC_GrabberDLL.snap_image(self._handle, c_int(timeout))
        if err != 1:
            raise IC_Exception(err)

    def get_image_ptr(self):
        """
        Get image buffer from camera.

        :returns: ctypes pointer -- pointer to image data.
        """
        img_ptr = IC_GrabberDLL.get_image_ptr(self._handle)
        if img_ptr is None:
            raise IC_Exception(-1)
        #img_data = cast(img_ptr, POINTER(c_ubyte * buffer_size))
        ####array = (c_ubyte * iheight * iwidth * 3).from_address(addressof(data.contents))
        #array = img_data.contents
        return img_ptr

    def get_image_data(self):
        """
        Get image data.

        :returns: ctypes.c_ubyte array -- the image data.
        """
        image_size = self.get_image_description()[:3]
        img_width = image_size[0]
        img_height = image_size[1]
        img_depth = image_size[2] / 8
        buffer_size = img_width * img_height * img_depth * sizeof(c_uint8)
        img_ptr = self.get_image_ptr()
        data = cast(img_ptr, POINTER(c_ubyte * buffer_size))
        return (data.contents, img_width, img_height, img_depth)
        #img = np.ndarray(buffer=data.contents,
        #                 dtype=np.uint8,
        #                 shape=(img_height,
        #                        img_width,
        #                        img_depth))
        #return img

    def save_image(self, filename, filetype=1, jpeq_quality=75):
        """
        Save the contents of the last snapped image into a file.

        :param filename: string -- filename to name saved file.
        :param filetype: int -- 0 = BMP, 1 = JPEG.
        :param jpeq_quality: int -- JPEG file quality, 0-100.
        """
        err = IC_GrabberDLL.save_image(self._handle,
                                       c_char_p(filename.encode()),
                                       c_int(filetype),
                                       c_long(jpeq_quality))
        if err != 1:
            raise IC_Exception(err)

    # generate callback function so it is not a bound method
    # (cb_func cannot have the self parameter)
    def _get_callback_func(self):
        def cb_func(handle_ptr, p_data, frame_num, data):
            self._frame['ready'] = True
            self._frame['num'] = frame_num
        return C_FRAME_READY_CALLBACK(cb_func)

    def register_frame_ready_callback(self):
        """
        Register the frame ready callback with the device.
        """
        # keep ref to prevent garbage collection
        self._rfrc_func = self._get_callback_func()
        # register callback function with DLL
        # instead of passing pointer to a variable (3rd param) we will set
        # the flag ourselves
        IC_GrabberDLL.set_frame_ready_callback(self._handle,
                                               self._rfrc_func,
                                               None)
        self._callback_registered = True

    def reset_frame_ready(self):
        """
        Reset the frame ready flag to False, generally so that
        wait_til_frame_ready() can be called again.
        """
        self._frame['ready'] = False
        self._frame['num'] = -1

    def wait_til_frame_ready(self, timeout=0):
        """
        Wait until the devices announces a frame as being ready.
        Requires register_frame_ready_callback() being called.

        :param timeout: int -- timeout in milliseconds. Set to 0 for no
            timeout.

        :returns: int -- frame number that was announced as ready.
        """
        if timeout:
            start = time.clock()
            elapsed = (time.clock() - start) * 1000
            while not self._frame['ready'] and elapsed < timeout:
                time.sleep(0.001)
                elapsed = (time.clock() - start) * 1000
        else:
            while not self._frame['ready']:
                time.sleep(0.001)
        return self._frame['num']
{ "content_hash": "5a50c32c0ccafee4f6acbbc99a8486e6", "timestamp": "", "source": "github", "line_count": 485, "max_line_length": 105, "avg_line_length": 32.705154639175255, "alnum_prop": 0.5535871895095196, "repo_name": "amryfitra/icpy3", "id": "50e7a004e9c1431706ed78ce6a9adb63171ab4c8", "size": "15862", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "icpy3/ic_camera.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "136472" } ], "symlink_target": "" }
""" WSGI config for agilepkg project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/ """ import os from django.core.wsgi import get_wsgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "agilepkg.settings") application = get_wsgi_application()
{ "content_hash": "c306cfc3514bedd59b246efd575b8139", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 78, "avg_line_length": 24.5625, "alnum_prop": 0.7709923664122137, "repo_name": "lerina/agileAuth", "id": "d0f1f4fafe100afaaafd0baf891a7afcde8b3105", "size": "393", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "agilepkg/wsgi.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "4304" }, { "name": "HTML", "bytes": "13132" }, { "name": "Python", "bytes": "6458" } ], "symlink_target": "" }
from __future__ import unicode_literals
from django.core.exceptions import PermissionDenied
from django.http import Http404
from django.test import TestCase
from django.template import TemplateDoesNotExist, Template
import django.template.loader
from rest_framework import status
from rest_framework.compat import patterns, url, six
from rest_framework.decorators import api_view, renderer_classes
from rest_framework.renderers import TemplateHTMLRenderer
from rest_framework.response import Response


@api_view(('GET',))
@renderer_classes((TemplateHTMLRenderer,))
def example(request):
    """
    A view that returns an HTML representation.
    """
    data = {'object': 'foobar'}
    return Response(data, template_name='app_scaffolding.html')


@api_view(('GET',))
@renderer_classes((TemplateHTMLRenderer,))
def permission_denied(request):
    raise PermissionDenied()


@api_view(('GET',))
@renderer_classes((TemplateHTMLRenderer,))
def not_found(request):
    raise Http404()


urlpatterns = patterns('',
    url(r'^$', example),
    url(r'^permission_denied$', permission_denied),
    url(r'^not_found$', not_found),
)


class TemplateHTMLRendererTests(TestCase):
    urls = 'rest_framework.tests.test_htmlrenderer'

    def setUp(self):
        """
        Monkeypatch get_template
        """
        self.get_template = django.template.loader.get_template

        def get_template(template_name, dirs=None):
            if template_name == 'app_scaffolding.html':
                return Template("app_scaffolding: {{ object }}")
            raise TemplateDoesNotExist(template_name)

        django.template.loader.get_template = get_template

    def tearDown(self):
        """
        Revert monkeypatching
        """
        django.template.loader.get_template = self.get_template

    def test_simple_html_view(self):
        response = self.client.get('/')
        self.assertContains(response, "app_scaffolding: foobar")
        self.assertEqual(response['Content-Type'], 'text/html; charset=utf-8')

    def test_not_found_html_view(self):
        response = self.client.get('/not_found')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        self.assertEqual(response.content, six.b("404 Not Found"))
        self.assertEqual(response['Content-Type'], 'text/html; charset=utf-8')

    def test_permission_denied_html_view(self):
        response = self.client.get('/permission_denied')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertEqual(response.content, six.b("403 Forbidden"))
        self.assertEqual(response['Content-Type'], 'text/html; charset=utf-8')


class TemplateHTMLRendererExceptionTests(TestCase):
    urls = 'rest_framework.tests.test_htmlrenderer'

    def setUp(self):
        """
        Monkeypatch get_template
        """
        self.get_template = django.template.loader.get_template

        def get_template(template_name):
            if template_name == '404.html':
                return Template("404: {{ detail }}")
            if template_name == '403.html':
                return Template("403: {{ detail }}")
            raise TemplateDoesNotExist(template_name)

        django.template.loader.get_template = get_template

    def tearDown(self):
        """
        Revert monkeypatching
        """
        django.template.loader.get_template = self.get_template

    def test_not_found_html_view_with_template(self):
        response = self.client.get('/not_found')
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        self.assertTrue(response.content in (
            six.b("404: Not found"), six.b("404 Not Found")))
        self.assertEqual(response['Content-Type'], 'text/html; charset=utf-8')

    def test_permission_denied_html_view_with_template(self):
        response = self.client.get('/permission_denied')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertTrue(response.content in (
            six.b("403: Permission denied"), six.b("403 Forbidden")))
        self.assertEqual(response['Content-Type'], 'text/html; charset=utf-8')
{ "content_hash": "24a50e22e617e301cb2bf56a5ae11378", "timestamp": "", "source": "github", "line_count": 120, "max_line_length": 78, "avg_line_length": 34.90833333333333, "alnum_prop": 0.6700883265695871, "repo_name": "MobileWebApps/backend-python-rest-gae", "id": "95ca42341e00a8f12546ff54ee8e0f54c4ddc4a3", "size": "4189", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/rest_framework/tests/test_htmlrenderer.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "1860" }, { "name": "JavaScript", "bytes": "1580" }, { "name": "Python", "bytes": "3245769" }, { "name": "Shell", "bytes": "868" } ], "symlink_target": "" }
import android_commands
import constants
import logging
import os
import run_tests_helper
import subprocess
import time


class FakeDns(object):
  """Wrapper class for the fake_dns tool."""

  _FAKE_DNS_PATH = constants.TEST_EXECUTABLE_DIR + '/fake_dns'

  def __init__(self, adb):
    """
    Args:
      adb: the AndroidCommands to use.
    """
    self._adb = adb
    self._fake_dns = None
    self._original_dns = None

  def _PushAndStartFakeDns(self):
    """Starts the fake_dns server, which answers all name queries with 127.0.0.1.

    Returns:
      subprocess instance connected to the fake_dns process on the device.
    """
    self._adb.PushIfNeeded(
        os.path.join(run_tests_helper.CHROME_DIR, 'out', 'Release', 'fake_dns'),
        FakeDns._FAKE_DNS_PATH)
    return subprocess.Popen(
        ['adb', '-s', self._adb._adb.GetSerialNumber(),
         'shell', '%s -D' % FakeDns._FAKE_DNS_PATH])

  def SetUp(self):
    """Configures the system to point at a DNS server that answers every query with 127.0.0.1.

    This can be used in combination with the forwarder to forward all web
    traffic to a replay server.

    The TearDown() method will perform all cleanup.
    """
    self._adb.RunShellCommand('ip route add 8.8.8.0/24 via 127.0.0.1 dev lo')
    self._fake_dns = self._PushAndStartFakeDns()
    self._original_dns = self._adb.RunShellCommand('getprop net.dns1')[0]
    self._adb.RunShellCommand('setprop net.dns1 127.0.0.1')
    time.sleep(2)  # Time for server to start and the setprop to take effect.

  def TearDown(self):
    """Shuts down the fake_dns."""
    if self._fake_dns:
      if not self._original_dns or self._original_dns == '127.0.0.1':
        logging.warning('Bad original DNS, falling back to Google DNS.')
        self._original_dns = '8.8.8.8'
      self._adb.RunShellCommand('setprop net.dns1 %s' % self._original_dns)
      self._fake_dns.kill()
      self._adb.RunShellCommand('ip route del 8.8.8.0/24 via 127.0.0.1 dev lo')
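
# Illustrative usage sketch: `adb` stands for an AndroidCommands instance for
# an attached device, created elsewhere in this test harness; the test entry
# point named below is hypothetical.
#
#   fake_dns = FakeDns(adb)
#   fake_dns.SetUp()
#   try:
#     RunTestsThatNeedCannedDns()
#   finally:
#     fake_dns.TearDown()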
{ "content_hash": "baaadc3c237c823b6d040b2656b5264f", "timestamp": "", "source": "github", "line_count": 58, "max_line_length": 80, "avg_line_length": 33.86206896551724, "alnum_prop": 0.6537678207739308, "repo_name": "paul99/clank", "id": "cc7cb5af4e8f9c0d31eed2f1c1b6a40dbc800de9", "size": "2149", "binary": false, "copies": "1", "ref": "refs/heads/chrome-18.0.1025.469", "path": "build/android/fake_dns.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Assembly", "bytes": "56689" }, { "name": "C", "bytes": "8707669" }, { "name": "C++", "bytes": "89569069" }, { "name": "Go", "bytes": "10440" }, { "name": "Java", "bytes": "1201391" }, { "name": "JavaScript", "bytes": "5587454" }, { "name": "Lua", "bytes": "13641" }, { "name": "Objective-C", "bytes": "4568468" }, { "name": "PHP", "bytes": "11278" }, { "name": "Perl", "bytes": "51521" }, { "name": "Python", "bytes": "2615443" }, { "name": "R", "bytes": "262" }, { "name": "Ruby", "bytes": "107" }, { "name": "Scheme", "bytes": "10604" }, { "name": "Shell", "bytes": "588836" } ], "symlink_target": "" }
import docker import docker.errors try: from kazoo.client import KazooClient except ImportError: print("Kazoo library not found, Zookeeper connect URLs will not work") KazooClient = None class SwarmClient: def __init__(self, swarm_url): assert isinstance(swarm_url, str) self.cli = None self._connect(swarm_url) assert self.cli is not None def _connect(self, swarm_url): if swarm_url.startswith('tcp://'): self._connect_tcp(swarm_url) elif swarm_url.startswith('zk://'): self._connect_zookeeper(swarm_url) else: print("Unknown connection URL schema") return def _connect_zookeeper(self, zk_url): """ Find the active Swarm master using Zookeeper """ aux = zk_url[len('zk://'):] zk_hosts = aux.split('/')[0] path = aux[aux.find('/'):] + '/docker/swarm/leader' zk = KazooClient(hosts=zk_hosts) zk.start() master, stat = zk.get(path) zk.stop() self._connect_tcp('tcp://' + master.decode('ascii')) def _connect_tcp(self, tcp_url): self.cli = docker.Client(base_url=tcp_url) print("Connected to Swarm at {}".format(tcp_url)) def manage_event(self, callback): for event in self.cli.events(decode=True): if not callback(event): break def inspect_container(self, docker_id) -> dict: try: docker_info = self.cli.inspect_container(container=docker_id) except docker.errors.APIError: return None info = { "ip_address": docker_info["NetworkSettings"]["IPAddress"], "docker_id": docker_id, "hostname": docker_info["Config"]["Hostname"] } if docker_info["State"]["Running"]: info["state"] = "running" info["running"] = True elif docker_info["State"]["Paused"]: info["state"] = "paused" info["running"] = True elif docker_info["State"]["Restarting"]: info["state"] = "restarting" info["running"] = True elif docker_info["State"]["OOMKilled"]: info["state"] = "killed" info["running"] = False elif docker_info["State"]["Dead"]: info["state"] = "killed" info["running"] = False else: info["state"] = "unknown" info["running"] = False return info
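
# Illustrative usage sketch: the manager address and container ID below are
# placeholders; this assumes a Swarm endpoint is reachable at that address.
if __name__ == '__main__':
    client = SwarmClient('tcp://127.0.0.1:2375')
    print(client.inspect_container('my-container-id'))

    def on_event(event):
        print(event)
        # keep watching until some container dies
        return event.get('status') != 'die'

    client.manage_event(on_event)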
{ "content_hash": "e6154d116e5c25c9c4fcbcd05995d8cf", "timestamp": "", "source": "github", "line_count": 77, "max_line_length": 74, "avg_line_length": 32.57142857142857, "alnum_prop": 0.5450558213716108, "repo_name": "DistributedSystemsGroup/swarm-dns", "id": "a521f8b5c5a004613ffe8f519b7574f829db6e3d", "size": "2508", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "swarm.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "11055" }, { "name": "Shell", "bytes": "511" } ], "symlink_target": "" }
""" Utilities to assist with orchestrating and testing an onprem deployment """ import copy import itertools import logging from typing import List import requests from retrying import retry from ssh.ssher import Ssher from test_util.helpers import ApiClientSession, Host, Url log = logging.getLogger(__name__) def log_and_raise_if_not_ok(response: requests.Response): if not response.ok: log.error(response.content.decode()) response.raise_for_status() class OnpremCluster: def __init__( self, ssher: Ssher, masters: List[Host], private_agents: List[Host], public_agents: List[Host], bootstrap_host: Host): """ An abstration for an arbitrary group of servers to be used as bootstrapping node and deployment nodes for DC/OS Args: ssher: Ssher object for accessing any given node in the cluster masters: list of Hosts tuples to be used as masters private_agents: list of Host tuples to be used as private agents public_agents: list of Host tuples to be used as public agents bootstrap_host: Host tuple for the bootstrap host I.E. has installer downloaded to it and perhaps hosts a bootstrap ZooKeeper """ self.ssher = ssher self.masters = masters self.private_agents = private_agents self.public_agents = public_agents self.bootstrap_host = bootstrap_host assert all(h.private_ip for h in self.hosts), ( 'All cluster hosts require a private IP. hosts: {}'.format(repr(self.hosts)) ) def get_master_ips(self): return copy.copy(self.masters) def get_private_agent_ips(self): return copy.copy(self.private_agents) def get_public_agent_ips(self): return copy.copy(self.public_agents) @classmethod def from_hosts(cls, ssher, hosts, num_masters, num_private_agents, num_public_agents): bootstrap_host, masters, private_agents, public_agents = ( cls.partition_cluster(hosts, num_masters, num_private_agents, num_public_agents)) return cls( ssher=ssher, masters=masters, private_agents=private_agents, public_agents=public_agents, bootstrap_host=bootstrap_host, ) @property def hosts(self): return self.masters + self.private_agents + self.public_agents + ( [self.bootstrap_host] if self.bootstrap_host else [] ) @staticmethod def partition_cluster( hosts: List[Host], num_masters: int, num_agents: int, num_public_agents: int): """Return (bootstrap, masters, agents, public_agents) from hosts.""" hosts_iter = iter(sorted(hosts)) return ( next(hosts_iter), list(itertools.islice(hosts_iter, num_masters)), list(itertools.islice(hosts_iter, num_agents)), list(itertools.islice(hosts_iter, num_public_agents)), ) def start_bootstrap_zk(self): zk_host = self.bootstrap_host.private_ip + ':2181' self.ssher.command( self.bootsrap_host.private_ip, ['docker', 'run', '--name', 'dcos-bootstrap-zk', '--detach=true', '--publish=2181:2181', '--publish=2888:2888', '--publish=3888:3888', 'jplock/zookeeper']) return zk_host def setup_installer_server(self, installer_url: str, offline_mode: bool): log.info('Setting up installer on bootstrap host') return DcosInstallerApiSession.api_session_from_host( self.ssher, self.bootstrap_host.public_ip, installer_url, offline_mode) @retry(wait_fixed=3000, stop_max_delay=300 * 1000) def _download_dcos_installer(ssher, host, installer_path, download_url): """Response status 403 is fatal for curl's retry. 
Additionally, S3 buckets have been returning 403 for valid uploads for 10-15 minutes after CI finished build Therefore, give a five minute buffer to help stabilize CI """ log.info('Attempting to download installer from: ' + download_url) try: ssher.command(host, ['curl', '-fLsSv', '--retry', '20', '-Y', '100000', '-y', '60', '--create-dirs', '-o', installer_path, download_url]) except: log.exception('Download failed!') raise class DcosInstallerApiSession(ApiClientSession): @classmethod def api_session_from_host( cls, ssher: Ssher, host: str, installer_url: str, offline_mode: bool, port: int=9000): """ Will download and start a DC/OS onprem installer and return a DcosInstallerApiSession to interact with it Args: ssher: Ssher object to access the server hosting the installer host: IP address of the target host server installer_url: URL to pull the installer from relative to the host offline_mode: if True, installer will start with the --offline-mode option which disables installing pre-requisites from the internet port: the installer can run on an arbitrary port but defaults to 9000 """ ssher.command(host, ['sudo', 'usermod', '-aG', 'docker', ssher.user]) host_home = ssher.get_home_dir(host) installer_path = host_home + '/dcos_generate_config.sh' _download_dcos_installer(ssher, host, installer_path, installer_url) log.info('Starting installer server at: {}:{}'.format(host, port)) cmd = ['DCOS_INSTALLER_DAEMONIZE=true', 'bash', installer_path, '--web', '-p', str(port)] if offline_mode: cmd.append('--offline') ssher.command(host, cmd) api = cls(Url('http', host, '', '', '', port)) @retry(wait_fixed=1000, stop_max_delay=60000) def wait_for_up(): log.debug('Waiting for installer server...') api.get('/').raise_for_status() wait_for_up() log.info('Installer server is up and running!') return api def genconf(self, config): log.info('Generating configuration on installer server...') response = self.post('/api/v1/configure', json=config) log_and_raise_if_not_ok(response) response_json = response.json() if 'error' in response_json: # genconf was unsuccessful raise Exception('Error generating configuration: {}'.format(response_json['error'])) return response_json def preflight(self) -> None: log.info('Starting preflight...') self.do_and_check('preflight') def deploy(self) -> None: log.info('Starting deploy...') self.do_and_check('deploy') def postflight(self) -> None: log.info('Starting postflight...') self.do_and_check('postflight') def do_and_check(self, action: str) -> None: """Args: action (str): one of 'preflight', 'deploy', 'postflight' """ self.start_action(action) self.wait_for_check_action(action) def wait_for_check_action(self, action: str) -> None: """Retries method against API until returned data shows that all hosts have finished. 
No timeout necessary as the installer sets its own timeout Args: action (str): choies are 'preflight', 'deploy', 'postflight' """ @retry( wait_fixed=10000, retry_on_result=lambda res: res is False, retry_on_exception=lambda ex: False) def wait_for_finish(): output = self.check_action(action) host_data = output['hosts'] finished_run = all(map(lambda host: host['host_status'] not in ['running', 'unstarted'], host_data.values())) if not finished_run: log.info('Processes not yet finished, continuing to wait...') return False return host_data host_data = wait_for_finish() failures = list() for host in host_data.keys(): if host_data[host]['host_status'] != 'success': failures.append(host_data[host]) if len(failures) > 0: raise Exception("Failures detected in {}: {}".format(action, failures)) def start_action(self, action: str) -> None: """Args: action (str): one of 'preflight', 'deploy', 'postflight' """ log_and_raise_if_not_ok(self.post('/api/v1/action/{}'.format(action))) def check_action(self, action: str) -> dict: """Args: action (str): one of 'preflight', 'deploy', 'postflight', 'success' """ log.debug('Checking status of action: {}'.format(action)) r = self.get('/api/v1/action/{}'.format(action)) log_and_raise_if_not_ok(r) return r.json()
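
# Illustrative end-to-end flow built from the classes above; `my_ssher`,
# `my_hosts`, `my_config`, and the installer URL are hypothetical stand-ins.
#
#   cluster = OnpremCluster.from_hosts(
#       ssher=my_ssher, hosts=my_hosts,
#       num_masters=1, num_private_agents=2, num_public_agents=1)
#   installer = cluster.setup_installer_server(
#       'https://example.com/dcos_generate_config.sh', offline_mode=False)
#   installer.genconf(my_config)
#   installer.preflight()
#   installer.deploy()
#   installer.postflight()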
{ "content_hash": "9b5c9699016b687b33ce8440fca4d084", "timestamp": "", "source": "github", "line_count": 239, "max_line_length": 102, "avg_line_length": 37.60669456066945, "alnum_prop": 0.6011348464619493, "repo_name": "vishnu2kmohan/dcos", "id": "497a4ff02ca49a2293a97584de6cd9ebf9a5e827", "size": "8988", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "test_util/onprem.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "2529" }, { "name": "Groovy", "bytes": "715" }, { "name": "HTML", "bytes": "80776" }, { "name": "Lua", "bytes": "178722" }, { "name": "Makefile", "bytes": "179" }, { "name": "Python", "bytes": "1297265" }, { "name": "Shell", "bytes": "68714" } ], "symlink_target": "" }
import json
import os
import re
from os import walk

# TODO - handle types used by conf files


def get_examples(src_dir, path_index):
    examples = {}
    fbp_files = []
    for (dirpath, dirnames, filenames) in walk(src_dir):
        for filename in filenames:
            if filename.endswith(".fbp"):
                fbp_files.append(os.path.join(dirpath, filename))

    for fbp_file in fbp_files:
        with open(fbp_file, "r") as f:
            content = f.read()

        # skip error tests
        if "TEST-EXPECTS-ERROR" in content:
            continue

        for line in content.splitlines():
            nodes = re.findall("[(](?P<name>[^):]+)[):]", line)
            for node in nodes:
                fbp_url = fbp_file[path_index:]
                node_files = examples.setdefault(node, set())
                node_files.add(fbp_url)

    return examples


def print_node_type(outfile, node_type, examples, group_id):
    examples_list = " "
    examples = examples.get(node_type["name"], set())
    for example in sorted(examples):
        # drop .fbp
        examples_list = examples_list + example[:-4] + ", "
    examples_list = examples_list[:-2]

    outfile.write("""\
%(name)s;%(examples)s
""" % {
        "name": node_type["name"],
        "examples": examples_list
    })


def print_description(outfile, description, infile, examples):
    modules = description.keys()
    if len(modules) != 1:
        print("Warning: a single module is expected per file. Skipping %s" %
              infile.name)
        return

    for module in sorted(description.keys()):
        for node_type in description[module]:
            print_node_type(outfile, node_type, examples, module)


def create_doc(outfile, src_dir, desc_dir):
    examples = get_examples(src_dir, len(src_dir))
    json_files = []
    for (dirpath, dirnames, filenames) in walk(desc_dir):
        for filename in filenames:
            if filename.endswith(".json"):
                infile = os.path.join(dirpath, filename)
                json_files.append(infile)

    for infile in sorted(json_files):
        fd = open(infile)
        description = json.load(fd)
        print_description(outfile, description, fd, examples)
        fd.close()


if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("csv", help="Path to write csv file",
                        type=argparse.FileType('w'))
    parser.add_argument("src_dir",
                        help="Path to Soletta src/ directory with example files",
                        type=str)
    parser.add_argument("desc_dir",
                        help="Path to directory with description files in JSON format",
                        type=str)

    args = parser.parse_args()
    create_doc(args.csv, args.src_dir, args.desc_dir)
    args.csv.close()
{ "content_hash": "b77bd13e95a062ebf41619d3be35f3f1", "timestamp": "", "source": "github", "line_count": 91, "max_line_length": 87, "avg_line_length": 31.934065934065934, "alnum_prop": 0.5677907777013076, "repo_name": "bsmelo/soletta", "id": "49501f029b1dfe4c42696b37fe3a98539016735a", "size": "3874", "binary": false, "copies": "9", "ref": "refs/heads/master", "path": "data/scripts/sol-flow-node-type-examples-list.py", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "Awk", "bytes": "3347" }, { "name": "C", "bytes": "5917647" }, { "name": "C++", "bytes": "176079" }, { "name": "CSS", "bytes": "3953" }, { "name": "HTML", "bytes": "1623" }, { "name": "JavaScript", "bytes": "120410" }, { "name": "Makefile", "bytes": "66060" }, { "name": "NSIS", "bytes": "1390" }, { "name": "Objective-C", "bytes": "959" }, { "name": "Python", "bytes": "237605" }, { "name": "Shell", "bytes": "8015" }, { "name": "Smarty", "bytes": "1160" }, { "name": "VimL", "bytes": "748" } ], "symlink_target": "" }
import matplotlib.pyplot as plt


def plot(S, limit, plt_num=1):
    fig = plt.figure(plt_num)
    rpoints = [cnum.real for cnum in S]
    cpoints = [cnum.imag for cnum in S]
    plt.plot(rpoints, cpoints, 'ro')
    plt.axis([-limit, limit, -limit, limit])
    plt.grid()


def quniver(start, cvector, limit, plt_num=1):
    fig = plt.figure(plt_num)
    plt.quiver(start.real, start.imag, cvector.real, cvector.imag,
               angles='xy', scale_units='xy', scale=1)
    plt.axis([-limit, limit, -limit, limit])
    plt.grid()
    plt.draw()


def show():
    plt.show()
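
# Illustrative usage sketch: scatter a few complex numbers and draw one
# complex number as a vector from the origin, then display the figure.
if __name__ == '__main__':
    S = [1 + 2j, -0.5 + 0.3j, -1 - 1j]
    plot(S, limit=3)
    quniver(0 + 0j, 1 + 1j, limit=3)
    show()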
{ "content_hash": "d0fc86b5097a5262d0214cbf24d42407", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 104, "avg_line_length": 28.894736842105264, "alnum_prop": 0.644808743169399, "repo_name": "RyodoTanaka/Coding_The_Matrix", "id": "bd683e3fca2c18849055ba47f8e075d484f15b6f", "size": "596", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "python/chap_1/cplotting.py", "mode": "33261", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "172694" }, { "name": "TeX", "bytes": "1541" } ], "symlink_target": "" }
""" Qt utilities. """ import os from qtpy.compat import to_qvariant from qtpy.QtCore import Qt, QLocale, QTranslator, QLibraryInfo, QTimer from qtpy.QtWidgets import QAction, QApplication, QMenu, QToolButton def qapplication(translate=True, test_time=3): """Return QApplication instance Creates it if it doesn't already exist""" app = QApplication.instance() if app is None: app = QApplication(['Conda-Manager']) app.setApplicationName('Conda-Manager') if translate: install_translator(app) test_travis = os.environ.get('TEST_CI', None) if test_travis is not None: timer_shutdown = QTimer(app) timer_shutdown.timeout.connect(app.quit) timer_shutdown.start(test_time*1000) return app QT_TRANSLATOR = None def install_translator(qapp): """Install Qt translator to the QApplication instance""" global QT_TRANSLATOR if QT_TRANSLATOR is None: qt_translator = QTranslator() if qt_translator.load( "qt_"+QLocale.system().name(), QLibraryInfo.location(QLibraryInfo.TranslationsPath)): QT_TRANSLATOR = qt_translator # Keep reference alive if QT_TRANSLATOR is not None: qapp.installTranslator(QT_TRANSLATOR) def action2button(action, autoraise=True, text_beside_icon=False, parent=None): """Create a QToolButton directly from a QAction object""" if parent is None: parent = action.parent() button = QToolButton(parent) button.setDefaultAction(action) button.setAutoRaise(autoraise) if text_beside_icon: button.setToolButtonStyle(Qt.ToolButtonTextBesideIcon) return button def toggle_actions(actions, enable): """Enable/disable actions""" if actions is not None: for action in actions: if action is not None: action.setEnabled(enable) def create_action(parent, text, shortcut=None, icon=None, tip=None, toggled=None, triggered=None, data=None, menurole=None, context=Qt.WindowShortcut): """Create a QAction""" action = QAction(text, parent) if triggered is not None: action.triggered.connect(triggered) if toggled is not None: action.toggled.connect(toggled) action.setCheckable(True) if icon is not None: action.setIcon(icon) if shortcut is not None: action.setShortcut(shortcut) if tip is not None: action.setToolTip(tip) action.setStatusTip(tip) if data is not None: action.setData(to_qvariant(data)) if menurole is not None: action.setMenuRole(menurole) #TODO: Hard-code all shortcuts and choose context=Qt.WidgetShortcut # (this will avoid calling shortcuts from another dockwidget # since the context thing doesn't work quite well with these widgets) action.setShortcutContext(context) return action def add_actions(target, actions, insert_before=None): """Add actions to a menu""" previous_action = None target_actions = list(target.actions()) if target_actions: previous_action = target_actions[-1] if previous_action.isSeparator(): previous_action = None for action in actions: if (action is None) and (previous_action is not None): if insert_before is None: target.addSeparator() else: target.insertSeparator(insert_before) elif isinstance(action, QMenu): if insert_before is None: target.addMenu(action) else: target.insertMenu(insert_before, action) elif isinstance(action, QAction): if insert_before is None: target.addAction(action) else: target.insertAction(insert_before, action) previous_action = action
{ "content_hash": "e72651533be11f03b30fb5d4ffb49b9e", "timestamp": "", "source": "github", "line_count": 119, "max_line_length": 79, "avg_line_length": 33.63865546218487, "alnum_prop": 0.6325256057956533, "repo_name": "spyder-ide/conda-manager", "id": "feddbc305709a7274b329863293ee41623a51556", "size": "4225", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "conda_manager/utils/qthelpers.py", "mode": "33261", "license": "mit", "language": [ { "name": "Python", "bytes": "303016" }, { "name": "Shell", "bytes": "81" } ], "symlink_target": "" }
import os import unittest from conans.test.utils.tools import TestClient from conans.paths import CONANFILE conanfile = """ from conans import ConanFile, tools class AConan(ConanFile): name = "Hello0" version = "0.1" def build(self): self.output.warn("build() IN LOCAL CACHE=> %s" % str(self.in_local_cache)) def package(self): self.output.warn("package() IN LOCAL CACHE=> %s" % str(self.in_local_cache)) """ class InLocalCacheTest(unittest.TestCase): def test_in_local_cache_flag(self): client = TestClient() client.save({CONANFILE: conanfile}) client.run("export . lasote/stable") client.run("install Hello0/0.1@lasote/stable --build missing") self.assertIn("build() IN LOCAL CACHE=> True", client.user_io.out) self.assertIn("package() IN LOCAL CACHE=> True", client.user_io.out) client = TestClient() client.save({CONANFILE: conanfile}) client.run("install .") client.run("build .") self.assertIn("build() IN LOCAL CACHE=> False", client.user_io.out) pack_folder = os.path.join(client.current_folder, "package") os.mkdir(pack_folder) client.current_folder = pack_folder client.run("package .. --build-folder ..") self.assertIn("package() IN LOCAL CACHE=> False", client.user_io.out) # Confirm that we have the flag depending on the recipe too client = TestClient() client.save({CONANFILE: conanfile}) client.run("export . lasote/stable") conanfile_reuse = """ from conans import ConanFile, tools class OtherConan(ConanFile): name = "Hello1" version = "0.1" requires = "Hello0/0.1@lasote/stable" def build(self): pass """ client.save({CONANFILE: conanfile_reuse}, clean_first=True) client.run("install . --build") self.assertIn("build() IN LOCAL CACHE=> True", client.user_io.out) self.assertIn("package() IN LOCAL CACHE=> True", client.user_io.out) client.run("export . lasote/stable") client.run("install Hello1/0.1@lasote/stable --build") self.assertIn("build() IN LOCAL CACHE=> True", client.user_io.out) self.assertIn("package() IN LOCAL CACHE=> True", client.user_io.out)
{ "content_hash": "514661e67e1a5a98e5713f2e0d22b511", "timestamp": "", "source": "github", "line_count": 69, "max_line_length": 84, "avg_line_length": 33.27536231884058, "alnum_prop": 0.6337108013937283, "repo_name": "birsoyo/conan", "id": "e56dcfeff0eca18ef9df79d1a0c3e8ccbc59fb99", "size": "2296", "binary": false, "copies": "3", "ref": "refs/heads/develop", "path": "conans/test/functional/in_local_cache_test.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "1100" }, { "name": "Groovy", "bytes": "6251" }, { "name": "Python", "bytes": "3101477" }, { "name": "Shell", "bytes": "1864" } ], "symlink_target": "" }
def main(request, response): response.headers.set("Content-Security-Policy", "default-src 'none'; style-src 'unsafe-inline'") response.headers.set("X-Content-Security-Policy", "default-src 'none'; style-src 'unsafe-inline'") response.headers.set("X-WebKit-CSP", "default-src 'none'; style-src 'unsafe-inline'") return """<!DOCTYPE html> <!-- Copyright (c) 2013 Intel Corporation. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of works must retain the original copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the original copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Intel Corporation nor the names of its contributors may be used to endorse or promote products derived from this work without specific prior written permission. THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Authors: Hao, Yunfei <yunfeix.hao@intel.com> --> <html> <head> <title>CSP Test: csp_default-src_none_font_blocked_int</title> <link rel="author" title="Intel" href="http://www.intel.com"/> <link rel="help" href="http://www.w3.org/TR/2012/CR-CSP-20121115/#default-src"/> <meta name="flags" content=""/> <meta charset="utf-8"/> <style> @font-face { font-family: Canvas; src: url("support/w3c/CanvasTest.ttf"); } #test { font-family: Canvas; } </style> </head> <body> <p>Test passes if the two lines are the same in font</p> <div id="test">1234 ABCD</div> <div>1234 ABCD</div> </body> </html> """
{ "content_hash": "87a3c9a6b1e4e51d44248b2a3fa4c140", "timestamp": "", "source": "github", "line_count": 59, "max_line_length": 102, "avg_line_length": 42.42372881355932, "alnum_prop": 0.7203355972832601, "repo_name": "yugang/crosswalk-test-suite", "id": "2a45ef8585581fd326bf6e63056d41ff4beeb4c2", "size": "2503", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "webapi/tct-csp-w3c-tests/csp-py/csp_default-src_none_font_blocked_int-manual.py", "mode": "33261", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "3495" }, { "name": "CSS", "bytes": "1694855" }, { "name": "Erlang", "bytes": "2850" }, { "name": "Java", "bytes": "155590" }, { "name": "JavaScript", "bytes": "32256550" }, { "name": "PHP", "bytes": "43783" }, { "name": "Perl", "bytes": "1696" }, { "name": "Python", "bytes": "4215706" }, { "name": "Shell", "bytes": "638387" }, { "name": "XSLT", "bytes": "2143471" } ], "symlink_target": "" }
from website import db class Position(db.Model): id = db.Column(db.Integer, primary_key=True) key = db.Column(db.Text) name = db.Column(db.Text)
{ "content_hash": "b0e2d0fc2d5c68f98f1436e3df26479e", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 48, "avg_line_length": 22.714285714285715, "alnum_prop": 0.6729559748427673, "repo_name": "alexraileanu/worldcup", "id": "7db0fc5ed587c00edddc6a30a1abe6ace48fc15b", "size": "159", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "website/models/position.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "58" }, { "name": "HTML", "bytes": "10255" }, { "name": "JavaScript", "bytes": "1454" }, { "name": "Python", "bytes": "8092" } ], "symlink_target": "" }
""" A minimal array class for debugging distributed AbstractShardedProbabilityFunction operations. The array class can be used instead of jax-arrays to debug operations on the static attributes of AbstractShardedProbabilityFunction for cases where using actual jax-arrays would be infeasible due to memory constraints. """ from typing import Tuple import numpy as np import jax def flatten(list_of_list): return [l for sublist in list_of_list for l in sublist] class DebugArray: """ Helper array class for debugging AbstractShardedProbabilityFunction. """ def __init__(self, array, shape: Tuple[Tuple[int]]): """ Initialize a `DebugArray`. Args: array: A jax array of arbitrary size. The shape of the passed array can be different from `shape`. The array is used only to enable natice jitting support for the class. shape: The hypothetical shape of the array. This should be a tuple[tuple[int]]. Operations on DebugArray change the DebugArray.shape attribute. For example, transposing the array transposes the `shape` in the given way. `DebugArray.reshape` collects combined axes into a single tuple, such that `shape` is a tuple of tuples of length>=1. """ self.shape = tuple(shape) self.array = array def reshape(self, new_shape): _shape = np.cumsum([0] + [int(np.log2(s)) for s in new_shape]) flatshape = flatten(self.shape) shape = [] for n in range(len(_shape) - 1): shape.append(tuple(flatshape[_shape[n] : _shape[n + 1]])) self.shape = tuple(shape) return self def transpose(self, perm): self.shape = tuple([self.shape[p] for p in perm]) return self def moveaxis(self, src, dst): shape = list(self.shape) shape.insert(dst, self.shape[src]) self.shape = tuple(shape) return self def flatten_DebugArray(array): return (array.array,), (array.shape,) def unflatten_DebugArray(static_data, children): return DebugArray(*children, *static_data) jax.tree_util.register_pytree_node( DebugArray, flatten_DebugArray, unflatten_DebugArray )
{ "content_hash": "c28646fc1211d6714ad7fe2e84e5e6dd", "timestamp": "", "source": "github", "line_count": 71, "max_line_length": 82, "avg_line_length": 31.788732394366196, "alnum_prop": 0.6566238369517058, "repo_name": "google/distla_core", "id": "64032cb0e93ac8e98ac6273533821114888552f2", "size": "2941", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "distla/distla_core/asic_tests/sharded_probability_function/debug_array.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "1317325" }, { "name": "Shell", "bytes": "5454" } ], "symlink_target": "" }
"""Helpers for testing django apps.""" import os.path try: from PIL import Image except ImportError: try: import Image except ImportError: Image = None from django.conf import settings from django.db import models class StandardModel(models.Model): foo = models.CharField(max_length=20) class NonIntegerPk(models.Model): foo = models.CharField(max_length=20, primary_key=True) bar = models.CharField(max_length=20, blank=True) class MultifieldModel(models.Model): slug = models.SlugField(max_length=20, unique=True) text = models.CharField(max_length=20) class AbstractBase(models.Model): foo = models.CharField(max_length=20) class Meta: abstract = True class ConcreteSon(AbstractBase): pass class AbstractSon(AbstractBase): class Meta: abstract = True class ConcreteGrandSon(AbstractSon): pass class StandardSon(StandardModel): pass class PointedModel(models.Model): foo = models.CharField(max_length=20) class PointerModel(models.Model): bar = models.CharField(max_length=20) pointed = models.OneToOneField( PointedModel, related_name='pointer', null=True, on_delete=models.CASCADE ) class WithDefaultValue(models.Model): foo = models.CharField(max_length=20, default='') WITHFILE_UPLOAD_TO = 'django' WITHFILE_UPLOAD_DIR = os.path.join(settings.MEDIA_ROOT, WITHFILE_UPLOAD_TO) class WithFile(models.Model): afile = models.FileField(upload_to=WITHFILE_UPLOAD_TO) if Image is not None: # PIL is available class WithImage(models.Model): animage = models.ImageField(upload_to=WITHFILE_UPLOAD_TO) size = models.IntegerField(default=0) else: class WithImage(models.Model): pass class WithSignals(models.Model): foo = models.CharField(max_length=20) class CustomManager(models.Manager): def create(self, arg=None, **kwargs): return super(CustomManager, self).create(**kwargs) class WithCustomManager(models.Model): foo = models.CharField(max_length=20) objects = CustomManager() class AbstractWithCustomManager(models.Model): custom_objects = CustomManager() class Meta: abstract = True class FromAbstractWithCustomManager(AbstractWithCustomManager): pass
{ "content_hash": "305dfb869c96659923d50a39707f9820", "timestamp": "", "source": "github", "line_count": 116, "max_line_length": 75, "avg_line_length": 20.00862068965517, "alnum_prop": 0.7035760448082723, "repo_name": "rrauenza/factory_boy", "id": "20cdb8c04e773dfd9e488f8ee1571a8e88f19a7c", "size": "2382", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/djapp/models.py", "mode": "33188", "license": "mit", "language": [ { "name": "Makefile", "bytes": "2321" }, { "name": "Python", "bytes": "309597" } ], "symlink_target": "" }
from .vip_dataset import load_stock_data
{ "content_hash": "e929744be39cfc2d31f487fa864458b7", "timestamp": "", "source": "github", "line_count": 1, "max_line_length": 40, "avg_line_length": 41, "alnum_prop": 0.8048780487804879, "repo_name": "stonewell/learn-curve", "id": "8572ac8f21554559456eb3bcd0c218e6fc6c13ab", "size": "81", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "src/stock_data_provider/cn_a/__init__.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "799918" } ], "symlink_target": "" }
""" WSGI config for {{cookiecutter.project_name}} project. This module contains the WSGI application used by Django's development server and any production WSGI deployments. It should expose a module-level variable named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover this application via the ``WSGI_APPLICATION`` setting. Usually you will have the standard Django WSGI application here, but it also might make sense to replace the whole Django WSGI application with a custom one that later delegates to the Django one. For example, you could introduce WSGI middleware here, or combine a Django application with an application of another framework. """ import os # This application object is used by any WSGI server configured to use this # file. This includes Django's development server, if the WSGI_APPLICATION # setting points here. from configurations.wsgi import get_wsgi_application # noqa # We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks # if running multiple sites in the same mod_wsgi process. To fix this, use # mod_wsgi daemon mode with each site in its own daemon process, or use os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{cookiecutter.project_name}}.settings") os.environ.setdefault("DJANGO_CONFIGURATION", "Dev") application = get_wsgi_application()
{ "content_hash": "2945c522a5b9b868e716b07ec577d5b7", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 89, "avg_line_length": 43.193548387096776, "alnum_prop": 0.7871545929798357, "repo_name": "wildfish/wildfish-django-starter", "id": "27619299eb07e3afbc60fe69457bc20b8b6100c9", "size": "1339", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "{{cookiecutter.project_name}}/{{cookiecutter.project_name}}/wsgi.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Dockerfile", "bytes": "466" }, { "name": "HTML", "bytes": "3756" }, { "name": "Python", "bytes": "21545" }, { "name": "Shell", "bytes": "1223" } ], "symlink_target": "" }
from functools import wraps

from webob.exc import HTTPUnauthorized, HTTPForbidden

from .. import Filter


class AuthenticationProvider:
    def __init__(self, ctx, request):
        self.request = request
        self.ctx = ctx

    @property
    def principal(self):
        raise HTTPUnauthorized()

    def has_permissions(self, permissions=None):
        if self.principal is None:
            raise HTTPUnauthorized()
        if not permissions:
            return True
        if set(getattr(self.principal, 'roles', [])).intersection(permissions):
            return True
        raise HTTPForbidden()


class AuthenticationFilter(Filter):
    def __init__(self, cls):
        self.provider_cls = cls

    def before_request(self, ctx, request):
        request.security = self.provider_cls(ctx, request)
        try:
            request.principal = request.security.principal
        except HTTPUnauthorized:
            request.principal = None
        return request


class Require:
    def __init__(self, permissions=None, request=None):
        self.request = request
        self.permissions = permissions

    def __call__(self, fn):
        @wraps(fn)
        def wrap(ctx, request):
            # use a default so a missing attribute fails the check instead
            # of raising AttributeError
            if not getattr(request, 'security', None):
                raise HTTPUnauthorized()
            if request.security.has_permissions(self.permissions):
                return fn(ctx, request)
        return wrap

    def __enter__(self):
        if not getattr(self.request, 'security', None):
            raise HTTPUnauthorized()
        self.request.security.has_permissions(self.permissions)

    def __exit__(self, exc_type, exc_val, exc_tb):
        pass
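
# Usage sketch (illustrative): a provider that reads a user from a request
# header, plus a handler restricted to the 'admin' role. The `X-User` header
# and ad-hoc principal object are stand-ins for a real user store, and the
# sketch assumes AuthenticationFilter(HeaderAuthProvider) ran earlier in the
# pipeline so that `request.security` is set.
class HeaderAuthProvider(AuthenticationProvider):
    @property
    def principal(self):
        username = self.request.headers.get('X-User')
        if username is None:
            raise HTTPUnauthorized()
        # minimal principal carrying the roles checked by has_permissions()
        return type('Principal', (), {'name': username, 'roles': ['admin']})()


@Require(permissions={'admin'})
def delete_everything(ctx, request):
    return 'deleted'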
{ "content_hash": "bc07ed538561e1a836f07240b86e6b0b", "timestamp": "", "source": "github", "line_count": 58, "max_line_length": 79, "avg_line_length": 28.603448275862068, "alnum_prop": 0.6154309825195902, "repo_name": "comynli/m", "id": "10880671b75ea59cefca2000d249fa44e5d5306f", "size": "1659", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "m/security/__init__.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "16011" } ], "symlink_target": "" }
""" oVirt dynamic inventory script ================================= Generates dynamic inventory file for oVirt. Script will return following attributes for each virtual machine: - id - name - host - cluster - status - description - fqdn - os_type - template - tags - statistics - devices When run in --list mode, virtual machines are grouped by the following categories: - cluster - tag - status Note: If there is some virtual machine which has has more tags it will be in both tag records. Examples: # Execute update of system on webserver virtual machine: $ ansible -i contrib/inventory/ovirt4.py webserver -m yum -a "name=* state=latest" # Get webserver virtual machine information: $ contrib/inventory/ovirt4.py --host webserver Author: Ondra Machacek (@machacekondra) """ import argparse import os import sys from collections import defaultdict try: import ConfigParser as configparser except ImportError: import configparser try: import json except ImportError: import simplejson as json try: import ovirtsdk4 as sdk import ovirtsdk4.types as otypes except ImportError: print('oVirt inventory script requires ovirt-engine-sdk-python >= 4.0.0') sys.exit(1) def parse_args(): """ Create command line parser for oVirt dynamic inventory script. """ parser = argparse.ArgumentParser( description='Ansible dynamic inventory script for oVirt.', ) parser.add_argument( '--list', action='store_true', default=True, help='Get data of all virtual machines (default: True).', ) parser.add_argument( '--host', help='Get data of virtual machines running on specified host.', ) parser.add_argument( '--pretty', action='store_true', default=False, help='Pretty format (default: False).', ) return parser.parse_args() def create_connection(): """ Create a connection to oVirt engine API. """ # Get the path of the configuration file, by default use # 'ovirt.ini' file in script directory: default_path = os.path.join( os.path.dirname(os.path.realpath(__file__)), 'ovirt.ini', ) config_path = os.environ.get('OVIRT_INI_PATH', default_path) # Create parser and add ovirt section if it doesn't exist: config = configparser.SafeConfigParser( defaults={ 'ovirt_url': os.environ.get('OVIRT_URL'), 'ovirt_username': os.environ.get('OVIRT_USERNAME'), 'ovirt_password': os.environ.get('OVIRT_PASSWORD'), 'ovirt_ca_file': os.environ.get('OVIRT_CAFILE'), } ) if not config.has_section('ovirt'): config.add_section('ovirt') config.read(config_path) # Create a connection with options defined in ini file: return sdk.Connection( url=config.get('ovirt', 'ovirt_url'), username=config.get('ovirt', 'ovirt_username'), password=config.get('ovirt', 'ovirt_password', raw=True), ca_file=config.get('ovirt', 'ovirt_ca_file'), insecure=config.get('ovirt', 'ovirt_ca_file') is None, ) def get_dict_of_struct(connection, vm): """ Transform SDK Vm Struct type to Python dictionary. 
""" if vm is None: return dict() vms_service = connection.system_service().vms_service() clusters_service = connection.system_service().clusters_service() vm_service = vms_service.vm_service(vm.id) devices = vm_service.reported_devices_service().list() tags = vm_service.tags_service().list() stats = vm_service.statistics_service().list() labels = vm_service.affinity_labels_service().list() groups = clusters_service.cluster_service( vm.cluster.id ).affinity_groups_service().list() return { 'id': vm.id, 'name': vm.name, 'host': connection.follow_link(vm.host).name if vm.host else None, 'cluster': connection.follow_link(vm.cluster).name, 'status': str(vm.status), 'description': vm.description, 'fqdn': vm.fqdn, 'os_type': vm.os.type, 'template': connection.follow_link(vm.template).name, 'tags': [tag.name for tag in tags], 'affinity_labels': [label.name for label in labels], 'affinity_groups': [ group.name for group in groups if vm.name in [vm.name for vm in connection.follow_link(group.vms)] ], 'statistics': dict( (stat.name, stat.values[0].datum) for stat in stats ), 'devices': dict( (device.name, [ip.address for ip in device.ips]) for device in devices if device.ips ), 'ansible_host': next((device.ips[0].address for device in devices if device.ips), None) } def get_data(connection, vm_name=None): """ Obtain data of `vm_name` if specified, otherwise obtain data of all vms. """ vms_service = connection.system_service().vms_service() clusters_service = connection.system_service().clusters_service() if vm_name: vm = vms_service.list(search='name=%s' % vm_name) or [None] data = get_dict_of_struct( connection=connection, vm=vm[0], ) else: vms = dict() data = defaultdict(list) for vm in vms_service.list(): name = vm.name vm_service = vms_service.vm_service(vm.id) cluster_service = clusters_service.cluster_service(vm.cluster.id) # Add vm to vms dict: vms[name] = get_dict_of_struct(connection, vm) # Add vm to cluster group: cluster_name = connection.follow_link(vm.cluster).name data['cluster_%s' % cluster_name].append(name) # Add vm to tag group: tags_service = vm_service.tags_service() for tag in tags_service.list(): data['tag_%s' % tag.name].append(name) # Add vm to status group: data['status_%s' % vm.status].append(name) # Add vm to affinity group: for group in cluster_service.affinity_groups_service().list(): if vm.name in [ v.name for v in connection.follow_link(group.vms) ]: data['affinity_group_%s' % group.name].append(vm.name) # Add vm to affinity label group: affinity_labels_service = vm_service.affinity_labels_service() for label in affinity_labels_service.list(): data['affinity_label_%s' % label.name].append(name) data["_meta"] = { 'hostvars': vms, } return data def main(): args = parse_args() connection = create_connection() print( json.dumps( obj=get_data( connection=connection, vm_name=args.host, ), sort_keys=args.pretty, indent=args.pretty * 2, ) ) if __name__ == '__main__': main()
{ "content_hash": "7b766d017c005f3d5fb8e0bac6baaa02", "timestamp": "", "source": "github", "line_count": 241, "max_line_length": 96, "avg_line_length": 29.282157676348547, "alnum_prop": 0.6001133626186765, "repo_name": "wwitzel3/awx", "id": "cf2f7ad3c98b1b178faa8c716ce0f65ede3c679d", "size": "7797", "binary": false, "copies": "22", "ref": "refs/heads/devel", "path": "awx/plugins/inventory/ovirt4.py", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "303046" }, { "name": "Dockerfile", "bytes": "5713" }, { "name": "HTML", "bytes": "496559" }, { "name": "JavaScript", "bytes": "3513112" }, { "name": "Makefile", "bytes": "21133" }, { "name": "PowerShell", "bytes": "10176" }, { "name": "Python", "bytes": "3904288" }, { "name": "Shell", "bytes": "13833" } ], "symlink_target": "" }
import os import __main__ as main import Lib.ModulLoader as mod from Lib.ClinqApp import ClinqApp import logging scriptDir = os.path.dirname(os.path.realpath(main.__file__)) logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(message)s', datefmt='%m-%d %H:%M', filename=os.path.join(scriptDir ,'clinq.log'), filemode='w') # define a Handler which writes INFO messages or higher to the sys.stderr console = logging.StreamHandler() console.setLevel(logging.WARNING) # set a format which is mpler for console use formatter = logging.Formatter('%(levelname)s %(message)s') # tell the handler to use this format console.setFormatter(formatter) # add the handler to the root logger logging.getLogger('').addHandler(console) def main(): app = ClinqApp() modules = {} mod.LoadModules(modules,app) app.run() if __name__ == '__main__': main()
{ "content_hash": "d20f403560da02c5556feb85b6a7ffcf", "timestamp": "", "source": "github", "line_count": 38, "max_line_length": 73, "avg_line_length": 25.105263157894736, "alnum_prop": 0.6677148846960168, "repo_name": "Ahn1/Clinq", "id": "529c841abd7d43ba742f1787ada532a12417ed8f", "size": "977", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "daemon/clinq.py", "mode": "33261", "license": "mit", "language": [ { "name": "CSS", "bytes": "173256" }, { "name": "JavaScript", "bytes": "39218" }, { "name": "Python", "bytes": "26025" } ], "symlink_target": "" }
from hashlib import sha512

from openprocurement.api.utils import (
    json_view,
    opresource,
    APIResource,
    save_tender,
    ROUTE_PREFIX,
    context_unpack
)
from openprocurement.relocation.api.utils import (
    extract_transfer, update_ownership, save_transfer
)
from openprocurement.relocation.api.validation import (
    validate_ownership_data, validate_tender_accreditation_level
)


@opresource(name='Tender ownership',
            path='/tenders/{tender_id}/ownership',
            description="Tenders Ownership")
class TenderResource(APIResource):

    @json_view(permission='create_tender',
               validators=(validate_tender_accreditation_level,
                           validate_ownership_data,))
    def post(self):
        tender = self.request.validated['tender']
        data = self.request.validated['ownership_data']

        if tender.transfer_token == sha512(data['transfer']).hexdigest():
            location = self.request.route_path('Tender', tender_id=tender.id)
            location = location[len(ROUTE_PREFIX):]  # strips /api/<version>
            transfer = extract_transfer(self.request, transfer_id=data['id'])
            if transfer.get('usedFor') and transfer.get('usedFor') != location:
                self.request.errors.add('body', 'transfer', 'Transfer already used')
                self.request.errors.status = 403
                return
        else:
            self.request.errors.add('body', 'transfer', 'Invalid transfer')
            self.request.errors.status = 403
            return

        update_ownership(tender, transfer)
        self.request.validated['tender'] = tender

        transfer.usedFor = location
        self.request.validated['transfer'] = transfer
        if save_transfer(self.request):
            self.LOGGER.info(
                'Updated transfer relation {}'.format(transfer.id),
                extra=context_unpack(
                    self.request, {'MESSAGE_ID': 'transfer_relation_update'}))

            if save_tender(self.request):
                self.LOGGER.info(
                    'Updated ownership of tender {}'.format(tender.id),
                    extra=context_unpack(
                        self.request, {'MESSAGE_ID': 'tender_ownership_update'}))

                return {'data': tender.serialize('view')}
{ "content_hash": "412c7129cd3ae3a7de495551137f936e", "timestamp": "", "source": "github", "line_count": 56, "max_line_length": 110, "avg_line_length": 40.55357142857143, "alnum_prop": 0.6261558784676354, "repo_name": "Leits/openprocurement.relocation.api", "id": "d1dc89a42c8d2d21763117348b29ccb43b162c51", "size": "2295", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "openprocurement/relocation/api/views/tender.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "115274" } ], "symlink_target": "" }
""" This module holds all view functions for the authentication module. These functions include the following: """ from flask import Blueprint, flash, redirect, render_template, request, session, url_for from app import logger from app.mod_auth.form import LoginForm, RegistrationForm from app.mod_auth.helper import onAuthRedirect, requireAuth, generateHash from app.mod_auth.model import AuthLevel, User auth = Blueprint('auth', __name__, template_folder = 'templates') @auth.route('/') def default(): """The default route for the authentication-module. """ return redirect(url_for('auth.info')) @auth.route('/register', methods = ['GET', 'POST']) @onAuthRedirect() def register(): """This function allows to register a new user to the system. Upon a GET request a RegistrationForm will be shown to the user. Upon a POST request the form will be validated and if valid the user will get assigned a AuthLevel and his password will be hashed. He will then be added to the database and redirect to the default route of the authentication-module. Should the form be invalid, the user will be shown the form again. """ form = RegistrationForm(request.form) if request.method == 'POST' and form.validate(): user = User() form.populate_obj(user) user.password = generateHash(user.password) user.authLevel = AuthLevel.USER user.save() logger.info('A user has been added.') flash('Your user account has been created.') return redirect(url_for('auth.login')) return render_template('auth/registration.html', form = form) @auth.route('/login', methods = ['GET', 'POST']) @onAuthRedirect() def login(): """This function logs a user into the system. Upon a GET request a LoginForm will be shown to the user. Upon a POST request the form will be validated and if valid the users specified password will be hashed and compared to the stored password. Should they be equal the user will be logged in (as such his User object will be stored in the session) and redirected to the default page of the authentication-module. Is this not the case or if the form was invalid in the first place, he will be shown the form again. """ form = LoginForm(request.form) if request.method == 'POST' and form.validate(): user = User.objects(username = form.username.data).first() if user is not None: if user.password == generateHash(form.password.data): session['user'] = user session['currency'] = u"\u20AC" return redirect(session.get('next', url_for('budget.showSummary'))) logger.info('User %s has logged in.' % user.username) flash('The specified username and/or password were incorrect.') return render_template('auth/login.html', form = form) @auth.route('/logout') @requireAuth() def logout(): """This function logs a user out of the system. Should the user be logged in, his User object will be poped from the session and he will be redirected to the default page for the authentication-module. Should he not be logged in, please see: app.mod_auth.helper.requireAuth """ logger.info('User %s has logged out.' % session.get('user')['username']) session.pop('user') return redirect(url_for('budget.showSummary')) @auth.route('/info') @requireAuth() def info(): return "This is a test."
{ "content_hash": "b7246de1394b25c8fae3dd2c357fc634", "timestamp": "", "source": "github", "line_count": 92, "max_line_length": 88, "avg_line_length": 39.55434782608695, "alnum_prop": 0.6564990381973069, "repo_name": "Zillolo/mana-vault", "id": "75c0459a598c975f7dce633a16fa081cb766d9fb", "size": "3639", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/mod_auth/controller.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "475" }, { "name": "HTML", "bytes": "7376" }, { "name": "JavaScript", "bytes": "484" }, { "name": "Python", "bytes": "16235" } ], "symlink_target": "" }
""" ************************* Plot classes and tools for use in Orange widgets ************************* The main class of this module is :obj:`.OWPlot`, from which all plots in visualization widgets should inherit. This module also contains plot elements, which are normally used by the :obj:`.OWPlot`, but can also be used directly or subclassed """ from .owplotgui import * from .owpalette import * from .owconstants import * try: from .owcurve import * from .owpoint import * from .owlegend import * from .owaxis import * from .owplot import * from .owtools import * except ImportError: pass
{ "content_hash": "dc2ff35ba2dfe5de87ad5e6100897eb6", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 88, "avg_line_length": 23.444444444444443, "alnum_prop": 0.655608214849921, "repo_name": "qusp/orange3", "id": "46adf8b04ee658d0182709b336a4a8b57884469e", "size": "633", "binary": false, "copies": "10", "ref": "refs/heads/master", "path": "Orange/widgets/utils/plot/__init__.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "C", "bytes": "20412" }, { "name": "GLSL", "bytes": "75" }, { "name": "JavaScript", "bytes": "3025" }, { "name": "NSIS", "bytes": "19239" }, { "name": "Python", "bytes": "3378832" }, { "name": "Shell", "bytes": "37336" } ], "symlink_target": "" }
import json
import os
import pathlib
import pytest
import string

from autorelease import github
from autorelease.common import _determine_language, guess_language

if not os.environ.get("GITHUB_TOKEN"):
    pytest.skip(
        "skipping tests that require a valid github token", allow_module_level=True
    )


def repo_name_to_test_name(repo_name: str) -> str:
    letters = []
    for letter in repo_name:
        if letter in string.ascii_lowercase:
            letters.append(letter)
        elif letter in string.ascii_uppercase:
            if letters:
                letters.append("_")
            letters.append(letter.lower())
        else:
            letters.append("_")
    return "test_guess_" + "".join(letters)


def test_determine_language():
    # determine_language() is the old function that depends on sloth's repo.json.
    # Use it to generate the code for test_guess_language() so we can confirm
    # the output is 100% the same.
    repos_json = (pathlib.Path(__file__).parent / "testdata" / "repos.json").read_text()
    repos = json.loads(repos_json)["repos"]
    python_tools_repo_names = [
        "googleapis/releasetool",
        "googleapis/synthtool",
    ]
    repo_names = python_tools_repo_names + [repo["repo"] for repo in repos]
    languages = set()
    for name in repo_names:
        language = _determine_language(lambda: repos_json, name)
        languages.add((language, name))
    for language, name in sorted(languages):
        test_name = repo_name_to_test_name(name)
        print(
            f"""def {test_name}():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert {repr(language)} == guess_language(gh, {repr(name)})
"""
        )


def test_guess_google_cloud_platform_cpp_docs_samples():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "cpp" == guess_language(gh, "GoogleCloudPlatform/cpp-docs-samples")


def test_guess_googleapis_google_cloud_cpp():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "cpp" == guess_language(gh, "googleapis/google-cloud-cpp")


def test_guess_google_cloud_platform_cloud_code_samples():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "dotnet" == guess_language(gh, "GoogleCloudPlatform/cloud-code-samples")


def test_guess_google_cloud_platform_dotnet_docs_samples():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "dotnet" == guess_language(gh, "GoogleCloudPlatform/dotnet-docs-samples")


def test_guess_google_cloud_platform_getting_started_dotnet():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "dotnet" == guess_language(gh, "GoogleCloudPlatform/getting-started-dotnet")


def test_guess_google_cloud_platform_stackdriver_sandbox():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "dotnet" == guess_language(gh, "GoogleCloudPlatform/stackdriver-sandbox")


def test_guess_googleapis_gapic_generator_csharp():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "dotnet" == guess_language(gh, "googleapis/gapic-generator-csharp")


def test_guess_googleapis_gax_dotnet():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "dotnet" == guess_language(gh, "googleapis/gax-dotnet")


def test_guess_googleapis_google_api_dotnet_client():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "dotnet" == guess_language(gh, "googleapis/google-api-dotnet-client")


def test_guess_googleapis_google_cloud_dotnet():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "dotnet" == guess_language(gh, "googleapis/google-cloud-dotnet")


def test_guess_google_cloud_platform_elixir_samples():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "elixir" == guess_language(gh, "GoogleCloudPlatform/elixir-samples")


def test_guess_googleapis_elixir_google_api():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "elixir" == guess_language(gh, "googleapis/elixir-google-api")


def test_guess_google_cloud_platform_golang_samples():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "go" == guess_language(gh, "GoogleCloudPlatform/golang-samples")


def test_guess_googleapis_gapic_generator_go():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "go" == guess_language(gh, "googleapis/gapic-generator-go")


def test_guess_googleapis_gax_go():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "go" == guess_language(gh, "googleapis/gax-go")


def test_guess_googleapis_go_genproto():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "go" == guess_language(gh, "googleapis/go-genproto")


def test_guess_googleapis_google_api_go_client():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "go" == guess_language(gh, "googleapis/google-api-go-client")


def test_guess_googleapis_google_cloud_go():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "go" == guess_language(gh, "googleapis/google-cloud-go")


def test_guess_googleapis_google_cloud_go_testing():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "go" == guess_language(gh, "googleapis/google-cloud-go-testing")


def test_guess_google_cloud_platform_getting_started_java():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "GoogleCloudPlatform/getting-started-java")


def test_guess_google_cloud_platform_java_docs_samples():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "GoogleCloudPlatform/java-docs-samples")


def test_guess_googleapis_api_common_java():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/api-common-java")


def test_guess_googleapis_gapic_generator():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/gapic-generator")


def test_guess_googleapis_gax_java():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/gax-java")


def test_guess_googleapis_google_api_java_client():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/google-api-java-client")


def test_guess_googleapis_google_api_java_client_services():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/google-api-java-client-services")


def test_guess_googleapis_google_auth_library_java():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/google-auth-library-java")


def test_guess_googleapis_google_cloud_java():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/google-cloud-java")


def test_guess_googleapis_google_http_java_client():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/google-http-java-client")


def test_guess_googleapis_google_oauth_java_client():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/google-oauth-java-client")


def test_guess_googleapis_java_accessapproval():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-accessapproval")


def test_guess_googleapis_java_accesscontextmanager():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-accesscontextmanager")


def test_guess_googleapis_java_analytics_admin():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-analytics-admin")


def test_guess_googleapis_java_asset():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-asset")


def test_guess_googleapis_java_automl():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-automl")


def test_guess_googleapis_java_bigquery():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-bigquery")


def test_guess_googleapis_java_bigqueryconnection():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-bigqueryconnection")


def test_guess_googleapis_java_bigquerydatatransfer():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-bigquerydatatransfer")


def test_guess_googleapis_java_bigqueryreservation():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-bigqueryreservation")


def test_guess_googleapis_java_bigquerystorage():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-bigquerystorage")


def test_guess_googleapis_java_bigtable():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-bigtable")


def test_guess_googleapis_java_bigtable_emulator():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-bigtable-emulator")


def test_guess_googleapis_java_bigtable_hbase():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-bigtable-hbase")


def test_guess_googleapis_java_billing():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-billing")


def test_guess_googleapis_java_billingbudgets():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-billingbudgets")


def test_guess_googleapis_java_cloud_bom():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-cloud-bom")


def test_guess_googleapis_java_cloudbuild():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-cloudbuild")


def test_guess_googleapis_java_common_protos():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-common-protos")


def test_guess_googleapis_java_compute():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-compute")


def test_guess_googleapis_java_conformance_tests():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-conformance-tests")


def test_guess_googleapis_java_container():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-container")


def test_guess_googleapis_java_containeranalysis():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-containeranalysis")


def test_guess_googleapis_java_core():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-core")


def test_guess_googleapis_java_datacatalog():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-datacatalog")


def test_guess_googleapis_java_datalabeling():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-datalabeling")


def test_guess_googleapis_java_dataproc():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-dataproc")


def test_guess_googleapis_java_datastore():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-datastore")


def test_guess_googleapis_java_dialogflow():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-dialogflow")


def test_guess_googleapis_java_dlp():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-dlp")


def test_guess_googleapis_java_dns():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-dns")


def test_guess_googleapis_java_document_ai():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-document-ai")


def test_guess_googleapis_java_errorreporting():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-errorreporting")


def test_guess_googleapis_java_firestore():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-firestore")


def test_guess_googleapis_java_functions():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-functions")


def test_guess_googleapis_java_game_servers():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-game-servers")


def test_guess_googleapis_java_gcloud_maven_plugin():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-gcloud-maven-plugin")


def test_guess_googleapis_java_grafeas():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-grafeas")


def test_guess_googleapis_java_iam():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-iam")


def test_guess_googleapis_java_iamcredentials():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-iamcredentials")


def test_guess_googleapis_java_iot():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-iot")


def test_guess_googleapis_java_irm():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-irm")


def test_guess_googleapis_java_kms():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-kms")


def test_guess_googleapis_java_language():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-language")


def test_guess_googleapis_java_logging():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-logging")


def test_guess_googleapis_java_logging_logback():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-logging-logback")


def test_guess_googleapis_java_mediatranslation():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-mediatranslation")


def test_guess_googleapis_java_memcache():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "java" == guess_language(gh, "googleapis/java-memcache")


def test_guess_googleapis_java_monitoring():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
"googleapis/java-monitoring") def test_guess_googleapis_java_monitoring_dashboards(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-monitoring-dashboards") def test_guess_googleapis_java_notebooks(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-notebooks") def test_guess_googleapis_java_notification(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-notification") def test_guess_googleapis_java_orgpolicy(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-orgpolicy") def test_guess_googleapis_java_os_config(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-os-config") def test_guess_googleapis_java_os_login(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-os-login") def test_guess_googleapis_java_phishingprotection(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-phishingprotection") def test_guess_googleapis_java_pubsub(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-pubsub") def test_guess_googleapis_java_pubsublite(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-pubsublite") def test_guess_googleapis_java_recaptchaenterprise(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-recaptchaenterprise") def test_guess_googleapis_java_recommendations_ai(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-recommendations-ai") def test_guess_googleapis_java_recommender(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-recommender") def test_guess_googleapis_java_redis(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-redis") def test_guess_googleapis_java_resourcemanager(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-resourcemanager") def test_guess_googleapis_java_scheduler(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-scheduler") def test_guess_googleapis_java_secretmanager(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-secretmanager") def test_guess_googleapis_java_securitycenter(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-securitycenter") def test_guess_googleapis_java_securitycenter_settings(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-securitycenter-settings") def test_guess_googleapis_java_servicedirectory(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-servicedirectory") def test_guess_googleapis_java_shared_config(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-shared-config") def test_guess_googleapis_java_shared_dependencies(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-shared-dependencies") def test_guess_googleapis_java_spanner(): gh = 
github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-spanner") def test_guess_googleapis_java_spanner_jdbc(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-spanner-jdbc") def test_guess_googleapis_java_speech(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-speech") def test_guess_googleapis_java_storage(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-storage") def test_guess_googleapis_java_storage_nio(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-storage-nio") def test_guess_googleapis_java_talent(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-talent") def test_guess_googleapis_java_tasks(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-tasks") def test_guess_googleapis_java_texttospeech(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-texttospeech") def test_guess_googleapis_java_trace(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-trace") def test_guess_googleapis_java_translate(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-translate") def test_guess_googleapis_java_video_intelligence(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-video-intelligence") def test_guess_googleapis_java_vision(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-vision") def test_guess_googleapis_java_webrisk(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-webrisk") def test_guess_googleapis_java_websecurityscanner(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "java" == guess_language(gh, "googleapis/java-websecurityscanner") def test_guess_google_cloud_platform_nodejs_docs_samples(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "nodejs" == guess_language(gh, "GoogleCloudPlatform/nodejs-docs-samples") def test_guess_google_cloud_platform_nodejs_getting_started(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "nodejs" == guess_language(gh, "GoogleCloudPlatform/nodejs-getting-started") def test_guess_googleapis_cloud_debug_nodejs(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "nodejs" == guess_language(gh, "googleapis/cloud-debug-nodejs") def test_guess_googleapis_cloud_profiler_nodejs(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "nodejs" == guess_language(gh, "googleapis/cloud-profiler-nodejs") def test_guess_googleapis_cloud_trace_nodejs(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "nodejs" == guess_language(gh, "googleapis/cloud-trace-nodejs") def test_guess_googleapis_code_suggester(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "nodejs" == guess_language(gh, "googleapis/code-suggester") def test_guess_googleapis_gapic_generator_typescript(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "nodejs" == guess_language(gh, "googleapis/gapic-generator-typescript") def test_guess_googleapis_gax_nodejs(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "nodejs" == guess_language(gh, "googleapis/gax-nodejs") def test_guess_googleapis_gaxios(): 
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/gaxios")


def test_guess_googleapis_gcp_metadata():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/gcp-metadata")


def test_guess_googleapis_gcs_resumable_upload():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/gcs-resumable-upload")


def test_guess_googleapis_github_repo_automation():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/github-repo-automation")


def test_guess_googleapis_google_api_nodejs_client():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/google-api-nodejs-client")


def test_guess_googleapis_google_auth_library_nodejs():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/google-auth-library-nodejs")


def test_guess_googleapis_google_cloud_node():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/google-cloud-node")


def test_guess_googleapis_google_p___pem():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/google-p12-pem")


def test_guess_googleapis_jsdoc_fresh():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/jsdoc-fresh")


def test_guess_googleapis_jsdoc_region_tag():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/jsdoc-region-tag")


def test_guess_googleapis_node_gtoken():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/node-gtoken")


def test_guess_googleapis_nodejs_analytics_admin():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-analytics-admin")


def test_guess_googleapis_nodejs_asset():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-asset")


def test_guess_googleapis_nodejs_automl():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-automl")


def test_guess_googleapis_nodejs_bigquery():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-bigquery")


def test_guess_googleapis_nodejs_bigquery_connection():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-bigquery-connection")


def test_guess_googleapis_nodejs_bigquery_data_transfer():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-bigquery-data-transfer")


def test_guess_googleapis_nodejs_bigquery_reservation():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-bigquery-reservation")


def test_guess_googleapis_nodejs_bigquery_storage():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-bigquery-storage")


def test_guess_googleapis_nodejs_bigtable():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-bigtable")


def test_guess_googleapis_nodejs_billing():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-billing")


def test_guess_googleapis_nodejs_billing_budgets():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-billing-budgets")


def test_guess_googleapis_nodejs_cloud_container():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-cloud-container")


def test_guess_googleapis_nodejs_cloudbuild():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-cloudbuild")


def test_guess_googleapis_nodejs_common():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-common")


def test_guess_googleapis_nodejs_compute():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-compute")


def test_guess_googleapis_nodejs_containeranalysis():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-containeranalysis")


def test_guess_googleapis_nodejs_datacatalog():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-datacatalog")


def test_guess_googleapis_nodejs_datalabeling():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-datalabeling")


def test_guess_googleapis_nodejs_dataproc():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-dataproc")


def test_guess_googleapis_nodejs_datastore():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-datastore")


def test_guess_googleapis_nodejs_datastore_kvstore():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-datastore-kvstore")


def test_guess_googleapis_nodejs_datastore_session():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-datastore-session")


def test_guess_googleapis_nodejs_dialogflow():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-dialogflow")


def test_guess_googleapis_nodejs_dlp():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-dlp")


def test_guess_googleapis_nodejs_dns():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-dns")


def test_guess_googleapis_nodejs_document_ai():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-document-ai")


def test_guess_googleapis_nodejs_error_reporting():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-error-reporting")


def test_guess_googleapis_nodejs_firestore():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-firestore")


def test_guess_googleapis_nodejs_firestore_session():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-firestore-session")


def test_guess_googleapis_nodejs_functions():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-functions")


def test_guess_googleapis_nodejs_game_servers():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-game-servers")


def test_guess_googleapis_nodejs_gce_images():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-gce-images")


def test_guess_googleapis_nodejs_googleapis_common():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-googleapis-common")


def test_guess_googleapis_nodejs_grafeas():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-grafeas")


def test_guess_googleapis_nodejs_iot():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-iot")


def test_guess_googleapis_nodejs_irm():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-irm")


def test_guess_googleapis_nodejs_kms():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-kms")


def test_guess_googleapis_nodejs_language():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-language")


def test_guess_googleapis_nodejs_local_auth():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-local-auth")


def test_guess_googleapis_nodejs_logging():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-logging")


def test_guess_googleapis_nodejs_logging_bunyan():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-logging-bunyan")


def test_guess_googleapis_nodejs_logging_winston():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-logging-winston")


def test_guess_googleapis_nodejs_media_translation():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-media-translation")


def test_guess_googleapis_nodejs_memcache():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-memcache")


def test_guess_googleapis_nodejs_monitoring():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-monitoring")


def test_guess_googleapis_nodejs_monitoring_dashboards():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-monitoring-dashboards")


def test_guess_googleapis_nodejs_os_config():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-os-config")


def test_guess_googleapis_nodejs_os_login():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-os-login")


def test_guess_googleapis_nodejs_paginator():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-paginator")


def test_guess_googleapis_nodejs_phishing_protection():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-phishing-protection")


def test_guess_googleapis_nodejs_precise_date():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-precise-date")


def test_guess_googleapis_nodejs_projectify():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-projectify")


def test_guess_googleapis_nodejs_promisify():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-promisify")


def test_guess_googleapis_nodejs_proto_files():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-proto-files")


def test_guess_googleapis_nodejs_pubsub():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-pubsub")


def test_guess_googleapis_nodejs_rcloadenv():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-rcloadenv")


def test_guess_googleapis_nodejs_recaptcha_enterprise():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-recaptcha-enterprise")


def test_guess_googleapis_nodejs_recommender():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-recommender")


def test_guess_googleapis_nodejs_redis():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-redis")


def test_guess_googleapis_nodejs_resource():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-resource")


def test_guess_googleapis_nodejs_scheduler():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-scheduler")


def test_guess_googleapis_nodejs_secret_manager():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-secret-manager")


def test_guess_googleapis_nodejs_security_center():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-security-center")


def test_guess_googleapis_nodejs_service_directory():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-service-directory")


def test_guess_googleapis_nodejs_spanner():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-spanner")


def test_guess_googleapis_nodejs_speech():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-speech")


def test_guess_googleapis_nodejs_storage():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-storage")


def test_guess_googleapis_nodejs_talent():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-talent")


def test_guess_googleapis_nodejs_tasks():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-tasks")


def test_guess_googleapis_nodejs_text_to_speech():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-text-to-speech")


def test_guess_googleapis_nodejs_translate():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-translate")


def test_guess_googleapis_nodejs_video_intelligence():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-video-intelligence")


def test_guess_googleapis_nodejs_vision():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-vision")


def test_guess_googleapis_nodejs_web_risk():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/nodejs-web-risk")


def test_guess_googleapis_release_please():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/release-please")


def test_guess_googleapis_repo_automation_bots():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/repo-automation-bots")


def test_guess_googleapis_sloth():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/sloth")


def test_guess_googleapis_teeny_request():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "nodejs" == guess_language(gh, "googleapis/teeny-request")


def test_guess_google_cloud_platform_getting_started_php():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "GoogleCloudPlatform/getting-started-php")


def test_guess_google_cloud_platform_php_docs_samples():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "GoogleCloudPlatform/php-docs-samples")


def test_guess_googleapis_gax_php():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/gax-php")


def test_guess_googleapis_google_api_php_client():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-api-php-client")


def test_guess_googleapis_google_api_php_client_services():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-api-php-client-services")


def test_guess_googleapis_google_auth_library_php():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-auth-library-php")


def test_guess_googleapis_google_cloud_php():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php")


def test_guess_googleapis_google_cloud_php_asset():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-asset")


def test_guess_googleapis_google_cloud_php_automl():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-automl")


def test_guess_googleapis_google_cloud_php_bigquery():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-bigquery")


def test_guess_googleapis_google_cloud_php_bigquerydatatransfer():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(
        gh, "googleapis/google-cloud-php-bigquerydatatransfer"
    )


def test_guess_googleapis_google_cloud_php_bigtable():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-bigtable")


def test_guess_googleapis_google_cloud_php_common_protos():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-common-protos")


def test_guess_googleapis_google_cloud_php_container():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-container")


def test_guess_googleapis_google_cloud_php_core():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-core")


def test_guess_googleapis_google_cloud_php_dataproc():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-dataproc")


def test_guess_googleapis_google_cloud_php_datastore():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-datastore")


def test_guess_googleapis_google_cloud_php_debugger():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-debugger")


def test_guess_googleapis_google_cloud_php_dialogflow():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-dialogflow")


def test_guess_googleapis_google_cloud_php_dlp():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-dlp")


def test_guess_googleapis_google_cloud_php_errorreporting():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-errorreporting")


def test_guess_googleapis_google_cloud_php_firestore():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-firestore")


def test_guess_googleapis_google_cloud_php_iot():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-iot")


def test_guess_googleapis_google_cloud_php_irm():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-irm")


def test_guess_googleapis_google_cloud_php_kms():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-kms")


def test_guess_googleapis_google_cloud_php_language():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-language")


def test_guess_googleapis_google_cloud_php_logging():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-logging")


def test_guess_googleapis_google_cloud_php_monitoring():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-monitoring")


def test_guess_googleapis_google_cloud_php_oslogin():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-oslogin")


def test_guess_googleapis_google_cloud_php_pubsub():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-pubsub")


def test_guess_googleapis_google_cloud_php_recommender():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-recommender")


def test_guess_googleapis_google_cloud_php_redis():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-redis")


def test_guess_googleapis_google_cloud_php_scheduler():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-scheduler")


def test_guess_googleapis_google_cloud_php_secret_manager():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-secret-manager")


def test_guess_googleapis_google_cloud_php_security_center():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-security-center")


def test_guess_googleapis_google_cloud_php_spanner():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-spanner")


def test_guess_googleapis_google_cloud_php_speech():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
    assert "php" == guess_language(gh, "googleapis/google-cloud-php-speech")


def test_guess_googleapis_google_cloud_php_storage():
    gh = github.GitHub(os.environ["GITHUB_TOKEN"])
guess_language(gh, "googleapis/google-cloud-php-storage") def test_guess_googleapis_google_cloud_php_talent(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "php" == guess_language(gh, "googleapis/google-cloud-php-talent") def test_guess_googleapis_google_cloud_php_tasks(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "php" == guess_language(gh, "googleapis/google-cloud-php-tasks") def test_guess_googleapis_google_cloud_php_text_to_speech(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "php" == guess_language(gh, "googleapis/google-cloud-php-text-to-speech") def test_guess_googleapis_google_cloud_php_trace(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "php" == guess_language(gh, "googleapis/google-cloud-php-trace") def test_guess_googleapis_google_cloud_php_translate(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "php" == guess_language(gh, "googleapis/google-cloud-php-translate") def test_guess_googleapis_google_cloud_php_videointelligence(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "php" == guess_language(gh, "googleapis/google-cloud-php-videointelligence") def test_guess_googleapis_google_cloud_php_vision(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "php" == guess_language(gh, "googleapis/google-cloud-php-vision") def test_guess_googleapis_google_cloud_php_web_risk(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "php" == guess_language(gh, "googleapis/google-cloud-php-web-risk") def test_guess_googleapis_google_cloud_php_web_security_scanner(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "php" == guess_language( gh, "googleapis/google-cloud-php-web-security-scanner" ) def test_guess_google_cloud_platform_getting_started_python(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "GoogleCloudPlatform/getting-started-python") def test_guess_google_cloud_platform_python_docs_samples(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "GoogleCloudPlatform/python-docs-samples") def test_guess_googleapis_dialogflow_python_client_v_(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/dialogflow-python-client-v2") def test_guess_googleapis_doc_pipeline(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/doc-pipeline") def test_guess_googleapis_doc_templates(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/doc-templates") def test_guess_googleapis_gapic_generator_python(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/gapic-generator-python") def test_guess_googleapis_google_api_python_client(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/google-api-python-client") def test_guess_googleapis_google_auth_library_python(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/google-auth-library-python") def test_guess_googleapis_google_auth_library_python_oauthlib(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language( gh, "googleapis/google-auth-library-python-oauthlib" ) def test_guess_googleapis_google_cloud_python(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/google-cloud-python") def test_guess_googleapis_google_cloud_python_happybase(): gh = 
github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/google-cloud-python-happybase") def test_guess_googleapis_google_resumable_media_python(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/google-resumable-media-python") def test_guess_googleapis_proto_plus_python(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/proto-plus-python") def test_guess_googleapis_python_access_approval(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-access-approval") def test_guess_googleapis_python_access_context_manager(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-access-context-manager") def test_guess_googleapis_python_analytics_admin(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-analytics-admin") def test_guess_googleapis_python_api_common_protos(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-api-common-protos") def test_guess_googleapis_python_api_core(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-api-core") def test_guess_googleapis_python_asset(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-asset") def test_guess_googleapis_python_audit_log(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-audit-log") def test_guess_googleapis_python_automl(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-automl") def test_guess_googleapis_python_bigquery(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-bigquery") def test_guess_googleapis_python_bigquery_connection(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-bigquery-connection") def test_guess_googleapis_python_bigquery_datatransfer(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-bigquery-datatransfer") def test_guess_googleapis_python_bigquery_reservation(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-bigquery-reservation") def test_guess_googleapis_python_bigquery_storage(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-bigquery-storage") def test_guess_googleapis_python_bigtable(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-bigtable") def test_guess_googleapis_python_billing(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-billing") def test_guess_googleapis_python_billingbudgets(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-billingbudgets") def test_guess_googleapis_python_cloud_core(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-cloud-core") def test_guess_googleapis_python_cloudbuild(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-cloudbuild") def 
test_guess_googleapis_python_container(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-container") def test_guess_googleapis_python_containeranalysis(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-containeranalysis") def test_guess_googleapis_python_crc__c(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-crc32c") def test_guess_googleapis_python_datacatalog(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-datacatalog") def test_guess_googleapis_python_datalabeling(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-datalabeling") def test_guess_googleapis_python_dataproc(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-dataproc") def test_guess_googleapis_python_datastore(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-datastore") def test_guess_googleapis_python_dlp(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-dlp") def test_guess_googleapis_python_dns(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-dns") def test_guess_googleapis_python_documentai(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-documentai") def test_guess_googleapis_python_error_reporting(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-error-reporting") def test_guess_googleapis_python_firestore(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-firestore") def test_guess_googleapis_python_functions(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-functions") def test_guess_googleapis_python_game_servers(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-game-servers") def test_guess_googleapis_python_grafeas(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-grafeas") def test_guess_googleapis_python_iam(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-iam") def test_guess_googleapis_python_iot(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-iot") def test_guess_googleapis_python_kms(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-kms") def test_guess_googleapis_python_language(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-language") def test_guess_googleapis_python_logging(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-logging") def test_guess_googleapis_python_media_translation(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-media-translation") def test_guess_googleapis_python_memcache(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-memcache") def 
test_guess_googleapis_python_monitoring(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-monitoring") def test_guess_googleapis_python_monitoring_dashboards(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-monitoring-dashboards") def test_guess_googleapis_python_ndb(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-ndb") def test_guess_googleapis_python_notebooks(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-notebooks") def test_guess_googleapis_python_org_policy(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-org-policy") def test_guess_googleapis_python_os_config(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-os-config") def test_guess_googleapis_python_oslogin(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-oslogin") def test_guess_googleapis_python_phishingprotection(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-phishingprotection") def test_guess_googleapis_python_pubsub(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-pubsub") def test_guess_googleapis_python_recaptcha_enterprise(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-recaptcha-enterprise") def test_guess_googleapis_python_recommendations_ai(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-recommendations-ai") def test_guess_googleapis_python_recommender(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-recommender") def test_guess_googleapis_python_redis(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-redis") def test_guess_googleapis_python_resource_manager(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-resource-manager") def test_guess_googleapis_python_runtimeconfig(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-runtimeconfig") def test_guess_googleapis_python_scheduler(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-scheduler") def test_guess_googleapis_python_secret_manager(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-secret-manager") def test_guess_googleapis_python_securitycenter(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-securitycenter") def test_guess_googleapis_python_service_directory(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-service-directory") def test_guess_googleapis_python_spanner(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-spanner") def test_guess_googleapis_python_spanner_django(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-spanner-django") def 
test_guess_googleapis_python_speech(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-speech") def test_guess_googleapis_python_storage(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-storage") def test_guess_googleapis_python_talent(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-talent") def test_guess_googleapis_python_tasks(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-tasks") def test_guess_googleapis_python_test_utils(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-test-utils") def test_guess_googleapis_python_texttospeech(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-texttospeech") def test_guess_googleapis_python_trace(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-trace") def test_guess_googleapis_python_translate(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-translate") def test_guess_googleapis_python_videointelligence(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-videointelligence") def test_guess_googleapis_python_vision(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-vision") def test_guess_googleapis_python_webrisk(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-webrisk") def test_guess_googleapis_python_websecurityscanner(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/python-websecurityscanner") def test_guess_googleapis_sample_tester(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/sample-tester") def test_guess_googleapis_docuploader(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/docuploader") def test_guess_googleapis_releasetool(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python" == guess_language(gh, "googleapis/releasetool") def test_guess_googleapis_synthtool(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "python_tool" == guess_language(gh, "googleapis/synthtool") def test_guess_google_cloud_platform_getting_started_ruby(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "ruby" == guess_language(gh, "GoogleCloudPlatform/getting-started-ruby") def test_guess_google_cloud_platform_ruby_docs_samples(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "ruby" == guess_language(gh, "GoogleCloudPlatform/ruby-docs-samples") def test_guess_googleapis_discovery_artifact_manager(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "ruby" == guess_language(gh, "googleapis/discovery-artifact-manager") def test_guess_googleapis_gapic_generator_ruby(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "ruby" == guess_language(gh, "googleapis/gapic-generator-ruby") def test_guess_googleapis_gax_ruby(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "ruby" == guess_language(gh, "googleapis/gax-ruby") def test_guess_googleapis_google_api_ruby_client(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "ruby" == guess_language(gh, 
"googleapis/google-api-ruby-client") def test_guess_googleapis_google_auth_library_ruby(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "ruby" == guess_language(gh, "googleapis/google-auth-library-ruby") def test_guess_googleapis_google_cloud_ruby(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "ruby" == guess_language(gh, "googleapis/google-cloud-ruby") def test_guess_googleapis_ruby_spanner_activerecord(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "ruby" == guess_language(gh, "googleapis/ruby-spanner-activerecord") def test_guess_googleapis_ruby_style(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "ruby" == guess_language(gh, "googleapis/ruby-style") def test_guess_googleapis_signet(): gh = github.GitHub(os.environ["GITHUB_TOKEN"]) assert "ruby" == guess_language(gh, "googleapis/signet")
{ "content_hash": "aa904a95fca948d278d315ba3e9b3a8e", "timestamp": "", "source": "github", "line_count": 1852, "max_line_length": 88, "avg_line_length": 35.05453563714903, "alnum_prop": 0.7112336532092851, "repo_name": "googleapis/releasetool", "id": "b8144b493d8f5b5c15c4cb291d2d3bc95073effb", "size": "65497", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/common_test.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Jinja", "bytes": "711" }, { "name": "Python", "bytes": "268302" }, { "name": "Shell", "bytes": "8013" } ], "symlink_target": "" }
from setuptools import setup, find_packages

setup(
    name="livereload_server",
    version='0.1',
    description="A livereloading HTTP server for static files",
    keywords='http,server,livereload',
    author='Prashanth Ellina',
    author_email="Use the github issues",
    url="https://github.com/prashanthellina/livereload_server",
    license='MIT License',
    install_requires=[
        'tornado',
        'watchdog',
    ],
    package_dir={'livereload_server': 'livereload_server'},
    packages=find_packages('.'),
    include_package_data=True,
    entry_points={
        'console_scripts': [
            'livereload_server = livereload_server:main',
        ],
    },
)
{ "content_hash": "888ad22729bece03806f81758967cfb6", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 63, "avg_line_length": 27.6, "alnum_prop": 0.6347826086956522, "repo_name": "prashanthellina/livereload_server", "id": "c5f8c54a0768e5fa763851ad18b9798d554b39bb", "size": "690", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "setup.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "4174" } ], "symlink_target": "" }
from msrest.serialization import Model


class EventTypesListResult(Model):
    """Result of the List Event Types operation.

    :param value: A collection of event types
    :type value: list of :class:`EventType
     <azure.mgmt.eventgrid.models.EventType>`
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[EventType]'},
    }

    def __init__(self, value=None):
        self.value = value
{ "content_hash": "199d362745f128151697bf94e8539dbd", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 57, "avg_line_length": 24.764705882352942, "alnum_prop": 0.6294536817102138, "repo_name": "samedder/azure-cli", "id": "f6964bb6970fec16e578cef9d1c16c54f82caafa", "size": "1110", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/command_modules/azure-cli-eventgrid/azure/cli/command_modules/eventgrid/sdk/models/event_types_list_result.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "11279" }, { "name": "C++", "bytes": "275" }, { "name": "JavaScript", "bytes": "380" }, { "name": "Python", "bytes": "5627973" }, { "name": "Shell", "bytes": "25031" } ], "symlink_target": "" }
from parser.Parser import Parser, ParserUtils
from schema.PgSequence import PgSequence


class AlterSequenceParser(object):

    @staticmethod
    def parse(database, statement):
        parser = Parser(statement)
        parser.expect("ALTER", "SEQUENCE")

        sequenceName = parser.parseIdentifier()
        schemaName = ParserUtils.getSchemaName(sequenceName, database)
        schema = database.getSchema(schemaName)

        if schema is None:
            raise Exception("CannotFindSchema")

        objectName = ParserUtils.getObjectName(sequenceName)
        # Use .get() so a missing sequence yields None instead of a KeyError,
        # which makes the explicit check below actually reachable.
        sequence = schema.sequences.get(objectName)

        if sequence is None:
            raise Exception(
                "Cannot find sequence '%s' for statement '%s'. Missing CREATE SEQUENCE?"
                % (sequenceName, statement))

        while not parser.expectOptional(";"):
            if parser.expectOptional("OWNED", "BY"):
                if parser.expectOptional("NONE"):
                    sequence.ownedBy = None
                else:
                    sequence.ownedBy = parser.getExpression()
            else:
                parser.throwUnsupportedCommand()
{ "content_hash": "5163042a1417e8ec3bfa3183e8d18e02", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 129, "avg_line_length": 34.9375, "alnum_prop": 0.6332737030411449, "repo_name": "Dancer3809/PgDumpLoader", "id": "800d5c1a214d88993b86cb8852f5231f6dd59e7d", "size": "1118", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "PgDumpLoader/parser/AlterSequenceParser.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "69922" } ], "symlink_target": "" }