hexsha stringlengths 40 40 | size int64 3 1.03M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 972 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 972 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 972 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 3 1.03M | avg_line_length float64 1.13 941k | max_line_length int64 2 941k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1bb9812fa74884455a49dc34a4482de30e18f910 | 212 | py | Python | dataset/__init__.py | A-ZHANG1/PSENet | 7e6c93f747288341a1d8409835e8de88bdddf603 | [
"Apache-2.0"
] | 2 | 2019-07-11T13:57:53.000Z | 2019-07-31T02:04:52.000Z | dataset/__init__.py | XueKX/PSENet_python3 | 966e0749032b1423f9904d03b32cdbb9d69f8554 | [
"Apache-2.0"
] | 1 | 2019-06-14T09:24:27.000Z | 2019-08-20T01:27:17.000Z | dataset/__init__.py | XueKX/PSENet_python3 | 966e0749032b1423f9904d03b32cdbb9d69f8554 | [
"Apache-2.0"
] | 1 | 2019-07-31T02:04:54.000Z | 2019-07-31T02:04:54.000Z | from dataset.icdar2015_loader import IC15Loader
from dataset.icdar2015_test_loader import IC15TestLoader
from dataset.ctw1500_loader import CTW1500Loader
from dataset.ctw1500_test_loader import CTW1500TestLoader | 42.4 | 57 | 0.90566 |
7aa19fee3d60ea8d9d4b69a55cfc89895c5d22d8 | 503 | py | Python | phiinsta/wsgi.py | denn-is-njeruh/Instaclone | a2f3c36822c419bfeadc56adccce00ce77e1d16e | [
"MIT"
] | null | null | null | phiinsta/wsgi.py | denn-is-njeruh/Instaclone | a2f3c36822c419bfeadc56adccce00ce77e1d16e | [
"MIT"
] | null | null | null | phiinsta/wsgi.py | denn-is-njeruh/Instaclone | a2f3c36822c419bfeadc56adccce00ce77e1d16e | [
"MIT"
] | null | null | null | """
WSGI config for phiinsta project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/
"""
import os
from dotenv import load_dotenv
from django.core.wsgi import get_wsgi_application
load_dotenv(os.path.join(os.path.dirname(os.path.dirname(__file__)), '.env'))
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'phiinsta.settings')
application = get_wsgi_application()
| 25.15 | 78 | 0.7833 |
2cea83e4025138e8d29b797811956344184332bb | 3,290 | py | Python | Demo/scripts/morse.py | 1byte2bytes/cpython | 7fbaeb819ca7b20dca048217ff585ec195e999ec | [
"Unlicense",
"TCL",
"DOC",
"AAL",
"X11"
] | 5 | 2022-03-26T21:53:36.000Z | 2022-03-30T21:47:20.000Z | Demo/scripts/morse.py | 1byte2bytes/cpython | 7fbaeb819ca7b20dca048217ff585ec195e999ec | [
"Unlicense",
"TCL",
"DOC",
"AAL",
"X11"
] | 6 | 2020-11-18T15:48:14.000Z | 2021-05-03T21:20:50.000Z | Demo/scripts/morse.py | 1byte2bytes/cpython | 7fbaeb819ca7b20dca048217ff585ec195e999ec | [
"Unlicense",
"TCL",
"DOC",
"AAL",
"X11"
] | 2 | 2015-07-16T08:14:13.000Z | 2022-03-27T01:55:17.000Z | # DAH should be three DOTs.
# Space between DOTs and DAHs should be one DOT.
# Space between two letters should be one DAH.
# Space between two words should be DOT DAH DAH.
import sys, math, audiodev
DOT = 30
DAH = 3 * DOT
OCTAVE = 2 # 1 == 441 Hz, 2 == 882 Hz, ...
morsetab = {
'A': '.-', 'a': '.-',
'B': '-...', 'b': '-...',
'C': '-.-.', 'c': '-.-.',
'D': '-..', 'd': '-..',
'E': '.', 'e': '.',
'F': '..-.', 'f': '..-.',
'G': '--.', 'g': '--.',
'H': '....', 'h': '....',
'I': '..', 'i': '..',
'J': '.---', 'j': '.---',
'K': '-.-', 'k': '-.-',
'L': '.-..', 'l': '.-..',
'M': '--', 'm': '--',
'N': '-.', 'n': '-.',
'O': '---', 'o': '---',
'P': '.--.', 'p': '.--.',
'Q': '--.-', 'q': '--.-',
'R': '.-.', 'r': '.-.',
'S': '...', 's': '...',
'T': '-', 't': '-',
'U': '..-', 'u': '..-',
'V': '...-', 'v': '...-',
'W': '.--', 'w': '.--',
'X': '-..-', 'x': '-..-',
'Y': '-.--', 'y': '-.--',
'Z': '--..', 'z': '--..',
'0': '-----',
'1': '.----',
'2': '..---',
'3': '...--',
'4': '....-',
'5': '.....',
'6': '-....',
'7': '--...',
'8': '---..',
'9': '----.',
',': '--..--',
'.': '.-.-.-',
'?': '..--..',
';': '-.-.-.',
':': '---...',
"'": '.----.',
'-': '-....-',
'/': '-..-.',
'(': '-.--.-',
')': '-.--.-',
'_': '..--.-',
' ': ' '
}
# If we play at 44.1 kHz (which we do), then if we produce one sine
# wave in 100 samples, we get a tone of 441 Hz. If we produce two
# sine waves in these 100 samples, we get a tone of 882 Hz. 882 Hz
# appears to be a nice one for playing morse code.
def mkwave(octave):
global sinewave, nowave
sinewave = ''
for i in range(100):
val = int(math.sin(math.pi * float(i) * octave / 50.0) * 30000)
sinewave = sinewave + chr((val >> 8) & 255) + chr(val & 255)
nowave = '\0' * 200
mkwave(OCTAVE)
def main():
import getopt, string
try:
opts, args = getopt.getopt(sys.argv[1:], 'o:p:')
except getopt.error:
sys.stderr.write('Usage ' + sys.argv[0] +
' [ -o outfile ] [ args ] ...\n')
sys.exit(1)
dev = None
for o, a in opts:
if o == '-o':
import aifc
dev = aifc.open(a, 'w')
dev.setframerate(44100)
dev.setsampwidth(2)
dev.setnchannels(1)
if o == '-p':
mkwave(string.atoi(a))
if not dev:
import audiodev
dev = audiodev.AudioDev()
dev.setoutrate(44100)
dev.setsampwidth(2)
dev.setnchannels(1)
dev.close = dev.stop
dev.writeframesraw = dev.writeframes
if args:
line = string.join(args)
else:
line = sys.stdin.readline()
while line:
mline = morse(line)
play(mline, dev)
if hasattr(dev, 'wait'):
dev.wait()
if not args:
line = sys.stdin.readline()
else:
line = ''
dev.close()
# Convert a string to morse code with \001 between the characters in
# the string.
def morse(line):
res = ''
for c in line:
try:
res = res + morsetab[c] + '\001'
except KeyError:
pass
return res
# Play a line of morse code.
def play(line, dev):
for c in line:
if c == '.':
sine(dev, DOT)
elif c == '-':
sine(dev, DAH)
else: # space
pause(dev, DAH + DOT)
pause(dev, DOT)
def sine(dev, length):
for i in range(length):
dev.writeframesraw(sinewave)
def pause(dev, length):
for i in range(length):
dev.writeframesraw(nowave)
if __name__ == '__main__' or sys.argv[0] == __name__:
main()
| 21.933333 | 68 | 0.459574 |
a08546a40801ffb454cd013dba61150c17a5f401 | 30,347 | py | Python | manila/tests/api/v2/test_share_networks.py | snpd25/manila | 9cf435c7f86a7b79e01af7b8bc88cd619e34cab4 | [
"Apache-2.0"
] | 3 | 2016-06-06T13:05:00.000Z | 2021-05-05T04:29:24.000Z | manila/tests/api/v2/test_share_networks.py | snpd25/manila | 9cf435c7f86a7b79e01af7b8bc88cd619e34cab4 | [
"Apache-2.0"
] | 1 | 2021-03-31T19:40:52.000Z | 2021-03-31T19:40:52.000Z | manila/tests/api/v2/test_share_networks.py | snpd25/manila | 9cf435c7f86a7b79e01af7b8bc88cd619e34cab4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2014 NetApp
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt
import mock
from oslo_db import exception as db_exception
from oslo_utils import timeutils
from six.moves.urllib import parse
from webob import exc as webob_exc
from manila.api.openstack import api_version_request as api_version
from manila.api.v2 import share_networks
from manila.db import api as db_api
from manila import exception
from manila import quota
from manila import test
from manila.tests.api import fakes
fake_share_network = {
'id': 'fake network id',
'project_id': 'fake project',
'created_at': timeutils.parse_strtime('2002-02-02', fmt="%Y-%m-%d"),
'updated_at': None,
'neutron_net_id': 'fake net id',
'neutron_subnet_id': 'fake subnet id',
'network_type': 'vlan',
'segmentation_id': 1000,
'cidr': '10.0.0.0/24',
'ip_version': 4,
'name': 'fake name',
'description': 'fake description',
'share_servers': [],
'security_services': []
}
fake_share_network_shortened = {
'id': 'fake network id',
'name': 'fake name',
}
fake_share_network_with_ss = {
'id': 'sn-id',
'project_id': 'fake',
'created_at': timeutils.parse_strtime('2001-01-01', fmt="%Y-%m-%d"),
'updated_at': None,
'neutron_net_id': '1111',
'neutron_subnet_id': '2222',
'network_type': 'local',
'segmentation_id': 2000,
'cidr': '8.0.0.0/12',
'ip_version': 6,
'name': 'test-sn',
'description': 'fake description',
'share_servers': [],
'security_services': [{'id': 'fake-ss-id'}]
}
fake_sn_with_ss_shortened = {
'id': 'sn-id',
'name': 'test-sn',
}
QUOTAS = quota.QUOTAS
@ddt.ddt
class ShareNetworkAPITest(test.TestCase):
def setUp(self):
super(ShareNetworkAPITest, self).setUp()
self.controller = share_networks.ShareNetworkController()
self.req = fakes.HTTPRequest.blank('/share-networks')
self.body = {share_networks.RESOURCE_NAME: {'name': 'fake name'}}
self.context = self.req.environ['manila.context']
def _check_share_network_view_shortened(self, view, share_nw):
self.assertEqual(share_nw['id'], view['id'])
self.assertEqual(share_nw['name'], view['name'])
def _check_share_network_view(self, view, share_nw):
self.assertEqual(share_nw['id'], view['id'])
self.assertEqual(share_nw['project_id'], view['project_id'])
self.assertEqual(share_nw['created_at'], view['created_at'])
self.assertEqual(share_nw['updated_at'], view['updated_at'])
self.assertEqual(share_nw['neutron_net_id'],
view['neutron_net_id'])
self.assertEqual(share_nw['neutron_subnet_id'],
view['neutron_subnet_id'])
self.assertEqual(share_nw['network_type'], view['network_type'])
self.assertEqual(share_nw['segmentation_id'],
view['segmentation_id'])
self.assertEqual(share_nw['cidr'], view['cidr'])
self.assertEqual(share_nw['ip_version'], view['ip_version'])
self.assertEqual(share_nw['name'], view['name'])
self.assertEqual(share_nw['description'], view['description'])
self.assertEqual(share_nw['created_at'], view['created_at'])
self.assertEqual(share_nw['updated_at'], view['updated_at'])
self.assertNotIn('shares', view)
self.assertNotIn('network_allocations', view)
self.assertNotIn('security_services', view)
@ddt.data(
{'neutron_net_id': 'fake_neutron_net_id'},
{'neutron_subnet_id': 'fake_neutron_subnet_id'},
{'neutron_net_id': 'fake', 'neutron_subnet_id': 'fake'})
def test_create_valid_cases(self, data):
data.update({'user_id': 'fake_user_id'})
body = {share_networks.RESOURCE_NAME: data}
result = self.controller.create(self.req, body)
data.pop('user_id', None)
for k, v in data.items():
self.assertIn(data[k], result['share_network'][k])
@ddt.data(
{'nova_net_id': 'foo', 'neutron_net_id': 'bar'},
{'nova_net_id': 'foo', 'neutron_subnet_id': 'quuz'},
{'nova_net_id': 'foo', 'neutron_net_id': 'bar',
'neutron_subnet_id': 'quuz'},
{'nova_net_id': 'fake_nova_net_id'})
def test_create_invalid_cases(self, data):
data.update({'user_id': 'fake_user_id'})
body = {share_networks.RESOURCE_NAME: data}
self.assertRaises(
webob_exc.HTTPBadRequest, self.controller.create, self.req, body)
@ddt.data(
{'neutron_net_id': 'fake_neutron_net_id'},
{'neutron_subnet_id': 'fake_neutron_subnet_id'},
{'neutron_net_id': 'fake', 'neutron_subnet_id': 'fake'})
def test_update_valid_cases(self, data):
body = {share_networks.RESOURCE_NAME: {'user_id': 'fake_user'}}
created = self.controller.create(self.req, body)
body = {share_networks.RESOURCE_NAME: data}
result = self.controller.update(
self.req, created['share_network']['id'], body)
for k, v in data.items():
self.assertIn(data[k], result['share_network'][k])
self._check_share_network_view(
result[share_networks.RESOURCE_NAME],
result['share_network'])
@ddt.data(
{'nova_net_id': 'foo', 'neutron_net_id': 'bar'},
{'nova_net_id': 'foo', 'neutron_subnet_id': 'quuz'},
{'nova_net_id': 'foo', 'neutron_net_id': 'bar',
'neutron_subnet_id': 'quuz'},
{'nova_net_id': 'fake_nova_net_id'},
)
def test_update_invalid_cases(self, data):
body = {share_networks.RESOURCE_NAME: {'user_id': 'fake_user'}}
created = self.controller.create(self.req, body)
body = {share_networks.RESOURCE_NAME: data}
self.assertRaises(
webob_exc.HTTPBadRequest,
self.controller.update,
self.req, created['share_network']['id'], body)
def test_create_nominal(self):
with mock.patch.object(db_api,
'share_network_create',
mock.Mock(return_value=fake_share_network)):
result = self.controller.create(self.req, self.body)
db_api.share_network_create.assert_called_once_with(
self.req.environ['manila.context'],
self.body[share_networks.RESOURCE_NAME])
self._check_share_network_view(
result[share_networks.RESOURCE_NAME],
fake_share_network)
def test_create_db_api_exception(self):
with mock.patch.object(db_api,
'share_network_create',
mock.Mock(side_effect=db_exception.DBError)):
self.assertRaises(webob_exc.HTTPBadRequest,
self.controller.create,
self.req,
self.body)
def test_create_wrong_body(self):
body = None
self.assertRaises(webob_exc.HTTPUnprocessableEntity,
self.controller.create,
self.req,
body)
def test_delete_nominal(self):
share_nw = fake_share_network.copy()
share_nw['share_servers'] = ['foo', 'bar']
self.mock_object(db_api, 'share_network_get',
mock.Mock(return_value=share_nw))
self.mock_object(db_api, 'share_instances_get_all_by_share_network',
mock.Mock(return_value=[]))
self.mock_object(self.controller.share_rpcapi, 'delete_share_server')
self.mock_object(db_api, 'share_network_delete')
self.controller.delete(self.req, share_nw['id'])
db_api.share_network_get.assert_called_once_with(
self.req.environ['manila.context'], share_nw['id'])
(db_api.share_instances_get_all_by_share_network.
assert_called_once_with(self.req.environ['manila.context'],
share_nw['id']))
self.controller.share_rpcapi.delete_share_server.assert_has_calls([
mock.call(self.req.environ['manila.context'], 'foo'),
mock.call(self.req.environ['manila.context'], 'bar')])
db_api.share_network_delete.assert_called_once_with(
self.req.environ['manila.context'], share_nw['id'])
def test_delete_not_found(self):
share_nw = 'fake network id'
self.mock_object(db_api, 'share_network_get',
mock.Mock(side_effect=exception.ShareNetworkNotFound(
share_network_id=share_nw)))
self.assertRaises(webob_exc.HTTPNotFound,
self.controller.delete,
self.req,
share_nw)
def test_quota_delete_reservation_failed(self):
share_nw = fake_share_network.copy()
share_nw['share_servers'] = ['foo', 'bar']
share_nw['user_id'] = 'fake_user_id'
self.mock_object(db_api, 'share_network_get',
mock.Mock(return_value=share_nw))
self.mock_object(db_api, 'share_instances_get_all_by_share_network',
mock.Mock(return_value=[]))
self.mock_object(self.controller.share_rpcapi, 'delete_share_server')
self.mock_object(db_api, 'share_network_delete')
self.mock_object(share_networks.QUOTAS, 'reserve',
mock.Mock(side_effect=Exception))
self.mock_object(share_networks.QUOTAS, 'commit')
self.controller.delete(self.req, share_nw['id'])
db_api.share_network_get.assert_called_once_with(
self.req.environ['manila.context'], share_nw['id'])
(db_api.share_instances_get_all_by_share_network.
assert_called_once_with(self.req.environ['manila.context'],
share_nw['id']))
self.controller.share_rpcapi.delete_share_server.assert_has_calls([
mock.call(self.req.environ['manila.context'], 'foo'),
mock.call(self.req.environ['manila.context'], 'bar')])
db_api.share_network_delete.assert_called_once_with(
self.req.environ['manila.context'], share_nw['id'])
share_networks.QUOTAS.reserve.assert_called_once_with(
self.req.environ['manila.context'],
project_id=share_nw['project_id'],
share_networks=-1,
user_id=share_nw['user_id']
)
self.assertFalse(share_networks.QUOTAS.commit.called)
def test_delete_in_use_by_share(self):
share_nw = fake_share_network.copy()
self.mock_object(db_api, 'share_network_get',
mock.Mock(return_value=share_nw))
self.mock_object(db_api, 'share_instances_get_all_by_share_network',
mock.Mock(return_value=['foo', 'bar']))
self.assertRaises(webob_exc.HTTPConflict,
self.controller.delete,
self.req,
share_nw['id'])
db_api.share_network_get.assert_called_once_with(
self.req.environ['manila.context'], share_nw['id'])
(db_api.share_instances_get_all_by_share_network.
assert_called_once_with(self.req.environ['manila.context'],
share_nw['id']))
def test_delete_in_use_by_share_group(self):
share_nw = fake_share_network.copy()
self.mock_object(db_api, 'share_network_get',
mock.Mock(return_value=share_nw))
self.mock_object(db_api, 'count_share_groups_in_share_network',
mock.Mock(return_value=2))
self.assertRaises(webob_exc.HTTPConflict,
self.controller.delete,
self.req,
share_nw['id'])
db_api.share_network_get.assert_called_once_with(
self.req.environ['manila.context'], share_nw['id'])
def test_show_nominal(self):
share_nw = 'fake network id'
with mock.patch.object(db_api,
'share_network_get',
mock.Mock(return_value=fake_share_network)):
result = self.controller.show(self.req, share_nw)
db_api.share_network_get.assert_called_once_with(
self.req.environ['manila.context'],
share_nw)
self._check_share_network_view(
result[share_networks.RESOURCE_NAME],
fake_share_network)
def test_show_not_found(self):
share_nw = 'fake network id'
test_exception = exception.ShareNetworkNotFound(
share_network_id=share_nw)
with mock.patch.object(db_api,
'share_network_get',
mock.Mock(side_effect=test_exception)):
self.assertRaises(webob_exc.HTTPNotFound,
self.controller.show,
self.req,
share_nw)
def test_index_no_filters(self):
networks = [fake_share_network]
with mock.patch.object(db_api,
'share_network_get_all_by_project',
mock.Mock(return_value=networks)):
result = self.controller.index(self.req)
db_api.share_network_get_all_by_project.assert_called_once_with(
self.context,
self.context.project_id)
self.assertEqual(1, len(result[share_networks.RESOURCES_NAME]))
self._check_share_network_view_shortened(
result[share_networks.RESOURCES_NAME][0],
fake_share_network_shortened)
def test_index_detailed(self):
networks = [fake_share_network]
with mock.patch.object(db_api,
'share_network_get_all_by_project',
mock.Mock(return_value=networks)):
result = self.controller.detail(self.req)
db_api.share_network_get_all_by_project.assert_called_once_with(
self.context,
self.context.project_id)
self.assertEqual(1, len(result[share_networks.RESOURCES_NAME]))
self._check_share_network_view(
result[share_networks.RESOURCES_NAME][0],
fake_share_network)
@mock.patch.object(db_api, 'share_network_get_all_by_security_service',
mock.Mock())
def test_index_filter_by_security_service(self):
db_api.share_network_get_all_by_security_service.return_value = [
fake_share_network_with_ss]
req = fakes.HTTPRequest.blank(
'/share_networks?security_service_id=fake-ss-id')
result = self.controller.index(req)
(db_api.share_network_get_all_by_security_service.
assert_called_once_with(req.environ['manila.context'],
'fake-ss-id'))
self.assertEqual(1, len(result[share_networks.RESOURCES_NAME]))
self._check_share_network_view_shortened(
result[share_networks.RESOURCES_NAME][0],
fake_sn_with_ss_shortened)
@mock.patch.object(db_api, 'share_network_get_all_by_project', mock.Mock())
def test_index_all_tenants_non_admin_context(self):
req = fakes.HTTPRequest.blank(
'/share_networks?all_tenants=1')
fake_context = req.environ['manila.context']
db_api.share_network_get_all_by_project.return_value = []
self.controller.index(req)
db_api.share_network_get_all_by_project.assert_called_with(
fake_context, fake_context.project_id)
@mock.patch.object(db_api, 'share_network_get_all', mock.Mock())
def test_index_all_tenants_admin_context(self):
db_api.share_network_get_all.return_value = [fake_share_network]
req = fakes.HTTPRequest.blank(
'/share_networks?all_tenants=1',
use_admin_context=True)
result = self.controller.index(req)
db_api.share_network_get_all.assert_called_once_with(
req.environ['manila.context'])
self.assertEqual(1, len(result[share_networks.RESOURCES_NAME]))
self._check_share_network_view_shortened(
result[share_networks.RESOURCES_NAME][0],
fake_share_network_shortened)
@mock.patch.object(db_api, 'share_network_get_all_by_project', mock.Mock())
def test_index_filter_by_project_id_non_admin_context(self):
req = fakes.HTTPRequest.blank(
'/share_networks?project_id=fake project')
fake_context = req.environ['manila.context']
db_api.share_network_get_all_by_project.return_value = []
self.controller.index(req)
db_api.share_network_get_all_by_project.assert_called_with(
fake_context, fake_context.project_id)
@mock.patch.object(db_api, 'share_network_get_all_by_project', mock.Mock())
def test_index_filter_by_project_id_admin_context(self):
db_api.share_network_get_all_by_project.return_value = [
fake_share_network_with_ss
]
req = fakes.HTTPRequest.blank(
'/share_networks?project_id=fake',
use_admin_context=True)
result = self.controller.index(req)
db_api.share_network_get_all_by_project.assert_called_once_with(
req.environ['manila.context'], 'fake')
self.assertEqual(1, len(result[share_networks.RESOURCES_NAME]))
self._check_share_network_view_shortened(
result[share_networks.RESOURCES_NAME][0],
fake_sn_with_ss_shortened)
@mock.patch.object(db_api, 'share_network_get_all_by_security_service',
mock.Mock())
def test_index_filter_by_ss_and_project_id_admin_context(self):
db_api.share_network_get_all_by_security_service.return_value = [
fake_share_network_with_ss
]
req = fakes.HTTPRequest.blank(
'/share_networks?security_service_id=fake-ss-id&project_id=fake',
use_admin_context=True)
result = self.controller.index(req)
(db_api.share_network_get_all_by_security_service.
assert_called_once_with(req.environ['manila.context'],
'fake-ss-id'))
self.assertEqual(1, len(result[share_networks.RESOURCES_NAME]))
self._check_share_network_view_shortened(
result[share_networks.RESOURCES_NAME][0],
fake_sn_with_ss_shortened)
@ddt.data(('name=fo', 0), ('description=d', 0),
('name=foo&description=d', 0),
('name=foo', 1), ('description=ds', 1),
('name~=foo&description~=ds', 2),
('name=foo&description~=ds', 1),
('name~=foo&description=ds', 1))
@ddt.unpack
@mock.patch.object(db_api, 'share_network_get_all_by_project',
mock.Mock())
def test_index_filter_by_name_and_description(
self, filter, share_network_number):
fake_objs = [{'name': 'fo2', 'description': 'd2', 'id': 'fake1'},
{'name': 'foo', 'description': 'ds', 'id': 'fake2'},
{'name': 'foo1', 'description': 'ds1', 'id': 'fake3'}]
db_api.share_network_get_all_by_project.return_value = fake_objs
req = fakes.HTTPRequest.blank(
'/share_networks?' + filter,
use_admin_context=True, version='2.36')
result = self.controller.index(req)
db_api.share_network_get_all_by_project.assert_called_with(
req.environ['manila.context'], self.context.project_id)
self.assertEqual(share_network_number,
len(result[share_networks.RESOURCES_NAME]))
if share_network_number > 0:
self._check_share_network_view_shortened(
result[share_networks.RESOURCES_NAME][0], fake_objs[1])
if share_network_number > 1:
self._check_share_network_view_shortened(
result[share_networks.RESOURCES_NAME][1], fake_objs[2])
@mock.patch.object(db_api, 'share_network_get_all_by_project',
mock.Mock())
def test_index_all_filter_opts(self):
valid_filter_opts = {
'created_before': '2001-02-02',
'created_since': '1999-01-01',
'neutron_net_id': '1111',
'neutron_subnet_id': '2222',
'network_type': 'local',
'segmentation_id': 2000,
'cidr': '8.0.0.0/12',
'ip_version': 6,
'name': 'test-sn'
}
db_api.share_network_get_all_by_project.return_value = [
fake_share_network,
fake_share_network_with_ss]
query_string = '/share-networks?' + parse.urlencode(sorted(
[(k, v) for (k, v) in list(valid_filter_opts.items())]))
for use_admin_context in [True, False]:
req = fakes.HTTPRequest.blank(query_string,
use_admin_context=use_admin_context)
result = self.controller.index(req)
db_api.share_network_get_all_by_project.assert_called_with(
req.environ['manila.context'],
'fake')
self.assertEqual(1, len(result[share_networks.RESOURCES_NAME]))
self._check_share_network_view_shortened(
result[share_networks.RESOURCES_NAME][0],
fake_sn_with_ss_shortened)
@mock.patch.object(db_api, 'share_network_get', mock.Mock())
def test_update_nominal(self):
share_nw = 'fake network id'
db_api.share_network_get.return_value = fake_share_network
body = {share_networks.RESOURCE_NAME: {'name': 'new name'}}
with mock.patch.object(db_api,
'share_network_update',
mock.Mock(return_value=fake_share_network)):
result = self.controller.update(self.req, share_nw, body)
db_api.share_network_update.assert_called_once_with(
self.req.environ['manila.context'],
share_nw,
body[share_networks.RESOURCE_NAME])
self._check_share_network_view(
result[share_networks.RESOURCE_NAME],
fake_share_network)
@mock.patch.object(db_api, 'share_network_get', mock.Mock())
def test_update_not_found(self):
share_nw = 'fake network id'
db_api.share_network_get.side_effect = exception.ShareNetworkNotFound(
share_network_id=share_nw)
self.assertRaises(webob_exc.HTTPNotFound,
self.controller.update,
self.req,
share_nw,
self.body)
@mock.patch.object(db_api, 'share_network_get', mock.Mock())
def test_update_invalid_key_in_use(self):
share_nw = fake_share_network.copy()
share_nw['share_servers'] = [{'id': 1}]
db_api.share_network_get.return_value = share_nw
body = {
share_networks.RESOURCE_NAME: {
'name': 'new name',
'user_id': 'new id',
},
}
self.assertRaises(webob_exc.HTTPForbidden,
self.controller.update,
self.req,
share_nw['id'],
body)
@mock.patch.object(db_api, 'share_network_get', mock.Mock())
@mock.patch.object(db_api, 'share_network_update', mock.Mock())
def test_update_valid_keys_in_use(self):
share_nw = fake_share_network.copy()
share_nw['share_servers'] = [{'id': 1}]
updated_share_nw = share_nw.copy()
updated_share_nw['name'] = 'new name'
updated_share_nw['description'] = 'new description'
db_api.share_network_get.return_value = share_nw
body = {
share_networks.RESOURCE_NAME: {
'name': updated_share_nw['name'],
'description': updated_share_nw['description'],
},
}
self.controller.update(self.req, share_nw['id'], body)
db_api.share_network_get.assert_called_once_with(self.context,
share_nw['id'])
db_api.share_network_update.assert_called_once_with(
self.context, share_nw['id'], body['share_network'])
@mock.patch.object(db_api, 'share_network_get', mock.Mock())
def test_update_db_api_exception(self):
share_nw = 'fake network id'
db_api.share_network_get.return_value = fake_share_network
body = {share_networks.RESOURCE_NAME: {'neutron_subnet_id':
'new subnet'}}
with mock.patch.object(db_api,
'share_network_update',
mock.Mock(side_effect=db_exception.DBError)):
self.assertRaises(webob_exc.HTTPBadRequest,
self.controller.update,
self.req,
share_nw,
body)
@ddt.data(*set(("1.0", "2.25", "2.26", api_version._MAX_API_VERSION)))
def test_action_add_security_service(self, microversion):
share_network_id = 'fake network id'
security_service_id = 'fake ss id'
body = {'add_security_service': {'security_service_id':
security_service_id}}
req = fakes.HTTPRequest.blank('/share-networks', version=microversion)
with mock.patch.object(self.controller, '_add_security_service',
mock.Mock()):
self.controller.action(req, share_network_id, body)
self.controller._add_security_service.assert_called_once_with(
req, share_network_id, body['add_security_service'])
@mock.patch.object(db_api, 'share_network_get', mock.Mock())
@mock.patch.object(db_api, 'security_service_get', mock.Mock())
def test_action_add_security_service_conflict(self):
share_network = fake_share_network.copy()
share_network['security_services'] = [{'id': 'security_service_1',
'type': 'ldap'}]
security_service = {'id': ' security_service_2',
'type': 'ldap'}
body = {'add_security_service': {'security_service_id':
security_service['id']}}
db_api.security_service_get.return_value = security_service
db_api.share_network_get.return_value = share_network
with mock.patch.object(share_networks.policy, 'check_policy',
mock.Mock()):
self.assertRaises(webob_exc.HTTPConflict,
self.controller.action,
self.req,
share_network['id'],
body)
db_api.share_network_get.assert_called_once_with(
self.req.environ['manila.context'], share_network['id'])
db_api.security_service_get.assert_called_once_with(
self.req.environ['manila.context'], security_service['id'])
share_networks.policy.check_policy.assert_called_once_with(
self.req.environ['manila.context'],
share_networks.RESOURCE_NAME,
'add_security_service',
)
@ddt.data(*set(("1.0", "2.25", "2.26", api_version._MAX_API_VERSION)))
def test_action_remove_security_service(self, microversion):
share_network_id = 'fake network id'
security_service_id = 'fake ss id'
body = {'remove_security_service': {'security_service_id':
security_service_id}}
req = fakes.HTTPRequest.blank('/share-networks', version=microversion)
with mock.patch.object(self.controller, '_remove_security_service',
mock.Mock()):
self.controller.action(req, share_network_id, body)
self.controller._remove_security_service.assert_called_once_with(
req, share_network_id, body['remove_security_service'])
@mock.patch.object(db_api, 'share_network_get', mock.Mock())
@mock.patch.object(share_networks.policy, 'check_policy', mock.Mock())
def test_action_remove_security_service_forbidden(self):
share_network = fake_share_network.copy()
share_network['share_servers'] = 'fake share server'
db_api.share_network_get.return_value = share_network
body = {
'remove_security_service': {
'security_service_id': 'fake id',
},
}
self.assertRaises(webob_exc.HTTPForbidden,
self.controller.action,
self.req,
share_network['id'],
body)
db_api.share_network_get.assert_called_once_with(
self.req.environ['manila.context'], share_network['id'])
share_networks.policy.check_policy.assert_called_once_with(
self.req.environ['manila.context'],
share_networks.RESOURCE_NAME,
'remove_security_service')
def test_action_bad_request(self):
share_network_id = 'fake network id'
body = {'bad_action': {}}
self.assertRaises(webob_exc.HTTPBadRequest,
self.controller.action,
self.req,
share_network_id,
body)
| 43.539455 | 79 | 0.610999 |
6340d9326852fc234af622542d58a35a22f97474 | 1,136 | py | Python | python_code/wxichat/wxichat.py | helight/helight_code | 67e39a94c9bf9f7ade89664afa5f7887271861b2 | [
"Apache-2.0"
] | 1 | 2017-07-16T14:32:34.000Z | 2017-07-16T14:32:34.000Z | python_code/wxichat/wxichat.py | helight/helight_code | 67e39a94c9bf9f7ade89664afa5f7887271861b2 | [
"Apache-2.0"
] | null | null | null | python_code/wxichat/wxichat.py | helight/helight_code | 67e39a94c9bf9f7ade89664afa5f7887271861b2 | [
"Apache-2.0"
] | null | null | null | # coding=utf8
import itchat
import requests
def get_response2(msg):
apiUrl = 'http://www.tuling123.com/openapi/api'
data = {
'key': 'd0860e4c209a4751815c867d5d37b5fd', # Tuling Key,API的值
'info': msg, # 发出去的消息
'userid': 'helight', # 用户名
}
r = requests.post(apiUrl, data=data).json() # post请求
return r.get('text')
def get_response(msg):
# apiUrl = 'http://openapi.tuling123.com/openapi/api/v2'
data = {
"reqType":0,
"perception": {
"inputText": {
"text": "附近的酒店"
}
},
"userInfo": {
"apiKey": "d0860e4c209a4751815c867d5d37b5fd",
"userId": "helight"
}
}
r = requests.post(apiUrl, data=data).json() # post请求
return r.get('text')
@itchat.msg_register(itchat.content.TEXT) # 用于接收来自朋友间的对话消息
def print_content(msg):
return get_response(msg['Text'])
@itchat.msg_register([itchat.content.TEXT], isGroupChat=True) # handles group-chat text messages
def print_content(msg):
    # NOTE: reuses the name `print_content`, shadowing the friend-chat handler
    # at module level; itchat has already registered both via the decorators.
    return get_response(msg['Text'])
itchat.auto_login(True) # log in to WeChat by scanning the QR code; True caches the session
itchat.run() # start the message loop (blocks until logout)
88e99a92163e9b35236541059eaca7483a0455af | 5,822 | py | Python | assets_datasets/stimuli_f0_labels.py | msaddler/pitchnet | 8e26034be177deff7447ade7f782a4a9581c2188 | [
"MIT"
] | 6 | 2021-12-21T05:38:03.000Z | 2022-03-31T21:05:56.000Z | assets_datasets/stimuli_f0_labels.py | msaddler/pitchnet | 8e26034be177deff7447ade7f782a4a9581c2188 | [
"MIT"
] | null | null | null | assets_datasets/stimuli_f0_labels.py | msaddler/pitchnet | 8e26034be177deff7447ade7f782a4a9581c2188 | [
"MIT"
] | 1 | 2022-03-28T19:33:53.000Z | 2022-03-28T19:33:53.000Z | import sys
import os
import h5py
import glob
import numpy as np
import argparse
from dataset_util import get_f0_bins, f0_to_label, f0_to_octave, f0_to_normalized
from dataset_util import label_to_f0, octave_to_f0, normalized_to_f0
def add_f0_label_to_hdf5(hdf5_filename,
                         source_f0_key,
                         f0_key='f0',
                         f0_label_key='f0_label',
                         f0_octave_key='f0_log2',
                         f0_normal_key='f0_lognormal',
                         f0_label_dtype=np.int64,
                         f0_bin_kwargs=None,
                         f0_octave_kwargs=None,
                         f0_normalization_kwargs=None):
    '''
    Function adds or recomputes f0 labels and normalized values in specified hdf5 file.

    Args
    ----
    hdf5_filename (str): source filename (this hdf5 file will be modified)
    source_f0_key (str): source path to f0 values in the hdf5 dataset
    f0_key (str): hdf5 output path for f0 values (dataset will be added or overwritten)
    f0_label_key (str): output path for f0 labels (dataset will be added or overwritten)
    f0_octave_key (str): output path for f0 octave values (dataset will be added or overwritten)
    f0_normal_key (str): output path for normalized f0 values (note: the normalized
        dataset computation is currently disabled below; the key is only logged)
    f0_label_dtype (np.dtype): datatype for f0 label dataset
    f0_bin_kwargs (dict or None): kwargs for `get_f0_bins()` (parameters for computing f0 label bins)
    f0_octave_kwargs (dict or None): kwargs for `f0_to_octave()` (f0_ref for Hz to octave conversion, default is f0_ref=1.0)
    f0_normalization_kwargs (dict or None): kwargs for `f0_to_normalized()` (currently unused while
        the normalized dataset is disabled; kept for interface compatibility)
    '''
    # Avoid mutable default arguments: normalize None -> fresh empty dict.
    f0_bin_kwargs = {} if f0_bin_kwargs is None else f0_bin_kwargs
    f0_octave_kwargs = {} if f0_octave_kwargs is None else f0_octave_kwargs
    f0_normalization_kwargs = {} if f0_normalization_kwargs is None else f0_normalization_kwargs
    print('[ADDING F0 LABELS]: {}'.format(hdf5_filename))
    print('source_f0_key=`{}`'.format(source_f0_key))
    print('f0_key=`{}`, f0_label_key=`{}`, f0_normal_key=`{}`'.format(f0_key, f0_label_key, f0_normal_key))
    # Context manager ensures the file handle is released even if a
    # conversion step raises (previously the handle leaked on error).
    with h5py.File(hdf5_filename, 'r+') as hdf5_f:
        f0_bins = get_f0_bins(**f0_bin_kwargs)
        # Fixed auxiliary label resolutions: 24/48/96/192/384 bins per octave
        # over the 80 Hz - 1 kHz range.
        f0_bins_024 = get_f0_bins(**{'f0_min': 80., 'f0_max': 1e3, 'binwidth_in_octaves': 1/24})
        f0_bins_048 = get_f0_bins(**{'f0_min': 80., 'f0_max': 1e3, 'binwidth_in_octaves': 1/48})
        f0_bins_096 = get_f0_bins(**{'f0_min': 80., 'f0_max': 1e3, 'binwidth_in_octaves': 1/96})
        f0_bins_192 = get_f0_bins(**{'f0_min': 80., 'f0_max': 1e3, 'binwidth_in_octaves': 1/192})
        f0_bins_384 = get_f0_bins(**{'f0_min': 80., 'f0_max': 1e3, 'binwidth_in_octaves': 1/384})
        # Read the source f0 dataset once and reuse it (previously re-read per key).
        f0_values = hdf5_f[source_f0_key][:]
        output_dict = {
            f0_key: f0_values,
            f0_label_key: f0_to_label(f0_values, f0_bins),
            f0_octave_key: f0_to_octave(f0_values, **f0_octave_kwargs),
            # f0_normal_key: f0_to_normalized(f0_values, **f0_normalization_kwargs),
            'f0_label_024': f0_to_label(f0_values, f0_bins_024),
            'f0_label_048': f0_to_label(f0_values, f0_bins_048),
            'f0_label_096': f0_to_label(f0_values, f0_bins_096),
            'f0_label_192': f0_to_label(f0_values, f0_bins_192),
            'f0_label_384': f0_to_label(f0_values, f0_bins_384),
        }
        for key in output_dict.keys():
            if (key in hdf5_f) and (not key == source_f0_key):
                print('overwriting dataset: {}'.format(key))
                hdf5_f[key][:] = output_dict[key]
            elif (key in hdf5_f) and (key == source_f0_key):
                print('source_f0_key and f0_key are equal: {}'.format(key))
            else:
                print('initializing dataset: {}'.format(key))
                # Label datasets are stored as integers; all others keep the
                # dtype of the computed array.
                if 'label' in key:
                    dtype = f0_label_dtype
                else:
                    dtype = output_dict[key].dtype
                hdf5_f.create_dataset(key, output_dict[key].shape, dtype=dtype, data=output_dict[key])
            print('... key=`{}`, min_value={}, max_value={}'.format(key, np.min(output_dict[key]), np.max(output_dict[key])))
    print('[END]: {}'.format(hdf5_filename))
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Add f0 labels to dataset")
parser.add_argument('-r', '--hdf5_fn_regex', type=str,
help='regexp that globs all hdf5 files to process')
parser.add_argument('-skf', '--source_f0_key', type=str,
help='source path for f0 values')
parser.add_argument('-kf', '--f0_key', type=str, default='f0',
help='destination path for f0 values (if different from source_f0_key)')
parser.add_argument('-kfl', '--f0_label_key', type=str, default='f0_label',
help='destination path for f0 label values')
parser.add_argument('-kfo', '--f0_octave_key', type=str, default='f0_log2',
help='destination path for f0 octave values')
parser.add_argument('-kfn', '--f0_normal_key', type=str, default='f0_lognormal',
help='destination path for f0 normalized values')
args = parser.parse_args()
assert args.hdf5_fn_regex is not None, "-r (--hdf5_fn_regex) is a required argument"
assert args.source_f0_key is not None, "-skf (--source_f0_key) is a required argument"
hdf5_fn_list = sorted(glob.glob(args.hdf5_fn_regex))
for hdf5_filename in hdf5_fn_list:
print('=== {} ==='.format(hdf5_filename))
add_f0_label_to_hdf5(hdf5_filename, args.source_f0_key,
f0_key=args.f0_key,
f0_label_key=args.f0_label_key,
f0_octave_key=args.f0_octave_key,
f0_normal_key=args.f0_normal_key,
f0_label_dtype=np.int64,
f0_bin_kwargs={},
f0_normalization_kwargs={})
| 53.907407 | 121 | 0.63157 |
a07f3ae90bff8d2efcd96bacb0923da3928f0e99 | 811 | py | Python | Week_3/AdvanceDjango/manage.py | girisagar46/DjangoTrainingClass | 373f4151b2ee46ea8f76ffa344603014e87d9764 | [
"MIT"
] | null | null | null | Week_3/AdvanceDjango/manage.py | girisagar46/DjangoTrainingClass | 373f4151b2ee46ea8f76ffa344603014e87d9764 | [
"MIT"
] | null | null | null | Week_3/AdvanceDjango/manage.py | girisagar46/DjangoTrainingClass | 373f4151b2ee46ea8f76ffa344603014e87d9764 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "AdvanceDjango.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
| 35.26087 | 77 | 0.644883 |
614f909f8d8c7493a0a72a4fd6a20cb6caf2ea72 | 8,982 | py | Python | models/model/model_yolov3_x.py | yuanliangxie/YOLOv3_simple_baseline | 325e2963ae770e6f45912f3142941d3bddaf9d6e | [
"Apache-2.0"
] | 1 | 2022-02-26T10:13:08.000Z | 2022-02-26T10:13:08.000Z | models/model/model_yolov3_x.py | yuanliangxie/YOLOv3_simple_baseline | 325e2963ae770e6f45912f3142941d3bddaf9d6e | [
"Apache-2.0"
] | null | null | null | models/model/model_yolov3_x.py | yuanliangxie/YOLOv3_simple_baseline | 325e2963ae770e6f45912f3142941d3bddaf9d6e | [
"Apache-2.0"
] | null | null | null | import torch.nn as nn
import models.head.yolov3_head as yolov3_head
import models.backbone.new_darknet as darknet53
import models.backbone.neck as neck
import models.loss.yolo_loss_x_module as loss
from utils.logger import print_logger
from models.layer.layer_fundation import Conv2dBatchLeaky as Convolutional
import numpy as np
import torch
from utils.utils_select_device import select_device
class yolov3(nn.Module):
	"""YOLOv3 detector: Darknet-53 backbone + neck + detection head.

	When ``target`` is supplied to :meth:`forward`, the loss module returns the
	training loss; otherwise it returns the decoded detections for the three
	strides [32, 16, 8].
	"""

	def __init__(self, config, logger=None, init_weight=True):
		super().__init__()
		self.backbone = darknet53.darknet53()
		self.neck = neck.neck()
		self.head = yolov3_head.yolov3_head(nAnchors=1, nClass=config["yolo"]["classes"])
		self.loss = loss.yolo_loss_module(config, strides=[32, 16, 8])
		# Fall back to a stdout logger when none is supplied.
		# (Fixed: identity comparison `is None` instead of `== None`.)
		if logger is None:
			self.logger = print_logger()
		else:
			self.logger = logger
		if init_weight:
			self.__init_weights()

	def forward(self, input, target=None):
		"""Run the network; returns the loss when target is given, detections otherwise."""
		features = self.backbone(input)
		neck_features = self.neck(features)
		yolo_loss_input = self.head(neck_features)
		loss_or_output = self.loss(yolo_loss_input, target)
		return loss_or_output  # input=416 -> feature map sizes [13, 26, 52]

	def __init_weights(self):
		"""Initialize Conv2d weights with N(0, 0.01) and BatchNorm2d with (1, 0)."""
		for m in self.modules():
			if isinstance(m, nn.Conv2d):
				torch.nn.init.normal_(m.weight.data, 0.0, 0.01)
				if m.bias is not None:
					m.bias.data.zero_()
				self.logger.append("initing {}".format(m))
			elif isinstance(m, nn.BatchNorm2d):
				torch.nn.init.constant_(m.weight.data, 1.0)
				torch.nn.init.constant_(m.bias.data, 0.0)
				self.logger.append("initing {}".format(m))

	def load_darknet_weights(self, weight_file, cutoff=52):
		"""Load pretrained Darknet weights (binary .conv format) into the backbone.

		Format reference:
		https://github.com/ultralytics/yolov3/blob/master/models.py
		The file starts with a 5-int32 header followed by one flat float32
		array; for each Convolutional block the BN parameters (bias, weight,
		running mean, running var) precede the conv kernel weights. Only the
		first ``cutoff`` Convolutional blocks of the backbone are filled.
		"""
		self.logger.append("load darknet weights : "+weight_file)
		with open(weight_file, 'rb') as f:
			_ = np.fromfile(f, dtype=np.int32, count=5)  # skip the header
			weights = np.fromfile(f, dtype=np.float32)
		self.logger.append("weights.shape:{}".format(weights.shape))
		count = 0
		ptr = 0  # read cursor into the flat weight array
		for m in self.backbone.modules():
			if isinstance(m, Convolutional):
				# only initing backbone conv's weights
				if count == cutoff:
					break
				count += 1
				# Grab the Conv2d and BatchNorm2d submodules of this block.
				for sub_m in m.modules():
					if isinstance(sub_m, nn.Conv2d):
						conv_layer = sub_m
					elif isinstance(sub_m, nn.BatchNorm2d):
						bn_layer = sub_m
				# Load BN bias, weight, running mean and running variance.
				num_b = bn_layer.bias.numel()  # Number of biases
				bn_b = torch.from_numpy(weights[ptr:ptr + num_b]).view_as(bn_layer.bias.data)
				bn_layer.bias.data.copy_(bn_b)
				ptr += num_b
				bn_w = torch.from_numpy(weights[ptr:ptr + num_b]).view_as(bn_layer.weight.data)
				bn_layer.weight.data.copy_(bn_w)
				ptr += num_b
				bn_rm = torch.from_numpy(weights[ptr:ptr + num_b]).view_as(bn_layer.running_mean)
				bn_layer.running_mean.data.copy_(bn_rm)
				ptr += num_b
				bn_rv = torch.from_numpy(weights[ptr:ptr + num_b]).view_as(bn_layer.running_var)
				bn_layer.running_var.data.copy_(bn_rv)
				ptr += num_b
				self.logger.append("loading weight {}".format(bn_layer))
				# Load conv. weights (backbone convs carry no bias of their own).
				num_w = conv_layer.weight.numel()
				conv_w = torch.from_numpy(weights[ptr:ptr + num_w]).view_as(conv_layer.weight.data)
				conv_layer.weight.data.copy_(conv_w)
				ptr += num_w
				self.logger.append("loading weight {}".format(conv_layer))
		self.logger.append("ptr:{}".format(ptr))
		# The whole array must have been consumed for a correct conversion.
		if ptr == weights.shape[0]:
			self.logger.append("convert success!")

	def load_darknet_pth_weights(self, pth_file, cutoff=52):
		"""Load pretrained backbone weights from a PyTorch state_dict (.pth) file.

		Relies on the state_dict keys appearing in the same order as the
		backbone's Convolutional blocks ('conv' key first, then the 'bn'
		weight/bias/running_mean/running_var keys).
		"""
		self.logger.append(("load darknet_coco_pth_weights : ", pth_file))
		count = 0
		pretrain_coco_weight_darknet = torch.load(pth_file)
		list_keys = list(pretrain_coco_weight_darknet.keys())
		keys_count = 0
		for m in self.backbone.modules():
			if isinstance(m, Convolutional):
				# only initing backbone conv's weights
				if count == cutoff:
					break
				count += 1
				for sub_m in m.modules():
					if isinstance(sub_m, nn.Conv2d):
						conv_layer = sub_m
					elif isinstance(sub_m, nn.BatchNorm2d):
						bn_layer = sub_m
				# NOTE(review): assumes the .pth key order exactly matches the
				# backbone block order; a mismatched file would mis-assign weights.
				if 'conv' in list_keys[keys_count]:
					weight = pretrain_coco_weight_darknet[list_keys[keys_count]]
					conv_layer.weight.data.copy_(weight)
					keys_count += 1
				if 'bn' in list_keys[keys_count]:
					if "weight" in list_keys[keys_count]:
						weight = pretrain_coco_weight_darknet[list_keys[keys_count]]
						bn_layer.weight.data.copy_(weight)
						keys_count += 1
					if "bias" in list_keys[keys_count]:
						bias = pretrain_coco_weight_darknet[list_keys[keys_count]]
						bn_layer.bias.data.copy_(bias)
						keys_count += 1
					if "running_mean" in list_keys[keys_count]:
						running_mean = pretrain_coco_weight_darknet[list_keys[keys_count]]
						bn_layer.running_mean.data.copy_(running_mean)
						keys_count += 1
					if "running_var" in list_keys[keys_count]:
						running_var = pretrain_coco_weight_darknet[list_keys[keys_count]]
						bn_layer.running_var.data.copy_(running_var)
						keys_count += 1
		self.logger.append("count:{},keys_count:{}".format(count, keys_count))
		# Every key must have been consumed for a correct conversion.
		if keys_count == len(list_keys):
			self.logger.append("convert success!")
#print("convert success!")
if __name__ == '__main__':
	# Smoke test: build the model, load pretrained backbone weights and run a
	# forward pass on a dummy 416x416 input.
	import train.Voc_data_preprocess.params_init_voc as params_init
	config = params_init.TRAINING_PARAMS
	device = select_device(0)
	# Fix all RNG seeds so the printed output below is reproducible.
	torch.cuda.manual_seed_all(1)
	torch.backends.cudnn.deterministic = True
	torch.manual_seed(1)
	np.random.seed(1)
	net = yolov3(config)
	net.to(device)
	# net.cpu()
	# net.count_darknet_count()
	# for idx, m in enumerate(net.backbone.layer[0].modules()):
	# 	print(idx, "->", m)
	# net.backbone.layer[0].parameters()
	#pretrain_coco_weight_darknet = torch.load("darknet53_weights_pytorch.pth")
	net.load_darknet_weights('../../weights/darknet53.conv.74')
	#net.load_darknet_pth_weights(pth_file = "../../weights/darknet53_weights_pytorch.pth")
	net.eval()
	images = torch.ones((1,3,416,416)).to(device)
	yolo_loss_input = net(images)
	print(yolo_loss_input[0].shape)
	print(yolo_loss_input[0])
	# The string below records the expected output of this smoke test.
	"""
	output:
	tensor([ 1.1618e-05, -2.5806e-04, -1.8426e-04, -1.0144e-06, -8.8483e-05,
	        -2.9103e-05, -4.6486e-05, -5.9855e-05, -3.9318e-05, -4.0554e-05,
	        -6.2083e-05,  2.8495e-05, -2.7813e-04], grad_fn=<SliceBackward>)
	"""
| 43.601942 | 103 | 0.571699 |
1c200bec1a191ba75df30f958193bbfbd97fc651 | 2,832 | py | Python | setup.py | OrangutanGaming/Mixer.py | b77030c1cade01f43459001fb4b111ee1cb933f7 | [
"MIT"
] | null | null | null | setup.py | OrangutanGaming/Mixer.py | b77030c1cade01f43459001fb4b111ee1cb933f7 | [
"MIT"
] | null | null | null | setup.py | OrangutanGaming/Mixer.py | b77030c1cade01f43459001fb4b111ee1cb933f7 | [
"MIT"
] | null | null | null | # MIT License
#
# Copyright (c) 2019 Nihaal Sangha (Orangutan)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import re
from setuptools import find_packages, setup
# Runtime dependencies come straight from the pip requirements file.
with open('requirements.txt') as fp:
    requirements = fp.read().splitlines()

# Pull the version string out of the package without importing it.
with open('mixer/__init__.py') as fp:
    version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', fp.read(), re.MULTILINE).group(1)

if not version:
    raise RuntimeError('Version is not set')

if version.endswith(('a', 'b', 'rc')):
    # Append version identifier based on commit count
    try:
        import subprocess

        for args, prefix in ((['git', 'rev-list', '--count', 'HEAD'], ''),
                             (['git', 'rev-parse', '--short', 'HEAD'], '+g')):
            proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out, err = proc.communicate()
            if out:
                version += prefix + out.decode('utf-8').strip()
    except Exception:
        # Best effort only: a missing git binary or repo leaves the version as-is.
        pass

# Long description for PyPI comes from the project README.
with open('README.md') as fp:
    readme = fp.read()

setup(
    name="Mixer.py",
    author="Nihaal Sangha (Orangutan)",
    url="https://github.com/OrangutanGaming/Mixer.py",
    project_urls={
        "Issue tracker": "https://github.com/OrangutanGaming/Mixer.py/issues",
    },
    version=version,
    packages=find_packages(),
    license="MIT",
    description="An async Mixer library",
    long_description=readme,
    long_description_content_type="text/markdown",
    include_package_data=True,
    install_requires=requirements,
    python_requires='>=3.6',
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
    ]
)
| 35.848101 | 99 | 0.66702 |
1d4533bf9b8a0a9296b432f42dd192cdd0a4624f | 2,063 | py | Python | spinup/algos/td3/core.py | mksmsrkn/spinningup_pytorch | 1b1176126f293e44e0c2990cfda409b1e42409c9 | [
"MIT"
] | null | null | null | spinup/algos/td3/core.py | mksmsrkn/spinningup_pytorch | 1b1176126f293e44e0c2990cfda409b1e42409c9 | [
"MIT"
] | null | null | null | spinup/algos/td3/core.py | mksmsrkn/spinningup_pytorch | 1b1176126f293e44e0c2990cfda409b1e42409c9 | [
"MIT"
] | null | null | null | import numpy as np
import torch
from torch import nn
from gym.spaces import Box
class MLP(nn.Module):
    """Simple multi-layer perceptron.

    Args:
        in_dim: size of the input feature vector.
        hidden_sizes: layer widths; the last entry is the output width.
        activation: activation module class inserted after every hidden layer.
        output_activation: optional activation module class for the output
            layer. It is first instantiated as ``output_activation(-1)`` (for
            modules such as ``nn.Softmax`` that require a ``dim`` argument)
            and falls back to a no-argument constructor.
        output_scaler: constant multiplier applied to the network output.
    """

    def __init__(self, in_dim, hidden_sizes=(64,64), activation=nn.Tanh,
                 output_activation=None, output_scaler=1):
        super(MLP, self).__init__()
        self.output_scaler = output_scaler
        layers = []
        prev_h = in_dim
        for h in hidden_sizes[:-1]:
            layers.append(nn.Linear(prev_h, h))
            layers.append(activation())
            prev_h = h
        # Bug fix: use `prev_h` here. The original used the loop variable `h`,
        # which is undefined when hidden_sizes has a single entry (no hidden
        # layers), raising NameError at construction time.
        layers.append(nn.Linear(prev_h, hidden_sizes[-1]))
        if output_activation:
            try:
                out = output_activation(-1)
            except TypeError:
                # Module takes no positional constructor argument (e.g. nn.Tanh).
                out = output_activation()
            layers.append(out)
        self.model = nn.Sequential(*layers)

    def forward(self, x):
        # Squeeze removes the trailing singleton dim of scalar-output heads
        # (e.g. Q-networks producing shape (batch, 1) -> (batch,)).
        return self.model(x).squeeze() * self.output_scaler
# Credit: https://discuss.pytorch.org/t/how-do-i-check-the-number-of-parameters-of-a-model/4325/9
def count_vars(model):
    """Return the number of trainable (requires_grad) parameters in *model*."""
    total = 0
    for param in model.parameters():
        if param.requires_grad:
            total += param.numel()
    return total
"""
Actor-Critics
"""
class ActorCritic(nn.Module):
    """TD3 actor-critic: one policy network plus twin Q-networks (q1, q2).

    assumes action_space is a gym.spaces.Box with symmetric bounds, so that
    ``high[0]`` can scale the tanh policy output -- TODO confirm for
    asymmetric or per-dimension bounds.
    """
    def __init__(self, state_dim, hidden_sizes=(400,300), activation=nn.ReLU,
                 output_activation=nn.Tanh, action_space=None):
        super(ActorCritic, self).__init__()
        assert isinstance(action_space, Box)
        act_dim = action_space.shape[0]
        act_limit = action_space.high[0]
        # Policy maps states to actions scaled by act_limit via output_scaler.
        self.policy = MLP(state_dim, list(hidden_sizes)+[act_dim], activation,
                          output_activation, output_scaler=act_limit)
        # Twin critics take (state, action) concatenated and output a scalar.
        self.q1 = MLP(state_dim + act_dim, list(hidden_sizes)+[1], activation, None)
        self.q2 = MLP(state_dim + act_dim, list(hidden_sizes)+[1], activation, None)
    def forward(self, x, a = None):
        # Without an action, act purely as the policy.
        pi = self.policy(x)
        if a is None:
            return pi
        else:
            # Q-values for the supplied action and for the policy's own action.
            q1 = self.q1(torch.cat([x, a],dim=1))
            q2 = self.q2(torch.cat([x, a],dim=1))
            q1_pi = self.q1(torch.cat([x, pi],dim=1))
            return pi, q1, q2, q1_pi
161e549ad9a260fb517f2109fe75116ad5404978 | 15,039 | py | Python | src/rpdk/core/fragment/generator.py | rachfop/cloudformation-cli | 1a892173137b1eb47cc6c9f99191e4dbe15de4a2 | [
"Apache-2.0"
] | null | null | null | src/rpdk/core/fragment/generator.py | rachfop/cloudformation-cli | 1a892173137b1eb47cc6c9f99191e4dbe15de4a2 | [
"Apache-2.0"
] | null | null | null | src/rpdk/core/fragment/generator.py | rachfop/cloudformation-cli | 1a892173137b1eb47cc6c9f99191e4dbe15de4a2 | [
"Apache-2.0"
] | 1 | 2019-12-04T00:14:25.000Z | 2019-12-04T00:14:25.000Z | """
This class has two responsibilities:
1. generating a sample template fragment so the user has some initial
file fragment as an example.
The method "generate_sample_fragment" will be called as part of the init command.
2. generating schema for provided template fragments.
The method "generate_schema" will be called right before submission.
"""
import json
import logging
import os
from pathlib import Path
import cfnlint.config
import cfnlint.core
import yaml
from rpdk.core.data_loaders import resource_json
from rpdk.core.exceptions import FragmentValidationError
LOG = logging.getLogger(__name__)
FRAGMENT_DIR = "fragments"
SAMPLE_FRAGMENT_OUTPUT = "sample.json"
SCHEMA_NAME = "schema.json"
SAMPLE_FRAGMENT = "../data/examples/module/sample.json"
ALLOWED_EXTENSIONS = {".json", ".yaml", ".yml"}
RESOURCE_LIMIT = 500
OUTPUT_LIMIT = 200
MAPPING_LIMIT = 200
MAPPING_ATTRIBUTE_LIMIT = 200
TEMPLATE_FILE_SIZE_IN_BYTES_LIMIT = 1500000
class TemplateFragment:  # pylint: disable=too-many-instance-attributes
    """Represents a module's template fragment.

    Generates a sample fragment (init command), validates a user-supplied
    fragment against the module restrictions, and derives the module schema
    from it (pre-submission).
    """

    def __init__(self, type_name, root=None):
        self.root = Path(root) if root else Path.cwd()
        self.fragment_dir = self.root / FRAGMENT_DIR
        self.type_name = type_name
        self.resource_limit = RESOURCE_LIMIT
        self.output_limit = OUTPUT_LIMIT
        self.mapping_limit = MAPPING_LIMIT
        self.mapping_attribute_limit = MAPPING_ATTRIBUTE_LIMIT
        self.template_file_size_in_bytes_limit = TEMPLATE_FILE_SIZE_IN_BYTES_LIMIT
        LOG.debug("Fragment directory: %s", self.fragment_dir)

    def generate_schema(self):
        """Derive the module schema from the fragment and write it to schema.json."""
        raw_fragments = self._read_raw_fragments()
        schema = {}
        properties = {}
        schema["typeName"] = self.type_name
        schema["description"] = "Schema for Module Fragment of type " + self.type_name
        schema["properties"] = properties
        schema["additionalProperties"] = True
        if "Parameters" in raw_fragments:
            properties["Parameters"] = self.__build_parameters(raw_fragments)
        properties["Resources"] = self.__build_resources(raw_fragments)
        self.__write_schema(schema)
        return schema

    def validate_fragments(self):
        """
        This method makes sure that the fragments adhere
        to the template fragment restrictions.
        Note: Fn::ImportValue was checked when loading the fragments
        since it can occur anywhere in the template.
        """
        raw_fragments = self._read_raw_fragments()
        self.__validate_file_size_limit()
        self.__validate_resources(raw_fragments)
        self.__validate_parameters(raw_fragments)
        self.__validate_no_transforms_present(raw_fragments)
        self.__validate_outputs(raw_fragments)
        self.__validate_mappings(raw_fragments)
        self.__validate_fragment_thru_cfn_lint(raw_fragments)

    def __validate_fragment_thru_cfn_lint(self, raw_fragments):
        """Run cfn-lint over the fragment and print any warnings/errors."""
        lint_warnings = self.__get_cfn_lint_matches(raw_fragments)
        if not lint_warnings:
            LOG.warning("Module fragment is valid.")
        else:
            LOG.warning(
                "Module fragment is probably valid, but there are "
                "warnings/errors from cfn-lint "
                "(https://github.com/aws-cloudformation/cfn-python-lint):"
            )
            for lint_warning in lint_warnings:
                print(
                    "\t{} (from rule {})".format(
                        lint_warning.message, lint_warning.rule
                    ),
                )

    def __validate_outputs(self, raw_fragments):
        self.__validate_no_exports_present(raw_fragments)
        self.__validate_output_limit(raw_fragments)

    @staticmethod
    def __get_cfn_lint_matches(raw_fragment):
        """Write the fragment to a scratch JSON file and lint it with cfn-lint."""
        filename = "temporary_fragment.json"
        with open(filename, "w") as outfile:
            json.dump(raw_fragment, outfile, indent=4)
        try:
            template = cfnlint.decode.cfn_json.load(filename)
            # Initialize the ruleset to be applied (no overrules, no excludes)
            rules = cfnlint.core.get_rules([], [], [], [], False, [])
            # Default region used by cfn-lint
            regions = ["us-east-1"]
            # Runs Warning and Error rules
            matches = cfnlint.core.run_checks(filename, template, rules, regions)
        finally:
            # Fix: remove the scratch file even when cfn-lint raises;
            # previously the file leaked on any linting exception.
            os.remove(filename)
        return matches

    @staticmethod
    def __validate_no_exports_present(raw_fragments):
        if "Outputs" in raw_fragments:
            for _output_name, output in raw_fragments["Outputs"].items():
                if "Export" in output:
                    raise FragmentValidationError(
                        "Template fragment cannot contain any Export. "
                        "Found an Export statement in Output: " + _output_name
                    )

    def __validate_output_limit(self, raw_fragments):
        if "Outputs" in raw_fragments:
            output_count = len(raw_fragments["Outputs"].items())
            if output_count > self.output_limit:
                raise FragmentValidationError(
                    "The Module template fragment has "
                    + str(output_count)
                    + " outputs but must not exceed the limit of "
                    + str(self.output_limit)
                    + " outputs"
                )

    def __validate_resources(self, raw_fragments):
        if "Resources" not in raw_fragments:
            raise FragmentValidationError(
                "A Module template fragment must have a Resources section"
            )
        self.__validate_resource_limit(raw_fragments)
        for _resource_name, resource in raw_fragments["Resources"].items():
            if "Type" in resource:
                if resource["Type"] == "AWS::CloudFormation::Stack":
                    raise FragmentValidationError(
                        "Template fragment can't contain nested stack."
                    )
                if resource["Type"] == "AWS::CloudFormation::Macro":
                    raise FragmentValidationError(
                        "Template fragment can't contain any macro."
                    )
            elif "Name" in resource:
                if resource["Name"] == "AWS::Include":
                    raise FragmentValidationError(
                        "Template fragment can't use AWS::Include transform."
                    )
                raise FragmentValidationError(
                    "Resource '" + _resource_name + "' is invalid"
                )
            else:
                raise FragmentValidationError(
                    "Resource '" + _resource_name + "' has neither Type nor Name"
                )

    def __validate_resource_limit(self, raw_fragments):
        resource_count = len(raw_fragments["Resources"].items())
        if resource_count > self.resource_limit:
            raise FragmentValidationError(
                "The Module template fragment has "
                + str(resource_count)
                + " resources but must not exceed the limit of "
                + str(self.resource_limit)
                + " resources"
            )

    @staticmethod
    def __validate_parameters(raw_fragments):
        if "Parameters" in raw_fragments:
            for _parameter_name, parameter in raw_fragments["Parameters"].items():
                if "Type" not in parameter:
                    raise FragmentValidationError(
                        "Parameter '" + _parameter_name + "' must have a Type"
                    )

    @staticmethod
    def __validate_no_transforms_present(raw_fragments):
        if "transform" in raw_fragments or "Transform" in raw_fragments:
            raise FragmentValidationError(
                "Template fragment can't contain transform section."
            )
        if "Fn::Transform" in raw_fragments:
            raise FragmentValidationError(
                "Template fragment can't contain any transform."
            )

    def __validate_mappings(self, raw_fragments):
        self.__validate_mapping_limit(raw_fragments)
        self.__validate_mapping_attribute_limit(raw_fragments)

    def __validate_mapping_limit(self, raw_fragments):
        if "Mappings" in raw_fragments:
            mapping_count = len(raw_fragments["Mappings"].items())
            if mapping_count > self.mapping_limit:
                raise FragmentValidationError(
                    "The Module template fragment has "
                    + str(mapping_count)
                    + " mappings but must not exceed the limit of "
                    # Fix: the message previously reported self.output_limit.
                    + str(self.mapping_limit)
                    + " mappings"
                )

    def __validate_mapping_attribute_limit(self, raw_fragments):
        if "Mappings" in raw_fragments:
            for _mapping_name, mapping in raw_fragments["Mappings"].items():
                attribute_count = len(mapping.items())
                if attribute_count > self.mapping_attribute_limit:
                    raise FragmentValidationError(
                        "The mapping "
                        + _mapping_name
                        + " has "
                        + str(attribute_count)
                        + " attributes but must not exceed the limit of "
                        # Fix: the message previously reported self.output_limit.
                        + str(self.mapping_attribute_limit)
                        + " mapping attributes"
                    )

    def __validate_file_size_limit(self):
        total_size = self.__get_template_file_size_in_bytes()
        if total_size > self.template_file_size_in_bytes_limit:
            raise FragmentValidationError(
                "The total file size of the template"
                " fragments exceeds the CloudFormation Template size limit"
            )

    def __get_template_file_size_in_bytes(self):
        return os.stat(self._get_fragment_file()).st_size

    @staticmethod
    def __build_resources(raw_fragments):
        """Build the 'Resources' part of the module schema (pins each resource Type)."""
        raw_resources = {}
        resources = {}
        for resource in raw_fragments["Resources"]:
            raw_resources[resource] = {
                "type": raw_fragments["Resources"][resource]["Type"]
            }
        resources_properties = {}
        for resource in raw_resources:
            type_object = {"type": "object", "properties": {}}
            type_object["properties"]["Type"] = {
                "type": "string",
                "const": raw_resources[resource]["type"],
            }
            type_object["properties"]["Properties"] = {"type": "object"}
            resources_properties[resource] = type_object
        resources["properties"] = resources_properties
        resources["type"] = "object"
        resources["additionalProperties"] = False
        return resources

    @staticmethod
    def __build_parameters(raw_fragments):
        """Build the 'Parameters' part of the module schema from the fragment."""
        raw_parameters = {}
        parameters = {}
        for param in raw_fragments["Parameters"]:
            param_type = raw_fragments["Parameters"][param]["Type"]
            description = raw_fragments["Parameters"][param].get("Description")
            raw_parameters[param] = {
                "type": param_type.lower(),
                "description": description,
            }
        parameter_properties = {}
        for raw_param in raw_parameters:
            description = raw_parameters[raw_param]["description"]
            type_name = "object"
            properties = {"Type": {"type": "string"}}
            required = ["Type"]
            parameter_properties[raw_param] = {
                "type": type_name,
                "properties": properties,
                "required": required,
            }
            # Description is only part of the schema when the fragment has one.
            if description is not None:
                parameter_properties[raw_param]["description"] = description
                properties["Description"] = {"type": "string"}
                required.append("Description")
        parameters["type"] = "object"
        parameters["properties"] = parameter_properties
        return parameters

    def __write_schema(self, schema):
        def _write(f):
            json.dump(schema, f, indent=4)
            f.write("\n")

        self._overwrite(self.root / SCHEMA_NAME, _write)

    def generate_sample_fragment(self):
        """Create the fragments directory and write the example fragment into it."""
        self._create_fragment_directory()
        sample_json = self.__get_sample_fragment_json()

        def _write(f):
            json.dump(sample_json, f, indent=4)
            f.write("\n")

        self._overwrite(self.fragment_dir / SAMPLE_FRAGMENT_OUTPUT, _write)

    @staticmethod
    def __get_sample_fragment_json():
        sample_json = resource_json(__name__, SAMPLE_FRAGMENT)
        return sample_json

    def _create_fragment_directory(self):
        if not os.path.exists(self.fragment_dir):
            os.mkdir(self.fragment_dir)
            print("Directory ", self.fragment_dir, " Created ")
        else:
            print("Directory ", self.fragment_dir, " already exists")

    def _read_raw_fragments(self):
        return self._load_fragment(self._get_fragment_file())

    def _load_fragment(self, fragment_file):
        """Parse the fragment file (JSON or YAML) into a dict, rejecting imports."""
        try:
            with open(fragment_file, "r", encoding="utf-8") as f:
                return yaml.safe_load(
                    self.__first_pass_syntax_check(self.__convert_function(f.read()))
                )
        except (json.JSONDecodeError, yaml.parser.ParserError) as e:
            raise FragmentValidationError(
                "Fragment file '{}' is invalid: {}".format(fragment_file, str(e))
            ) from e

    def _get_fragment_file(self):
        """Return the single template file inside the fragments directory."""
        all_fragment_files = []
        for root, _directories, files in os.walk(self.fragment_dir):
            for f in files:
                ext = os.path.splitext(f)[-1].lower()
                if ext in ALLOWED_EXTENSIONS:
                    all_fragment_files.append(os.path.join(root, f))
        # Fix: raise a validation error instead of an opaque IndexError
        # when the fragments folder contains no template file at all.
        if not all_fragment_files:
            raise FragmentValidationError(
                "A Module must contain a template file, but no template "
                "file could be found in " + str(self.fragment_dir)
            )
        if len(all_fragment_files) > 1:
            raise FragmentValidationError(
                "A Module can only consist of a "
                "single template file, but there are "
                + str(len(all_fragment_files))
                + ": "
                + str(all_fragment_files)
            )
        return all_fragment_files[0]

    @staticmethod
    def _overwrite(path, contents):
        LOG.debug("Overwriting '%s'", path)
        with path.open("w", encoding="utf-8") as f:
            # `contents` may be either a writer callback or a plain string.
            if callable(contents):
                contents(f)
            else:
                f.write(contents)

    @staticmethod
    def __first_pass_syntax_check(template):
        if "Fn::ImportValue" in template:
            raise FragmentValidationError(
                "Template fragment can't contain any Fn::ImportValue."
            )
        return template

    @staticmethod
    def __convert_function(template):
        """
        When generating schema, we don't care about the actual reference.
        So the following will only make a valid YAML file.
        """
        return (
            template.replace("!Transform", "Fn::Transform")
            .replace("!ImportValue", "Fn::ImportValue")
            .replace("!", "")
        )
| 38.561538 | 86 | 0.598777 |
fa81f048922ba814fbba542daa7a64363f914dfa | 3,880 | py | Python | userbot/modules/stat.py | bryanasfuk/Baphomet | bf3c3d9589511534ad848b3aa0b59e3d6b113282 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 56 | 2021-04-13T13:22:07.000Z | 2022-02-28T04:08:19.000Z | userbot/modules/stat.py | bryanasfuk/Baphomet | bf3c3d9589511534ad848b3aa0b59e3d6b113282 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 85 | 2021-04-11T17:00:29.000Z | 2022-03-31T22:16:35.000Z | userbot/modules/stat.py | bryanasfuk/Baphomet | bf3c3d9589511534ad848b3aa0b59e3d6b113282 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 140 | 2021-04-13T00:25:11.000Z | 2022-03-31T05:28:22.000Z | """Count the Number of Dialogs you have in your Telegram Account
Syntax: .stats"""
import logging
import time
from telethon.events import NewMessage
from telethon.tl.custom import Dialog
from telethon.tl.types import Channel, Chat, User
from userbot.events import register
logging.basicConfig(
format='[%(levelname) 5s/%(asctime)s] %(name)s: %(message)s',
level=logging.WARNING)
logger = logging.getLogger(__name__)
@register(outgoing=True, pattern=r"^.stats(?: |$)(.*)")
async def stats(event: NewMessage.Event) -> None: # pylint: disable = R0912, R0914, R0915
    """Command to get stats about the account.

    Walks every dialog of the logged-in account and tallies private chats,
    bots, groups, channels, admin/creator roles and unread counters, then
    edits the triggering message with a formatted summary.
    """
    await event.edit('`Collecting stats, Wait Master`')
    start_time = time.time()
    # Counters accumulated over the full dialog list.
    private_chats = 0
    bots = 0
    groups = 0
    broadcast_channels = 0
    admin_in_groups = 0
    creator_in_groups = 0
    admin_in_broadcast_channels = 0
    creator_in_channels = 0
    unread_mentions = 0
    unread = 0
    dialog: Dialog
    async for dialog in event.client.iter_dialogs():
        entity = dialog.entity
        # Channel covers both broadcast channels and megagroups in Telethon;
        # distinguish them via the .broadcast / .megagroup flags.
        if isinstance(entity, Channel):
            # participants_count = (await event.get_participants(dialog,
            # limit=0)).total
            if entity.broadcast:
                broadcast_channels += 1
                # admin_rights truthy means we hold some admin permission.
                if entity.creator or entity.admin_rights:
                    admin_in_broadcast_channels += 1
                if entity.creator:
                    creator_in_channels += 1
            elif entity.megagroup:
                groups += 1
                # if participants_count > largest_group_member_count:
                # largest_group_member_count = participants_count
                if entity.creator or entity.admin_rights:
                    # if participants_count > largest_group_with_admin:
                    # largest_group_with_admin = participants_count
                    admin_in_groups += 1
                if entity.creator:
                    creator_in_groups += 1
        elif isinstance(entity, User):
            private_chats += 1
            if entity.bot:
                bots += 1
        # Chat is a legacy (non-megagroup) small group.
        elif isinstance(entity, Chat):
            groups += 1
            if entity.creator or entity.admin_rights:
                admin_in_groups += 1
            if entity.creator:
                creator_in_groups += 1
        unread_mentions += dialog.unread_mentions_count
        unread += dialog.unread_count
    stop_time = time.time() - start_time
    full_name = inline_mention(await event.client.get_me())
    # Build the Markdown report; bullet lines break totals into sub-counts.
    response = f'🔸 **Stats for {full_name}** \n\n'
    response += f'**Private Chats:** {private_chats} \n'
    response += f' • `Users: {private_chats - bots}` \n'
    response += f' • `Bots: {bots}` \n'
    response += f'**Groups:** {groups} \n'
    response += f'**Channels:** {broadcast_channels} \n'
    response += f'**Admin in Groups:** {admin_in_groups} \n'
    response += f' • `Creator: {creator_in_groups}` \n'
    response += f' • `Admin Rights: {admin_in_groups - creator_in_groups}` \n'
    response += f'**Admin in Channels:** {admin_in_broadcast_channels} \n'
    response += f' • `Creator: {creator_in_channels}` \n'
    response += f' • `Admin Rights: {admin_in_broadcast_channels - creator_in_channels}` \n'
    response += f'**Unread:** {unread} \n'
    response += f'**Unread Mentions:** {unread_mentions} \n\n'
    response += f'__It Took:__ {stop_time:.02f}s \n'
    await event.edit(response)
def make_mention(user):
    """Return a textual mention: '@username' if the user has one, else an inline link."""
    username = user.username
    return f"@{username}" if username else inline_mention(user)
def inline_mention(user):
    """Build a Markdown inline mention that links to the user's Telegram id."""
    display = user_full_name(user)
    if not display:
        display = "No Name"
    return f"[{display}](tg://user?id={user.id})"
def user_full_name(user):
    """Join the user's first and last name with a space, skipping empty parts.

    Returns an empty string when neither name is set.
    """
    # Filter out None/empty components directly; the original made a
    # redundant list() copy and rebound intermediate names.
    return ' '.join(part for part in (user.first_name, user.last_name) if part)
| 34.954955 | 94 | 0.615979 |
99f831688af10e350e3aa77b887eb7d544204017 | 19,226 | py | Python | rootfs/usr/lib/python3/dist-packages/serial/serialutil.py | kappaIO-Dev/kappaIO-sdk-armhf-crosscompile | 66fc5fc21e6235f7a3be72a7ccac68e2224b7fb2 | [
"MIT"
] | null | null | null | rootfs/usr/lib/python3/dist-packages/serial/serialutil.py | kappaIO-Dev/kappaIO-sdk-armhf-crosscompile | 66fc5fc21e6235f7a3be72a7ccac68e2224b7fb2 | [
"MIT"
] | 1 | 2018-04-15T22:59:15.000Z | 2018-04-15T22:59:15.000Z | rootfs/usr/lib/python3/dist-packages/serial/serialutil.py | kappaIO-Dev/kappaIO-sdk-armhf-crosscompile | 66fc5fc21e6235f7a3be72a7ccac68e2224b7fb2 | [
"MIT"
] | null | null | null | #! python
# Python Serial Port Extension for Win32, Linux, BSD, Jython
# see __init__.py
#
# (C) 2001-2010 Chris Liechti <cliechti@gmx.net>
# this is distributed under a free software license, see license.txt
# compatibility for older Python < 2.6
# Compatibility shim: on Python >= 2.6 the names below exist and the except
# branch never runs; the fallback definitions are only for ancient Pythons.
try:
    bytes
    bytearray
except (NameError, AttributeError):
    # Python older than 2.6 do not have these types. Like for Python 2.6 they
    # should behave like str. For Python older than 3.0 we want to work with
    # strings anyway, only later versions have a true bytes type.
    bytes = str
    # bytearray is a mutable type that is easily turned into an instance of
    # bytes
    class bytearray(list):
        # for bytes(bytearray()) usage
        def __str__(self): return ''.join(self)
        def __repr__(self): return 'bytearray(%r)' % ''.join(self)
        # append automatically converts integers to characters
        def append(self, item):
            if isinstance(item, str):
                list.append(self, item)
            else:
                list.append(self, chr(item))
        # += delegates to append so ints and chars are both accepted
        def __iadd__(self, other):
            for byte in other:
                self.append(byte)
            return self
        def __getslice__(self, i, j):
            return bytearray(list.__getslice__(self, i, j))
        def __getitem__(self, item):
            # slices return a bytearray; single items return the ordinal,
            # mirroring the Python 3 bytes/bytearray indexing behaviour
            if isinstance(item, slice):
                return bytearray(list.__getitem__(self, item))
            else:
                return ord(list.__getitem__(self, item))
        def __eq__(self, other):
            # allow comparison against plain strings by converting first
            if isinstance(other, str):
                other = bytearray(other)
            return list.__eq__(self, other)
# all Python versions prior 3.x convert str([17]) to '[17]' instead of '\x11'
# so a simple bytes(sequence) doesn't work for all versions
def to_bytes(seq):
    """Convert a sequence of byte values into a ``bytes`` object."""
    buf = bytearray()
    for element in seq:
        # bytearray.append copes with ints (and, via the py2 shim, str items)
        buf.append(element)
    return bytes(buf)
# create control bytes for software (XON/XOFF) flow control and line endings
XON = to_bytes([17])
XOFF = to_bytes([19])
CR = to_bytes([13])
LF = to_bytes([10])
# single-character parity codes used throughout the serial API
PARITY_NONE, PARITY_EVEN, PARITY_ODD, PARITY_MARK, PARITY_SPACE = 'N', 'E', 'O', 'M', 'S'
# stop-bit counts (1.5 is valid for 5 data bits on some hardware)
STOPBITS_ONE, STOPBITS_ONE_POINT_FIVE, STOPBITS_TWO = (1, 1.5, 2)
# data-bit counts per character
FIVEBITS, SIXBITS, SEVENBITS, EIGHTBITS = (5, 6, 7, 8)
# human-readable names for the parity codes above
PARITY_NAMES = {
    PARITY_NONE: 'None',
    PARITY_EVEN: 'Even',
    PARITY_ODD: 'Odd',
    PARITY_MARK: 'Mark',
    PARITY_SPACE: 'Space',
}
class SerialException(IOError):
    """Base class for serial port related exceptions."""
class SerialTimeoutException(SerialException):
    """Write timeouts give an exception"""
# module-level singletons raised by port implementations
writeTimeoutError = SerialTimeoutException("Write timeout")
portNotOpenError = ValueError('Attempting to use a port that is not open')
class FileLike(object):
    """An abstract file like class.

    Implements readline()/readlines()/xreadlines() on top of read() and
    writelines() on top of write(), so Serial port objects can be used
    like ordinary file objects.  Subclasses must provide read(), write()
    and a ``timeout`` attribute.

    Note that when the serial port was opened with _NO_ timeout,
    readline() blocks until it sees a newline (or the specified size is
    reached) and readlines() would never return; readlines() therefore
    refuses to work without a timeout (it raises ValueError in that case).
    """

    def __init__(self):
        self.closed = True

    def close(self):
        self.closed = True

    # so that ports are closed when objects are discarded
    def __del__(self):
        """Destructor. Calls close()."""
        # The try/except block is in case this is called at program
        # exit time, when it's possible that globals have already been
        # deleted, and then the close() call might fail.  Since
        # there's nothing we can do about such failures and they annoy
        # the end users, we suppress the traceback.
        try:
            self.close()
        except:
            pass

    def writelines(self, sequence):
        """Write every item of *sequence* with write(); no line endings are added."""
        for line in sequence:
            self.write(line)

    def flush(self):
        """flush of file like objects - a no-op here."""
        pass

    # iterator for e.g. "for line in Serial(0): ..." usage
    def __next__(self):
        line = self.readline()
        if not line:
            raise StopIteration
        return line

    def __iter__(self):
        return self

    def readline(self, size=None, eol=LF):
        """Read a line which is terminated with end-of-line (eol) character
        ('\n' by default), until timeout, or until *size* bytes are read."""
        leneol = len(eol)
        line = bytearray()
        while True:
            c = self.read(1)
            if c:
                line += c
                if line[-leneol:] == eol:
                    break
                if size is not None and len(line) >= size:
                    break
            else:
                # read timed out (or hit EOF) - return what was collected
                break
        return bytes(line)

    def readlines(self, sizehint=None, eol=LF):
        """Read a list of lines, until timeout. sizehint is ignored.

        Raises ValueError when the port has no timeout configured, as the
        loop would otherwise never terminate.
        """
        if self.timeout is None:
            raise ValueError("Serial port MUST have enabled timeout for this function!")
        leneol = len(eol)
        lines = []
        while True:
            line = self.readline(eol=eol)
            if line:
                lines.append(line)
                # BUGFIX: compare the byte *suffix* against eol.  The original
                # compared line[-1] (an int on Python 3) with eol (bytes),
                # which was always unequal and stopped after the first line.
                if line[-leneol:] != eol:
                    # line was received with a timeout (no eol) - stop reading
                    break
            else:
                break
        return lines

    def xreadlines(self, sizehint=None):
        """Read lines, implemented as generator. It will raise StopIteration on
        timeout (empty read). sizehint is ignored."""
        while True:
            line = self.readline()
            if not line:
                break
            yield line

    # other functions of file-likes - not used by pySerial

    def seek(self, pos, whence=0):
        raise IOError("file is not seekable")

    def tell(self):
        raise IOError("file is not seekable")

    def truncate(self, n=None):
        raise IOError("file is not seekable")

    def isatty(self):
        return False
class SerialBase(object):
    """Serial port base class. Provides __init__ function and properties to
    get/set port settings."""
    # default values, may be overridden in subclasses that do not support all values
    BAUDRATES = (50, 75, 110, 134, 150, 200, 300, 600, 1200, 1800, 2400, 4800,
                 9600, 19200, 38400, 57600, 115200, 230400, 460800, 500000,
                 576000, 921600, 1000000, 1152000, 1500000, 2000000, 2500000,
                 3000000, 3500000, 4000000)
    BYTESIZES = (FIVEBITS, SIXBITS, SEVENBITS, EIGHTBITS)
    PARITIES = (PARITY_NONE, PARITY_EVEN, PARITY_ODD, PARITY_MARK, PARITY_SPACE)
    STOPBITS = (STOPBITS_ONE, STOPBITS_ONE_POINT_FIVE, STOPBITS_TWO)
    def __init__(self,
                 port = None, # number of device, numbering starts at
                 # zero. if everything fails, the user
                 # can specify a device string, note
                 # that this isn't portable anymore
                 # port will be opened if one is specified
                 baudrate=9600, # baud rate
                 bytesize=EIGHTBITS, # number of data bits
                 parity=PARITY_NONE, # enable parity checking
                 stopbits=STOPBITS_ONE, # number of stop bits
                 timeout=None, # set a timeout value, None to wait forever
                 xonxoff=False, # enable software flow control
                 rtscts=False, # enable RTS/CTS flow control
                 writeTimeout=None, # set a timeout for writes
                 dsrdtr=False, # None: use rtscts setting, dsrdtr override if True or False
                 interCharTimeout=None # Inter-character timeout, None to disable
                 ):
        """Initialize comm port object. If a port is given, then the port will be
        opened immediately. Otherwise a Serial port object in closed state
        is returned."""
        self._isOpen = False
        self._port = None # correct value is assigned below through properties
        self._baudrate = None # correct value is assigned below through properties
        self._bytesize = None # correct value is assigned below through properties
        self._parity = None # correct value is assigned below through properties
        self._stopbits = None # correct value is assigned below through properties
        self._timeout = None # correct value is assigned below through properties
        self._writeTimeout = None # correct value is assigned below through properties
        self._xonxoff = None # correct value is assigned below through properties
        self._rtscts = None # correct value is assigned below through properties
        self._dsrdtr = None # correct value is assigned below through properties
        self._interCharTimeout = None # correct value is assigned below through properties
        # assign values using get/set methods using the properties feature
        # (this validates each value and triggers _reconfigurePort if open)
        self.port = port
        self.baudrate = baudrate
        self.bytesize = bytesize
        self.parity = parity
        self.stopbits = stopbits
        self.timeout = timeout
        self.writeTimeout = writeTimeout
        self.xonxoff = xonxoff
        self.rtscts = rtscts
        self.dsrdtr = dsrdtr
        self.interCharTimeout = interCharTimeout
        if port is not None:
            self.open()
    def isOpen(self):
        """Check if the port is opened."""
        return self._isOpen
    # - - - - - - - - - - - - - - - - - - - - - - - -
    # TODO: these are not really needed as the is the BAUDRATES etc. attribute...
    # maybe i remove them before the final release...
    def getSupportedBaudrates(self):
        return [(str(b), b) for b in self.BAUDRATES]
    def getSupportedByteSizes(self):
        return [(str(b), b) for b in self.BYTESIZES]
    def getSupportedStopbits(self):
        return [(str(b), b) for b in self.STOPBITS]
    def getSupportedParities(self):
        return [(PARITY_NAMES[b], b) for b in self.PARITIES]
    # - - - - - - - - - - - - - - - - - - - - - - - -
    def setPort(self, port):
        """Change the port. The attribute portstr is set to a string that
        contains the name of the port."""
        # reopening around the change keeps the device state consistent
        was_open = self._isOpen
        if was_open: self.close()
        if port is not None:
            if isinstance(port, str):
                self.portstr = port
            else:
                self.portstr = self.makeDeviceName(port)
        else:
            self.portstr = None
        self._port = port
        self.name = self.portstr
        if was_open: self.open()
    def getPort(self):
        """Get the current port setting. The value that was passed on init or using
        setPort() is passed back. See also the attribute portstr which contains
        the name of the port as a string."""
        return self._port
    port = property(getPort, setPort, doc="Port setting")
    def setBaudrate(self, baudrate):
        """Change baud rate. It raises a ValueError if the port is open and the
        baud rate is not possible. If the port is closed, then the value is
        accepted and the exception is raised when the port is opened."""
        try:
            self._baudrate = int(baudrate)
        except TypeError:
            raise ValueError("Not a valid baudrate: %r" % (baudrate,))
        else:
            if self._isOpen: self._reconfigurePort()
    def getBaudrate(self):
        """Get the current baud rate setting."""
        return self._baudrate
    baudrate = property(getBaudrate, setBaudrate, doc="Baud rate setting")
    def setByteSize(self, bytesize):
        """Change byte size."""
        if bytesize not in self.BYTESIZES: raise ValueError("Not a valid byte size: %r" % (bytesize,))
        self._bytesize = bytesize
        if self._isOpen: self._reconfigurePort()
    def getByteSize(self):
        """Get the current byte size setting."""
        return self._bytesize
    bytesize = property(getByteSize, setByteSize, doc="Byte size setting")
    def setParity(self, parity):
        """Change parity setting."""
        if parity not in self.PARITIES: raise ValueError("Not a valid parity: %r" % (parity,))
        self._parity = parity
        if self._isOpen: self._reconfigurePort()
    def getParity(self):
        """Get the current parity setting."""
        return self._parity
    parity = property(getParity, setParity, doc="Parity setting")
    def setStopbits(self, stopbits):
        """Change stop bits size."""
        if stopbits not in self.STOPBITS: raise ValueError("Not a valid stop bit size: %r" % (stopbits,))
        self._stopbits = stopbits
        if self._isOpen: self._reconfigurePort()
    def getStopbits(self):
        """Get the current stop bits setting."""
        return self._stopbits
    stopbits = property(getStopbits, setStopbits, doc="Stop bits setting")
    def setTimeout(self, timeout):
        """Change timeout setting."""
        if timeout is not None:
            try:
                timeout + 1 # test if it's a number, will throw a TypeError if not...
            except TypeError:
                raise ValueError("Not a valid timeout: %r" % (timeout,))
            if timeout < 0: raise ValueError("Not a valid timeout: %r" % (timeout,))
        self._timeout = timeout
        if self._isOpen: self._reconfigurePort()
    def getTimeout(self):
        """Get the current timeout setting."""
        return self._timeout
    timeout = property(getTimeout, setTimeout, doc="Timeout setting for read()")
    def setWriteTimeout(self, timeout):
        """Change timeout setting."""
        if timeout is not None:
            # NOTE(review): unlike setTimeout, the < 0 check runs before the
            # numeric type check, so a non-numeric timeout raises a bare
            # TypeError here instead of ValueError - confirm if intended.
            if timeout < 0: raise ValueError("Not a valid timeout: %r" % (timeout,))
            try:
                timeout + 1 #test if it's a number, will throw a TypeError if not...
            except TypeError:
                raise ValueError("Not a valid timeout: %r" % timeout)
        self._writeTimeout = timeout
        if self._isOpen: self._reconfigurePort()
    def getWriteTimeout(self):
        """Get the current timeout setting."""
        return self._writeTimeout
    writeTimeout = property(getWriteTimeout, setWriteTimeout, doc="Timeout setting for write()")
    def setXonXoff(self, xonxoff):
        """Change XON/XOFF setting."""
        self._xonxoff = xonxoff
        if self._isOpen: self._reconfigurePort()
    def getXonXoff(self):
        """Get the current XON/XOFF setting."""
        return self._xonxoff
    xonxoff = property(getXonXoff, setXonXoff, doc="XON/XOFF setting")
    def setRtsCts(self, rtscts):
        """Change RTS/CTS flow control setting."""
        self._rtscts = rtscts
        if self._isOpen: self._reconfigurePort()
    def getRtsCts(self):
        """Get the current RTS/CTS flow control setting."""
        return self._rtscts
    rtscts = property(getRtsCts, setRtsCts, doc="RTS/CTS flow control setting")
    def setDsrDtr(self, dsrdtr=None):
        """Change DsrDtr flow control setting."""
        if dsrdtr is None:
            # if not set, keep backwards compatibility and follow rtscts setting
            self._dsrdtr = self._rtscts
        else:
            # if defined independently, follow its value
            self._dsrdtr = dsrdtr
        if self._isOpen: self._reconfigurePort()
    def getDsrDtr(self):
        """Get the current DSR/DTR flow control setting."""
        return self._dsrdtr
    dsrdtr = property(getDsrDtr, setDsrDtr, "DSR/DTR flow control setting")
    def setInterCharTimeout(self, interCharTimeout):
        """Change inter-character timeout setting."""
        if interCharTimeout is not None:
            # NOTE(review): same check ordering asymmetry as setWriteTimeout
            # (< 0 before the numeric type test) - confirm if intended.
            if interCharTimeout < 0: raise ValueError("Not a valid timeout: %r" % interCharTimeout)
            try:
                interCharTimeout + 1 # test if it's a number, will throw a TypeError if not...
            except TypeError:
                raise ValueError("Not a valid timeout: %r" % interCharTimeout)
        self._interCharTimeout = interCharTimeout
        if self._isOpen: self._reconfigurePort()
    def getInterCharTimeout(self):
        """Get the current inter-character timeout setting."""
        return self._interCharTimeout
    interCharTimeout = property(getInterCharTimeout, setInterCharTimeout, doc="Inter-character timeout setting for read()")
    # - - - - - - - - - - - - - - - - - - - - - - - -
    # names of the settings captured by get/applySettingsDict
    _SETTINGS = ('baudrate', 'bytesize', 'parity', 'stopbits', 'xonxoff',
                 'dsrdtr', 'rtscts', 'timeout', 'writeTimeout', 'interCharTimeout')
    def getSettingsDict(self):
        """Get current port settings as a dictionary. For use with
        applySettingsDict"""
        return dict([(key, getattr(self, '_'+key)) for key in self._SETTINGS])
    def applySettingsDict(self, d):
        """apply stored settings from a dictionary returned from
        getSettingsDict. it's allowed to delete keys from the dictionary. these
        values will simply left unchanged."""
        for key in self._SETTINGS:
            if d[key] != getattr(self, '_'+key): # check against internal "_" value
                setattr(self, key, d[key]) # set non "_" value to use properties write function
    # - - - - - - - - - - - - - - - - - - - - - - - -
    def __repr__(self):
        """String representation of the current port settings and its state."""
        return "%s<id=0x%x, open=%s>(port=%r, baudrate=%r, bytesize=%r, parity=%r, stopbits=%r, timeout=%r, xonxoff=%r, rtscts=%r, dsrdtr=%r)" % (
            self.__class__.__name__,
            id(self),
            self._isOpen,
            self.portstr,
            self.baudrate,
            self.bytesize,
            self.parity,
            self.stopbits,
            self.timeout,
            self.xonxoff,
            self.rtscts,
            self.dsrdtr,
        )
    # - - - - - - - - - - - - - - - - - - - - - - - -
    # compatibility with io library
    def readable(self): return True
    def writable(self): return True
    def seekable(self): return False
    def readinto(self, b):
        # Fill buffer *b* with up to len(b) bytes; returns the number read.
        data = self.read(len(b))
        n = len(data)
        try:
            b[:n] = data
        except TypeError as err:
            import array
            if not isinstance(b, array.array):
                raise err
            # array.array needs the data converted before slice assignment
            b[:n] = array.array('b', data)
        return n
# Self-test: print the default settings of a closed SerialBase instance.
if __name__ == '__main__':
    import sys
    s = SerialBase()
    sys.stdout.write('port name: %s\n' % s.portstr)
    sys.stdout.write('baud rates: %s\n' % s.getSupportedBaudrates())
    sys.stdout.write('byte sizes: %s\n' % s.getSupportedByteSizes())
    sys.stdout.write('parities: %s\n' % s.getSupportedParities())
    sys.stdout.write('stop bits: %s\n' % s.getSupportedStopbits())
    sys.stdout.write('%s\n' % s)
| 36.481973 | 146 | 0.592947 |
9b7e44d8b103aa990596ff88f8d1942574a9bf98 | 2,322 | py | Python | src/crypto_dom/kraken/user_funding/deposit_methods.py | maxima-us/crypto-dom | d0c21638a96dfd03778f98f98e002c6be114717c | [
"MIT"
] | null | null | null | src/crypto_dom/kraken/user_funding/deposit_methods.py | maxima-us/crypto-dom | d0c21638a96dfd03778f98f98e002c6be114717c | [
"MIT"
] | 2 | 2021-01-15T15:37:21.000Z | 2021-01-25T10:10:52.000Z | src/crypto_dom/kraken/user_funding/deposit_methods.py | maxima-us/crypto-dom | d0c21638a96dfd03778f98f98e002c6be114717c | [
"MIT"
] | 1 | 2021-01-15T13:51:06.000Z | 2021-01-15T13:51:06.000Z | import typing
from decimal import Decimal
import pydantic
import stackprinter
stackprinter.set_excepthook(style="darkbg2")
from crypto_dom.kraken.definitions import ASSETCLASS, ASSET
# ============================================================
# DEPOSIT METHODS
# ============================================================
# doc: https://www.kraken.com/features/api#deposit-methods
URL = "https://api.kraken.com/0/private/DepositMethods"
METHOD = "POST"
# ------------------------------
# Sample Response
# ------------------------------
#[
# {
# 'fee': '0.0000000000',
# 'gen-address': True,
# 'limit': False,
# 'method': 'Ether (Hex)'
# }
# ]
# ------------------------------
# Request Model
# ------------------------------
class Request(pydantic.BaseModel):
    """Request model for endpoint POST https://api.kraken.com/0/private/DepositMethods
    Model Fields:
    -------------
        aclass : str
            Default = currency (optional)
        asset : str enum
            Asset being deposited
        nonce : int
            Always increasing unsigned 64 bit integer
    """
    # Optional asset class - presumably defaulted to "currency" server-side; TODO confirm
    aclass: typing.Optional[ASSETCLASS]
    # Asset being deposited (validated against the ASSET definition)
    asset: ASSET
    # Always-increasing unsigned 64-bit integer (anti-replay nonce)
    nonce: pydantic.PositiveInt
# ------------------------------
# Response Model
# ------------------------------
class _Method(pydantic.BaseModel):
    """Internal model for a single deposit-method entry returned by Kraken."""
    method: str # TODO should be Literal
    # Maximum net amount that can be deposited right now, or False if no limit
    limit: typing.Union[Decimal, bool]
    fee: Decimal
    # Maps the hyphenated API key "address-setup-fee" onto a valid identifier
    address_setup_fee: typing.Optional[bool] = pydantic.Field(alias="address-setup-fee")
class _DepositMethodsResponse(pydantic.BaseModel):
    # placeholder wrapper: pydantic validates the raw list via this field
    data: typing.Tuple[_Method, ...]
# this class is just to be consistent with our API
class Response:
    """Validated Response for endpoint POST https://api.kraken.com/0/private/DepositMethods
    Type: list of pydantic models
    Model Fields:
    -------------
    method : str enum
        Name of deposit method
    limit : Union[Decimal, bool]
        Maximum net amount that can be deposited right now, or false if no limit
    fee: Decimal
    address-setup-fee: bool
        whether or not method has an address setup fee (optional)
    """
    def __call__(self, response: dict):
        # Validate the raw payload; raises pydantic.ValidationError on mismatch.
        _valid = _DepositMethodsResponse(data=response)
        return _valid.data
| 22.990099 | 91 | 0.562016 |
c6a5b3901ebac2e0c88de912b380f36a24707e5f | 11,238 | py | Python | domino/domino.py | MrBenGriffin/or-tools | f8c473f016266f89fcbfef38bd9f3738371667b9 | [
"Apache-2.0"
] | 8 | 2021-02-24T15:19:01.000Z | 2022-03-22T15:12:45.000Z | domino/domino.py | MrBenGriffin/or-tools | f8c473f016266f89fcbfef38bd9f3738371667b9 | [
"Apache-2.0"
] | 1 | 2022-02-28T22:14:30.000Z | 2022-02-28T22:14:30.000Z | domino/domino.py | MrBenGriffin/or-tools | f8c473f016266f89fcbfef38bd9f3738371667b9 | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Ben Griffin
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Domino Puzzle Solver
This solves filling a space with a set of dominoes.
"""
from ortools.sat.python import cp_model
class DominoPuzzleSolver:
    # Builds a CP-SAT model that places dominoes onto a set of grid points
    # ("ground"), with one boolean decision variable per (piece, rotation,
    # position) candidate, then solves and optionally renders the result.
    def __init__(self):
        self.allow_gaps = False
        self.maximising = False
        self.status = cp_model.UNKNOWN
        self.solver = cp_model.CpSolver()
        # generous time budget (seconds) and parallel workers
        self.solver.parameters.max_time_in_seconds = 6000
        self.solver.parameters.num_search_workers = 6
        # presolve/probing disabled - presumably tuned empirically; TODO confirm
        self.solver.parameters.cp_model_presolve = False
        self.solver.parameters.linearization_level = 0
        self.solver.parameters.cp_model_probing_level = 0
        self.pieces = None
        self.ground = None
        self.pos_items = None
        # presence: candidate key -> BoolVar; placement: candidate key -> Domino
        self.presence = {}
        self.placement = {}
    def process(self, pieces, problem: dict):
        """
        Given a problem and the pieces we construct a SAT model and then solve it.
        """
        model = cp_model.CpModel()
        """
        If gaps is true, a solution may include gaps.
        """
        self.allow_gaps = 'gaps' in problem and problem['gaps']
        """
        Define the space as given in the problem, defaulting to 7×8 rectangle
        Define available pieces as given in the problem, defaulting to at least one of each piece.
        """
        self.pieces = pieces
        self.ground = problem['fill'] if 'fill' in problem else set((x, y) for y in range(7) for x in range(8))
        available = problem['use'] if 'use' in problem else {k: 1 for k in pieces.names}
        """
        For each lib piece that's being used (by default that's 112 = 4*28 - each of the 28 shapes rotated)..
        See if it can be placed at each point in space, and if so, add it into the model as a potential presence
        of the solution. Also add it's coordinates into a placement dict so that we can test for overlaps later.
        """
        for (name, rot), piece in pieces.lib.items():
            if name in available:
                for point in self.ground:
                    if piece.legal(self.ground, point):
                        self.presence[(name, rot, point)] = model.NewBoolVar(f"{name, rot, point}")
                        self.placement[(name, rot, point)] = Domino(name, rot, point)
        """
        Compose pos_items - this is the set of all points of each domino at each x,y in the space.
        We may use this for constraint setting and for rendering.
        """
        self.pos_items = {x: set() for x in self.ground}
        for fix, p_bv in self.presence.items():
            domino = self.placement[fix]
            for point in domino.pos:
                self.pos_items[point].add(fix)
        # freeze each set into a list for stable iteration below
        for k in self.pos_items:
            self.pos_items[k] = list(self.pos_items[k])
        """
        Now do the domino bit - match numbers
        """
        # 4-neighbourhood offsets; a placed half must touch exactly one
        # neighbouring half of another piece showing the same value.
        offsets = [(0, 1), (0, -1), (1, 0), (-1, 0)]
        for (x, y) in self.ground:
            legals = [(x+dx, y+dy) for dx, dy in offsets if (x+dx, y+dy) in self.ground]
            for fix in self.pos_items[(x, y)]:
                fbv = self.presence[fix]
                main = self.placement[fix]
                # m_up = main.rotation in [0, 180]
                value = main.value_at((x, y))
                connections = []
                for pos in legals:
                    for fxo in self.pos_items[pos]:
                        other = self.placement[fxo]
                        # o_up = other.rotation in [0, 180]
                        # this constraint prevents 0-0 counting as an internal path.
                        if other != main and other.value_at(pos) == value:
                            connections.append(self.presence[fxo])
                model.Add(sum(connections) == 1).OnlyEnforceIf(fbv)
        """
        Constraints are:
        1: Limit the count of free pieces as defined by the problem
        2: Exactly piece per space of space
        Compose free - the entire set of lib+offset pieces by each 'free' piece (eg, the set of all "K")
        This will have a set of eg 28 dominoes, with each of the variables in it representing each position.
        so that will be 194 for a 7*8 grid.
        """
        free = {k: [bv for (name, pos, offset), bv in self.presence.items() if name == k] for k in available}
        to_maximise = []
        for k, allowed in available.items():
            if allowed > 0:
                # exact count requested for this piece
                model.Add(sum(free[k]) == allowed)
            else:
                model.Add(sum(free[k]) >= -allowed) # if we want at least 1, use -1
                to_maximise += free[k]
        if to_maximise:
            # negative allowances become an objective: place as many as possible
            model.Maximize(sum(to_maximise))
            self.maximising = True
        """
        For each xy, constrain the sum of pos_items to 1 (1 piece per x,y).
        """
        for pt_points in self.pos_items.values():
            items = [self.presence[i] for i in pt_points]
            if self.allow_gaps:
                model.Add(sum(items) <= 1)
            else:
                model.Add(sum(items) == 1)
        """
        Can now solve.
        """
        self.status = self.solver.Solve(model)
    def show(self) -> bool:
        # Report the solve outcome; True means a drawable solution exists.
        if self.status in (cp_model.OPTIMAL, cp_model.FEASIBLE):
            if self.maximising:
                print(f"Solver places {int(self.solver.ObjectiveValue())} pieces in {self.solver.WallTime()}s")
            else:
                print(f"Solved challenge in {self.solver.WallTime()}s")
            return True
        else:
            if self.status == cp_model.INFEASIBLE:
                print(f"Solver says the challenge is infeasible after {self.solver.WallTime()}s.")
            else:
                print(f"Solver ran out of time.")
            return False
    @staticmethod
    def box(*walls) -> str:
        """
        :param walls: tuple of 4 bool: W,E,N,S
        :return: related box drawing character.
        """
        # Pack the four wall flags into a 4-bit index into the glyph table.
        value = 0
        for i, wall in enumerate(walls):
            value |= 1 << i if wall else 0
        return (' ', '╸', '╺', '═', '╹', '╝', '╚', '╩', '╻', '╗', '╔', '╦', '║', '╣', '╠', '╬')[value & 0x0F]
    def draw(self):
        """
        Derive grid size, based upon the min/max x,y values set in the space
        """
        x1 = min(self.ground, key=lambda a: a[0])[0]
        y1 = min(self.ground, key=lambda a: a[1])[1]
        x2 = max(self.ground, key=lambda a: a[0])[0]
        y2 = max(self.ground, key=lambda a: a[1])[1]
        """
        Compose grid dict, based upon solver values
        """
        # grid maps normalised (x, y) -> the chosen candidate key, or None
        grid = {}
        for y in range(y1, y2 + 1):
            for x in range(x1, x2 + 1):
                grid[(x - x1, y - y1)] = None
                if (x, y) in self.pos_items:
                    for maybe in self.pos_items[x, y]:
                        if self.solver.Value(self.presence[maybe]):
                            grid[(x - x1, y - y1)] = maybe
        """
        Draw grid using box drawing characters
        """
        x_dim = x2 + 1 - x1
        y_dim = y2 + 1 - y1
        for y in range(y_dim + 1):
            line = ""
            nw, sw, ne, se = '~', '~', '~', '~'
            for x in range(x_dim + 1):
                # corner glyph depends on which of the four surrounding cells
                # belong to different pieces ('~' marks out-of-grid)
                nw = grid[(x - 1, y - 1)] if 0 <= y - 1 < y_dim and 0 <= x - 1 < x_dim else '~'
                ne = grid[(x - 0, y - 1)] if 0 <= y - 1 < y_dim and 0 <= x - 0 < x_dim else '~'
                sw = grid[(x - 1, y - 0)] if 0 <= y - 0 < y_dim and 0 <= x - 1 < x_dim else '~'
                se = grid[(x - 0, y - 0)] if 0 <= y - 0 < y_dim and 0 <= x - 0 < x_dim else '~'
                line += self.box(nw != sw, ne != se, nw != ne, sw != se) + 3 * self.box(ne != se, ne != se, False, False)
            print(line)
            # now to do y-intermediate.
            if y < y_dim:
                line = ""
                for x in range(x_dim):
                    p = grid[(x, y)]
                    domino = self.placement[p]
                    value = str(domino.value_at((x, y)))
                    w = grid[(x - 1, y)] if 0 <= x - 1 < x_dim else '~'
                    line += f'{self.box(False, False, p != w, p != w)} {value} '
                print(f'{line}║')
class Domino:
    """A domino: two adjacent unit squares, each carrying a value.

    The piece may be rotated in 90-degree steps; each value stays fixed to
    its own end.  After construction, ``pos`` holds the two absolute board
    coordinates the piece occupies and ``a``/``b`` the values shown at those
    coordinates, in the same order.
    """

    # Relative coordinates of the two squares for each rotation:
    # 0/180 lie horizontally, 90/270 vertically.  The *value* order flips
    # for 180/270 (see setup()), which is how the reversal is modelled.
    rot_map = {
        0: [(0, 0), (1, 0)],
        90: [(0, 0), (0, 1)],
        180: [(0, 0), (1, 0)],
        270: [(0, 0), (0, 1)]
    }

    def __init__(self, values: tuple, rotation=0, offset=(0, 0)):
        """values: (left, right) pip pair; rotation: 0/90/180/270; offset: board (x, y)."""
        self.l, self.r = values
        self.x, self.y = offset
        self.rotation = None
        self.pts = None
        self.pos = None
        self.a, self.b = None, None
        self.setup(rotation)

    def setup(self, theta: int):
        """(Re)compute placement state for rotation *theta*."""
        self.rotation = theta
        self.pts = Domino.rot_map[self.rotation]
        self.pos = [(x + self.x, y + self.y) for (x, y) in self.pts]
        # 180/270 present the piece reversed, so swap which value sits first.
        self.a, self.b = (self.l, self.r) if theta in (0, 90) else (self.r, self.l)

    def value_at(self, pos: tuple) -> "int | None":
        """Return the pip value at board coordinate *pos*, or None if the piece is not there."""
        # fixed annotation: the original `[None, int]` was a list literal,
        # not a valid type hint.
        if pos == self.pos[0]:
            return self.a
        if pos == self.pos[1]:
            return self.b
        return None

    def legal(self, ground: set, offs: tuple) -> bool:
        """True when the piece, shifted by offset *offs*, lies entirely inside
        *ground* (a set of legal (x, y) points)."""
        ox, oy = offs
        return all((x + ox, y + oy) in ground for (x, y) in self.pts)
class DominoCollection:
    """A library of domino pieces: every supplied value pair in each of the
    four rotations, keyed by (value, rotation)."""
    def __init__(self, values):
        library = {}
        for value in values:
            for rot in (0, 90, 180, 270):
                library[(value, rot)] = Domino(value, rot)
        self.lib = library
        self.names = values
class Domino6Set(DominoCollection):
    """The standard double-six set: all unordered pairs (x, y) with y <= x <= 6."""
    def __init__(self):
        pieces = {(x, y) for y in range(7) for x in range(y, 7)}
        super().__init__(pieces)
class Domino5Set(DominoCollection):
    """The double-five set: all unordered pairs (x, y) with y <= x <= 5."""
    def __init__(self):
        pieces = {(x, y) for y in range(6) for x in range(y, 6)}
        super().__init__(pieces)
def example():
    """Solve and draw a demo puzzle: fill an 8x7 rectangle with a full
    double-six set, no gaps allowed."""
    spaces = [(8, 7)]
    # Union all requested rectangles.  The original reassigned `space` on
    # every loop iteration, silently discarding all but the last rectangle
    # when several were listed.
    space = set()
    for sx, sy in spaces:
        space |= {(x, y) for y in range(sy) for x in range(sx)}
    shapes = Domino6Set()
    solver = DominoPuzzleSolver()
    challenge = {
        'gaps': False,
        'fill': space
    }
    solver.process(shapes, challenge)
    if solver.show():
        solver.draw()
# Manual entry point: run the demo solve when executed as a script.
if __name__ == '__main__':
    example()
| 37.585284 | 121 | 0.530699 |
a3814ad97aceb0ed00fa5ae279487eb1d90eae12 | 23,547 | py | Python | oneflow/python/framework/check_point_v2.py | sji15/oneflow | 523888bc251920c39021e7a0e063118d53c4cfd1 | [
"Apache-2.0"
] | null | null | null | oneflow/python/framework/check_point_v2.py | sji15/oneflow | 523888bc251920c39021e7a0e063118d53c4cfd1 | [
"Apache-2.0"
] | null | null | null | oneflow/python/framework/check_point_v2.py | sji15/oneflow | 523888bc251920c39021e7a0e063118d53c4cfd1 | [
"Apache-2.0"
] | null | null | null | """
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import numpy as np
from google.protobuf import text_format
import oneflow
import oneflow._oneflow_internal
import oneflow.core.operator.op_conf_pb2 as op_conf_pb
import oneflow.python.framework.config_util as config_util
import oneflow.python.framework.dtype as dtype_util
import oneflow.python.framework.runtime_mode as rt_mode
import oneflow.python.ops.initializer_util as initializer_util
import oneflow.core.job.initializer_conf_pb2 as initializer_conf_util
import oneflow.python.framework.id_util as id_util
import oneflow.python.framework.session_context as session_ctx
import oneflow.python.framework.remote_blob as remote_blob_util
import oneflow.python.lib.core.async_util as async_util
import oneflow.python.eager.boxing_util as boxing_util
import oneflow.python.eager.op_infer_util as op_infer_util
import oneflow.core.framework.variable_meta_info_pb2 as variable_meta_info_pb
import oneflow.core.framework.user_op_attr_pb2 as attr_value_pb
from oneflow.python.experimental import interface_op_read_and_write
import oneflow.core.register.logical_blob_id_pb2 as logical_blob_id_util
import oneflow.python.ops.get_variable as get_variable
from oneflow.python.oneflow_export import oneflow_export
import oneflow._oneflow_internal.oneflow.core.register.logical_blob_id as lbi_util
import oneflow._oneflow_internal
from oneflow._oneflow_internal import EagerBlobTrait
from typing import Any, Callable, Dict, List, Union, Sequence, Optional, Iterable, Tuple
# File names used inside each per-variable checkpoint directory.
META_INFO_FILENAME = "meta"  # text-format VariableMetaInfo proto (shape/dtype)
DATA_FILENAME = "out"  # raw little-endian binary tensor data
# Job/op names used for the internal eager ops built by this module.
FAKE_JOB_NAME = "system_checkpoint"
OP_PREFIX = "system_checkpoint"
blob_register = oneflow._oneflow_internal.GetDefaultBlobRegister()
def sync_default_session_if_normal():
    """Synchronize the default session, but only in NORMAL runtime mode.

    In any other runtime mode this is a no-op.
    """
    # TODO merge with same function in experimental/interface_op_read_and_write.py
    if rt_mode.CurrentMode() != rt_mode.NORMAL_MODE:
        return
    oneflow.sync_default_session()
class FileBackendVariableBlob:
    """A variable whose data lives in a checkpoint directory on disk.

    The directory holds a raw binary data file (``DATA_FILENAME``) and,
    usually, a text-format ``VariableMetaInfo`` proto (``META_INFO_FILENAME``)
    describing shape and dtype. When the meta file is absent, shape and dtype
    may instead be supplied explicitly — both or neither.
    """
    def __init__(
        self,
        var_dir: str,
        dtype: Optional[oneflow.dtype] = None,
        shape: Optional[Sequence[int]] = None,
    ):
        data_path = os.path.join(var_dir, DATA_FILENAME)
        assert os.path.isfile(data_path)
        self.var_dir_ = var_dir
        meta_info_path = os.path.join(self.var_dir_, META_INFO_FILENAME)
        if os.path.exists(meta_info_path):
            meta_info = variable_meta_info_pb.VariableMetaInfo()
            with open(meta_info_path) as f:
                text_format.Parse(f.read(), meta_info)
            self.has_meta_info_ = True
        else:
            self.has_meta_info_ = False
        if self.has_meta_info_:
            # Shape/dtype come from the meta file; supplying them as
            # arguments too would be ambiguous, hence the assert.
            assert dtype is None and shape is None
            self.shape_ = tuple(meta_info.shape.dim)
            self.dtype_ = dtype_util.convert_proto_dtype_to_oneflow_dtype(
                meta_info.data_type
            )
        else:
            if shape is not None and dtype is not None:
                self.shape_ = shape
                self.dtype_ = dtype
                self.has_meta_info_ = True
            elif shape is not None or dtype is not None:
                raise RuntimeError("both or neither of shape and dtype should be None")
            else:
                # No meta info at all: shape_/dtype_ stay unset and numpy()
                # will refuse to read the data.
                pass
        if self.has_meta_info_:
            # Sanity check: file size must equal element count * itemsize.
            itemsize = np.dtype(
                dtype_util.convert_oneflow_dtype_to_numpy_dtype(self.dtype_)
            ).itemsize
            assert os.path.getsize(data_path) == np.prod(self.shape).item() * itemsize
    @property
    def file_path(self) -> str:
        # Path of the raw binary data file inside the variable directory.
        return os.path.join(self.var_dir_, DATA_FILENAME)
    @property
    def shape(self) -> Tuple[int]:
        return self.shape_
    @property
    def quant_info(self):
        # Quantization info is not supported for file-backed variables.
        raise NotImplementedError()
    @property
    def dtype(self) -> oneflow.dtype:
        return self.dtype_
    def numpy(self) -> np.ndarray:
        """Read the whole variable from disk into a numpy array."""
        if not self.has_meta_info_:
            raise RuntimeError("This variable does not have meta info")
        return np.fromfile(
            self.file_path,
            dtype=dtype_util.convert_oneflow_dtype_to_numpy_dtype(self.dtype),
        ).reshape(self.shape)
# Any container a variable's value can be read from or written to:
# an eager blob, a file-backed checkpoint blob, a numpy array, or a Tensor.
ValueContainer = Union[
    EagerBlobTrait, FileBackendVariableBlob, np.ndarray, "oneflow.Tensor"
]
def _ElemCnt(shape):
return np.prod(shape).astype(int).item()
@oneflow_export("get_all_variables")
@session_ctx.try_init_default_session
def GetAllVariables() -> Dict[str, oneflow._oneflow_internal.EagerConsistentBlob]:
    """
    Get all variables of all jobs as a dict, keyed by variable op name.
    """
    sync_default_session_if_normal()
    sess = session_ctx.GetDefaultSession()
    interface_ops = sess.interface_ops
    variables = {}
    for op in interface_ops:
        op_attr = sess.OpAttribute4InterfaceOpName(op)
        # Interface ops also include inputs/outputs; keep only variable ops.
        if op_attr.op_conf.WhichOneof("op_type") != "variable_conf":
            continue
        variables[op] = interface_op_read_and_write.GetEagerInterfaceBlob(op)
    return variables
def _LoadSingleVariable(path: str) -> Optional[FileBackendVariableBlob]:
    """Wrap *path* as a FileBackendVariableBlob if it contains a data file."""
    data_file = os.path.join(path, DATA_FILENAME)
    if not os.path.isfile(data_file):
        return None
    return FileBackendVariableBlob(path)
@oneflow_export("checkpoint.get", "load")
@session_ctx.try_init_default_session
def GetCheckpoint(
    path: str,
) -> Union[Dict[str, FileBackendVariableBlob], FileBackendVariableBlob]:
    """
    Load variable(s) from file system.

    If *path* itself holds a single variable it is returned directly;
    otherwise every immediate subdirectory holding a variable is loaded and
    returned as a name -> variable dict.
    """
    assert os.path.isdir(path), "Directory {} doesn't exist!".format(path)
    whole_dir_var = _LoadSingleVariable(path)
    if whole_dir_var is not None:
        return whole_dir_var
    candidates = (
        (entry, _LoadSingleVariable(os.path.join(path, entry)))
        for entry in os.listdir(path)
    )
    return {name: blob for name, blob in candidates if blob is not None}
def _GetOpNameFromLbn(lbn):
return lbn.split("/")[0]
def _GetScopeSymbolIdFromEagerBlob(blob):
    """Look up the scope symbol id of the op that produced *blob*."""
    op_name = _GetOpNameFromLbn(blob.logical_blob_name)
    session = session_ctx.GetDefaultSession()
    op_conf = session.OpAttribute4InterfaceOpName(op_name).op_conf
    return op_conf.scope_symbol_id
def _ReadSlice(
    container: ValueContainer,
) -> Iterable[Tuple[Sequence[int], Sequence[int], np.ndarray]]:
    """
    Return a generator which iterates over the input blob or array and yields
    (start_nd_idx, stop_nd_idx, slice_np_array).

    Dispatches on the container type; the actual slicing schedule is
    delegated to _ForEachSlice in every branch.
    """
    if isinstance(container, oneflow.Tensor):
        # Tensors: slice via the eager logical_slice op under the tensor's
        # own placement; no explicit scope symbol id is needed.
        def ReadFromTensor(tensor, start_nd_idx, stop_nd_idx):
            with tensor._placement_scope():
                return _LogicalSlice(
                    tensor._blob_object, start_nd_idx, stop_nd_idx, None
                )
        yield from _ForEachSlice(container, ReadFromTensor)
    elif isinstance(container, EagerBlobTrait):
        # Eager blobs: look up the producing op's scope for the slice op.
        def ReadFromEagerBlob(eager_blob, start_nd_idx, stop_nd_idx):
            scope_symbol_id = _GetScopeSymbolIdFromEagerBlob(eager_blob)
            return _LogicalSlice(
                eager_blob.blob_object, start_nd_idx, stop_nd_idx, scope_symbol_id
            )
        yield from _ForEachSlice(container, ReadFromEagerBlob)
    elif isinstance(container, FileBackendVariableBlob):
        np_dtype = np.dtype(
            dtype_util.convert_oneflow_dtype_to_numpy_dtype(container.dtype)
        )
        with open(container.file_path, "rb") as f:
            # Slices are produced in sequential order, so reading the file
            # cursor forward matches the nd-index ranges yielded below.
            def ReadFromFile(_, start_nd_idx, stop_nd_idx):
                length = _ElemCnt(np.array(stop_nd_idx) - np.array(start_nd_idx))
                slice = f.read(length * np_dtype.itemsize)
                return np.frombuffer(slice, dtype=np_dtype,).reshape(
                    np.array(stop_nd_idx) - np.array(start_nd_idx)
                )
            yield from _ForEachSlice(container, ReadFromFile)
    elif isinstance(container, np.ndarray):
        # Plain numpy arrays: build a tuple of slice objects per axis.
        def ReadFromNpArray(array, start_nd_idx, stop_nd_idx):
            slice_objs = []
            for start, stop in zip(start_nd_idx, stop_nd_idx):
                slice_objs.append(slice(start, stop))
            return array[tuple(slice_objs)]
        yield from _ForEachSlice(container, ReadFromNpArray)
    else:
        raise RuntimeError("Unknown type: {}".format(type(container).__name__))
@oneflow_export("checkpoint.save")
@session_ctx.try_init_default_session
def SaveVarDict(
    path: str,
    var_dict: Optional[
        Dict[str, Union[FileBackendVariableBlob, EagerBlobTrait]]
    ] = None,
) -> None:
    """
    Save `var_dict` to `path`. Each variable gets its own subdirectory
    containing a raw data file and a text-format meta-info proto.
    Defaults to saving all variables of the default session.
    """
    sync_default_session_if_normal()
    if var_dict is None:
        var_dict = GetAllVariables()
    def IsFileOrNonEmptyDir(path):
        if os.path.isfile(path):
            return True
        if os.path.isdir(path) and len(os.listdir(path)) != 0:
            return True
        return False
    # Refuse to overwrite anything: the target must not exist or be empty.
    assert not IsFileOrNonEmptyDir(
        path
    ), "{} is a file or non-empty directory! Note that flow.save is different from torch.save. It saves each weight as a separated file so that a directory instead of a file should be given.".format(
        path
    )
    os.makedirs(path, exist_ok=True)
    for name, var in var_dict.items():
        meta_info = variable_meta_info_pb.VariableMetaInfo()
        meta_info.shape.dim[:] = var.shape
        meta_info.data_type = oneflow._oneflow_internal.deprecated.GetProtoDtype4OfDtype(
            var.dtype
        )
        var_dir = os.path.join(path, name)
        param_path = os.path.join(var_dir, DATA_FILENAME)
        os.makedirs(os.path.dirname(param_path))
        # Stream the variable to disk slice by slice to bound memory use.
        with open(param_path, "wb") as f:
            for _, _, slice in _ReadSlice(var):
                f.write(slice.tobytes())
        with open(os.path.join(var_dir, META_INFO_FILENAME), "w") as f:
            f.write(text_format.MessageToString(meta_info))
    # write an empty file 'snapshot_done', indicating that
    # the save process finished normally
    with open(os.path.join(path, "snapshot_done"), "w"):
        pass
@oneflow_export("save")
def save(obj, save_dir):
    """Save the variable dict *obj* under directory *save_dir*."""
    return SaveVarDict(save_dir, obj)
def _LogicalSlice(
    input_blob_object: oneflow._oneflow_internal.BlobObject,
    start: Sequence[int],
    stop: Sequence[int],
    scope_symbol_id: int,
) -> np.ndarray:
    """
    Construct a logical_slice op and run it by oneflow eager,
    return the sliced result as a numpy ndarray.

    *start*/*stop* are per-axis nd indices; the step is fixed to 1.
    """
    op_name = id_util.UniqueStr(OP_PREFIX)
    def AsyncSlice(Yield):
        def build(builder):
            op_conf = op_conf_pb.OperatorConf()
            # device_tag doesn't matter for logical_slice op
            device_tag = oneflow.current_scope().device_parallel_desc_symbol.device_tag
            op_conf.device_tag = device_tag
            op_conf.name = op_name
            op_conf.user_conf.op_type_name = "logical_slice"
            op_conf.user_conf.input["x"].s.append("{}/x_0".format(op_name))
            op_conf.user_conf.output["y"].s.append("{}/y_0".format(op_name))
            # The slice runs on the same placement as the input blob.
            parallel_conf = input_blob_object.parallel_desc_symbol.parallel_conf
            op_conf.user_conf.attr["parallel_conf"].at_string = str(parallel_conf)
            op_conf.user_conf.attr["start"].at_list_int64.val[:] = start
            op_conf.user_conf.attr["stop"].at_list_int64.val[:] = stop
            # Contiguous slices only: step 1 on every axis.
            op_conf.user_conf.attr["step"].at_list_int64.val[:] = [1] * len(start)
            bn_in_op2blob_object = (
                oneflow._oneflow_internal.deprecated.BnInOp2BlobObject()
            )
            bn_in_op2blob_object["x_0"] = input_blob_object
            op_attribute = op_infer_util.Infer(
                op_conf, bn_in_op2blob_object, scope_symbol_id
            )
            cfg_op_attribute = oneflow._oneflow_internal.deprecated.MakeOpAttributeByString(
                str(op_attribute)
            )
            builder.StatelessCall(
                cfg_op_attribute,
                parallel_conf,
                bn_in_op2blob_object,
                boxing_util.BoxingTo,
            )
            # Hand the output blob object back to the awaiting caller.
            Yield(bn_in_op2blob_object["y_0"])
        oneflow._oneflow_internal.deprecated.LogicalRun(build)
    lbi = lbi_util.LogicalBlobId()
    lbi.set_op_name(op_name)
    lbi.set_blob_name(op_name)
    # Run the op asynchronously and wait for its single output blob object.
    blob_object = async_util.Await(1, AsyncSlice)[0]
    blob = oneflow._oneflow_internal.EagerConsistentBlob(
        lbi,
        blob_object=blob_object,
        blob_register=blob_register,
        job_name=FAKE_JOB_NAME,
    )
    return blob.numpy()
def _GetCpu0VariableBlobFromNumpy(
    np_array: np.ndarray, dtype: oneflow.dtype
) -> oneflow._oneflow_internal.EagerConsistentBlob:
    """
    Add a variable on cpu 0, and feed the value of `np_array`.
    Note: dtype argument cannot be eliminated by
    convert_numpy_dtype_to_oneflow_dtype(np_array.dtype),
    because np.int8 == np.char and
    numpy_dtype_to_oneflow_dtype(oneflow_dtype_to_numpy_dtype(flow.int8))
    may be flow.char
    """
    with oneflow.scope.placement("cpu", "0:0"):
        op_name = id_util.UniqueStr(OP_PREFIX)
        # Create a zero-initialized, non-trainable variable of the right
        # shape/dtype, then overwrite it with the numpy data below.
        op_conf = get_variable.GenerateVariableOpConf(
            name=op_name,
            shape=np_array.shape,
            dtype=dtype,
            initializer=initializer_util.zeros_initializer(dtype=dtype),
            trainable=False,
        )
        current_parallel_desc_sym = oneflow.current_scope().device_parallel_desc_symbol
        device_tag = current_parallel_desc_sym.device_tag
        op_conf.device_tag = device_tag
        op_attribute = op_infer_util.Infer(op_conf, {})
        var_blob = get_variable.CreateEagerVariableBlob(
            op_attribute, job_name=FAKE_JOB_NAME
        )
        # Push the numpy contents into the freshly created variable blob.
        interface_op_read_and_write.FeedValueToInterfaceBlobObject(
            var_blob.blob_object, np_array
        )
        return var_blob
def _LogicalSliceAssign(
    ref_blob_object: oneflow._oneflow_internal.BlobObject,
    value_blob_object: oneflow._oneflow_internal.BlobObject,
    start: Sequence[int],
    stop: Sequence[int],
    scope_symbol_id: Optional[int],
) -> None:
    """
    Construct a logical_slice_assign op and run it by oneflow eager:
    writes *value_blob_object* into the [start, stop) region of
    *ref_blob_object* (step fixed to 1 on every axis).
    """
    def BuildAssignInstruction(builder):
        op_conf = op_conf_pb.OperatorConf()
        # device_tag doesn't matter for logical_slice_assign op
        device_tag = oneflow.current_scope().device_parallel_desc_symbol.device_tag
        op_conf.device_tag = device_tag
        op_name = id_util.UniqueStr(OP_PREFIX)
        op_conf.name = op_name
        op_conf.user_conf.op_type_name = "logical_slice_assign"
        op_conf.user_conf.input["value"].s.append("{}/value_0".format(op_name))
        op_conf.user_conf.input["ref"].s.append("{}/ref_0".format(op_name))
        # The assignment runs on the placement of the destination blob.
        parallel_conf = ref_blob_object.parallel_desc_symbol.parallel_conf
        op_conf.user_conf.attr["parallel_conf"].at_string = str(parallel_conf)
        op_conf.user_conf.attr["start"].at_list_int64.val[:] = start
        op_conf.user_conf.attr["stop"].at_list_int64.val[:] = stop
        op_conf.user_conf.attr["step"].at_list_int64.val[:] = [1] * len(start)
        bn_in_op2blob_object = oneflow._oneflow_internal.deprecated.BnInOp2BlobObject()
        bn_in_op2blob_object["ref_0"] = ref_blob_object
        bn_in_op2blob_object["value_0"] = value_blob_object
        op_attribute = op_infer_util.Infer(
            op_conf, bn_in_op2blob_object, scope_symbol_id
        )
        cfg_op_attribute = oneflow._oneflow_internal.deprecated.MakeOpAttributeByString(
            str(op_attribute)
        )
        builder.StatelessCall(
            cfg_op_attribute, parallel_conf, bn_in_op2blob_object, boxing_util.BoxingTo,
        )
    oneflow._oneflow_internal.deprecated.LogicalRun(BuildAssignInstruction)
def FeedValueToVariable(
    var_blob: Union[oneflow._oneflow_internal.EagerConsistentBlob, "oneflow.Tensor"],
    value: ValueContainer,
    scope_symbol_id: Optional[int],
) -> None:
    """
    Feed the value of `value` to the variable `var_blob`, slice by slice.
    Shape and dtype of value and destination must match.
    """
    assert isinstance(
        value, (EagerBlobTrait, FileBackendVariableBlob, np.ndarray, oneflow.Tensor)
    ), "Unknown value type: {}".format(type(value).__name__)
    if isinstance(value, FileBackendVariableBlob):
        if not value.has_meta_info_:
            # The on-disk blob carries no meta file; borrow shape/dtype from
            # the destination variable so it can be read.
            value = FileBackendVariableBlob(
                value.var_dir_, var_blob.dtype, var_blob.shape
            )
    assert var_blob.shape == value.shape, "{} vs {}".format(var_blob.shape, value.shape)
    if isinstance(value, np.ndarray):
        value_flow_dtype = dtype_util.convert_numpy_dtype_to_oneflow_dtype(value.dtype)
    else:
        value_flow_dtype = value.dtype
    assert var_blob.dtype == value_flow_dtype, "{} vs {}".format(
        var_blob.dtype, value_flow_dtype
    )
    if isinstance(var_blob, oneflow.Tensor):
        var_blob_object = var_blob._blob_object
    else:
        assert isinstance(var_blob, EagerBlobTrait)
        var_blob_object = var_blob.blob_object
    # Stream the value slice-by-slice through a cpu-0 staging variable into
    # the destination blob.
    for start, stop, slice in _ReadSlice(value):
        slice_value_blob = _GetCpu0VariableBlobFromNumpy(slice, var_blob.dtype)
        _LogicalSliceAssign(
            var_blob_object, slice_value_blob.blob_object, start, stop, scope_symbol_id,
        )
@oneflow_export("load_variables")
@session_ctx.try_init_default_session
def LoadVariables(
    value_dict: Dict[str, ValueContainer], ignore_mismatch: bool = True,
):
    """
    Load value in `value_dict` into oneflow variables.
    For example, if `value_dict` is {'x', np.ones(x_shape)},
    the value of variable "x" will be all ones.
    If `ignore_mismatch` is False, an exception will be raised when
    there is a name in `value_dict` not belonging to any variable.
    """
    sync_default_session_if_normal()
    all_vars = GetAllVariables()
    for name, value in value_dict.items():
        if name in all_vars:
            var_blob = interface_op_read_and_write.GetEagerInterfaceBlob(name)
            scope_symbol_id = _GetScopeSymbolIdFromEagerBlob(var_blob)
            FeedValueToVariable(var_blob, value, scope_symbol_id)
        else:
            if not ignore_mismatch:
                raise RuntimeError('"{}" is not a variable name'.format(name))
    # Wait for all pending eager assignments to finish.
    oneflow._oneflow_internal.eager.single_client.Sync()
def _ForEachSlice(
    container: ValueContainer,
    f: Union[
        Callable[[EagerBlobTrait, Sequence[int], Sequence[int]], Any],
        Callable[[FileBackendVariableBlob, Sequence[int], Sequence[int]], Any],
        Callable[[np.ndarray, Sequence[int], Sequence[int]], Any],
    ],
):
    """
    Slice container into slices whose size < SLICE_BYTES. For every slice,
    yield start_nd_idx, stop_nd_idx and f(container, start_nd_idx, stop_nd_idx).

    Slices are contiguous in row-major (flat) order, cut along the outermost
    axis whose trailing block still fits in one slice.
    """
    assert isinstance(
        container, (EagerBlobTrait, FileBackendVariableBlob, np.ndarray, oneflow.Tensor)
    ), "Unknown type: {}".format(type(container).__name__)
    assert container.shape is not None
    # For current implementation (transport data by grpc), SLICE_BYTES must be lower than 64M
    SLICE_BYTES = 32 * 1024 * 1024
    if isinstance(container, np.ndarray):
        np_dtype = container.dtype
    else:
        np_dtype = np.dtype(
            dtype_util.convert_oneflow_dtype_to_numpy_dtype(container.dtype)
        )
    SLICE_LEN = SLICE_BYTES // np_dtype.itemsize
    start_idx = 0
    size = _ElemCnt(container.shape)
    cnt = 1
    # Walk axes from innermost to outermost until the accumulated block of
    # trailing dimensions no longer fits into one slice; `axis` is then the
    # axis along which slices are cut.
    for axis in reversed(range(len(container.shape))):
        cnt *= container.shape[axis]
        if cnt > SLICE_LEN:
            break
    # unit_size: element count of one index step along `axis`.
    unit_size = _ElemCnt(tuple(container.shape)[axis + 1 :])
    max_unit_num = SLICE_LEN // unit_size
    while start_idx < size:
        remainder = container.shape[axis]
        while remainder > 0:
            unit_num = max_unit_num if remainder >= max_unit_num else remainder
            length = unit_num * unit_size
            remainder -= unit_num
            stop_idx = start_idx + length
            start_nd_idx = np.unravel_index(start_idx, container.shape)
            # stop index is exclusive: unravel the last element, then +1.
            stop_nd_idx = np.unravel_index(stop_idx - 1, container.shape)
            stop_nd_idx = tuple([x + 1 for x in stop_nd_idx])
            yield start_nd_idx, stop_nd_idx, f(container, start_nd_idx, stop_nd_idx)
            start_idx = stop_idx
def generate_values_by_initializer(initializer, shape, dtype):
    """Materialize a numpy array of *shape*/*dtype* from an initializer callable."""
    elem_cnt = _ElemCnt(shape)
    np_dtype = np.dtype(dtype_util.convert_oneflow_dtype_to_numpy_dtype(dtype))
    return np.array(initializer(elem_cnt)).astype(np_dtype).reshape(shape)
def init_by_initializer_conf(
    var_blob: Union[EagerBlobTrait, "oneflow.Tensor"],
    initializer_conf: initializer_conf_util.InitializerConf,
    sync_between_multi_machine: bool,
    scope_symbol_id: Optional[int],
    random_seed: int = 0,
):
    """Fill *var_blob* slice-by-slice according to *initializer_conf*.

    No-op for empty initializers. When *sync_between_multi_machine* is True,
    waits for all eager assignments to finish before returning.
    """
    initializer = initializer_util.GetInitializer(
        initializer_conf, random_seed, var_blob.shape
    )
    # initializer is None if and only if the initializer_conf is empty_initializer
    if initializer is None:
        return
    def GenerateValueAndAssign(var_blob, start_nd_idx, stop_nd_idx):
        shape = np.array(stop_nd_idx) - np.array(start_nd_idx)
        vals = generate_values_by_initializer(initializer, shape, var_blob.dtype)
        if isinstance(var_blob, oneflow.Tensor):
            var_blob_object = var_blob._blob_object
        else:
            assert isinstance(var_blob, EagerBlobTrait)
            var_blob_object = var_blob.blob_object
        # Stage the values on cpu 0 and write them into the target region.
        slice_value_blob = _GetCpu0VariableBlobFromNumpy(vals, var_blob.dtype)
        _LogicalSliceAssign(
            var_blob_object,
            slice_value_blob.blob_object,
            start_nd_idx,
            stop_nd_idx,
            scope_symbol_id,
        )
    # we just want to run f on every slice without caring about the return value
    for _ in _ForEachSlice(var_blob, GenerateValueAndAssign):
        pass
    if sync_between_multi_machine:
        oneflow._oneflow_internal.eager.single_client.Sync()
def Init() -> None:
    """Initialize all variables of the default session.

    Variables configured with ``initialize_with_snapshot`` are restored from
    checkpoint files on disk; variables with an ``initializer`` are filled by
    running it. Variables with neither field are left untouched.
    """
    sync_default_session_if_normal()
    sess = session_ctx.GetDefaultSession()
    for op_name, var_blob in GetAllVariables().items():
        var_conf = sess.OpAttribute4InterfaceOpName(op_name).op_conf.variable_conf
        if not (
            var_conf.HasField("initializer")
            or var_conf.HasField("initialize_with_snapshot")
        ):
            continue
        if var_conf.HasField("initialize_with_snapshot"):
            initialize_with_snapshot_conf = var_conf.initialize_with_snapshot
            # Bug fix: use the explicit snapshot key when one is given and
            # fall back to the op name only when the "key" field is absent.
            # (The branch was previously inverted, which made the configured
            # key dead and always loaded from the op-name path.)
            if initialize_with_snapshot_conf.HasField("key"):
                snapshot_key = initialize_with_snapshot_conf.key
            else:
                snapshot_key = op_name
            var_dir = os.path.dirname(
                os.path.join(initialize_with_snapshot_conf.path, snapshot_key,)
            )
            LoadVariables({op_name: GetCheckpoint(var_dir)})
            continue
        scope_symbol_id = _GetScopeSymbolIdFromEagerBlob(var_blob)
        init_by_initializer_conf(
            var_blob, var_conf.initializer, False, scope_symbol_id, var_conf.random_seed
        )
    # Make sure every eager initialization instruction has completed.
    oneflow._oneflow_internal.eager.single_client.Sync()
| 37.199052 | 199 | 0.685098 |
480d1e5860b8d7e2e1793a124bc5631a62db03d3 | 2,761 | py | Python | mltoolkit/mldp/steps/collectors/unit_collector.py | stungkit/Copycat-abstractive-opinion-summarizer | 04fe5393a7bb6883516766b762f6a0c530e95375 | [
"MIT"
] | 51 | 2020-09-25T07:05:01.000Z | 2022-03-17T12:07:40.000Z | mltoolkit/mldp/steps/collectors/unit_collector.py | stungkit/Copycat-abstractive-opinion-summarizer | 04fe5393a7bb6883516766b762f6a0c530e95375 | [
"MIT"
] | 4 | 2020-10-19T10:00:22.000Z | 2022-03-14T17:02:47.000Z | mltoolkit/mldp/steps/collectors/unit_collector.py | stungkit/Copycat-abstractive-opinion-summarizer | 04fe5393a7bb6883516766b762f6a0c530e95375 | [
"MIT"
] | 22 | 2020-09-22T01:06:47.000Z | 2022-01-26T14:20:09.000Z | from .base_chunk_collector import BaseChunkCollector
from collections import OrderedDict
from mltoolkit.mldp.utils.tools import DataChunk
import numpy as np
class UnitCollector(BaseChunkCollector):
    """Collects a fixed number of data-units, then starts yielding them.

    Internally buffers per-field value batches (numpy arrays or lists) in an
    OrderedDict and concatenates them into one DataChunk when full.
    """
    def __init__(self, max_size):
        super(UnitCollector, self).__init__(max_size)
        # key: field_names, values: chunk field values in a list
        self._chunk_data_collector = None
        self.reset()
    def compile_chunk(self):
        """Returns a compiled data-chunk.

        Numpy batches are concatenated; list batches are flattened. Any
        other value type is unsupported.
        """
        dc = DataChunk()
        for k, v in self._chunk_data_collector.items():
            if isinstance(v[0], np.ndarray):
                dc[k] = np.concatenate(v)
            elif isinstance(v[0], list):
                dc[k] = []
                for _v in v:
                    dc[k] += _v
            else:
                raise NotImplementedError
        return dc
    def _append(self, k, v):
        # Buffer one batch of values for field `k`.
        # NOTE(review): _validate_input_value comes from the base collector —
        # presumably rejects unsupported value types; confirm in base class.
        self._validate_input_value(v)
        if k not in self._chunk_data_collector:
            self._chunk_data_collector[k] = []
        self._chunk_data_collector[k].append(v)
    def __getitem__(self, key):
        return self._chunk_data_collector[key]
    def __len__(self):
        # Number of collected units, measured on the first field's batches
        # (all fields are assumed to stay in sync).
        keys = list(self._chunk_data_collector.keys())
        if len(keys) == 0:
            return 0
        return sum([len(el) for el in self._chunk_data_collector[keys[0]]])
    def absorb_and_yield_if_full(self, data_chunk):
        """
        Adds the data-chunk to the collector, yields a new data_chunk if the
        collector is full. Large input chunks are absorbed in pieces so each
        yielded chunk holds exactly max_size units (except possibly the last).
        """
        start_indx = 0
        end_indx = len(data_chunk)
        while start_indx < end_indx:
            size_before = len(self)
            if self.max_size is None:
                tmp_end_indx = end_indx
            else:
                missing_count = self.max_size - size_before
                tmp_end_indx = min(start_indx + missing_count, end_indx)
            self._collect_missing_units(data_chunk, start_indx=start_indx,
                                        end_indx=tmp_end_indx)
            start_indx += (len(self) - size_before)
            # if it's full yield and reset
            if self.full():
                yield self.compile_chunk()
                self.reset()
    def yield_remaining(self):
        # Flush whatever is buffered, even if the collector is not full.
        if len(self):
            yield self.compile_chunk()
    def _collect_missing_units(self, data_chunk, start_indx, end_indx):
        """Stores units from the data-chunk to the collector."""
        # slice_indx = range(start_indx, end_indx)
        for k in data_chunk:
            self._append(k, data_chunk[k][start_indx:end_indx])
    def reset(self):
        self._chunk_data_collector = OrderedDict()
| 34.08642 | 76 | 0.596523 |
074013c839e98232d89d8cd87c8b97b465f8aba9 | 1,985 | py | Python | example/gbk2ig.py | GiugnoLab/PanDelo | 0aa4eb74c104be64e79138e902ce7dec84e01662 | [
"MIT"
] | 5 | 2018-12-12T13:34:26.000Z | 2022-03-03T00:26:54.000Z | example/gbk2ig.py | GiugnoLab/PanDelo | 0aa4eb74c104be64e79138e902ce7dec84e01662 | [
"MIT"
] | null | null | null | example/gbk2ig.py | GiugnoLab/PanDelo | 0aa4eb74c104be64e79138e902ce7dec84e01662 | [
"MIT"
] | 2 | 2020-01-12T19:26:00.000Z | 2020-03-11T20:05:23.000Z | #!/usr/bin/python3
import sys
from os import listdir
from os.path import isfile, join
import re
from Bio import SeqIO
# Global accumulators populated by read_gbk(); keys that refer to a CDS are
# (genome_id, sequence_id, locus_tag) tuples.
genome_length = dict()  # genome id -> summed length of all 'source' features
genome2cdstag = dict()  # genome id -> list of CDS tag tuples
cdstag2genome = dict()  # CDS tag tuple -> genome id
cdstag2product = dict()  # CDS tag tuple -> product description (tabs stripped)
cdsseqs = dict()  # CDS tag tuple -> translated protein sequence
def read_gbk(ifile, genome_id):
    """Parse one GenBank file and accumulate its CDS records into the
    module-level dictionaries under *genome_id*."""
    print(genome_id)
    genome_cdslist = genome2cdstag.get(genome_id, list())
    for record in SeqIO.parse(ifile, "genbank"):
        sequence_id = record.id
        #genome_id = record.annotations['accessions'][-1]
        print("\t"+genome_id+"\t"+sequence_id)
        for feature in record.features:
            #print(feature)
            if (feature.type == 'source'):
                # Sum up the length of every 'source' feature per genome.
                genome_length[genome_id] = genome_length.get(genome_id, 0) + feature.location.end
            elif (feature.type == 'CDS'):
                # Only CDS features with a protein translation are kept.
                if ('translation' in feature.qualifiers):
                    tag = (genome_id, sequence_id, feature.qualifiers['locus_tag'][0])
                    genome_cdslist.append(tag)
                    cdstag2genome[tag] = genome_id
                    cdsseqs[tag] = feature.qualifiers['translation'][0]
                    # Tabs are stripped because output fields are tab-separated.
                    cdstag2product[tag] = (feature.qualifiers['product'][0]).replace('\t','')
    genome2cdstag[genome_id] = genome_cdslist
# Usage: gbk2ig.py <input_dir> <output_file>
idir = sys.argv[1]
ofile = sys.argv[2]
print("reading gbk files from", idir)
gbkfiles = [f for f in listdir(idir) if isfile(join(idir, f)) and re.match('^.+\.gbk$', f)]
print(gbkfiles)
for gbk in gbkfiles:
    print(gbk)
    # Genome id is the file name without the .gbk extension.
    # NOTE(review): idir is concatenated without a path separator — assumes
    # the argument ends with '/'; verify callers.
    read_gbk(idir + gbk, re.sub('\.gbk$', '', gbk))
uniques = dict()
print('writing to', ofile)
with open(ofile, 'w') as off:
    for k in sorted(cdsseqs.keys()):
        gen_id = k[0]+":"+k[1]
        if gen_id not in uniques:
            uniques[ gen_id ] = dict()
        # Count duplicate locus tags per genome:sequence so each accession
        # gets a unique trailing counter.
        uniques[ gen_id ][k[2]] = uniques[ gen_id ].get(k[2],0) + 1
        cc = uniques[ gen_id ][k[2]]
        acc = k[0]+":"+k[1]+":"+k[2]+":"+str(cc)
        #off.write(k[0]+"\t"+k[1]+"\t"+ cdstag2product[k] +"\n")
        # Output format: genome <TAB> accession <TAB> product, then sequence.
        off.write(k[0]+"\t"+ acc +"\t"+ cdstag2product[k] +"\n")
        off.write(cdsseqs[k]+"\n")
887b9fea5b9474d31ec24fbd769b7fc9f1347a4a | 705 | py | Python | bootcamp/notifications/urls.py | gusbakker/bootcamp | 822140e5c2142c24ee226e2a1d59fcf413e771ad | [
"MIT"
] | 1 | 2021-03-12T15:40:29.000Z | 2021-03-12T15:40:29.000Z | bootcamp/notifications/urls.py | gusbakker/bootcamp | 822140e5c2142c24ee226e2a1d59fcf413e771ad | [
"MIT"
] | 3 | 2020-05-11T23:27:38.000Z | 2020-05-20T13:53:13.000Z | bootcamp/notifications/urls.py | gusbakker/bootcamp | 822140e5c2142c24ee226e2a1d59fcf413e771ad | [
"MIT"
] | 1 | 2020-10-01T22:27:21.000Z | 2020-10-01T22:27:21.000Z | from django.conf.urls import url
from bootcamp.notifications import views
# URL namespace for reversing, e.g. reverse("notifications:unread").
app_name = "notifications"
urlpatterns = [
    # Landing page listing the user's unread notifications.
    url(r"^$", views.NotificationUnreadListView.as_view(), name="unread"),
    # Mark a single notification (by slug) as read, then redirect.
    url(r"^mark-as-read/(?P<slug>[-\w]+)/$", views.mark_as_read, name="mark_as_read"),
    # AJAX variant of mark-as-read.
    url(r"^mark-as-read-ajax/$", views.mark_as_read_ajax, name="mark_as_read_ajax"),
    # Mark every notification of the current user as read.
    url(r"^mark-all-as-read/$", views.mark_all_as_read, name="mark_all_read"),
    url(
        r"^latest-notifications/$",
        views.get_latest_notifications,
        name="latest_notifications",
    ),
    url(
        r"^unread-notifications/$",
        views.get_unread_notifications,
        name="unread_notifications",
    ),
]
079a7ccc4491a860c0ba8d3fa61be901f12a8c5b | 241 | py | Python | authlib/jose/rfc7517/__init__.py | YPCrumble/authlib | 782a0fced780849418dc2a869528d10387e24b65 | [
"BSD-3-Clause"
] | 1 | 2020-08-04T08:29:39.000Z | 2020-08-04T08:29:39.000Z | authlib/jose/rfc7517/__init__.py | YPCrumble/authlib | 782a0fced780849418dc2a869528d10387e24b65 | [
"BSD-3-Clause"
] | 10 | 2020-09-30T05:41:05.000Z | 2021-11-03T08:55:31.000Z | authlib/jose/rfc7517/__init__.py | YPCrumble/authlib | 782a0fced780849418dc2a869528d10387e24b65 | [
"BSD-3-Clause"
] | 1 | 2020-07-28T17:05:40.000Z | 2020-07-28T17:05:40.000Z | """
authlib.jose.rfc7517
~~~~~~~~~~~~~~~~~~~~~
This module represents a direct implementation of
JSON Web Key (JWK).
https://tools.ietf.org/html/rfc7517
"""
from .models import Key, KeySet
__all__ = ['Key', 'KeySet']
| 17.214286 | 53 | 0.593361 |
01f98146f4c65f2070caf4af3b9d9a55426c16d3 | 2,325 | py | Python | homeassistant/components/notify/hangouts.py | XRyu/home-assistant | c9c707e368be159f0138a40d21fdea7a2a650ffe | [
"Apache-2.0"
] | 3 | 2019-01-31T13:41:37.000Z | 2020-05-20T14:22:18.000Z | homeassistant/components/notify/hangouts.py | XRyu/home-assistant | c9c707e368be159f0138a40d21fdea7a2a650ffe | [
"Apache-2.0"
] | null | null | null | homeassistant/components/notify/hangouts.py | XRyu/home-assistant | c9c707e368be159f0138a40d21fdea7a2a650ffe | [
"Apache-2.0"
] | 1 | 2021-05-31T08:13:56.000Z | 2021-05-31T08:13:56.000Z | """
Hangouts notification service.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/notify.hangouts/
"""
import logging
import voluptuous as vol
from homeassistant.components.notify import (ATTR_TARGET, PLATFORM_SCHEMA,
NOTIFY_SERVICE_SCHEMA,
BaseNotificationService,
ATTR_MESSAGE, ATTR_DATA)
from homeassistant.components.hangouts.const \
import (DOMAIN, SERVICE_SEND_MESSAGE, MESSAGE_DATA_SCHEMA,
TARGETS_SCHEMA, CONF_DEFAULT_CONVERSATIONS)
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = [DOMAIN]
# Platform config: requires a list of default target conversations.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_DEFAULT_CONVERSATIONS): [TARGETS_SCHEMA]
})
# Service call schema: optional explicit targets and message data payload.
NOTIFY_SERVICE_SCHEMA = NOTIFY_SERVICE_SCHEMA.extend({
    vol.Optional(ATTR_TARGET): [TARGETS_SCHEMA],
    vol.Optional(ATTR_DATA, default={}): MESSAGE_DATA_SCHEMA
})
def get_service(hass, config, discovery_info=None):
    """Get the Hangouts notification service."""
    default_conversations = config.get(CONF_DEFAULT_CONVERSATIONS)
    return HangoutsNotificationService(default_conversations)
class HangoutsNotificationService(BaseNotificationService):
    """Send notifications to Hangouts conversations."""

    def __init__(self, default_conversations):
        """Set up the notification service."""
        self._default_conversations = default_conversations

    def send_message(self, message="", **kwargs):
        """Send the message to the Google Hangouts server."""
        # Explicit targets override the configured default conversations.
        if ATTR_TARGET in kwargs:
            targets = [{'id': target} for target in kwargs.get(ATTR_TARGET)]
        else:
            targets = self._default_conversations
        messages = []
        if 'title' in kwargs:
            messages.append({'text': kwargs['title'], 'is_bold': True})
        messages.append({'text': message, 'parse_str': True})
        return self.hass.services.call(
            DOMAIN, SERVICE_SEND_MESSAGE,
            service_data={
                ATTR_TARGET: targets,
                ATTR_MESSAGE: messages,
                ATTR_DATA: kwargs[ATTR_DATA],
            })
| 33.695652 | 78 | 0.669677 |
90ea20d3a1e41814c1664989eb1e173bce6fbf21 | 1,559 | py | Python | plugins/dice.py | alissonlauffer/EduuRobot | ec6e43186c269ee090ed65722a9aa763e2d705fb | [
"MIT"
] | null | null | null | plugins/dice.py | alissonlauffer/EduuRobot | ec6e43186c269ee090ed65722a9aa763e2d705fb | [
"MIT"
] | null | null | null | plugins/dice.py | alissonlauffer/EduuRobot | ec6e43186c269ee090ed65722a9aa763e2d705fb | [
"MIT"
] | 1 | 2021-01-07T08:57:07.000Z | 2021-01-07T08:57:07.000Z | # Copyright (C) 2018-2020 Amano Team <contact@amanoteam.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import random
from config import bot, bot_username
async def dice(msg):
if msg.get('text'):
if msg['text'] == '/dados' or msg['text'] == '!dados' or msg['text'] == '/dados@' + bot_username:
dados = random.randint(1, 6)
await bot.sendMessage(msg['chat']['id'], '🎲 O dado parou no número: {}'.format(dados),
reply_to_message_id=msg['message_id'])
return True
| 48.71875 | 105 | 0.715202 |
d882d652a79fb9c6f47b0c22232a63d53dd17cfe | 2,051 | py | Python | python/sagemaker-multimodel-endpoint/examples/download_models.py | zaremb/aws-cdk-examples | 15df70601669504cec0cd62980bb73c955f9f779 | [
"Apache-2.0"
] | null | null | null | python/sagemaker-multimodel-endpoint/examples/download_models.py | zaremb/aws-cdk-examples | 15df70601669504cec0cd62980bb73c955f9f779 | [
"Apache-2.0"
] | null | null | null | python/sagemaker-multimodel-endpoint/examples/download_models.py | zaremb/aws-cdk-examples | 15df70601669504cec0cd62980bb73c955f9f779 | [
"Apache-2.0"
] | null | null | null | import requests
import os
import os.path
import tarfile
import shutil
resnet_18_params = (
"http://data.mxnet.io/models/imagenet/resnet/18-layers/resnet-18-0000.params"
)
resnet_18_symbols = (
"http://data.mxnet.io/models/imagenet/resnet/18-layers/resnet-18-symbol.json"
)
resnet_15_params = (
"http://data.mxnet.io/models/imagenet/resnet/152-layers/resnet-152-0000.params"
)
resnet_15_symbols = (
"http://data.mxnet.io/models/imagenet/resnet/152-layers/resnet-152-symbol.json"
)
synset = "http://data.mxnet.io/models/imagenet/synset.txt"
if not os.path.isdir("data/resnet_18"):
os.makedirs("data/resnet_18")
r = requests.get(resnet_18_params, allow_redirects=True)
open("data/resnet_18/resnet-18-0000.params", "wb").write(r.content)
r = requests.get(resnet_18_symbols, allow_redirects=True)
open("data/resnet_18/resnet-18-symbol.json", "wb").write(r.content)
r = requests.get(synset, allow_redirects=True)
open("data/resnet_18/synset.txt", "wb").write(r.content)
with open("data/resnet_18/resnet-18-shapes.json", "w") as file:
file.write('[{"shape": [1, 3, 224, 224], "name": "data"}]')
with tarfile.open("models/resnet_18.tar.gz", "w:gz") as tar:
tar.add("data/resnet_18", arcname=".")
if not os.path.isdir("data/resnet_152"):
os.makedirs("data/resnet_152")
r = requests.get(resnet_15_params, allow_redirects=True)
open("data/resnet_152/resnet-152-0000.params", "wb").write(r.content)
r = requests.get(resnet_15_symbols, allow_redirects=True)
open("data/resnet_152/resnet-152-symbol.json", "wb").write(r.content)
r = requests.get(synset, allow_redirects=True)
open("data/resnet_152/synset.txt", "wb").write(r.content)
with open("data/resnet_152/resnet-152-shapes.json", "w") as file:
file.write('[{"shape": [1, 3, 224, 224], "name": "data"}]')
with tarfile.open("models/resnet_152.tar.gz", "w:gz") as tar:
tar.add("data/resnet_152", arcname=".")
# Cleanup folder used for downloading models
shutil.rmtree("data")
| 33.622951 | 83 | 0.692833 |
514b729da09fcd2948bb18d663dfef1397bfd793 | 1,635 | py | Python | samples/core/visualization/table_test.py | rahulsmehta/pipelines | a0a8f1da8cb7ca53cde7717aa78e666b634fec75 | [
"Apache-2.0"
] | 2,860 | 2018-05-24T04:55:01.000Z | 2022-03-31T13:49:56.000Z | samples/core/visualization/table_test.py | rahulsmehta/pipelines | a0a8f1da8cb7ca53cde7717aa78e666b634fec75 | [
"Apache-2.0"
] | 7,331 | 2018-05-16T09:03:26.000Z | 2022-03-31T23:22:04.000Z | samples/core/visualization/table_test.py | rahulsmehta/pipelines | a0a8f1da8cb7ca53cde7717aa78e666b634fec75 | [
"Apache-2.0"
] | 1,359 | 2018-05-15T11:05:41.000Z | 2022-03-31T09:42:09.000Z | # Copyright 2021 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import kfp_server_api
import unittest
from pprint import pprint
from .table import table_pipeline
from kfp.samples.test.utils import KfpMlmdClient, run_pipeline_func, TestCase
import kfp
def verify(run: kfp_server_api.ApiRun, mlmd_connection_config,
           argo_workflow_name: str, **kwargs):
    """Assert the run succeeded and the visualization task emitted UI metadata."""
    checker = unittest.TestCase()
    checker.maxDiff = None  # always show the full diff on failure
    checker.assertEqual(run.status, 'Succeeded')
    mlmd_client = KfpMlmdClient(mlmd_connection_config=mlmd_connection_config)
    tasks = mlmd_client.get_tasks(argo_workflow_name=argo_workflow_name)
    pprint(tasks)
    viz_task = tasks['table-visualization']
    matches = [
        artifact for artifact in viz_task.outputs.artifacts
        if artifact.name == 'mlpipeline_ui_metadata'
    ]
    ui_metadata = matches[0]
    pprint(ui_metadata)
    checker.assertEqual(
        viz_task.get_dict()['outputs']['artifacts'][0]['name'],
        'mlpipeline_ui_metadata')
# Register the table-visualization sample with the test harness, executing in
# the V1 legacy (Argo-based) pipeline execution mode.
run_pipeline_func([
    TestCase(
        pipeline_func=table_pipeline,
        mode=kfp.dsl.PipelineExecutionMode.V1_LEGACY)
])
| 32.7 | 77 | 0.73945 |
f0b8183df83c1287da18244d3d02f50dee6a65f0 | 3,719 | py | Python | CLIMATExScience/air-pollution-index/data-cleaning/2017-Kedah.py | MY-Climate-Observatory/myco-data | 5203fa63c7ce609bbc9bbc4186f55da78befdc50 | [
"CC-BY-4.0"
] | null | null | null | CLIMATExScience/air-pollution-index/data-cleaning/2017-Kedah.py | MY-Climate-Observatory/myco-data | 5203fa63c7ce609bbc9bbc4186f55da78befdc50 | [
"CC-BY-4.0"
] | null | null | null | CLIMATExScience/air-pollution-index/data-cleaning/2017-Kedah.py | MY-Climate-Observatory/myco-data | 5203fa63c7ce609bbc9bbc4186f55da78befdc50 | [
"CC-BY-4.0"
] | 1 | 2021-12-16T04:56:09.000Z | 2021-12-16T04:56:09.000Z | # -*- coding: utf-8 -*-
"""
20 May 2020
Author: Xiandi Ooi
We will first download the file from the source.
Then, the data is organized to ease future analysis. Specific changes are as follows:
1. Datetime variable added;
2. Numerical API values are extracted;
3. Dominant pollutant type are extracted
"""
import requests
import pandas as pd
def download_file(url, file_name):
    """Download ``url`` and save the response body as ``file_name``.

    Note: the document is saved to the current working directory; change the
    path inside ``file_name`` if required.

    Args:
        url: URL of the file to be downloaded.
        file_name: File name under which to save the downloaded content.
    """
    try:
        response = requests.get(url)
        # requests does not raise on 4xx/5xx status codes by itself; without
        # this call the HTTPError handler below was unreachable dead code.
        response.raise_for_status()
    except requests.exceptions.HTTPError as http_err:
        print(f"HTTP error occurred: {http_err}")
    except Exception as err:
        print(f"Other error occurred: {err}")
    else:
        # Context manager guarantees the handle is closed even on write error.
        with open(file_name, "wb") as working_file:
            working_file.write(response.content)
def clean_data(file_name):
    """Clean the raw API (Air Pollution Index) spreadsheet into tidy form.

    Args:
        file_name: Path of the Excel file to be cleaned.

    Returns:
        A long-format ``pandas.DataFrame`` with one row per station reading,
        holding Datetime, Area, State, Dominant pollutant and API_Values.
    """
    df_input = pd.read_excel(file_name, skiprows=3)
    # Checking the basic information about the dataframe (optional)
    # print(df_input.info())
    # print(df_input.describe())

    # Rename the datetime column and drop the trailing footer row.
    df_output = df_input.rename(columns={"DATE/TIME": "Datetime"})
    df_output.drop(df_output.tail(1).index, inplace=True)

    # Un-pivot the per-station columns into (Station, API) rows.
    df_final = pd.melt(df_output, id_vars=["Datetime"],
                       value_vars=["Kompleks Sukan Langkawi, Kedah",
                                   "SM Agama Kedah, Alor Star",
                                   "Sek.Men. Keb. Tunku Ismail, Sungai Petani",
                                   "Kulim Hitech Park, Kulim"],
                       var_name="Station", value_name="API")

    # Map each station name to the (Area, State) labels used by the earlier
    # datasets in this series so years can be concatenated consistently.
    area_directory = {"Kompleks Sukan Langkawi, Kedah": ("Langkawi", "Kedah"),
                      "SM Agama Kedah, Alor Star": ("Alor Setar", "Kedah"),
                      "Sek.Men. Keb. Tunku Ismail, Sungai Petani": ("Bakar Arang, Sg. Petani", "Kedah"),
                      "Kulim Hitech Park, Kulim": ("Kulim Hi-Tech", "Kedah")}
    df_final["Site"] = df_final["Station"].map(area_directory)
    df_final[["Area", "State"]] = pd.DataFrame(df_final["Site"].tolist(), index=df_final.index)

    # Separate each API reading (e.g. "65*") into its numeric value and the
    # dominant-pollutant marker.
    # NOTE(review): the next line is a no-op (result discarded); the intent
    # was probably df_final["API"] = df_final["API"].astype(str) -- confirm
    # before changing, since .str.extract already handles object dtype.
    df_final["API"].astype(str)
    df_final["Dominant"] = df_final["API"].str.extract(r"(\D+)", expand=False)
    df_final["API_Values"] = df_final["API"].str.extract(r"(\d+)", expand=False)
    df_final["API_Values"] = pd.to_numeric(df_final["API_Values"], errors="coerce").fillna(0).astype(int)
    df_final = df_final.drop(columns=["Station", "Site", "API"])

    # Checking the basic information about the final dataframe (optional)
    # print(df_final.info())

    # Export output to new csv file (edit path and name as needed).
    # Bug fix: the original exported the intermediate df_output instead of
    # the cleaned df_final.
    df_final.to_csv(r"file_path\file_name.csv")
    return df_final
def main():
    """Download the 2017 Kedah API spreadsheet and run the cleaning step."""
    # Source: Malaysian open-data portal (data.gov.my), Kedah dataset.
    url = "http://www.data.gov.my/data/ms_MY/dataset/4cabd8d3-b099-44a0-8f94-47809b9c63f4/resource/8f9e3f72-833e-4f65-8976-2c01179d97e7/download/kedah.xlsx"
    file_name = "API_Kedah_2017.xlsx"
    download_file(url, file_name)
    clean_data(file_name)
if __name__ == "__main__":
    main()
| 41.322222 | 156 | 0.640764 |
f578d816f6c8f272f9de3a34f8647bf90c9fe461 | 11,886 | py | Python | wand/resource.py | sbraz/wand | 447ddb727c87b8329483ea5d6a70924345f6821e | [
"MIT"
] | null | null | null | wand/resource.py | sbraz/wand | 447ddb727c87b8329483ea5d6a70924345f6821e | [
"MIT"
] | null | null | null | wand/resource.py | sbraz/wand | 447ddb727c87b8329483ea5d6a70924345f6821e | [
"MIT"
] | null | null | null | """:mod:`wand.resource` --- Global resource management
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There is the global resource to manage in MagickWand API. This module
implements automatic global resource management through reference counting.
"""
import atexit
import contextlib
import ctypes
import warnings
from .api import library
from .compat import abc, string_type, text
from .exceptions import TYPE_MAP, WandException
from .version import MAGICK_VERSION_NUMBER
__all__ = ('genesis', 'limits', 'safe_copy', 'shutdown', 'terminus',
'DestroyedResourceError', 'Resource', 'ResourceLimits')
def genesis():
    """Instantiates the MagickWand API.
    .. warning::
       Don't call this function directly. Use :func:`increment_refcount()` and
       :func:`decrement_refcount()` functions instead.
    """
    # NOTE(review): the functions named in the warning are not visible in this
    # module; allocation bookkeeping (allocate_ref/deallocate_ref) appears to
    # drive genesis/terminus now -- consider refreshing the docstring.
    library.MagickWandGenesis()
def terminus():
    """Cleans up the MagickWand API.
    .. warning::
       Don't call this function directly. Use :func:`increment_refcount()` and
       :func:`decrement_refcount()` functions instead.
    """
    probe = library.IsMagickWandInstantiated
    # Old bindings do not expose IsMagickWandInstantiated at all; in that
    # case terminate unconditionally, otherwise only when instantiated.
    if probe is None or probe():
        library.MagickWandTerminus()
allocation_map = {}
def allocate_ref(addr, deallocator):
    """Record *deallocator* for the C resource at *addr*, booting the
    MagickWand environment on the very first registration."""
    global allocation_map
    if not allocation_map:
        genesis()
    if addr:
        allocation_map[addr] = deallocator
def deallocate_ref(addr):
    """Run and forget the deallocator registered for *addr*, if any."""
    global allocation_map
    deallocator = allocation_map.pop(addr, None)
    if callable(deallocator):
        deallocator(addr)
@atexit.register
def shutdown():
    """Release every still-registered resource and tear down MagickWand.

    Registered with :mod:`atexit` so leaked resources are reclaimed at
    interpreter exit.
    """
    global allocation_map
    # Bug fix: the original popped entries while iterating the dict, which
    # raises RuntimeError ("dictionary changed size during iteration") as
    # soon as more than one resource is still alive. Drain it instead.
    while allocation_map:
        addr, deallocator = allocation_map.popitem()
        if callable(deallocator):
            deallocator(addr)
    terminus()
def safe_copy(ptr):
    """Safely cast memory address to char pointer, convert to python string,
    and immediately free resources.

    :param ptr: The memory address to convert to text string.
    :type ptr: :class:`ctypes.c_void_p`
    :returns: :class:`tuple` (:class:`ctypes.c_void_p`, :class:`str`)

    .. versionadded:: 0.5.3
    """
    if not bool(ptr):
        # Nothing to copy or free for a NULL pointer.
        return ptr, None
    string = text(ctypes.cast(ptr, ctypes.c_char_p).value)
    # Hand the buffer back to ImageMagick; the call zeroes the pointer.
    ptr = library.MagickRelinquishMemory(ptr)
    return ptr, string
class Resource(object):
    """Abstract base class for MagickWand object that requires resource
    management. Its all subclasses manage the resource semiautomatically
    and support :keyword:`with` statement as well::
        with Resource() as resource:
            # use the resource...
            pass
    It doesn't implement constructor by itself, so subclasses should
    implement it. Every constructor should assign the pointer of its
    resource data into :attr:`resource` attribute inside of :keyword:`with`
    :meth:`allocate()` context. For example::
        class Pizza(Resource):
            '''My pizza yummy.'''
            def __init__(self):
                with self.allocate():
                    self.resource = library.NewPizza()
    .. versionadded:: 0.1.2
    """
    #: (:class:`ctypes.CFUNCTYPE`) The :mod:`ctypes` predicate function
    #: that returns whether the given pointer (that contains a resource data
    #: usually) is a valid resource.
    #:
    #: .. note::
    #:
    #:    It is an abstract attribute that has to be implemented
    #:    in the subclass.
    c_is_resource = NotImplemented
    #: (:class:`ctypes.CFUNCTYPE`) The :mod:`ctypes` function that destroys
    #: the :attr:`resource`.
    #:
    #: .. note::
    #:
    #:    It is an abstract attribute that has to be implemented
    #:    in the subclass.
    c_destroy_resource = NotImplemented
    #: (:class:`ctypes.CFUNCTYPE`) The :mod:`ctypes` function that gets
    #: an exception from the :attr:`resource`.
    #:
    #: .. note::
    #:
    #:    It is an abstract attribute that has to be implemented
    #:    in the subclass.
    c_get_exception = NotImplemented
    #: (:class:`ctypes.CFUNCTYPE`) The :mod:`ctypes` function that clears
    #: an exception of the :attr:`resource`.
    #:
    #: .. note::
    #:
    #:    It is an abstract attribute that has to be implemented
    #:    in the subclass.
    c_clear_exception = NotImplemented
    @property
    def resource(self):
        """Internal pointer to the resource instance. It may raise
        :exc:`DestroyedResourceError` when the resource has destroyed already.
        """
        if getattr(self, 'c_resource', None) is None:
            raise DestroyedResourceError(repr(self) + ' is destroyed already')
        return self.c_resource
    @resource.setter
    def resource(self, resource):
        # Delete the existing resource if there is one
        if getattr(self, 'c_resource', None):
            self.destroy()
        if self.c_is_resource(resource):
            self.c_resource = resource
            # Register the pointer in the module-level allocation map so the
            # matching deallocator runs even if this wrapper is leaked.
            allocate_ref(self.c_resource, self.c_destroy_resource)
        else:
            raise TypeError(repr(resource) + ' is an invalid resource')
    @resource.deleter
    def resource(self):
        # Unregister from the allocation map (which runs the stored
        # deallocator), then drop the pointer.
        if getattr(self, 'c_resource', None):
            deallocate_ref(self.c_resource)
        self.c_resource = None
    @contextlib.contextmanager
    def allocate(self):
        """Allocates the memory for the resource explicitly. Its subclasses
        should assign the created resource into :attr:`resource` attribute
        inside of this context. For example::
            with resource.allocate():
                resource.resource = library.NewResource()
        """
        yield self
    def destroy(self):
        """Cleans up the resource explicitly. If you use the resource in
        :keyword:`with` statement, it was called implicitly so have not to
        call it.
        """
        del self.resource
    def get_exception(self):
        """Gets a current exception instance.
        :returns: a current exception. it can be ``None`` as well if any
                  errors aren't occurred
        :rtype: :class:`wand.exceptions.WandException`
        """
        severity = ctypes.c_int()
        desc = self.c_get_exception(self.resource, ctypes.byref(severity))
        if severity.value == 0:
            return
        # Reset the C-side exception so it is only reported once.
        self.c_clear_exception(self.resource)
        # Map the C severity code onto the matching Python exception class.
        exc_cls = TYPE_MAP[severity.value]
        message = desc.value
        if not isinstance(message, string_type):
            message = message.decode(errors='replace')
        return exc_cls(message)
    def raise_exception(self, stacklevel=1):
        """Raises an exception or warning if it has occurred."""
        e = self.get_exception()
        if isinstance(e, Warning):
            # Warnings are reported but do not interrupt the caller.
            warnings.warn(e, stacklevel=stacklevel + 1)
        elif isinstance(e, Exception):
            raise e
    def make_blob(self, format=None):
        # Abstract hook; concrete subclasses override this to serialize the
        # resource into a binary blob.
        raise NotImplementedError
    def __enter__(self):
        """Enter the runtime context; the resource itself is the target."""
        return self
    def __exit__(self, type, value, traceback):
        """Exit the runtime context, destroying the underlying resource."""
        self.destroy()
    def __del__(self):
        try:
            self.destroy()
        except DestroyedResourceError:
            # Already destroyed (explicit destroy() or with-block exit);
            # nothing left to release.
            pass
# Inherits ReferenceError and AttributeError so existing callers catching
# either of those legacy types keep working.
class DestroyedResourceError(WandException, ReferenceError, AttributeError):
    """An error that rises when some code tries access to an already
    destroyed resource.
    .. versionchanged:: 0.3.0
       It becomes a subtype of :exc:`wand.exceptions.WandException`.
    """
class ResourceLimits(abc.MutableMapping):
    """Wrapper for MagickCore resource limits.
    Useful for dynamically reducing system resources before attempting risky,
    or slow running, :class:`~wand.image.Image` operations.
    For example::
        from wand.image import Image
        from wand.resource import limits
        # Use 100MB of ram before writing temp data to disk.
        limits['memory'] = 1024 * 1024 * 100
        # Reject images larger than 1000x1000.
        limits['width'] = 1000
        limits['height'] = 1000
        # Debug resources used.
        with Image(filename='user.jpg') as img:
            print('Using {0} of {1} memory'.format(limits.resource('memory'),
                                                   limits['memory']))
        # Dump list of all limits.
        for label in limits:
            print('{0} => {1}'.format(label, limits[label]))
    Available resource keys:
    - ``'area'`` - Maximum `width * height` of a pixel cache before writing to
      disk.
    - ``'disk'`` - Maximum bytes used by pixel cache on disk before exception
      is thrown.
    - ``'file'`` - Maximum cache files opened at any given time.
    - ``'height'`` - Maximum height of image before exception is thrown.
    - ``'list_length'`` - Maximum images in sequence. Only available with
      recent version of ImageMagick.
    - ``'map'`` - Maximum memory map in bytes to allocated for pixel cache
      before using disk.
    - ``'memory'`` - Maximum bytes to allocated for pixel cache before using
      disk.
    - ``'thread'`` - Maximum parallel task sub-routines can spawn - if using
      OpenMP.
    - ``'throttle'`` - Total milliseconds to yield to CPU - if possible.
    - ``'time'`` - Maximum seconds before exception is thrown.
    - ``'width'`` - Maximum width of image before exception is thrown.
    .. versionadded:: 0.5.1
    """
    # The tuple position of each label is what is passed to the C API (see
    # _to_idx), so the per-version orderings below must not be rearranged;
    # presumably they mirror the C-level ResourceType enums -- confirm
    # against MagickCore before editing.
    #: (:class:`tuple`) List of available resource types for ImageMagick-6.
    _limits6 = ('undefined', 'area', 'disk', 'file', 'map', 'memory', 'thread',
                'time', 'throttle', 'width', 'height')
    #: (:class:`tuple`) List of available resource types for ImageMagick-7.
    _limits7 = ('undefined', 'area', 'disk', 'file', 'height', 'map', 'memory',
                'thread', 'throttle', 'time', 'width', 'list_length')
    def __init__(self):
        # Pick the label ordering that matches the linked ImageMagick major
        # version.
        if MAGICK_VERSION_NUMBER < 0x700:
            self.limits = self._limits6
        else:
            self.limits = self._limits7
    def __getitem__(self, r):
        # Mapping read -> current limit for the resource label.
        return self.get_resource_limit(r)
    def __setitem__(self, r, v):
        # Mapping write -> install a new limit for the resource label.
        self.set_resource_limit(r, v)
    def __delitem__(self, r):
        # Deleting a key resets its limit to 0.
        self[r] = 0
    def __iter__(self):
        return iter(self.limits)
    def __len__(self):
        return len(self.limits)
    def _to_idx(self, resource):
        """Helper method to map resource string to enum value."""
        return self.limits.index(resource)
    def resource(self, resource):
        """Get the current value for the resource type.
        :param resource: Resource type.
        :type resource: :class:`basestring`
        :rtype: :class:`numeric.Integral`
        .. versionadded:: 0.5.1
        """
        return library.MagickGetResource(self._to_idx(resource))
    def get_resource_limit(self, resource):
        """Get the current limit for the resource type.
        :param resource: Resource type.
        :type resource: :class:`basestring`
        :rtype: :class:`numeric.Integral`
        .. versionadded:: 0.5.1
        """
        return library.MagickGetResourceLimit(self._to_idx(resource))
    def set_resource_limit(self, resource, limit):
        """Sets a new limit for resource type.
        .. note::
           The new limit value must be equal to, or less then, the maximum
           limit defined by the :file:`policy.xml`. Any values set outside
           normal bounds will be ignored silently.
        :param resource: Resource type.
        :type resource: :class:`basestring`
        :param limit: New limit value.
        :type limit: :class:`numeric.Integral`
        .. versionadded:: 0.5.1
        """
        # Ensure the MagickWand environment exists before touching limits.
        genesis()
        library.MagickSetResourceLimit(self._to_idx(resource), int(limit))
#: (:class:`ResourceLimits`) Helper to get & set Magick Resource Limits.
#:
#: .. versionadded:: 0.5.1
limits = ResourceLimits()
| 30.872727 | 79 | 0.62847 |
dbf3373dfe57bafb054426ad6c67ab31930533c6 | 67 | py | Python | Python/M01_ProgrammingBasics/L04_ForLoop/Lab/Solutions/P05_CharacterSequence.py | todorkrastev/softuni-software-engineering | cfc0b5eaeb82951ff4d4668332ec3a31c59a5f84 | [
"MIT"
] | null | null | null | Python/M01_ProgrammingBasics/L04_ForLoop/Lab/Solutions/P05_CharacterSequence.py | todorkrastev/softuni-software-engineering | cfc0b5eaeb82951ff4d4668332ec3a31c59a5f84 | [
"MIT"
] | null | null | null | Python/M01_ProgrammingBasics/L04_ForLoop/Lab/Solutions/P05_CharacterSequence.py | todorkrastev/softuni-software-engineering | cfc0b5eaeb82951ff4d4668332ec3a31c59a5f84 | [
"MIT"
] | 1 | 2022-02-23T13:03:14.000Z | 2022-02-23T13:03:14.000Z | text = input()
for each in range(len(text)):
print(text[each]) | 16.75 | 29 | 0.641791 |
b47668af830a6b5dadf1b31239675796b5f02e99 | 1,166 | py | Python | src/checksheet.py | buddseye/gmapspot | 353d6a59eb435c2526bbb2eca22009e4ae657b50 | [
"MIT"
] | null | null | null | src/checksheet.py | buddseye/gmapspot | 353d6a59eb435c2526bbb2eca22009e4ae657b50 | [
"MIT"
] | 4 | 2020-05-16T04:48:20.000Z | 2021-06-02T01:50:12.000Z | src/checksheet.py | buddseye/gmapspot | 353d6a59eb435c2526bbb2eca22009e4ae657b50 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import sys
import csv
from copy import deepcopy
from argparse import ArgumentParser
from util.data import read_csv_to_dict, match_import_data
# Column order of the generated check-sheet CSV written to stdout by main().
OUTPUT_FIELDNAMES = [
    "index",
    "id",
    "title",
    "number",
    "release",
    "description",
]
def get_args(args):
    """Parse the command-line options.

    Args:
        args: The full argument vector (``sys.argv``); ``args[0]`` is the
            program name.

    Returns:
        The parsed ``argparse.Namespace`` with an ``import_file`` attribute.
    """
    # Bug fix: the original passed the whole argv list as ArgumentParser's
    # first positional parameter (``prog``), garbling usage/help output.
    parser = ArgumentParser(prog=args[0] if args else None)
    parser.add_argument("-i", "--import-file")
    return parser.parse_args()
def update_dict(value, row_num, base=None):
    """Build one output check-sheet row from an input record.

    Args:
        value: Mapping with at least ``id`` and ``title`` keys.
        row_num: Zero-based row number written to the ``index`` column.
        base: Optional mapping whose entries seed the output row; it is
            deep-copied and never mutated. Defaults to an empty dict.

    Returns:
        A new dict carrying the OUTPUT_FIELDNAMES columns.
    """
    # Fix the mutable-default-argument anti-pattern; the old ``base={}`` was
    # only accidentally safe because of the deepcopy below.
    outdict = deepcopy(base) if base is not None else {}
    outdict["index"] = str(row_num)
    outdict["id"] = value["id"]
    outdict["title"] = value["title"]
    outdict["number"] = ""
    outdict["release"] = ""
    outdict["description"] = ""
    return outdict
def main():
    """Join stdin TSV rows against the import CSV and emit a check sheet."""
    args = get_args(sys.argv)
    import_dict = read_csv_to_dict(args.import_file)
    # Input arrives as tab-separated rows on stdin; output is CSV on stdout.
    reader = csv.DictReader(sys.stdin, delimiter="\t")
    writer = csv.DictWriter(sys.stdout, fieldnames=OUTPUT_FIELDNAMES)
    writer.writeheader()
    i = 0
    for r in reader:
        # Look up the matching import record by id, then overlay the row data.
        outdict = match_import_data(import_dict, r["id"])
        outdict2 = update_dict(r, i, outdict)
        writer.writerow(outdict2)
        i += 1
if __name__ == "__main__":
    main()
| 22.423077 | 69 | 0.639794 |
0a6dd9d3cdbb004730229f323e6487be6b159371 | 3,089 | py | Python | instagram/migrations/0001_initial.py | EmmanuelWanzala/instagramm | d5496216fd7f688f7d97ea3e083b2726fd697f7e | [
"MIT"
] | null | null | null | instagram/migrations/0001_initial.py | EmmanuelWanzala/instagramm | d5496216fd7f688f7d97ea3e083b2726fd697f7e | [
"MIT"
] | null | null | null | instagram/migrations/0001_initial.py | EmmanuelWanzala/instagramm | d5496216fd7f688f7d97ea3e083b2726fd697f7e | [
"MIT"
] | null | null | null | # Generated by Django 3.1.2 on 2020-10-20 14:43
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the instagram app: Profile, Post, Follow and
    Comment models. Auto-generated by Django (see header); edit with care.
    """
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        # Profile extends the auth user 1:1 with picture/bio/location.
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('profile_picture', models.ImageField(default='default.png', upload_to='images/')),
                ('bio', models.TextField(blank=True, default='My Bio', max_length=500)),
                ('name', models.CharField(blank=True, max_length=120)),
                ('location', models.CharField(blank=True, max_length=60)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='profile', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Posts belong to a Profile; likes reference auth users directly.
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('image', models.ImageField(upload_to='posts/')),
                ('name', models.CharField(blank=True, max_length=250)),
                ('caption', models.CharField(blank=True, max_length=250)),
                ('created', models.DateTimeField(auto_now_add=True, null=True)),
                ('likes', models.ManyToManyField(blank=True, related_name='likes', to=settings.AUTH_USER_MODEL)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='posts', to='instagram.profile')),
            ],
            options={
                'ordering': ['-pk'],
            },
        ),
        # Follow is a directed Profile->Profile edge (follower follows followed).
        migrations.CreateModel(
            name='Follow',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('followed', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='followers', to='instagram.profile')),
                ('follower', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='following', to='instagram.profile')),
            ],
        ),
        # Comments link a Profile author to a Post, newest first.
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('comment', models.TextField()),
                ('created', models.DateTimeField(auto_now_add=True, null=True)),
                ('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='instagram.post')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='instagram.profile')),
            ],
            options={
                'ordering': ['-pk'],
            },
        ),
    ]
11759d9e0370d6cb979fc1c92421291807e15aa0 | 17,199 | py | Python | tensorflow_federated/python/learning/framework/encoding_utils.py | zhihansh/federated-oss | 38cfcb05702ff7297db76d3ccb5f5afef53ca09b | [
"Apache-2.0"
] | 1 | 2022-02-08T01:11:14.000Z | 2022-02-08T01:11:14.000Z | tensorflow_federated/python/learning/framework/encoding_utils.py | zhihansh/federated-oss | 38cfcb05702ff7297db76d3ccb5f5afef53ca09b | [
"Apache-2.0"
] | null | null | null | tensorflow_federated/python/learning/framework/encoding_utils.py | zhihansh/federated-oss | 38cfcb05702ff7297db76d3ccb5f5afef53ca09b | [
"Apache-2.0"
] | null | null | null | # Copyright 2019, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pytype: skip-file
# This modules disables the Pytype analyzer, see
# https://github.com/tensorflow/federated/blob/main/docs/pytype.md for more
# information.
"""Utilities for building broadcast and aggregate functions with encoding.
This file contains utilities for building `tff.templates.MeasuredProcess`
objects using `Encoder` class from `tensor_encoding` project, to realize
encoding (compression) of values being communicated between `SERVER`
and `CLIENTS`.
"""
import collections
from typing import Callable
import attr
import tensorflow as tf
import tree
from tensorflow_federated.python.common_libs import py_typecheck
from tensorflow_federated.python.core.api import computations
from tensorflow_federated.python.core.impl.federated_context import intrinsics
from tensorflow_federated.python.core.impl.types import computation_types
from tensorflow_federated.python.core.impl.types import placements
from tensorflow_federated.python.core.impl.types import type_analysis
from tensorflow_federated.python.core.impl.types import type_conversions
from tensorflow_federated.python.core.templates import measured_process
from tensorflow_federated.python.learning import model as model_lib
from tensorflow_federated.python.learning import model_utils
from tensorflow_model_optimization.python.core.internal import tensor_encoding
# Type aliases.
_ModelConstructor = Callable[[], model_lib.Model]
_EncoderConstructor = Callable[[tf.TensorSpec],
tensor_encoding.core.SimpleEncoder]
_ALLOWED_ENCODERS = (tensor_encoding.core.SimpleEncoder,
tensor_encoding.core.GatherEncoder,
tensor_encoding.core.EncoderComposer)
@attr.s(eq=False, frozen=True)
class _NestGatherEncoder(object):
  """Structure for holding `tf_computations` needed for encoded_sum."""
  # Each attribute holds a tf_computation that operates on the entire
  # collection of values/encoders at once (not per-tensor).
  get_params_fn = attr.ib()
  encode_fn = attr.ib()
  decode_after_sum_fn = attr.ib()
  update_state_fn = attr.ib()
  zero_fn = attr.ib()
  accumulate_fn = attr.ib()
  merge_fn = attr.ib()
  report_fn = attr.ib()
def build_encoded_broadcast_process(value_type, encoders):
  """Builds `MeasuredProcess` for `value_type`, to be encoded by `encoders`.
  The returned `MeasuredProcess` has a next function with the TFF type
  signature:
  ```
  (<state_type@SERVER, {value_type}@CLIENTS> ->
   <state=state_type@SERVER, result=value_type@SERVER, measurements=()@SERVER>)
  ```
  Args:
    value_type: The type of values to be broadcasted by the `MeasuredProcess`.
      Either a `tff.TensorType` or a `tff.StructType`.
    encoders: A collection of `SimpleEncoder` objects to be used for encoding
      `values`. Must have the same structure as `values`.
  Returns:
    A `MeasuredProcess` of which `next_fn` encodes the input at `tff.SERVER`,
    broadcasts the encoded representation and decodes the encoded representation
    at `tff.CLIENTS`.
  Raises:
    ValueError: If `value_type` and `encoders` do not have the same structure.
    TypeError: If `encoders` are not instances of `SimpleEncoder`, or if
      `value_type` are not compatible with the expected input of the `encoders`.
  """
  py_typecheck.check_type(
      value_type, (computation_types.TensorType, computation_types.StructType))
  _validate_value_type_and_encoders(value_type, encoders,
                                    tensor_encoding.core.SimpleEncoder)
  # State lives at the server; its initial value comes from the encoders.
  initial_state_fn, state_type = _build_initial_state_tf_computation(encoders)
  @computations.federated_computation()
  def initial_state_comp():
    return intrinsics.federated_eval(initial_state_fn, placements.SERVER)
  encode_fn, decode_fn = _build_encode_decode_tf_computations_for_broadcast(
      state_type, value_type, encoders)
  @computations.federated_computation(initial_state_comp.type_signature.result,
                                      computation_types.FederatedType(
                                          value_type, placements.SERVER))
  def encoded_broadcast_comp(state, value):
    """Encoded broadcast federated_computation."""
    # Encode on the server, ship the compressed representation to clients,
    # then decode client-side. No measurements are produced.
    empty_metrics = intrinsics.federated_value((), placements.SERVER)
    new_state, encoded_value = intrinsics.federated_map(encode_fn,
                                                        (state, value))
    client_encoded_value = intrinsics.federated_broadcast(encoded_value)
    client_value = intrinsics.federated_map(decode_fn, client_encoded_value)
    return measured_process.MeasuredProcessOutput(
        state=new_state, result=client_value, measurements=empty_metrics)
  return measured_process.MeasuredProcess(
      initialize_fn=initial_state_comp, next_fn=encoded_broadcast_comp)
def _build_initial_state_tf_computation(encoders):
  """Utility for creating initial_state tf_computation.

  Returns:
    A tuple of the no-arg `tf_computation` producing the initial states of
    all `encoders`, and the `tff.Type` of its result.
  """
  @computations.tf_computation
  def initial_state_fn():
    return tf.nest.map_structure(lambda e: e.initial_state(), encoders)
  return initial_state_fn, initial_state_fn.type_signature.result
def _slice(encoders, nested_value, idx):
  """Takes a slice of nested values.
  We use a collection of encoders to encode a collection of values. When a
  method of the encoder returns a tuple, e.g., encode / decode params of the
  get_params method, we need to recover the matching collection of encode params
  and collection of decode params. This method is a utility to achieve this.
  Args:
    encoders: A collection of encoders.
    nested_value: A collection of indexable values of the same structure as
      `encoders`.
    idx: An integer. Index of the values in `nested_value` along which to take
      the slice.
  Returns:
    A collection of values of the same structure as `encoders`.
  """
  # Recursion stops at the structure of `encoders`, so each `t` below is the
  # indexable value (e.g. a tuple) produced for a single encoder.
  return tree.map_structure_up_to(encoders, lambda t: t[idx], nested_value)
def _build_encode_decode_tf_computations_for_broadcast(state_type, value_type,
                                                       encoders):
  """Utility for creating encode/decode tf_computations for broadcast."""
  @computations.tf_computation(state_type, value_type)
  def encode(state, value):
    """Encode tf_computation."""
    # Each encoder's encode() yields an (encoded_value, new_state) pair;
    # split the paired structure back into the two parallel collections.
    encoded_structure = tree.map_structure_up_to(
        encoders, lambda state, value, e: e.encode(value, state), state, value,
        encoders)
    encoded_value = _slice(encoders, encoded_structure, 0)
    new_state = _slice(encoders, encoded_structure, 1)
    return new_state, encoded_value
  # The decode computation's input type is the encoded-value part of the
  # encode computation's result.
  @computations.tf_computation(encode.type_signature.result[1])
  def decode(encoded_value):
    """Decode tf_computation."""
    return tree.map_structure_up_to(encoders, lambda e, val: e.decode(val),
                                    encoders, encoded_value)
  return encode, decode
def _build_tf_computations_for_gather(state_type, value_type, encoders):
"""Utility for creating tf_computations for encoded sum and mean.
This method maps a collection of GatherEncoder objects to partial computations
for encoding a collection of values jointly, and adds a logic for computing
the number of summands in decode_before_sum, once for the entire collection,
not on a per-value basis.
Args:
state_type: A `tff.Type` describing the collection of states handled by
`encoders`.
value_type: A `tff.Type` describing the collection of values to be encoded
by `encoders`.
encoders: A collection of `GatherEncoder` objects.
Returns:
A `_NestGatherEncoder` namedtuple holding the relevant tf_computations.
"""
@computations.tf_computation(state_type)
def get_params_fn(state):
params = tree.map_structure_up_to(encoders, lambda e, s: e.get_params(s),
encoders, state)
encode_params = _slice(encoders, params, 0)
decode_before_sum_params = _slice(encoders, params, 1)
decode_after_sum_params = _slice(encoders, params, 2)
return encode_params, decode_before_sum_params, decode_after_sum_params
encode_params_type = get_params_fn.type_signature.result[0]
decode_before_sum_params_type = get_params_fn.type_signature.result[1]
decode_after_sum_params_type = get_params_fn.type_signature.result[2]
# TODO(b/139844355): Get rid of decode_before_sum_params.
# We pass decode_before_sum_params to the encode method, because TFF currently
# does not have a mechanism to make a tff.SERVER placed value available inside
# of intrinsics.federated_aggregate - in production, this could mean an
# intermediary aggregator node. So currently, we send the params to clients,
# and ask them to send them back as part of the encoded structure.
@computations.tf_computation(value_type, encode_params_type,
decode_before_sum_params_type)
def encode_fn(x, encode_params, decode_before_sum_params):
encoded_structure = tree.map_structure_up_to(
encoders, lambda e, *args: e.encode(*args), encoders, x, encode_params)
encoded_x = _slice(encoders, encoded_structure, 0)
state_update_tensors = _slice(encoders, encoded_structure, 1)
return encoded_x, decode_before_sum_params, state_update_tensors
state_update_tensors_type = encode_fn.type_signature.result[2]
# This is not a @computations.tf_computation because it will be used below
# when bulding the computations.tf_computations that will compose a
# intrinsics.federated_aggregate...
# @tf.function
def decode_before_sum_tf_function(encoded_x, decode_before_sum_params):
part_decoded_x = tree.map_structure_up_to(
encoders, lambda e, *args: e.decode_before_sum(*args), encoders,
encoded_x, decode_before_sum_params)
one = tf.constant((1,), tf.int32)
return part_decoded_x, one
# ...however, result type is needed to build the subsequent tf_compuations.
@computations.tf_computation(encode_fn.type_signature.result[0:2])
def tmp_decode_before_sum_fn(encoded_x, decode_before_sum_params):
return decode_before_sum_tf_function(encoded_x, decode_before_sum_params)
part_decoded_x_type = tmp_decode_before_sum_fn.type_signature.result
del tmp_decode_before_sum_fn # Only needed for result type.
@computations.tf_computation(part_decoded_x_type,
decode_after_sum_params_type)
def decode_after_sum_fn(summed_values, decode_after_sum_params):
part_decoded_aggregated_x, num_summands = summed_values
return tree.map_structure_up_to(
encoders,
lambda e, x, params: e.decode_after_sum(x, params, num_summands),
encoders, part_decoded_aggregated_x, decode_after_sum_params)
@computations.tf_computation(state_type, state_update_tensors_type)
def update_state_fn(state, state_update_tensors):
return tree.map_structure_up_to(encoders,
lambda e, *args: e.update_state(*args),
encoders, state, state_update_tensors)
# Computations for intrinsics.federated_aggregate.
@computations.tf_computation
def zero_fn():
values = tf.nest.map_structure(
lambda s: tf.zeros(s.shape, s.dtype),
type_conversions.type_to_tf_tensor_specs(part_decoded_x_type))
state_update_tensors = tf.nest.map_structure(
lambda s: tf.zeros(s.shape, s.dtype),
type_conversions.type_to_tf_tensor_specs(state_update_tensors_type))
return _accumulator_value(values, state_update_tensors)
accumulator_type = zero_fn.type_signature.result
state_update_aggregation_modes = tf.nest.map_structure(
lambda e: tuple(e.state_update_aggregation_modes), encoders)
@computations.tf_computation(accumulator_type,
encode_fn.type_signature.result)
def accumulate_fn(acc, encoded_x):
"""Internal accumulate function."""
value, params, state_update_tensors = encoded_x
part_decoded_value = decode_before_sum_tf_function(value, params)
new_values = tf.nest.map_structure(tf.add, acc['values'],
part_decoded_value)
new_state_update_tensors = tf.nest.map_structure(
_accmulate_state_update_tensor, acc['state_update_tensors'],
state_update_tensors, state_update_aggregation_modes)
return _accumulator_value(new_values, new_state_update_tensors)
@computations.tf_computation(accumulator_type, accumulator_type)
def merge_fn(acc1, acc2):
new_values = tf.nest.map_structure(tf.add, acc1['values'], acc2['values'])
new_state_update_tensors = tf.nest.map_structure(
_accmulate_state_update_tensor, acc1['state_update_tensors'],
acc2['state_update_tensors'], state_update_aggregation_modes)
return _accumulator_value(new_values, new_state_update_tensors)
@computations.tf_computation(accumulator_type)
def report_fn(acc):
return acc
return _NestGatherEncoder(
get_params_fn=get_params_fn,
encode_fn=encode_fn,
decode_after_sum_fn=decode_after_sum_fn,
update_state_fn=update_state_fn,
zero_fn=zero_fn,
accumulate_fn=accumulate_fn,
merge_fn=merge_fn,
report_fn=report_fn)
def _validate_encoder(encoder, value, encoder_type):
assert encoder_type in [
tensor_encoding.core.SimpleEncoder, tensor_encoding.core.GatherEncoder
]
if not isinstance(encoder, encoder_type):
raise TypeError('Provided encoder must be an instance of %s.' %
encoder_type)
if not encoder.input_tensorspec.is_compatible_with(
tf.TensorSpec(value.shape, value.dtype)):
raise TypeError('Provided encoder and value are not compatible.')
def _validate_value_type_and_encoders(value_type, encoders, encoder_type):
"""Validates if `value_type` and `encoders` are compatible."""
if isinstance(encoders, _ALLOWED_ENCODERS):
# If `encoders` is not a container, then `value_type` should be an instance
# of `tff.TensorType.`
if not isinstance(value_type, computation_types.TensorType):
raise ValueError(
'`value_type` and `encoders` do not have the same structure.')
_validate_encoder(encoders, value_type, encoder_type)
else:
# If `encoders` is a container, then `value_type` should be an instance of
# `tff.StructType.`
if not type_analysis.is_structure_of_tensors(value_type):
raise TypeError('`value_type` is not compatible with the expected input '
'of the `encoders`.')
value_tensorspecs = type_conversions.type_to_tf_tensor_specs(value_type)
tf.nest.map_structure(lambda e, v: _validate_encoder(e, v, encoder_type),
encoders, value_tensorspecs)
def _accmulate_state_update_tensor(a, b, mode):
"""Accumulates state_update_tensors according to aggregation mode."""
if mode == tensor_encoding.core.StateAggregationMode.SUM:
return a + b
elif mode == tensor_encoding.core.StateAggregationMode.MIN:
return tf.minimum(a, b)
elif mode == tensor_encoding.core.StateAggregationMode.MAX:
return tf.maximum(a, b)
elif mode == tensor_encoding.core.StateAggregationMode.STACK:
raise NotImplementedError(
'StateAggregationMode.STACK is not supported yet.')
else:
raise ValueError('Not supported state aggregation mode: %s' % mode)
def _accumulator_value(values, state_update_tensors):
return collections.OrderedDict(
values=values, state_update_tensors=state_update_tensors)
# TODO(b/138081552): Move to tff.learning when ready.
def build_encoded_broadcast_process_from_model(
model_fn: _ModelConstructor,
encoder_fn: _EncoderConstructor) -> measured_process.MeasuredProcess:
"""Builds `MeasuredProcess` for weights of model returned by `model_fn`.
This method creates a `SimpleEncoder` for every weight of model created by
`model_fn`, as returned by `encoder_fn`.
Args:
model_fn: A Python callable with no arguments function that returns a
`tff.learning.Model`.
encoder_fn: A Python callable with a single argument, which is expected to
be a `tf.Tensor` of shape and dtype to be encoded. The function must
return a `tensor_encoding.core.SimpleEncoder`, which expects a `tf.Tensor`
with compatible type as the input to its `encode` method.
Returns:
A `MeasuredProcess` for encoding and broadcasting the weights of model
created by `model_fn`.
Raises:
TypeError: If `model_fn` or `encoder_fn` are not callable objects.
"""
py_typecheck.check_callable(model_fn)
py_typecheck.check_callable(encoder_fn)
weight_type = model_utils.weights_type_from_model(model_fn)
weight_tensor_specs = type_conversions.type_to_tf_tensor_specs(weight_type)
encoders = tf.nest.map_structure(encoder_fn, weight_tensor_specs)
return build_encoded_broadcast_process(weight_type, encoders)
| 43.105263 | 80 | 0.748125 |
9614375892ed30c779d95a3e4a36c6b324e68707 | 1,563 | py | Python | nvflare/app_common/abstract/shareable_generator.py | Can-Zhao/NVFlare | 52d9dee9972b32d49d9a8add52a652ce3bb878b5 | [
"Apache-2.0"
] | 155 | 2021-08-05T18:05:09.000Z | 2022-03-27T15:32:56.000Z | nvflare/app_common/abstract/shareable_generator.py | Can-Zhao/NVFlare | 52d9dee9972b32d49d9a8add52a652ce3bb878b5 | [
"Apache-2.0"
] | 216 | 2021-12-01T06:07:12.000Z | 2022-03-30T23:34:02.000Z | nvflare/app_common/abstract/shareable_generator.py | Can-Zhao/NVFlare | 52d9dee9972b32d49d9a8add52a652ce3bb878b5 | [
"Apache-2.0"
] | 44 | 2021-11-24T16:03:29.000Z | 2022-03-24T23:28:39.000Z | # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from abc import ABC, abstractmethod
from nvflare.apis.fl_component import FLComponent
from nvflare.apis.fl_context import FLContext
from nvflare.apis.shareable import Shareable
from nvflare.app_common.abstract.learnable import Learnable
class ShareableGenerator(FLComponent, ABC):
@abstractmethod
def learnable_to_shareable(self, model: Learnable, fl_ctx: FLContext) -> Shareable:
"""
generate the initial Shareable from the Learnable object.
Args:
model: model object
fl_ctx: FLContext
Returns:
shareable
"""
pass
@abstractmethod
def shareable_to_learnable(self, shareable: Shareable, fl_ctx: FLContext) -> Learnable:
"""
construct the Learnable object from Shareable
Args:
shareable: shareable
fl_ctx: FLContext
Returns:
model object
"""
pass
| 29.490566 | 91 | 0.690979 |
6f70d1165dbd2f9680ce3a21b212bf433b63d0db | 2,370 | py | Python | rfmizer/action.py | JunglistMNSQ/rfm | f42fa1424edbc9c57c9cd27d8183574f72acbf5c | [
"BSD-3-Clause"
] | null | null | null | rfmizer/action.py | JunglistMNSQ/rfm | f42fa1424edbc9c57c9cd27d8183574f72acbf5c | [
"BSD-3-Clause"
] | 4 | 2021-06-04T23:58:19.000Z | 2021-09-22T19:38:00.000Z | rfmizer/action.py | JunglistMNSQ/rfm | f42fa1424edbc9c57c9cd27d8183574f72acbf5c | [
"BSD-3-Clause"
] | null | null | null | from django.db import connection
from re import sub
from .models import ActionLog, Rules, Tab, User
from .sms import RocketSMS
class BalanceExeption(Exception):
pass
class GetItems:
sender = None
@classmethod
def select_from_db(cls, _id):
with connection.cursor() as c:
c.execute(f'SELECT from_to FROM rfmizer_rules '
f'WHERE id = {_id}')
return c.fetchone()[0].split(',')
@classmethod
def get_active_users(cls):
return User.objects.filter(is_active=True)
@classmethod
def get_active_tabs(cls):
users = cls.get_active_users()
return Tab.objects.filter(owner__in=users, on_off=True)
@classmethod
def get_rules(cls):
tabs = cls.get_active_tabs()
return Rules.objects.filter(tab__in=tabs, on_off_rule=True)
class ActionRFMizer(GetItems):
@classmethod
def run_rfmizer(cls):
tabs = cls.get_active_tabs()
for tab in tabs:
tab.rfmizer()
return True
class ActionRocketSMS(ActionRFMizer):
sender = RocketSMS
@classmethod
def run_rules(cls):
rules_list = cls.get_rules()
for rule in rules_list:
owner, message = rule.owner, rule.message
login, pass_hash = (
owner.profile.sms_login, owner.profile.sms_pass
)
moves = cls.select_from_db(rule.id)
clients = rule.tab.clients.filter(
rfm_move__in=moves, rfm_flag=True
)
try:
for client in clients:
msg = sub(r'{name}', client.name, message)
phone = client.phone.as_e164
balance = cls.sender.check_balance(
login, pass_hash, msg
)
if balance[0]:
res = cls.sender.send_sms(
login, pass_hash, phone, msg
)
event = f'{res}. Баланс {balance[1]}'
ActionLog.get_event(event, owner)
client.set_last_sent()
else:
ActionLog.get_event(balance[2], owner)
raise BalanceExeption
except BalanceExeption:
break
return True
| 29.625 | 67 | 0.540084 |
1a759f24fbacbb23e6e3b76b5fca7d2c58b44904 | 1,761 | py | Python | initialarticle/forms.py | haykhman/online-newspaper | ebe38be63f3f037160532b33eb76bf9a22dbf171 | [
"MIT"
] | 1 | 2018-12-10T15:50:21.000Z | 2018-12-10T15:50:21.000Z | initialarticle/forms.py | haykhman/online-newspaper | ebe38be63f3f037160532b33eb76bf9a22dbf171 | [
"MIT"
] | null | null | null | initialarticle/forms.py | haykhman/online-newspaper | ebe38be63f3f037160532b33eb76bf9a22dbf171 | [
"MIT"
] | null | null | null | from django import forms
from django.utils.translation import gettext as _
from initialarticle.models import InitialArticle
from articlecategory.models import ArticleCategoryManager
class InitialArticleCreateForm(forms.Form):
def __init__(self, *args, **kwargs):
request = kwargs.pop('request')
super(InitialArticleCreateForm, self).__init__(*args, **kwargs)
self.fields['category'] = forms.ModelChoiceField(ArticleCategoryManager().get_article_categories(request), widget =forms.Select(attrs = {'class' : 'form-control'}))
keywords = forms.CharField(label= _('Keywords'), max_length = 105, widget = forms.TextInput(attrs = {'class' : 'form-control'}))
def save(self, user):
return InitialArticle.objects.create_initial_article(by = user, category = self.cleaned_data['category'], keywords = self.cleaned_data['keywords'])
class InitialArticleEditForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
request = kwargs.pop('request')
super(InitialArticleEditForm, self).__init__(*args, **kwargs)
self.fields['category'] = forms.ModelChoiceField(ArticleCategoryManager().get_article_categories(request), widget=forms.Select(attrs = {'class' : 'form-control'}))
self.fields['keywords'] = forms.CharField(label= _('Keywords'), max_length = 105, widget = forms.TextInput(attrs = {'class' : 'form-control'}))
class Meta:
model = InitialArticle
fields = ['category', 'keywords']
def save_(self, user, article_keywords_object, commit = True):
return InitialArticle.objects.edit_initial_article(article_keywords_object = article_keywords_object, by = user, category = self.cleaned_data['category'], keywords = self.cleaned_data['keywords'])
| 50.314286 | 204 | 0.725156 |
87630da2e905446f96d47504ffd9e382e33b45e4 | 3,263 | py | Python | sdk/cognitiveservices/azure-cognitiveservices-search-autosuggest/setup.py | dubiety/azure-sdk-for-python | 62ffa839f5d753594cf0fe63668f454a9d87a346 | [
"MIT"
] | 1 | 2022-02-01T18:50:12.000Z | 2022-02-01T18:50:12.000Z | sdk/cognitiveservices/azure-cognitiveservices-search-autosuggest/setup.py | ellhe-blaster/azure-sdk-for-python | 82193ba5e81cc5e5e5a5239bba58abe62e86f469 | [
"MIT"
] | null | null | null | sdk/cognitiveservices/azure-cognitiveservices-search-autosuggest/setup.py | ellhe-blaster/azure-sdk-for-python | 82193ba5e81cc5e5e5a5239bba58abe62e86f469 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import re
import os.path
from io import open
from setuptools import find_packages, setup
# Change the PACKAGE_NAME only to change folder and different name
PACKAGE_NAME = "azure-cognitiveservices-search-autosuggest"
PACKAGE_PPRINT_NAME = "Auto Suggest"
# a-b-c => a/b/c
package_folder_path = PACKAGE_NAME.replace('-', '/')
# a-b-c => a.b.c
namespace_name = PACKAGE_NAME.replace('-', '.')
# azure v0.x is not compatible with this package
# azure v0.x used to have a __version__ attribute (newer versions don't)
try:
import azure
try:
ver = azure.__version__
raise Exception(
'This package is incompatible with azure=={}. '.format(ver) +
'Uninstall it with "pip uninstall azure".'
)
except AttributeError:
pass
except ImportError:
pass
# Version extraction inspired from 'requests'
with open(os.path.join(package_folder_path, 'version.py')
if os.path.exists(os.path.join(package_folder_path, 'version.py'))
else os.path.join(package_folder_path, '_version.py'), 'r') as fd:
version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
if not version:
raise RuntimeError('Cannot find version information')
with open('README.md', encoding='utf-8') as f:
readme = f.read()
with open('CHANGELOG.md', encoding='utf-8') as f:
changelog = f.read()
setup(
name=PACKAGE_NAME,
version=version,
description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME),
long_description=readme + '\n\n' + changelog,
long_description_content_type='text/markdown',
license='MIT License',
author='Microsoft Corporation',
author_email='azpysdkhelp@microsoft.com',
url='https://github.com/Azure/azure-sdk-for-python',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'License :: OSI Approved :: MIT License',
],
zip_safe=False,
packages=find_packages(exclude=[
'tests',
# Exclude packages that will be covered by PEP420 or nspkg
'azure',
'azure.cognitiveservices',
'azure.cognitiveservices.search',
]),
include_package_data=True,
package_data={
'pytyped': ['py.typed'],
},
install_requires=[
'msrest>=0.6.21',
'azure-common~=1.1',
'azure-mgmt-core>=1.2.0,<2.0.0',
],
extras_require={
":python_version<'3.0'": ['azure-cognitiveservices-search-nspkg'],
}
)
| 33.639175 | 91 | 0.610175 |
d747389d125bd8f730a256a8adb1828ce25d87f0 | 8,946 | py | Python | fixture/contacts.py | PaulRumyantsev/python_QA | 3d6809e49c2522da8cd912d2fe6f790df4293f5d | [
"Apache-2.0"
] | null | null | null | fixture/contacts.py | PaulRumyantsev/python_QA | 3d6809e49c2522da8cd912d2fe6f790df4293f5d | [
"Apache-2.0"
] | null | null | null | fixture/contacts.py | PaulRumyantsev/python_QA | 3d6809e49c2522da8cd912d2fe6f790df4293f5d | [
"Apache-2.0"
] | null | null | null | from model.contacts import Contacts
import re
from selenium.webdriver.support.ui import Select
class ContactHelper:
def __init__(self, app):
self.app = app
def open_home_page(self):
wd = self.app.wd
if not (wd.current_url.endswith("/addressbook/") and len(wd.find_elements_by_name("add")) > 0):
wd.find_element_by_link_text("home").click()
def return_to_home_page(self):
wd = self.app.wd
wd.find_element_by_link_text("home page").click()
def create(self, address):
wd = self.app.wd
# self.app.open_home_page()
self.open_contact_creation_page()
self.fill_contact_form(address)
# submit address creation
wd.find_element_by_xpath("//div[@id='content']/form/input[21]").click()
self.return_to_home_page()
self.contacts_cache = None
def fill_contact_form(self, address):
wd = self.app.wd
self.change_field_value("firstname", address.firstname)
self.change_field_value("middlename", address.middlename)
self.change_field_value("lastname", address.lastname)
self.change_field_value("title", address.title)
self.change_field_value("nickname", address.nickname)
self.change_field_value("company", address.company)
self.change_field_value("address", address.address)
self.change_field_value("home", address.homephone)
self.change_field_value("mobile", address.mobilephone)
self.change_field_value("work", address.workphone)
self.change_field_value("fax", address.fax)
self.change_field_value("email", address.email)
self.change_field_value("phone2", address.secondaryphone)
def change_field_value(self, field_firstname, text):
wd = self.app.wd
if text is not None:
wd.find_element_by_name(field_firstname).click()
wd.find_element_by_name(field_firstname).clear()
wd.find_element_by_name(field_firstname).send_keys(text)
def open_contact_creation_page(self):
wd = self.app.wd
wd.find_element_by_link_text("add new").click()
def delete_first_contact(self):
self.delete_contact_by_index(0)
def delete_contact_by_index(self, index):
wd = self.app.wd
self.app.open_home_page()
self.select_contact_by_index(index)
# delete contact
wd.find_element_by_xpath("//div[@id='content']/form[2]/div[2]/input").click()
wd.switch_to_alert().accept()
self.contacts_cache = None
def delete_contacts_by_id(self, id):
wd = self.app.wd
self.app.open_home_page()
self.select_contact_by_id(id)
# delete contact
wd.find_element_by_css_selector("input[value='Delete']").click()
wd.switch_to_alert().accept()
self.contacts_cache = None
def select_first_contact(self):
wd = self.app.wd
wd.find_element_by_name("selected[]").click()
def select_contact_by_index(self, index):
wd = self.app.wd
wd.find_elements_by_name("selected[]")[index].click()
def select_contact_by_id(self, id):
wd = self.app.wd
wd.find_element_by_css_selector("input[value='%s']" % id).click()
def modif_contact_by_index(self, index, new_contact_data):
wd = self.app.wd
self.app.open_home_page()
self.select_contact_by_index(index)
self.select_modif_contact_by_index(index)
# fill contact form
self.fill_contact_form(new_contact_data)
# update contact
wd.find_element_by_name("update").click()
self.return_to_home_page()
self.contacts_cache = None
def modif_contact_by_id(self, id, new_contact_data):
wd = self.app.wd
self.app.open_home_page()
self.select_contact_by_id(id)
self.select_modif_contact_by_id(id)
# fill contact form
self.fill_contact_form(new_contact_data)
# update contact
wd.find_element_by_name("update").click()
self.return_to_home_page()
self.contacts_cache = None
def select_modif_contact_by_index(self, index):
wd = self.app.wd
wd.find_elements_by_xpath("//img[@title='Edit']")[index].click()
def select_modif_contact_by_id(self, id):
wd = self.app.wd
wd.find_element_by_css_selector("a[href='edit.php?id=%s']" % id).click()
def modif_first_contact(self):
self.modif_contact_by_index(0)
def count(self):
wd = self.app.wd
self.app.open_home_page()
return len(wd.find_elements_by_name("selected[]"))
contacts_cache = None
def get_contacts_list(self):
if self.contacts_cache is None:
wd = self.app.wd
self.app.open_home_page()
self.contacts_cache = []
for row in wd.find_elements_by_name("entry"):
cells = row.find_elements_by_tag_name("td")
id = cells[0].find_element_by_tag_name("input").get_attribute("value")
lastname = cells[1].text
firstname = cells[2].text
address = cells[3].text
all_emails = cells[4].text
all_phones = cells[5].text
self.contacts_cache.append(Contacts(firstname=firstname, lastname=lastname, id=id, address=address,
all_phones_from_home_page=all_phones,
all_emails_from_home_page=all_emails))
return list(self.contacts_cache)
def open_contact_to_edit_by_index(self, index):
wd = self.app.wd
self.app.open_home_page()
row = wd.find_elements_by_name("entry")[index]
cell = row.find_elements_by_tag_name("td")[7]
cell.find_element_by_tag_name("a").click()
def open_contact_view_by_index(self, index):
wd = self.app.wd
self.app.open_home_page()
row = wd.find_elements_by_name("entry")[index]
cell = row.find_elements_by_tag_name("td")[6]
cell.find_element_by_tag_name("a").click()
def get_contact_info_from_edit_page(self, index):
wd = self.app.wd
self.open_contact_to_edit_by_index(index)
firstname = wd.find_element_by_name("firstname").get_attribute("value")
lastname = wd.find_element_by_name("lastname").get_attribute("value")
id = wd.find_element_by_name("id").get_attribute("value")
homephone = wd.find_element_by_name("home").get_attribute("value")
workphone = wd.find_element_by_name("work").get_attribute("value")
mobilephone = wd.find_element_by_name("mobile").get_attribute("value")
secondaryphone = wd.find_element_by_name("phone2").get_attribute("value")
address = wd.find_element_by_name("address").get_attribute("value")
email = wd.find_element_by_name("email").get_attribute("value")
email2 = wd.find_element_by_name("email2").get_attribute("value")
email3 = wd.find_element_by_name("email3").get_attribute("value")
return Contacts(firstname=firstname, lastname=lastname, id=id,
homephone=homephone, workphone=workphone, mobilephone=mobilephone,
secondaryphone=secondaryphone, address=address, email=email, email2=email2, email3=email3)
def get_contacts_from_view_page(self, index):
wd = self.app.wd
self.open_contact_view_by_index(index)
text = wd.find_element_by_id("content").text
homephone = re.search("H: (.*)", text).group(1)
workphone = re.search("W: (.*)", text).group(1)
mobilephone = re.search("M: (.*)", text).group(1)
fax = re.search("F: (.*)", text).group(1)
secondaryphone = re.search("P: (.*)", text).group(1)
return Contacts(homephone=homephone, workphone=workphone, mobilephone=mobilephone, fax=fax,
secondaryphone=secondaryphone)
def add_contacts_to_group(self, contact, group):
wd = self.app.wd
self.app.open_home_page()
wd.find_element_by_css_selector("input[value='%s']" % contact.id).click()
select = Select(wd.find_element_by_css_selector("select[name='to_group']"))
select.select_by_value('%s' % group.id)
wd.find_element_by_css_selector("input[value='Add to']").click()
wd.find_element_by_css_selector("a[href='./?group=%s']" % group.id).click()
def delete_contacts_from_group(self, contact, group):
wd = self.app.wd
self.open_home_page()
select = Select(wd.find_element_by_css_selector("select[name='group']"))
select.select_by_value('%s' % group.id)
wd.find_element_by_css_selector("input[id='%s']" % contact.id).click()
wd.find_element_by_css_selector("input[name='remove']").click()
wd.find_element_by_css_selector("a[href='./?group=%s']" % group.id).click()
| 42.398104 | 115 | 0.650347 |
bbbf032282bcd1ae49ca98936e58eeaf6c99e29c | 1,196 | py | Python | documentedlist/setup.py | djoshea/sphinxcontrib-matlabdomain | f1ae1e6defc206676d4490665c1cfcc4796b9df0 | [
"BSD-2-Clause"
] | 2 | 2017-04-25T17:00:35.000Z | 2021-02-14T03:57:16.000Z | documentedlist/setup.py | djoshea/sphinxcontrib-matlabdomain | f1ae1e6defc206676d4490665c1cfcc4796b9df0 | [
"BSD-2-Clause"
] | null | null | null | documentedlist/setup.py | djoshea/sphinxcontrib-matlabdomain | f1ae1e6defc206676d4490665c1cfcc4796b9df0 | [
"BSD-2-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import os
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
requires = ['Sphinx>=0.6']
setup(
name='sphinxcontrib-documentedlist',
version='0.3',
url='http://bitbucket.org/birkenfeld/sphinx-contrib',
download_url='http://pypi.python.org/pypi/sphinxcontrib-documentedlist',
license='BSD',
author='Chintalagiri Shashank',
author_email='shashank@chintal.in',
description='Sphinx DocumentedList extension',
long_description=read('README.rst'),
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Sphinx :: Extension',
'Topic :: Documentation',
'Topic :: Utilities',
],
platforms='any',
packages=find_packages(),
include_package_data=True,
install_requires=requires,
namespace_packages=['sphinxcontrib'],
)
| 29.170732 | 76 | 0.651338 |
d48a244ea7d1d20767135e526fc8bf142e091cfb | 2,871 | py | Python | src/ewaluacja2021/migrations/0001_initial.py | iplweb/django-bpp | 85f183a99d8d5027ae4772efac1e4a9f21675849 | [
"BSD-3-Clause"
] | 1 | 2017-04-27T19:50:02.000Z | 2017-04-27T19:50:02.000Z | src/ewaluacja2021/migrations/0001_initial.py | mpasternak/django-bpp | 434338821d5ad1aaee598f6327151aba0af66f5e | [
"BSD-3-Clause"
] | 41 | 2019-11-07T00:07:02.000Z | 2022-02-27T22:09:39.000Z | src/ewaluacja2021/migrations/0001_initial.py | iplweb/bpp | f027415cc3faf1ca79082bf7bacd4be35b1a6fdf | [
"BSD-3-Clause"
] | null | null | null | # Generated by Django 3.0.14 on 2021-10-11 08:26
import django.db.models.deletion
from django.db import migrations, models
import ewaluacja2021.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
("bpp", "0306_delete_ewaluacja2021liczbandlauczelni"),
]
operations = [
migrations.CreateModel(
name="LiczbaNDlaUczelni",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"liczba_n",
ewaluacja2021.fields.LiczbaNField(decimal_places=4, max_digits=9),
),
(
"dyscyplina_naukowa",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="bpp.Dyscyplina_Naukowa",
),
),
(
"uczelnia",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="bpp.Uczelnia"
),
),
],
options={
"verbose_name": "Liczba N dla uczelni",
"verbose_name_plural": "Liczby N dla uczelni",
"unique_together": {("uczelnia", "dyscyplina_naukowa")},
},
),
migrations.CreateModel(
name="LiczbaNDlaAutora",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"liczba_n",
ewaluacja2021.fields.LiczbaNField(decimal_places=4, max_digits=9),
),
(
"autor",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="bpp.Autor"
),
),
(
"dyscyplina_naukowa",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="bpp.Dyscyplina_Naukowa",
),
),
],
options={
"verbose_name": "liczba N dla autora",
"verbose_name_plural": "liczby N dla autora",
"unique_together": {("autor", "dyscyplina_naukowa")},
},
),
]
| 31.549451 | 86 | 0.405782 |
274bcff82164d9e3774bf1f0a6110319d1ec657f | 493 | py | Python | util/unormalize.py | pa0l09405/Master-Degree-Thesis | 2ee34da1e34d2ec6015456c691ef6624f3ebc622 | [
"BSD-3-Clause"
] | null | null | null | util/unormalize.py | pa0l09405/Master-Degree-Thesis | 2ee34da1e34d2ec6015456c691ef6624f3ebc622 | [
"BSD-3-Clause"
] | null | null | null | util/unormalize.py | pa0l09405/Master-Degree-Thesis | 2ee34da1e34d2ec6015456c691ef6624f3ebc622 | [
"BSD-3-Clause"
] | null | null | null | class UnNormalize(object):
def __init__(self, mean, std):
self.mean = mean
self.std = std
def __call__(self, tensor):
"""
Args:
tensor (Tensor): Tensor image of size (C, H, W) to be normalized.
Returns:
Tensor: Normalized image.
"""
#for t, m, s in zip(tensor, self.mean, self.std):
tensor.mul_(self.std).add_(self.mean)
# The normalize code -> t.sub_(m).div_(s)
return tensor | 30.8125 | 77 | 0.537525 |
579bb6760eac6b22bae98934a77cda95b7152df3 | 863 | py | Python | python/slideshot0.py | jitterbugs/hashcode2019 | 1c82fb278a063aea4146f479218941f94fc42a38 | [
"Apache-2.0"
] | null | null | null | python/slideshot0.py | jitterbugs/hashcode2019 | 1c82fb278a063aea4146f479218941f94fc42a38 | [
"Apache-2.0"
] | null | null | null | python/slideshot0.py | jitterbugs/hashcode2019 | 1c82fb278a063aea4146f479218941f94fc42a38 | [
"Apache-2.0"
] | null | null | null | #!/bin/python3
def get_ordered_slides(photosH, photosV):
print(len(photosH) + len(photosV)//2)
for p in photosH:
print(p.id)
for i in range(0, len(photosV), 2):
print(photosV[i].id, photosV[i+1].id)
class Photo:
def __init__(self, _id, orientation, tags):
self.id = _id
self.orientation = orientation
self.tags = tags
if __name__ == '__main__':
num_photos = int(input())
photosH = []
photosV = []
for i in range(num_photos):
inp = input().split()
if inp[0] == 'H':
photosH.append(Photo(i, inp[0], set(inp[2:])))
else:
photosV.append(Photo(i, inp[0], set(inp[2:])))
"""
for p in photosH:
print(p.orientation, p.tags)
for p in photosV:
print(p.orientation, p.tags)
"""
get_ordered_slides(photosH, photosV)
| 26.151515 | 58 | 0.565469 |
915603552aa694dbf46fb48f5724d9458c4863cb | 35,030 | py | Python | .history/neuroformer/model_perceiver_20220121144523.py | woanderer/neuroformer | df3462d55977b6c9adcb6753e7c474b8b76e8021 | [
"MIT"
] | null | null | null | .history/neuroformer/model_perceiver_20220121144523.py | woanderer/neuroformer | df3462d55977b6c9adcb6753e7c474b8b76e8021 | [
"MIT"
] | null | null | null | .history/neuroformer/model_perceiver_20220121144523.py | woanderer/neuroformer | df3462d55977b6c9adcb6753e7c474b8b76e8021 | [
"MIT"
] | null | null | null | # from code.transformer_vid.utils import convert_weights
# import rotary_embedding_torch
from torch.nn.modules.activation import GELU, ReLU
# from data.OneCombo3.trainer import TrainerConfig
import math
import numpy as np
import itertools
import logging
import torch
import torch.nn as nn
from torch.nn import functional as F
from torch.autograd import Variable
from torchvision.models.video import r3d_18
# from ResNet3D import r3d_18
from scipy.optimize import linear_sum_assignment
# from rotary_embedding_torch import apply_rotary_emb, RotaryEmbedding
from einops.layers.torch import Rearrange
logger = logging.getLogger(__name__)
def convert_weights(model: nn.Module):
    """Convert applicable model parameters to fp16, in place.

    Walks every submodule of *model* and halves the weights/biases of conv and
    linear layers, the projection tensors of nn.MultiheadAttention, and any
    `text_projection`/`proj` attributes (CLIP-style naming). Other modules
    (e.g. LayerNorm, Embedding) are deliberately left in fp32.
    """
    def _convert_weights_to_fp16(l):
        if isinstance(l, (nn.Conv1d, nn.Conv2d, nn.Linear)):  # nn.Conv3d,
            l.weight.data = l.weight.data.half()
            if l.bias is not None:
                l.bias.data = l.bias.data.half()

        if isinstance(l, nn.MultiheadAttention):
            # in/q/k/v projection weights plus the packed bias tensors
            for attr in [*[f"{s}_proj_weight" for s in ["in", "q", "k", "v"]], "in_proj_bias", "bias_k", "bias_v"]:
                tensor = getattr(l, attr)
                if tensor is not None:
                    tensor.data = tensor.data.half()

        for name in ["text_projection", "proj"]:
            if hasattr(l, name):
                attr = getattr(l, name)
                if attr is not None:
                    attr.data = attr.data.half()

    # nn.Module.apply visits every submodule recursively
    model.apply(_convert_weights_to_fp16)
class GPTConfig:
    """Base GPT config; parameters common to all GPT versions.

    Class attributes provide defaults; any keyword argument passed to the
    constructor is set as an instance attribute and overrides them.
    """

    # dropout probabilities
    embd_pdrop = 0.2
    resid_pdrop = 0.2
    attn_pdrop = 0.2
    pos_pdrop = 0.2
    temp_pdrop = 0.2
    # embedding toggles
    pos_emb = True
    temp_emb = True
    start_prune = 30
    epoch = 0

    def __init__(self, vocab_size, block_size, **kwargs):
        self.vocab_size = vocab_size
        self.block_size = block_size
        for name, value in kwargs.items():
            setattr(self, name, value)
class neuralGPTConfig:
    """Base GPT config; parameters common to all GPT versions.

    Variant with a shared dropout rate `n` applied to most dropout sites;
    keyword arguments override any default as instance attributes.
    """

    n = 0.4
    im_drop = 0.2
    id_drop = n
    embd_pdrop = n
    resid_pdrop = n
    attn_pdrop = n
    pos_pdrop = n
    temp_pdrop = n
    pos_emb = True
    temp_emb = True

    def __init__(self, vocab_size, block_size, **kwargs):
        self.vocab_size = vocab_size
        self.block_size = block_size
        for name, value in kwargs.items():
            setattr(self, name, value)
class GPT1Config(GPTConfig):
    """GPT-1-like network, roughly 125M params (12 layers, 12 heads, 768-d)."""
    n_layer = 12
    n_head = 12
    n_embd = 768
class VideoFeaturesExtractor(nn.Module):
    """Spatio-temporal feature extractor built on a pretrained R3D-18.

    Input follows the R3D convention: (B, 3, T, H, W) with H = W = 112.
    The classifier head and final pooling are stripped (last two children),
    and the backbone weights are converted to fp16 via convert_weights.
    """

    def __init__(self):
        super().__init__()
        # drop avgpool + fc; keep the convolutional trunk only
        self.backbone = torch.nn.Sequential(*(list(r3d_18(pretrained=True).children())[:-2]))
        convert_weights(self.backbone)
        # # freeze backbone
        # for k, v in self.backbone.named_parameters():
        #     v.requires_grad = False

    def forward(self, x):
        """Return features flattened to (B, T*H*W, C) token sequences."""
        # B = Batch, T, C, Fm, H, W
        features = self.backbone(x)  # (B, C, T, H, W)
        B, C, T, H, W = features.shape
        features = features.permute(0, 2, 3, 4, 1)
        features = features.view(B, -1, C)
        return features
class VideoEncoder(nn.Module):
    """Tokenize a video tensor (B, C, T, H, W) into flat patch embeddings.

    Each 16x16 spatial patch (all C channels) becomes one token of size
    n_embd, giving a (B, T*h*w, n_embd) sequence.
    """

    def __init__(self, n_embd):
        super().__init__()
        # Bug fix: the original `p1, p2 = 16` raised TypeError (cannot unpack
        # an int). Patch height and width are both 16.
        p1 = p2 = 16
        assert n_embd % (p1 * p2) == 0, "n_embd must be divisible by p1 * p2"
        c = n_embd // (p1 * p2)  # channels implied by the target embed size
        # Bug fix: the original interpolated concrete sizes into the einops
        # pattern as anonymous axes (e.g. '(16 16 3)'), which rearrange cannot
        # produce on the output side. Named axes with keyword sizes are the
        # supported form.
        self.to_patch_embedding = nn.Sequential(
            Rearrange('b c t (h p1) (w p2) -> b (t h w) (p1 p2 c)', p1=p1, p2=p2, c=c)
        )

    def forward(self, x):
        return self.to_patch_embedding(x)
class CausalSelfAttention(nn.Module):
    """
    A vanilla multi-head masked self-attention layer with a projection at the end.

    Besides the standard causal (lower-triangular) mask, training applies a
    random sparse mask (teacher-forcing-style dropout of attention columns),
    and per-sample padding positions can be masked out via `pad`.
    """

    def __init__(self, config):
        super().__init__()
        assert config.n_embd % config.n_head == 0
        self.config = config
        # key, query, value projections for all heads
        self.key = nn.Linear(config.n_embd, config.n_embd)
        self.query = nn.Linear(config.n_embd, config.n_embd)
        self.value = nn.Linear(config.n_embd, config.n_embd)
        # regularization
        self.attn_drop = nn.Dropout(config.attn_pdrop)
        self.resid_drop = nn.Dropout(config.resid_pdrop)
        # output projection
        self.proj = nn.Linear(config.n_embd, config.n_embd)
        # causal mask buffer, shape (1, 1, T, T)
        self.register_buffer("mask", self.build_mask(config.block_size))
        self.n_head = config.n_head
        self.att = None  # last attention map, kept for inspection/plotting
        self.T = config.block_size
        # self.rotary_embedding = RotarySpatioTemporalEmbedding(config)

    def build_mask(self, block_size):
        """Lower-triangular causal mask of shape (1, 1, block_size, block_size)."""
        mask = torch.tril(torch.ones((block_size, block_size)),
                          ).view(1, 1, block_size, block_size)
        return mask

    def generate_sparse_mask(self, att, p, config):
        """
        Generate a sparse mask according to p.

        Masks a random fraction p of attention columns (the same columns for
        every row), then un-masks one position in any row that ended up fully
        masked so softmax stays finite.
        NOTE(review): uses .cuda() directly, so this assumes single-GPU
        placement when att lives on a GPU.
        """
        assert p >= 0 and p <= 1, "p should be in [0, 1]"
        T = config.block_size
        mask = torch.rand((1, T)) < p
        mask = mask.repeat(T, 1)

        mask[0, 0] = False  # don't mask 1st step
        # check if any step is fully masked and umask it
        idx_all_true = (True == torch.all(mask, dim=0)).nonzero()
        for step in idx_all_true:
            sampler = torch.distributions.Uniform(low=0, high=step.item() + 1)
            idx_false = sampler.sample((1, 1)).long()
            mask[step, idx_false] = False

        # mask = mask.repeat(T, 1)
        mask = mask.view(1, 1, T, T).cuda() if att.is_cuda else mask.view(1, 1, T, T)
        att = att.masked_fill(mask, float('-inf'))
        return att

    def forward(self, x, pad=None, dtx=None):
        # B = Batch, T = Sequence, C = n_embed
        B, T, C = x.size()

        # calculate query, key, values for all head in batch and move head forward to the batch dim
        k = self.key(x).view(B, T, self.n_head, C // self.n_head).transpose(1, 2)    # (B, nh, T, hs)
        q = self.query(x).view(B, T, self.n_head, C // self.n_head).transpose(1, 2)  # (B, nh, T, hs)
        v = self.value(x).view(B, T, self.n_head, C // self.n_head).transpose(1, 2)  # (B, nh, T, hs)

        # # apply rotary embeddings
        # if dtx is not None:
        #     q, k = self.rotary_embedding(q, k, dtx)

        # causal self-attention; Self-attend: (B, nh, T, hs) x (B, nh, hs, T) -> (B, nh, T, T)
        att = (q @ k.transpose(-2, -1)) * (1.0 / math.sqrt(k.size(-1)))
        att = att.masked_fill(self.mask[:, :, :T, :T] == 0, float('-inf'))
        if self.training:
            # random column sparsification, training only
            att = self.generate_sparse_mask(att, 0.25, self.config)
        if pad is not None:
            for idx, i in enumerate(pad):
                att[idx, :, :, self.T - i:] = float('-inf')  # only able to see first padding token

        att = F.softmax(att, dim=-1)
        att = self.attn_drop(att)
        self.att = att
        y = att @ v  # (B, nh, T, T) x (B, nh, T, hs) -> (B, nh, T, hs)
        y = y.transpose(1, 2).contiguous().view(B, T, C)  # re-assemble all head outputs side by side

        # output projection
        y = self.resid_drop(self.proj(y))
        return y
class PositionalEmbedding(nn.Module):
    """Fixed sinusoidal positional encodings (Vaswani et al.).

    forward(x) ignores x's values and returns the encoding table sliced to
    x's sequence length, shape (1, T, n_embd), passed through dropout.
    """

    def __init__(self, n_embd, p_drop, max_len=1500):
        super().__init__()
        self.dropout = nn.Dropout(p=p_drop)
        # Precompute the sin/cos table once, in log space for the frequencies.
        positions = torch.arange(0, max_len).unsqueeze(1)
        freqs = torch.exp(torch.arange(0, n_embd, 2) *
                          -(math.log(10000.0) / n_embd))
        table = torch.zeros(max_len, n_embd)
        table[:, 0::2] = torch.sin(positions * freqs)
        table[:, 1::2] = torch.cos(positions * freqs)
        self.register_buffer('pe', table.unsqueeze(0))

    def forward(self, x):
        encoding = Variable(self.pe[:, :x.size(1)], requires_grad=False)
        return self.dropout(encoding)
# class RotarySpatioTemporalEmbedding(nn.Module):
# """ Rotary temporal embeddings - block_size = id_blk_sz """
# def __init__(self, config):
# super().__init__()
# self.frame_block_size = config.frame_block_size
# self.id_block_size = config.id_block_size
# self.emb = RotaryEmbedding(dim=32)
# def forward(self, q, k, t):
# b = t.shape[0]
# tf = self.frame_block_size
# queries = []
# keys = []
# for B in range(b):
# im_temp_emb = torch.tensor([-0.5] * (tf//2) + [0.5] * (tf//2))
# im_pos_emb = torch.arange(self.frame_block_size)
# im_emb = torch.stack([im_temp_emb, im_pos_emb], dim=0)
# id_temp_emb = self.temp_emb(t[B], cache_key=self.block_size)
# freqs = self.emb(torch.cat(im_emb, id_temp_emb))
# queries.append(apply_rotary_emb(freqs, q[B][None, ...]))
# keys.append(apply_rotary_emb(freqs, k[B][None, ...]))
# q, k = torch.cat(queries), torch.cat(keys)
# return q, k
class TemporalEmbedding(nn.Module):
    """Encode temporal position with fixed Fourier (sin/cos) signals.

    Identical construction to PositionalEmbedding: forward(x) returns the
    precomputed table sliced to x's sequence length, after dropout.
    """

    def __init__(self, n_embd, p_drop, max_len=1500):
        super().__init__()
        self.dropout = nn.Dropout(p=p_drop)
        # Build the encoding table once; frequencies decay geometrically.
        positions = torch.arange(0, max_len).unsqueeze(1)
        freqs = torch.exp(torch.arange(0, n_embd, 2) *
                          -(math.log(10000.0) / n_embd))
        table = torch.zeros(max_len, n_embd)
        table[:, 0::2] = torch.sin(positions * freqs)
        table[:, 1::2] = torch.cos(positions * freqs)
        self.register_buffer('pe', table.unsqueeze(0))

    def forward(self, x):
        encoding = Variable(self.pe[:, :x.size(1)], requires_grad=False)
        return self.dropout(encoding)
class LearntTemporalEmbedding(nn.Module):
    """
    Learnt projection of a scalar-time sequence B x T x 1 into B x T x C
    via a small two-layer GELU MLP with dropout.
    """

    def __init__(self, block_sz, n_embd, p_drop=0.2):
        super().__init__()
        hidden = n_embd // 2
        self.temp_emb = nn.Sequential(
            nn.Linear(1, hidden),
            nn.GELU(),
            nn.Linear(hidden, n_embd),
            nn.Dropout(p_drop),
        )

    def forward(self, x):
        # trailing singleton dim: each scalar time becomes a 1-d feature
        return self.temp_emb(x[..., None])
class Decoder(nn.Module):
    """Full nn.Transformer (3 encoder layers + n_layer decoder layers) used as
    a cross-attention decoder: `memory` is encoded, `tgt` is causally decoded
    against it. A fixed causal tgt mask of size id_block_size is registered as
    a buffer."""

    def __init__(self, config):
        super().__init__()
        # decoder_layer = nn.TransformerDecoderLayer(config.n_embd, config.n_head,
        #                                            activation='gelu', dropout=0.2, batch_first=True)
        # self.decoder = nn.TransformerDecoder(decoder_layer, config.n_layer)
        self.decoder = nn.Transformer(d_model=config.n_embd, nhead=config.n_head,
                                      num_encoder_layers=3, num_decoder_layers=config.n_layer,
                                      activation="gelu", dropout=0.4, batch_first=True)
        self.register_buffer("tgt_mask", self.generate_square_subsequent_mask(config.id_block_size))
        # self.register_buffer("tgt_pad_mask", self.generate_padding_mask(config.ids_block_size))
        self.T = config.id_block_size

    def generate_square_subsequent_mask(self, sz: int, pad=None):
        r"""Generate a square mask for the sequence. The masked positions are filled with float('-inf').
            Unmasked positions are filled with float(0.0).
        """
        mask = (torch.triu(torch.ones(sz, sz), diagonal=0) == 1).transpose(0, 1)
        mask = mask.float().masked_fill(mask == 0, float('-inf')).masked_fill(mask == 1, float(0.0))
        return mask

    def generate_padding_mask(self, sz: int, pad=None):
        r"""Build a (B x T) mask that resides on the GPU and can be
            manipulated by build_padding_mask according to padded sequence
        """
        mask = torch.zeros(1, sz, dtype=torch.bool)
        return mask

    def generate_sparse_mask(self, sz: int, pad=None):
        r"""Build a square mask that employs
            teacher forcing according to P
        """
        # keep ~75% of columns visible; masked columns are shared by all rows
        rand_mat = torch.rand(1, sz)
        k = round(0.75 * sz)
        k_th_quant = torch.topk(rand_mat, k, largest=False)[0][:, -1:]
        bool_tensor = rand_mat <= k_th_quant
        mask = torch.where(bool_tensor, torch.tensor(1), torch.tensor(0)).repeat(sz, 1)
        mask = mask.float().masked_fill(mask == 0, float('-inf')).masked_fill(mask == 1, float(0.0))
        # NOTE(review): assumes tgt_mask's device is the right target GPU
        return mask.cuda(self.tgt_mask.get_device()) if self.tgt_mask.is_cuda else mask

    def build_padding_mask(self, tgt, pad):
        """Per-batch boolean mask marking the last `pad[B]` positions as padding.
        Currently unused by forward (see below)."""
        # mask = self.tgt_pad_mask.repeat(tgt.shape[0], 1)
        mask = torch.zeros(tgt.shape[0], self.T, dtype=torch.bool)
        for B, P in enumerate(pad):
            mask[B, self.T - P:] = True
        return mask  # .to(torch.cuda.current_device())

    def forward(self, tgt, memory, pad):
        # NOTE(review): padding/sparse masks are computed by the helpers above
        # but deliberately disabled here — only the fixed causal mask is used.
        # padding_mask = self.build_padding_mask(tgt, pad)
        # tgt_mask = self.generate_sparse_mask(self.T) if self.training else self.tgt_mask
        return self.decoder(src=memory, tgt=tgt, tgt_mask=self.tgt_mask,
                            tgt_key_padding_mask=None)
class ProjectNorm(nn.Module):
    """LayerNorm followed by a 2x-expansion GELU MLP projecting to target_size."""

    def __init__(self, feat_size, target_size):
        super().__init__()
        hidden = math.floor(2 * feat_size)
        self.ln = nn.LayerNorm(feat_size)
        self.mlp = nn.Sequential(
            nn.Linear(feat_size, hidden, bias=False),
            nn.GELU(),
            nn.Linear(hidden, target_size, bias=False),
        )

    def forward(self, x):
        normed = self.ln(x)
        return self.mlp(normed)
class TimeProjection(nn.Module):
    """Project (B, seq_size, feat_size) to (B, id_seq_size, target_size):
    one MLP compresses the sequence axis, a second projects the feature axis."""

    def __init__(self, seq_size, id_seq_size, feat_size, target_size):
        super().__init__()
        self.mlp_seq = nn.Sequential(
            nn.Linear(seq_size, id_seq_size),
            nn.ReLU(),
            nn.Dropout(p=0.3),
            nn.Linear(id_seq_size, id_seq_size)
        )
        self.mlp_t = nn.Sequential(
            nn.Linear(feat_size, feat_size // 2),
            nn.ReLU(),
            nn.Dropout(p=0.3),
            nn.Linear(feat_size // 2, target_size)
        )

    def forward(self, x):
        seq_last = x.permute(0, 2, 1)        # B, T, C -> B, C, T
        compressed = self.mlp_seq(seq_last)  # B, C, id_seq
        feat_last = compressed.permute(0, 2, 1)  # B, id_seq, C
        return self.mlp_t(feat_last)         # B, id_seq, target
class PSTHProjection(nn.Module):
    """Takes the last block output (B, n_embd) and builds the PSTH table
    (B, id_vocab_size) via a 4x-expansion GELU MLP."""

    def __init__(self, config):
        super().__init__()
        hidden = 4 * config.n_embd
        self.mlp = nn.Sequential(
            nn.Linear(config.n_embd, hidden, bias=False),
            nn.Dropout(p=0.2),
            nn.GELU(),
            nn.Linear(hidden, config.id_vocab_size, bias=False)
        )

    def forward(self, x):
        return self.mlp(x)
# class PSTHProjection(nn.Module):
# def __init__(self, config):
# super().__init__()
# self.mlp_seq = nn.Sequential(
# nn.Linear(config.id_block_size, config.id_block_size // 2, bias=False),
# nn.GELU(),
# nn.Dropout(p=0.2),
# nn.Linear(config.id_block_size // 2, 1, bias=False)
# )
# self.mlp_t = nn.Sequential(
# nn.Linear(config.n_embd, config.n_embd * 4, bias=False),
# nn.GELU(),
# nn.Dropout(p=0.2),
# nn.Linear(config.n_embd * 4, config.id_vocab_size, bias=False)
# )
# def forward(self, x):
# x = x.transpose(-1, -2) # B, T, C -> B, C, T
# x = self.mlp_seq(x) # B, C, 1
# x = x.transpose(-2, -1) # B, 1, Vocab_id
# return self.mlp_t(x)
class TimeRNN(nn.Module):
    # NOTE(review): unfinished stub — the constructor only calls super();
    # no layers or forward() are defined, and nothing in this file uses it.
    def __init__(self, feat_size, target_size):
        super().__init__()
class Block(nn.Module):
    """An unassuming pre-norm Transformer block: causal self-attention plus a
    4x-expansion GELU MLP, each wrapped in a residual connection."""

    def __init__(self, config):
        super().__init__()
        self.ln1 = nn.LayerNorm(config.n_embd)
        self.ln2 = nn.LayerNorm(config.n_embd)
        self.attn = CausalSelfAttention(config)
        self.mlp = nn.Sequential(
            nn.Linear(config.n_embd, 4 * config.n_embd),
            nn.GELU(),
            nn.Linear(4 * config.n_embd, config.n_embd),
            nn.Dropout(config.resid_pdrop),
        )

    def forward(self, x, pad=None, dtx=None):
        attn_out = self.attn(self.ln1(x), pad)
        x = x + attn_out
        return x + self.mlp(self.ln2(x))
class BlockSequential(nn.Sequential):
    """nn.Sequential variant whose children accept (x, pad, dtx)."""

    def forward(self, x, pad=None, dtx=None):
        out = x
        for layer in self._modules.values():
            out = layer(out, pad, dtx)
        return out
class DiceLossPSTH(nn.Module):
    """Cross-entropy between the softmax of the sequence-summed logits and a
    uniform distribution over the target indices (a PSTH-style set loss).
    Despite the name, the dice computation itself is commented out."""

    def __init__(self, size_average=True, smooth=1):
        # NOTE(review): size_average/smooth are accepted but never stored/used.
        super().__init__()

    def cross_entropy(self, input, target):
        # input is expected to already be a probability distribution
        return torch.mean(-torch.sum(target * torch.log(input), 1))

    def forward(self, logits, targets, smooth=1, class_weights=None):
        # sum logits over the sequence dim, then layer-norm before softmax
        total_logits = F.layer_norm(torch.sum(logits, dim=-2), [logits.size()[-1]])
        # probs = F.log_softmax(logits, dim=-1)
        probs = F.softmax(total_logits, dim=-1)
        # logits = F.gelu(logits)
        # probs = logits / (logits.max(dim=-1).values.unsqueeze(-1))
        # flatten label and prediction tensors
        outputs = probs.contiguous().view(-1)
        targets = targets.contiguous().view(-1)
        # uniform mass over every index present in targets
        labels = torch.zeros_like(outputs)
        labels[targets] = 1 / len(targets)
        # intersection = (outputs * labels).sum()
        # dice = (2. * intersection + smooth) / (outputs.sum() + labels.sum() + smooth)
        return self.cross_entropy(outputs[None, ...], labels[None, ...])
class SetLoss(nn.Module):
    """Order-insensitive loss: at each step, compare the step's softmax to a
    uniform distribution over the *remaining* target ids from that step on."""

    def __init__(self):
        super().__init__()

    def cross_entropy(self, input, target):
        # input is expected to already be a probability distribution
        return torch.mean(-torch.sum(target * torch.log(input), 1))

    def forward(self, logits, targets):
        targets = targets.contiguous().view(-1)
        loss = 0
        for n_step, n_logits in enumerate(logits):
            n_logits = F.softmax(n_logits, dim=-1)
            n_target = targets[n_step:]
            n_target_dist = torch.zeros_like(n_logits)
            if len(n_target) != 0:
                n_target_dist[n_target] = 1 / len(n_target)
                loss += self.cross_entropy(n_logits[None, ...], n_target_dist[None, ...])
        return loss / len(logits)
class TruncatedLoss(nn.Module):
    """Generalized cross-entropy (L_q) loss with per-sample truncation weights,
    for training with noisy labels. `weight` holds one {0,1} weight per
    training example, updated by update_weight."""

    def __init__(self, q=0.8, k=0.2, trainset_size=50000):
        super(TruncatedLoss, self).__init__()
        self.q = q  # L_q exponent
        self.k = k  # truncation threshold probability
        self.weight = torch.nn.Parameter(data=torch.ones(trainset_size, 1), requires_grad=False)

    def forward(self, logits, targets, indexes):
        p = F.softmax(logits, dim=-1)
        # probability assigned to the gold class, per position
        Yg = torch.gather(p, 2, targets.unsqueeze(2))

        loss = ((1 - (Yg ** self.q)) / self.q) * self.weight[indexes] - ((1 - (self.k ** self.q)) / self.q) * self.weight[indexes]
        loss = torch.mean(loss)

        return loss

    def update_weight(self, logits, targets, indexes):
        """Set weight[i]=1 only where the current L_q loss beats the truncation
        bound; effectively prunes examples the model finds implausible.
        NOTE(review): uses torch.cuda.FloatTensor, so this requires CUDA."""
        p = F.softmax(logits, dim=-1)
        Yg = torch.gather(p, 2, targets.unsqueeze(2))
        Lq = ((1 - (Yg ** self.q)) / self.q)
        Lqk = np.repeat(((1 - (self.k ** self.q)) / self.q), targets.size(0))
        Lqk = torch.from_numpy(Lqk).type(torch.cuda.FloatTensor)
        Lqk = torch.unsqueeze(Lqk, 1)

        condition = torch.gt(Lqk, Lq)
        self.weight[indexes] = condition.type(torch.cuda.FloatTensor)
# class PSTHLOSS(nn.Module):
# def __init__(self):
# super().__init__()
# def forward(self, logits, targets):
# total_logits = torch.sum(logits, dim=-2) # sum over sequence dimension
# probs = F.softmax(total_logits, dim=-1)
# outptu
class HungarianMatcher(nn.Module):
    """Optimal bipartite matching (DETR-style) between predicted positions and
    target ids, using scipy's linear_sum_assignment on a (1 - p) cost."""

    def __init__(self):
        super().__init__()

    @torch.no_grad()
    def forward(self, logits, targets):
        T, C = logits.size()
        probs = F.softmax(logits, dim=-1)
        # cost of assigning prediction t to target j: 1 - p_t(target_j)
        cost_id = (1 - probs[:, targets]).cpu().view(T, -1).unsqueeze(0)
        indices = [linear_sum_assignment(c[i]) for i, c in enumerate(cost_id.split(len(targets), -1))]
        # list of (pred_indices, target_indices) tensor pairs
        return [(torch.as_tensor(i, dtype=torch.int64), torch.as_tensor(j, dtype=torch.int64)) for i, j in indices]
class KLDivLoss(nn.Module):
    """KL divergence between softmax(logits) and a target distribution.

    nn.KLDivLoss expects the *input* to be float log-probabilities.
    """

    def __init__(self):
        super().__init__()
        self.log_softmax = nn.LogSoftmax(dim=-1)
        self.KLdiv = nn.KLDivLoss()

    def forward(self, logits, targets):
        log_probs = self.log_softmax(logits)
        # Bug fix: the original passed log_probs.long(), truncating every
        # log-probability to an integer (and detaching the dtype expected by
        # KLDivLoss), which destroys the loss signal.
        return self.KLdiv(log_probs, targets)
class PoissonCrossEntropyLoss(nn.Module):
    """Poisson NLL applied to log-softmax outputs: the per-class log-probs are
    treated as log-rates (PoissonNLLLoss with its default log_input=True)."""

    def __init__(self):
        super().__init__()
        self.log_softmax = nn.LogSoftmax(dim=-1)
        self.nll_poisson = nn.PoissonNLLLoss()

    def forward(self, logits, targets):
        log_rates = self.log_softmax(logits)
        return self.nll_poisson(log_rates, targets)
class GPT(nn.Module):
    """The full GPT language model, with a context size of block_size.

    Fuses three token streams — previous-interval spike ids, current spike
    ids, and video-frame patches — through two cross-attention Decoders
    ("perceiver" path), then predicts neuron-id logits and spike-time-bin
    logits. Commented-out alternatives (GPT decoder, contrastive/PSTH heads,
    alternative losses) are kept for reference.
    """

    def __init__(self, config):
        super().__init__()

        self.device = 'cpu'
        if torch.cuda.is_available():
            self.device = torch.cuda.current_device()

        self.config = config
        # input embedding stem
        self.n_embd = config.n_embd
        self.tok_emb = nn.Embedding(config.id_vocab_size, config.n_embd)
        self.pos_emb = PositionalEmbedding(config.n_embd, p_drop=0.2)
        # self.pos_emb_id = nn.Parameter(torch.zeros(1, config.id_block_size, config.n_embd))
        # learnt positional embedding for the frame-patch tokens
        self.pos_emb_frames = nn.Parameter(torch.zeros(1, config.frame_block_size, config.n_embd))
        # self.temp_emb = TemporalEmbedding(config.n_embd, p_drop=0.2)
        # self.temp_emb = RotaryTemporalEmbedding(config.id_block_size)
        self.temp_emb = LearntTemporalEmbedding(config.id_block_size, config.n_embd)
        self.frame_temp_emb = LearntTemporalEmbedding(config.frame_block_size, config.n_embd)
        self.id_drop = nn.Dropout(config.id_drop)
        self.im_drop = nn.Dropout(config.im_drop)
        self.drop = nn.Dropout(config.embd_pdrop)

        # -- Visual Backbone -- #
        # self.visual_backbone = VideoFeaturesExtractor()
        # NOTE(review): VideoEncoder.__init__ takes n_embd but no argument is
        # passed here — as written this call raises TypeError; confirm which
        # VideoEncoder revision this file is meant to pair with.
        self.video_encoder = VideoEncoder()
        # 20 time stamps 0.05 apart, each repeated over one frame's patches
        frame_temp_emb = torch.tensor(list(itertools.chain(*[[n * 0.05] * (config.frame_block_size // 20) for n in range(20)]))).unsqueeze(0)
        self.register_buffer("frame_temp_emb_seq", frame_temp_emb)

        # -- Contrastive Loss -- ##
        # self.proj_id = ProjectNorm(config.n_embd, config.n_embd)
        # self.proj_vid = VidProjectNorm(config.n_embd, config.n_embd)  # im_shape

        ## -- IM_Decoder -- ##
        # self.blocks_id = BlockSequential(*[Block(config) for _ in range(2)])
        # self.blocks_im = BlockSequential(*[Block(config) for _ in range(2)])
        # self.ln_f_id = nn.LayerNorm(config.n_embd)
        # self.ln_f_im = nn.LayerNorm(config.n_embd)

        ## -- Decoder -- ##
        # self.ln_f = nn.LayerNorm(config.n_embd)
        ## GPT
        # self.blocks = BlockSequential(*[Block(config) for _ in range(config.n_layer)])
        # self.ln_f = nn.LayerNorm(config.n_embd)
        ## enc_dec
        self.state_decoder = Decoder(config)          # id tokens x previous-interval ids
        self.ln_f_state_dec = nn.LayerNorm(config.n_embd)
        self.stimulus_decoder = Decoder(config)       # id tokens x frame patches
        self.ln_f_stimulus_dec = nn.LayerNorm(config.n_embd)
        self.head = nn.Linear(config.n_embd, config.vocab_size, bias=False)

        ## -- Time -- ##
        # self.proj_time = TimeProjection(config.block_size, config.id_block_size, config.n_embd, config.n_dt)
        self.proj_time = ProjectNorm(config.n_embd, config.n_dt)
        # self.proj_time = ProjectNorm(config.n_embd, 1)

        ## -- PSTH -- ##
        # self.proj_psth = PSTHProjection(config)

        # Loss
        # self.dice_loss = DiceLossPSTH()
        # self.poisson_loss = PoissonCrossEntropyLoss()
        # self.hungarian_matcher = HungarianMatcher()
        # self.kldiv_loss = KLDivLoss()
        # self.truncated_loss = TruncatedLoss(trainset_size=config.data_size)
        # self.set_loss = SetLoss()
        # self.a = torch.tensor(0.5, requires_grad=True)

        self.block_size = config.block_size
        self.apply(self._init_weights)

        # per-target-type class weights become buffers so they follow .to(device)
        if config.class_weights is not None:
            for key in config.class_weights.keys():
                self.register_buffer(f"class_weights_{key}", config.class_weights[key])

        logger.info("number of parameters: %e", sum(p.numel() for p in self.parameters()))

    def get_block_size(self):
        """Return the model's context length."""
        return self.block_size

    def _init_weights(self, module):
        """GPT-style init: N(0, 0.02) for Linear/Embedding weights, zeros for
        biases, ones/zeros for LayerNorm."""
        if isinstance(module, (nn.Linear, nn.Embedding)):
            module.weight.data.normal_(mean=0.0, std=0.02)
            if isinstance(module, nn.Linear) and module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.LayerNorm):
            module.bias.data.zero_()
            module.weight.data.fill_(1.0)

    def configure_optimizers(self, train_config):
        """
        Separates parameters into those who will experience weight decay and those that will not
        """
        if train_config.decay_weights:
            decay = set()
            no_decay = set()
            whitelist_weight_modules = (torch.nn.Linear, )
            blacklist_weight_modules = (torch.nn.LayerNorm, torch.nn.Embedding)
            for mn, m in self.named_modules():
                for pn, p in m.named_parameters():
                    fpn = '%s.%s' % (mn, pn) if mn else pn  # full param name
                    if pn.endswith('bias'):
                        # all biases will not be decayed
                        no_decay.add(fpn)
                    elif pn.endswith('weight') and isinstance(m, whitelist_weight_modules):
                        # weights of whitelist modules will be weight decayed
                        decay.add(fpn)
                    elif pn.endswith('weight') and isinstance(m, blacklist_weight_modules):
                        # weights of blacklist modules will NOT be weight decayed
                        no_decay.add(fpn)
                    else: no_decay.add(fpn)

            # special case the position embedding parameter in the root GPT module as not decayed
            black_list_mods = ['pos_emb', 'temp_emb']
            for mods in black_list_mods:
                for name, param in self.named_parameters():
                    if mods in name:
                        no_decay.add(name)  # also pos_emb

            # validate that we considered every parameter
            param_dict = {pn: p for pn, p in self.named_parameters()}
            # a name caught by both rules defaults to no-decay
            no_decay -= decay & no_decay
            inter_params = decay & no_decay
            union_params = decay | no_decay

            assert len(inter_params) == 0, "parameters %s made it into both decay/no_decay sets!" % (str(inter_params), )
            assert len(param_dict.keys() - union_params) == 0, "parameters %s were not separated into either decay/no_decay set!" \
                                                        % (str(param_dict.keys() - union_params), )

            # create the pytorch optimizer object
            optim_groups = [
                {"params": [param_dict[pn] for pn in sorted(list(decay))], "weight_decay": train_config.weight_decay},
                {"params": [param_dict[pn] for pn in sorted(list(no_decay))], "weight_decay": 0.0},
            ]
            optimizer = torch.optim.AdamW(optim_groups, lr=train_config.learning_rate, betas=train_config.betas)
        else:
            parameters = self.parameters()
            optimizer = torch.optim.Adam(parameters, lr=train_config.learning_rate)

        return optimizer

    def process_features(self, x):
        """Embed the three input streams (previous ids, current ids, frames)
        with their positional + temporal embeddings and dropout.

        Returns (features dict with keys 'id_prev'/'id'/'frames', pad)."""
        # batch, block_size, feature
        p_idx = x['id_prev']
        idx = x['id']
        dtx = x['dt']
        dtx_prev = x['dt_prev']
        frames = self.video_encoder(x['frames'])
        pad = x['pad']

        b, t = idx.size()
        # b_p, t_p = p_idx.size()
        bf, tf = frames.size()[0:2]

        # forward the GPT model
        '''
        positional and temporal embeddings implemented in multiple ways, learnt,
        fourrier decomposition and in the case of time, just passed as is.
        '''
        # # Embeddings
        prev_id_position_embeddings = self.pos_emb(p_idx)
        prev_id_temporal_embeddings = self.temp_emb(dtx_prev.float())
        id_position_embeddings = self.pos_emb(idx)
        im_position_embeddings = self.pos_emb_frames
        temporal_embeddings = self.temp_emb(dtx.float())

        # Extract ID features
        prev_token_embeddings = self.id_drop(self.tok_emb(p_idx) + prev_id_temporal_embeddings + prev_id_position_embeddings)
        token_embeddings = self.tok_emb(idx)  # each index maps to a (learnable) vector
        token_embeddings = token_embeddings + temporal_embeddings + id_position_embeddings
        token_embeddings = self.id_drop(token_embeddings)

        # Extract image features and add time embeddings
        im_temporal_embeddings = self.frame_temp_emb(self.frame_temp_emb_seq)
        im_embeddings = frames  # self.tok_emb(frames)
        im_embeddings = im_embeddings + im_position_embeddings + im_temporal_embeddings
        im_embeddings = self.im_drop(im_embeddings)  # separate pos emb?

        # Tidy up
        features = dict()
        features['id_prev'] = prev_token_embeddings
        features['id'] = token_embeddings
        features['frames'] = im_embeddings

        return features, pad

    def perceiver(self, features, pad):
        """Two-stage decode: ids against previous-interval ids, then ids
        against frame features. Returns (logits, hidden)."""
        x = self.state_decoder(tgt=features['id'], memory=features['id_prev'], pad=pad)
        x = self.ln_f_state_dec(x)
        # NOTE(review): the stimulus decoder takes features['id'] as tgt again,
        # so the state-decoder output above is computed but not fed forward —
        # confirm this is intentional.
        x = self.stimulus_decoder(tgt=features['id'], memory=features['frames'], pad=pad)
        x = self.ln_f_stimulus_dec(x)
        logits = self.head(x)
        return logits, x

    def enc_dec(self, features, pad):
        """Single-stage alternative: decode ids against frames only."""
        x = self.stimulus_decoder(tgt=features['id'], memory=features['frames'], pad=pad)
        x = self.ln_f_stimulus_dec(x)
        logits = self.head(x)
        return logits, x

    def GPTdecoder(self, features, pad, dtx=None):
        """Decoder-only alternative: concatenate frame and id tokens and run
        the (currently commented-out) self-attention block stack."""
        # image + neural features
        x = torch.cat((features['frames'], features['id']), dim=1)

        # Decoder
        x = self.blocks(x, pad, dtx)  # (B, T, C)
        x = self.ln_f(x)
        logits = self.head(x)

        # print(logits.shape)  # (B, T, Vocab)
        # logits_psth = x[:, -1]  # (B, C)

        return logits, x

    def forward(self, x, targets=None):
        """Return (preds, features, loss).

        preds: dict with 'id' logits and 'dt' time-bin logits.
        loss: None when targets is None; otherwise a dict with per-head
        cross-entropy losses averaged over the batch, trimming each sample's
        padded tail (pad[B]) before computing the loss.
        """
        idx = x['id']
        dtx = x['dt']
        frames = x['frames']
        pad = x['pad']

        b, t = idx.size()
        # b, t = x['id'].shape[0], x['id'].shape[1] + x['id_prev'].shape[1]
        bf, tf = frames.size()[0:2]
        tf = self.config.frame_block_size
        # assert t + tf == self.config.block_size, f"{tf} {t}"
        # assert t <= self.block_size, "Cannot forward, model block size is exhausted"

        features, pad = self.process_features(x)
        logits, x = self.perceiver(features, pad)
        # logits, x = self.enc_dec(features, pad)
        # logits, x = self.GPTdecoder(features, pad)
        time = self.proj_time(x)  # (B, T_id, 1)

        # print(x[:, 0].shape)
        # psth = self.proj_psth(x)  # (B, Vocab_id)

        # if targets, calculate loss
        # calculate loss on logits up to padding token for each batch
        loss = None
        loss_frames = 0
        loss_id = []
        loss_time = []
        loss_dice = []
        loss_psth = []
        loss_hungarian = []
        if targets is not None:
            # loss_psth = self.dice_loss(psth, targets['modes'][:, tf:])
            for B, P in enumerate(pad):
                tf = 0  # id logits start at position 0 in the perceiver path
                # im_logits = logits[B, :tf]
                # im_targets = targets['frames'][B, :tf]
                # loss_frames += F.cross_entropy(im_logits.view(-1, im_logits.size(-1)), im_targets.view(-1))
                id_logits = logits[B, tf:tf + t - P]
                id_targets = targets['id'][B, :t - P]
                loss_id_ = F.cross_entropy(id_logits.view(-1, id_logits.size(-1)), id_targets.view(-1), weight=self.class_weights_id)
                # if self.config.epoch >= 15:
                #     self.truncated_loss.update_weight(id_logits[None, ...], id_targets[None, ...], id_indexes[None, ...])
                # loss_id_ = self.truncated_loss(id_logits[None, ...], id_targets[None, ...], id_indexes[None, ...])

                time_preds = time[B, :t - P]
                time_targets = targets['dt'][B, :t - P]
                loss_time_ = F.cross_entropy(time_preds.view(-1, time_preds.size(-1)), time_targets.view(-1), weight=self.class_weights_dt)
                # loss_time_ = F.mse_loss(time_preds.squeeze(-1), time_targets)
                # loss_id_ = self.poisson_loss(id_logits.view(-1, id_logits.size(-1)), F.one_hot(id_targets, self.config.vocab_size))

                # if len(id_targets) > 0:
                #     indices = self.hungarian_matcher(id_logits, id_targets)
                #     probs_matching, targets_matching = id_logits[indices[0][0]], id_targets[indices[0][1]]
                #     loss_hungarian_ = F.cross_entropy(probs_matching, targets_matching, weight=self.class_weights).to(self.device)
                #     loss_hungarian.append(loss_hungarian_)
                #     # psth = self.proj_psth(x[B, -1])  # from the EOS position

                # loss_psth.append(torch.nan_to_num(self.set_loss(id_logits, id_targets)))
                # loss_psth_ = self.dice_loss(id_logits, id_targets)
                # loss_psth.append(torch.nan_to_num(loss_psth_))

                # nan_to_num guards against fully padded samples
                loss_time.append(torch.nan_to_num(loss_time_))
                loss_id.append(torch.nan_to_num(loss_id_))

            loss = dict()
            # loss['frames'] = loss_frames / (b / 3)
            loss['id'] = sum(loss_id) / (b * 2)  # sum(loss_id) / (b * 2)  # / len(loss_id)
            loss['time'] = sum(loss_time) / (b * 2)
            # loss['dice'] = sum(loss_dice) / len(loss_dice)
            # loss['dt'] = loss_time / (b * 50)
            # loss['hungarian'] = sum(loss_hungarian) / (b * 2)
            # loss['psth'] = sum(loss_psth) / (b * 2)

            # drop entries that stayed plain floats (i.e. were never filled)
            for key in list(loss):
                if isinstance(loss[key], float):
                    del loss[key]

        preds = dict()
        preds['id'] = logits  # [:, tf:]  # only id logits
        preds['dt'] = time
return preds, features, loss | 39.2713 | 139 | 0.581673 |
523374f885db97f70d2de29f35780ae6c92b58e0 | 2,164 | py | Python | networks/encoder.py | Lnna/OpenNRE-PyTorch | 907026a8bece7a867558087131cd1e97d41eb3f2 | [
"MIT"
] | 1 | 2019-08-23T09:06:56.000Z | 2019-08-23T09:06:56.000Z | networks/encoder.py | Lnna/OpenNRE-PyTorch | 907026a8bece7a867558087131cd1e97d41eb3f2 | [
"MIT"
] | null | null | null | networks/encoder.py | Lnna/OpenNRE-PyTorch | 907026a8bece7a867558087131cd1e97d41eb3f2 | [
"MIT"
] | null | null | null | import torch
import torch.autograd as autograd
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.autograd import Variable
class _CNN(nn.Module):
def __init__(self, config):
super(_CNN, self).__init__()
self.config = config
self.in_channels = 1
self.in_height = self.config.max_length
# self.in_width = self.config.word_size + 2 * self.config.pos_size
self.in_width = self.config.word_size + 2 * self.config.pos_size+100
self.kernel_size = (self.config.window_size, self.in_width)
self.out_channels = self.config.hidden_size
self.stride = (1, 1)
self.padding = (1, 0)
self.cnn = nn.Conv2d(self.in_channels, self.out_channels, self.kernel_size, self.stride, self.padding)
def forward(self, embedding):
return self.cnn(embedding)
class _PiecewisePooling(nn.Module):
def __init(self):
super(_PiecewisePooling, self).__init__()
def forward(self, x, mask, hidden_size):
mask = torch.unsqueeze(mask, 1)
# print(x.shape)
x, _ = torch.max(mask + x, dim = 2)
x = x - 100
return x.view(-1, hidden_size * 3)
class _MaxPooling(nn.Module):
def __init__(self):
super(_MaxPooling, self).__init__()
def forward(self, x, hidden_size):
x, _ = torch.max(x, dim = 2)
return x.view(-1, hidden_size)
class PCNN(nn.Module):
    """PCNN sentence encoder: convolution + piecewise max pooling + ReLU.

    `self.mask` must be assigned externally (piece boundaries) before forward.
    """

    def __init__(self, config):
        super(PCNN, self).__init__()
        self.config = config
        self.mask = None
        self.cnn = _CNN(config)
        self.pooling = _PiecewisePooling()
        self.activation = nn.ReLU()

    def forward(self, embedding):
        conv_in = torch.unsqueeze(embedding, dim=1)  # add the channel dim
        # print(embedding.shape)
        conv_out = self.cnn(conv_in)
        pooled = self.pooling(conv_out, self.mask, self.config.hidden_size)
        # add by Ina Liu 20180117
        # pooled = torch.cat([pooled, self.config.batch_lstm_out], 1)
        return self.activation(pooled)
class CNN(nn.Module):
    """CNN sentence encoder: convolution + plain max pooling + ReLU."""

    def __init__(self, config):
        super(CNN, self).__init__()
        self.config = config
        self.cnn = _CNN(config)
        self.pooling = _MaxPooling()
        self.activation = nn.ReLU()

    def forward(self, embedding):
        conv_in = torch.unsqueeze(embedding, dim=1)  # add the channel dim
        conv_out = self.cnn(conv_in)
        pooled = self.pooling(conv_out, self.config.hidden_size)
        return self.activation(pooled)
| 30.055556 | 104 | 0.715342 |
471acd91609438b556e2b1c570d719feeb427d6a | 2,695 | py | Python | item_synchronizer/helpers.py | bergercookie/item_synchronizer | 8d4c69af68bc1658b0f0a43de683f1ca132ace58 | [
"MIT"
] | 2 | 2021-11-26T22:41:36.000Z | 2021-12-06T01:23:20.000Z | item_synchronizer/helpers.py | bergercookie/item_synchronizer | 8d4c69af68bc1658b0f0a43de683f1ca132ace58 | [
"MIT"
] | null | null | null | item_synchronizer/helpers.py | bergercookie/item_synchronizer | 8d4c69af68bc1658b0f0a43de683f1ca132ace58 | [
"MIT"
] | null | null | null | """Helper functions and classes"""
from dataclasses import dataclass, field
from typing import MutableMapping, Set
from item_synchronizer.types import ID, DeleterFn, ItemGetterFn
@dataclass
class SideChanges:
    """Hold the items that are new, modified or deleted compared to the previous run.

    The items related to deletions have indeed already been deleted but the corresponding
    A <-> B mapping has not been updated yet. It's the responsibility of the Synchronizer class
    to do so
    """

    new: Set[ID] = field(default_factory=set)
    modified: Set[ID] = field(default_factory=set)
    deleted: Set[ID] = field(default_factory=set)

    def __str__(self) -> str:
        # Each section: "<header>: <count>" then one tab-indented id per line;
        # the last section keeps its trailing newline (rstrip of tabs only).
        new_part = f"New Items: {len(self.new)}\n\t" + "\n\t".join(self.new)
        mod_part = f"Modified Items: {len(self.modified)}\n\t" + "\n\t".join(self.modified)
        del_part = f"Deleted Item: {len(self.deleted)}\n\t" + "\n\t".join(self.deleted)
        return "\n".join(
            [
                new_part.rstrip("\n\t"),
                mod_part.rstrip("\n\t"),
                del_part.rstrip("\t"),
            ]
        )
def item_getter_handle_exc(item_getter: ItemGetterFn) -> ItemGetterFn:
    """ItemGetter decorator function that handles exception when handing over the item.

    A missing item (KeyError) is reported as None instead of raising.
    """

    def fn(*args, **kargs):
        result = None
        try:
            result = item_getter(*args, **kargs)
        except KeyError:
            pass
        return result

    return fn
def delete_n_pop(deleter: DeleterFn, map_: MutableMapping) -> DeleterFn:
    """Wrap `deleter` so that deleting an item also pops it from `map_`."""

    def fn(id_):
        deleter(id_)
        map_.pop(id_)

    # Bug fix: the original assigned `delete_n_pop.__doc__ = deleter.__doc__`,
    # overwriting this factory's own docstring on every call instead of
    # documenting the returned wrapper.
    fn.__doc__ = deleter.__doc__
    return fn
class TypeStats:
    """Accumulate per-type execution statistics, intended to be printed on exit."""

    def __init__(self, title: str):
        # Counters start at zero; the separator line matches the title's width.
        self._title = title
        self._created_new = 0
        self._updated = 0
        self._deleted = 0
        self._errors = 0
        self._sep = "-" * len(self._title)

    def create_new(self):
        """Record that an item was inserted."""
        self._created_new += 1

    def update(self):
        """Record that an item was updated."""
        self._updated += 1

    def delete(self):
        """Record that an item was deleted."""
        self._deleted += 1

    def error(self):
        """Record that an event operation failed."""
        self._errors += 1

    def __str__(self) -> str:
        # Title, underline, then one tab-indented bullet per counter,
        # with a trailing newline after the last bullet.
        rows = (
            self._title,
            self._sep,
            f"\t* Items created: {self._created_new}",
            f"\t* Items updated: {self._updated}",
            f"\t* Items deleted: {self._deleted}",
        )
        return "\n".join(rows) + "\n"
| 28.368421 | 95 | 0.601484 |
dc69723dcd6f8188a4d459923b079f5464ab4239 | 16,297 | py | Python | CIFAR_main.py | InCogNiTo124/invertible-resnet | 630e886bcd50cc21dee0769d1a5b35100d3ff569 | [
"MIT"
] | 476 | 2019-06-06T00:46:03.000Z | 2022-03-28T15:14:28.000Z | CIFAR_main.py | ChaeJeongLee/invertible-resnet | 630e886bcd50cc21dee0769d1a5b35100d3ff569 | [
"MIT"
] | 24 | 2019-06-08T10:00:04.000Z | 2022-03-11T23:49:33.000Z | CIFAR_main.py | ChaeJeongLee/invertible-resnet | 630e886bcd50cc21dee0769d1a5b35100d3ff569 | [
"MIT"
] | 78 | 2019-06-06T08:00:48.000Z | 2022-03-08T13:16:31.000Z | """
Code for "Invertible Residual Networks"
http://proceedings.mlr.press/v97/behrmann19a.html
ICML, 2019
"""
import torch
import torch.backends.cudnn as cudnn
import torch.optim as optim
from torch.autograd import Variable
import numpy as np
import torchvision
import torchvision.transforms as transforms
import visdom
import os
import sys
import time
import argparse
import pdb
import random
import json
from models.utils_cifar import train, test, std, mean, get_hms, interpolate
from models.conv_iResNet import conv_iResNet as iResNet
from models.conv_iResNet import multiscale_conv_iResNet as multiscale_iResNet
parser = argparse.ArgumentParser(description='Train i-ResNet/ResNet on Cifar')
parser.add_argument('-densityEstimation', '--densityEstimation', dest='densityEstimation',
action='store_true', help='perform density estimation')
parser.add_argument('--optimizer', default="adamax", type=str, help="optimizer", choices=["adam", "adamax", "sgd"])
parser.add_argument('--lr', default=0.1, type=float, help='learning rate')
parser.add_argument('--coeff', default=0.9, type=float, help='contraction coefficient for linear layers')
parser.add_argument('--numTraceSamples', default=1, type=int, help='number of samples used for trace estimation')
parser.add_argument('--numSeriesTerms', default=1, type=int, help='number of terms used in power series for matrix log')
parser.add_argument('--powerIterSpectralNorm', default=5, type=int, help='number of power iterations used for spectral norm')
parser.add_argument('--weight_decay', default=5e-4, type=float, help='coefficient for weight decay')
parser.add_argument('--drop_rate', default=0.1, type=float, help='dropout rate')
parser.add_argument('--batch', default=128, type=int, help='batch size')
parser.add_argument('--init_batch', default=1024, type=int, help='init batch size')
parser.add_argument('--init_ds', default=2, type=int, help='initial downsampling')
parser.add_argument('--warmup_epochs', default=10, type=int, help='epochs for warmup')
parser.add_argument('--inj_pad', default=0, type=int, help='initial inj padding')
parser.add_argument('--epochs', default=200, type=int, help='number of epochs')
parser.add_argument('--nBlocks', nargs='+', type=int, default=[4, 4, 4])
parser.add_argument('--nStrides', nargs='+', type=int, default=[1, 2, 2])
parser.add_argument('--nChannels', nargs='+', type=int, default=[16, 64, 256])
parser.add_argument('--resume', default='', type=str, metavar='PATH',
help='path to latest checkpoint (default: none)')
parser.add_argument('-e', '--evaluate', dest='evaluate', action='store_true',
help='evaluate model on validation set')
parser.add_argument('-interpolate', '--interpolate', dest='interpolate', action='store_true', help='train iresnet')
parser.add_argument('-drop_two', '--drop_two', dest='drop_two', action='store_true', help='2d dropout on')
parser.add_argument('-nesterov', '--nesterov', dest='nesterov', action='store_true',
help='nesterov momentum')
parser.add_argument('-norm', '--norm', dest='norm', action='store_true',
help='compute norms of conv operators')
parser.add_argument('-analysisTraceEst', '--analysisTraceEst', dest='analysisTraceEst', action='store_true',
help='analysis of trace estimation')
parser.add_argument('-multiScale', '--multiScale', dest='multiScale', action='store_true',
help='use multiscale')
parser.add_argument('-fixedPrior', '--fixedPrior', dest='fixedPrior', action='store_true',
help='use fixed prior, default is learned prior')
parser.add_argument('-noActnorm', '--noActnorm', dest='noActnorm', action='store_true',
help='disable actnorm, default uses actnorm')
parser.add_argument('--nonlin', default="elu", type=str, choices=["relu", "elu", "sorting", "softplus"])
parser.add_argument('--dataset', default='cifar10', type=str, help='dataset')
parser.add_argument('--save_dir', default=None, type=str, help='directory to save results')
parser.add_argument('--vis_port', default=8097, type=int, help="port for visdom")
parser.add_argument('--vis_server', default="localhost", type=str, help="server for visdom")
parser.add_argument('--log_every', default=10, type=int, help='logs every x iters')
parser.add_argument('-log_verbose', '--log_verbose', dest='log_verbose', action='store_true',
help='verbose logging: sigmas, max gradient')
parser.add_argument('-deterministic', '--deterministic', dest='deterministic', action='store_true',
help='fix random seeds and set cuda deterministic')
def try_make_dir(d):
    """Create directory ``d`` if it does not already exist.

    Uses ``os.makedirs(..., exist_ok=True)`` instead of the original
    check-then-``os.mkdir``: this also creates missing parent directories
    (e.g. for a nested ``--save_dir``) and avoids the race between the
    ``isdir`` check and the creation.
    """
    os.makedirs(d, exist_ok=True)
try_make_dir('results')
def anaylse_trace_estimation(model, testset, use_cuda, extension):
    """Save the model's stochastic trace estimates next to the exact traces.

    Runs the model with ``exact_trace=True`` on 8x8 crops of the test set and
    writes the estimated traces to ``'estTrace'+extension`` and the ground-truth
    traces to ``'gtTrace'+extension`` (``np.save`` appends ``.npy``).
    NOTE(review): the files are rewritten on every batch, so only the last
    processed batch's data survives the loop.
    """
    # setup range for analysis
    # NOTE(review): numSamples / numIter are computed but never used below.
    numSamples = np.arange(10)*10 + 1
    numIter = np.arange(10)
    # setup number of datapoints
    testloader = torch.utils.data.DataLoader(testset, batch_size=128, shuffle=False, num_workers=2)
    # TODO change
    for batch_idx, (inputs, targets) in enumerate(testloader):
        if use_cuda:
            inputs, targets = inputs.cuda(), targets.cuda()  # GPU settings
        inputs, targets = Variable(inputs, requires_grad=True), Variable(targets)
        # compute trace on an 8x8 spatial crop; the model is assumed to return
        # (out_bij, p_z_g_y, trace, gt_trace) when exact_trace=True -- TODO confirm
        out_bij, p_z_g_y, trace, gt_trace = model(inputs[:, :, :8, :8],
                                                  exact_trace=True)
        trace = [t.cpu().numpy() for t in trace]
        np.save('gtTrace'+extension, gt_trace)
        np.save('estTrace'+extension, trace)
    return
def test_spec_norm(model, in_shapes, extension):
    """Compute and save the top singular value of each bottleneck conv operator.

    For every selected kernel, the exact singular values of the convolution
    (as a linear operator on the given input shape) are obtained via a 2-D FFT
    of the kernel followed by an SVD.  The largest singular value per layer is
    printed and the list is saved to ``'singular_values'+extension`` (.npy).
    """
    # i cycles 0..2 over the (assumed three) convs per block; j indexes the block.
    i = 0
    j = 0
    # NOTE(review): '"bottleneck" and "weight" in v' parses as
    # '"bottleneck" (always truthy) and ("weight" in v)', so the "bottleneck"
    # substring is never actually tested; likely meant
    # '"bottleneck" in v and "weight" in v'.
    params = [v for v in model.module.state_dict().keys() \
              if "bottleneck" and "weight" in v \
              and not "weight_u" in v \
              and not "weight_orig" in v \
              and not "bn1" in v and not "linear" in v]
    print(len(params))
    print(len(in_shapes))
    svs = []
    for param in params:
        if i == 0:
            # first conv of a block operates on the block's full input shape
            input_shape = in_shapes[j]
        else:
            # later convs see a reduced channel count; NOTE(review): this
            # mutates the in_shapes[j] entry in place on every pass.
            input_shape = in_shapes[j]
            input_shape[1] = int(input_shape[1] // 4)
        convKernel = model.module.state_dict()[param].cpu().numpy()
        # keep only the spatial dimensions (H, W) for the FFT size
        input_shape = input_shape[2:]
        fft_coeff = np.fft.fft2(convKernel, input_shape, axes=[2, 3])
        t_fft_coeff = np.transpose(fft_coeff)
        # singular values of the per-frequency matrices give the operator's spectrum
        U, D, V = np.linalg.svd(t_fft_coeff, compute_uv=True, full_matrices=False)
        Dflat = np.sort(D.flatten())[::-1]  # descending; Dflat[0] is the spectral norm
        print("Layer "+str(j)+" Singular Value "+str(Dflat[0]))
        svs.append(Dflat[0])
        if i == 2:
            i = 0
            j+= 1
        else:
            i+=1
    np.save('singular_values'+extension, svs)
    return
def get_init_batch(dataloader, batch_size):
    """Gather a batch of at least `batch_size` samples for initialization.

    Concatenates consecutive input batches from `dataloader` until the total
    sample count reaches `batch_size`; the result may therefore contain a few
    more samples than requested (the last batch is not trimmed).
    """
    collected = []
    total = 0
    for inputs, _labels in dataloader:
        collected.append(inputs)
        total += inputs.size(0)
        if total >= batch_size:
            break
    return torch.cat(collected)
def main():
    """Entry point: train or evaluate an i-ResNet on CIFAR10/100, SVHN or MNIST.

    Parses the module-level argparse options, builds the dataset transforms
    (density-estimation dequantization chain or classification normalization),
    constructs the model, optionally resumes from a checkpoint, and then runs
    one of the analysis modes (--analysisTraceEst / --norm / --interpolate /
    --evaluate) or the full training loop, logging to visdom and to text files
    under --save_dir.
    """
    args = parser.parse_args()

    # Best-effort determinism: fixed seeds + deterministic cudnn.
    if args.deterministic:
        print("MODEL NOT FULLY DETERMINISTIC")
        torch.manual_seed(1234)
        torch.cuda.manual_seed(1234)
        np.random.seed(1234)
        random.seed(1234)
        torch.backends.cudnn.deterministic=True

    # Dequantization chain for density estimation: add uniform noise in
    # [0, 1) to the 0..255 pixel values, rescale to [0, 1), then center.
    dens_est_chain = [
        lambda x: (255. * x) + torch.zeros_like(x).uniform_(0., 1.),
        lambda x: x / 256.,
        lambda x: x - 0.5
    ]
    if args.dataset == 'mnist':
        assert args.densityEstimation, "Currently mnist is only supported for density estimation"
        # Pad 28x28 -> 32x32 and replicate the single channel to 3 channels.
        mnist_transforms = [transforms.Pad(2, 0), transforms.ToTensor(), lambda x: x.repeat((3, 1, 1))]
        transform_train_mnist = transforms.Compose(mnist_transforms + dens_est_chain)
        transform_test_mnist = transforms.Compose(mnist_transforms + dens_est_chain)
        trainset = torchvision.datasets.MNIST(
            root='./data', train=True, download=True, transform=transform_train_mnist)
        testset = torchvision.datasets.MNIST(
            root='./data', train=False, download=False, transform=transform_test_mnist)
        args.nClasses = 10
        in_shape = (3, 32, 32)
    else:
        # SVHN digits are orientation-sensitive, so no horizontal flip there.
        if args.dataset == 'svhn':
            train_chain = [transforms.Pad(4, padding_mode="symmetric"),
                           transforms.RandomCrop(32),
                           transforms.ToTensor()]
        else:
            train_chain = [transforms.Pad(4, padding_mode="symmetric"),
                           transforms.RandomCrop(32),
                           transforms.RandomHorizontalFlip(),
                           transforms.ToTensor()]
        test_chain = [transforms.ToTensor()]
        # Density estimation uses dequantization; classification uses
        # per-dataset mean/std normalization.
        if args.densityEstimation:
            transform_train = transforms.Compose(train_chain + dens_est_chain)
            transform_test = transforms.Compose(test_chain + dens_est_chain)
        else:
            clf_chain = [transforms.Normalize(mean[args.dataset], std[args.dataset])]
            transform_train = transforms.Compose(train_chain + clf_chain)
            transform_test = transforms.Compose(test_chain + clf_chain)

        if args.dataset == 'cifar10':
            trainset = torchvision.datasets.CIFAR10(
                root='./data', train=True, download=True, transform=transform_train)
            testset = torchvision.datasets.CIFAR10(
                root='./data', train=False, download=True, transform=transform_test)
            args.nClasses = 10
        elif args.dataset == 'cifar100':
            trainset = torchvision.datasets.CIFAR100(
                root='./data', train=True, download=True, transform=transform_train)
            testset = torchvision.datasets.CIFAR100(
                root='./data', train=False, download=True, transform=transform_test)
            args.nClasses = 100
        elif args.dataset == 'svhn':
            trainset = torchvision.datasets.SVHN(
                root='./data', split='train', download=True, transform=transform_train)
            testset = torchvision.datasets.SVHN(
                root='./data', split='test', download=True, transform=transform_test)
            args.nClasses = 10
        in_shape = (3, 32, 32)

    # setup logging with visdom (training fails fast if no server is running)
    viz = visdom.Visdom(port=args.vis_port, server="http://" + args.vis_server)
    assert viz.check_connection(), "Could not make visdom"

    if args.deterministic:
        trainloader = torch.utils.data.DataLoader(trainset, batch_size=args.batch,
                                                  shuffle=True, num_workers=2, worker_init_fn=np.random.seed(1234))
        testloader = torch.utils.data.DataLoader(testset, batch_size=args.batch,
                                                 shuffle=False, num_workers=2, worker_init_fn=np.random.seed(1234))
    else:
        trainloader = torch.utils.data.DataLoader(trainset, batch_size=args.batch, shuffle=True, num_workers=2)
        testloader = torch.utils.data.DataLoader(testset, batch_size=args.batch, shuffle=False, num_workers=2)

    def get_model(args):
        # Build either the multiscale or the single-scale i-ResNet variant
        # from the CLI options; closes over in_shape from main().
        if args.multiScale:
            model = multiscale_iResNet(in_shape,
                                       args.nBlocks, args.nStrides, args.nChannels,
                                       args.init_ds == 2,
                                       args.inj_pad, args.coeff, args.densityEstimation,
                                       args.nClasses,
                                       args.numTraceSamples, args.numSeriesTerms,
                                       args.powerIterSpectralNorm,
                                       actnorm=(not args.noActnorm),
                                       learn_prior=(not args.fixedPrior),
                                       nonlin=args.nonlin)
        else:
            model = iResNet(nBlocks=args.nBlocks, nStrides=args.nStrides,
                            nChannels=args.nChannels, nClasses=args.nClasses,
                            init_ds=args.init_ds,
                            inj_pad=args.inj_pad,
                            in_shape=in_shape,
                            coeff=args.coeff,
                            numTraceSamples=args.numTraceSamples,
                            numSeriesTerms=args.numSeriesTerms,
                            n_power_iter = args.powerIterSpectralNorm,
                            density_estimation=args.densityEstimation,
                            actnorm=(not args.noActnorm),
                            learn_prior=(not args.fixedPrior),
                            nonlin=args.nonlin)
        return model

    model = get_model(args)

    # init actnorm parameters with a data-dependent forward pass
    init_batch = get_init_batch(trainloader, args.init_batch)
    print("initializing actnorm parameters...")
    with torch.no_grad():
        model(init_batch, ignore_logdet=True)
    print("initialized")

    use_cuda = torch.cuda.is_available()
    if use_cuda:
        model.cuda()
        model = torch.nn.DataParallel(model, range(torch.cuda.device_count()))
        cudnn.benchmark = True
        in_shapes = model.module.get_in_shapes()
    else:
        in_shapes = model.get_in_shapes()

    # optionally resume from a checkpoint
    # NOTE(review): start_epoch / best_objective are only defined when a
    # checkpoint is loaded; the --interpolate and --evaluate paths below rely
    # on that (they would raise NameError without --resume).
    if args.resume:
        if os.path.isfile(args.resume):
            print("=> loading checkpoint '{}'".format(args.resume))
            checkpoint = torch.load(args.resume)
            start_epoch = checkpoint['epoch']
            best_objective = checkpoint['objective']
            print('objective: '+str(best_objective))
            model = checkpoint['model']
            if use_cuda:
                model.module.set_num_terms(args.numSeriesTerms)
            else:
                model.set_num_terms(args.numSeriesTerms)
            print("=> loaded checkpoint '{}' (epoch {})"
                  .format(args.resume, checkpoint['epoch']))
        else:
            print("=> no checkpoint found at '{}'".format(args.resume))

    try_make_dir(args.save_dir)

    # One-shot analysis modes: each runs its task and returns early.
    if args.analysisTraceEst:
        anaylse_trace_estimation(model, testset, use_cuda, args.extension)
        return

    if args.norm:
        test_spec_norm(model, in_shapes, args.extension)
        return

    if args.interpolate:
        interpolate(model, testloader, testset, start_epoch, use_cuda, best_objective, args.dataset)
        return

    if args.evaluate:
        test_log = open(os.path.join(args.save_dir, "test_log.txt"), 'w')
        if use_cuda:
            model.module.set_num_terms(args.numSeriesTerms)
        else:
            model.set_num_terms(args.numSeriesTerms)
        model = torch.nn.DataParallel(model.module)
        test(best_objective, args, model, start_epoch, testloader, viz, use_cuda, test_log)
        return

    # Full training run.
    print('| Train Epochs: ' + str(args.epochs))
    print('| Initial Learning Rate: ' + str(args.lr))

    elapsed_time = 0
    test_objective = -np.inf

    if args.optimizer == "adam":
        optimizer = optim.Adam(model.parameters(), lr=args.lr, weight_decay=args.weight_decay)
    elif args.optimizer == "adamax":
        optimizer = optim.Adamax(model.parameters(), lr=args.lr, weight_decay=args.weight_decay)
    else:
        optimizer = optim.SGD(model.parameters(), lr=args.lr,
                              momentum=0.9, weight_decay=args.weight_decay, nesterov=args.nesterov)

    # Persist the full CLI configuration for reproducibility.
    with open(os.path.join(args.save_dir, 'params.txt'), 'w') as f:
        f.write(json.dumps(args.__dict__))

    train_log = open(os.path.join(args.save_dir, "train_log.txt"), 'w')
    for epoch in range(1, 1+args.epochs):
        start_time = time.time()
        train(args, model, optimizer, epoch, trainloader, trainset, viz, use_cuda, train_log)
        epoch_time = time.time() - start_time
        elapsed_time += epoch_time
        print('| Elapsed time : %d:%02d:%02d' % (get_hms(elapsed_time)))

    # Final evaluation after training (uses the last loop value of `epoch`).
    print('Testing model')
    test_log = open(os.path.join(args.save_dir, "test_log.txt"), 'w')
    test_objective = test(test_objective, args, model, epoch, testloader, viz, use_cuda, test_log)
    print('* Test results : objective = %.2f%%' % (test_objective))
    with open(os.path.join(args.save_dir, 'final.txt'), 'w') as f:
        f.write(str(test_objective))
| 45.522346 | 125 | 0.627048 |
d9ad018f7887ba0234d2531cda590e4ead0103ed | 76,539 | py | Python | meshtastic/mesh_pb2.py | astryzia/Meshtastic-python | 55b9ab8bf10522dc82fa3eb49e356c6c1b80eb31 | [
"Apache-2.0"
] | null | null | null | meshtastic/mesh_pb2.py | astryzia/Meshtastic-python | 55b9ab8bf10522dc82fa3eb49e356c6c1b80eb31 | [
"Apache-2.0"
] | null | null | null | meshtastic/mesh_pb2.py | astryzia/Meshtastic-python | 55b9ab8bf10522dc82fa3eb49e356c6c1b80eb31 | [
"Apache-2.0"
] | 1 | 2021-12-24T22:44:03.000Z | 2021-12-24T22:44:03.000Z | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: mesh.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from . import portnums_pb2 as portnums__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='mesh.proto',
package='',
syntax='proto3',
serialized_options=b'\n\023com.geeksville.meshB\nMeshProtosH\003Z!github.com/meshtastic/gomeshproto',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\nmesh.proto\x1a\x0eportnums.proto\"\x94\x06\n\x08Position\x12\x12\n\nlatitude_i\x18\x01 \x01(\x0f\x12\x13\n\x0blongitude_i\x18\x02 \x01(\x0f\x12\x10\n\x08\x61ltitude\x18\x03 \x01(\x05\x12\x15\n\rbattery_level\x18\x04 \x01(\x05\x12\x0c\n\x04time\x18\t \x01(\x07\x12,\n\x0flocation_source\x18\n \x01(\x0e\x32\x13.Position.LocSource\x12,\n\x0f\x61ltitude_source\x18\x0b \x01(\x0e\x32\x13.Position.AltSource\x12\x15\n\rpos_timestamp\x18\x0c \x01(\x07\x12\x17\n\x0fpos_time_millis\x18\r \x01(\x05\x12\x14\n\x0c\x61ltitude_hae\x18\x0e \x01(\x11\x12\x15\n\ralt_geoid_sep\x18\x0f \x01(\x11\x12\x0c\n\x04PDOP\x18\x10 \x01(\r\x12\x0c\n\x04HDOP\x18\x11 \x01(\r\x12\x0c\n\x04VDOP\x18\x12 \x01(\r\x12\x14\n\x0cgps_accuracy\x18\x13 \x01(\r\x12\x14\n\x0cground_speed\x18\x14 \x01(\r\x12\x14\n\x0cground_track\x18\x15 \x01(\r\x12\x13\n\x0b\x66ix_quality\x18\x16 \x01(\r\x12\x10\n\x08\x66ix_type\x18\x17 \x01(\r\x12\x14\n\x0csats_in_view\x18\x18 \x01(\r\x12\x11\n\tsensor_id\x18\x19 \x01(\r\x12\x17\n\x0fpos_next_update\x18( \x01(\r\x12\x16\n\x0epos_seq_number\x18) \x01(\r\"n\n\tLocSource\x12\x16\n\x12LOCSRC_UNSPECIFIED\x10\x00\x12\x17\n\x13LOCSRC_MANUAL_ENTRY\x10\x01\x12\x17\n\x13LOCSRC_GPS_INTERNAL\x10\x02\x12\x17\n\x13LOCSRC_GPS_EXTERNAL\x10\x03\"\x85\x01\n\tAltSource\x12\x16\n\x12\x41LTSRC_UNSPECIFIED\x10\x00\x12\x17\n\x13\x41LTSRC_MANUAL_ENTRY\x10\x01\x12\x17\n\x13\x41LTSRC_GPS_INTERNAL\x10\x02\x12\x17\n\x13\x41LTSRC_GPS_EXTERNAL\x10\x03\x12\x15\n\x11\x41LTSRC_BAROMETRIC\x10\x04J\x04\x08\x07\x10\x08J\x04\x08\x08\x10\t\"\x96\x01\n\x04User\x12\n\n\x02id\x18\x01 \x01(\t\x12\x11\n\tlong_name\x18\x02 \x01(\t\x12\x12\n\nshort_name\x18\x03 \x01(\t\x12\x0f\n\x07macaddr\x18\x04 \x01(\x0c\x12 \n\x08hw_model\x18\x06 \x01(\x0e\x32\x0e.HardwareModel\x12\x13\n\x0bis_licensed\x18\x07 \x01(\x08\x12\x13\n\x04team\x18\x08 \x01(\x0e\x32\x05.Team\"\x1f\n\x0eRouteDiscovery\x12\r\n\x05route\x18\x02 \x03(\x07\"\xc5\x02\n\x07Routing\x12(\n\rroute_request\x18\x01 
\x01(\x0b\x32\x0f.RouteDiscoveryH\x00\x12&\n\x0broute_reply\x18\x02 \x01(\x0b\x32\x0f.RouteDiscoveryH\x00\x12&\n\x0c\x65rror_reason\x18\x03 \x01(\x0e\x32\x0e.Routing.ErrorH\x00\"\xb4\x01\n\x05\x45rror\x12\x08\n\x04NONE\x10\x00\x12\x0c\n\x08NO_ROUTE\x10\x01\x12\x0b\n\x07GOT_NAK\x10\x02\x12\x0b\n\x07TIMEOUT\x10\x03\x12\x10\n\x0cNO_INTERFACE\x10\x04\x12\x12\n\x0eMAX_RETRANSMIT\x10\x05\x12\x0e\n\nNO_CHANNEL\x10\x06\x12\r\n\tTOO_LARGE\x10\x07\x12\x0f\n\x0bNO_RESPONSE\x10\x08\x12\x0f\n\x0b\x42\x41\x44_REQUEST\x10 \x12\x12\n\x0eNOT_AUTHORIZED\x10!B\t\n\x07variant\"{\n\x04\x44\x61ta\x12\x19\n\x07portnum\x18\x01 \x01(\x0e\x32\x08.PortNum\x12\x0f\n\x07payload\x18\x02 \x01(\x0c\x12\x15\n\rwant_response\x18\x03 \x01(\x08\x12\x0c\n\x04\x64\x65st\x18\x04 \x01(\x07\x12\x0e\n\x06source\x18\x05 \x01(\x07\x12\x12\n\nrequest_id\x18\x06 \x01(\x07\"\xe0\x02\n\nMeshPacket\x12\x0c\n\x04\x66rom\x18\x01 \x01(\x07\x12\n\n\x02to\x18\x02 \x01(\x07\x12\x0f\n\x07\x63hannel\x18\x03 \x01(\r\x12\x18\n\x07\x64\x65\x63oded\x18\x04 \x01(\x0b\x32\x05.DataH\x00\x12\x13\n\tencrypted\x18\x05 \x01(\x0cH\x00\x12\n\n\x02id\x18\x06 \x01(\x07\x12\x0f\n\x07rx_time\x18\x07 \x01(\x07\x12\x0e\n\x06rx_snr\x18\x08 \x01(\x02\x12\x11\n\thop_limit\x18\n \x01(\r\x12\x10\n\x08want_ack\x18\x0b \x01(\x08\x12&\n\x08priority\x18\x0c \x01(\x0e\x32\x14.MeshPacket.Priority\x12\x0f\n\x07rx_rssi\x18\r \x01(\x05\"[\n\x08Priority\x12\t\n\x05UNSET\x10\x00\x12\x07\n\x03MIN\x10\x01\x12\x0e\n\nBACKGROUND\x10\n\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10@\x12\x0c\n\x08RELIABLE\x10\x46\x12\x07\n\x03\x41\x43K\x10x\x12\x07\n\x03MAX\x10\x7f\x42\x10\n\x0epayloadVariant\"j\n\x08NodeInfo\x12\x0b\n\x03num\x18\x01 \x01(\r\x12\x13\n\x04user\x18\x02 \x01(\x0b\x32\x05.User\x12\x1b\n\x08position\x18\x03 \x01(\x0b\x32\t.Position\x12\x0b\n\x03snr\x18\x07 \x01(\x02\x12\x12\n\nlast_heard\x18\x04 \x01(\x07\"\xcb\x02\n\nMyNodeInfo\x12\x13\n\x0bmy_node_num\x18\x01 \x01(\r\x12\x0f\n\x07has_gps\x18\x02 \x01(\x08\x12\x11\n\tnum_bands\x18\x03 
\x01(\r\x12\x14\n\x0cmax_channels\x18\x0f \x01(\r\x12\x12\n\x06region\x18\x04 \x01(\tB\x02\x18\x01\x12\x1f\n\x13hw_model_deprecated\x18\x05 \x01(\tB\x02\x18\x01\x12\x18\n\x10\x66irmware_version\x18\x06 \x01(\t\x12&\n\nerror_code\x18\x07 \x01(\x0e\x32\x12.CriticalErrorCode\x12\x15\n\rerror_address\x18\x08 \x01(\r\x12\x13\n\x0b\x65rror_count\x18\t \x01(\r\x12\x14\n\x0creboot_count\x18\n \x01(\r\x12\x1c\n\x14message_timeout_msec\x18\r \x01(\r\x12\x17\n\x0fmin_app_version\x18\x0e \x01(\r\"\xb5\x01\n\tLogRecord\x12\x0f\n\x07message\x18\x01 \x01(\t\x12\x0c\n\x04time\x18\x02 \x01(\x07\x12\x0e\n\x06source\x18\x03 \x01(\t\x12\x1f\n\x05level\x18\x04 \x01(\x0e\x32\x10.LogRecord.Level\"X\n\x05Level\x12\t\n\x05UNSET\x10\x00\x12\x0c\n\x08\x43RITICAL\x10\x32\x12\t\n\x05\x45RROR\x10(\x12\x0b\n\x07WARNING\x10\x1e\x12\x08\n\x04INFO\x10\x14\x12\t\n\x05\x44\x45\x42UG\x10\n\x12\t\n\x05TRACE\x10\x05\"\xe9\x01\n\tFromRadio\x12\x0b\n\x03num\x18\x01 \x01(\r\x12\x1d\n\x06packet\x18\x0b \x01(\x0b\x32\x0b.MeshPacketH\x00\x12\x1e\n\x07my_info\x18\x03 \x01(\x0b\x32\x0b.MyNodeInfoH\x00\x12\x1e\n\tnode_info\x18\x04 \x01(\x0b\x32\t.NodeInfoH\x00\x12 \n\nlog_record\x18\x07 \x01(\x0b\x32\n.LogRecordH\x00\x12\x1c\n\x12\x63onfig_complete_id\x18\x08 \x01(\rH\x00\x12\x12\n\x08rebooted\x18\t \x01(\x08H\x00\x42\x10\n\x0epayloadVariantJ\x04\x08\x02\x10\x03J\x04\x08\x06\x10\x07\"\xe1\x01\n\x07ToRadio\x12\x1d\n\x06packet\x18\x02 \x01(\x0b\x32\x0b.MeshPacketH\x00\x12&\n\tpeer_info\x18\x03 \x01(\x0b\x32\x11.ToRadio.PeerInfoH\x00\x12\x18\n\x0ewant_config_id\x18\x64 \x01(\rH\x00\x12\x14\n\ndisconnect\x18h \x01(\x08H\x00\x1a\x35\n\x08PeerInfo\x12\x13\n\x0b\x61pp_version\x18\x01 \x01(\r\x12\x14\n\x0cmqtt_gateway\x18\x02 
\x01(\x08\x42\x10\n\x0epayloadVariantJ\x04\x08\x01\x10\x02J\x04\x08\x65\x10\x66J\x04\x08\x66\x10gJ\x04\x08g\x10h*\xac\x02\n\rHardwareModel\x12\t\n\x05UNSET\x10\x00\x12\x0c\n\x08TLORA_V2\x10\x01\x12\x0c\n\x08TLORA_V1\x10\x02\x12\x12\n\x0eTLORA_V2_1_1p6\x10\x03\x12\t\n\x05TBEAM\x10\x04\x12\x0f\n\x0bHELTEC_V2_0\x10\x05\x12\x0c\n\x08TBEAM0p7\x10\x06\x12\n\n\x06T_ECHO\x10\x07\x12\x10\n\x0cTLORA_V1_1p3\x10\x08\x12\x0b\n\x07RAK4631\x10\t\x12\x0f\n\x0bHELTEC_V2_1\x10\n\x12\x11\n\rLORA_RELAY_V1\x10 \x12\x0e\n\nNRF52840DK\x10!\x12\x07\n\x03PPR\x10\"\x12\x0f\n\x0bGENIEBLOCKS\x10#\x12\x11\n\rNRF52_UNKNOWN\x10$\x12\r\n\tPORTDUINO\x10%\x12\x0f\n\x0b\x41NDROID_SIM\x10&\x12\n\n\x06\x44IY_V1\x10\'*\xb5\x01\n\x04Team\x12\t\n\x05\x43LEAR\x10\x00\x12\x08\n\x04\x43YAN\x10\x01\x12\t\n\x05WHITE\x10\x02\x12\n\n\x06YELLOW\x10\x03\x12\n\n\x06ORANGE\x10\x04\x12\x0b\n\x07MAGENTA\x10\x05\x12\x07\n\x03RED\x10\x06\x12\n\n\x06MAROON\x10\x07\x12\n\n\x06PURPLE\x10\x08\x12\r\n\tDARK_BLUE\x10\t\x12\x08\n\x04\x42LUE\x10\n\x12\x08\n\x04TEAL\x10\x0b\x12\t\n\x05GREEN\x10\x0c\x12\x0e\n\nDARK_GREEN\x10\r\x12\t\n\x05\x42ROWN\x10\x0e*.\n\tConstants\x12\n\n\x06Unused\x10\x00\x12\x15\n\x10\x44\x41TA_PAYLOAD_LEN\x10\xed\x01*\xe1\x01\n\x11\x43riticalErrorCode\x12\x08\n\x04None\x10\x00\x12\x0e\n\nTxWatchdog\x10\x01\x12\x12\n\x0eSleepEnterWait\x10\x02\x12\x0b\n\x07NoRadio\x10\x03\x12\x0f\n\x0bUnspecified\x10\x04\x12\x13\n\x0fUBloxInitFailed\x10\x05\x12\x0c\n\x08NoAXP192\x10\x06\x12\x17\n\x13InvalidRadioSetting\x10\x07\x12\x12\n\x0eTransmitFailed\x10\x08\x12\x0c\n\x08\x42rownout\x10\t\x12\x11\n\rSX1262Failure\x10\n\x12\x0f\n\x0bRadioSpiBug\x10\x0b\x42\x46\n\x13\x63om.geeksville.meshB\nMeshProtosH\x03Z!github.com/meshtastic/gomeshprotob\x06proto3'
,
dependencies=[portnums__pb2.DESCRIPTOR,])
_HARDWAREMODEL = _descriptor.EnumDescriptor(
name='HardwareModel',
full_name='HardwareModel',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='UNSET', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TLORA_V2', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TLORA_V1', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TLORA_V2_1_1p6', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TBEAM', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='HELTEC_V2_0', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TBEAM0p7', index=6, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='T_ECHO', index=7, number=7,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TLORA_V1_1p3', index=8, number=8,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RAK4631', index=9, number=9,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='HELTEC_V2_1', index=10, number=10,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LORA_RELAY_V1', index=11, number=32,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='NRF52840DK', index=12, number=33,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PPR', index=13, number=34,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='GENIEBLOCKS', index=14, number=35,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='NRF52_UNKNOWN', index=15, number=36,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PORTDUINO', index=16, number=37,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ANDROID_SIM', index=17, number=38,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DIY_V1', index=18, number=39,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=2906,
serialized_end=3206,
)
_sym_db.RegisterEnumDescriptor(_HARDWAREMODEL)
HardwareModel = enum_type_wrapper.EnumTypeWrapper(_HARDWAREMODEL)
_TEAM = _descriptor.EnumDescriptor(
name='Team',
full_name='Team',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='CLEAR', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CYAN', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='WHITE', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='YELLOW', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ORANGE', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='MAGENTA', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RED', index=6, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='MAROON', index=7, number=7,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PURPLE', index=8, number=8,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DARK_BLUE', index=9, number=9,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BLUE', index=10, number=10,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TEAL', index=11, number=11,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='GREEN', index=12, number=12,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DARK_GREEN', index=13, number=13,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BROWN', index=14, number=14,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=3209,
serialized_end=3390,
)
_sym_db.RegisterEnumDescriptor(_TEAM)
Team = enum_type_wrapper.EnumTypeWrapper(_TEAM)
_CONSTANTS = _descriptor.EnumDescriptor(
name='Constants',
full_name='Constants',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='Unused', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DATA_PAYLOAD_LEN', index=1, number=237,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=3392,
serialized_end=3438,
)
_sym_db.RegisterEnumDescriptor(_CONSTANTS)
# NOTE(review): everything in this section is protoc-generated protobuf
# descriptor code (classic "_pb2" style).  Do not hand-edit; regenerate from
# the source .proto instead.  The serialized_start/serialized_end values are
# byte offsets into the file descriptor's serialized form and must stay in
# sync with DESCRIPTOR.
# Module-level wrapper exposing the file-level 'Constants' enum values.
Constants = enum_type_wrapper.EnumTypeWrapper(_CONSTANTS)
# File-level enum 'CriticalErrorCode'.  The value names (TxWatchdog,
# Brownout, SX1262Failure, ...) suggest fatal device/firmware fault codes —
# confirm against the source .proto.
_CRITICALERRORCODE = _descriptor.EnumDescriptor(
  name='CriticalErrorCode',
  full_name='CriticalErrorCode',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      # 'None' is a Python keyword, so this value cannot be exported by a
      # normal assignment — see the globals()['None'] = 0 line further down.
      name='None', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='TxWatchdog', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='SleepEnterWait', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='NoRadio', index=3, number=3,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='Unspecified', index=4, number=4,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='UBloxInitFailed', index=5, number=5,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='NoAXP192', index=6, number=6,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='InvalidRadioSetting', index=7, number=7,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='TransmitFailed', index=8, number=8,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='Brownout', index=9, number=9,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='SX1262Failure', index=10, number=10,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='RadioSpiBug', index=11, number=11,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=3441,
  serialized_end=3666,
)
# Register with the symbol database and expose the usual enum wrapper.
_sym_db.RegisterEnumDescriptor(_CRITICALERRORCODE)
CriticalErrorCode = enum_type_wrapper.EnumTypeWrapper(_CRITICALERRORCODE)
# Module-level aliases for enum values, emitted by protoc so each value is
# reachable directly on the module (e.g. module.TBEAM) without going through
# the enum wrapper.  The aliases below span several different enums, so the
# numbers restart from 0 more than once.
# NOTE(review): the first two groups (board names TLORA_*/TBEAM/... and the
# color names CLEAR..BROWN) belong to enum descriptors defined outside this
# chunk — presumably a hardware-model enum and a team/color enum; verify
# against the .proto.
UNSET = 0
TLORA_V2 = 1
TLORA_V1 = 2
TLORA_V2_1_1p6 = 3
TBEAM = 4
HELTEC_V2_0 = 5
TBEAM0p7 = 6
T_ECHO = 7
TLORA_V1_1p3 = 8
RAK4631 = 9
HELTEC_V2_1 = 10
LORA_RELAY_V1 = 32
NRF52840DK = 33
PPR = 34
GENIEBLOCKS = 35
NRF52_UNKNOWN = 36
PORTDUINO = 37
ANDROID_SIM = 38
DIY_V1 = 39
CLEAR = 0
CYAN = 1
WHITE = 2
YELLOW = 3
ORANGE = 4
MAGENTA = 5
RED = 6
MAROON = 7
PURPLE = 8
DARK_BLUE = 9
BLUE = 10
TEAL = 11
GREEN = 12
DARK_GREEN = 13
BROWN = 14
# Values of the file-level 'Constants' enum (wrapped above).
Unused = 0
DATA_PAYLOAD_LEN = 237
# CriticalErrorCode values.  'None' is a Python keyword, so protoc assigns it
# through globals() instead of a plain `None = 0` statement (which would be a
# SyntaxError).
globals()['None'] = 0
TxWatchdog = 1
SleepEnterWait = 2
NoRadio = 3
Unspecified = 4
UBloxInitFailed = 5
NoAXP192 = 6
InvalidRadioSetting = 7
TransmitFailed = 8
Brownout = 9
SX1262Failure = 10
RadioSpiBug = 11
# Nested enum 'Position.LocSource': where a position's lat/lon came from
# (manual entry vs. internal/external GPS).
_POSITION_LOCSOURCE = _descriptor.EnumDescriptor(
  name='LocSource',
  full_name='Position.LocSource',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='LOCSRC_UNSPECIFIED', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LOCSRC_MANUAL_ENTRY', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LOCSRC_GPS_INTERNAL', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='LOCSRC_GPS_EXTERNAL', index=3, number=3,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=561,
  serialized_end=671,
)
_sym_db.RegisterEnumDescriptor(_POSITION_LOCSOURCE)
# Nested enum 'Position.AltSource': where a position's altitude came from;
# adds a barometric option on top of the LocSource-style choices.
_POSITION_ALTSOURCE = _descriptor.EnumDescriptor(
  name='AltSource',
  full_name='Position.AltSource',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='ALTSRC_UNSPECIFIED', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='ALTSRC_MANUAL_ENTRY', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='ALTSRC_GPS_INTERNAL', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='ALTSRC_GPS_EXTERNAL', index=3, number=3,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='ALTSRC_BAROMETRIC', index=4, number=4,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=674,
  serialized_end=807,
)
_sym_db.RegisterEnumDescriptor(_POSITION_ALTSOURCE)
# Nested enum 'Routing.Error': delivery failure reasons carried in a Routing
# message.  Note the deliberate gap in wire numbers: transport-level errors
# use 0-8, while application-level errors start at 32 (BAD_REQUEST,
# NOT_AUTHORIZED).
_ROUTING_ERROR = _descriptor.EnumDescriptor(
  name='Error',
  full_name='Routing.Error',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='NONE', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='NO_ROUTE', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='GOT_NAK', index=2, number=2,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='TIMEOUT', index=3, number=3,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='NO_INTERFACE', index=4, number=4,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='MAX_RETRANSMIT', index=5, number=5,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='NO_CHANNEL', index=6, number=6,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='TOO_LARGE', index=7, number=7,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='NO_RESPONSE', index=8, number=8,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='BAD_REQUEST', index=9, number=32,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='NOT_AUTHORIZED', index=10, number=33,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=1142,
  serialized_end=1322,
)
_sym_db.RegisterEnumDescriptor(_ROUTING_ERROR)
# Nested enum 'MeshPacket.Priority'.  Wire numbers are intentionally sparse
# (1, 10, 64, 70, 120, 127) so intermediate priorities can be expressed
# numerically between the named levels.
_MESHPACKET_PRIORITY = _descriptor.EnumDescriptor(
  name='Priority',
  full_name='MeshPacket.Priority',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='UNSET', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='MIN', index=1, number=1,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='BACKGROUND', index=2, number=10,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='DEFAULT', index=3, number=64,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='RELIABLE', index=4, number=70,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='ACK', index=5, number=120,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='MAX', index=6, number=127,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=1704,
  serialized_end=1795,
)
_sym_db.RegisterEnumDescriptor(_MESHPACKET_PRIORITY)
# Nested enum 'LogRecord.Level'.  The numeric values 50/40/30/20/10 match the
# numbers used by Python's logging module (CRITICAL..DEBUG), with TRACE=5
# below DEBUG.
_LOGRECORD_LEVEL = _descriptor.EnumDescriptor(
  name='Level',
  full_name='LogRecord.Level',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(
      name='UNSET', index=0, number=0,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='CRITICAL', index=1, number=50,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='ERROR', index=2, number=40,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='WARNING', index=3, number=30,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='INFO', index=4, number=20,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='DEBUG', index=5, number=10,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(
      name='TRACE', index=6, number=5,
      serialized_options=None,
      type=None,
      create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=2351,
  serialized_end=2439,
)
_sym_db.RegisterEnumDescriptor(_LOGRECORD_LEVEL)
# Descriptor for the 'Position' message.  Field names indicate a GPS position
# report: lat/lon as scaled integers (latitude_i/longitude_i), altitude,
# timestamps, dilution-of-precision values (PDOP/HDOP/VDOP), satellite count,
# plus battery level.  The numeric `type=` codes are the
# google.protobuf.descriptor FieldDescriptor TYPE_* constants emitted by
# protoc — do not edit them by hand.
_POSITION = _descriptor.Descriptor(
  name='Position',
  full_name='Position',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='latitude_i', full_name='Position.latitude_i', index=0,
      number=1, type=15, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='longitude_i', full_name='Position.longitude_i', index=1,
      number=2, type=15, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='altitude', full_name='Position.altitude', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='battery_level', full_name='Position.battery_level', index=3,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='time', full_name='Position.time', index=4,
      number=9, type=7, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      # enum field (type=14); resolved to Position.LocSource elsewhere in the
      # generated module.
      name='location_source', full_name='Position.location_source', index=5,
      number=10, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='altitude_source', full_name='Position.altitude_source', index=6,
      number=11, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='pos_timestamp', full_name='Position.pos_timestamp', index=7,
      number=12, type=7, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='pos_time_millis', full_name='Position.pos_time_millis', index=8,
      number=13, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='altitude_hae', full_name='Position.altitude_hae', index=9,
      number=14, type=17, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='alt_geoid_sep', full_name='Position.alt_geoid_sep', index=10,
      number=15, type=17, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='PDOP', full_name='Position.PDOP', index=11,
      number=16, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='HDOP', full_name='Position.HDOP', index=12,
      number=17, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='VDOP', full_name='Position.VDOP', index=13,
      number=18, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='gps_accuracy', full_name='Position.gps_accuracy', index=14,
      number=19, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='ground_speed', full_name='Position.ground_speed', index=15,
      number=20, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='ground_track', full_name='Position.ground_track', index=16,
      number=21, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='fix_quality', full_name='Position.fix_quality', index=17,
      number=22, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='fix_type', full_name='Position.fix_type', index=18,
      number=23, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='sats_in_view', full_name='Position.sats_in_view', index=19,
      number=24, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='sensor_id', full_name='Position.sensor_id', index=20,
      number=25, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='pos_next_update', full_name='Position.pos_next_update', index=21,
      number=40, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='pos_seq_number', full_name='Position.pos_seq_number', index=22,
      number=41, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  # The two nested enums declared earlier in this module.
  enum_types=[
    _POSITION_LOCSOURCE,
    _POSITION_ALTSOURCE,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=31,
  serialized_end=819,
)
# Descriptor for 'User': node identity — id, long/short display names, MAC
# address (bytes), hardware-model enum, licensed-operator flag, and team enum.
_USER = _descriptor.Descriptor(
  name='User',
  full_name='User',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='id', full_name='User.id', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='long_name', full_name='User.long_name', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='short_name', full_name='User.short_name', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='macaddr', full_name='User.macaddr', index=3,
      number=4, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='hw_model', full_name='User.hw_model', index=4,
      number=6, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='is_licensed', full_name='User.is_licensed', index=5,
      number=7, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='team', full_name='User.team', index=6,
      number=8, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=822,
  serialized_end=972,
)
# Descriptor for 'RouteDiscovery': a single repeated fixed32 field 'route'
# (label=3 marks it repeated).
_ROUTEDISCOVERY = _descriptor.Descriptor(
  name='RouteDiscovery',
  full_name='RouteDiscovery',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='route', full_name='RouteDiscovery.route', index=0,
      number=2, type=7, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=974,
  serialized_end=1005,
)
# Descriptor for 'Routing': route_request/route_reply sub-messages and an
# error_reason enum, grouped under the oneof 'variant'.  The oneof's member
# list is empty here; protoc links the fields into it later in the generated
# module.
_ROUTING = _descriptor.Descriptor(
  name='Routing',
  full_name='Routing',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='route_request', full_name='Routing.route_request', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='route_reply', full_name='Routing.route_reply', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='error_reason', full_name='Routing.error_reason', index=2,
      number=3, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _ROUTING_ERROR,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='variant', full_name='Routing.variant',
      index=0, containing_type=None,
      create_key=_descriptor._internal_create_key,
      fields=[]),
  ],
  serialized_start=1008,
  serialized_end=1333,
)
# Descriptor for 'Data': an application payload — portnum enum selecting the
# application, opaque bytes payload, and addressing/request metadata
# (want_response, dest, source, request_id).
_DATA = _descriptor.Descriptor(
  name='Data',
  full_name='Data',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='portnum', full_name='Data.portnum', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='payload', full_name='Data.payload', index=1,
      number=2, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='want_response', full_name='Data.want_response', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='dest', full_name='Data.dest', index=3,
      number=4, type=7, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='source', full_name='Data.source', index=4,
      number=5, type=7, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='request_id', full_name='Data.request_id', index=5,
      number=6, type=7, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1335,
  serialized_end=1458,
)
# Descriptor for 'MeshPacket': the over-the-air packet envelope — addressing
# ('from'/'to'/'channel'), the payload (decoded message or encrypted bytes,
# grouped under oneof 'payloadVariant', whose member list protoc fills in
# later in the generated module), plus link metadata (id, rx_time, rx_snr,
# rx_rssi, hop_limit, want_ack, priority).
_MESHPACKET = _descriptor.Descriptor(
  name='MeshPacket',
  full_name='MeshPacket',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      # 'from' is a Python keyword; the generated message exposes this field
      # via getattr(msg, 'from') / the protobuf API rather than msg.from.
      name='from', full_name='MeshPacket.from', index=0,
      number=1, type=7, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='to', full_name='MeshPacket.to', index=1,
      number=2, type=7, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='channel', full_name='MeshPacket.channel', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='decoded', full_name='MeshPacket.decoded', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='encrypted', full_name='MeshPacket.encrypted', index=4,
      number=5, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=b"",
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='id', full_name='MeshPacket.id', index=5,
      number=6, type=7, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='rx_time', full_name='MeshPacket.rx_time', index=6,
      number=7, type=7, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='rx_snr', full_name='MeshPacket.rx_snr', index=7,
      number=8, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='hop_limit', full_name='MeshPacket.hop_limit', index=8,
      number=10, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='want_ack', full_name='MeshPacket.want_ack', index=9,
      number=11, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='priority', full_name='MeshPacket.priority', index=10,
      number=12, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='rx_rssi', full_name='MeshPacket.rx_rssi', index=11,
      number=13, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _MESHPACKET_PRIORITY,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='payloadVariant', full_name='MeshPacket.payloadVariant',
      index=0, containing_type=None,
      create_key=_descriptor._internal_create_key,
      fields=[]),
  ],
  serialized_start=1461,
  serialized_end=1813,
)
# Descriptor for 'NodeInfo': one entry of the node database — node number,
# its User identity, latest Position, link SNR, and last_heard timestamp.
# Note the field index/number mismatch on the last two fields: snr is
# index=3/number=7 while last_heard is index=4/number=4 (index is declaration
# order, number is the wire tag).
_NODEINFO = _descriptor.Descriptor(
  name='NodeInfo',
  full_name='NodeInfo',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(
      name='num', full_name='NodeInfo.num', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='user', full_name='NodeInfo.user', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='position', full_name='NodeInfo.position', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='snr', full_name='NodeInfo.snr', index=3,
      number=7, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=float(0),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(
      name='last_heard', full_name='NodeInfo.last_heard', index=4,
      number=4, type=7, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1815,
  serialized_end=1921,
)
_MYNODEINFO = _descriptor.Descriptor(
name='MyNodeInfo',
full_name='MyNodeInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='my_node_num', full_name='MyNodeInfo.my_node_num', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='has_gps', full_name='MyNodeInfo.has_gps', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='num_bands', full_name='MyNodeInfo.num_bands', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_channels', full_name='MyNodeInfo.max_channels', index=3,
number=15, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='region', full_name='MyNodeInfo.region', index=4,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\030\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='hw_model_deprecated', full_name='MyNodeInfo.hw_model_deprecated', index=5,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\030\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='firmware_version', full_name='MyNodeInfo.firmware_version', index=6,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='error_code', full_name='MyNodeInfo.error_code', index=7,
number=7, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='error_address', full_name='MyNodeInfo.error_address', index=8,
number=8, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='error_count', full_name='MyNodeInfo.error_count', index=9,
number=9, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='reboot_count', full_name='MyNodeInfo.reboot_count', index=10,
number=10, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='message_timeout_msec', full_name='MyNodeInfo.message_timeout_msec', index=11,
number=13, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='min_app_version', full_name='MyNodeInfo.min_app_version', index=12,
number=14, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1924,
serialized_end=2255,
)
_LOGRECORD = _descriptor.Descriptor(
name='LogRecord',
full_name='LogRecord',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='message', full_name='LogRecord.message', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='time', full_name='LogRecord.time', index=1,
number=2, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='source', full_name='LogRecord.source', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='level', full_name='LogRecord.level', index=3,
number=4, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_LOGRECORD_LEVEL,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2258,
serialized_end=2439,
)
_FROMRADIO = _descriptor.Descriptor(
name='FromRadio',
full_name='FromRadio',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='num', full_name='FromRadio.num', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packet', full_name='FromRadio.packet', index=1,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='my_info', full_name='FromRadio.my_info', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='node_info', full_name='FromRadio.node_info', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='log_record', full_name='FromRadio.log_record', index=4,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='config_complete_id', full_name='FromRadio.config_complete_id', index=5,
number=8, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='rebooted', full_name='FromRadio.rebooted', index=6,
number=9, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='payloadVariant', full_name='FromRadio.payloadVariant',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=2442,
serialized_end=2675,
)
_TORADIO_PEERINFO = _descriptor.Descriptor(
name='PeerInfo',
full_name='ToRadio.PeerInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='app_version', full_name='ToRadio.PeerInfo.app_version', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='mqtt_gateway', full_name='ToRadio.PeerInfo.mqtt_gateway', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2808,
serialized_end=2861,
)
_TORADIO = _descriptor.Descriptor(
name='ToRadio',
full_name='ToRadio',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='packet', full_name='ToRadio.packet', index=0,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='peer_info', full_name='ToRadio.peer_info', index=1,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='want_config_id', full_name='ToRadio.want_config_id', index=2,
number=100, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='disconnect', full_name='ToRadio.disconnect', index=3,
number=104, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_TORADIO_PEERINFO, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='payloadVariant', full_name='ToRadio.payloadVariant',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=2678,
serialized_end=2903,
)
_POSITION.fields_by_name['location_source'].enum_type = _POSITION_LOCSOURCE
_POSITION.fields_by_name['altitude_source'].enum_type = _POSITION_ALTSOURCE
_POSITION_LOCSOURCE.containing_type = _POSITION
_POSITION_ALTSOURCE.containing_type = _POSITION
_USER.fields_by_name['hw_model'].enum_type = _HARDWAREMODEL
_USER.fields_by_name['team'].enum_type = _TEAM
_ROUTING.fields_by_name['route_request'].message_type = _ROUTEDISCOVERY
_ROUTING.fields_by_name['route_reply'].message_type = _ROUTEDISCOVERY
_ROUTING.fields_by_name['error_reason'].enum_type = _ROUTING_ERROR
_ROUTING_ERROR.containing_type = _ROUTING
_ROUTING.oneofs_by_name['variant'].fields.append(
_ROUTING.fields_by_name['route_request'])
_ROUTING.fields_by_name['route_request'].containing_oneof = _ROUTING.oneofs_by_name['variant']
_ROUTING.oneofs_by_name['variant'].fields.append(
_ROUTING.fields_by_name['route_reply'])
_ROUTING.fields_by_name['route_reply'].containing_oneof = _ROUTING.oneofs_by_name['variant']
_ROUTING.oneofs_by_name['variant'].fields.append(
_ROUTING.fields_by_name['error_reason'])
_ROUTING.fields_by_name['error_reason'].containing_oneof = _ROUTING.oneofs_by_name['variant']
_DATA.fields_by_name['portnum'].enum_type = portnums__pb2._PORTNUM
_MESHPACKET.fields_by_name['decoded'].message_type = _DATA
_MESHPACKET.fields_by_name['priority'].enum_type = _MESHPACKET_PRIORITY
_MESHPACKET_PRIORITY.containing_type = _MESHPACKET
_MESHPACKET.oneofs_by_name['payloadVariant'].fields.append(
_MESHPACKET.fields_by_name['decoded'])
_MESHPACKET.fields_by_name['decoded'].containing_oneof = _MESHPACKET.oneofs_by_name['payloadVariant']
_MESHPACKET.oneofs_by_name['payloadVariant'].fields.append(
_MESHPACKET.fields_by_name['encrypted'])
_MESHPACKET.fields_by_name['encrypted'].containing_oneof = _MESHPACKET.oneofs_by_name['payloadVariant']
_NODEINFO.fields_by_name['user'].message_type = _USER
_NODEINFO.fields_by_name['position'].message_type = _POSITION
_MYNODEINFO.fields_by_name['error_code'].enum_type = _CRITICALERRORCODE
_LOGRECORD.fields_by_name['level'].enum_type = _LOGRECORD_LEVEL
_LOGRECORD_LEVEL.containing_type = _LOGRECORD
_FROMRADIO.fields_by_name['packet'].message_type = _MESHPACKET
_FROMRADIO.fields_by_name['my_info'].message_type = _MYNODEINFO
_FROMRADIO.fields_by_name['node_info'].message_type = _NODEINFO
_FROMRADIO.fields_by_name['log_record'].message_type = _LOGRECORD
_FROMRADIO.oneofs_by_name['payloadVariant'].fields.append(
_FROMRADIO.fields_by_name['packet'])
_FROMRADIO.fields_by_name['packet'].containing_oneof = _FROMRADIO.oneofs_by_name['payloadVariant']
_FROMRADIO.oneofs_by_name['payloadVariant'].fields.append(
_FROMRADIO.fields_by_name['my_info'])
_FROMRADIO.fields_by_name['my_info'].containing_oneof = _FROMRADIO.oneofs_by_name['payloadVariant']
_FROMRADIO.oneofs_by_name['payloadVariant'].fields.append(
_FROMRADIO.fields_by_name['node_info'])
_FROMRADIO.fields_by_name['node_info'].containing_oneof = _FROMRADIO.oneofs_by_name['payloadVariant']
_FROMRADIO.oneofs_by_name['payloadVariant'].fields.append(
_FROMRADIO.fields_by_name['log_record'])
_FROMRADIO.fields_by_name['log_record'].containing_oneof = _FROMRADIO.oneofs_by_name['payloadVariant']
_FROMRADIO.oneofs_by_name['payloadVariant'].fields.append(
_FROMRADIO.fields_by_name['config_complete_id'])
_FROMRADIO.fields_by_name['config_complete_id'].containing_oneof = _FROMRADIO.oneofs_by_name['payloadVariant']
_FROMRADIO.oneofs_by_name['payloadVariant'].fields.append(
_FROMRADIO.fields_by_name['rebooted'])
_FROMRADIO.fields_by_name['rebooted'].containing_oneof = _FROMRADIO.oneofs_by_name['payloadVariant']
_TORADIO_PEERINFO.containing_type = _TORADIO
_TORADIO.fields_by_name['packet'].message_type = _MESHPACKET
_TORADIO.fields_by_name['peer_info'].message_type = _TORADIO_PEERINFO
_TORADIO.oneofs_by_name['payloadVariant'].fields.append(
_TORADIO.fields_by_name['packet'])
_TORADIO.fields_by_name['packet'].containing_oneof = _TORADIO.oneofs_by_name['payloadVariant']
_TORADIO.oneofs_by_name['payloadVariant'].fields.append(
_TORADIO.fields_by_name['peer_info'])
_TORADIO.fields_by_name['peer_info'].containing_oneof = _TORADIO.oneofs_by_name['payloadVariant']
_TORADIO.oneofs_by_name['payloadVariant'].fields.append(
_TORADIO.fields_by_name['want_config_id'])
_TORADIO.fields_by_name['want_config_id'].containing_oneof = _TORADIO.oneofs_by_name['payloadVariant']
_TORADIO.oneofs_by_name['payloadVariant'].fields.append(
_TORADIO.fields_by_name['disconnect'])
_TORADIO.fields_by_name['disconnect'].containing_oneof = _TORADIO.oneofs_by_name['payloadVariant']
DESCRIPTOR.message_types_by_name['Position'] = _POSITION
DESCRIPTOR.message_types_by_name['User'] = _USER
DESCRIPTOR.message_types_by_name['RouteDiscovery'] = _ROUTEDISCOVERY
DESCRIPTOR.message_types_by_name['Routing'] = _ROUTING
DESCRIPTOR.message_types_by_name['Data'] = _DATA
DESCRIPTOR.message_types_by_name['MeshPacket'] = _MESHPACKET
DESCRIPTOR.message_types_by_name['NodeInfo'] = _NODEINFO
DESCRIPTOR.message_types_by_name['MyNodeInfo'] = _MYNODEINFO
DESCRIPTOR.message_types_by_name['LogRecord'] = _LOGRECORD
DESCRIPTOR.message_types_by_name['FromRadio'] = _FROMRADIO
DESCRIPTOR.message_types_by_name['ToRadio'] = _TORADIO
DESCRIPTOR.enum_types_by_name['HardwareModel'] = _HARDWAREMODEL
DESCRIPTOR.enum_types_by_name['Team'] = _TEAM
DESCRIPTOR.enum_types_by_name['Constants'] = _CONSTANTS
DESCRIPTOR.enum_types_by_name['CriticalErrorCode'] = _CRITICALERRORCODE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Position = _reflection.GeneratedProtocolMessageType('Position', (_message.Message,), {
'DESCRIPTOR' : _POSITION,
'__module__' : 'mesh_pb2'
# @@protoc_insertion_point(class_scope:Position)
})
_sym_db.RegisterMessage(Position)
User = _reflection.GeneratedProtocolMessageType('User', (_message.Message,), {
'DESCRIPTOR' : _USER,
'__module__' : 'mesh_pb2'
# @@protoc_insertion_point(class_scope:User)
})
_sym_db.RegisterMessage(User)
RouteDiscovery = _reflection.GeneratedProtocolMessageType('RouteDiscovery', (_message.Message,), {
'DESCRIPTOR' : _ROUTEDISCOVERY,
'__module__' : 'mesh_pb2'
# @@protoc_insertion_point(class_scope:RouteDiscovery)
})
_sym_db.RegisterMessage(RouteDiscovery)
Routing = _reflection.GeneratedProtocolMessageType('Routing', (_message.Message,), {
'DESCRIPTOR' : _ROUTING,
'__module__' : 'mesh_pb2'
# @@protoc_insertion_point(class_scope:Routing)
})
_sym_db.RegisterMessage(Routing)
Data = _reflection.GeneratedProtocolMessageType('Data', (_message.Message,), {
'DESCRIPTOR' : _DATA,
'__module__' : 'mesh_pb2'
# @@protoc_insertion_point(class_scope:Data)
})
_sym_db.RegisterMessage(Data)
MeshPacket = _reflection.GeneratedProtocolMessageType('MeshPacket', (_message.Message,), {
'DESCRIPTOR' : _MESHPACKET,
'__module__' : 'mesh_pb2'
# @@protoc_insertion_point(class_scope:MeshPacket)
})
_sym_db.RegisterMessage(MeshPacket)
NodeInfo = _reflection.GeneratedProtocolMessageType('NodeInfo', (_message.Message,), {
'DESCRIPTOR' : _NODEINFO,
'__module__' : 'mesh_pb2'
# @@protoc_insertion_point(class_scope:NodeInfo)
})
_sym_db.RegisterMessage(NodeInfo)
MyNodeInfo = _reflection.GeneratedProtocolMessageType('MyNodeInfo', (_message.Message,), {
'DESCRIPTOR' : _MYNODEINFO,
'__module__' : 'mesh_pb2'
# @@protoc_insertion_point(class_scope:MyNodeInfo)
})
_sym_db.RegisterMessage(MyNodeInfo)
LogRecord = _reflection.GeneratedProtocolMessageType('LogRecord', (_message.Message,), {
'DESCRIPTOR' : _LOGRECORD,
'__module__' : 'mesh_pb2'
# @@protoc_insertion_point(class_scope:LogRecord)
})
_sym_db.RegisterMessage(LogRecord)
FromRadio = _reflection.GeneratedProtocolMessageType('FromRadio', (_message.Message,), {
'DESCRIPTOR' : _FROMRADIO,
'__module__' : 'mesh_pb2'
# @@protoc_insertion_point(class_scope:FromRadio)
})
_sym_db.RegisterMessage(FromRadio)
ToRadio = _reflection.GeneratedProtocolMessageType('ToRadio', (_message.Message,), {
'PeerInfo' : _reflection.GeneratedProtocolMessageType('PeerInfo', (_message.Message,), {
'DESCRIPTOR' : _TORADIO_PEERINFO,
'__module__' : 'mesh_pb2'
# @@protoc_insertion_point(class_scope:ToRadio.PeerInfo)
})
,
'DESCRIPTOR' : _TORADIO,
'__module__' : 'mesh_pb2'
# @@protoc_insertion_point(class_scope:ToRadio)
})
_sym_db.RegisterMessage(ToRadio)
_sym_db.RegisterMessage(ToRadio.PeerInfo)
DESCRIPTOR._options = None
_MYNODEINFO.fields_by_name['region']._options = None
_MYNODEINFO.fields_by_name['hw_model_deprecated']._options = None
# @@protoc_insertion_point(module_scope)
| 44.03855 | 7,367 | 0.746404 |
d61ea8fd3f4d975ecdefbac4980c06818afcf9b3 | 84 | py | Python | Uh-OH/backend/scrape/apps.py | SamR71/Office-Hours | 912fe67d67a810656d6aeb0c4a363b0473cf76e7 | [
"MIT"
] | 5 | 2020-01-25T20:15:08.000Z | 2020-01-25T20:39:21.000Z | Uh-OH/backend/scrape/apps.py | SamR71/Office-Hours | 912fe67d67a810656d6aeb0c4a363b0473cf76e7 | [
"MIT"
] | 39 | 2020-02-12T22:29:32.000Z | 2022-02-26T23:21:18.000Z | Uh-OH/backend/scrape/apps.py | SamR71/Office-Hours | 912fe67d67a810656d6aeb0c4a363b0473cf76e7 | [
"MIT"
] | null | null | null | from django.apps import AppConfig
class ScrapeConfig(AppConfig):
name = 'scrape'
| 14 | 33 | 0.77381 |
3eb2ede24bb76429bff3b068c3078294b1dbd2e6 | 17 | py | Python | pharmpy/__init__.py | mederrata/pharmpy | e523abd325d29369f8fbcf705d84a5d7c60534bb | [
"Apache-2.0"
] | 14 | 2019-08-23T17:55:57.000Z | 2022-03-29T13:37:20.000Z | pharmpy/__init__.py | paulzuradzki/pharmpy | e523abd325d29369f8fbcf705d84a5d7c60534bb | [
"Apache-2.0"
] | 2 | 2020-10-26T01:15:01.000Z | 2020-10-31T13:53:27.000Z | pharmpy/__init__.py | paulzuradzki/pharmpy | e523abd325d29369f8fbcf705d84a5d7c60534bb | [
"Apache-2.0"
] | 7 | 2020-07-26T18:40:31.000Z | 2021-06-25T08:55:58.000Z | name = "pharmpy"
| 8.5 | 16 | 0.647059 |
67ae4b2e07e404852cf107b4e932c74617ebe93a | 165 | py | Python | src/compas_viewers/meshviewer/__init__.py | GeneKao/compas_viewers | 6ccaa058de176478a131c54d91c4c6e2dcd52a88 | [
"MIT"
] | 5 | 2019-12-02T09:20:58.000Z | 2020-09-05T20:00:44.000Z | src/compas_viewers/meshviewer/__init__.py | GeneKao/compas_viewers | 6ccaa058de176478a131c54d91c4c6e2dcd52a88 | [
"MIT"
] | 8 | 2019-11-24T18:32:17.000Z | 2020-11-18T13:18:04.000Z | src/compas_viewers/meshviewer/__init__.py | GeneKao/compas_viewers | 6ccaa058de176478a131c54d91c4c6e2dcd52a88 | [
"MIT"
] | 4 | 2019-11-29T10:21:56.000Z | 2020-08-08T16:53:27.000Z | from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from .app import MeshViewer
__all__ = ['MeshViewer']
| 18.333333 | 38 | 0.830303 |
371abf9e22f2e6b7d5ade6f5109acfa912f35bd4 | 2,065 | py | Python | ndh/models.py | nim65s/ndh | 4945bf1f0f59bd08bdd04dd2640b3e9759fa6774 | [
"BSD-2-Clause"
] | null | null | null | ndh/models.py | nim65s/ndh | 4945bf1f0f59bd08bdd04dd2640b3e9759fa6774 | [
"BSD-2-Clause"
] | 25 | 2017-09-23T11:22:00.000Z | 2022-03-14T19:48:22.000Z | ndh/models.py | nim65s/ndh | 4945bf1f0f59bd08bdd04dd2640b3e9759fa6774 | [
"BSD-2-Clause"
] | null | null | null | """Helpers for models."""
from typing import Any
from django.db import models
from django.urls import reverse
from django.utils.safestring import mark_safe
from autoslug import AutoSlugField # type: ignore
from .querysets import NameOrderedQuerySet
from .utils import full_url
class Links:
"""A mixin to get links."""
id: int
pk: int
slug: str
_meta: Any
absolute_url_detail: bool = True
def get_absolute_url(self) -> str:
"""Get the absolute url for a queryset or an instance."""
app, model = self._meta.app_label, self._meta.model_name
if self.absolute_url_detail:
return reverse(
f"{app}:{model}",
kwargs={"slug": self.slug}
if hasattr(self, "slug")
else {"pk": self.pk},
)
else:
return reverse(f"{app}:{model}s")
def get_full_url(self) -> str:
"""Get the protocol + domain + absolute_url."""
return full_url(self.get_absolute_url())
def get_admin_url(self) -> str:
"""Get the admin url for an instance."""
return reverse(
f"admin:{self._meta.app_label}_{self._meta.model_name}_change",
args=[self.id],
)
def get_link(self) -> str:
"""Get the HTML link for this absolute_url."""
return mark_safe(f'<a href="{self.get_absolute_url()}">{self}</a>')
class TimeStampedModel(models.Model):
"""Mixin to timestamp a model."""
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
class Meta:
"""Meta."""
abstract = True
class NamedModel(models.Model):
"""Mixin to name and slugify a model."""
name = models.CharField(max_length=200, unique=True)
slug = AutoSlugField(populate_from="name", unique=True)
objects = NameOrderedQuerySet.as_manager()
class Meta:
"""Meta."""
abstract = True
def __str__(self) -> str:
"""Get the name of the instance."""
return self.name
| 25.8125 | 75 | 0.610654 |
6778149e2bf0f6fb343e5b0f3624ee58467218c0 | 2,906 | py | Python | python/paddle/static/__init__.py | LWhite027/PaddleBox | b14bcdf285dd8829e11ab12cc815ac1b1ab62694 | [
"Apache-2.0"
] | 10 | 2021-05-12T07:20:32.000Z | 2022-03-04T08:21:56.000Z | python/paddle/static/__init__.py | AFLee/Paddle | 311b3b44fc7d51d4d66d90ab8a3fc0d42231afda | [
"Apache-2.0"
] | 1 | 2021-01-07T11:00:58.000Z | 2021-01-07T11:00:58.000Z | python/paddle/static/__init__.py | AFLee/Paddle | 311b3b44fc7d51d4d66d90ab8a3fc0d42231afda | [
"Apache-2.0"
] | 18 | 2021-05-19T08:01:49.000Z | 2022-02-11T03:11:32.000Z | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TODO: import framework api under this directory
__all__ = [
'append_backward', 'gradients', 'Executor', 'global_scope', 'scope_guard',
'BuildStrategy', 'CompiledProgram', 'Print', 'py_func', 'ExecutionStrategy',
'name_scope', 'ParallelExecutor', 'program_guard', 'WeightNormParamAttr',
'default_main_program', 'default_startup_program', 'Program', 'data',
'InputSpec', 'save', 'load', 'save_inference_model', 'load_inference_model',
'load_program_state', 'set_program_state', 'cpu_places', 'cuda_places',
'Variable'
]
from . import nn
from .io import save_inference_model #DEFINE_ALIAS
from .io import load_inference_model #DEFINE_ALIAS
from ..fluid import Scope #DEFINE_ALIAS
from .input import data #DEFINE_ALIAS
from .input import InputSpec #DEFINE_ALIAS
from ..fluid.executor import Executor #DEFINE_ALIAS
from ..fluid.executor import global_scope #DEFINE_ALIAS
from ..fluid.executor import scope_guard #DEFINE_ALIAS
from ..fluid.backward import append_backward #DEFINE_ALIAS
from ..fluid.backward import gradients #DEFINE_ALIAS
from ..fluid.compiler import BuildStrategy #DEFINE_ALIAS
from ..fluid.compiler import CompiledProgram #DEFINE_ALIAS
from ..fluid.compiler import ExecutionStrategy #DEFINE_ALIAS
from ..fluid.framework import default_main_program #DEFINE_ALIAS
from ..fluid.framework import default_startup_program #DEFINE_ALIAS
from ..fluid.framework import Program #DEFINE_ALIAS
from ..fluid.framework import name_scope #DEFINE_ALIAS
from ..fluid.framework import program_guard #DEFINE_ALIAS
from ..fluid.framework import cpu_places #DEFINE_ALIAS
from ..fluid.framework import cuda_places #DEFINE_ALIAS
from ..fluid.framework import Variable #DEFINE_ALIAS
from ..fluid.layers.control_flow import Print #DEFINE_ALIAS
from ..fluid.layers.nn import py_func #DEFINE_ALIAS
from ..fluid.parallel_executor import ParallelExecutor #DEFINE_ALIAS
from ..fluid.param_attr import WeightNormParamAttr #DEFINE_ALIAS
from ..fluid.io import save #DEFINE_ALIAS
from ..fluid.io import load #DEFINE_ALIAS
from ..fluid.io import load_program_state #DEFINE_ALIAS
from ..fluid.io import set_program_state #DEFINE_ALIAS
from ..fluid.layers import create_parameter #DEFINE_ALIAS
from ..fluid.layers import create_global_var #DEFINE_ALIAS
| 50.103448 | 80 | 0.789057 |
337b6fc5665b5bcd3901ea94db3424f4ee264755 | 3,884 | py | Python | kaixiang/settings.py | zhangpotato/kx_backmanage | 5a484c19616a77a75698f63b8fa220ee027c0c17 | [
"Apache-2.0"
] | null | null | null | kaixiang/settings.py | zhangpotato/kx_backmanage | 5a484c19616a77a75698f63b8fa220ee027c0c17 | [
"Apache-2.0"
] | null | null | null | kaixiang/settings.py | zhangpotato/kx_backmanage | 5a484c19616a77a75698f63b8fa220ee027c0c17 | [
"Apache-2.0"
] | null | null | null | """
Django settings for kaixiang project.
Generated by 'django-admin startproject' using Django 2.1.1.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '(==$)%b$fsc@ro=pcs9*^4pcgfhp$&4t(nhi)qecao@b-n4afk'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*', ]
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'home',
'cars',
'orderbasic',
'person',
'traffic',
'finance',
'user_manager',
# 'login',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
# 'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'kaixiang.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')]
,
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.media',
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'kaixiang.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
# }
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'kaixiang',
'USER': 'root',
'PASSWORD': '12345678',
'HOST': '127.0.0.1',
'PORT': '3306',
},
}
# DATABASES_APPS_MAPPING = {
# 'orderBasic': 'b2b',
# 'comment': 'comment',
# 'home': 'home',
# 'data_produce_com': 'b2b'
# }
# DATABASE_ROUTERS = ['kaixiang.database_app_router.DatabaseAppsRouter']
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'zh-hans'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
]
SESSION_ENGINE = 'django.contrib.sessions.backends.cache'
| 26.421769 | 91 | 0.667611 |
fdcd4f78a215eb76ccdbddfc02f5cf96f2bce1b6 | 3,976 | py | Python | examples/chaos_pendulum/chaos_pendulum.py | JanMV/pydy | 22c6a3965853bb3641c63e493717976d775034e2 | [
"BSD-3-Clause"
] | 298 | 2015-01-31T11:43:22.000Z | 2022-03-15T02:18:21.000Z | examples/chaos_pendulum/chaos_pendulum.py | JanMV/pydy | 22c6a3965853bb3641c63e493717976d775034e2 | [
"BSD-3-Clause"
] | 359 | 2015-01-17T16:56:42.000Z | 2022-02-08T05:27:08.000Z | examples/chaos_pendulum/chaos_pendulum.py | JanMV/pydy | 22c6a3965853bb3641c63e493717976d775034e2 | [
"BSD-3-Clause"
] | 109 | 2015-02-03T13:02:45.000Z | 2021-12-21T12:57:21.000Z | #!/usr/bin/env python
# This script generates the equations of motion for a double pendulum where the
# bob rotates about the pendulum rod. It can be shown to be chaotic when
# simulated.
# import sympy and the mechanics module
import numpy as np
import matplotlib.pyplot as plt
import sympy as sym
import sympy.physics.mechanics as me
from pydy.system import System
from pydy.viz import Cylinder, Plane, VisualizationFrame, Scene
# Enable pretty printing.
me.init_vprinting()
# declare the constants #
# gravity
g = sym.symbols('g')
mA, mB, lB = sym.symbols('m_A, m_B, L_B')
# plate dimensions
w, h = sym.symbols('w, h')
# declare the coordinates and speeds and their derivatives #
# theta : angle of the rod
# phi : angle of the plate relative to the rod
# omega : angular speed of the rod
# alpha : angular speed of the plate
theta, phi, omega, alpha = me.dynamicsymbols('theta phi omega alpha')
# reference frames #
# create a Newtonian reference frame
N = me.ReferenceFrame('N')
# create a reference for the rod, A, and the plate, B
A = me.ReferenceFrame('A')
B = me.ReferenceFrame('B')
# orientations #
# the rod rotates with respect to the Newtonian reference frame about the x
# axis
A.orient(N, 'Axis', (theta, N.y))
# the plate rotates about the rod's primay axis
B.orient(A, 'Axis', (phi, A.z))
# positions #
# origin of the Newtonian reference frame
No = me.Point('No')
# create a point for the mass centers of the two bodies
Ao = me.Point('Ao')
Bo = me.Point('Bo')
# define the positions of the mass centers relative to the Newtonian origin
lA = (lB - h / 2) / 2
Ao.set_pos(No, lA * A.z)
Bo.set_pos(No, lB * A.z)
# kinematical differential equations #
kinDiffs = (omega - theta.diff(),
alpha - phi.diff())
# angular velocities
A.set_ang_vel(N, omega * N.y)
B.set_ang_vel(A, alpha * A.z)
# linear velocities and accelerations #
No.set_vel(N, 0) # the newtonian origin is fixed
Ao.v2pt_theory(No, N, A)
Bo.v2pt_theory(No, N, A)
# central inertia
IAxx = sym.S(1) / 12 * mA * (2 * lA)**2
IAyy = IAxx
IAzz = 0
IA = (me.inertia(A, IAxx, IAyy, IAzz), Ao)
IBxx = sym.S(1) / 12 * mB * h**2
IByy = sym.S(1) / 12 * mB * (w**2 + h**2)
IBzz = sym.S(1) / 12 * mB * w**2
IB = (me.inertia(B, IBxx, IByy, IBzz), Bo)
# rigid bodies
rod = me.RigidBody('rod', Ao, A, mA, IA)
plate = me.RigidBody('plate', Bo, B, mB, IB)
# forces #
# add the gravitional force to each body
rod_gravity = (Ao, mA * g * N.z)
plate_gravity = (Bo, mB * g * N.z)
# equations of motion with Kane's method
# make a tuple of the bodies and forces
bodies = (rod, plate)
loads = (rod_gravity, plate_gravity)
# create a Kane object with respect to the Newtonian reference frame
kane = me.KanesMethod(N, q_ind=(theta, phi), u_ind=(omega, alpha),
kd_eqs=kinDiffs)
# calculate Kane's equations
fr, frstar = kane.kanes_equations(loads, bodies)
sys = System(kane)
sys.constants = {lB: 0.2, # m
h: 0.1, # m
w: 0.2, # m
mA: 0.01, # kg
mB: 0.1, # kg
g: 9.81, # m/s**2
}
sys.initial_conditions = {theta: np.deg2rad(90.0),
phi: np.deg2rad(0.5),
omega: 0,
alpha: 0}
sys.times = np.linspace(0, 10, 500)
x = sys.integrate()
plt.plot(sys.times, x)
plt.legend([sym.latex(s, mode='inline') for s in sys.coordinates + sys.speeds])
# visualize
rod_shape = Cylinder(2 * lA, 0.005, color='red')
plate_shape = Plane(h, w, color='blue')
v1 = VisualizationFrame('rod',
A.orientnew('rod', 'Axis', (sym.pi / 2, A.x)),
Ao,
rod_shape)
v2 = VisualizationFrame('plate',
B.orientnew('plate', 'Body',
(sym.pi / 2, sym.pi / 2, 0), 'XZX'),
Bo,
plate_shape)
scene = Scene(N, No, v1, v2, system=sys)
scene.display()
| 26.684564 | 79 | 0.614185 |
021e1a9f2fff4df3920edbf5bf3da20eac8eef2a | 1,936 | py | Python | config/wsgi.py | zee93/dex_lab | 1884c0cbeb4320eb08eb23eb8b92b92ebd50701c | [
"MIT"
] | 1 | 2021-05-11T21:30:57.000Z | 2021-05-11T21:30:57.000Z | config/wsgi.py | zee93/dex_lab | 1884c0cbeb4320eb08eb23eb8b92b92ebd50701c | [
"MIT"
] | null | null | null | config/wsgi.py | zee93/dex_lab | 1884c0cbeb4320eb08eb23eb8b92b92ebd50701c | [
"MIT"
] | null | null | null | """
WSGI config for Dexter Django Lab project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
import sys
from django.core.wsgi import get_wsgi_application
# This allows easy placement of apps within the interior
# dex_lab directory.
app_path = os.path.dirname(os.path.abspath(__file__)).replace('/config', '')
sys.path.append(os.path.join(app_path, 'dex_lab'))
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.production"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")
# BUG FIX: the Sentry import used to be guarded by a check that ran *before*
# the setdefault above. When DJANGO_SETTINGS_MODULE started out unset, the
# default made the later production check true while Sentry had never been
# imported, so `Sentry(application)` raised NameError. Importing after the
# default is applied keeps both checks consistent.
if os.environ.get('DJANGO_SETTINGS_MODULE') == 'config.settings.production':
    from raven.contrib.django.raven_compat.middleware.wsgi import Sentry
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
application = get_wsgi_application()
if os.environ.get('DJANGO_SETTINGS_MODULE') == 'config.settings.production':
    # Wrap the WSGI app with Sentry error reporting in production.
    application = Sentry(application)
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| 44 | 79 | 0.795455 |
5a8738d5aa97253f5f211442934d2e052cbf2f59 | 2,040 | py | Python | tests/test.py | ice1x/prefix_tree | 299c530838ede8317ac35f14ce5a1e13d139b855 | [
"Apache-2.0"
] | null | null | null | tests/test.py | ice1x/prefix_tree | 299c530838ede8317ac35f14ce5a1e13d139b855 | [
"Apache-2.0"
] | null | null | null | tests/test.py | ice1x/prefix_tree | 299c530838ede8317ac35f14ce5a1e13d139b855 | [
"Apache-2.0"
] | null | null | null | import unittest
from src.prefix_tree.trie import Trie
# Test fixtures. Naming convention: <NAME>_<AGE>_<gender><type>, where the two
# trailing letters encode gender (T/F) and type (T/F, or N for None).
IVAN_31_TT = {
    'name': 'иван',
    'age': 31,
    'gender': True,
    'type': True
}
IRINA_23_FT = {
    'name': 'ирина',
    'age': 23,
    'gender': False,
    'type': True
}
IO_3_TT = {
    'name': 'ио',
    'age': 3,
    'gender': True,
    'type': True
}
IVANOVICH_51_TN = {
    'name': 'иванович',
    'age': 51,
    'gender': True,
    'type': None
}
# Full fixture set inserted into the trie by TestTrieMethods.setUpClass.
TEST_DATA_SMALL = [
    IVAN_31_TT,
    IRINA_23_FT,
    IO_3_TT,
    IVANOVICH_51_TN
]
class TestTrieMethods(unittest.TestCase):
    """Regression tests for the prefix-tree (Trie) implementation."""

    # Kept for backwards compatibility; informational only now that
    # setUpClass always repopulates the trie it creates.
    SETUP_DONE = False

    @classmethod
    def setUpClass(cls) -> None:
        # BUG FIX: a fresh (empty) Trie is created on *every* invocation, so
        # the fixture data must be inserted unconditionally. The previous
        # SETUP_DONE guard skipped the inserts on any repeat invocation,
        # which would have left the newly created trie empty.
        cls.trie = Trie()
        for person in TEST_DATA_SMALL:
            cls.trie.insert(
                person['name'],
                {
                    'name': person['name'],
                    'age': person['age'],
                    'gender': person['gender'],
                    'type': person['type']
                }
            )
        TestTrieMethods.SETUP_DONE = True

    def test_no_output_increment(self):
        """Repeated identical queries return equal results (regression)."""
        res1 = self.trie._get_by_prefix('иван')[:]
        res2 = self.trie._get_by_prefix('иван')[:]
        self.assertEqual(res1, res2)

    def test_get_by_prefix_sort_desc_by(self):
        """Matches are sorted descending by the requested field (regression)."""
        res = self.trie.get_by_prefix_sort_desc_by('ив', 'age')
        self.assertEqual(res, [IVANOVICH_51_TN, IVAN_31_TT])

    def test_len(self):
        """'%%' appears to act as a match-all prefix -- full set returned."""
        res = self.trie._get_by_prefix('%%')
        self.assertEqual(len(res), len(TEST_DATA_SMALL))

    def test_get_by_prefix_and_query(self):
        """Prefix search combined with attribute filtering (regression)."""
        res = self.trie.get_by_prefix_and_query("и", {"type": True, "gender": False})
        self.assertEqual(res, [IRINA_23_FT])
if __name__ == '__main__':
    # Allow running this test module directly with `python`.
    unittest.main()
| 22.417582 | 85 | 0.509804 |
89ff9bc35f192a736d8db751ef84ccecc03c277f | 239 | py | Python | tests/test_api/test_auth/test_authentication/base/__init__.py | Max-Zhenzhera/my_vocab_backend | f93d0c7c7f4a45fce47eb7ce74cfcda195b13a72 | [
"MIT"
] | 1 | 2021-11-18T16:25:22.000Z | 2021-11-18T16:25:22.000Z | tests/test_api/test_auth/test_authentication/base/__init__.py | Max-Zhenzhera/my_vocab_backend | f93d0c7c7f4a45fce47eb7ce74cfcda195b13a72 | [
"MIT"
] | null | null | null | tests/test_api/test_auth/test_authentication/base/__init__.py | Max-Zhenzhera/my_vocab_backend | f93d0c7c7f4a45fce47eb7ce74cfcda195b13a72 | [
"MIT"
] | null | null | null | from .terminating_refresh_session_route import BaseTestTerminatingRefreshSessionRoute
from .user_creation_route import BaseTestUserCreationRoute
__all__ = [
'BaseTestTerminatingRefreshSessionRoute',
'BaseTestUserCreationRoute'
]
| 26.555556 | 85 | 0.857741 |
14cb3be55357a5a787421a51e08f3f8d7320359a | 33,460 | py | Python | ska-tmc/ska-dish-master-mid/src/ska_dish_master_mid/dish_master_behaviour.py | ska-telescope/tmc-prototype | 4138274e933d4b05f7fe9fc34a11c417b6d0d336 | [
"BSD-3-Clause"
] | 3 | 2019-01-10T11:49:36.000Z | 2019-07-19T03:32:52.000Z | ska-tmc/ska-dish-master-mid/src/ska_dish_master_mid/dish_master_behaviour.py | ska-telescope/tmc-prototype | 4138274e933d4b05f7fe9fc34a11c417b6d0d336 | [
"BSD-3-Clause"
] | 19 | 2019-01-07T14:50:26.000Z | 2019-10-02T13:25:23.000Z | ska-tmc/ska-dish-master-mid/src/ska_dish_master_mid/dish_master_behaviour.py | ska-telescope/tmc-prototype | 4138274e933d4b05f7fe9fc34a11c417b6d0d336 | [
"BSD-3-Clause"
] | 1 | 2018-12-21T13:39:23.000Z | 2018-12-21T13:39:23.000Z | # -*- coding: utf-8 -*-
"""
override class with command handlers for dsh-lmc.
"""
# Standard python imports
import enum
import logging
from collections import namedtuple
# Tango import
from tango import DevState, Except, ErrSeverity
# Additional import
from ska_ser_logging import configure_logging
# Initialise the SKA logging framework once at import time.
configure_logging()
MODULE_LOGGER = logging.getLogger(__name__)
# Lightweight azimuth/elevation coordinate pair (degrees).
AzEl = namedtuple("AzEl", ["azim", "elev"])
class OverrideDish(object):
TS_IDX = 0
AZIM_IDX = 1
ELEV_IDX = 2
# az & el limits for desired/achieved pointing
MAINT_AZIM = 90.0
STOW_ELEV_POSITION = 85.0
MAX_DESIRED_AZIM = 270.0
MIN_DESIRED_AZIM = -270.0
MAX_DESIRED_ELEV = 90.0
MIN_DESIRED_ELEV = 15.0
# unit for drive rate in degrees per second
AZIM_DRIVE_MAX_RATE = 3.0
ELEV_DRIVE_MAX_RATE = 1.0
# ack code interpretation
OK = 0
FAILED = 2
# limit on number of desiredPointing samples to keep
# (calls to pre_update happen once per second)
MAX_SAMPLE_HISTORY = 2400
# initialise positions to match achievedPointing and
# desiredPointing values in ska_mpi_dsh_lmc.fgo
requested_position = AzEl(azim=0.0, elev=30.0)
actual_position = AzEl(azim=0.0, elev=30.0)
desired_pointings = []
# Latest update between programTrackTable and desiredPointing
last_coordinate_update_timestamp = 0.0
def _configureband(self, model, band_number):
_allowed_modes = ("STANDBY_FP", "OPERATE", "STOW")
ds_indexer_position = model.sim_quantities["dsIndexerPosition"]
configured_band = model.sim_quantities["configuredBand"]
dish_mode_quantity = model.sim_quantities["dishMode"]
dish_mode = get_enum_str(dish_mode_quantity)
if dish_mode in _allowed_modes:
set_enum(dish_mode_quantity, "CONFIG", model.time_func())
model.logger.info(
"Configuring DISH to operate in frequency band {}.".format(band_number)
)
# TODO (p.dube 19-05-2021) Implement sleep in a background thread to allow the
# dishMode to remain in 'CONFIG' to simulate the real DSH LMC.
set_enum(ds_indexer_position, "B{}".format(band_number), model.time_func())
set_enum(configured_band, "B{}".format(band_number), model.time_func())
model.logger.info(
"Done configuring DISH to operate in frequency band {}.".format(band_number)
)
model.logger.info("DISH reverting back to '{}' mode.".format(dish_mode))
set_enum(dish_mode_quantity, dish_mode, model.time_func())
else:
Except.throw_exception(
"DISH Command Failed",
"DISH is not in {} mode.".format(_allowed_modes),
"ConfigureBand{}()".format(band_number),
ErrSeverity.WARN,
)
    def action_configureband1(
        self, model, tango_dev=None, data_input=None
    ):  # pylint: disable=W0613
        """Transition the Dish through CONFIG to operate in frequency band 1.

        On completion of the band configuration, Dish automatically reverts
        to the previous Dish mode (OPERATE or STANDBY_FP).

        :param model: tango_simlib.model.Model
        :param data_input: None
        :raises DevFailed: dishMode is not in any of the allowed modes (STANDBY_FP, OPERATE, STOW).
        """
        # Delegate to the shared band-configuration handler.
        self._configureband(model, "1")
    def action_configureband2(
        self, model, tango_dev=None, data_input=None
    ):  # pylint: disable=W0613
        """Transition the Dish through CONFIG to operate in frequency band 2.

        On completion of the band configuration, Dish automatically reverts
        to the previous Dish mode (OPERATE or STANDBY_FP).

        :param model: tango_simlib.model.Model
        :param data_input: None
        :raises DevFailed: dishMode is not in any of the allowed modes (STANDBY_FP, OPERATE, STOW).
        """
        # Delegate to the shared band-configuration handler.
        self._configureband(model, "2")
    def action_configureband3(
        self, model, tango_dev=None, data_input=None
    ):  # pylint: disable=W0613
        """Transition the Dish through CONFIG to operate in frequency band 3.

        On completion of the band configuration, Dish automatically reverts
        to the previous Dish mode (OPERATE or STANDBY_FP).

        :param model: tango_simlib.model.Model
        :param data_input: None
        :raises DevFailed: dishMode is not in any of the allowed modes (STANDBY_FP, OPERATE, STOW).
        """
        # Delegate to the shared band-configuration handler.
        self._configureband(model, "3")
    def action_configureband4(
        self, model, tango_dev=None, data_input=None
    ):  # pylint: disable=W0613
        """Transition the Dish through CONFIG to operate in frequency band 4.

        On completion of the band configuration, Dish automatically reverts
        to the previous Dish mode (OPERATE or STANDBY_FP).

        :param model: tango_simlib.model.Model
        :param data_input: None
        :raises DevFailed: dishMode is not in any of the allowed modes (STANDBY_FP, OPERATE, STOW).
        """
        # Delegate to the shared band-configuration handler.
        self._configureband(model, "4")
    def action_configureband5a(
        self, model, tango_dev=None, data_input=None
    ):  # pylint: disable=W0613
        """Transition the Dish through CONFIG to operate in frequency band 5a.

        On completion of the band configuration, Dish automatically reverts
        to the previous Dish mode (OPERATE or STANDBY_FP).

        :param model: tango_simlib.model.Model
        :param data_input: None
        :raises DevFailed: dishMode is not in any of the allowed modes (STANDBY_FP, OPERATE, STOW).
        """
        # Delegate to the shared band-configuration handler.
        self._configureband(model, "5a")
    def action_configureband5b(
        self, model, tango_dev=None, data_input=None
    ):  # pylint: disable=W0613
        """Transition the Dish through CONFIG to operate in frequency band 5b.

        On completion of the band configuration, Dish automatically reverts
        to the previous Dish mode (OPERATE or STANDBY_FP).

        :param model: tango_simlib.model.Model
        :param data_input: None
        :raises DevFailed: dishMode is not in any of the allowed modes (STANDBY_FP, OPERATE, STOW).
        """
        # Delegate to the shared band-configuration handler.
        self._configureband(model, "5b")
    def action_configureband5c(
        self, model, tango_dev=None, data_input=None
    ):  # pylint: disable=W0613
        """Transition the Dish through CONFIG to operate in frequency band 5c.

        On completion of the band configuration, Dish automatically reverts
        to the previous Dish mode (OPERATE or STANDBY_FP).

        :param model: tango_simlib.model.Model
        :param data_input: None
        :raises DevFailed: dishMode is not in any of the allowed modes (STANDBY_FP, OPERATE, STOW).
        """
        # Delegate to the shared band-configuration handler.
        self._configureband(model, "5c")
def _throw_exception(self, command, allowed_modes):
Except.throw_exception(
"DISH Command Failed",
"DISH is not in {} mode.".format(allowed_modes),
"{}()".format(command),
ErrSeverity.WARN,
)
def action_lowpower(
self, model, tango_dev=None, data_input=None
): # pylint: disable=W0613
"""This command triggers the Dish to transition to the LOW power
state. All subsystems go into a low power state to power only the
essential equipment. Specifically the Helium compressor will be set
to a low power consumption, and the drives will be disabled. When
issued a STOW command while in LOW power, the DS controller
should be able to turn the drives on, stow the dish and turn the
drives off again. The purpose of this mode is to enable the
observatory to perform power management (load curtailment), and
also to conserve energy for non-operating dishes.
:param model: tango_simlib.model.Model
:param data_input: None
:raises DevFailed: dishMode is not in any of the allowed modes (STOW, MAINTENANCE).
"""
_allowed_modes = ("STOW", "MAINTENANCE")
dish_mode = get_enum_str(model.sim_quantities["dishMode"])
if dish_mode in _allowed_modes:
set_enum(model.sim_quantities["powerState"], "LOW", model.time_func())
model.logger.info("Dish transitioning to 'LOW' power state.")
else:
self._throw_exception("LowPower", _allowed_modes)
def _reset_pointing_state(self, model):
action = "NONE"
pointing_state_quantity = model.sim_quantities["pointingState"]
pointing_state = get_enum_str(pointing_state_quantity)
if pointing_state != action:
model.logger.info("Current pointingState is {}.".format(pointing_state))
set_enum(pointing_state_quantity, action, model.time_func())
model.logger.info("pointingState reset to 'NONE'.")
else:
model.logger.warning("pointingState is already '{}'.".format(action))
def action_setmaintenancemode(
self, model, tango_dev=None, data_input=None
): # pylint: disable=W0613
"""This command triggers the Dish to transition to the MAINTENANCE
Dish Element Mode, and returns to the caller. To go into a state that
is safe to approach the Dish by a maintainer, and to enable the
Engineering interface to allow direct access to low level control and
monitoring by engineers and maintainers. This mode will also enable
engineers and maintainers to upgrade SW and FW. Dish also enters
this mode when an emergency stop button is pressed.
:param model: tango_simlib.model.Model
:param data_input: None
:raises DevFailed: dishMode is not in any of the allowed modes (STANDBY_LP, STANDBY_FP).
"""
maintenance = "MAINTENANCE"
_allowed_modes = ("STANDBY_LP", "STANDBY_FP")
dish_mode_quantity = model.sim_quantities["dishMode"]
dish_mode = get_enum_str(dish_mode_quantity)
if dish_mode == maintenance:
model.logger.info("Dish is already in '%s' mode", maintenance)
return
if dish_mode in _allowed_modes:
elev = self.MIN_DESIRED_ELEV
desiredPointing = [0.0] * len(
model.sim_quantities["desiredPointing"].last_val
)
desiredPointing[self.TS_IDX] = model.time_func()
desiredPointing[self.AZIM_IDX] = self.MAINT_AZIM
desiredPointing[self.ELEV_IDX] = elev
model.sim_quantities["desiredPointing"].set_val(
desiredPointing, model.time_func()
)
set_enum(dish_mode_quantity, maintenance, model.time_func())
model.logger.info("Dish transitioned to the '%s' mode.", maintenance)
self._reset_pointing_state(model)
else:
self._throw_exception("SetMaintenanceMode", _allowed_modes)
tango_dev.set_state(DevState.DISABLE)
model.logger.info("Dish state set to 'DISABLE'.")
def action_setoperatemode(
self, model, tango_dev=None, data_input=None
): # pylint: disable=W0613
"""This command triggers the Dish to transition to the OPERATE Dish
Element Mode, and returns to the caller. This mode fulfils the main
purpose of the Dish, which is to point to designated directions while
capturing data and transmitting it to CSP.
:param model: tango_simlib.model.Model
:param data_input: None
:raises DevFailed: dishMode is not in any of the allowed modes (STANDBY_FP).
"""
operate = "OPERATE"
_allowed_modes = ("STANDBY_FP",)
dish_mode_quantity = model.sim_quantities["dishMode"]
dish_mode = get_enum_str(dish_mode_quantity)
if dish_mode == operate:
model.logger.info("Dish is already in '%s' mode", operate)
return
if dish_mode in _allowed_modes:
configuredBand = model.sim_quantities["configuredBand"]
band_error_labels = ["NONE", "UNKNOWN", "ERROR", "UNDEFINED"]
if configuredBand in band_error_labels:
Except.throw_exception(
"DISH Command Failed",
"Configured band is {}.".format(configuredBand),
"SetOperateMode()",
ErrSeverity.WARN,
)
set_enum(dish_mode_quantity, operate, model.time_func())
model.logger.info("Dish transitioned to the %s Dish Element Mode.", operate)
pointing_state_quantity = model.sim_quantities["pointingState"]
set_enum(pointing_state_quantity, "READY", model.time_func())
model.logger.info("Dish pointing state set to 'READY'.")
else:
self._throw_exception("SetOperateMode", _allowed_modes)
tango_dev.set_state(DevState.ON)
model.logger.info("Dish state set to 'ON'.")
def action_setstandbyfpmode(
self, model, tango_dev=None, data_input=None
): # pylint: disable=W0613
"""This command triggers the Dish to transition to the STANDBY_FP Dish
Element Mode, and returns to the caller. To prepare all subsystems
for active observation, once a command is received by TM to go to the
FULL_POWER mode.
:param model: tango_simlib.model.Model
:param data_input: None
:raises DevFailed: dishMode is not in any of the allowed modes (STANDBY_LP, STOW, OPERATE, MAINTENANCE).
"""
standby_fp = "STANDBY_FP"
_allowed_modes = ("STANDBY_LP", "STOW", "OPERATE", "MAINTENANCE")
dish_mode_quantity = model.sim_quantities["dishMode"]
dish_mode = get_enum_str(dish_mode_quantity)
if dish_mode == standby_fp:
model.logger.info("Dish is already in '%s' mode", standby_fp)
return
if dish_mode in _allowed_modes:
set_enum(dish_mode_quantity, standby_fp, model.time_func())
model.logger.info(
"Dish transitioned to the '%s' Dish Element Mode.", standby_fp
)
self._reset_pointing_state(model)
else:
self._throw_exception("SetStandbyFPMode", _allowed_modes)
tango_dev.set_state(DevState.STANDBY)
model.logger.info("Dish state set to 'STANDBY'.")
def action_setstandbylpmode(
self, model, tango_dev=None, data_input=None
): # pylint: disable=W0613
"""This command triggers the Dish to transition to the STANDBY_LP Dish Element
Mode, and returns to the caller. Standby_LP is the default mode when the Dish
is configured for low power consumption, and is the mode wherein Dish ends after
a start up procedure.
:param model: tango_simlib.model.Model
:param data_input: None
:raises DevFailed: dishMode is not in any of the allowed modes
(OFF, STARTUP, SHUTDOWN, STANDBY_FP, MAINTENANCE, STOW, CONFIG, OPERATE).
"""
standby_lp = "STANDBY_LP"
_allowed_modes = (
"OFF",
"STARTUP",
"SHUTDOWN",
"STANDBY_FP",
"MAINTENANCE",
"STOW",
"CONFIG",
"OPERATE",
)
dish_mode_quantity = model.sim_quantities["dishMode"]
dish_mode = get_enum_str(dish_mode_quantity)
if dish_mode == standby_lp:
model.logger.info("Dish is already in '%s' mode", standby_lp)
return
if dish_mode in _allowed_modes:
set_enum(dish_mode_quantity, standby_lp, model.time_func())
model.logger.info(
"Dish transitioned to the '%s' Dish Element Mode.", standby_lp
)
self._reset_pointing_state(model)
else:
self._throw_exception("SetStandbyLPMode", _allowed_modes)
tango_dev.set_state(DevState.STANDBY)
model.logger.info("Dish state set to 'STANDBY'.")
def action_setstowmode(
self, model, tango_dev=None, data_input=None
): # pylint: disable=W0613
"""This command triggers the Dish to transition to the STOW Dish
Element Mode, and returns to the caller. To point the dish in a
direction that minimises the wind loads on the structure, for survival
in strong wind conditions. The Dish is able to observe in the stow
position, for the purpose of transient detection.
:param model: tango_simlib.model.Model
:param data_input: None
:raises DevFailed: dishMode is not in any of the allowed modes
(OFF, STARTUP, SHUTDOWN, STANDBY_LP, STANDBY_FP, MAINTENANCE, CONFIG, OPERATE).
"""
stow = "STOW"
_allowed_modes = (
"OFF",
"STARTUP",
"SHUTDOWN",
"STANDBY_LP",
"STANDBY_FP",
"MAINTENANCE",
"CONFIG",
"OPERATE",
)
dish_mode_quantity = model.sim_quantities["dishMode"]
dish_mode = get_enum_str(dish_mode_quantity)
if dish_mode == stow:
model.logger.info("Dish is already in '%s' mode", stow)
return
if dish_mode in _allowed_modes:
# movement to stow position is handled in find_next_position
set_enum(dish_mode_quantity, stow, model.time_func())
model.logger.info("Dish transitioned to the '%s' Dish Element Mode.", stow)
self._reset_pointing_state(model)
else:
self._throw_exception("SetStowMode", _allowed_modes)
tango_dev.set_state(DevState.DISABLE)
model.logger.info("Dish state set to 'DISABLE'.")
def action_startcapture(
self, model, tango_dev=None, data_input=None
): # pylint: disable=W0613
"""Triggers the dish to start capturing the data on the configured band.
:param model: tango_simlib.model.Model
:param data_input: None
:raises DevFailed: dishMode is not in any of the allowed modes (OPERATE) or
configuredBand is (NONE, UNKNOWN, ERROR, UNDEFINED).
"""
_allowed_modes = ("OPERATE",)
dish_mode_quantity = model.sim_quantities["dishMode"]
dish_mode = get_enum_str(dish_mode_quantity)
if dish_mode in _allowed_modes:
configuredBand = model.sim_quantities["configuredBand"]
band_error_labels = ["NONE", "UNKNOWN", "ERROR", "UNDEFINED"]
if configuredBand in band_error_labels:
Except.throw_exception(
"DISH Command Failed",
"configuredBand is {}.".format(configuredBand),
"StartCapture()",
ErrSeverity.WARN,
)
model.sim_quantities["capturing"].set_val(True, model.time_func())
model.logger.info("Attribute 'capturing' set to True.")
else:
self._throw_exception("StartCapture", _allowed_modes)
model.logger.info("'StartCapture' command executed successfully.")
def action_stopcapture(
self, model, tango_dev=None, data_input=None
): # pylint: disable=W0613
"""Triggers the dish to stop capturing the data on the configured band.
:param model: tango_simlib.model.Model
:param data_input: None
"""
if model.sim_quantities["capturing"]:
model.sim_quantities["capturing"].set_val(False, model.time_func())
model.logger.info("Attribute 'capturing' set to False.")
self._change_pointing_state(model, "READY", ("OPERATE",))
model.logger.info("'StopCapture' command executed successfully.")
def _change_pointing_state(self, model, action, allowed_modes):
dish_mode_quantity = model.sim_quantities["dishMode"]
dish_mode = get_enum_str(dish_mode_quantity)
if dish_mode not in allowed_modes:
self._throw_exception(action, allowed_modes)
pointing_state_quantity = model.sim_quantities["pointingState"]
pointing_state = get_enum_str(pointing_state_quantity)
if pointing_state != action:
set_enum(pointing_state_quantity, action, model.time_func())
model.logger.info("Dish pointingState set to {}.".format(action))
else:
model.logger.warning("pointingState is already '{}'.".format(action))
    def action_track(
        self, model, tango_dev=None, data_input=None
    ):  # pylint: disable=W0613
        """Start tracking the commanded pointing positions within the
        specified TRACK pointing accuracy.

        :param model: tango_simlib.model.Model
        :param data_input: None
        :raises DevFailed: dishMode is not in any of the allowed modes (OPERATE).
        """
        # pointing state is changed to TRACK when dish is in the requested position
        # (see set_track_pointing_state_on_target); until then it slews.
        self._change_pointing_state(model, "SLEW", ("OPERATE",))
        model.logger.info("'Track' command executed successfully.")
    def action_trackstop(
        self, model, tango_dev=None, data_input=None
    ):  # pylint: disable=W0613
        """Stop tracking without applying brakes.

        Stops movement, but doesn't clear tables/queues.

        :param model: tango_simlib.model.Model
        :param data_input: None
        :raises DevFailed: dishMode is not in any of the allowed modes (OPERATE).
        """
        self._change_pointing_state(model, "READY", ("OPERATE",))
        model.logger.info("'TrackStop' command executed successfully.")
def action_resettracktable(
self, model, tango_dev=None, data_input=None
): # pylint: disable=W0613
"""Resets the coordinates in the queue. Clear ACU's table (should show number of
coordinates drops to zero)
:param model: tango_simlib.model.Model
:param data_input: None
"""
program_track_quantity = model.sim_quantities["programTrackTable"]
track_table_size = len(program_track_quantity.last_val)
default_values = [0.0] * track_table_size
model.sim_quantities["programTrackTable"].set_val(
default_values, model.time_func()
)
model.logger.info("'ResetTrackTable' command executed successfully.")
    def action_resettracktablebuffer(
        self, model, tango_dev=None, data_input=None
    ):  # pylint: disable=W0613
        """Reset the Dish LMC's buffer. (In our case it's desired_pointings)

        :param model: tango_simlib.model.Model
        :param data_input: None
        """
        # Drop all buffered, not-yet-consumed pointing samples.
        self.desired_pointings = []
        model.logger.info("'ResetTrackTableBuffer' command executed successfully.")
    def action_slew(
        self, model, tango_dev=None, data_input=None
    ):  # pylint: disable=W0613
        """Move the Dish to the commanded pointing angle at maximum speed,
        as defined by the specified slew rate.

        :param model: tango_simlib.model.Model
        :param data_input: list
            [0]: Azimuth
            [1]: Elevation
        :raises DevFailed: dishMode is not in any of the allowed modes (OPERATE).
        """
        # TODO (KM 19-11-2020) Set the data_input to desiredPointing
        # NOTE(review): data_input is currently ignored; only the pointing
        # state is changed.
        self._change_pointing_state(model, "SLEW", ("OPERATE",))
        model.logger.info("'Slew' command executed successfully.")
    def action_scan(
        self, model, tango_dev=None, data_input=None
    ):  # pylint: disable=W0613
        """Track the commanded pointing positions within the specified SCAN
        pointing accuracy.

        :param model: tango_simlib.model.Model
        :param data_input: None
        :raises DevFailed: dishMode is not in any of the allowed modes (OPERATE).
        """
        self._change_pointing_state(model, "SCAN", ("OPERATE",))
        model.logger.info("'Scan' command executed successfully.")
def find_next_position(self, desired_pointings, model, sim_time):
"""Return the latest desiredPointing not in the future, or last requested."""
best_pointing = None
dish_mode = get_enum_str(model.sim_quantities["dishMode"])
# move to stow position regardless of timestamp
if dish_mode == "STOW":
return AzEl(azim=self.actual_position.azim, elev=self.STOW_ELEV_POSITION)
for pointing in desired_pointings:
timestamp = pointing[self.TS_IDX] / 1000.0 # convert ms to sec
if timestamp <= sim_time:
best_pointing = pointing
else:
break # all other samples are in the future
if best_pointing is not None:
return AzEl(
azim=best_pointing[self.AZIM_IDX], elev=best_pointing[self.ELEV_IDX]
)
else:
# no useful samples, so use last requested position
return self.requested_position
@staticmethod
def is_movement_allowed(model):
pointing_state = get_enum_str(model.sim_quantities["pointingState"])
dish_mode = get_enum_str(model.sim_quantities["dishMode"])
return pointing_state in ["SLEW", "TRACK", "SCAN"] or dish_mode == "STOW"
def is_on_target(self):
actual = self.actual_position
target = self.requested_position
return self._almost_equal(actual.azim, target.azim) and self._almost_equal(
actual.elev, target.elev
)
    def update_movement_attributes(self, model, sim_time):
        """Publish movement-related attributes after a simulation step.

        Updates targetLock and achievedPointing, then promotes the pointing
        state from SLEW to TRACK when the dish has locked on target.

        :param model: tango_simlib.model.Model
        :param sim_time: float, current simulation time (seconds)
        """
        self.set_lock_attribute(model, self.is_on_target())
        self.set_achieved_pointing_attribute(model, sim_time, self.actual_position)
        self.set_track_pointing_state_on_target(model)
@staticmethod
def set_lock_attribute(model, target_reached):
target_lock = model.sim_quantities["targetLock"]
if target_lock.last_val != target_reached:
target_lock.last_val = target_reached
model.logger.info("Attribute 'targetLock' set to %s.", target_reached)
def set_track_pointing_state_on_target(self, model):
pointing_state = get_enum_str(model.sim_quantities["pointingState"])
target_lock = model.sim_quantities["targetLock"].last_val
# update the pointing state to TRACK when the dish arrives on target
if target_lock and pointing_state == "SLEW":
self._change_pointing_state(model, "TRACK", ("OPERATE",))
def set_achieved_pointing_attribute(self, model, sim_time, position):
achievedPointing = [0, 0, 0]
achievedPointing[self.TS_IDX] = sim_time * 1000.0 # millisecond timestamp
achievedPointing[self.AZIM_IDX] = position.azim
achievedPointing[self.ELEV_IDX] = position.elev
model.sim_quantities["achievedPointing"].set_val(achievedPointing, sim_time)
def get_rate_limited_position(self, current_pos, next_pos, dt):
# calc required deltas in az and el
required_delta_azim = abs(current_pos.azim - next_pos.azim)
required_delta_elev = abs(current_pos.elev - next_pos.elev)
# calc max deltas in az and el due to speed limits
max_slew_azim = self.AZIM_DRIVE_MAX_RATE * dt
max_slew_elev = self.ELEV_DRIVE_MAX_RATE * dt
# limit
allowed_delta_azim = min(max_slew_azim, required_delta_azim)
allowed_delta_elev = min(max_slew_elev, required_delta_elev)
# get direction signs: +1 or -1
sign_azim = get_direction_sign(current_pos.azim, next_pos.azim)
sign_elev = get_direction_sign(current_pos.elev, next_pos.elev)
return AzEl(
azim=(current_pos.azim + sign_azim * allowed_delta_azim),
elev=(current_pos.elev + sign_elev * allowed_delta_elev),
)
def ensure_within_mechanical_limits(self, next_pos):
if (
next_pos.azim > self.MAX_DESIRED_AZIM
or next_pos.azim < self.MIN_DESIRED_AZIM
):
Except.throw_exception(
"Skipping dish movement.",
"Desired azimuth angle '%s' is out of pointing limits %s."
% (next_pos.azim, [self.MIN_DESIRED_AZIM, self.MAX_DESIRED_AZIM]),
"ensure_within_mechanical_limits()",
ErrSeverity.WARN,
)
elif (
next_pos.elev > self.MAX_DESIRED_ELEV
or next_pos.elev < self.MIN_DESIRED_ELEV
):
Except.throw_exception(
"Skipping dish movement.",
"Desired elevation angle '%s' is out of pointing limits %s."
% (next_pos.elev, [self.MIN_DESIRED_ELEV, self.MAX_DESIRED_ELEV]),
"ensure_within_mechanical_limits()",
ErrSeverity.WARN,
)
def move_towards_target(self, model, sim_time, dt):
next_requested_pos = self.find_next_position(self.desired_pointings, model, sim_time)
self.requested_position = next_requested_pos
self.ensure_within_mechanical_limits(next_requested_pos)
next_achievable_pos = self.get_rate_limited_position(
self.actual_position, next_requested_pos, dt
)
self.actual_position = next_achievable_pos
def get_new_unverified_pointings(self, model):
"""Return the latest list of coordinates
:param model: Model
The device Model
:return: list
- Empty if no updates have occured since the last time
- 1 entry of desiredPointing if it is the latest
- All the entries of programTrackTable if it is the latest (7 in testing)
"""
programTrackTable_last_update = model.sim_quantities[
"programTrackTable"
].last_update_time
desiredPointing_last_update = model.sim_quantities[
"desiredPointing"
].last_update_time
if programTrackTable_last_update > desiredPointing_last_update:
if programTrackTable_last_update > self.last_coordinate_update_timestamp:
self.last_coordinate_update_timestamp = programTrackTable_last_update
all_values = model.sim_quantities["programTrackTable"].last_val
assert len(all_values) % 3 == 0, (
"Length of programTrackTable should ",
"be divisble by 3",
)
# Group in 3s
return list(map(list, zip(*(iter(all_values),) * 3)))
else:
if desiredPointing_last_update > self.last_coordinate_update_timestamp:
self.last_coordinate_update_timestamp = desiredPointing_last_update
return [model.sim_quantities["desiredPointing"].last_val]
return []
def get_new_valid_pointings(self, model):
unverified_pointings = self.get_new_unverified_pointings(model)
now_millisec = model.time_func() * 1000.0
return [
pointing
for pointing in unverified_pointings
if pointing[self.TS_IDX] >= now_millisec
]
def update_desired_pointing_history(self, model):
latest_pointings = self.get_new_valid_pointings(model)
self.desired_pointings.extend(latest_pointings)
if len(self.desired_pointings) > self.MAX_SAMPLE_HISTORY:
# drop older samples
self.desired_pointings = self.desired_pointings[-self.MAX_SAMPLE_HISTORY :]
def pre_update(self, model, sim_time, dt):
if self.is_movement_allowed(model):
self.update_desired_pointing_history(model)
self.move_towards_target(model, sim_time, dt)
self.update_movement_attributes(model, sim_time)
else:
model.logger.debug("Skipping quantity updates - movement not allowed")
def _almost_equal(self, x, y, abs_threshold=5e-3):
"""Takes two values return true if they are almost equal"""
return abs(x - y) <= abs_threshold
def get_enum_str(quantity):
"""Returns the enum label of an enumerated data type
:param quantity: object
The quantity object of a DevEnum attribute
:return: str
Current string value of a DevEnum attribute
"""
EnumClass = enum.IntEnum("EnumLabels", quantity.meta["enum_labels"], start=0)
return EnumClass(quantity.last_val).name
def set_enum(quantity, label, timestamp):
"""Sets the quantity last_val attribute to index of label
:param quantity: object
The quantity object from model
:param label: str
The desired label from enum list
:param timestamp: float
The time now
"""
value = quantity.meta["enum_labels"].index(label)
quantity.set_val(value, timestamp)
def get_direction_sign(here, there):
"""Return sign (+1 or -1) required to move from here to there."""
return 1 if here < there else -1
| 42.787724 | 112 | 0.652421 |
32d547b799ed54b3367d4da0803a7ac95a156b2d | 8,453 | py | Python | bika/lims/browser/reports/productivity_analysesperformedpertotal.py | hocinebendou/bika.gsoc | 85bc0c587de7f52073ae0e89bddbc77bf875f295 | [
"MIT"
] | null | null | null | bika/lims/browser/reports/productivity_analysesperformedpertotal.py | hocinebendou/bika.gsoc | 85bc0c587de7f52073ae0e89bddbc77bf875f295 | [
"MIT"
] | null | null | null | bika/lims/browser/reports/productivity_analysesperformedpertotal.py | hocinebendou/bika.gsoc | 85bc0c587de7f52073ae0e89bddbc77bf875f295 | [
"MIT"
] | null | null | null | from Products.CMFCore.utils import getToolByName
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from bika.lims import bikaMessageFactory as _
from bika.lims.browser import BrowserView
from bika.lims.browser.reports.selection_macros import SelectionMacrosView
from plone.app.layout.globals.interfaces import IViewView
from zope.interface import implements
class Report(BrowserView):
implements(IViewView)
default_template = ViewPageTemplateFile("templates/productivity.pt")
template = ViewPageTemplateFile(
"templates/productivity_analysesperformedpertotal.pt")
def __init__(self, context, request, report=None):
super(Report, self).__init__(context, request)
self.report = report
self.selection_macros = SelectionMacrosView(self.context, self.request)
def __call__(self):
parms = []
titles = []
# Apply filters
self.contentFilter = {'portal_type': 'Analysis'}
val = self.selection_macros.parse_daterange(self.request,
'getDateRequested',
_('Date Requested'))
if val:
self.contentFilter[val['contentFilter'][0]] = val['contentFilter'][1]
parms.append(val['parms'])
titles.append(val['titles'])
# Query the catalog and store results in a dictionary
analyses = self.bika_analysis_catalog(self.contentFilter)
if not analyses:
message = _("No analyses matched your query")
self.context.plone_utils.addPortalMessage(message, "error")
return self.default_template()
groupby = self.request.form.get('GroupingPeriod', '')
if (groupby != ''):
parms.append({"title": _("Grouping period"), "value": _(groupby)})
datalines = {}
footlines = {}
totalcount = len(analyses)
totalpublishedcount = 0
totalperformedcount = 0
for analysis in analyses:
analysis = analysis.getObject()
ankeyword = analysis.getKeyword()
antitle = analysis.getServiceTitle()
daterequested = analysis.created()
group = ''
if groupby == 'Day':
group = self.ulocalized_time(daterequested)
elif groupby == 'Week':
group = daterequested.strftime(
"%Y") + ", " + daterequested.strftime("%U")
elif groupby == 'Month':
group = daterequested.strftime(
"%B") + " " + daterequested.strftime("%Y")
elif groupby == 'Year':
group = daterequested.strftime("%Y")
else:
group = ''
dataline = {'Group': group, 'Requested': 0, 'Performed': 0,
'Published': 0, 'Analyses': {}}
anline = {'Analysis': antitle, 'Requested': 0, 'Performed': 0,
'Published': 0}
if (group in datalines):
dataline = datalines[group]
if (ankeyword in dataline['Analyses']):
anline = dataline['Analyses'][ankeyword]
grouptotalcount = dataline['Requested'] + 1
groupperformedcount = dataline['Performed']
grouppublishedcount = dataline['Published']
anltotalcount = anline['Requested'] + 1
anlperformedcount = anline['Performed']
anlpublishedcount = anline['Published']
workflow = getToolByName(self.context, 'portal_workflow')
arstate = workflow.getInfoFor(analysis.aq_parent, 'review_state', '')
if (arstate == 'published'):
anlpublishedcount += 1
grouppublishedcount += 1
totalpublishedcount += 1
if (analysis.getResult()):
anlperformedcount += 1
groupperformedcount += 1
totalperformedcount += 1
group_performedrequested_ratio = float(groupperformedcount) / float(
grouptotalcount)
group_publishedperformed_ratio = groupperformedcount > 0 and float(
grouppublishedcount) / float(groupperformedcount) or 0
anl_performedrequested_ratio = float(anlperformedcount) / float(
anltotalcount)
anl_publishedperformed_ratio = anlperformedcount > 0 and float(
anlpublishedcount) / float(anlperformedcount) or 0
dataline['Requested'] = grouptotalcount
dataline['Performed'] = groupperformedcount
dataline['Published'] = grouppublishedcount
dataline['PerformedRequestedRatio'] = group_performedrequested_ratio
dataline['PerformedRequestedRatioPercentage'] = ('{0:.0f}'.format(
group_performedrequested_ratio * 100)) + "%"
dataline['PublishedPerformedRatio'] = group_publishedperformed_ratio
dataline['PublishedPerformedRatioPercentage'] = ('{0:.0f}'.format(
group_publishedperformed_ratio * 100)) + "%"
anline['Requested'] = anltotalcount
anline['Performed'] = anlperformedcount
anline['Published'] = anlpublishedcount
anline['PerformedRequestedRatio'] = anl_performedrequested_ratio
anline['PerformedRequestedRatioPercentage'] = ('{0:.0f}'.format(
anl_performedrequested_ratio * 100)) + "%"
anline['PublishedPerformedRatio'] = anl_publishedperformed_ratio
anline['PublishedPerformedRatioPercentage'] = ('{0:.0f}'.format(
anl_publishedperformed_ratio * 100)) + "%"
dataline['Analyses'][ankeyword] = anline
datalines[group] = dataline
# Footer total data
total_performedrequested_ratio = float(totalperformedcount) / float(
totalcount)
total_publishedperformed_ratio = totalperformedcount > 0 and float(
totalpublishedcount) / float(totalperformedcount) or 0
footline = {'Requested': totalcount,
'Performed': totalperformedcount,
'Published': totalpublishedcount,
'PerformedRequestedRatio': total_performedrequested_ratio,
'PerformedRequestedRatioPercentage': ('{0:.0f}'.format(
total_performedrequested_ratio * 100)) + "%",
'PublishedPerformedRatio': total_publishedperformed_ratio,
'PublishedPerformedRatioPercentage': ('{0:.0f}'.format(
total_publishedperformed_ratio * 100)) + "%"}
footlines['Total'] = footline
self.report_data = {'parameters': parms,
'datalines': datalines,
'footlines': footlines}
if self.request.get('output_format', '') == 'CSV':
import csv
import StringIO
import datetime
fieldnames = [
'Group',
'Analysis',
'Requested',
'Performed',
'Published',
]
output = StringIO.StringIO()
dw = csv.DictWriter(output, extrasaction='ignore',
fieldnames=fieldnames)
dw.writerow(dict((fn, fn) for fn in fieldnames))
for group_name, group in datalines.items():
for service_name, service in group['Analyses'].items():
dw.writerow({
'Group': group_name,
'Analysis': service_name,
'Requested': service['Requested'],
'Performed': service['Performed'],
'Published': service['Published'],
})
report_data = output.getvalue()
output.close()
date = datetime.datetime.now().strftime("%Y%m%d%H%M")
setheader = self.request.RESPONSE.setHeader
setheader('Content-Type', 'text/csv')
setheader("Content-Disposition",
"attachment;filename=\"analysesperformedpertotal_%s.csv\"" % date)
self.request.RESPONSE.write(report_data)
else:
return {'report_title': _('Analyses performed as % of total'),
'report_data': self.template()}
| 44.026042 | 88 | 0.572814 |
9e9ff814bf1199394d4aeebff9a51b1443230c79 | 439 | py | Python | package_name/module.py | EmilRamsvik/DataPlot | eead77dd20197952f26e13abcd4288b05abf4dba | [
"MIT"
] | null | null | null | package_name/module.py | EmilRamsvik/DataPlot | eead77dd20197952f26e13abcd4288b05abf4dba | [
"MIT"
] | 1 | 2021-10-03T16:00:30.000Z | 2021-10-03T16:01:51.000Z | package_name/module.py | EmilRamsvik/DataPlot | eead77dd20197952f26e13abcd4288b05abf4dba | [
"MIT"
] | null | null | null | """
Module provides a simple cubic_rectification function.
"""
import numpy as np
import pandas as pd
# plotting tools
import plotly
import plotly.express as px
# import matplotlib.pyplot as plt
import plotly.figure_factory as ff
import plotly.graph_objects as go
class Layout:
def __init__(self):
pass
def patriotic(self):
go.Layout()
class dataplot:
def __init__(self, df: pd.DataFrame):
pass
| 15.678571 | 54 | 0.710706 |
342790f3e1859ece20ca94c4821423036d5968dd | 639 | py | Python | tests/unit/xgb/__init__.py | mikemckiernan/models | 674a3edf14c62bf0d5490f5433eed15abec416f8 | [
"Apache-2.0"
] | null | null | null | tests/unit/xgb/__init__.py | mikemckiernan/models | 674a3edf14c62bf0d5490f5433eed15abec416f8 | [
"Apache-2.0"
] | 1 | 2022-03-08T13:41:41.000Z | 2022-03-15T17:42:08.000Z | tests/unit/xgb/__init__.py | mikemckiernan/models | 674a3edf14c62bf0d5490f5433eed15abec416f8 | [
"Apache-2.0"
] | null | null | null | #
# Copyright (c) 2021, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
pytest.importorskip("xgboost")
| 31.95 | 74 | 0.757433 |
f1db81addee468d94e7f0bc6e478d634e1878bec | 626 | py | Python | parquet_integration/bench_read.py | ives9638/arrow2 | b86508e67ca4b08c4544626f7bbe55cf5bd02961 | [
"Apache-2.0"
] | 1 | 2022-02-20T00:10:06.000Z | 2022-02-20T00:10:06.000Z | parquet_integration/bench_read.py | ives9638/arrow2 | b86508e67ca4b08c4544626f7bbe55cf5bd02961 | [
"Apache-2.0"
] | null | null | null | parquet_integration/bench_read.py | ives9638/arrow2 | b86508e67ca4b08c4544626f7bbe55cf5bd02961 | [
"Apache-2.0"
] | null | null | null | import timeit
import io
import pyarrow.parquet
def bench(log2_size: int, datatype: str):
with open(f"fixtures/pyarrow3/v1/benches_{2**log2_size}.parquet", "rb") as f:
data = f.read()
data = io.BytesIO(data)
def f():
pyarrow.parquet.read_table(data, columns=[datatype])
seconds = timeit.Timer(f).timeit(number=512) / 512
microseconds = seconds * 1000 * 1000
print(f"read {datatype} 2^{log2_size} time: {microseconds:.2f} us")
#for i in range(10, 22, 2):
# bench(i, "int64")
for i in range(10, 22, 2):
bench(i, "string")
for i in range(10, 22, 2):
bench(i, "bool")
| 23.185185 | 81 | 0.629393 |
b20de80e6da6837d61cd84cbe13d6eb832469068 | 17,240 | py | Python | tests/test_strings.py | MadyDixit/algorithms | 76b0dafce7fd788651f23dba92bae4c19a822f5f | [
"MIT"
] | 1 | 2020-02-21T13:12:00.000Z | 2020-02-21T13:12:00.000Z | tests/test_strings.py | XZYCR7/algorithms-3 | 38173ae71780f0cb7f44373867a2ec559b239056 | [
"MIT"
] | 27 | 2020-02-21T14:02:28.000Z | 2020-02-27T12:21:42.000Z | tests/test_strings.py | XZYCR7/algorithms-3 | 38173ae71780f0cb7f44373867a2ec559b239056 | [
"MIT"
] | 1 | 2020-10-26T00:43:44.000Z | 2020-10-26T00:43:44.000Z | from algorithms.strings import (
add_binary,
match_symbol, match_symbol_1, bracket,
decode_string,
delete_reoccurring_characters,
domain_name_1, domain_name_2,
encode, decode,
group_anagrams,
int_to_roman,
is_palindrome, is_palindrome_reverse,
is_palindrome_two_pointer, is_palindrome_stack,
is_rotated, is_rotated_v1,
license_number,
make_sentence,
is_merge_recursive, is_merge_iterative,
multiply,
is_one_edit, is_one_edit2,
rabin_karp,
ultra_pythonic, iterative, recursive, pythonic,
reverse_vowel,
reverse_words,
roman_to_int,
strip_url_params1, strip_url_params2, strip_url_params3,
is_valid_coordinates_0, is_valid_coordinates_1,
is_valid_coordinates_regular_expression,
word_squares,
convert_morse_word, unique_morse,
judge_circle,
strong_password,
caesar_cipher,
contain_string,
count_binary_substring,
repeat_string,
text_justification,
min_distance,
longest_common_prefix_v1, longest_common_prefix_v2, longest_common_prefix_v3,
rotate,
first_unique_char,
repeat_substring,
atbash,
knuth_morris_pratt
)
import unittest
class TestAddBinary(unittest.TestCase):
"""[summary]
Test for the file add_binary.py
Arguments:
unittest {[type]} -- [description]
"""
def test_add_binary(self):
self.assertEqual("100", add_binary("11", "1"))
self.assertEqual("101", add_binary("100", "1"))
self.assertEqual("10", add_binary("1", "1"))
class TestBreakingBad(unittest.TestCase):
"""[summary]
Test for the file breaking_bad.py
Arguments:
unittest {[type]} -- [description]
"""
def setUp(self):
self.words = ['Amazon', 'Microsoft', 'Google']
self.symbols = ['i', 'Am', 'cro', 'le', 'abc']
self.result = ['M[i]crosoft', '[Am]azon', 'Mi[cro]soft', 'Goog[le]']
def test_match_symbol(self):
self.assertEqual(self.result, match_symbol(self.words, self.symbols))
def test_match_symbol_1(self):
self.assertEqual(['[Am]azon', 'Mi[cro]soft', 'Goog[le]'], match_symbol_1(self.words, self.symbols))
def test_bracket(self):
self.assertEqual(('[Am]azon', 'Mi[cro]soft', 'Goog[le]'), bracket(self.words, self.symbols))
class TestDecodeString(unittest.TestCase):
"""[summary]
Test for the file decode_string.py
Arguments:
unittest {[type]} -- [description]
"""
def test_decode_string(self):
self.assertEqual("aaabcbc", decode_string("3[a]2[bc]"))
self.assertEqual("accaccacc", decode_string("3[a2[c]]"))
class TestDeleteReoccurring(unittest.TestCase):
"""[summary]
Test for the file delete_reoccurring.py
Arguments:
unittest {[type]} -- [description]
"""
def test_delete_reoccurring_characters(self):
self.assertEqual("abc", delete_reoccurring_characters("aaabcccc"))
class TestDomainExtractor(unittest.TestCase):
"""[summary]
Test for the file domain_extractor.py
Arguments:
unittest {[type]} -- [description]
"""
def test_valid(self):
self.assertEqual(domain_name_1("https://github.com/SaadBenn"), "github")
def test_invalid(self):
self.assertEqual(domain_name_2("http://google.com"), "google")
class TestEncodeDecode(unittest.TestCase):
"""[summary]
Test for the file encode_decode.py
Arguments:
unittest {[type]} -- [description]
"""
def test_encode(self):
self.assertEqual("4:keon2:is7:awesome", encode("keon is awesome"))
def test_decode(self):
self.assertEqual(['keon', 'is', 'awesome'], decode("4:keon2:is7:awesome"))
class TestGroupAnagrams(unittest.TestCase):
"""[summary]
Test for the file group_anagrams.py
Arguments:
unittest {[type]} -- [description]
"""
def test_group_anagrams(self):
self.assertEqual([['eat', 'tea', 'ate'], ['tan', 'nat'], ['bat']], \
group_anagrams(["eat", "tea", "tan", "ate", "nat", "bat"]))
class TestIntToRoman(unittest.TestCase):
"""[summary]
Test for the file int_to_roman.py
Arguments:
unittest {[type]} -- [description]
"""
def test_int_to_roman(self):
self.assertEqual("DCXLIV", int_to_roman(644))
self.assertEqual("I", int_to_roman(1))
self.assertEqual("MMMCMXCIX", int_to_roman(3999))
class TestIsPalindrome(unittest.TestCase):
"""[summary]
Test for the file is_palindrome.py
Arguments:
unittest {[type]} -- [description]
"""
def test_is_palindrome(self):
# 'Otto' is a old german name.
self.assertTrue(is_palindrome("Otto"))
self.assertFalse(is_palindrome("house"))
def test_is_palindrome_reverse(self):
# 'Otto' is a old german name.
self.assertTrue(is_palindrome_reverse("Otto"))
self.assertFalse(is_palindrome_reverse("house"))
def test_is_palindrome_two_pointer(self):
# 'Otto' is a old german name.
self.assertTrue(is_palindrome_two_pointer("Otto"))
self.assertFalse(is_palindrome_two_pointer("house"))
def test_is_palindrome_stack(self):
# 'Otto' is a old german name.
self.assertTrue(is_palindrome_stack("Otto"))
self.assertFalse(is_palindrome_stack("house"))
class TestIsRotated(unittest.TestCase):
"""[summary]
Test for the file is_rotated.py
Arguments:
unittest {[type]} -- [description]
"""
def test_is_rotated(self):
self.assertTrue(is_rotated("hello", "hello"))
self.assertTrue(is_rotated("hello", "llohe"))
self.assertFalse(is_rotated("hello", "helol"))
self.assertFalse(is_rotated("hello", "lloh"))
self.assertTrue(is_rotated("", ""))
def test_is_rotated_v1(self):
self.assertTrue(is_rotated_v1("hello", "hello"))
self.assertTrue(is_rotated_v1("hello", "llohe"))
self.assertFalse(is_rotated_v1("hello", "helol"))
self.assertFalse(is_rotated_v1("hello", "lloh"))
self.assertTrue(is_rotated_v1("", ""))
class TestRotated(unittest.TestCase):
def test_rotate(self):
self.assertEqual("llohe", rotate("hello", 2))
self.assertEqual("hello", rotate("hello", 5))
self.assertEqual("elloh", rotate("hello", 6))
self.assertEqual("llohe", rotate("hello", 7))
class TestLicenseNumber(unittest.TestCase):
"""[summary]
Test for the file license_number.py
Arguments:
unittest {[type]} -- [description]
"""
def test_license_number(self):
self.assertEqual("a-b-c-d-f-d-d-f", license_number("a-bc-dfd-df", 1))
self.assertEqual("ab-cd-fd-df", license_number("a-bc-dfd-df", 2))
self.assertEqual("ab-cdf-ddf", license_number("a-bc-dfd-df", 3))
self.assertEqual("abcd-fddf", license_number("a-bc-dfd-df", 4))
self.assertEqual("abc-dfddf", license_number("a-bc-dfd-df", 5))
class TestMakeSentence(unittest.TestCase):
"""[summary]
Test for the file make_sentence.py
Arguments:
unittest {[type]} -- [description]
"""
def test_make_sentence(self):
dictionarys = ["", "app", "let", "t", "apple", "applet"]
word = "applet"
self.assertTrue(make_sentence(word, dictionarys))
class TestMergeStringChecker(unittest.TestCase):
"""[summary]
Test for the file merge_string_checker.py
Arguments:
unittest {[type]} -- [description]
"""
def test_is_merge_recursive(self):
self.assertTrue(is_merge_recursive("codewars", "cdw", "oears"))
def test_is_merge_iterative(self):
self.assertTrue(is_merge_iterative("codewars", "cdw", "oears"))
class TestMultiplyStrings(unittest.TestCase):
"""[summary]
Test for the file multiply_strings.py
Arguments:
unittest {[type]} -- [description]
"""
def test_multiply(self):
self.assertEqual("23", multiply("1", "23"))
self.assertEqual("529", multiply("23", "23"))
self.assertEqual("0", multiply("0", "23"))
self.assertEqual("1000000", multiply("100", "10000"))
class TestOneEditDistance(unittest.TestCase):
"""[summary]
Test for the file one_edit_distance.py
Arguments:
unittest {[type]} -- [description]
"""
def test_is_one_edit(self):
self.assertTrue(is_one_edit("abc", "abd"))
self.assertFalse(is_one_edit("abc", "aed"))
self.assertFalse(is_one_edit("abcd", "abcd"))
def test_is_one_edit2(self):
self.assertTrue(is_one_edit2("abc", "abd"))
self.assertFalse(is_one_edit2("abc", "aed"))
self.assertFalse(is_one_edit2("abcd", "abcd"))
class TestRabinKarp(unittest.TestCase):
"""[summary]
Test for the file rabin_karp.py
Arguments:
unittest {[type]} -- [description]
"""
def test_rabin_karp(self):
self.assertEqual(3, rabin_karp("abc", "zsnabckfkd"))
self.assertEqual(None, rabin_karp("abc", "zsnajkskfkd"))
class TestReverseString(unittest.TestCase):
"""[summary]
Test for the file reverse_string.py
Arguments:
unittest {[type]} -- [description]
"""
def test_recursive(self):
self.assertEqual("ereht olleh", recursive("hello there"))
def test_iterative(self):
self.assertEqual("ereht olleh", iterative("hello there"))
def test_pythonic(self):
self.assertEqual("ereht olleh", pythonic("hello there"))
def test_ultra_pythonic(self):
self.assertEqual("ereht olleh", ultra_pythonic("hello there"))
class TestReverseVowel(unittest.TestCase):
"""[summary]
Test for the file reverse_vowel.py
Arguments:
unittest {[type]} -- [description]
"""
def test_reverse_vowel(self):
self.assertEqual("holle", reverse_vowel("hello"))
class TestReverseWords(unittest.TestCase):
"""[summary]
Test for the file reverse_words.py
Arguments:
unittest {[type]} -- [description]
"""
def test_reverse_words(self):
self.assertEqual("pizza like I and kim keon am I", \
reverse_words("I am keon kim and I like pizza"))
class TestRomanToInt(unittest.TestCase):
"""[summary]
Test for the file roman_to_int.py
Arguments:
unittest {[type]} -- [description]
"""
def test_roman_to_int(self):
self.assertEqual(621, roman_to_int("DCXXI"))
self.assertEqual(1, roman_to_int("I"))
self.assertEqual(3999, roman_to_int("MMMCMXCIX"))
# class TestStripUrlParams(unittest.TestCase):
# """[summary]
# Test for the file strip_urls_params.py
# Arguments:
# unittest {[type]} -- [description]
# """
# def test_strip_url_params1(self):
# self.assertEqual(strip_url_params1("www.saadbenn.com?a=1&b=2&a=2"), "www.saadbenn.com?a=1&b=2")
# self.assertEqual(strip_url_params1("www.saadbenn.com?a=1&b=2", ['b']), "www.saadbenn.com?a=1")
# def test_strip_url_params2(self):
# self.assertEqual(strip_url_params2("www.saadbenn.com?a=1&b=2&a=2"), "www.saadbenn.com?a=1&b=2")
# self.assertEqual(strip_url_params2("www.saadbenn.com?a=1&b=2", ['b']), "www.saadbenn.com?a=1")
# def test_strip_url_params3(self):
# self.assertEqual(strip_url_params3("www.saadbenn.com?a=1&b=2&a=2"), "www.saadbenn.com?a=1&b=2")
# self.assertEqual(strip_url_params3("www.saadbenn.com?a=1&b=2", ['b']), "www.saadbenn.com?a=1")
class TestValidateCoordinates(unittest.TestCase):
"""[summary]
Test for the file validate_coordinates.py
Arguments:
unittest {[type]} -- [description]
"""
def test_valid(self):
valid_coordinates = ["-23, 25", "4, -3", "90, 180", "-90, -180"]
for coordinate in valid_coordinates:
self.assertTrue(is_valid_coordinates_0(coordinate))
def test_invalid(self):
invalid_coordinates = ["23.234, - 23.4234", "99.234, 12.324", "6.325624, 43.34345.345", "0, 1,2", "23.245, 1e1"]
for coordinate in invalid_coordinates:
self.assertFalse(is_valid_coordinates_0(coordinate))
class TestWordSquares(unittest.TestCase):
"""[summary]
Test for the file word_squares.py
Arguments:
unittest {[type]} -- [description]
"""
def test_word_squares(self):
self.assertEqual([['wall', 'area', 'lead', 'lady'], ['ball', 'area', 'lead', 'lady']], \
word_squares(["area", "lead", "wall", "lady", "ball"]))
class TestUniqueMorse(unittest.TestCase):
def test_convert_morse_word(self):
self.assertEqual("--...-.", convert_morse_word("gin"))
self.assertEqual("--...--.", convert_morse_word("msg"))
def test_unique_morse(self):
self.assertEqual(2, unique_morse(["gin", "zen", "gig", "msg"]))
class TestJudgeCircle(unittest.TestCase):
def test_judge_circle(self):
self.assertTrue(judge_circle("UDLRUD"))
self.assertFalse(judge_circle("LLRU"))
class TestStrongPassword(unittest.TestCase):
def test_strong_password(self):
self.assertEqual(3, strong_password(3, "Ab1"))
self.assertEqual(1, strong_password(11, "#Algorithms"))
class TestCaesarCipher(unittest.TestCase):
def test_caesar_cipher(self):
self.assertEqual("Lipps_Asvph!", caesar_cipher("Hello_World!", 4))
self.assertEqual("okffng-Qwvb", caesar_cipher("middle-Outz", 2))
class TestContainString(unittest.TestCase):
def test_contain_string(self):
self.assertEqual(-1, contain_string("mississippi", "issipi"))
self.assertEqual(0, contain_string("Hello World", ""))
self.assertEqual(2, contain_string("hello", "ll"))
class TestCountBinarySubstring(unittest.TestCase):
def test_count_binary_substring(self):
self.assertEqual(6, count_binary_substring("00110011"))
self.assertEqual(4, count_binary_substring("10101"))
self.assertEqual(3, count_binary_substring("00110"))
class TestCountBinarySubstring(unittest.TestCase):
def test_repeat_string(self):
self.assertEqual(3, repeat_string("abcd", "cdabcdab"))
self.assertEqual(4, repeat_string("bb", "bbbbbbb"))
class TestTextJustification(unittest.TestCase):
def test_text_justification(self):
self.assertEqual(["This is an",
"example of text",
"justification. "],
text_justification(["This", "is", "an", "example", "of", "text", "justification."]
, 16)
)
self.assertEqual(["What must be",
"acknowledgment ",
"shall be "],
text_justification(["What", "must", "be", "acknowledgment", "shall", "be"]
, 16)
)
class TestMinDistance(unittest.TestCase):
def test_min_distance(self):
self.assertEqual(2, min_distance("sea", "eat"))
self.assertEqual(6, min_distance("abAlgocrithmf", "Algorithmmd"))
class TestLongestCommonPrefix(unittest.TestCase):
def test_longest_common_prefix(self):
# Test first solution
self.assertEqual("fl", longest_common_prefix_v1(["flower","flow","flight"]))
self.assertEqual("", longest_common_prefix_v1(["dog","racecar","car"]))
# Test second solution
self.assertEqual("fl", longest_common_prefix_v2(["flower","flow","flight"]))
self.assertEqual("", longest_common_prefix_v2(["dog","racecar","car"]))
# Test third solution
self.assertEqual("fl", longest_common_prefix_v3(["flower","flow","flight"]))
self.assertEqual("", longest_common_prefix_v3(["dog","racecar","car"]))
class TestFirstUniqueChar(unittest.TestCase):
def test_first_unique_char(self):
self.assertEqual(0, first_unique_char("leetcode"))
self.assertEqual(2, first_unique_char("loveleetcode"))
class TestRepeatSubstring(unittest.TestCase):
def test_repeat_substring(self):
self.assertTrue(repeat_substring("abab"))
self.assertFalse(repeat_substring("aba"))
self.assertTrue(repeat_substring("abcabcabcabc"))
class TestAtbashCipher(unittest.TestCase):
"""[summary]
Test for the file atbash_cipher.py
Arguments:
unittest {[type]} -- [description]
"""
def test_atbash_cipher(self):
self.assertEqual("zyxwvutsrqponml", atbash("abcdefghijklmno"))
self.assertEqual("KbgslM", atbash("PythoN"))
self.assertEqual("AttaCK at DawN", atbash("ZggzXP zg WzdM"))
self.assertEqual("ZggzXP zg WzdM", atbash("AttaCK at DawN"))
class TestKnuthMorrisPratt(unittest.TestCase):
"""[summary]
Test for the file knuth_morris_pratt.py
Arguments:
unittest {[type]} -- [description]
"""
def test_knuth_morris_pratt(self):
self.assertEqual([0, 1, 2, 3, 4], knuth_morris_pratt("aaaaaaa", "aaa"))
self.assertEqual([0, 4], knuth_morris_pratt("abcdabc", "abc"))
self.assertEqual([], knuth_morris_pratt("aabcdaab", "aba"))
if __name__ == "__main__":
unittest.main()
| 31.345455 | 120 | 0.642285 |
3ef928db56f0ad2f1f67a8e2266fdd8ca0b8c515 | 497 | py | Python | ryanthtrablog/posts/models.py | ryanthtra/ryanthtra-blog | 6df889e3f58635309ae44614154ef3d810a923b9 | [
"MIT"
] | null | null | null | ryanthtrablog/posts/models.py | ryanthtra/ryanthtra-blog | 6df889e3f58635309ae44614154ef3d810a923b9 | [
"MIT"
] | null | null | null | ryanthtrablog/posts/models.py | ryanthtra/ryanthtra-blog | 6df889e3f58635309ae44614154ef3d810a923b9 | [
"MIT"
] | null | null | null | from django.db import models
# Create your models here.
class Post(models.Model):
    """A single blog post: title, body text, header image, and publish date."""

    title = models.CharField(max_length=255)
    publish_date = models.DateTimeField()
    image = models.ImageField(upload_to='media/')
    body = models.TextField()

    def __str__(self):
        """Use the title wherever the object is rendered (e.g. the admin list)."""
        return self.title

    def publish_date_pretty(self):
        """Return the publish date formatted like 'Jan  1 2020'.

        NOTE(review): '%e' is a platform-specific strftime directive
        (glibc extension) — confirm it behaves on the deployment OS.
        """
        return self.publish_date.strftime('%b %e %Y')

    def summary(self):
        """Return the first 100 characters of the body as a preview snippet."""
        return self.body[:100]
c46108b28b4d4a1fa9a465094c0ce5f48adfb6c3 | 318,468 | py | Python | languages/es.py | ptressel/sahana-eden-madpub | b16418b36d0fb781fd045f7e7edd1a30259a1f35 | [
"MIT"
] | 1 | 2016-01-01T12:22:48.000Z | 2016-01-01T12:22:48.000Z | languages/es.py | ptressel/sahana-eden-madpub | b16418b36d0fb781fd045f7e7edd1a30259a1f35 | [
"MIT"
] | null | null | null | languages/es.py | ptressel/sahana-eden-madpub | b16418b36d0fb781fd045f7e7edd1a30259a1f35 | [
"MIT"
] | 1 | 2020-04-29T13:58:31.000Z | 2020-04-29T13:58:31.000Z | # coding: utf8
{
' (leave empty to detach account)': ' (leave empty to detach account)',
' Source Type': ' Tipo de Fuente',
' by ': 'por ',
' is envisioned to be composed of several sub-modules that work together to provide complex functionality for the management of relief and project items by an organization. This includes an intake system, a warehouse management system, commodity tracking, supply chain management, fleet management, procurement, financial tracking and other asset and resource management capabilities.': ' se prevé que se compone de varios sub-módulos que funcionan conjuntamente para proporcionar una funcionalidad compleja para la gestión de socorro y elementos de proyecto por una organización. Esto incluye un sistema de admisión, un sistema de gestión de almacenes, seguimiento de productos, gestión de la cadena de suministro, gestión de flotas, la contratación, supervisión financiera y otros activos y capacidades de gestión de recursos.',
' is the main communications hub of the Sahana system. It is used to send alerts and/or messages using SMS & Email to various groups and individuals before, during and after a disaster.': 'es el eje principal de comunicaciones del sistema Sahana. Se utiliza para enviar alertas y/o mensajes a través de SMS y por correo electrónico a varios grupos e individuales antes, durante y después de un desastre.',
' on ': 'encendido',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"actualizar" es una expresión opcional, como "campo1 = \'nuevovalor\'". No se puede actualizar o eliminar los resultados de un JOIN',
'# of Houses Damaged': '# de Viviendas dañadas',
'# of Houses Destroyed': '# de Viviendas destruidas',
'# of International Staff': '# of International Staff',
'# of National Staff': '# of National Staff',
'# of People Affected': '# de Personas Afectadas',
'# of People Deceased': '# de Personas fallecidas',
'# of People Injured': '# de Personas accidentadas',
'# of Vehicles': '# of Vehicles',
'%Y-%m-%d': 'Y-% m-% d%',
'%Y-%m-%d %H:%M': 'Y-% m-%% d% H:% M',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'%Y-%m-%d %H:%M:00': 'Y-% m-%% d% H:% M: 00',
'%s rows deleted': '%s filas eliminadas',
'%s rows updated': '%s filas actualizadas',
"'Sounds-like' name search allowing search even the spelling of the name is not known exactly": "'Suena como' búsqueda de nombres que permite la búsqueda si incluso la ortografía del nombre no se conoce con exactitud",
'(Constraints Only)': '(Únicamente Restricciones)',
') & then click on the map below to adjust the Lat/Lon fields:': ') Y después para ajustar los campos Lat/Lon debe hacer clic en el mapa que se encuentra abajo:',
'* Required Fields': '* Required Fields',
'0-15 minutes': '0-15 minutos',
'1 Assessment': '1 Evaluacion',
'1 location, shorter time, can contain multiple Tasks': '1 ubicación, tiempo corto, puede contener múltiples tareas',
'1-3 days': '1-3 días',
'1. Fill the necessary fields in BLOCK letters.': '1. Fill the necessary fields in BLOCK letters.',
'15-30 minutes': '15-30 minutos',
'2 different options are provided here currently:': 'Dos diferentes opciones son actualmente ofrecidas aquí:',
'2. Always use one box per letter and leave one box space to seperate words.': '2. Always use one box per letter and leave one box space to seperate words.',
'2x4 Car': 'Automóvil 2x4',
'30-60 minutes': '30-60 minutos',
'4-7 days': '4-7 días',
'4x4 Car': 'Automóvil 4x4',
'8-14 days': '8-14 días',
'A Reference Document such as a file, URL or contact person to verify this data. You can type the 1st few characters of the document name to link to an existing document.': 'Un Documento de Referencia como un archivo, URL o persona de contacto para verificar esta información. Usted puede digitar los 1ros caracteres del nombre del documento para enlazar a un domumento existente.',
'A Warehouse is a physical place to store items.': 'A Warehouse is a physical place to store items.',
'A Warehouse/Site is a physical location with an address and GIS data where Items are Stored. It can be a Building, a particular area in a city or anything similar.': 'A Warehouse/Site is a physical location with an address and GIS data where Items are Stored. It can be a Building, a particular area in a city or anything similar.',
'A brief description of the group (optional)': 'Una breve descripción del grupo (opcional)',
'A collection of Feature Classes which can be displayed together on a map or exported together.': 'Una colección de Clases de Características las cual es se pueden proyectar juntas en un mapa, o ser exportadas juntas. ',
'A file downloaded from a GPS containing a series of geographic points in XML format.': 'A file downloaded from a GPS containing a series de geographic points in XML format.',
'A file in GPX format taken from a GPS whose timestamps can be correlated with the timestamps on the photos to locate them on the map.': 'Un archivo en un fomato GPX tomado de un GPS con marcas de tiempo puede ser correlacionado con las marcas de tiempo en las fotos para localizarlos en el mapa.',
'A library of digital resources, such as photos, documents and reports': 'Librería digital de recursos como fotos, documentos y reportes',
'A place within a Site like a Shelf, room, bin number etc.': 'A place within a Site like a Shelf, room, bin number etc.',
'A practical example can be of a report of lost person. Now if one machine register him to be found on 16th August and another machine registers him to found on 17th August, then e.g. Newer timestamp will replace data entry of your machine with that of foriegn machine because that is newer one.': 'Un ejemplo práctico puede ser de un informe de la persona perdida. Ahora bien, si una máquina le registro que se ha encontrado el 16 de agosto y otra máquina lo inscribe para fundar el 17 de agosto, a continuación, por ejemplo, nuevos timestamp sustituirá a la entrada de datos de la máquina con la de la máquina exterior debido a que es más reciente un.',
'A snapshot of the bin or additional documents that contain supplementary information about it can be uploaded here.': 'A snapshot of the bin or additional documents that contain supplementary information about it can be uploaded here.',
'A snapshot of the location or additional documents that contain supplementary information about the Site Location can be uploaded here.': 'Una foto instantánea de la localización o documentos adicionales que contienen información complementaria sobre la ubicación del sitio se pueden subir aquí.',
'A snapshot of the location or additional documents that contain supplementary information about the Site can be uploaded here.': 'A snapshot of the location or additional documents that contain supplementary information about the Site can be uploaded here.',
'A survey series with id %s does not exist. Please go back and create one.': 'A inspección series with id %s does not exist. Please go back and create one.',
'ABOUT': 'ABOUT',
'ABOUT THIS MODULE': 'ACERCA DE ESTE MODULO',
'ACCESS DATA': 'DATOS DE ACCESO',
'ADL': 'ADL',
'ANY': 'ANY',
'API is documented here': 'API is documented here',
'Ability to Fill Out Surveys': 'Ability to Fill Out Inspecciones',
'Ability to customize the list of details tracked at a Shelter': 'Posibilidad de personalizar la lista de detalles de seguimiento en un Refugio',
'Ability to customize the list of human resource tracked at a Shelter': 'Posibilidad de personalizar la lista de seguimiento de los recursos humanos en un refugio',
'Ability to customize the list of important facilities needed at a Shelter': 'Posibilidad de personalizar la lista de servicios importantes necesitados en un Refugio',
'Ability to track partial fulfillment of the request': 'Capacidad de seguimiento de cumplimiento parcial de la solicitud',
'Ability to view Results of Completed and/or partially filled out Surveys': 'Ability to view Results de Completed and/or partially filled out Inspecciones',
'Abkhazia': 'Abjasia',
'About': 'Acerca',
'About Sahana': 'Acerca de Sahana',
'About Sahana Eden': 'Acerca de Sahana Eden ',
'About this module': 'About this module',
'Access denied': 'Acceso denegado ',
'Accessibility of Affected Location': 'Accessibility de Affected Location',
'Account registered, however registration is still pending approval - please wait until confirmation received.': 'Account registered, however registration is still pending approval - please wait until confirmation received.',
'Accuracy': 'Precisión',
'Acronym': 'Acronimo',
"Acronym of the organization's name, eg. IFRC.": "Acronym of the organization's name, eg. IFRC.",
'Actionable': 'Procesable',
'Actionable by all targeted recipients': 'Recurribles por todos los destinatarios especificados',
'Actionable only by designated exercise participants; exercise identifier SHOULD appear in <note>': 'Actionable only by designated exercise participants; exercise identifier SHOULD appear in <note>',
'Actioned?': 'Actioned?',
'Active Problems': 'Problemas Activos',
'Activities': 'Actividades',
'Activities matching Assessments:': 'Activities matching Assessments:',
'Activities of boys 13-17yrs before disaster': 'Activities of boys 13-17yrs before disaster',
'Activities of boys 13-17yrs now': 'Activities of boys 13-17yrs now',
'Activities of boys <12yrs before disaster': 'Activities of boys <12yrs before disaster',
'Activities of boys <12yrs now': 'Activities of boys <12yrs now',
'Activities of girls 13-17yrs before disaster': 'Actividades de las niñas de 13-17 años antes del desastre',
'Activities of girls 13-17yrs now': 'Activities of girls 13-17yrs now',
'Activities of girls <12yrs before disaster': 'Activities of girls <12yrs before disaster',
'Activities of girls <12yrs now': 'Activities of girls <12yrs now',
'Activities:': 'Actividades:',
'Activity': 'Activity',
'Activity Added': 'Actividad Agregado',
'Activity Deleted': 'Actividad Eliminado',
'Activity Details': 'Actividad Detalles',
'Activity Report': 'Reporte de Actividad',
'Activity Reports': 'Reportes de Actividad',
'Activity Type': 'Tipo de Actividad',
'Activity Updated': 'Actividad Actualizada',
'Add': 'Agregar',
'Add Activity': 'Agregar Actividad',
'Add Activity Report': 'Agregar Reporte de Actividad',
'Add Activity Type': 'Agregar Actividad Tipo',
'Add Address': 'Agregar Dirección',
'Add Aid Request': 'Agregar Solicitud de Ayuda',
'Add Assessment': 'Agregar Assessment',
'Add Assessment Summary': 'Add Assessment Summary',
'Add Baseline': 'Add Baseline',
'Add Baseline Type': 'Add Baseline Type',
'Add Bed Type': 'Add Bed Type',
'Add Bin Type': 'Agregar Tipo de Repositorio',
'Add Bins': 'Agregar Repositorios',
'Add Budget': 'Agregar Presupuesto',
'Add Bundle': 'Agregar Paquete',
'Add Catalog': 'Agregar Catalogo',
'Add Catalog Item': 'Add Catalog Item',
'Add Catalog.': 'Agregar Catalogo.',
'Add Category': 'Agregar Categoria',
'Add Category<>Sub-Category<>Catalog Relation': 'Add Category<>Sub-Category<>Catalog Relation',
'Add Category<>Sub-Category<>Catalog Relation ': 'Agregar Categoría<>Sub-Categoría<>Relación de Catálogo ',
'Add Cholera Treatment Capability Information': 'Add Cholera Treatment Capability Information',
'Add Cluster': 'Agregar Agrupación',
'Add Cluster Subsector': 'Add Cluster Subsector',
'Add Config': 'Agregar Configuración',
'Add Contact': 'Agregar Contacto',
'Add Contact Information': 'Agregar Contact Information',
'Add Disaster Victims': 'Agregar Víctimas de Desastre',
'Add Distribution': 'Agregar Distribution',
'Add Distribution.': 'Add Distribution.',
'Add Donor': 'Agregar Donor',
'Add Feature Class': 'Agregar Clase de Funciones',
'Add Feature Group': 'Agregar Grupo de Características',
'Add Feature Layer': 'Agregar Feature Layer',
'Add Find Report': 'Agregar Reporte de Busqueda',
'Add Flood Report': 'Agregar Inundación Report',
'Add GIS Feature': 'Agregar Función GIS',
'Add Group': 'Agregar Grupo',
'Add Group Member': 'Agregar Group Member',
'Add Group Membership': 'Agregar Membresía de Grupo',
'Add Hospital': 'Agregar Hospital',
'Add Identification Report': 'Agregar Reporte de Identificación',
'Add Identity': 'Agregar Identidad',
'Add Image': 'Agregar Imágen',
'Add Impact': 'Add Impact',
'Add Impact Type': 'Add Impact Type',
'Add Incident': 'Agregar Incident',
'Add Incident Report': 'Agregar Incident Report',
'Add Incoming Email': 'Agregar Email Entrante',
'Add Incoming SMS': 'Agregar SMS Entrante',
'Add Inventory Item': 'Agregar insumo en Centro de Acopio',
'Add Inventory Store': 'Agregar Centro de Acopio',
'Add Item': 'Agregar Artículo',
'Add Item (s)': 'Agregar Artículo (s)',
'Add Item Catalog': 'Agregar Catálogo de Artículos',
'Add Item Catalog ': 'Agregar Catálogo de Artículos',
'Add Item Catalog Category ': 'Agregar Categoría de Catálogo de Artículos',
'Add Item Category': 'Agregar Categoría de Artículos',
'Add Item Packet': 'Add Item Packet',
'Add Item Sub-Category': 'Agregar Sub-Categoría de Artículos',
'Add Item Sub-Category ': 'Agregar Sub-Categoría de Artículos',
'Add Item to Shipment': 'Add Item to Shipment',
'Add Key': 'Agregar Llave',
'Add Kit': 'Agregar Equipo',
'Add Layer': 'Agregar Capa',
'Add Line': 'Agregar Línea',
'Add Location': 'Agregar Lugar',
'Add Locations': 'Agregar Ubicación',
'Add Log Entry': 'Agregar Entrada de Registro',
'Add Marker': 'Agregar Marcador',
'Add Member': 'Agregar Member',
'Add Membership': 'Agregar Afiliación',
'Add Message': 'Agregar Mensaje',
'Add Metadata': 'Agregar Metadata',
'Add Need': 'Add Need',
'Add Need Type': 'Add Need Type',
'Add New': 'Add New',
'Add New ': 'Agregar Nueva ',
'Add New Activity': 'Agregar Nueva Actividad',
'Add New Address': 'Agregar Nueva Dirección',
'Add New Aid Request': 'Agregar Nueva Petición de Auxilio ',
'Add New Assessment': 'Agregar Nueva Assessment',
'Add New Assessment Summary': 'Add New Assessment Summary',
'Add New Baseline': 'Add New Baseline',
'Add New Baseline Type': 'Add New Baseline Type',
'Add New Bin': 'Agregar Nuevo Repositorio',
'Add New Bin Type': 'Agregar Nuevo Tipo de Repositorio',
'Add New Budget': 'Agregar Nuevo Presupuesto',
'Add New Bundle': 'Agregar Nuevo Paquete',
'Add New Catalog Item': 'Add New Catalog Item',
'Add New Cluster': 'Agregar Nueva Agrupación',
'Add New Cluster Subsector': 'Add New Cluster Subsector',
'Add New Config': 'Agregar Nueva Configuración',
'Add New Contact': 'Agregar Nuevo Contacto',
'Add New Distribution': 'Agregar Nueva Distribution',
'Add New Distribution Item': 'Agregar Nueva Distribution Insumo',
'Add New Document': 'Agregar Nueva Document',
'Add New Donor': 'Agregar Donante Nuevo ',
'Add New Entry': 'Agregar Nueva Entrada',
'Add New Feature Class': 'Agregar Nueva Clase de Funcionalidad',
'Add New Feature Group': 'Agregar Nuevo Grupo de Funcionalidad',
'Add New Feature Layer': 'Agregar Nueva Feature Layer',
'Add New Find Report': 'Agregar Nuevo de Reporte de Búsqueda',
'Add New Flood Report': 'Agregar Nueva Inundación Report',
'Add New Group': 'Agregar Nuevo Grupo',
'Add New Group Membership': 'Agregar Nueva Membresía de Grupo',
'Add New Hospital': 'Agregar Nuevo Hospital',
'Add New Identity': 'Agregar Nueva Identidad',
'Add New Image': 'Agregar Nueva Imágen',
'Add New Impact': 'Add New Impact',
'Add New Impact Type': 'Add New Impact Type',
'Add New Incident': 'Agregar Nuevo Incidente ',
'Add New Incident Report': 'Agregar Nuevo Reporte de Incidente',
'Add New Inventory Item': 'Agregar Nuevo Artículo para Inventario',
'Add New Inventory Store': 'Add New Inventory Store',
'Add New Item': 'Agregar Nuevo Artículo',
'Add New Item Catalog': 'Agregar Nuevo Catálogo de Artículos',
'Add New Item Catalog Category': 'Agregar Nueva Categoría de Catálogo de Artículos',
'Add New Item Category': 'Agregar Nueva Insumo Category',
'Add New Item Packet': 'Add New Item Packet',
'Add New Item Sub-Category': 'Agregar Nueva Sub-Categoría de Artículos',
'Add New Item to Kit': 'Agregar Nuevo Artículo a Kit',
'Add New Key': 'Agregar Nueva Llave',
'Add New Kit': 'Agregar Nuevo Equipo',
'Add New Layer': 'Agregar Nueva Capa',
'Add New Location': 'Agregar Nuevo Lugar',
'Add New Log Entry': 'Agregar Nueva Entrada de Registro',
'Add New Marker': 'Agregar Nuevo Marcador',
'Add New Member': 'Agregar Nueva Member',
'Add New Membership': 'Agregar Nueva Membresía',
'Add New Metadata': 'Agregar Nueva Metadata',
'Add New Need': 'Add New Need',
'Add New Need Type': 'Add New Need Type',
'Add New New Inventory Store': 'Agregar Nueva Inventory Store',
'Add New Note': 'Add New Note',
'Add New Office': 'Agregar Nueva Oficina',
'Add New Organization': 'Agregar Nueva Organización',
'Add New Partner': 'Agregar Nuevo Asociado',
'Add New Peer': 'Agregar Nueva Peer',
'Add New Person': 'Agregar Nueva Persona',
'Add New Photo': 'Agregar Nueva Photo',
'Add New Position': 'Agregar Nueva Posición',
'Add New Problem': 'Agregar Nueva Problema',
'Add New Project': 'Agregar Nuevo Proyecto',
'Add New Projection': 'Agregar Nueva Proyección',
'Add New Rapid Assessment': 'Add New Rapid Assessment',
'Add New Received Item': 'Add New Received Item',
'Add New Record': 'Agregar Nuevo Registro',
'Add New Relief Item': 'Agregar Nueva Auxilio Insumo',
'Add New Report': 'Agregar Nuevo Informe',
'Add New Request': 'Agregar Nueva Requerimiento',
'Add New Request Item': 'Agregar Nueva Requerimiento Insumo',
'Add New Resource': 'Agregar Nuevo Recurso',
'Add New Response': 'Agregar Nueva Respuesta',
'Add New River': 'Agregar Nueva River',
'Add New Role': 'Agregar Nuevo Rol',
'Add New Role to User': 'Agregar Nuevo Rol a Usuario',
'Add New School District': 'Agregar Nueva School District',
'Add New School Report': 'Agregar Nueva School Report',
'Add New Sector': 'Agregar Nuevo Sector',
'Add New Sent Item': 'Add New Sent Item',
'Add New Setting': 'Agregar Nuevo Ajuste',
'Add New Shelter': 'Agregar Nuevo Albergue',
'Add New Shelter Service': 'Agregar nuevo servicio para albergue',
'Add New Shelter Type': 'Agregar nuevo tipo de albergue',
'Add New Shipment to Send': 'Add New Shipment to Send',
'Add New Site': 'Agregar Nuevo Sitio',
'Add New Skill': 'Agregar Nueva Habilidad',
'Add New Skill Type': 'Agregar Nueva Habilidad Tipo',
'Add New Solution': 'Agregar Nueva Solution',
'Add New Source': 'Agregar Nueva Fuente',
'Add New Staff': 'Agregar Nueva Staff',
'Add New Staff Type': 'Agregar Nuevo Tipo de Personal',
'Add New Storage Location': 'Agregar Nuevo Lugar de Almacenamiento',
'Add New Survey Answer': 'Agregar Nueva Inspección Answer',
'Add New Survey Question': 'Agregar Nueva Inspección Question',
'Add New Survey Section': 'Agregar Nueva Inspección Section',
'Add New Survey Series': 'Agregar Nueva Inspección Series',
'Add New Survey Template': 'Agregar Nueva Inspección Template',
'Add New Task': 'Agregar Nueva Tarea',
'Add New Team': 'Agregar Nueva Team',
'Add New Theme': 'Agregar Nuevo Tema',
'Add New Ticket': 'Agregar Nueva Ticket',
'Add New Track': 'Agregar Nueva Pista',
'Add New Unit': 'Agregar Nueva Unidad',
'Add New User': 'Agregar Nuevo Usuario',
'Add New User to Group': 'Agregar Nuevo Usuario a Grupo',
'Add New User to Role': 'Agregar Nuevo Usuario a Rol',
'Add New Warehouse': 'Add New Warehouse',
'Add New Warehouse Item': 'Add New Warehouse Item',
'Add Note': 'Add Note',
'Add Office': 'Agregar Oficina',
'Add Organization': 'Agregar Organización',
'Add Partner': 'Agregar Asociado',
'Add Peer': 'Agregar Peer',
'Add Person': 'Agregar Persona',
'Add Personal Effects': 'Agregar Efectos Personales',
'Add Photo': 'Añadir Foto',
'Add Point': 'Agregar Punto',
'Add Polygon': 'Agregar Polígono',
'Add Position': 'Agregar Posición',
'Add Problem': 'Agregar Problema',
'Add Project': 'Agregar Proyecto',
'Add Projection': 'Add Projection',
'Add Projections': 'Agregar Proyecciones',
'Add Question': 'Agregar Question',
'Add Rapid Assessment': 'Add Rapid Assessment',
'Add Recipient': 'Agregar Destinatario',
'Add Recipient Site': 'Agregar Sitio Recipiente',
'Add Recipient Site.': 'Agregar Sitio Recipiente.',
'Add Record': 'Agregar Registro',
'Add Recovery Report': 'Agregar Reporte de Recuperación',
'Add Reference Document': 'Agregar Reference Document',
'Add Relief Item': 'Agregar Auxilio Insumo',
'Add Report': 'Agregar Reporte',
'Add Request': 'Agregar Petición',
'Add Request Detail': 'Agregar Detalle de Petición',
'Add Request Item': 'Agregar Requerimiento Item',
'Add Resource': 'Agregar Recurso',
'Add Response': 'Agregar Response',
'Add River': 'Agregar River',
'Add Role': 'Agregar Rol',
'Add School District': 'Agregar School District',
'Add School Report': 'Agregar School Report',
'Add Section': 'Agregar Section',
'Add Sector': 'Add Sector',
'Add Sender Organization': 'Agregar Organización Remitente',
'Add Sender Site': 'Agregar Sitio Remitente',
'Add Sender Site.': 'Agregar Sitio Recipiente.',
'Add Service Profile': 'Agregar Perfil de Servicio',
'Add Setting': 'Agregar Ajuste',
'Add Shelter': 'Agregar Refugio',
'Add Shelter Service': 'Agregar Servicio de Refugio',
'Add Shelter Type': 'Agregar Refugio Tipo',
'Add Shipment Transit Log': 'Agregar Registro de Tránsito de Envío',
'Add Shipment/Way Bills': 'Agregar Billetes de Envío/Ruta',
'Add Site': 'Agregar Sitio',
'Add Site ': 'Agregar Sitio ',
'Add Skill': 'Agregar Habilidad',
'Add Skill Type': 'Agregar Habilidad Tipo',
'Add Skill Types': 'Agregar Tipos de Habilidades',
'Add Solution': 'Agregar Solution',
'Add Source': 'Agregar Fuente',
'Add Staff': 'Agregar Personal',
'Add Staff Type': 'Agregar Tipo de Personal',
'Add Status': 'Add Status',
'Add Storage Bin ': 'Agregar Repositorio de Almacenamiento ',
'Add Storage Bin Type': 'Agregar Tipo de Repositorio de Almacenamiento',
'Add Storage Location': 'Agregar Lugar de Almacenamiento',
'Add Storage Location ': 'Agregar Lugar de Almacenamiento ',
'Add Sub-Category': 'Agregar Sub-Categoría',
'Add Subscription': 'Agregar Subscription',
'Add Survey Answer': 'Agregar Respuesta Cuestionario',
'Add Survey Question': 'Agregar Pregunta a Cuestionario',
'Add Survey Section': 'Agregar Inspección Section',
'Add Survey Series': 'Agregar Inspección Series',
'Add Survey Template': 'Agregar Inspección Template',
'Add Task': 'Agregar Tarea',
'Add Team': 'Agregar Team',
'Add Theme': 'Agregar Tema',
'Add Ticket': 'Agregar Ticket',
'Add Unit': 'Agregar Unidad',
'Add Unit ': 'Agregar Unidad ',
'Add User': 'Agregar Usuario',
'Add Volunteer': 'Add Volunteer',
'Add Volunteer Registration': 'Agregar Registro de Voluntario',
'Add Warehouse': 'Add Warehouse',
'Add Warehouse Item': 'Add Warehouse Item',
'Add a New Inventory Location': 'Agregar Nueva Locación de Inventario',
'Add a New Relief Item': 'Agregar Nuevo Insumo de Ayuda',
'Add a Person': 'Agregar una Persona',
'Add a Reference Document such as a file, URL or contact person to verify this data.': 'Agregar a Reference Document such as a file, URL or contact person to verify this data.',
'Add a Reference Document such as a file, URL or contact person to verify this data. If you do not enter a Reference Document, your email will be displayed instead.': 'Add a Reference Document such as a file, URL or contact person to verify this data. If you do not enter a Reference Document, your email will be displayed instead.',
'Add a Volunteer': 'Add a Volunteer',
'Add a new Relief Item.': 'Add a new Relief Item.',
'Add a new Site from where the Item is being sent.': 'Añadir un nuevo Sitio de donde el artículo está siendo enviado.',
'Add a new Site where the Item is being sent to.': 'Add a new Site where the Item is being sent to.',
'Add an Photo.': 'Añadir una Foto.',
'Add an image, such as a Photo.': 'Agregar una imagen, como una foto.',
'Add main Item Category.': 'Agregar Categoría de Artículo Principal.',
'Add main Item Sub-Category.': 'Agregar Sub-Categoría de Artículo Principal.',
'Add new Group': 'Agregar nuevo Grupo',
'Add new Individual': 'Agregar Nuevo Individuo',
'Add new person.': 'Agregar nueva persona.',
'Add new position.': 'Add new position.',
'Add new project.': 'Agregar nuevo proyecto.',
'Add new staff role.': 'Añadir rol a nuevo personal.',
'Add new staff.': 'Agregar nuevo personal.',
'Add the Storage Bin Type.': 'Add the Storage Bin Type.',
'Add the Storage Location where this bin is located.': 'Add the Storage Location where this bin is located.',
'Add the Storage Location where this this Bin belongs to.': 'Adicionar la Ubicación de Almacenamiento a donde este Contenedor pertenece.',
'Add the main Warehouse/Site information where this Bin belongs to.': 'Add the main Warehouse/Site information where this Bin belongs to.',
'Add the main Warehouse/Site information where this Item is to be added.': 'Añadir la Información de Bodega/Sitio principal donde este Item se va a agregar.',
'Add the main Warehouse/Site information where this Storage location is.': 'Add the main Warehouse/Site information where this Storage location is.',
'Add the unit of measure if it doesnt exists already.': 'Agregar la unidad de medida si esta aún no existe.',
'Add to Bundle': 'Agregar a Paquete',
'Add to Catalog': 'Agregar a Catálogo',
'Add to Feature Group': 'Agregar a Grupo de Funciones',
'Add to budget': 'Agregar a presupuesto',
'Add/Edit/Remove Layers': 'Agregar/Editar/Remover Capas',
'Additional Beds / 24hrs': 'Camas Adicionales / 24 hrs',
'Additional Comments': 'Comentarios Adicionales',
"Additional quantity quantifier – e.g. '4x5'.": "Additional quantity quantifier – e.g. '4x5'.",
'Additional quantity quantifier – i.e. “4x5”.': 'Cuantificador cantidad adicional - ej. "4x5".',
'Address': 'Dirección',
'Address Details': 'Detalles de Dirección',
'Address Type': 'Tipo de Dirección',
'Address added': 'Dirección agregada',
'Address deleted': 'Dirección eliminada',
'Address updated': 'Dirección actualizada',
'Addresses': 'Direcciones',
'Adequate': 'Adecuado',
'Adequate food and water available': 'Adequate food and water available',
'Adjust Item(s) Quantity': 'Ajustar Cantidad de Artículo(s)',
'Adjust Items due to Theft/Loss': 'Ajustar Artículo por Robo/Pérdida',
'Admin': 'Administración',
'Admin Email': 'Administrador Email',
'Admin Name': 'Administrador Name',
'Admin Tel': 'Administrador Tel',
'Administración de inventarios': 'Administración de inventarios',
'Administration': 'Administración',
'Administrator': 'Administrador',
'Admissions/24hrs': 'Admisiones/24hrs',
'Adolescent (12-20)': 'Adolescentes (12-20)',
'Adolescent participating in coping activities': 'Adolescentes que participan en las actividades de afrontamiento',
'Adult (21-50)': 'Adultos (21-50)',
'Adult ICU': 'Adultos UCI',
'Adult Psychiatric': 'Adultos Psiquiatría',
'Adult female': 'Mujer adulta',
'Adult male': 'Adult male',
'Adults in prisons': 'Adults in prisons',
'Advanced Bin Search': 'Búsqueda Avanzada de Repositorio',
'Advanced Catalog Search': 'Búsqueda Avanzada de Catálogo',
'Advanced Category Search': 'Búsqueda Avanzada de Categoría',
'Advanced Item Search': 'Búsqueda Avanzada de Artículos',
'Advanced Location Search': 'Búsqueda Avanzada de Lugares',
'Advanced Site Search': 'Búsqueda Avanzada de Sitios',
'Advanced Sub-Category Search': 'Búsqueda Avanzada de Sub-Categorías',
'Advanced Unit Search': 'Búsqueda Avanzada de Unidades',
'Advanced:': 'Avanzado: ',
'Advisory': 'Recomendación',
'Affectees Families settled in the school belong to district': 'Affectees Families settled in the school belong to district',
'Afghanistan': 'Afganistán',
'After clicking on the Vote button ... (#TODO [String]) Please select the one item from each pair that you prefer over the other.': 'Después de hacer clic en el botón de votar ... (# TODO String []) Por favor, seleccione el elemento de una de cada par que prefiera sobre el otro.',
'After clicking on the button, a set of paired items will be shown one by one. Please select the one solution from each pair that you prefer over the other.': 'After clicking on the button, a set de paired items will be shown one by one. Please select the one solution from each pair that you prefer over the other.',
'Age Group': 'Grupo de Edad',
'Age group': 'Age group',
'Age group does not match actual age.': 'Grupo de edad no es igual a la edad actual.',
'Aggravating factors': 'Aggravating factors',
'Aggregate Items': 'Artículos Agregados',
'Agriculture': 'Agricultura',
'Aid Management': 'Manejo de Ayuda',
'Aid Request': 'Petición de Ayuda',
'Aid Request Details': 'Auxilio Requerimiento Detalles',
'Aid Request added': 'Auxilio Requerimiento agregado',
'Aid Request deleted': 'Auxilio Requerimiento eliminada',
'Aid Request updated': 'Auxilio Requerimiento updated',
'Aid Requests': 'Solicitudes de Ayuda',
'Air Transport Service': 'Servicio de Transporte Aéreo',
'Air tajin': 'Air tajin',
'Aircraft Crash': 'Aircraft Crash',
'Aircraft Hijacking': 'Aircraft Hijacking',
'Airport Closure': 'Cierre de Aeropuerto',
'Airspace Closure': 'Airspace Closure',
'Albania': 'Albania',
'Alcohol': 'Alcohol',
'Alcoholics': 'Alcohólicos',
'Alert': 'Alert',
'Alertas de Inundación': 'Alertas de Inundación',
'Algeria': 'Argelia',
'All': 'All',
'All Inbound & Outbound Messages are stored here': 'All Inbound & Outbound Messages are stored here',
'All Locations': 'Todas las Ubicaciones',
'All Pledges': 'Todos los compromisos',
'All Requested Items': 'Todos los Insumos Solicitados',
'All Resources': 'All Resources',
'All data is able to be shared with other sites in real time.': 'Toda la información es capaz de ser compartida con otros sitios en tiempo real.',
'All data provided by the Sahana Software Foundation from this site is licenced under a Creative Commons Attribution licence. However, not all data originates here. Please consult the source field of each entry.': 'Todos los datos facilitados por la Fundación de Software Sahana de este sitio están licenciados bajo una licencia Creative Commons. Sin embargo, no todos los datos se originan aquí. Por favor, consulte el campo de origen de cada entrada.',
'Allowed to push': 'Allowed to push',
'Allows a Budget to be drawn up': 'Permite Elaborar un Presupuesto',
'Allows authorized users to control which layers are available to the situation map.': 'Permite a los usuarios autorizados para controlar las capas que están disponibles para el mapa de situación.',
'Allows authorized users to upload multiple features into the situation map.': 'Permite a los usuarios autorizados de cargar múltiples características al mapa de situación.',
'Already in this Feature Group!': 'Ya disponible en este Grupo de Funcionalidades!',
'Alternative infant nutrition in use': 'Alternative infant nutrition in use',
'Alternative places for studying': 'Alternative places for studying',
'Alternative places for studying available': 'Alternative places for studying available',
'Ambulance Service': 'Servicio de Ambulancia',
'An Inventory Store is a physical place which contains Relief Items available to be Distributed.': 'An Inventory Store is a physical place which contains Relief Items available to be Distributed.',
'An intake system, a warehouse management system, commodity tracking, supply chain management, procurement and other asset and resource management capabilities.': 'An intake system, a warehouse management system, commodity tracking, supply chain management, procurement and other asset and resource management capabilities.',
'An interactive map the situation.': 'Un mapa interactivo para la situación.',
'Analysis of Completed Surveys': 'Analysis de Completed Inspecciones',
'Anamnesis': 'Amnesia',
'Andorra': 'Andorra',
'Angola': 'Angola',
'Animal Die Off': 'Animal Die Off',
'Animal Feed': 'Alimentación de Animales ',
'Animals': 'Animales',
'Answer Choices (One Per Line)': 'Responder elecciones (Una por línea)',
'Anthropolgy': 'Antropología',
'Antibiotics available': 'Antibiotics available',
'Antibiotics needed per 24h': 'Antibiotics needed per 24h',
'Antigua and Barbuda': 'Antigua y Barbuda',
'Any available Metadata in the files will be read automatically, such as Timestamp, Author, Latitude & Longitude.': 'Cualquier metadatos disponibles en los archivos se leen automáticamente, como marca de hora, el autor, Latitud y Longitud.',
'Any comments about this sync partner.': 'Algún comentario acerca del socio sincronizado.',
'Apparent Age': 'Edad Aparente',
'Apparent Gender': 'Género Aparente',
'Appearance': 'Apariencia',
'Appropriate clothing available': 'Ropa apropiada disponible',
'Appropriate cooking equipment/materials in HH': 'Equipo apropiado para cocinar/materiales en HH',
'Approx. number of cases/48h': 'Approx. number of cases/48h',
'Approximately how many children under 5 with diarrhea in the past 48 hours?': 'Approximately how many children under 5 with diarrhea in the past 48 hours?',
'Archive not Delete': 'Archivo no Eliminado',
'Arctic Outflow': 'Arctic Outflow',
'Are basic medical supplies available for health services since the disaster?': 'Are basic medical supplies available for health services since the disaster?',
'Are breast milk substitutes being used here since the disaster?': 'Se han utilizado sustitutos de leche materna después del desastre?',
'Are the areas that children, older people, and people with disabilities live in, play in and walk through on a daily basis physically safe?': 'Are the areas that children, older people, and people with disabilities live in, play in and walk through on a daily basis physically safe?',
'Are the chronically ill receiving sufficient care and assistance?': 'Are the chronically ill receiving sufficient care and assistance?',
'Are there adults living in prisons in this area?': 'Are there adults living in prisons in this area?',
'Are there alternative places for studying?': 'Are there alternative places for studying?',
'Are there cases of diarrhea among children under the age of 5?': '¿Existen casos de diarrea entre los niños menores de 5 años?',
'Are there children living in adult prisons in this area?': 'Are there children living in adult prisons in this area?',
'Are there children living in boarding schools in this area?': 'Are there children living in boarding schools in this area?',
'Are there children living in homes for disabled children in this area?': '¿Hay niños que viven en hogares para niños con discapacidad en esta área?',
'Are there children living in juvenile detention in this area?': '¿Hay niños que viven en detención juvenil en esta área?',
'Are there children living in orphanages in this area?': '¿En esta area se encuentran niños viviendo en orfanatos?',
'Are there children with chronical illnesses in your community?': 'Are there children with chronical illnesses in your community?',
'Are there health services functioning for the community since the disaster?': '¿Hay servicios de salud funcionando para la comunidad desde el desastre?',
'Are there older people living in care homes in this area?': '¿Hay personas mayores que viven en hogares especializados en esta área?',
'Are there older people with chronical illnesses in your community?': 'Are there older people with chronical illnesses in your community?',
'Are there people with chronical illnesses in your community?': 'Are there people with chronical illnesses in your community?',
'Are there separate latrines for women and men available?': 'Are there separate latrines for women and men available?',
'Are there staff present and caring for the residents in these institutions?': 'Are there staff present and caring for the residents in these institutions?',
'Area': 'Zona',
'Argentina': 'Argentina',
'Armenia': 'Armenia',
'Artificial eye left': 'Ojo Artificial Izquierdo',
'Artificial eye right': 'Ojo Artificial Derecho',
'Assessment': 'Evaluación',
'Assessment Details': 'Detalles de Evaluación',
'Assessment Reported': 'Assessment Reported',
'Assessment Summaries': 'Assessment Summaries',
'Assessment Summary Details': 'Assessment Summary Details',
'Assessment Summary added': 'Assessment Summary added',
'Assessment Summary deleted': 'Assessment Summary deleted',
'Assessment Summary updated': 'Assessment Summary updated',
'Assessment Type': 'Assessment Type',
'Assessment Type: ': 'Tipo de Evaluación: ',
'Assessment added': 'Assessment agregado',
'Assessment deleted': 'Assessment eliminada',
'Assessment updated': 'Evaluación actualizada',
'Assessments': 'Evaluaciones',
'Assessments Needs vs. Activities': 'Assessments Needs vs. Activities',
'Assessments and Activities': 'Assessments and Activities',
'Assessments are structured reports done by Professional Organizations': 'Evaluaciones son reportes estructurados desarrollados por Organizaciones Profesionales',
'Assessments are structured reports done by Professional Organizations - data includes WFP Assessments': 'Evaluaciones son reportes estructurados desarrollados por Organizaciones Profesionales - la información incluye evaluaciones WFP',
'Assessments:': 'Assessments:',
'Assessor': 'Assessor',
'Assign Storage Location': 'Asignar Lugar de Almacenamiento',
'Assign to Org.': 'Assign to Org.',
'Assigned': 'Asignado',
'Assigned To': 'Asignado a',
'Assigned to': 'Asignado a',
'Assistance for immediate repair/reconstruction of houses': 'Assistance for immediate repair/reconstruction of houses',
'Assistant': 'Asistente',
'Assisted Family Care': 'Asistida por Cuidados Familiares',
'Assisted Self-care': 'Asistida por Sí Misma',
'At/Visited Location (not virtual)': 'En/Ubicación visitada (no virtual)',
'Attend to information sources as described in <instruction>': 'Attend to information sources as described in <instruction>',
'Attribution': 'Attribution',
'Audit Read': 'Auditoría de Lectura',
'Audit Write': 'Auditoría de Escritura',
'Australia': 'Australia',
'Austria': 'Austria',
"Authenticate system's Twitter account": "Authenticate system's Twitter account",
'Authentication failed!': 'Autentificación fallida!',
'Authentication information of foreign server.': 'Información de autenticación del servidor externo.',
'Author': 'Autor',
'Automatic Database Synchronization History': 'Sincronización automática de bases de datos Historia',
'Automotive': 'Automotor',
'Availability': 'Disponibilidad ',
'Available Beds': 'Camas Disponibles',
'Available Messages': 'Available Messages',
'Available Records': 'Registros Disponibles',
'Available Recovery Reports': 'Informes disponibles de recuperación',
'Available databases and tables': 'Bases de datos y tablas disponibles',
'Available from': 'Disponible desde',
'Available in Viewer?': 'Disponible en Visualizador?',
'Available until': 'Disponible hasta',
'Avalanche': 'Avalanche',
'Avoid the subject event as per the <instruction>': 'Avoid the subject event as per the <instruction>',
'Azerbaijan': 'Azerbaiyán',
'Babies who are not being breastfed, what are they being fed on?': 'Babies who are not being breastfed, what are they being fed on?',
'Baby And Child Care': 'Cuidado del bebé y del niño',
'Background Colour': 'Color de fondo',
'Background Colour for Text blocks': 'Color de fondo para los bloques de texto',
'Bahai': 'Bahai',
'Bahamas': 'Bahamas',
'Bahrain': 'Bahrein',
'Baldness': 'Baldness',
'Balochi': 'Balochi',
'Banana': 'Banano',
'Bangladesh': 'Bangladesh',
'Bank/micro finance': 'Banco/micro financiación',
'Barbados': 'Barbados',
'Base Layer?': 'Capa Base?',
'Base Layers': 'Capas Base',
'Base Unit': 'Unidad Base',
'Baseline Number of Beds': 'Línea de referencia de número de camas',
'Baseline Type': 'Baseline Type',
'Baseline Type Details': 'Baseline Type Details',
'Baseline Type added': 'Baseline Type added',
'Baseline Type deleted': 'Baseline Type deleted',
'Baseline Type updated': 'Baseline Type updated',
'Baseline Types': 'Baseline Types',
'Baseline added': 'Baseline added',
'Baseline deleted': 'Baseline deleted',
'Baseline number of beds of that type in this unit.': 'Número de referencia de camas de ese tipo en esta unidad.',
'Baseline updated': 'Baseline updated',
'Baselines': 'Baselines',
'Baselines Details': 'Baselines Details',
'Basic Assessment': 'Basic Assessment',
'Basic Assessment Reported': 'Basic Assessment Reported',
'Basic Details': 'Detalles Básicos',
'Basic information on the requests and donations, such as category, the units, contact details and the status.': 'Información básica sobre las solicitudes y las donaciones, como la categoría, las unidades, los datos de contacto y el estado.',
'Basic medical supplies available prior to disaster': 'Basic medical supplies available prior to disaster',
'Basic medical supplies available since disaster': 'Basic medical supplies available since disaster',
'Basic reports on the Shelter and drill-down by region': 'Informes básicos acerca del Refugio y desglose por región',
'Baud': 'Baudios',
'Baud rate to use for your modem - The default is safe for most cases': 'Baud rate to use for your modem - The default is safe for most cases',
'Beacon Service URL': 'Beacon Service URL',
'Bed Capacity': 'Capacidad de Camas',
'Bed Capacity per Unit': 'Capacidad de camas por unidad',
'Bed Type': 'Tipo de cama',
'Bed type already registered': 'Bed type already registered',
'Bedding materials available': 'Materiales de cama disponibles',
'Belarus': 'Belarús',
'Belgium': 'Bélgica',
'Belize': 'Belice',
'Beneficiary Type': 'Tipo de Beneficiario',
'Benin': 'Benin',
'Bhuddist': 'Budista',
'Bhutan': 'Bhután',
'Biological Hazard': 'Biological Hazard',
'Biscuits': 'Biscuits',
'Blizzard': 'Blizzard',
'Blood Type (AB0)': 'Sangre Tipo (AB0)',
'Blowing Snow': 'Viento de Nieve',
'Boat': 'Boat',
'Bodies found': 'Cuerpos encontrados',
'Bodies recovered': 'Cuerpos recuperados',
'Bodily Constitution': 'Constitución Corporal',
'Body': 'Cuerpo',
'Body Find': 'Buscar Cuerpo',
'Body Finds': 'Búsquedas de Cuerpos',
'Body Recovery': 'Recuperación de Cuerpos',
'Body Recovery Reports': 'Reportes de Recuperación de Cuerpos',
'Body Recovery Request': 'Solicitud de Recuperación de Cuerpos',
'Body Recovery Requests': 'Solicitudes de Recuperación de Cuerpos',
'Body hair, Colour': 'Cabellos, Color',
'Body hair, Extent': 'Pelo corporal, Largo',
'Bolivia': 'Bolivia',
'Bomb': 'Bomba',
'Bomb Explosion': 'Bomb Explosion',
'Bomb Threat': 'Bomb Threat',
'Border Colour for Text blocks': 'Color de borde para los bloques de texto',
'Bosnia and Herzegovina': 'Bosnia y Herzegovina',
'Both': 'Ambos',
'Botswana': 'Botswana',
'Bounding Box Insets': 'Bounding Box Insets',
'Bounding Box Size': 'Bounding Box Size',
'Boys 13-18 yrs in affected area': 'Boys 13-18 yrs in affected area',
'Boys 13-18 yrs not attending school': 'Boys 13-18 yrs not attending school',
'Boys 6-12 yrs in affected area': 'Boys 6-12 yrs in affected area',
'Boys 6-12 yrs not attending school': 'Los niños de 6-12 años no asistiendo a la escuela',
'Brazil': 'Brasil',
'Breast milk substitutes in use since disaster': 'Breast milk substitutes in use since disaster',
'Breast milk substitutes used prior to disaster': 'Breast milk substitutes used prior to disaster',
'Bricks': 'Bricks',
'Bridge Closed': 'Puente Cerrado',
'Brunei': 'Brunei',
'Bucket': 'Bucket',
'Buddhist': 'Budista',
'Budget': 'Presupuesto',
'Budget Details': 'Datos del Presupuesto ',
'Budget Updated': 'Presupuesto Actualizado',
'Budget added': 'Presupuesto añadido',
'Budget deleted': 'Presupuesto eliminado',
'Budget updated': 'Presupuesto actualizado',
'Budgeting Module': 'Módulo de presupuestos',
'Budgets': 'Presupuestos',
'Buffer': 'Buffer',
'Building Aide': 'Ayudante de construcción',
'Building Collapsed': 'Edificio Colapsado',
'Built using the Template agreed by a group of NGOs working together as the': 'Construido utilizando la plantilla acordada por un grupo de ONGs trabajando conjuntamente como',
'Bulgaria': 'Bulgaria',
'Bulk Uploader': 'Cargador en Masa',
'Bundle': 'Paquete',
'Bundle Contents': 'Bundle Contents',
'Bundle Details': 'Paquete de Datos',
'Bundle Updated': 'Paquete de Actualización',
'Bundle added': 'Paquete añadido',
'Bundle deleted': 'Paquete eliminado',
'Bundle updated': 'Paquete de actualización',
'Bundles': 'Paquetes',
'Bunion': 'Juanete',
'Burkina Faso': 'Burkina Faso',
'Burn': 'Quemar',
'Burn ICU': 'UCI para Quemados',
'Burned/charred': 'Burned/charred',
'Burundi': 'Burundi',
'Business damaged': 'Business damaged',
'By Warehouse': 'By Warehouse',
'CAP': 'CAP',
'CBA Women': 'Mujeres CBA',
'CSS file %s not writable - unable to apply theme!': 'CSS archivo %s no puede ser escrito - no se puede aplicar el tema!',
'Calculate': 'Calcular',
'Cambodia': 'Camboya',
'Cameroon': 'Camerún',
'Camp': 'Camp',
'Camp Coordination/Management': 'Camp Coordination/Management',
'Can be grouped together into Feature Groups': 'Can be grouped together into Feature Groups',
'Can users register themselves for authenticated login access?': 'Can users register themselves for authenticated login access?',
"Can't import tweepy": "Can't import tweepy",
'Canada': 'Canadá',
'Cancel': 'Cancel',
'Cancelled': 'Cancelado ',
'Candidate Matches for Body %s': 'Candidate Matches for Body %s',
'Canned Fish': 'Canned Fish',
'Cannot be empty': 'No puede estar vacío',
'Cannot delete whilst there are linked records. Please delete linked records first.': 'No se puede eliminar mientras hayan registros vinculados. Por favor, elimine los registros vinculados en primer lugar.',
'Capacity': 'Capacidad',
'Capacity (Max Persons)': 'Capacity (Max Persons)',
'Capacity (W x D X H)': 'Capacidad (W x DXH)',
'Cape Verde': 'Cabo Verde',
'Capture Contact Information': 'Captura de Información de contacto',
'Capture Identity Information': 'Captura de Información de Identidad',
'Capture Information on Disaster Victim groups (Tourists, Passengers, Families, etc.)': 'Captura de Información sobre Desastres grupos de víctimas (turistas, pasajeros, familiares, etc)',
'Capture Information on each disaster victim': 'Captura de Información de cada víctima del desastre',
'Capturing organizational information of a relief organization and all the projects they have in the region': 'La captura de información organizacional y de todos los proyectos que una organización de ayuda tiene en la región',
'Capturing the essential services each Volunteer is providing and where': 'La captura de los servicios esenciales de cada voluntario es proporcionar y dónde',
'Capturing the projects each organization is providing and where': 'La captura de los proyectos que cada organización está ofreciendo y dónde',
'Cardiology': 'Cardiología',
'Care Report': 'Informe de Cuidado',
'Care Strategy': 'Estrategia de Atención',
'Cash available to restart business': 'Efectivo disponible para volver a comenzar negocio',
'Cassava': 'Cassava',
'Casual Labor': 'Labores casuales',
'Catalog': 'Catálogo',
'Catalog Item': 'Catalog Item',
'Catalog Item added': 'Catalog Item added',
'Catalog Item deleted': 'Catalog Item deleted',
'Catalog Item updated': 'Catalog Item updated',
'Catalog Items': 'Catálogo de Productos',
'Catalog Name': 'Nombre de Catálogo',
'Category': 'Categoría',
'Category: ': 'Categoría:',
'Category<>Sub-Category<>Catalog Relation': 'Categoría<>Sub-Categoría<>Relación Catálogo',
'Category<>Sub-Category<>Catalog Relation added': 'Categoría<>Sub-Categoría<>Relación Catálogo añadida',
'Category<>Sub-Category<>Catalog Relation deleted': 'Categoría<>Sub-Categoría<>Relación Catálogo eliminada',
'Category<>Sub-Category<>Catalog Relation updated': 'Categoría<>Sub-Categoría<>Relación Catálogo actualizada',
'Central African Republic': 'República Centroafricana',
'Central point to record details on People': 'Punto Central para registrar detalles de personas',
'Chad': 'Chad',
'Change Password': 'Cambiar contraseña',
'Chat on IRC': 'Chat en IRC',
'Check for errors in the URL, maybe the address was mistyped.': 'Check for errors in the URL, maybe the address was mistyped.',
'Check if the URL is pointing to a directory instead of a webpage.': 'Revise si el URL está apuntando a un directorio en lugar de una página web.',
'Check outbox for the message status': 'Check outbox for the message status',
'Check to delete': 'Verifique para eliminar',
'Check to delete:': 'Check to delete:',
'Check-In': 'Check-In',
'Check-Out': 'Check-Out',
'Check-in': 'Check-in',
'Check-out': 'Check-out',
'Checklist': 'Lista de verificación',
'Checklist created': 'Lista creada',
'Checklist deleted': 'Lista de verificación de borrado',
'Checklist of Operations': 'Lista de verificación de las operaciones',
'Checklist updated': 'Lista actualizada',
'Chemical Hazard': 'Chemical Hazard',
'Chemical, Biological, Radiological, Nuclear or High-Yield Explosive threat or attack': 'Chemical, Biological, Radiological, Nuclear or High-Yield Explosive threat or attack',
'Chewing tobacco': 'Tabaco de mascar',
'Chicken': 'Pollo',
'Child': 'Child',
'Child (2-11)': 'Niño (2-11)',
'Child (< 18 yrs)': 'Niño (< 18 años)',
'Child Abduction Emergency': 'Child Abduction Emergency',
'Child headed households (<18 yrs)': 'Hogares con cabezas menores de edad (<18 años)',
'Children (2-5 years)': 'Children (2-5 years)',
'Children (5-15 years)': 'Children (5-15 years)',
'Children (< 2 years)': 'Niños (< 2 años)',
'Children in adult prisons': 'Children in adult prisons',
'Children in boarding schools': 'Children in boarding schools',
'Children in homes for disabled children': 'Niños en hogares para niños discapacitados',
'Children in juvenile detention': 'Children in juvenile detention',
'Children in orphanages': 'Children in orphanages',
'Children living on their own (without adults)': 'Children living on their own (without adults)',
'Children not enrolled in new school': 'Children not enrolled in new school',
'Children orphaned by the disaster': 'Niños huérfanos por el desastre',
'Children separated from their parents/caregivers': 'Niños separados de sus padres/cuidadores',
'Children that have been sent to safe places': 'Niños que han sido enviados a lugares seguros',
'Children who have disappeared since the disaster': 'Niños que han desaparecido desde el desastre',
'Children with chronical illnesses': 'Children with chronical illnesses',
'Chile': 'Chile',
'Chin, Inclination': 'Mentón, Inclinación',
'Chin, Shape': 'Mentón, Forma',
'Chin, Size': 'Mentón, Tamaño',
'China': 'China',
'Chinese': 'Chino',
'Chinese (Taiwan)': 'Chino (Taiwan)',
'Cholera Treatment': 'Cholera Treatment',
'Cholera Treatment Capability': 'Cholera Treatment Capability',
'Cholera Treatment Center': 'Cholera Treatment Center',
'Cholera-Treatment-Center': 'Cholera-Treatment-Center',
'Choose Manually': 'Choose Manually',
'Choosing Skill and Resources of Volunteers': 'Escoger habilidades y recursos de los voluntarios',
'Christian': 'Cristiano',
'Church': 'Iglesia',
'Cigarettes': 'Cigarrillos',
'Cigars': 'Cigarros',
'Circumstances of disappearance, other victims/witnesses who last saw the missing person alive.': 'Circunstancias de la desaparición, otras víctimas/testigos que vieron con vida por última vez a la persona desaparecida.',
'Civil Emergency': 'Civil Emergency',
'Clear Selection': 'Borrar selección',
'Cleft chin': 'Mentón hendido',
'Click here to open log': 'Haga clic aquí para abrir registro',
"Click on 'Pledge' button in the left-hand column to make a Pledge to match a request for aid.": "Click on 'Pledge' button in the left-hand column to make a Pledge to match a request for aid.",
'Click on an ID in the left-hand column to make a Pledge to match a request for aid.': 'Haga clic en una identificación en la columna de la izquierda para hacer una promesa para que coincida con una solicitud de ayuda.',
'Click on the link ': 'Click en el enlace ',
'Client IP': 'IP del cliente',
'Clinical Laboratory': 'Laboratorio Clínico',
'Clinical Operations': 'Operaciones Clínicas',
'Clinical Status': 'Estado Clínico',
'Close map': 'Close map',
'Closed': 'Cerrado',
'Closure': 'Closure',
'Clothing': 'Ropa',
'Cluster': 'Cluster',
'Cluster Details': 'Detalles Agrupamiento',
'Cluster Distance': 'Distancia de Grupo',
'Cluster Subsector': 'Cluster Subsector',
'Cluster Subsector Details': 'Cluster Subsector Details',
'Cluster Subsector added': 'Cluster Subsector added',
'Cluster Subsector deleted': 'Cluster Subsector deleted',
'Cluster Subsector updated': 'Cluster Subsector updated',
'Cluster Subsectors': 'Cluster Subsectors',
'Cluster Threshold': 'Umbral de Grupo',
'Cluster added': 'Cluster agregado',
'Cluster deleted': 'Cluster eliminada',
'Cluster updated': 'Cluster updated',
'Cluster(s)': 'Cluster(s)',
'Clusters': 'Clusters',
'Code': 'Código',
'Code: ': 'Código:',
'Cold Wave': 'Ola fría',
'Collective center': 'Collective center',
'Colombia': 'Colombia',
'Colour for Underline of Subheadings': 'Color de subrayado de las subpartidas',
'Colour of Buttons when hovering': 'Color de los botones cuando se pasa',
'Colour of bottom of Buttons when not pressed': 'Color de fondo de los botones cuando no se presiona',
'Colour of bottom of Buttons when pressed': 'Color de fondo de los botones cuando se pulsa',
'Colour of dropdown menus': 'Colores de los menús desplegables',
'Colour of selected Input fields': 'Color de los campos de entrada seleccionada',
'Colour of selected menu items': 'El color de los elementos seleccionados del menú',
'Column Choices (One Per Line': 'Selección de Columna (Una Por Linea',
'Combined Method': 'Método combinado',
'Come back later.': 'Vuelve más tarde.',
'Come back later. Everyone visiting this site is probably experiencing the same problem as you.': 'Come back later. Everyone visiting this site is probably experiencing the same problem as you.',
'Comments': 'Comentarios',
'Commiting a changed spreadsheet to the database': 'Commiting a changed spreadsheet to the database',
'Communication problems': 'Communication problems',
'Communications': 'Comunicaciones',
'Community Centre': 'Community Centre',
'Community Health Center': 'Community Health Center',
'Community Member': 'Miembro de la Comunidad',
'Comoros': 'Comoras',
'Complete Database Synchronized': 'Base de datos completamente sincronizada',
'Complete Unit Label for e.g. meter for m.': 'Completar Unidad de Etiqueta. Por ej; metro por m.',
'Completed': 'Terminado',
'Completion status': 'Terminación del estado',
'Complexion': 'Complexion',
'Compose': 'Componer',
'Compromised': 'Comprometida',
'Config': 'Configurar',
'Config Details': 'Detalles de configuración',
'Config added': 'Configuración añadida',
'Config deleted': 'Configuración eliminada',
'Config updated': 'Configuración actualizada',
'Configs': 'Configuraciones',
'Configure Run-time Settings': 'Configurar ajustes de tiempos de ejecución',
'Confirmed': 'Confirmado',
'Confirmed Incidents': 'Confirmed Incidents',
'Conflict Details': 'Conflict Details',
'Conflict Resolution': 'Conflict Resolution',
'Congo, Democratic Republic of the (Congo-Kinshasa)': 'Congo, República Democrática del (Congo-Kinshasa)',
'Congo, Republic of the (Congo-Brazzaville)': 'Congo, República del (Congo-Brazzaville)',
'Consumable': 'Consumible',
'Contact': 'Contacto',
'Contact Data': 'Datos de contacto',
'Contact Details': 'Datos de contacto',
'Contact Information': 'Contact Information',
'Contact Method': 'Método de contacto',
'Contact Person': 'Persona de contacto',
'Contact added': 'Contacto añadido',
'Contact deleted': 'Contacto eliminado',
'Contact details': 'Detalles del contacto',
'Contact information added': 'Información de contacto añadido',
'Contact information deleted': 'Información de contacto eliminado',
'Contact information updated': 'Información de contacto actualizada',
'Contact person in case of news or further questions (if different from reporting person). Include telephone number, address and email as available.': 'Contact person in case of news or further questions (if different from reporting person). Include telephone number, address and email as available.',
'Contact person(s) in case of news or further questions (if different from reporting person). Include telephone number, address and email as available.': 'Contact person(s) in case of news or further questions (if different from reporting person). Include telephone number, address and email as available.',
'Contact updated': 'Contacto actualizado',
'Contact us': 'Contáctenos',
'Contacts': 'Contactos',
'Contacts: ': 'Contacts: ',
'Contents': 'Contenido',
'Contradictory values!': 'Valores contradictorios!',
'Contributor': 'Colaborador',
'Conversion Tool': 'Herramienta de conversión',
'Cooking NFIs': 'Cooking NFIs',
'Cooking Oil': 'Cooking Oil',
'Coordinate Conversion': 'Coordinate Conversion',
'Copy': 'Copiar',
'Copy any data from the one to be deleted into the one to keep': 'Copie todos los datos del registro que se va a eliminar al que se va a conservar',
'Corn': 'Maíz',
'Cost Type': 'Tipo de Costo',
'Cost per Megabyte': 'Costo por Megabyte',
'Cost per Minute': 'Costo por Minuto',
'Costa Rica': 'Costa Rica',
'Counselling': 'Consejería',
'Country': 'País',
'Country of Residence': 'País de residencia',
'Create & manage Distribution groups to receive Alerts': 'Crear y administrar grupos de distribución para recibir alertas',
'Create Checklist': 'Crear Lista de Comprobación',
'Create Group Entry': 'Crear Grupo de Entrada',
'Create Impact Assessment': 'Crear Evaluaciones de Impacto',
'Create Import Job': 'Crear Trabajo de Importación',
'Create Mobile Impact Assessment': 'Crear Evaluaciones de Impacto Móvil',
'Create New Import Job': 'Crear Nuevo Trabajo de Importación',
'Create Rapid Assessment': 'Crear Evaluaciones Rápidas',
'Create Request': 'Crear Petición',
'Create Task': 'Create Task',
'Create a group entry in the registry.': 'Crear una entrada de grupo en el registro.',
'Create, enter, and manage surveys.': 'Create, enter, and manage surveys.',
'Creation of Surveys': 'Creación de Inspecciones',
'Crime': 'Crimen',
'Criteria': 'Criterios',
'Croatia': 'Croacia',
'Cuba': 'Cuba',
'Currency': 'Moneda',
'Current Group Members': 'Miembros del Grupo Actual',
'Current Group Memberships': 'Membresías de Grupo Actuales',
'Current Identities': 'Identidades Actuales',
'Current Location': 'Current Location',
'Current Log Entries': 'Entradas de Registro Actuales',
'Current Memberships': 'Miembros Actuales',
'Current Notes': 'Current Notes',
'Current Registrations': 'Current Registrations',
'Current Status': 'Current Status',
'Current Team Members': 'Miembros Actuales del Equipo',
'Current Twitter account': 'Current Twitter account',
'Current greatest needs of vulnerable groups': 'Current greatest needs of vulnerable groups',
'Current main income sources': 'Mayor fuente de ingreso actual',
'Current major expenses': 'Current major expenses',
'Current number of patients': 'Current number of patients',
'Current problems, categories': 'Current problems, categories',
'Current problems, details': 'Current problems, details',
'Current request': 'Solicitud Actual',
'Current response': 'Respuesta Actual',
'Current session': 'Sesiones Actuales',
'Current type of health problems, adults': 'Current type of health problems, adults',
'Current type of health problems, children': 'Problemas actuales de salúd, niños',
'Current type of source for drinking water': 'Tipo actual de fuente de agua potable',
'Current type of source for sanitary water': 'Current type of source for sanitary water',
'Currently your system has default username and password. Username and Password are required by foriegn machines to sync data with your computer. You may set a username and password so that only those machines can fetch and submit data to your machines which your grant access by sharing your password.': 'En la actualidad el sistema tiene nombre de usuario y contraseña por defecto. Nombre de usuario y la contraseña son requeridos por las máquinas extranjeras para sincronizar los datos con el ordenador. Usted puede configurar un nombre de usuario y contraseña para que sólo las máquinas pueden recoger y enviar datos a sus máquinas que tu permitir el acceso al compartir su contraseña.',
'Custom Database Resource (e.g., anything defined as a resource in Sahana)': 'Recurso de la Base de datos personalizada (ej. cualquier cosa que se define como un recurso en Sahana)',
'Customisable category of aid': 'Categoría de ayuda personalizado',
'Cyprus': 'Chipre',
'Czech Republic': 'República Checa',
"Côte d'Ivoire": 'Costa de Marfil',
'DECISION': 'DECISION',
'DNA Profile': 'Perfil de ADN',
'DNA Profiling': 'Perfiles de ADN',
'Daily': 'Diario',
'Dam Overflow': 'Desbordamiento de la Represa ',
'Dangerous Person': 'Persona Peligrosa',
'Dashboard': 'Tablero de Instrumentos',
'Data import policy': 'Data import policy',
'Data not available': 'Datos no disponibles',
'Data uploaded': 'Datos subidos',
'Database': 'Base de Datos',
'Date': 'Fecha',
'Date & Time': 'Fecha y hora',
'Date Requested': 'Date Requested',
'Date Required': 'Date Required',
'Date and Time': 'Fecha y hora',
'Date and Time of Goods receipt. By default shows the current time but can be modified by editing in the drop down list.': 'Fecha y hora de entrada de mercancías. Por defecto muestra la hora actual, pero se puede modificar mediante la edición de la lista desplegable.',
'Date and time this report relates to.': 'Date and time this report relates to.',
'Date of Birth': 'Fecha de Nacimiento',
'Date of Latest Information on Beneficiaries Reached': 'Fecha de la información más reciente sobre beneficiarios alcanzados',
'Date of Report': 'Fecha de Informe',
'Date/Time': 'Fecha/Hora',
'Date/Time of Find': 'Fecha / Hora de Buscar',
'Date/Time of disappearance': 'Fecha/Hora de la desaparición',
'De-duplicator': 'De-duplicador',
'Dead Body Details': 'Detalles de Cuerpo Fallecido',
'Dead Body Reports': 'Informes de Cuerpos Muertos',
'Deaths in the past 24h': 'Deaths in the past 24h',
'Deaths/24hrs': 'Muertes/24hrs',
'Debug': 'Depurar',
'Deceased': 'Deceased',
'Decimal Degrees': 'Grados Decimales',
'Decomposed': 'Decomposed',
'Default Height of the map window. In Window layout the map maximises to fill the window, so no need to set a large value here.': 'Default Height of the map window. In Window layout the map maximises to fill the window, so no need to set a large value here.',
'Default Marker': 'Marcador Predeterminado',
'Default Width of the map window. In Window layout the map maximises to fill the window, so no need to set a large value here.': 'Default Width of the map window. In Window layout the map maximises to fill the window, so no need to set a large value here.',
'Default synchronization policy': 'Directiva predeterminada de sincronización',
'Defaults': 'Predeterminados',
'Defaults updated': 'Predeterminados actualizados',
'Defecation area for animals': 'Defecation area for animals',
'Defines the icon used for display of features on handheld GPS.': 'Define el ícono utilizado para la visualización de las características en el GPS de mano.',
'Defines the icon used for display of features on interactive map & KML exports. A Marker assigned to an individual Location is set if there is a need to override the Marker assigned to the Feature Class. If neither are defined, then the Default Marker is used.': 'Defines the icon used for display of features on interactive map & KML exports. A Marker assigned to an individual Location is set if there is a need to override the Marker assigned to the Feature Class. If neither are defined, then the Default Marker is used.',
'Defines the marker used for display & the attributes visible in the popup.': 'Defines the marker used for display & the attributes visible in the popup.',
'Degrees must be a number between -180 and 180': 'Degrees must be a number between -180 and 180',
'Degrees must be between -180 and 180': 'Los grados deben estar entre -180 y 180',
'Dehydration': 'Dehydration',
'Delete': 'Eliminar',
'Delete Aid Request': 'Eliminar Petición de Auxilio',
'Delete Assessment': 'Eliminar Evaluación',
'Delete Assessment Summary': 'Delete Assessment Summary',
'Delete Baseline': 'Delete Baseline',
'Delete Baseline Type': 'Delete Baseline Type',
'Delete Budget': 'Eliminar Budget',
'Delete Bundle': 'Eliminar Bundle',
'Delete Catalog Item': 'Delete Catalog Item',
'Delete Cluster Subsector': 'Delete Cluster Subsector',
'Delete Config': 'Eliminar Config',
'Delete Distribution': 'Eliminar Distribution',
'Delete Distribution Item': 'Eliminar Elemento de Distribución',
'Delete Document': 'Eliminar Document',
'Delete Donor': 'Eliminar Donor',
'Delete Entry': 'Delete Entry',
'Delete Feature Class': 'Eliminar Feature Class',
'Delete Feature Layer': 'Eliminar Capa Característica',
'Delete Find Report': 'Eliminar Reporte de Búsqueda',
'Delete Group': 'Eliminar Grupo',
'Delete Hospital': 'Eliminar Hospital',
'Delete Image': 'Eliminar Imagen',
'Delete Impact': 'Delete Impact',
'Delete Impact Type': 'Delete Impact Type',
'Delete Incident': 'Eliminar Incident',
'Delete Incident Report': 'Eliminar Incident Report',
'Delete Inventory Item': 'Eliminar Inventory Insumo',
'Delete Inventory Store': 'Eliminar Inventory Store',
'Delete Item': 'Eliminar Artículo',
'Delete Item Category': 'Eliminar Insumo Category',
'Delete Item Packet': 'Delete Item Packet',
'Delete Key': 'Eliminar Key',
'Delete Kit': 'Eliminar equipo',
'Delete Layer': 'Eliminar Capa',
'Delete Location': 'Eliminar Ubicación',
'Delete Marker': 'Eliminar Marker',
'Delete Membership': 'Eliminar Membership',
'Delete Message': 'Eliminar Message',
'Delete Metadata': 'Eliminar Metadata',
'Delete Need': 'Delete Need',
'Delete Need Type': 'Delete Need Type',
'Delete Office': 'Eliminar Office',
'Delete Old': 'Eliminar Old',
'Delete Organization': 'Eliminar Organization',
'Delete Peer': 'Delete Peer',
'Delete Person': 'Eliminar Persona',
'Delete Photo': 'Eliminar Foto',
'Delete Project': 'Eliminar Project',
'Delete Projection': 'Eliminar Proyección',
'Delete Rapid Assessment': 'Delete Rapid Assessment',
'Delete Received Item': 'Delete Received Item',
'Delete Received Shipment': 'Delete Received Shipment',
'Delete Record': 'Eliminar Registro',
'Delete Recovery Report': 'Eliminar Reporte de Recuperación',
'Delete Report': 'Borrar',
'Delete Request': 'Eliminar Petición',
'Delete Request Item': 'Eliminar Requerimiento Insumo',
'Delete Resource': 'Eliminar Recurso',
'Delete Section': 'Eliminar Section',
'Delete Sector': 'Delete Sector',
'Delete Sent Item': 'Delete Sent Item',
'Delete Sent Shipment': 'Delete Sent Shipment',
'Delete Service Profile': 'Eliminar Perfil de Servicio',
'Delete Setting': 'Eliminar Configuración',
'Delete Skill': 'Eliminar Habilidad',
'Delete Skill Type': 'Eliminar Habilidad Tipo',
'Delete Staff Type': 'Eliminar Staff Tipo',
'Delete Status': 'Delete Status',
'Delete Subscription': 'Eliminar Subscription',
'Delete Survey Answer': 'Eliminar Inspección Answer',
'Delete Survey Question': 'Eliminar Inspección Question',
'Delete Survey Section': 'Eliminar Inspección Section',
'Delete Survey Series': 'Eliminar Inspección Series',
'Delete Survey Template': 'Eliminar Cuestionario Plantilla',
'Delete Unit': 'Eliminar Unidad',
'Delete User': 'Eliminar User',
'Delete Volunteer': 'Delete Volunteer',
'Delete Warehouse': 'Delete Warehouse',
'Delete Warehouse Item': 'Delete Warehouse Item',
'Delete from Server?': 'Eliminar del servidor?',
'Delivered': 'Entregado',
'Delphi Decision Maker': 'Toma de decisiones del Delphi',
'Delphi toma de decisiones': 'Toma de decisiones de Delphi',
'Demographic': 'Perfil Demográfico',
'Demonstrations': 'Demostraciones',
'Denmark': 'Dinamarca',
'Dental Examination': 'Examen Dental',
'Dental Profile': 'Perfil Dental',
'Department/Unit Name': 'Departamento / Unidad Nombre',
'Deployment': 'Despliegue',
'Describe the condition of the roads to your hospital.': 'Describe the condition of the roads to your hospital.',
'Describe the procedure which this record relates to (e.g. "medical examination")': 'Describe the procedure which this record relates to (e.g. "medical examination")',
'Description': 'Descripción',
'Description of Bin Type': 'Description of Bin Type',
'Description of Contacts': 'Descripción de Contactos',
'Description of defecation area': 'Descripción de área de defecación',
'Description of drinking water source': 'Description de drinking water source',
'Description of sanitary water source': 'Description de sanitary water source',
'Description of water source before the disaster': 'Description de water source before the disaster',
'Description: ': 'Descripción: ',
'Descriptive Text (e.g., Prose, etc)': 'Descriptive Text (e.g., Prose, etc)',
'Designated for': 'Designado para',
'Desire to remain with family': 'Deseo de permanecer con la familia',
'Destination': 'Destino',
"Detailed address of the site for informational/logistics purpose. Please note that you can add GIS/Mapping data about this site in the 'Location' field mentioned below.": "Dirección detallada del sitio para propósitos de información/logística. Por favor tener en cuenta que es posible agregar datos de SIG/Mapa sobre este sitio en el campo 'Locación' mencionado abajo.",
'Details': 'Detalles',
'Dialysis': 'Diálisis',
'Diarrhea': 'Diarrhea',
'Diarrhea among children under 5': 'Diarrea entre los niños menores de 5',
'Dignitary Visit': 'Dignitary Visit',
'Dimensions of the storage bin. Input in the following format 1 x 2 x 3 for width x depth x height followed by choosing the unit from the drop down list.': 'Dimensions of the storage bin. Input in the following format 1 x 2 x 3 for width x depth x height followed by choosing the unit from the drop down list.',
'Dimensions of the storage location. Input in the following format 1 x 2 x 3 for width x depth x height followed by choosing the unit from the drop down list.': 'Dimensions of the storage location. Input in the following format 1 x 2 x 3 for width x depth x height followed by choosing the unit from the drop down list.',
'Direction': 'Direction',
'Disabilities': 'Discapacidad',
'Disabled': 'Deshabilitado',
'Disabled participating in coping activities': 'Disabled participating in coping activities',
'Disabled?': 'Movilidad reducida?',
'Disaster Victim Identification': 'Identificación de Víctimas de Desastres',
'Disaster Victim Registry': 'Víctimas de Desastres del Registro',
'Disaster clean-up/repairs': 'Desastre limpiar/reparos',
'Discharge (cusecs)': 'Discharge (cusecs)',
'Discharges/24hrs': 'Descargas/24hrs',
'Discussion Forum': 'Foro de Discusión',
'Discussion Forum on item': 'Foro de debate sobre el tema',
'Disease vectors': 'Disease vectors',
'Diseases': 'Enfermedades',
'Dispatch': 'Envío',
'Dispatch Items': 'Despacho artículos',
'Dispensary': 'Dispensario',
'Displaced': 'Displaced',
'Displaced Populations': 'Displaced Populations',
'Display Polygons?': 'Mostrar Polígonos?',
'Display Routes?': 'Display Routes?',
'Display Tracks?': 'Display Tracks?',
'Display Waypoints?': 'Display Waypoints?',
'Dispose': 'Disponer',
'Dispose Expired/Unusable Items': 'Eliminar de vencimiento / inutilizable artículos',
'Distance between defecation area and water source': 'Distancia entre el área de la defecación y la fuente de agua',
'Distance between latrines and temporary shelter in meters': 'Distancia entre las letrinas y sitio de hospedaje temporal en metros',
'Distance between shelter and latrines': 'Distance between shelter and latrines',
'Distance(Kms)': 'Distance(Kms)',
'Distribution': 'Distribución',
'Distribution Details': 'Distribution Detalles',
'Distribution Groups': 'Grupos de distribución',
'Distribution Item': 'Distribution Insumo',
'Distribution Item Details': 'Distribution Insumo Detalles',
'Distribution Item added': 'Distribution Artículo agregado',
'Distribution Item deleted': 'Distribution Insumo eliminada',
'Distribution Item updated': 'Distribution Insumo updated',
'Distribution Items': 'Distribution Insumos',
'Distribution added': 'Distribution agregado',
'Distribution deleted': 'Distribution eliminada',
'Distribution groups': 'Grupos de distribución',
'Distribution updated': 'Distribución actualizada',
'Distributions': 'Distribuciones',
'District': 'Distrito',
'Djibouti': 'Djibouti',
'Do adolescent and youth in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)': 'Do adolescent and youth in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)',
'Do households each have at least 2 containers (10-20 litres each) to hold water?': '¿Tienen los hogares al menos 2 contenedores (10-20 litros cada uno) para guardar agua?',
'Do households have appropriate equipment and materials to cook their food (stove, pots, dished plates, and a mug/drinking vessel, etc)?': 'Do households have appropriate equipment and materials to cook their food (stove, pots, dished plates, and a mug/drinking vessel, etc)?',
'Do households have bedding materials available (tarps, plastic mats, blankets)?': 'Do households have bedding materials available (tarps, plastic mats, blankets)?',
'Do households have household water storage containers?': '¿Los hogares tienen contenedores de almacenamiento de agua?',
'Do minority members in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)': 'Do minority members in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)',
'Do older people in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)': 'Do older people in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)',
'Do people have at least 2 full sets of clothing (shirts, pants/sarong, underwear)?': 'Las personas tienen al menos 2 cambios de ropa (camisetas, pantalones, ropa interior)?',
'Do people have reliable access to sufficient sanitation/hygiene items (bathing soap, laundry soap, shampoo, toothpaste and toothbrush)?': '¿Tienen las personas acceso seguro a suficientes artículos de higiene (jabón de baño, jabón de ropa, champú, pasta de dientes y cepillo de dientes)?',
'Do people with disabilities in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)': 'Do people with disabilities in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)',
'Do women and girls have easy access to sanitary materials?': '¿Las mujeres y las jóvenes tienen acceso fácil a materiales sanitarios?',
'Do women in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)': 'Do women in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)',
'Do you have access to cash to restart your business?': 'Do you have access to cash to restart your business?',
'Do you know of any incidents of violence?': 'Do you know of any incidents of violence?',
'Do you know of children living on their own (without adults)?': 'Hay niños viviendo por sí solos (sin adultos)?',
'Do you know of children separated from their parents or caregivers?': 'Do you know of children separated from their parents or caregivers?',
'Do you know of children that have been orphaned by the disaster?': 'Conocías a niños huérfanos antes del desastre?',
'Do you know of children that have been sent to safe places?': 'Do you know of children that have been sent to safe places?',
'Do you know of children that have disappeared without explanation in the period since the disaster?': 'Do you know of children that have disappeared without explanation in the period since the disaster?',
'Do you know of older people who are primary caregivers of children?': 'Do you know of older people who are primary caregivers of children?',
'Do you know of parents/caregivers missing children?': 'Conoces a los padres/cuidadores de los niños desaparecidos?',
'Do you prefer': '¿Prefiere',
'Do you really want to delete these records?': 'Do you really want to delete these records?',
'Do you want to over-write the file metadata with new default values?': '¿Desea sobrescribir el archivo de metadatos con nuevos valores por defecto?',
'Do you want to receive this shipment?': 'Do you want to receive this shipment?',
'Do you want to send this shipment?': 'Do you want to send this shipment?',
'Document': 'Documento',
'Document Details': 'Detalles del Documento',
'Document Scan': 'Escaneo de documentos',
'Document added': 'Agregó documento',
'Document deleted': 'Documento eliminado',
'Document updated': 'Documento actualizado',
'Document: ': 'Documento: ',
'Documents': 'Documentos',
'Documents and Images': 'Documentos e Imágenes',
'Documents and Photos': 'Documentos y Fotos',
'Does this facility provide a cholera treatment center?': 'Does this facility provide a cholera treatment center?',
'Doing nothing (no structured activity)': 'Haciendo nada (sin actividad estructurada)',
'Dollars': 'Dólares',
'Domestic chores': 'Tareas Domésticas',
'Dominica': 'Dominica',
'Dominican Republic': 'República Dominicana',
'Donation Phone #': 'Donación # de teléfono',
'Donor': 'Donante',
'Donor Details': 'Donor Detalles',
'Donor added': 'Donante agregado',
'Donor deleted': 'Donor eliminada',
'Donor updated': 'Actualización de Donantes ',
'Donors': 'Donors',
'Donors Report': 'Reporte de donantes',
'Door frame': 'Door frame',
'Draft': 'Borrador',
'Drainage': 'Drainage',
'Drawing up a Budget for Staff & Equipment across various Locations.': 'La elaboración de un presupuesto para el personal y equipo a través de varios lugares.',
'Drill Down by Group': 'Profundizar por Grupo',
'Drill Down by Incident': 'Drill Down por Incidente',
'Drill Down by Shelter': 'Drill Down por Vivienda',
'Driving License': 'Carnet de conducir',
'Drought': 'Drought',
'Drugs': 'Drogas',
'Dug Well': 'Dug Well',
'Duplicate?': 'Duplicate?',
'Duration': 'Duration',
'Dust Storm': 'Dust Storm',
'Dwellings': 'Viviendas',
'E-mail': 'E-mail',
'EMS Reason': 'EMS Reason',
'EMS Status': 'Estado de EMS',
'EMS Status Reason': 'EMS Razón de estado',
'EMS Traffic Status': 'Tráfico de Estado EMS',
'ER Status': 'ER Status',
'ER Status Reason': 'ER Status Reason',
'Early Recovery': 'Early Recovery',
'Ears, angle': 'Orejas, ángulo',
'Ears, size': 'Orejas, tamaño',
'Earthquake': 'Terremoto ',
'East Timor': 'Timor Oriental',
'Easy access to sanitation items for women/girls': 'Acceso fácil a artículos de sanidad para mujeres/niñas',
'Ecuador': 'Ecuador',
'Edit': 'Editar',
'Edit ': 'Editar ',
'Edit Activity': 'Editar Actividad',
'Edit Address': 'Editar dirección',
'Edit Aid Request': 'Editar Petición de Auxilio',
'Edit Application': 'Editar aplicación',
'Edit Assessment': 'Editar Assessment',
'Edit Assessment Summary': 'Edit Assessment Summary',
'Edit Baseline': 'Edit Baseline',
'Edit Baseline Type': 'Edit Baseline Type',
'Edit Budget': 'Editar Presupuesto',
'Edit Bundle': 'Editar Bundle',
'Edit Catalog Item': 'Edit Catalog Item',
'Edit Category<>Sub-Category<>Catalog Relation': 'Categoría Editar<>Sub-Categoría<>Relación Catálogo>',
'Edit Cluster': 'Editar Cluster',
'Edit Cluster Subsector': 'Edit Cluster Subsector',
'Edit Config': 'Editar Configuración',
'Edit Contact': 'Modificar Contacto',
'Edit Contact Information': 'Editar Contact Information',
'Edit Contents': 'Editar Contenidos',
'Edit Defaults': 'Editar Predeterminados',
'Edit Description': 'Editar Descripción',
'Edit Details': 'Editar Detalles',
'Edit Disaster Victims': 'Editar Víctimas de Desastres',
'Edit Distribution': 'Editar Distribution',
'Edit Distribution Item': 'Editar Distribution Insumo',
'Edit Document': 'Editar Document',
'Edit Donor': 'Editar Donor',
'Edit Email': 'Editar Correo',
'Edit Email Settings': 'Editar Email Settings',
'Edit Feature Class': 'Modificar Clase de Funciones',
'Edit Feature Group': 'Editar Grupo de Funciones',
'Edit Feature Layer': 'Editar Feature Layer',
'Edit Flood Report': 'Editar Inundación Report',
'Edit Gateway Settings': 'Editar Configuración de Puerto',
'Edit Group': 'Editar Grupo',
'Edit Hospital': 'Editar Hospital',
'Edit Identification Report': 'Editar Reporte de Identificación',
'Edit Identity': 'Editar Identidad',
'Edit Image': 'Editar Imagen',
'Edit Image Details': 'Editar detalles de imagen',
'Edit Impact': 'Edit Impact',
'Edit Impact Type': 'Edit Impact Type',
'Edit Incident': 'Editar Incidente',
'Edit Incident Report': 'Editar Informe Incidente',
'Edit Inventory Item': 'Editar Inventory Insumo',
'Edit Inventory Store': 'Editar Inventory Store',
'Edit Item': 'Editar elemento',
'Edit Item Catalog': 'Editar Catálogo de Elementos',
'Edit Item Catalog Categories': 'Editar categorías Catálogo del artículo',
'Edit Item Category': 'Editar Insumo Category',
'Edit Item Packet': 'Edit Item Packet',
'Edit Item Sub-Categories': 'Editar Sub-Categorías del artículo',
'Edit Key': 'Tecla de edición',
'Edit Kit': 'Editar Kit',
'Edit Layer': 'Modificar capa',
'Edit Location': 'Editar ubicación',
'Edit Log Entry': 'Editar entrada del registro',
'Edit Map Services': 'Editar Servicios de Mapas',
'Edit Marker': 'Editar Marcador',
'Edit Membership': 'Editar Afiliación',
'Edit Message': 'Editar Message',
'Edit Messaging Settings': 'Editar Messaging Settings',
'Edit Metadata': 'Editar Metadatos',
'Edit Mobile Settings': 'Editar Configuración del móvil',
'Edit Modem Settings': 'Editar Modem Settings',
'Edit Need': 'Edit Need',
'Edit Need Type': 'Edit Need Type',
'Edit Note': 'Edit Note',
'Edit Office': 'Editar Oficina',
'Edit Options': 'Edit Options',
'Edit Organization': 'Editar Organización',
'Edit Parameters': 'Editar Parámetros',
'Edit Partner': 'Editar Asociado',
'Edit Peer': 'Editar Peer',
'Edit Peer Details': 'Edit Peer Details',
'Edit Person': 'Editar Persona',
'Edit Person Details': 'Editar datos de la persona',
'Edit Personal Effects Details': 'Efectos Editar Datos Personales',
'Edit Photo': 'Editar Photo',
'Edit Pledge': 'Editar Compromiso',
'Edit Position': 'Editar Posición',
'Edit Problem': 'Editar Problema',
'Edit Profile': 'Editar Perfil',
'Edit Project': 'Editar Proyecto',
'Edit Projection': 'Editar Proyección',
'Edit Rapid Assessment': 'Edit Rapid Assessment',
'Edit Received Item': 'Edit Received Item',
'Edit Received Shipment': 'Edit Received Shipment',
'Edit Record': 'Editar Registro',
'Edit Recovery Details': 'Editar Detalles de Recuperación',
'Edit Registration': 'Edit Registration',
'Edit Registration Details': 'Edit Registration Details',
'Edit Relief Item': 'Editar Artículo de Ayuda',
'Edit Report': 'Editar Informe',
'Edit Request': 'Edit Request',
'Edit Request Details': 'Editar detalles de peticiones ',
'Edit Request Item': 'Editar Requerimiento Insumo',
'Edit Resource': 'Editar Recurso',
'Edit Response': 'Editar Respuesta',
'Edit River': 'Editar River',
'Edit Role': 'Editar Rol',
'Edit SMS': 'Editar SMS',
'Edit School District': 'Editar School District',
'Edit School Report': 'Editar School Report',
'Edit Sector': 'Editar Sector',
'Edit Sent Item': 'Edit Sent Item',
'Edit Setting': 'Editar Configuración',
'Edit Settings': 'Editar',
'Edit Shelter': 'Modificar Refugio',
'Edit Shelter Service': 'Editar Servicio de Refugio',
'Edit Shelter Type': 'Editar Tipo de Albergue',
'Edit Shipment Transit Log': 'Edición del envío de Tránsito',
'Edit Shipment to Send': 'Edit Shipment to Send',
'Edit Shipment/Way Bills': 'Envío Editar/Camino proyectos de ley',
'Edit Shipment<>Item Relation': 'Envío Editar<>Tema relación',
'Edit Site': 'Editar Sitio',
'Edit Skill': 'Editar Habilidad',
'Edit Skill Type': 'Editar Tipo de Habilidad',
'Edit Solution': 'Editar Solución',
'Edit Source': 'Modificar origen',
'Edit Staff': 'Editar Staff',
'Edit Staff Type': 'Editar Tipo de Personal',
'Edit Storage Bin Type(s)': 'Almacenamiento Editar Bin Tipo (s)',
'Edit Storage Bins': 'Editar compartimientos de almacenaje',
'Edit Storage Location': 'Editar ubicación de almacenamiento',
'Edit Subscription': 'Editar Subscription',
'Edit Survey Answer': 'Editar Respuesta Cuestionario',
'Edit Survey Question': 'Editar Inspección Question',
'Edit Survey Section': 'Editar Inspección Section',
'Edit Survey Series': 'Editar Inspección Series',
'Edit Survey Template': 'Editar Inspección Template',
'Edit Sync Settings': 'Editar configuración de sincronización',
'Edit Task': 'Editar Tarea',
'Edit Team': 'Editar Team',
'Edit Theme': 'Editar Tema',
'Edit Themes': 'Editar Temas',
'Edit Ticket': 'Editar Boleto',
'Edit Track': 'Editar Camino',
'Edit Tropo Settings': 'Edit Tropo Settings',
'Edit Unit': 'Editar Unidad',
'Edit User': 'Editar Usuario',
'Edit Volunteer Details': 'Edit Volunteer Details',
'Edit Volunteer Registration': 'Editar Registro Voluntario',
'Edit Warehouse': 'Edit Warehouse',
'Edit Warehouse Item': 'Edit Warehouse Item',
'Edit a Missing Person': 'Editar una persona desaparecida',
'Edit current record': 'Editar el registro actual',
'Edit message': 'Editar message',
'Edit the Application': 'Editar la aplicación',
'Editable?': 'Editable?',
'Education': 'Educación ',
'Education materials received': 'Materiales educativos recibidos',
'Education materials, source': 'Education materials, source',
'Effects Inventory': 'Efectos de inventario',
'Eggs': 'Eggs',
'Egypt': 'Egipto',
'Either a shelter or a location must be specified': 'Un alberque o una locación debe ser nombrado',
'Either file upload or document URL required.': 'Either file upload or document URL required.',
'Either file upload or image URL required.': 'Se requiere un archivo cargado o URL de la imagen.',
'El Salvador': 'El Salvador',
'Elderly person headed households (>60 yrs)': 'Elderly person headed households (>60 yrs)',
'Electrical': 'Eléctrico',
'Electricity': 'Electricidad',
'Elevated': 'Elevado',
'Email': 'Correo electrónico',
'Email Details': 'Email Detalles',
'Email InBox': 'Correo electrónico Bandeja de entrada',
'Email OutBox': 'Correo electrónico Bandeja de salida',
'Email Settings': 'Email Settings',
'Email added': 'Email añadido',
'Email address verified, however registration is still pending approval - please wait until confirmation received.': 'Email address verified, however registration is still pending approval - please wait until confirmation received.',
'Email created': 'Correo electrónico creado',
'Email deleted': 'Correo electrónico eliminado',
'Email settings updated': 'Configuración de correo electrónico actualizada',
'Email updated': 'Correo electrónico actualizada',
'Embalming': 'Embalsamamiento',
'Embassy': 'Embajada',
'Emergency Capacity Building project': 'proyecto de Construcción de Capacidad ante Emergencias',
'Emergency Department': 'Departamento de Emergencia',
'Emergency Shelter': 'Emergency Shelter',
'Emergency Support Facility': 'Apoyo al Centro de Emergencias Médicas',
'Emergency Support Service': 'Emergency Support Service',
'Emergency Telecommunications': 'Emergency Telecommunications',
'Enable/Disable Layers': 'Activar / Desactivar Capas',
'Enabled': 'Habilitado',
'Enabled?': 'Habilitado?',
'End date': 'Fecha de finalización',
'End date should be after start date': 'End date should be after start date',
'End of Period': 'End de Period',
'English': 'Inglés',
'Enter Coordinates in Deg Min Sec': 'Introduzca las Coordenadas en Grados, Minutos, Segundos',
'Enter Coordinates:': 'Enter Coordinates:',
'Enter a GPS Coord': 'Enter a GPS Coord',
'Enter a date before': 'Introduzca una fecha antes de',
'Enter a location': 'Enter a location',
'Enter a name for the spreadsheet you are uploading (mandatory).': 'Escriba un nombre para la hoja de cálculo que está cargando (obligatorio).',
'Enter a new support request.': 'Introduzca una nueva solicitud de soporte.',
'Enter a summary of the request here.': 'Hacer un resumen de la petición aquí. ',
'Enter a unique label!': 'Enter a unique label!',
'Enter a valid email': 'Enter a valid email',
'Enter tags separated by commas.': 'Enter tags separated by commas.',
'Enter the same password as above': 'Enter the same password as above',
'Enter your firstname': 'Enter your firstname',
'Entering a phone number is optional, but doing so allows you to subscribe to receive SMS messages.': 'Ingresar un número de teléfono es opcional, pero hacerlo permite que te suscribas para recibir mensajes SMS.',
'Entity Type': 'Tipo de entidad',
'Entry deleted': 'Entry deleted',
'Equatorial Guinea': 'Guinea Ecuatorial',
'Equipment': 'Equipo',
'Eritrea': 'Eritrea',
'Error encountered while applying the theme.': 'Detectado un error al aplicar el tema.',
'Error in message': 'Error en mensaje',
'Error logs for "%(app)s"': 'Los registros de errores de "%(app)s"',
'Errors': 'Errores',
'Estimated # of households who are affected by the emergency': '# Estimado de hogares que han sido afectados por la emergencia',
'Estimated # of people who are affected by the emergency': 'Estimated # de people who are affected by the emergency',
'Estimated total number of people in institutions': 'Número estimado de total de personas en instituciones',
'Estonia': 'Estonia',
'Ethiopia': 'Etiopía',
'Euros': 'Euros',
'Evacuating': 'Evacuación',
'Evacuation': 'Evacuación',
'Evaluate the information in this message. (This value SHOULD NOT be used in public warning applications.)': 'Evaluar la información contenida en este mensaje. (Este valor NO DEBE ser utilizado en aplicaciones de alerta a la población.)',
'Event Time': 'Hora del evento',
'Event Type': 'Tipo de evento',
'Event type': 'Event type',
'Example': 'Example',
'Exceeded': 'Excedido',
'Excreta disposal': 'Excreta disposal',
'Execute a pre-planned activity identified in <instruction>': 'Execute a pre-planned activity identified in <instruction>',
'Existing food stocks, main dishes': 'Existing food stocks, main dishes',
'Existing food stocks, side dishes': 'Existing food stocks, side dishes',
'Expected In': 'En espera',
'Expected Out': 'Previstos de salida',
'Expiry Time': 'Tiempo de caducidad',
'Explosive Hazard': 'Explosive Hazard',
'Export': 'Exportación',
'Export Data': 'Exportación de datos',
'Export Database as CSV': 'Exportar base de datos como CSV',
'Export in GPX format': 'Exportar en formato GPX',
'Export in KML format': 'Exportar en formato KML',
'Export in OSM format': 'Export in OSM format',
'Export in PDF format': 'Exportar en formato PDF',
'Export in RSS format': 'Exportar en formato RSS',
'Export in XLS format': 'Exportar en formato XLS',
'External Features': 'Las características externas',
'Eye Color': 'Color de Ojos',
'Eyebrows, Peculiarities': 'Cejas, las peculiaridades',
'Eyebrows, Shape': 'Cejas, Forma',
'Eyebrows, Thickness': 'Cejas, Espesor',
'Eyes, Colour': 'Ojos, Color',
'Eyes, Distance between Eyes': 'Ojos, distancia entre ojos',
'Eyes, Peculiarities': 'Ojos, Peculiaridades',
'Eyes, Shade': 'Ojos, Sombra',
'FAX': 'FAX',
'Face': 'Cara',
'Facebook': 'Facebook',
'Facial hair, Colour': 'Vello facial, Color',
'Facial hair, Type': 'Vello facial, Tipo',
'Facial hair, color': 'Vello facial, color',
'Facial hair, type': 'Vello facial, tipo',
'Facial hear, length': 'Vello facial, tamaño',
"Facilitate uploading of missing person's photograph": 'Facilitar la subida de fotografía personal de persona desaparecida',
'Facility Operations': 'Fondo para Operaciones',
'Facility Status': 'Fondo para el Estado',
'Facility Type': 'Tipo de Instalaciones',
'Factors affecting school attendance': 'Factores que afectan la asistencia a la escuela',
'Failed!': 'No se pudo!',
'Falling Object Hazard': 'Falling Object Hazard',
'Families/HH': 'Familias/HH',
'Family': 'Familia',
'Family Care': 'Cuidado Familiar',
'Family tarpaulins received': 'Cubierta de Lona Familiar, recibida ',
'Family tarpaulins, source': 'Family tarpaulins, source',
'Family/friends': 'Familia/amigos',
'Farmland/fishing material assistance, Rank': 'Farmland/fishing material assistance, Rank',
'Fax': 'Fax',
'Feature': 'Funcionalidad',
'Feature Class': 'Clase de Funcionalidad',
'Feature Class Details': 'Detalles de características de clase',
'Feature Class added': 'Características agregada',
'Feature Class deleted': 'Feature Class eliminado',
'Feature Class updated': 'Feature Class actualizado',
'Feature Classes': 'Clases de funciones',
'Feature Classes are collections of Locations (Features) of the same type': 'Feature Classes are collections de Locations (Features) de the same type',
'Feature Classes to Feature Groups': 'Clases de Características a Grupos de Características',
'Feature Group': 'Reportaje Grupo',
'Feature Group Details': 'Detalles del grupo de funciones',
'Feature Group Updated': 'Grupo de características Actualizado',
'Feature Group added': 'Reportaje Grupo añadido',
'Feature Group deleted': 'Característica grupo eliminado',
'Feature Group updated': 'Reportaje Grupo actualización',
'Feature Groups': 'Grupos de funciones',
'Feature Layer Details': 'Feature Layer Detalles',
'Feature Layer added': 'Feature Layer agregado',
'Feature Layer deleted': 'Feature Layer eliminada',
'Feature Layer updated': 'Características de la actualización de la capa',
'Feature Layers': 'Feature Layers',
'Feature Namespace': 'Caracterisitcas de espaciado de nombre',
'Feature Type': 'Tipo de función',
'Features': 'Características',
'Features Include': 'Las características incluyen',
'Feet, Condition': 'Los pies, en la condición',
'Feet, Nails': 'Los pies, las uñas',
'Feet, Shape': 'Pies, Forma',
'Female': 'Female',
'Female headed households': 'Female headed households',
'Few': 'Pocos',
'Field Hospital': 'Hospital de campaña',
'Fields tagged with a star': 'Los campos marcados con un asterisco',
'Fiji': 'Fiji',
'File': 'File',
'Fill in Latitude': 'Fill in Latitude',
'Fill in Longitude': 'Fill in Longitude',
'Filter': 'Filter',
'Filter Field': 'Campo Filtrado',
'Filter Value': 'Filter Value',
'Filtered search of aid pledges and requests': 'Búsqueda filtrada de promesas de ayuda y solicitudes',
'Find': 'Buscar',
'Find Dead Body Report': 'Find Dead Body Report',
'Find Details': 'Buscar Detalles',
'Find Recovery Report': 'Buscar recuperación Informe',
'Find Report added': 'Encontrar Informe añadido',
'Find Report deleted': 'Encontrar Informe eliminado',
'Find Report updated': 'Buscar Reporte actualizado',
'Find Volunteers': 'Find Volunteers',
'Find by Name': 'Buscar por Nombre',
'Find report': 'Buscar informe',
'Finder': 'Buscador',
'Fingerprint': 'Huella dactilar',
'Fingerprinting': 'Huellas digitales',
'Fingerprints': 'Huellas Dactilares',
'Finish': 'Finalizar',
'Finished Jobs': 'Finished Jobs',
'Finland': 'Finlandia',
'Fire': 'Fire',
'Fire suppression and rescue': 'Fire suppression and rescue',
'First Name': 'Primer Nombre',
'First name': 'Primer Nombre',
'Fishing': 'Fishing',
'Flash Flood': 'Inundación Inmediata',
'Flash Freeze': 'Flash Freeze',
'Fleet Management': 'Gestión de Flotas',
'Flexible Impact Assessments': 'Evaluaciones de Impacto Flexible',
'Flood': 'Inundación',
'Flood Alerts': 'Alertas de Inundación',
'Flood Alerts show water levels in various parts of the country': 'Alertas de Inundación, mostrar niveles de agua en varias partes del país',
'Flood Report': 'Reporte de Inundación',
'Flood Report Details': 'Informe Detallado sobre Inundación',
'Flood Report added': 'Inundación Report agregado',
'Flood Report deleted': 'Inundación Report eliminada',
'Flood Report updated': 'Inundación Report updated',
'Flood Reports': 'Inundación Reports',
'Flow Status': 'Flow Status',
'Focal Point': 'Punto Focal',
'Fog': 'Fog',
'Food': 'Comida',
'Food Supply': 'Food Supply',
'Food assistance available/expected': 'Ayuda alimentaria disponible/esperada',
'Footer': 'Pie de página',
'Footer file %s missing!': 'Pie de página %s archivo que falta!',
'For Eden instances enter the application base URL, e.g. http://sync.sahanfoundation.org/eden, for other peers the URL of the synchronization interface.': 'Para las instancias Eden escriba la dirección URL base de la aplicación, por ejemplo, http://sync.sahanfoundation.org/eden, para otros compañeros la dirección URL de la interfaz de sincronización.',
'For POP-3 this is usually 110 (995 for SSL), for IMAP this is usually 143 (993 for IMAP).': 'Para POP-3 usualmente es 110 (995 para SSL), para IMAP es usualmente 143 (993 para IMAPS)',
'For a country this would be the ISO2 code, for a Town, it would be the Airport Locode.': 'For a country this would be the ISO2 code, for a Town, it would be the Airport Locode.',
'For each sync partner, there is a default sync job that runs after a specified interval of time. You can also set up more sync jobs which could be customized on your needs. Click the link on the right to get started.': 'Por cada socio de sincronización, hay un trabajo de sincronización predeterminado que corre después de un intervalo de tiempo específico. También es posible instalar más trabajos de sincronización personalizados según tus necesidades. Haz click en el enlace a la derecha para iniciar.',
'For enhanced security, you are recommended to enter a username and password, and notify administrators of other machines in your organization to add this username and password against your UUID in Synchronization -> Sync Partners': 'Para mayor seguridad, se recomienda introducir un nombre de usuario y contraseña, y notificar a los administradores de otras máquinas de su organización para agregar este nombre de usuario y contraseña contra su UUID en Sincronización -> Socios Sync',
'For live help from the Sahana community on using this application, go to': 'Para obtener ayuda en vivo desde la comunidad Sahana acerca del uso de esta aplicación, vaya al',
'For messages that support alert network internal functions': 'For messages that support alert network internal functions',
'For more details on the Sahana Eden system, see the': 'Para más detalles acerca del sistema Sahana Eden, consulte el',
'For more details on the Sahana system, see the': 'Para más detalles acerca el sistema Sahana, consulte el',
'For more information, see ': 'Para obtener más información, consulte',
'For:': 'For:',
'Forehead, Height': 'Frente, Estatura',
'Forehead, Inclination': 'Frente, Inclinación',
'Forehead, Width': 'Frente, Ancho',
'Forest Fire': 'Forest Fire',
'Formal camp': 'Campamento formal',
'Format': 'Formato',
'Forms': 'Formularios',
'Found': 'Found',
'France': 'Francia',
'Freezing Drizzle': 'Freezing Drizzle',
'Freezing Rain': 'Lluvia Helada',
'Freezing Spray': 'Freezing Spray',
'French': 'Francés',
'Friday': 'Friday',
'From Location': 'From Location',
'From Warehouse': 'From Warehouse',
'Frost': 'Frost',
'Fuel': 'Combustible',
'Full': 'Completo',
'Full beard': 'Lleno barba',
'Fullscreen Map': 'Mapa de tamaño completo',
'Functional Tests': 'Pruebas funcionales',
'Functions available': 'Las funciones disponibles',
'Funding Organisation': 'Funding Organisation',
'Funding Organization': 'Organización de Financiamiento',
'Funeral': 'Funeral',
'GIS Reports of Shelter': 'Reportes de Sistemas de Información Geográfico del Refugio',
'GIS integration to view location details of the Shelter': 'Integración con Sistemas de Información Geográficos SIG para ver detalles de ubicación del Refugio',
'GPS': 'GPS',
'GPS Marker': 'Marcador GPS',
'GPS Track': 'GPS Track',
'GPS Track File': 'GPS de archivos de pista',
'GPX Track': 'GPX Track',
'GRN': 'GRN',
'Gabon': 'Gabón',
'Gale Wind': 'Gale Wind',
'Gap Analysis': 'Análisis de Deficiencias',
'Gap Analysis Map': 'Mapa de Análisis de Brecha',
'Gap Analysis Report': 'Reporte de Análisis de Brecha',
'Gap Map': 'Gap Map',
'Gap Report': 'Gap Report',
'Gateway Settings': 'Configuración de Entrada de Puerto',
'Gateway settings updated': 'Gateway settings updated',
'Gender': 'Género',
'General Medical/Surgical': 'Médica / quirúrgica',
'General Skills': 'Habilidades Generales',
'General emergency and public safety': 'General emergency and public safety',
'Generator': 'Generador',
'Geocoder Selection': 'Geocoder Selection',
'Geometry Name': 'Geometry Name',
'Geonames.org search requires Internet connectivity!': 'Búsqueda por Geonames.org requiere conexión a Internet!',
'Geophysical (inc. landslide)': 'Geophysical (inc. landslide)',
'Georgia': 'Georgia',
'Geraldo module not available within the running Python - this needs installing for PDF output!': 'Geraldo no disponibles en el módulo de ejecución de Python - esto debe instalar para la salida del pdf!',
'Geraldo module not available within the running Python - this needs installing to do PDF Reporting!': 'Geraldo no disponibles en el módulo de ejecución de Python - esto tiene que ver la instalación de PDF de Información!',
'Germany': 'Alemania',
'Ghana': 'Ghana',
'Girls 13-18 yrs in affected area': 'Girls 13-18 yrs in affected area',
'Girls 13-18 yrs not attending school': 'Jóvenes mujeres de 13-18 años que no asisten a la escuela',
'Girls 6-12 yrs in affected area': 'Niñas ente 6-12 años que se encuentran en el área afectada',
'Girls 6-12 yrs not attending school': 'Girls 6-12 yrs not attending school',
'Give a brief description of the image, e.g. what can be seen where on the picture (optional).': 'Presente una descripción breve de la imagen, ej. lo que puede ser visto en la imagen (opcional).',
'Give information about where and when you have seen the person': 'Suministre información sobre dónde y cuándo han visto a la persona',
'Give information about where and when you have seen them': 'Give information about where and when you have seen them',
'Global Messaging Settings': 'Configuración Global de Mensajería',
'Goatee': 'Perilla',
'Goitre': 'Bocio',
'Government': 'Gobierno',
'Government UID': 'Gobierno UID',
'Government UUID': 'Gobierno UUID',
'Government building': 'Edificio de gobierno',
'Grade': 'Grado',
'Greece': 'Grecia',
'Greek': 'Greek',
'Grenada': 'Granada',
'Group': 'Grupo',
'Group %(group_id)s created': 'Grupo %(group_id)s creado',
'Group Details': 'Detalles del grupo',
'Group ID': 'ID de grupo',
'Group Member added': 'Group Member agregado',
'Group Members': 'Miembros del grupo',
'Group Membership Details': 'Detalles del grupo de Socios',
'Group Membership added': 'Pertenencia a grupos añadido',
'Group Membership deleted': 'Pertenencia a grupo eliminado',
'Group Membership updated': 'Grupo de Afiliación actualizada',
'Group Memberships': 'Grupo de Membresías',
'Group Title': 'Grupo Título',
'Group Type': 'Tipo de grupo',
'Group Updated': 'Grupo Actualizado',
'Group added': 'Grupo añadido',
'Group deleted': 'Grupo eliminado',
'Group description': 'Descripción del grupo',
'Group name': 'Nombre del grupo',
'Group type': 'Tipo de grupo',
'Group updated': 'Grupo de actualización',
"Grouping by 'Family Unit' or other group category": 'La agrupación por "Unidad Familiar" o categoría otro grupo',
'Groups': 'Grupos',
'Groups removed': 'Grupos retirados',
'Guatemala': 'Guatemala',
'Guest': 'Invitado',
'Guinea': 'Guinea',
'Guinea-Bissau': 'Guinea-Bissau',
'Guyana': 'Guayana',
'HF': 'HF',
'Hail': 'granizo',
'Hair Color': 'Hair Color',
'Hair Length': 'Hair Length',
'Hair Style': 'Estilo de Cabello ',
'Hair of the head, Baldness (extent)': 'Pelo de la cabeza, la calvicie (medida)',
'Hair of the head, Baldness (location)': 'Pelo de la cabeza, la calvicie (ubicación)',
'Hair of the head, Colour': 'Pelo de la cabeza, Color',
'Hair of the head, Length': 'Pelo de la cabeza, largo',
'Hair of the head, Parting': 'Pelo de la cabeza, despedida',
'Hair of the head, Shade of colour': 'Pelo de la cabeza, sombra de color',
'Hair of the head, Style': 'Pelo de la cabeza, Estilo',
'Hair of the head, Thickness': 'Pelo de la cabeza, Espesor',
'Hair of the head, Type': 'Pelo de la cabeza, tipo',
'Hair-piece': 'Secador de pieza',
'Haiti': 'Haití',
'Hands, Nail length': 'Las manos, la longitud de las uñas',
'Hands, Nail peculiarities': 'Las manos, las peculiaridades del clavo',
'Hands, Nicotine': 'Las manos, la nicotina',
'Hands, Shape': 'Manos, Forma',
'Hands, Size': 'Las manos, el tamaño',
'Has data from this Reference Document been entered into Sahana?': 'Has data from this Reference Document been entered into Sahana?',
'Has the safety and security of women and children in your community changed since the emergency?': '¿Ha cambiado la seguridad de mujeres y niños en su comunidad desde la emergencia?',
'Has your business been damaged in the course of the disaster?': 'Has your business been damaged in the course of the disaster?',
'Have households received any shelter/NFI assistance or is assistance expected in the coming days?': 'Have households received any shelter/NFI assistance or is assistance expected in the coming days?',
'Have normal food sources been disrupted?': 'Have normal food sources been disrupted?',
'Have schools received or are expecting to receive any assistance?': 'Have schools received or are expecting to receive any assistance?',
'Have the people received or are you expecting any medical or food assistance in the coming days?': '¿La gente ha recibido o están esperando asistencia médica o alimentos en los próximos días?',
'Hazard': 'Riesgo',
'Hazard Pay': 'Peligro de pago',
'Hazardous Material': 'Hazardous Material',
'Hazardous Road Conditions': 'Hazardous Road Conditions',
'Head': 'Cabeza',
'Head form, front': 'Formato de cabecera, frente',
'Head form, profile': 'Formato de cabecera, perfil',
'Header Background': 'Encabezado Antecedentes',
'Header background file %s missing!': 'Cabecera del archivo de cabecera %s perdido',
'Headquarters': 'Sede',
'Health': 'Salud',
'Health Org UUID': 'Salud Org UUID',
'Health care assistance, Rank': 'Health care assistance, Rank',
'Health center': 'Centro de salud',
'Health center with beds': 'Centro de salud con camas',
'Health center without beds': 'Centro de salud sin camas',
'Health services functioning prior to disaster': 'Servicios de salud en funcionamiento antes del desastre',
'Health services functioning since disaster': 'Servicios de salud funcionando desde el desastre',
'Healthcare Worker': 'Healthcare Worker',
'Heat Wave': 'Heat Wave',
'Heat and Humidity': 'Heat and Humidity',
'Height': 'Altura',
'Height (cm)': 'Estatura (cm)',
'Help': 'Ayuda',
'Helps to monitor status of hospitals': 'Ayuda a monitorear el estado de hospitales',
'Helps to report and search for Missing Persons': 'Ayuda a reportar y buscar personas perdidas',
'Here are the solution items related to the problem.': 'Here are the solution insumos related to the problem.',
'Here you will find all synchronization attempts made by either your machine or foreign machines for data exchange. This also lists data exchanges made using Sahana API.': 'Aquí encontrarás todos los intentos de sincronización ya sea la máquina o máquinas extranjeras para el intercambio de datos. Esto también listas de los intercambios de datos realizadas con Sahana API.',
'High': 'Alto',
'High Water': 'Nivel Máximo de Agua',
'Hindu': 'Hindú',
'History': 'Historia',
'Hit the back button on your browser to try again.': 'Presione el botón de regresar en su explorador para intentarlo nuevamente.',
'Holiday Address': 'Dirección de vacaciones',
'Home': 'Inicio',
'Home Address': 'Dirección de Hogar',
'Home Country': 'País de Origen',
'Home Crime': 'Crimen Familiar',
'Honduras': 'Honduras',
'Hospital': 'Clínica',
'Hospital Details': 'Detalles Hospital',
'Hospital Management': 'Gestión Hospitalaria',
'Hospital Status Report': 'Hospital Informe de situación',
'Hospital information added': 'Información Clínica agregada',
'Hospital information deleted': 'Hospital de la información suprimida',
'Hospital information updated': 'Hospital de información actualizada',
'Hospital status assessment.': 'Hospital de evaluación de la situación.',
'Hospitales': 'Hospitales',
'Hospitals': 'Hospitales',
'Hot Spot': 'Punto Caliente',
'Hourly': 'Horario',
'Household kits received': 'Kits de hogares recibidos',
'Household kits, source': 'Household kits, source',
'How did boys 13-17yrs spend most of their time prior to the disaster?': 'Antes del desastre, qué actividades eran las más realizadas por niños entre 13 y 17 años?',
'How did boys <12yrs spend most of their time prior to the disaster?': 'How did boys <12yrs spend most of their time prior to the disaster?',
'How did boys girls 13-17yrs spend most of their time prior to the disaster?': 'How did boys girls 13-17yrs spend most of their time prior to the disaster?',
'How did girls <12yrs spend most of their time prior to the disaster?': 'How did girls <12yrs spend most of their time prior to the disaster?',
'How do boys 13-17yrs spend most of their time now?': '¿En qué pasan ahora la mayor parte del tiempo los niños de 13-17 años?',
'How do boys <12yrs spend most of their time now?': 'How do boys <12yrs spend most of their time now?',
'How do girls 13-17yrs spend most of their time now?': '¿Cómo las niñas de 13 a 17 años pasan la mayor parte de su tiempo ahora?',
'How do girls <12yrs spend most of their time now?': 'How do girls <12yrs spend most of their time now?',
'How does it work?': '¿Cómo funciona?',
'How is this person affected by the disaster? (Select all that apply)': 'How is this person affected by the disaster? (Select all that apply)',
'How long does it take you to reach the available water resources? Specify the time required to go there and back, including queuing time, by foot.': '¿Cuánto tiempo te lleva llegar hasta las fuentes disponibles de agua? Especifica el tiempo que te lleva llegar allá y regresar, incluyendo el tiempo de cola, caminando:',
'How long does it take you to walk to the health service?': '¿Cuánto le toma caminar para el servicio de salud?',
'How long will the food last?': '¿Cuánto durará la comida?',
'How long will this water resource last?': '¿Cuánto durará este recurso de agua?',
'How many Boys (0-17 yrs) are Dead due to the crisis': 'Cuantos Niños (0-17 yrs) se encuentras muertos por la crisis',
'How many Boys (0-17 yrs) are Injured due to the crisis': 'Cuantos Boys (0-17 yrs) are Injured due to the crisis',
'How many Boys (0-17 yrs) are Missing due to the crisis': 'Cuantos Boys (0-17 yrs) are Missing due to the crisis',
'How many Girls (0-17 yrs) are Dead due to the crisis': 'Cuantos Girls (0-17 yrs) are Dead due to the crisis',
'How many Girls (0-17 yrs) are Injured due to the crisis': 'Cuantos Girls (0-17 yrs) are Injured due to the crisis',
'How many Girls (0-17 yrs) are Missing due to the crisis': 'Cuantos Girls (0-17 yrs) are Missing due to the crisis',
'How many Men (18 yrs+) are Dead due to the crisis': 'Cuantos Men (18 yrs+) are Dead due to the crisis',
'How many Men (18 yrs+) are Injured due to the crisis': 'Cuantos Men (18 yrs+) are Injured due to the crisis',
'How many Men (18 yrs+) are Missing due to the crisis': 'Cuantos Men (18 yrs+) are Missing due to the crisis',
'How many Women (18 yrs+) are Dead due to the crisis': 'Cuantos Women (18 yrs+) are Dead due to the crisis',
'How many Women (18 yrs+) are Injured due to the crisis': 'Cuantas Mujeres (18 yrs+) han sido lesionadas debido a la crisis',
'How many Women (18 yrs+) are Missing due to the crisis': 'Cuantos Women (18 yrs+) are Missing due to the crisis',
'How many days will the supplies last?': '¿Por cuántos días durarán los recursos?',
'How many doctors in the health centers are still actively working?': 'How many doctors in the health centers are still actively working?',
'How many houses are uninhabitable (uninhabitable = foundation and structure destroyed)?': 'Cuantas casas se encuentran inhabilitables? inhabilitables (inhabitable=cimiento y estructura destruida)?',
'How many houses suffered damage but remain usable (usable = windows broken, cracks in walls, roof slightly damaged)?': 'How many houses suffered damage but remain usable (usable = windows broken, cracks in walls, roof slightly damaged)?',
'How many latrines are available in the village/IDP centre/Camp?': 'How many latrines are available in the village/IDP centre/Camp?',
'How many midwives in the health centers are still actively working?': 'How many midwives in the health centers are still actively working?',
'How many new cases have been admitted to this facility in the past 24h?': 'How many new cases have been admitted to this facility in the past 24h?',
'How many nurses in the health centers are still actively working?': '¿Cuántas enfermeras en los centros de salud todavía se encuentran trabajando activamente?',
'How many of the patients with the disease died in the past 24h at this facility?': 'How many of the patients with the disease died in the past 24h at this facility?',
'How many of the primary school age boys (6-12) in the area are not attending school?': 'How many of the primary school age boys (6-12) in the area are not attending school?',
'How many of the primary school age girls (6-12) in the area are not attending school?': 'How many of the primary school age girls (6-12) in the area are not attending school?',
'How many of the primary/secondary schools are now open and running a regular schedule of class?': 'How many of the primary/secondary schools are now open and running a regular schedule of class?',
'How many of the secondary school age boys (13-18) in the area are not attending school?': 'How many of the secondary school age boys (13-18) in the area are not attending school?',
'How many of the secondary school age girls (13-18) in the area are not attending school?': '¿Cuántas jóvenes de edad de escuela secundaria (13-18) en el área no están asistiendo a la escuela?',
'How many patients with the disease are currently hospitalized at this facility?': 'How many patients with the disease are currently hospitalized at this facility?',
'How many primary school age boys (6-12) are in the affected area?': 'How many primary school age boys (6-12) are in the affected area?',
'How many primary school age girls (6-12) are in the affected area?': 'How many primary school age girls (6-12) are in the affected area?',
'How many primary/secondary schools were opening prior to the disaster?': 'How many primary/secondary schools were opening prior to the disaster?',
'How many secondary school age boys (13-18) are in the affected area?': 'How many secondary school age boys (13-18) are in the affected area?',
'How many secondary school age girls (13-18) are in the affected area?': 'How many secondary school age girls (13-18) are in the affected area?',
'How many teachers have been affected by the disaster (affected = unable to work)?': 'How many teachers have been affected by the disaster (affected = unable to work)?',
'How many teachers worked in the schools prior to the disaster?': 'How many teachers worked in the schools prior to the disaster?',
'How much detail is seen. A high Zoom level means lot of detail, but not a wide area. A low Zoom level means seeing a wide area, but not a high level of detail.': 'How much detail is seen. A high Zoom level means lot of detail, but not a wide area. A low Zoom level means seeing a wide area, but not a high level of detail.',
'Human Resources': 'Recursos Humanos',
'Humanitarian NGO': 'Humanitarian NGO',
'Hungary': 'Hungría',
'Hurricane': 'Huracán',
'Hurricane Force Wind': 'Hurricane Force Wind',
'Hygiene': 'Higiene',
'Hygiene NFIs': 'Hygiene NFIs',
'Hygiene kits received': 'Hygiene kits received',
'Hygiene kits, source': 'Kits de higiene, fuente',
'Hygiene practice': 'Hygiene practice',
'Hygiene problems': 'Hygiene problems',
'ICT': 'TIC',
'ID': 'Identificación',
'ID Label': 'Etiqueta de identificación',
'ID Label: ': 'ID Label: ',
'ID Tag': 'Etiqueta de Identificación',
'ID Tag Number': 'ID Tag Number',
'ID type': 'Tipo de identificación',
'IT': 'IT',
'Ice Pressure': 'Presión de hielo',
'Iceberg': 'Iceberg',
'Iceland': 'Islandia',
'Ideally a full URL to the source file, otherwise just a note on where data came from.': 'Idelamnete una página URL completa la fuente del archivo, de otra manera una simple nota que indique de donde viene el dato.',
'Identificación de Víctimas de Desastres': 'Identificación de Víctimas de Desastres',
'Identification': 'Identificación',
'Identification Report': 'Informe de Identificación',
'Identification Reports': 'Informes de identificación',
'Identification Status': 'Identificación de estado',
'Identification label of the Storage bin.': 'Etiqueta de identificación del contenedor de Almacenamiento.',
'Identified as': 'Identified as',
'Identified by': 'Identified by',
'Identity': 'Identidad',
'Identity Details': 'Detalles de Identidad',
'Identity added': 'Identidad añadido',
'Identity deleted': 'Identidad eliminado',
'Identity updated': 'Identidad actualizada',
'If Unit = m, Base Unit = Km, then multiplicator is 0.0001 since 1m = 0.001 km.': 'Si la Unidad = m, la Unidad Base = Km, entonces el multiplicador es 0.0001 dado que 1m = 0.001 km.',
'If enabled then a log is maintained of all records a user accesses. If disabled then it can still be enabled on a per-module basis.': 'If enabled then a log is maintained of all records a user accesses. If disabled then it can still be enabled on a per-module basis.',
'If enabled then a log is maintained of all records a user edits. If disabled then it can still be enabled on a per-module basis.': 'If enabled then a log is maintained of all records a user edits. If disabled then it can still be enabled on a per-module basis.',
'If no marker defined then the system default marker is used': 'Si no hay marcador definido entonces el marcador por defecto del sistema será utilizado',
'If no, specify why': 'En caso negativo, indique por qué',
'If the location is a geographic area, then state at what level here.': 'If the location is a geographic area, then state at what level here.',
'If this is set to True then mails will be deleted from the server after downloading.': 'If this is set to True then mails will be deleted from the server after downloading.',
'If this record should be restricted then select which role is required to access the record here.': 'If this record should be restricted then select which role is required to access the record here.',
'If this record should be restricted then select which role(s) are permitted to access the record here.': 'If this record should be restricted then select which role(s) are permitted to access the record here.',
"If this setting is enabled then all deleted records are just flagged as deleted instead of being really deleted. They will appear in the raw database access but won't be visible to normal users.": 'Si este parámetro se habilita, todos los registros eliminados serán etiquetados como eliminados, en lugar de ser efectivamente eliminados. Aparecerán en la base de datos maestra pero no estarán visibles a los usuarios regulares.',
'If yes, specify what and by whom': 'If yes, specify what and by whom',
'If yes, which and how': 'If yes, which and how',
"If you cannot find the person you want to register as a volunteer, you can add them by clicking 'Add Person' below:": "If you cannot find the person you want to register as a volunteer, you can add them by clicking 'Add Person' below:",
"If you cannot find the person you want to report missing, you can add them by clicking 'Add Person' below:": "If you cannot find the person you want to report missing, you can add them by clicking 'Add Person' below:",
"If you cannot find the record of the person you want to report missing, you can add it by clicking 'Add Person' below:": 'Si no encuentra el registro de la persona que usted quiere reportar como desaparecida, usted puede añadirlo haciendo clic en `Agregar Persona´ a continuación:',
'If you do not enter a Reference Document, your email will be displayed to allow this data to be verified.': 'Si no se introduce un Documento de Referencia, el correo electrónico se muestra para permitir que estos datos sean verificados.',
'If you know what the Geonames ID of this location is then you can enter it here.': 'If you know what the Geonames ID of this location is then you can enter it here.',
'If you know what the OSM ID of this location is then you can enter it here.': 'Si conoces la identificación OSM de esta ubicación lo puede colocar aquí.',
'If you need to add a new document then you can click here to attach one.': 'If you need to add a new document then you can click here to attach one.',
'If you run multiple servers in a network, you would probably see this place listing some other machines. Sahana can automatically pick servers in your organization (if they have sync username and password of your machine or if it is set to default) and add them to your list of machines to perform synchronization with. You can modify individual sync policy for each server. You can also add username and password of that server to retrieve and send data to that server. You can also manually add other servers.': 'Si ejecuta varios servidores en una red, es probable ver que enumera algunas otras máquinas. Sahana puede recoger automáticamente los servidores de la organización (si tienen nombre de usuario y la contraseña de sincronización de la máquina o si se ha definido por defecto) y agregarlos a su lista de máquinas para realizar la sincronización con ellas. Puede modificar la directiva de sincronización individual para cada servidor. También puede agregar nombre de usuario y la contraseña de ese servidor para recuperar y enviar datos a ese servidor. Adicionalmente, puede agregar manualmente otros servidores.',
'If you would like to help, then please': 'If you would like to help, then please',
'Illegal Immigrant': 'Illegal Immigrant',
'Image': 'Imagen',
'Image Details': 'Detalles de la imagen',
'Image Tags': 'Image Tags',
'Image Type': 'Tipo de imagen',
'Image Upload': 'De subida de imágenes',
'Image added': 'Imagen añadido',
'Image deleted': 'Imagen eliminado',
'Image updated': 'Imagen actualizada',
'Image/Attachment': 'Image/Attachment',
'Image/Other Attachment': 'Imagen / Otros datos adjuntos',
'Imagery': 'Imágenes',
'Images': 'Imágenes',
'Immediate reconstruction assistance, Rank': 'Immediate reconstruction assistance, Rank',
'Impact Assessments': 'Evaluaciones de Impacto',
'Impact Details': 'Impact Details',
'Impact Type': 'Impact Type',
'Impact Type Details': 'Impact Type Details',
'Impact Type added': 'Impact Type added',
'Impact Type deleted': 'Impact Type deleted',
'Impact Type updated': 'Impact Type updated',
'Impact Types': 'Impact Types',
'Impact added': 'Impact added',
'Impact deleted': 'Impact deleted',
'Impact updated': 'Impact updated',
'Impacts': 'Impacts',
'Import': 'Importación',
'Import & Export Data': 'Importación y Exportación de Datos',
'Import Data': 'Importar datos',
'Import Job': 'Importación de Empleo',
'Import Jobs': 'Importación Empleo',
'Import and Export': 'Importaciones y exportaciones ',
'Import from Ushahidi Instance': 'Importar de Instancia de Ushahidi',
'Import if Master': 'Importar si es Maestro',
'Import job created': 'Trabajo de importación creado',
'Import multiple tables as CSV': 'Importar múltiples tablas como CSV',
'Import/Export': 'Importación y Exportación',
'Import/Master': 'Import/Master',
'Important': 'Importante',
'Importantly where there are no aid services being provided': 'Destacar dónde no se están prestando servicios de asistencia',
'Imported': 'Imported',
'Importing data from spreadsheets': 'Importing data from spreadsheets',
'Improper decontamination': 'Improper decontamination',
'Improper handling of dead bodies': 'Improper handling of dead bodies',
'In GeoServer, this is the Layer Name. Within the WFS getCapabilities, this is the FeatureType Name part after the colon(:).': 'In GeoServer, this is the Layer Name. Within the WFS getCapabilities, this is the FeatureType Name part after the colon(:).',
'In GeoServer, this is the Workspace Name. Within the WFS getCapabilities, this is the FeatureType Name part before the colon(:).': 'In GeoServer, this is the Workspace Name. Within the WFS getCapabilities, this is the FeatureType Name part before the colon(:).',
'In Inventories': 'En Inventarios',
'In Process': 'In Process',
'In Progress': 'En Progreso',
'In Transit': 'En Tránsito',
'In general, what are the greatest needs of older people, people with disabilities, children, youth and women in your community?': 'En general, ¿cuáles son las necesidades más grandes de las personas mayores, personas con discapacidades, niños, jóvenes y mujeres en su comunidad?',
'Inbound Mail Settings': 'Inbound Mail Settings',
'Incident': 'Incident',
'Incident Categories': 'Incident Categorias',
'Incident Details': 'Incident Detalles',
'Incident Report': 'Incident Report',
'Incident Report Details': 'Informe Detallado sobre Incidente',
'Incident Report added': 'Incident Report agregado',
'Incident Report deleted': 'Incident Report eliminada',
'Incident Report updated': 'Incident Report updated',
'Incident Reporting': 'Reportes de Incidentes',
'Incident Reporting System': 'Sistema de Reporte de Incidentes',
'Incident Reports': 'Informes de Incidentes',
'Incident added': 'Incident agregado',
'Incident deleted': 'Incident eliminada',
'Incident updated': 'Incidente actualizado',
'Incidents': 'Incidentes ',
'Incoming': 'Incoming',
'Incomplete': 'Incomplete',
'India': 'India',
'Individuals': 'Individuals',
'Indonesia': 'Indonesia',
'Industrial Crime': 'Industrial Crime',
'Industry Fire': 'Industry Fire',
'Industry close to village/camp': 'Industry close to village/camp',
'Infant (0-1)': 'Infantil (0-1)',
'Infectious Disease': 'Enfermedades Infecciosas',
'Infectious Diseases': 'Enfermedades Infecciosas',
'Infestation': 'Infestation',
'Informal Leader': 'Informal Leader',
'Informal camp': 'Informal camp',
'Information gaps': 'Information gaps',
'Infusion catheters available': 'Infusion catheters available',
'Infusion catheters need per 24h': 'Infusion catheters need per 24h',
'Infusion catheters needed per 24h': 'Infusion catheters needed per 24h',
'Infusions available': 'Infusions available',
'Infusions needed per 24h': 'Infusions needed per 24h',
'Injuries': 'Lesiones',
'Input Job': 'Entrada Empleo',
'Instance Type': 'Instance Tipo',
'Instance URL': 'Instance URL',
'Instant Porridge': 'Avena Instantáneo',
"Instead of automatically syncing from other peers over the network, you can also sync from files, which is necessary where there's no network. You can use this page to import sync data from files and also export data to sync files. Click the link on the right to go to this page.": "Instead of automatically syncing from other peers over the network, you can also sync from files, which is necessary where there's no network. You can use this page to import sync data from files and also export data to sync files. Click the link on the right to go to this page.",
'Institution': 'Institution',
'Insufficient': 'Insuficiente',
'Insufficient vars: Need module, resource, jresource, instance': 'Insuficiente VAR: ¿Necesita el módulo, un recurso jresource, instancia',
'Intake Items': 'Consumo de Artículos',
'Intergovernmental Organisation': 'Intergovernmental Organisation',
'Internal Features': 'Características Interior',
'Internal State': 'Interior del Estado',
'International NGO': 'ONG Internacional',
'International Organization': 'Organización Internacional',
'International Staff': 'Personal de las oficinas',
'Intervention': 'Intervention',
'Interview taking place at': 'Interview taking place at',
'Invalid': 'Invalid',
'Invalid Organisation ID!': 'Organización no válido de identificación!',
'Invalid Organization ID!': 'Invalid Organization ID!',
'Invalid Query': 'Consulta no válida',
'Invalid UUID!': 'UUID no válido!',
'Invalid email': 'Correo electrónico no válido',
'Invalid login': 'Invalid login',
'Invalid request!': 'Solicitud no válida!',
'Invalid ticket': 'Tiquete no válido',
'Inventories': 'Inventories',
'Inventories with Item': 'Inventarios con Artículo',
'Inventories with Items': 'Inventario con Artículos',
'Inventory': 'Inventario ',
'Inventory Item Details': 'Detalles Artículos Inventario',
'Inventory Item added': 'Insumo de inventario agregado',
'Inventory Item deleted': 'Inventory Insumo eliminada',
'Inventory Item updated': 'Inventory Insumo updated',
'Inventory Items': 'Inventory Insumos',
'Inventory Location': 'Inventario de Ubicación',
'Inventory Management': 'Administración de inventarios',
'Inventory Store': 'Lugar de almacenamiento',
'Inventory Store Details': 'Detalle Almacenamiento Inventario',
'Inventory Store added': 'Inventory Store agregado',
'Inventory Store deleted': 'Inventory Store eliminada',
'Inventory Store updated': 'Inventory Store updated',
'Inventory Stores': 'Inventario de Tiendas',
'Inventory of Effects': 'Inventario de los efectos',
'Inventory/Ledger': 'Inventario / Ledger',
'Iran': 'Irán',
'Iraq': 'Irak',
'Ireland': 'Irlanda',
'Is adequate food and water available for these institutions?': 'Is adequate food and water available for these institutions?',
'Is it safe to collect water?': 'Is it safe to collect water?',
'Is there any industrial or agro-chemical production close to the affected area/village?': 'Existe la producción de algun químico agricultural o industrial cerca al pueblo/area afectada?',
'Israel': 'Israel ',
'Issuing Authority': 'Autoridad expedidora',
'It gives four options: No Sync, Newer Timestamp, Keep All, Replace All': 'Le da cuatro opciones: No Sync, marca de hora más reciente, Guardar todos, Reemplazar todo',
'It is built using the Template agreed by a group of NGOs working together as the': 'It is built using the Template agreed by a group of NGOs working together as the',
'It is suggested to open the 2 locations into new tabs so that it can be decided which is the best one to keep out of the 2.': 'It is suggested to open the 2 locations into new tabs so that it can be decided which is the best one to keep out of the 2.',
'Italy': 'Italia',
'Item': 'Artículo',
'Item Added to Shipment': 'Item Added to Shipment',
'Item Catalog Categories': 'Tema Categorías Catálogo',
'Item Catalog Category': 'Categoría del artículo Catálogo',
'Item Catalog Category Details': 'Detalles del artículo Catálogo Categoría',
'Item Catalog Category added': 'Catálogo del artículo Categoría añadido',
'Item Catalog Category deleted': 'Catálogo del artículo Categoría eliminada',
'Item Catalog Category updated': 'Categoría del artículo Catálogo actualizado',
'Item Catalog Details': 'Detalles del artículo Catálogo',
'Item Catalog added': 'Catálogo del artículo añadido',
'Item Catalog deleted': 'Catálogo del artículo suprimido',
'Item Catalog updated': 'Catálogo del artículo actualizada',
'Item Catalogs': 'Tema Catálogos',
'Item Categories': 'Insumo Categorias',
'Item Category': 'Insumo Category',
'Item Category Details': 'Insumo Category Detalles',
'Item Category added': 'Insumo Category agregado',
'Item Category deleted': 'Artículo de Categoría eliminado',
'Item Category updated': 'Insumo Category updated',
'Item Details': 'Detalles del artículo',
'Item Packet Details': 'Item Packet Details',
'Item Packet added': 'Item Packet added',
'Item Packet deleted': 'Item Packet deleted',
'Item Packet updated': 'Item Packet updated',
'Item Packets': 'Item Packets',
'Item Sub-Categories': 'Sub-Categorías del artículo',
'Item Sub-Category': 'Partida Sub-Categoría',
'Item Sub-Category Details': 'Partida Sub-Categoría Detalles',
'Item Sub-Category added': 'Partida Sub-Categoría añadido',
'Item Sub-Category deleted': 'Partida Sub-Categoría eliminada',
'Item Sub-Category updated': 'Elemento Sub-Categoría actualizada',
'Item added': 'Artículo agregado',
'Item already in Bundle!': 'Tema ya en paquete!',
'Item already in Kit!': 'Elemento ya se encuentra en el Kit!',
'Item already in budget!': 'Tema que ya en el presupuesto!',
'Item deleted': 'Punto suprimido',
'Item updated': 'Tema actualizado',
'Items': 'Artículos',
'Items Sent from Warehouse': 'Items Sent from Warehouse',
'Jamaica': 'Jamaica',
'Japan': 'Japón',
'Japanese': 'Japanese',
'Jerry can': 'Jerry can',
'Jew': 'Judio',
'Job Title': 'Título del empleo',
'Jobs': 'Jobs',
'Jordan': 'Jordania',
'Just Once': 'Just Once',
'KPIs': 'KPI',
'Kazakhstan': 'Kazajstán',
'Keep All': 'Mantener todas las',
'Keep Local': 'Keep Local',
'Kenya': 'Kenia',
'Key': 'Clave',
'Key Details': 'Detalles de la Tecla',
'Key added': 'Añadido clave',
'Key deleted': 'Clave eliminado',
'Key updated': 'Clave actualizada',
'Keys': 'Claves',
'Kiribati': 'Kiribati',
'Kit': 'Equipo',
'Kit Contents': 'Contenido del Kit',
'Kit Details': 'Kit Detalles',
'Kit Updated': 'Kit de actualización',
'Kit added': 'Kit añadido',
'Kit deleted': 'Kit de borrado',
'Kit updated': 'Kit de actualización',
'Kits': 'Kits',
'Known Identities': 'Identidades conocidos',
'Known incidents of violence against women/girls': 'Known incidents of violence against women/girls',
'Known incidents of violence since disaster': 'Known incidents of violence since disaster',
'Korea, North': 'Corea del Norte',
'Korea, South': 'Corea del Sur',
'Kosovo': 'Kosovo',
'Kuwait': 'Kuwait',
'Kyrgyzstan': 'Kirguistán',
'LICENCE': 'LICENCIA',
'LICENSE': 'LICENSE',
'LMS Administration': 'LMS Administración',
'Label': 'Etiqueta',
'Lack of material': 'Lack of material',
'Lack of school uniform': 'Lack of school uniform',
'Lack of supplies at school': 'Lack de supplies at school',
'Lack of transport to school': 'Lack de transport to school',
'Lactating women': 'Lactating women',
'Lahar': 'Lahar',
'Landslide': 'Landslide',
'Language': 'Idioma',
'Laos': 'Laos',
'Last Name': 'Apellido',
'Last known location': 'Last known location',
'Last name': 'Apellido',
'Last synchronization on': 'Last synchronization on',
'Last synchronization time': 'Last synchronization time',
'Last updated by': 'Last updated by',
'Last updated on': 'Last updated on',
'Latitude': 'Latitud',
'Latitude & Longitude': 'Latitud y Longitud',
'Latitude is North-South (Up-Down). Latitude is zero on the equator and positive in the northern hemisphere and negative in the southern hemisphere.': 'Latitud es Norte-Sur (Arriba-Abajo). Latitud es cero sobre el ecuador y positiva en el hemisferio norte y negativo en el hemisferio sur. ',
'Latitude should be between': 'La Latitud debe estar entre',
'Latvia': 'Letonia',
'Law enforcement, military, homeland and local/private security': 'Fuerzas del orden, militar, local o de seguridad privada',
'Layer': 'Capa',
'Layer Details': 'Detalles de la Capa',
'Layer added': 'Capa añadido',
'Layer deleted': 'Capa eliminado',
'Layer updated': 'Capa de actualización',
'Layers': 'Capas',
'Layers updated': 'Capas actualización',
'Layout': 'Layout',
'Lebanon': 'Líbano',
'Left-to-Right': 'De izquierda a derecha',
'Legend Format': 'Formato de leyenda',
'Length': 'Longitud',
'Lesotho': 'Lesoto',
'Level': 'Level',
"Level is higher than parent's": "Level is higher than parent's",
'Liberia': 'Liberia',
'Library support not available for OpenID': 'No hay soporte disponible en la librería para OpenID ',
'Libya': 'Libia',
'Liechtenstein': 'Liechtenstein',
'Line': 'Línea',
'Link Item & Shipment': 'Enlace de Elemento y envío',
'Link an Item & Shipment': 'Vincular un elemento y envío',
'Linked Records': 'Linked Records',
'Linked records': 'Registros enlazados',
'Lips, Shape': 'Labios, Forma',
'List': 'Lista',
'List ': 'Lista',
'List / Add Baseline Types': 'List / Add Baseline Types',
'List / Add Impact Types': 'List / Add Impact Types',
'List / Add Services': 'Lista / Agregar Services',
'List / Add Types': 'Listar / Agregar Tipos',
'List Activities': 'Listar Actividades',
'List Addresses': 'Lista de direcciones',
'List Aid Requests': 'Listar Solicitudes de Auxilio',
'List All': 'Todos los artículos',
'List All Entries': 'List All Entries',
'List All Group Memberships': 'Todos los artículos Grupo de Miembros',
'List All Memberships': 'Listar Todas las Membresías',
'List Assessment Summaries': 'List Assessment Summaries',
'List Assessments': 'Listar Evaluaciones',
'List Baseline Types': 'List Baseline Types',
'List Baselines': 'List Baselines',
'List Body Finds': 'Consejo de lista de búsquedas',
'List Budgets': 'Lista de Presupuestos',
'List Bundles': 'Paquetes Lista',
'List Catalog Items': 'List Catalog Items',
'List Category<>Sub-Category<>Catalog Relation': 'Lista de categorías<>Sub-Categoría<>Relación Catálogo>',
'List Checklists': 'Listas Lista',
'List Cluster Subsectors': 'List Cluster Subsectors',
'List Clusters': 'Listar Clusters',
'List Configs': 'Configs Lista',
'List Conflicts': 'List Conflicts',
'List Contacts': 'Lista de contactos',
'List Distribution Items': 'Listar Distribution Insumos',
'List Distributions': 'Listar Distribuciones',
'List Documents': 'Listar Documents',
'List Donors': 'Listar Donantes',
'List Feature Classes': 'Clases Lista de características',
'List Feature Groups': 'Lista de características de Grupos',
'List Feature Layers': 'Listar Feature Layers',
'List Finds': 'Encuentra la lista',
'List Flood Reports': 'Listar Inundación Reports',
'List Found People': 'Lista encontró personas',
'List Groups': 'Grupos Lista',
'List Groups/View Members': 'Grupos Lista / Ver miembros',
'List Hospitals': 'Lista de Hospitales',
'List Identities': 'Listar Identidades',
'List Images': 'Listar Imágenes ',
'List Impact Assessments': 'Listar Evaluaciones de Impacto',
'List Impact Types': 'List Impact Types',
'List Impacts': 'List Impacts',
'List Incident Reports': 'Listar Incident Reports',
'List Incidents': 'Listar Incidents',
'List Inventory Items': 'Listar Inventory Insumos',
'List Inventory Stores': 'Listar Locaciones de Inventario',
'List Item Catalog Categories': 'Categorías de la Lista Catálogo del artículo',
'List Item Catalogs': 'Lista de elementos Catálogos',
'List Item Categories': 'Listar Insumo Categorias',
'List Item Packets': 'List Item Packets',
'List Item Sub-Categories': 'Sub-Categorías lista de elementos',
'List Items': 'Elementos de lista',
'List Keys': 'Teclas de lista',
'List Kits': 'Lista de Kits',
'List Layers': 'Capas Lista',
'List Locations': 'Listar ubicaciones',
'List Log Entries': 'Entradas lista de registro',
'List Markers': 'Los marcadores de lista',
'List Members': 'List Members',
'List Memberships': 'Participación en la lista',
'List Messages': 'Lista de Mensajes',
'List Metadata': 'Lista de los metadatos',
'List Missing People': 'Lista de Personas Desaparecidas',
'List Missing Persons': 'Listar Personas Desaparecidas',
'List Need Types': 'List Need Types',
'List Needs': 'List Needs',
'List Notes': 'List Notes',
'List Offices': 'Lista de Oficinas',
'List Organisations': 'Organizaciones Lista',
'List Organizations': 'Lista de Organizaciones',
'List Partners': 'Lista Parejas',
'List Peers': 'List Peers',
'List People': 'La gente lista',
'List Personal Effects': 'Lista de Efectos Personales',
'List Persons': 'Lista de Personas',
'List Photos': 'Lista de fotos',
'List Positions': 'Posiciones Lista',
'List Problems': 'List Problemas',
'List Projections': 'Lista de Proyecciones',
'List Projects': 'Lista de Proyectos',
'List Rapid Assessments': 'Listar Evaluaciones Rápidas',
'List Received Items': 'List Received Items',
'List Received Shipments': 'List Received Shipments',
'List Records': 'Lista de Registros',
'List Registrations': 'Lista de Registros',
'List Relief Items': 'List Auxilio Insumos',
'List Reports': 'Informes de la lista',
'List Request Items': 'List Requerimiento Insumos',
'List Requests': 'Lista de Solicitudes',
'List Resources': 'Lista de Recursos',
'List Responses': 'Lista Respuestas',
'List Rivers': 'Listado de Ríos ',
'List Roles': 'Funciones de lista',
'List School Districts': 'List School Districts',
'List School Reports': 'List School Reports',
'List Sections': 'List Sections',
'List Sector': 'List Sector',
'List Sectors': 'Sectores Lista',
'List Sent Items': 'List Sent Items',
'List Service Profiles': 'Perfiles Lista de Servicios',
'List Settings': 'Configuración de la lista',
'List Shelter Services': 'List Albergue Services',
'List Shelter Types': 'Listar Tipos de Refugio',
'List Shelters': 'Refugios Lista',
'List Shipment Transit Logs': 'Lista de Registros de Tránsito envío',
'List Shipment/Way Bills': 'Lista de envío / Camino proyectos de ley',
'List Shipment<>Item Relation': 'Envío de lista<>Tema relación',
'List Shipments': 'List Shipments',
'List Sites': 'Lista de Sitios',
'List Skill Types': 'List Habilidad Tipos',
'List Skills': 'Lista de Habilidades',
'List Solutions': 'List Solutions',
'List Sources': 'Lista de fuentes',
'List Staff': 'Listar empleados',
'List Staff Types': 'Tipos de Lista Personal',
'List Status': 'List Status',
'List Storage Bin Type(s)': 'Lista de almacenamiento Tipo Bin (s)',
'List Storage Bins': 'Papeleras Lista de almacenamiento',
'List Storage Location': 'Lista de ubicación de almacenamiento',
'List Subscriptions': 'List Subscriptions',
'List Survey Answers': 'List Inspección Answers',
'List Survey Questions': 'Lista Preguntas Cuestionario',
'List Survey Sections': 'List Inspección Sections',
'List Survey Series': 'List Inspección Series',
'List Survey Templates': 'List Inspección Templates',
'List Tasks': 'Tareas Lista',
'List Teams': 'Listar Equipos',
'List Themes': 'Listar Temas',
'List Tickets': 'Listar Tiquetes',
'List Tracks': 'Lista de temas',
'List Units': 'Lista de Unidades',
'List Users': 'Lista de Usuarios',
'List Volunteers': 'List Volunteers',
'List Warehouse Items': 'List Warehouse Items',
'List Warehouses': 'List Warehouses',
'List all': 'Listar todos',
'List of Items': 'Lista de artículos',
'List of Missing Persons': 'List of Missing Persons',
'List of Peers': 'List of Peers',
'List of Reports': 'List de Reports',
'List of Requests': 'Lista de Solicitudes',
'List of Spreadsheets': 'List de Spreadsheets',
'List of Spreadsheets uploaded': 'List de Spreadsheets uploaded',
'List of Volunteers for this skills set': 'List of Volunteers for this skills set',
'List of addresses': 'Lista de direcciones',
'List unidentified': 'Lista sin identificar',
'List/Add': 'Listar/Agregar',
'Lists "who is doing what & where". Allows relief agencies to coordinate their activities': 'Lista "quién está haciendo qué y dónde?". Permite coordinar actividades a las agencias de ayuda.',
'Lithuania': 'Lituania',
'Live Help': 'Ayuda en Vivo',
'Livelihood': 'Subsistencia',
'Load Cleaned Data into Database': 'Load Cleaned Data into Database',
'Load Details': 'Cargar Detalles',
'Load Raw File into Grid': 'Load Raw File into Grid',
'Load the details to help decide which is the best one to keep out of the 2.': 'Cargue los detalles para ayudar a decidir cuál es el mejor para mantenerse fuera de 2.',
'Loading': 'Loading',
'Loading Locations...': 'Loading Locations...',
'Local Name': 'Nombre Local',
'Local Names': 'Nombres Locales',
'Location': 'Ubicación',
'Location 1': 'Location 1',
'Location 2': 'Location 2',
'Location De-duplicated': 'Ubicación No-duplicada',
'Location Details': 'Detalles de la Localización',
'Location added': 'Ubicación añadido',
'Location deleted': 'Ubicación eliminada',
'Location details': 'Detalles de la Ubicación ',
'Location updated': 'Ubicación actualizada',
'Location: ': 'Location: ',
'Locations': 'Ubicaciones',
'Locations De-duplicator': 'Ubicación De dupliacods',
'Locations of this level need to have a parent of level': 'Locations of this level need to have a parent of level',
'Locations should be different!': 'Las ubicaciones deben ser diferentes!',
'Lockdown': 'Lockdown',
'Log': 'Historial',
'Log Entry Details': 'Entrar detalles del ingreso',
'Log entry added': 'Entrada de registro añadido',
'Log entry deleted': 'Entrada del registro eliminado',
'Log entry updated': 'Entrada de registro actualizado',
'Logged in': 'Identificados',
'Logged out': 'Desconectado',
'Login': 'Ingreso',
'Logistics': 'Logística',
'Logistics Management': 'Gestión Logística',
'Logistics Management System': 'Sistema de Gestión de Logística',
'Logo': 'Logo',
'Logo file %s missing!': 'Archivo logo %s está perdido!',
'Logout': 'Desconectarse',
'Long Text': 'Long Text',
'Longitude': 'Longitud',
'Longitude is West - East (sideways). Latitude is North-South (Up-Down). Latitude is zero on the equator and positive in the northern hemisphere and negative in the southern hemisphere. Longitude is zero on the prime meridian (Greenwich Mean Time) and is positive to the east, across Europe and Asia. Longitude is negative to the west, across the Atlantic and the Americas. These need to be added in Decimal Degrees.': 'Longitude is West - East (sideways). Latitude is North-South (Up-Down). Latitude is zero on the equator and positive in the northern hemisphere and negative in the southern hemisphere. Longitude is zero on the prime meridian (Greenwich Mean Time) and is positive to the east, across Europe and Asia. Longitude is negative to the west, across the Atlantic and the Americas. These need to be added in Decimal Degrees.',
'Longitude is West - East (sideways). Longitude is zero on the prime meridian (Greenwich Mean Time) and is positive to the east, across Europe and Asia. Longitude is negative to the west, across the Atlantic and the Americas.': 'Longitud es Este - Oeste (a los lados). La Longitud es cero en el primer meridiano (GMT) y es positivo hacia el este, a través de Europa y Asia. La Longitud es negativa hacia el oeste, a través del Atlántico y las Américas.',
'Longitude should be between': 'Longitude should be between',
'Looting': 'Looting',
'Lost': 'Lost',
'Lost Password': 'Recuperar contraseña',
'Low': 'Bajo',
'Luxembourg': 'Luxemburgo',
'MOH UUID': 'Ministerio de Salud UUID',
'Macedonia': 'Macedonia',
'Machine with which data was exchanged.': 'Máquina con que los datos se han intercambiado.',
'Madagascar': 'Madagascar',
'Magnetic Storm': 'Magnetic Storm',
'Main cash source': 'Principal fuente de efectivo',
'Main income sources before disaster': 'Main income sources before disaster',
'Major outward damage': 'Gran daño externo',
'Make Pledge': 'Hacer Promesa',
'Make Request': 'Make Request',
'Make a Request': 'Realizar una petición',
'Make a Request for Aid': 'Make a Request for Aid',
'Make a request': 'Solicite más información',
'Make preparations per the <instruction>': 'Make preparations per the <instruction>',
'Malawi': 'Malawi',
'Malaysia': 'Malasia',
'Maldives': 'Maldivas',
'Male': 'Male',
'Mali': 'Malí',
'Malnutrition present prior to disaster': 'Malnutrition present prior to disaster',
'Malta': 'Malta',
'Manage': 'Administrar',
'Manage Category': 'Administrar Categoría',
'Manage Images': 'Manejar Imágenes',
'Manage Item catalog': 'Administrar artículo catálogo',
'Manage Items Catalog': 'Gestión de Catálogo de Artículos',
'Manage Kits': 'Administrar Kits',
'Manage Relief Item Catalogue': 'Administrar Catálogo de Artículos de Ayuda',
'Manage Sub-Category': 'Administrar Subcategoría',
'Manage Users & Roles': 'Administrar usuarios y roles',
'Manage Warehouses': 'Gestion de Bodegas',
'Manage Warehouses/Sites': 'Gestión de Bodegas/Sitios',
'Manage requests of hospitals for assistance.': 'Gestionar las solicitudes de los hospitales para recibir asistencia.',
'Manage volunteers by capturing their skills, availability and allocation': 'Administrar voluntarios capturando sus habilidades, disponibilidad y asignaciones',
'Manager': 'Gerente',
'Managing Office': 'Managing Office',
'Managing, Storing and Distributing Items.': 'Gestión, Almacenamiento y Distribución de Artículos.',
'Managing, Storing and Distributing Relief Items': 'Gestión, Almacenamiento y Distribución de Artículos de Ayuda',
'Managing, Storing and Distributing Relief Items.': 'Gestión, Almacenamiento y Distribución de Artículos de Ayuda.',
'Mandatory. In GeoServer, this is the Layer Name. Within the WFS getCapabilities, this is the FeatureType Name part after the colon(:).': 'En GeoServer, este es el Nombre de Capa. Dentro de WFS getCapabilities, esta es la parte Nombre Tipo de Característica tras los dos puntos (:).',
'Mandatory. The URL to access the service.': 'Mandatory. The URL to access the service.',
'Manual': 'Manual',
'Manual Synchronization': 'Sincronización Manual',
'Many': 'Many',
'Map': 'Mapa',
'Map Height': 'Mapa de Altura',
'Map Service Catalogue': 'Mapa del Catálogo de Servicios',
'Map Settings': 'Configuración de Mapa',
'Map Viewing Client': 'Mapa Videos de Clientes',
'Map Width': 'Ancho de Mapa',
'Map of Hospitals': 'Map de Hospitals',
'Mapa': 'Mapa',
'Mapping': 'Cartografía',
'Marine Security': 'Marine Security',
'Marital Status': 'Estado Civil',
'Marker': 'Marcador',
'Marker Details': 'Marcador Detalles',
'Marker added': 'Marcador añadido',
'Marker deleted': 'Marcador eliminado',
'Marker updated': 'Marcador de actualización',
'Markers': 'Marcadores',
'Marshall Islands': 'Las Islas Marshall',
'Master Message Log': 'Registro de Mensaje Principal ',
'Master Message Log to process incoming reports & requests': 'Registro Maestro de Mensajes para procesar reportes entrantes y peticiones',
'Match Percentage': 'Match Percentage',
'Match percentage indicates the % match between these two records': 'Match percentage indicates the % match between these two records',
'Matching Records': 'Registros coincidentes',
'Matrix of Choices (Multiple Answers)': 'Matrix de Choices (Multiple Answers)',
'Matrix of Choices (Only one answer)': 'Matrix de Choices (Only one answer)',
'Matrix of Text Fields': 'Matrix de Text Fields',
'Mauritania': 'Mauritania',
'Mauritius': 'Mauricio',
'Max Persons per Dwelling': 'Max Persons per Dwelling',
'Maximum Weight': 'Maximum Weight',
'Maximum weight capacity of the Storage Location followed by choosing the unit from the drop down list.': 'Maximum weight capacity of the Storage Location followed by choosing the unit from the drop down list.',
'Maximum weight capacity of the items the storage bin can contain. followed by choosing the unit from the drop down list.': 'Capacidad máxima de peso que los elementos de la bandeja de almacenamiento puede contener. seguido por la elección de la unidad de la lista desplegable.',
'Measure Area: Click the points around the polygon & end with a double-click': 'Medida Zona: Haga clic en los puntos alrededor del polígono y terminar con un doble clic',
'Measure Length: Click the points along the path & end with a double-click': 'Medida Longitud: Haga clic en los puntos a lo largo del camino y terminar con un doble clic',
'Measures': 'Medidas',
'Media Manager': 'Media Manager',
'Medical Attention': 'Atención Médica',
'Medical Staff': 'Personal Médico',
'Medical Supplies': 'Suministros médicos',
'Medical and public health': 'Medical and public health',
'Medicine': 'Medicina',
'Medium': 'Medio',
'Megabytes per Month': 'Megabytes por Mes',
'Members': 'Miembros ',
'Membership': 'Afiliación',
'Membership Details': 'Datos de los miembros',
'Membership added': 'Afiliación añadida',
'Membership deleted': 'Composición eliminado',
'Membership updated': 'Composición actualizada',
'Memberships': 'Membresías',
'Mensajería': 'Mensajería',
'Mental': 'Mental',
'Message': 'Mensaje',
'Message Details': 'Detalles del Mensaje',
'Message Variable': 'Message Variable',
'Message added': 'Mensaje agregado',
'Message deleted': 'Message eliminada',
'Message sent to outbox': 'Mensaje enviado a la bandeja de salida',
'Message updated': 'Mensaje actualizado',
'Message variable': 'Message variable',
'Messages': 'Mensajes',
'Messaging': 'Mensajería',
'Messaging Module': 'Módulo de Mensajería',
'Messaging settings updated': 'Configuración de mensajería actualizada',
'Metadata': 'Metadatos',
'Metadata Details': 'Detalles de los metadatos',
'Metadata added': 'Metadatos añadido',
'Metadata can be supplied here to be applied to all uploaded photos, if desired.': 'Los metadatos pueden ser suministrados aquí para ser aplicado a todas las fotos subidas, si lo desea.',
'Metadata deleted': 'Metadatos eliminado',
'Metadata updated': 'Actualizados los metadatos',
'Meteorite': 'Meteorite',
'Meteorological (inc. flood)': 'Meteorological (inc. inundación)',
'Method used': 'Método empleado',
'Mexico': 'México',
'Micronesia': 'Micronesia',
'Micronutrient malnutrition prior to disaster': 'Micronutrient malnutrition prior to disaster',
'Middle Name': 'Middle Name',
'Migrants or ethnic minorities': 'Migrantes o minorías étnicas',
'Military': 'Militar',
'Minimum Bounding Box': 'Minimum Bounding Box',
'Minorities participating in coping activities': 'Minorías participando en actividades de cocina',
'Minutes must be a number between 0 and 60': 'Minutes must be a number between 0 and 60',
'Minutes must be between 0 and 60': 'Minutos deben ser entre 0 y 60',
'Minutes per Month': 'Minutos por Mes',
'Minutes should be a number greater than 0 and less than 60': 'Minutes should be a number greater than 0 and less than 60',
'Minutes should be greater than 0 and less than 60': 'Minutos deben ser más que 0 y menos que 60',
'Miscellaneous': 'Misceláneo',
'Missing': 'Desaparecida',
'Missing Person': 'La persona desaparecida',
'Missing Person Details': 'Detalles de personas desaparecidas',
'Missing Person Reports': 'Reportes de personas desaparecidas',
'Missing Persons': 'Personas Deparecidas',
'Missing Persons Registry': 'Registro de Personas Desaparecidas',
'Missing Persons Report': 'Reporte de personas desaparecidas',
'Missing Report': 'Reporte de desapariciones',
'Missing Senior Citizen': 'Personas mayores desaparecidas',
'Missing Vulnerable Person': 'Persona vulnerable desaparecida',
'Mobile': 'Teléfono celular',
'Mobile Assess.': 'Mobile Assess.',
'Mobile Basic Assessment': 'Mobile Basic Assessment',
'Mobile Phone': 'Teléfono móvil',
'Mobile Phone #': 'Mobile Phone #',
'Mobile Settings': 'Configuración del telefono celular',
'Mobile settings updated': 'Configuración móvil actualizado',
'Mode': 'Modo',
'Modem Settings': 'Modem Settings',
'Modem settings updated': 'Modem settings updated',
'Moderator': 'Moderator',
'Modify Feature: Select the feature you wish to deform & then Drag one of the dots to deform the feature in your chosen manner': 'Modificar Reportaje: Seleccione la característica que desee para deformar y arrastre uno de los puntos a deformar la característica en su forma elegida',
'Modify Information on groups and individuals': 'Modificar información sobre los grupos e individuos',
'Modifying data in spreadsheet before importing it to the database': 'Modifying data in spreadsheet before importing it to the database',
'Module': 'Módulo',
'Module Administration': 'Módulo de Administración',
'Module disabled!': 'Module disabled!',
'Module provides access to information on current Flood Levels.': 'Modulo ofrece acceso a información acerca de los niveles actuales de inundación.',
'Module stores structured reports done by Professional Organisations - currently data includes WFP Assessments & School Reports.': 'Module stores structured reports done by Professional Organisations - currently data includes WFP Evaluaciones & School Reports.',
'Module stores structured reports done by Professional Organisations - currently data includes WFP Assessments.': 'Reportes estructurados sobre módulos de almacenamiento realizado por Organizaciones Profesionales - datos actualmente influyen Evaluación WFP. ',
'Module stores structured reports done by Professional Organisations.': 'Module stores structured reports done by Professional Organisations.',
'Module stores structured reports done by Professional Organizations - currently data includes WFP Assessments.': 'Module stores structured reports done by Professional Organizations - currently data includes WFP Assessments.',
'Moldova': 'Moldavia',
'Monaco': 'Mónaco',
'Monday': 'Lunes',
'Mongolia': 'Mongolia',
'Montenegro': 'Montenegro',
'Monthly Cost': 'Costo mensual',
'Monthly Salary': 'Salario Mensual',
'Months': 'Meses',
'Morgue Status': 'Morgue Estado',
'Morgue Units Available': 'Morgue Unidades Disponibles',
'Morocco': 'Marruecos',
'Mosque': 'Mezquita',
'Motorcycle': 'Motorcycle',
'Moustache': 'Bigote',
'Mouth, Size': 'Boca, tamaño',
'Move Feature: Drag feature to desired location': 'Mover: Medida de seguridad Arrastre a la ubicación deseada',
'Movements (Filter In/Out/Lost)': 'Movimientos (filtro de entrada / salida / Lost)',
'Mozambique': 'Mozambique',
'MultiPolygon': 'MultiPolígono',
'Multiple': 'Multiple',
'Multiple Choice (Multiple Answers)': 'Multiple Choice (Multiple Answers)',
'Multiple Choice (Only One Answer)': 'Multiple Choice (Only One Answer)',
'Multiple Text Fields': 'Multiple Text Fields',
'Multiplicator': 'Multiplicador',
'Muslim': 'Musulmán',
'My Tasks': 'Mis tareas',
'Myanmar': 'Myanmar',
'Módulo de Tickets': 'Módulo de Tiquetes',
'Módulo de presupuestos': 'Módulo de presupuestos',
'N/A': 'N/A',
"NB SMS requests are filtered to just those which are 'actionable', whilst the Tweet requests are unfiltered, so that is likely to be a good place to start Searching.": 'NB solicitudes de SMS se filtran a sólo aquellos que son "acciones concretas", mientras que las peticiones son Tweet sin filtrar, de modo que es probable que sea un buen lugar para comenzar la búsqueda.',
'Nagorno-Karabakh': 'Nagorno-Karabaj',
'Name': 'Nombre',
'Name and/or ID': 'Nombre y/o identificación ',
'Name and/or ID Label': 'Nombre y / o Etiqueta de Identificación',
'Name of School': 'Name de School',
'Name of Storage Bin Type.': 'Nombre del Tipo de Contenedor de Almacenamiento',
'Name of the file (& optional sub-path) located in static which should be used for the background of the header.': 'Name of the file (& optional sub-path) located in static which should be used for the background of the header.',
'Name of the file (& optional sub-path) located in static which should be used for the top-left image.': 'Name of the file (& optional sub-path) located in static which should be used for the top-left image.',
'Name of the file (& optional sub-path) located in views which should be used for footer.': 'Name of the file (& optional sub-path) located in views which should be used for footer.',
'Name of the person in local language and script (optional).': 'Name de the person in local language and script (optional).',
'Name of the unit or department this report refers to. Leave empty if your hospital has no subdivisions.': 'Name de the unit or department this report refers to. Leave empty if your hospital has no subdivisions.',
'Name or ID': 'Nombre o ID',
'Names can be added in multiple languages': 'Names can be agregado in multiple languages',
'Namibia': 'Namibia',
'National ID Card': 'Documento de Identidad nacional',
'National NGO': 'ONG Nacional',
'National Staff': 'Nacional de Personal',
'Nationality': 'Nacionalidad',
'Nationality of the person.': 'Nationality de the person.',
'Nauru': 'Nauru',
'Nautical Accident': 'Nautical Accident',
'Nautical Hijacking': 'Nautical Hijacking',
'Neck, Length': 'Del cuello, longitud',
'Neck, Peculiarities': 'Cuello, Peculiaridades',
'Neck, Shape': 'Cuello, Forma',
'Need Type': 'Need Type',
'Need Type Details': 'Need Type Details',
'Need Type added': 'Need Type added',
'Need Type deleted': 'Need Type deleted',
'Need Type updated': 'Need Type updated',
'Need Types': 'Need Types',
"Need a 'url' argument!": "¡Se necesita un argumento 'url'!",
'Need added': 'Need added',
'Need deleted': 'Need deleted',
'Need to configure Twitter Authentication': 'Need to configure Twitter Authentication',
'Need to select 2 Locations': 'Need to select 2 Locations',
'Need to specify a Budget!': 'Necesidad de especificar un presupuesto!',
'Need to specify a Kit!': 'Need to specify a Kit!',
'Need to specify a Resource!': 'Need to specify a Resource!',
'Need to specify a budget!': 'Necesidad de especificar un presupuesto!',
'Need to specify a bundle!': 'Necesidad de especificar un paquete!',
'Need to specify a feature group!': 'Necesidad de especificar un grupo de función!',
'Need to specify a group!': 'Necesidad de especificar un grupo!',
'Need to specify a kit!': 'Necesidad de especificar un kit!',
'Need to specify a location to search for.': 'Necesidad de especificar una ubicación para buscar.',
'Need to specify a role!': 'Necesidad de especificar un papel!',
'Need to specify a table!': 'Necesidad de especificar una tabla!',
'Need to specify a user!': 'Necesidad de especificar un usuario!',
'Need updated': 'Need updated',
'Needs': 'Needs',
'Needs Details': 'Needs Details',
'Needs elaboration!!!': 'Needs elaboration!!!',
'Needs to reduce vulnerability to violence': 'Needs to reduce vulnerability to violence',
'Negative Flow Isolation': 'Aislamiento flujo negativo',
'Neighbourhood': 'Neighbourhood',
'Neonatal ICU': 'UCI Neonatal',
'Neonatology': 'Neonatología',
'Nepal': 'Nepal',
'Netherlands': 'Países Bajos',
'Network': 'Red',
'Neurology': 'Neurología',
'New': 'Nuevo',
'New Assessment reported from': 'New Assessment reported from',
'New Body Find': 'Encuentra cuerpo nuevo',
'New Checklist': 'Nueva lista de verificación',
'New Group': 'Nuevo grupo',
'New Peer': 'New Peer',
'New Problem': 'Nuevo Problema',
'New Record': 'Nuevo registro',
'New Report': 'Nuevo informe',
'New Request': 'Nueva Requerimiento',
'New Solution Choice': 'Escogencia de Nuevas Soluciones ',
'New Synchronization Peer': 'New Synchronization Peer',
'New Zealand': 'Nueva Zelanda',
'New cases in the past 24h': 'New cases in the past 24h',
'Newer Timestamp': 'Los nuevos Timestamp',
'News': 'Noticias',
'Next': 'Next',
'Next View': 'Ver siguiente',
'Nicaragua': 'Nicaragua',
'Niger': 'Níger',
'Nigeria': 'Nigeria',
'No': 'No',
'No ': 'No ',
'No Activities Found': 'No Activities Found',
'No Addresses currently registered': 'No existen direcciones registradas actualmente',
'No Aid Requests currently registered': 'No existen Solicitudes de Ayuda registradas actualmente',
'No Assessment Summaries currently registered': 'No Assessment Summaries currently registered',
'No Assessments currently registered': 'No Evaluaciones currently registered',
'No Baseline Types currently registered': 'No Baseline Types currently registered',
'No Baselines currently registered': 'No Baselines currently registered',
'No Budgets currently registered': 'No Presupuestos actualmente registrados',
'No Bundles currently registered': 'N Paquetes actualmente registrados',
'No Catalog Items currently registered': 'No Catalog Items currently registered',
'No Category<>Sub-Category<>Catalog Relation currently registered': 'Categoría n<>Sub-Categoría<>Relación Catálogo> actualmente registrados',
'No Checklist available': 'N Lista de verificación disponibles',
'No Cluster Subsectors currently registered': 'No Cluster Subsectors currently registered',
'No Clusters currently registered': 'No hay Agrupamientos registrados actualmente',
'No Configs currently defined': 'No hay Configuración definida actualmente',
'No Contacts currently registered': 'N Contactos registrados actualmente',
'No Details currently registered': 'No Detalles actualmente registrados',
'No Distribution Items currently registered': 'No hay artículos de distrubición registradas actualmente',
'No Distributions currently registered': 'No Distribuciones currently registered',
'No Documents found': 'No Documents found',
'No Donors currently registered': 'No Donors currently registered',
'No Emails currently in InBox': 'No hay mensajes de correo electrónico actualmente en Bandeja de entrada',
'No Emails currently in OutBox': 'No hay mensajes de correo electrónico actualmente en Bandeja de salida',
'No Emails currently in Sent': 'No hay mensajes de correo electrónico en la actualidad en Sent',
'No Feature Classes currently defined': 'No hay clases de funciones definidas actualmente',
'No Feature Groups currently defined': 'Grupos de funciones no está definido actualmente',
'No Feature Layers currently defined': 'No Feature Layers currently defined',
'No Flood Reports currently registered': 'No Inundación Reports currently registered',
'No Group Memberships currently registered': 'N Grupo de Miembros registrados actualmente',
'No Groups currently defined': 'No hay grupos definidos actualmente',
'No Groups currently registered': 'N Grupos registrados actualmente',
'No Hospitals currently registered': 'No hay Clínicas registrados actualmente',
'No Identification Report Available': 'No Informe de identificación disponibles',
'No Identities currently registered': 'No Identidades actualmente registrados',
'No Image': 'No hay imágen',
'No Image currently defined': 'N de la imagen actualmente definidos',
'No Images currently registered': 'No hay imágenes registradas en la actualidad',
'No Impact Types currently registered': 'No Impact Types currently registered',
'No Impacts currently registered': 'No Impacts currently registered',
'No Incident Reports currently registered': 'No Incident Reports currently registered',
'No Incidents currently registered': 'No Incidents currently registered',
'No Inventory Items currently registered': 'No Inventory Insumos currently registered',
'No Inventory Stores currently registered': 'No hay almacenes de Inventario registrados actualmente',
'No Item Catalog Category currently registered': 'Catálogo del artículo n º Categoría actualmente registrados',
'No Item Catalog currently registered': 'Catálogo del artículo n actualmente registrados',
'No Item Categories currently registered': 'No se encuentra ningún Artículo de Categoría registrado actualmente',
'No Item Packets currently registered': 'No Item Packets currently registered',
'No Item Sub-Category currently registered': 'Ningún artículo Subcategoría actualmente registrados',
'No Item currently registered': 'Ningún artículo actualmente registrados',
'No Items currently registered': 'No hay elementos registrados actualmente',
'No Items currently requested': 'No Insumos currently requested',
'No Keys currently defined': 'No está definido actualmente claves',
'No Kits currently registered': 'No Kits actualmente registrados',
'No Locations currently available': 'No Lugares disponibles en la actualidad',
'No Locations currently registered': 'No existen lugares registrados actualmente',
'No Markers currently available': 'No hay marcadores disponibles en la actualidad',
'No Members currently registered': 'No existen Miembros registrados actualmente',
'No Memberships currently defined': 'No hay Membresías definidas actualmente',
'No Memberships currently registered': 'No hay Membresía actualmente registrada',
'No Messages currently in Outbox': 'Actualmente no existen mensajes en la Bandeja de Entrada',
'No Metadata currently defined': 'N de metadatos definidos actualmente',
'No Need Types currently registered': 'No Need Types currently registered',
'No Needs currently registered': 'No Needs currently registered',
'No Offices currently registered': 'N Oficinas actualmente registrados',
'No Offices found!': 'No Offices found!',
'No Organisations registered!': 'N Las organizaciones inscritas!',
'No Organizations currently registered': 'Las organizaciones que actualmente no registrados',
'No Organizations registered!': 'No hay Organizaciones registradas!',
'No Packets for Item': 'No Packets for Item',
'No Partners currently registered': 'No socios registrados actualmente',
'No Peers currently registered': 'No Peers currently registered',
'No People currently registered': 'Nadie ha registrado en',
'No People currently registered in this shelter': 'No People currently registered in this shelter',
'No Persons currently registered': 'N Las personas actualmente registradas',
'No Persons currently reported missing': 'No Persons currently reported missing',
'No Persons found': 'No hay Personas encontradas',
'No Photos found': 'No Photos found',
'No Presence Log Entries currently registered': 'No hay Entradas al Registro de Presencia actualmente',
'No Problems currently defined': 'No Problemas currently defined',
'No Projections currently defined': 'No está definido actualmente Proyecciones',
'No Projects currently registered': 'No hay proyectos registrados actualmente',
'No Rapid Assessments currently registered': 'No Rapid Assessments currently registered',
'No Received Items currently registered': 'No Received Items currently registered',
'No Received Shipments': 'No Received Shipments',
'No Records currently available': 'No hay registros disponibles actualmente',
'No Records matching the query': 'No Records matching the query',
'No Reports currently registered': 'No Reports currently registered',
'No Request Items currently registered': 'No Request Items currently registered',
'No Request Shipments': 'No Request Shipments',
'No Requests have been made yet': 'No Requests have been made yet',
'No Requests match this criteria': 'No Requests match this criteria',
'No Responses currently registered': 'No hay Respuestas registradas actualmente',
'No Rivers currently registered': 'No Rivers currently registered',
'No Roles currently defined': 'No existen funciones definidas actualmente',
"No SMS's currently in InBox": 'N SMS actualmente en Bandeja de entrada',
"No SMS's currently in OutBox": 'N SMS actualmente en Bandeja de salida',
"No SMS's currently in Sent": 'N de SMS enviados en la actualidad',
'No School Districts currently registered': 'No hay Distritos Escolares actualmente registrados',
'No School Reports currently registered': 'No School Reports currently registered',
'No Sections currently registered': 'No hay secciones registradas actualmente',
'No Sectors currently registered': 'N Sectores actualmente registrados',
'No Sent Items currently registered': 'No Sent Items currently registered',
'No Sent Shipments': 'No Sent Shipments',
'No Settings currently defined': 'No hay ningún ajuste definido actualmente',
'No Shelter Services currently registered': 'No Albergue Services currently registered',
'No Shelter Types currently registered': 'No Albergue Tipos currently registered',
'No Shelters currently registered': 'No hay refugios actualmente registrados',
'No Shipment Transit Logs currently registered': 'No Y Tránsito Registros actualmente registrados',
'No Shipment/Way Bills currently registered': 'No hay Cargas/Recibos de porte actualmente registrados',
'No Shipment<>Item Relation currently registered': 'No Y<>Tema Relación actualmente registrados',
'No Sites currently registered': 'No hay Sitios registrados actualmente',
'No Skill Types currently set': 'No Habilidad Tipos currently set',
'No Solutions currently defined': 'No Solutions currently defined',
'No Sources currently registered': 'N Fuentes ha registrado en',
'No Staff Types currently registered': 'No Tipos de personal actualmente inscritos',
'No Staff currently registered': 'No hay personal actualmente inscrito',
'No Storage Bin Type currently registered': 'No recipiente de almacenamiento Tipo actualmente registrados',
'No Storage Bins currently registered': 'No hay compartimientos de almacenaje actualmente registrados',
'No Storage Locations currently registered': 'No ubicaciones de almacenamiento actualmente registrados',
'No Subscription available': 'No Subscription available',
'No Survey Answers currently registered': 'No Inspección Answers currently registered',
'No Survey Questions currently registered': 'No Inspección Questions currently registered',
'No Survey Sections currently registered': 'No Inspección Sections currently registered',
'No Survey Series currently registered': 'No Inspección Series currently registered',
'No Survey Template currently registered': 'No se registra ninguna Plantilla de Cuestionario actualmente',
'No Sync': 'N Sync',
'No Tasks with Location Data': 'No Tasks with Location Data',
'No Themes currently defined': 'No hay Temas definidos actualmente',
'No Tickets currently registered': 'No hay Tiquetes registrados actualmente',
'No Tracks currently available': 'No hay pistas disponibles en la actualidad',
'No Units currently registered': 'N unidades actualmente registradas',
'No Users currently registered': 'No existen usuarios registrados actualmente',
'No Volunteers currently registered': 'No Volunteers currently registered',
'No Warehouse Items currently registered': 'No Warehouse Items currently registered',
'No Warehouses currently registered': 'No Warehouses currently registered',
'No Warehouses match this criteria': 'No Warehouses match this criteria',
'No access at all': 'No hay ningún acceso',
'No access to this record!': 'No hay acceso a este registro!',
'No action recommended': 'No action recommended',
'No conflicts logged': 'No conflicts logged',
'No contact information available': 'No contact information available',
'No contacts currently registered': 'Aún no hay miembros registrados actualmente',
'No data in this table - cannot create PDF!': 'No hay datos en esta tabla - no se puede crear PDF!',
'No databases in this application': 'No hay bases de datos en esta solicitud',
'No entries found': 'No entries found',
'No entries matching the query': 'No entries matching the query',
'No finds currently registered': 'No se encuentra registrado actualmente',
'No import jobs': 'No hay trabajos de importación',
'No linked records': 'No hay registros enlazados',
'No location known for this person': 'No location known for this person',
'No locations found for members of this team': 'No locations found for members of this team',
'No locations registered at this level': 'No locations registered at this level',
'No log entries matching the query': 'No log entries matching the query',
'No matching records found.': 'No matching records found.',
'No messages in the system': 'No messages in the system',
'No notes available': 'No notes available',
'No of Families Settled in the Schools': 'No de Families Settled in the Schools',
'No of Families to whom Food Items are Available': 'No a Familias que tienen acceso a artículos de alimentos',
'No of Families to whom Hygiene is Available': 'No de Families to whom Hygiene is Available',
'No of Families to whom Non-Food Items are Available': 'No de Families to whom Non-Food Insumos are Available',
'No of Female Students (Primary To Higher Secondary) in the Total Affectees': 'No de Female Students (Primary To Higher Secondary) in the Total Affectees',
'No of Female Teachers & Other Govt Servants in the Total Affectees': 'No de Female Teachers & Other Govt Servants in the Total Affectees',
'No of Male Students (Primary To Higher Secondary) in the Total Affectees': 'No de Male Students (Primary To Higher Secondary) in the Total Affectees',
'No of Male Teachers & Other Govt Servants in the Total Affectees': 'No de Male Teachers & Other Govt Servants in the Total Affectees',
'No of Rooms Occupied By Flood Affectees': 'No de Rooms Occupied By Inundación Affectees',
'No peers currently registered': 'No peers currently registered',
'No pending registrations found': 'No pending registrations found',
'No pending registrations matching the query': 'No pending registrations matching the query',
'No person record found for current user.': 'No hay ningún registro de persona que se encuentre el usuario actual.',
'No positions currently registered': 'No hay posiciones actualmente registrados',
'No problem group defined yet': 'No problem group defined yet',
'No projects currently registered': 'No hay proyectos registrados actualmente',
'No records matching the query': 'No records matching the query',
'No records to delete': 'No hay registros para eliminar',
'No recovery reports available': 'No hay informes disponibles sobre la recuperación',
'No report available.': 'No hay informe disponible.',
'No reports available.': 'No hay informes disponibles.',
'No reports currently available': 'No hay informes disponibles en la actualidad',
'No requests currently registered': 'Ninguna solicitud actualmente registrada',
'No requests found': 'Ninguna solicitud encontrada',
'No resources currently registered': 'No hay recursos actualmente registrados',
'No resources currently reported': 'No hay recursos notificados actualmente',
'No service profile available': 'N perfil de servicio disponibles',
'No skills currently set': 'No skills currently set',
'No status information available': 'No status information available',
'No sync permitted!': 'No hay sincronización permitidos!',
'No synchronization': 'No hay sincronización',
'No tasks currently registered': 'Las competencias de la actualidad social',
'No template found!': 'No se encontró plantilla!',
'No units currently registered': 'No existen unidades registrados actualmente',
'No volunteer information registered': 'No hay información de voluntariado ',
'Non-medical Staff': 'Personal no médico',
'None': 'Ninguno',
'None (no such record)': 'Ninguno (sin registro de este tipo)',
'Noodles': 'Noodles',
'Normal': 'Normal',
'Normal food sources disrupted': 'Fuentes normales de comida interrumpidas',
'Northern Cyprus': 'Chipre del Norte',
'Norway': 'Noruega',
'Nose, Angle': 'Nariz, Angulo',
'Nose, Curve': 'Nariz, Curva',
'Nose, shape': 'Nariz, la forma',
'Nose, size': 'Nariz, tamaño',
'Not Applicable': 'No se aplica',
'Not Authorised!': 'No Autorizado!',
'Not Possible': 'No es posible',
'Not Set': 'No establecida',
'Not authorised!': 'No autorizada!',
'Not installed or incorrectly configured.': 'No instalado o configurado incorrectamente.',
'Note': 'Note',
'Note Details': 'Note Details',
'Note Status': 'Note Status',
'Note Type': 'Note Type',
'Note added': 'Note added',
'Note deleted': 'Note deleted',
"Note that the dropdowns won't refresh automatically. Refresh the page if you wish to verify that the locations have gone.": "Note that the dropdowns won't refresh automatically. Refresh the page if you wish to verify that the locations have gone.",
'Note that this list only shows active volunteers. To see all people registered in the system, do a search from the home screen instead.': 'Note that this list only shows active volunteers. To see all people registered in the system, do a search from the home screen instead.',
'Note updated': 'Note updated',
'Notes': 'Notes',
'Notice to Airmen': 'Aviso para la Fuerza Aerea',
'Number': 'Number',
'Number of Columns': 'Número de columnas',
'Number of Patients': 'Número de Pacientes',
'Number of Rows': 'Número de filas',
'Number of Vehicles': 'Número de vehículos',
'Number of additional beds of that type expected to become available in this unit within the next 24 hours.': 'Number de additional beds de that type expected to become available in this unit within the next 24 hours.',
'Number of alternative places for studying': 'Número de lugares alternativos para estudiar',
'Number of available/vacant beds of that type in this unit at the time of reporting.': 'Number de available/vacant beds de that type in this unit at the time de reporting.',
'Number of deaths during the past 24 hours.': 'Números de muertes en las últimas 24 horas. ',
'Number of discharged patients during the past 24 hours.': 'Number de discharged patients during the past 24 hours.',
'Number of doctors': 'Número de médicos',
'Number of doctors actively working': 'Number of doctors actively working',
'Number of houses damaged, but usable': 'Number of houses damaged, but usable',
'Number of houses destroyed/uninhabitable': 'Number of houses destroyed/uninhabitable',
'Number of in-patients at the time of reporting.': 'Number de in-patients at the time de reporting.',
'Number of latrines': 'Number of latrines',
'Number of midwives actively working': 'Number of midwives actively working',
'Number of newly admitted patients during the past 24 hours.': 'Número de pacientes nuevos admitidos durante las últimas 24 horas. ',
'Number of non-medical staff': 'Número de personal no médico',
'Number of nurses': 'Número de enfermeras',
'Number of nurses actively working': 'Number of nurses actively working',
'Number of private schools': 'Número de colegios privados',
'Number of public schools': 'Number de public schools',
'Number of religious schools': 'Number de religious schools',
'Number of schools damaged but usable': 'Number of schools damaged but usable',
'Number of schools destroyed/uninhabitable': 'Number of schools destroyed/uninhabitable',
'Number of schools open before disaster': 'Number of schools open before disaster',
'Number of schools open now': 'Number of schools open now',
'Number of teachers affected by disaster': 'Number of teachers affected by disaster',
'Number of teachers before disaster': 'Number of teachers before disaster',
'Number of vacant/available beds in this hospital. Automatically updated from daily reports.': 'Number of vacant/available beds in this hospital. Automatically updated from daily reports.',
'Number of vacant/available units to which victims can be transported immediately.': 'Number of vacant/available units to which victims can be transported immediately.',
'Number or Label on the identification tag this person is wearing (if any).': 'Number or Label on the identification tag this person is wearing (if any).',
'Number/Percentage of affected population that is Female & Aged 0-5': 'Number/Percentage de affected population that is Female & Aged 0-5',
'Number/Percentage of affected population that is Female & Aged 13-17': 'Number/Percentage de affected population that is Female & Aged 13-17',
'Number/Percentage of affected population that is Female & Aged 18-25': 'Number/Percentage de affected population that is Female & Aged 18-25',
'Number/Percentage of affected population that is Female & Aged 26-60': 'Number/Percentage de affected population that is Female & Aged 26-60',
'Number/Percentage of affected population that is Female & Aged 6-12': 'Number/Percentage de affected population that is Female & Aged 6-12',
'Number/Percentage of affected population that is Female & Aged 61+': 'Number/Percentage de affected population that is Female & Aged 61+',
'Number/Percentage of affected population that is Male & Aged 0-5': 'Número/Porcentaje de populación afectada Masculina de Edades 0-5',
'Number/Percentage of affected population that is Male & Aged 13-17': 'Number/Percentage de affected population that is Male & Aged 13-17',
'Number/Percentage of affected population that is Male & Aged 18-25': 'Número / Porcentaje de la población afectada masculina entre edades de 18 a 25',
'Number/Percentage of affected population that is Male & Aged 26-60': 'Número/Porcentaje de populación Masculina afectada de Edades 26-60 ',
'Number/Percentage of affected population that is Male & Aged 6-12': 'Number/Percentage de affected population that is Male & Aged 6-12',
'Number/Percentage of affected population that is Male & Aged 61+': 'Number/Percentage de affected population that is Male & Aged 61+',
'Numbers Only': 'Números Solamente',
'Nurse': 'Enfermera',
'Nursery Beds': 'Camas infantiles',
'Nursing Information Manager': 'Enfermería gestor de la información',
'Nutrition': 'Nutrición',
'OK': 'OK',
'OR Reason': 'O Razón',
'OR Status': 'O Estado',
'OR Status Reason': 'O Estado de la Razón',
'Observer': 'Observer',
'Obstetrics/Gynecology': 'Obstetricia/Ginecología',
'Office': 'Oficina',
'Office Address': 'Dirección de la oficina',
'Office Details': 'Información de Oficina',
'Office added': 'Oficina añadido',
'Office deleted': 'Oficina eliminada',
'Office updated': 'Oficina de actualización',
'Offices': 'Oficinas',
'Offline Sync': 'Offline Sync',
'Offline Sync (from USB/File Backup)': 'Offline Sync (from USB/File Backup)',
'Old': 'Old',
'Older people as primary caregivers of children': 'Personas mayores como cuidadoras primarias de los niños',
'Older people in care homes': 'Personas mayores en hogares especializados',
'Older people participating in coping activities': 'Older people participating in coping activities',
'Older people with chronical illnesses': 'Older people with chronical illnesses',
'Older person (>60 yrs)': 'Older person (>60 yrs)',
'Oman': 'Omán',
'On by default?': '¿Encendido por defecto?',
'On by default? (only applicable to Overlays)': 'On by default? (only applicable to Overlays)',
'On-site Hospitalization': 'En las instalaciones de hospitalización',
'One Time Cost': 'Una vez los costos',
'One time cost': 'Una vez los costos',
'One-time': 'Una sola vez',
'One-time costs': 'Gastos no recurrentes',
'Only showing accessible records!': 'Sólo se muestran los registros accesibles!',
'Oops! Something went wrong...': 'Caray! Algo salió mal...',
'Oops! something went wrong on our side.': 'Oops! something went wrong on our side.',
'Open': 'Abierto',
'Open area': 'Área abierta',
'Open in New Tab': 'Open in Nueva Tab',
'Open recent': 'Open recent',
'Operating Rooms': 'Quirófanos',
'Operation': 'Operación',
'Optional link to an Incident which this Assessment was triggered by.': 'Optional link to an Incident which this Assessment was triggered by.',
'Optional. In GeoServer, this is the Workspace Namespace URI. Within the WFS getCapabilities, this is the FeatureType Name part before the colon(:).': 'Optional. In GeoServer, this is the Workspace Namespace URI. Within the WFS getCapabilities, this is the FeatureType Name part before the colon(:).',
"Optional. The name of the geometry column. In PostGIS this defaults to 'the_geom'.": "Optional. The name of the geometry column. In PostGIS this defaults to 'the_geom'.",
'Options': 'Opciones',
'Ordered list ... (#TODO [String])': 'Lista ordenada ... (#TODO [String])',
'Organisation': 'Organización',
'Organization': 'Organización',
'Organization Details': 'Detalles de Organización',
'Organization Registry': 'Registro de Organizaciones',
'Organization added': 'Organización añadida',
'Organization deleted': 'Organización eliminada',
'Organization updated': 'Organización actualizada',
'Organization: ': 'Organization: ',
'Organizations': 'Organizaciones',
'Origin': 'Origen',
'Origin of the separated children': 'Origin of the separated children',
'Other': 'Otro',
'Other (describe)': 'Other (describe)',
'Other (specify)': 'Otra (especificar)',
'Other Evidence': 'Otras pruebas',
'Other Faucet/Piped Water': 'Otros Grifo/Acueducto',
'Other Isolation': 'Aislamiento Otros',
'Other Name': 'El otro nombre',
'Other activities of boys 13-17yrs': 'Otras actividades de niños de 13-17 años ',
'Other activities of boys 13-17yrs before disaster': 'Other activities de boys 13-17yrs before disaster',
'Other activities of boys <12yrs': 'Other activities de boys <12yrs',
'Other activities of boys <12yrs before disaster': 'Other activities de boys <12yrs before disaster',
'Other activities of girls 13-17yrs': 'Otras actividades de niñas de 13-17años',
'Other activities of girls 13-17yrs before disaster': 'Otras actividades de niñas de 13-17 años antes del desastre',
'Other activities of girls<12yrs': 'Other activities de girls<12yrs',
'Other activities of girls<12yrs before disaster': 'Other activities de girls<12yrs before disaster',
'Other alternative infant nutrition in use': 'Otra alternatva de nutrición infantil en uso',
'Other alternative places for study': 'Lugares alternativos para estudiar',
'Other assistance needed': 'Otros tipos de asistencia necesarios',
'Other assistance, Rank': 'Otra ayuda, Rango',
'Other current health problems, adults': 'Otros problemas actuales de salud en adultos',
'Other current health problems, children': 'Other current health problems, children',
'Other events': 'Other events',
'Other factors affecting school attendance': 'Other factors affecting school attendance',
'Other major expenses': 'Other major expenses',
'Other school assistance received': 'Other school assistance received',
'Other school assistance, details': 'Other school assistance, details',
'Other school assistance, source': 'Other school assistance, source',
'Other side dishes in stock': 'Otros acompañamientos en existencia',
'Other types of water storage containers': 'Other types de water storage containers',
'Other ways to obtain food': 'Otras formas de obtener comida',
'Outbound Mail settings are configured in models/000_config.py.': 'Outbound Mail settings are configured in models/000_config.py.',
'Outbox': 'Bandeja de salida',
'Outgoing SMS Handler': 'Outgoing SMS Handler',
'Outgoing SMS handler': 'Outgoing SMS handler',
'Overland Flow Flood': 'Overland Flow Inundación',
'Overlays': 'Capas Temáticas',
'Owned Resources': 'Owned Resources',
'PDAM': 'PDAM',
'PF Number': 'Número PF',
'PIN': 'PIN',
'PIN number ': 'PIN number ',
'PL Women': 'Mujeres',
'Packet': 'Packet',
'Pakistan': 'Pakistán',
'Palau': 'Palau',
'Pan Map: keep the left mouse button pressed and drag the map': 'Pan Mapa: mantener el botón izquierdo del ratón y arrastre el mapa',
'Panama': 'Panamá',
'Papua New Guinea': 'Papua Nueva Guinea',
'Paraguay': 'Paraguay',
'Parameters': 'Parámetros',
'Parent': 'Padre',
'Parent Office': 'Parent Office',
"Parent level should be higher than this record's level. Parent level is": "Parent level should be higher than this record's level. Parent level is",
'Parent needs to be of the correct level': 'Parent needs to be of the correct level',
'Parent needs to be set': 'Parent needs to be set',
'Parent needs to be set for locations of level': 'Parent needs to be set for locations of level',
'Parents/Caregivers missing children': 'Parents/Caregivers missing children',
'Partial Database Synchronization': 'La sincronización de bases de datos parciales',
'Participant': 'Participante',
'Partner Details': 'Socio Detalles',
'Partner added': 'Socio añadido',
'Partner deleted': 'Socio eliminado',
'Partner updated': 'Socio actualizado',
'Partners': 'Socios',
'Pashto': 'Pashto',
'Passport': 'Pasaporte',
'Password': 'Contraseña',
"Password fields don't match": 'Los campos de contraseña no coinciden',
'Password for authentication at the peer. Note that only HTTP Basic authentication is supported.': 'Password for authentication at the peer. Note that only HTTP Basic authentication is supported.',
'Pathology': 'Patología',
'Patients': 'Pacientes',
'Pediatric ICU': 'UCI Pediátrica',
'Pediatric Psychiatric': 'Psiquiatría Pediátrica',
'Pediatrics': 'Pediatría',
'Peer': 'Par',
'Peer Details': 'Peer Detalles',
'Peer Registration': 'Peer Registration',
'Peer Registration Details': 'Peer Registration Details',
'Peer Registration Request': 'Peer Registration Request',
'Peer Type': 'Peer Type',
'Peer UID': 'Peer UID',
'Peer added': 'Peer agregado',
'Peer deleted': 'Peer eliminada',
'Peer not allowed to push': 'Peer not allowed to push',
'Peer registration request added': 'Peer registration request added',
'Peer registration request deleted': 'Peer registration request deleted',
'Peer registration request updated': 'Peer registration request updated',
'Peer updated': 'Peer updated',
'Peers': 'Peers',
'Pending Requests': 'Pending Requests',
'People': 'Personas',
'People Needing Food': 'People Needing Food',
'People Needing Shelter': 'People Needing Shelter',
'People Needing Water': 'People Needing Water',
'People Trapped': 'Personas Trapped',
'People with chronical illnesses': 'People with chronical illnesses',
'Person': 'Persona',
'Person 1': 'Person 1',
'Person 1, Person 2 are the potentially duplicate records': 'Person 1, Person 2 are the potentially duplicate records',
'Person 2': 'Person 2',
'Person Data': 'Person Data',
'Person De-duplicator': 'Person De-duplicator',
'Person Details': 'Detalles de Persona',
'Person Finder': 'Person Finder',
'Person Management': 'Gestión de Personas',
'Person Registry': 'Registro de Personas',
'Person added': 'Persona añadida',
'Person deleted': 'Persona eliminada',
'Person details updated': 'Detalles de persona actualizados',
'Person found': 'Persona encontrada',
'Person interviewed': 'Persona entrevistada',
'Person missing': 'Person missing',
'Person reporting': 'Persona que reporta',
'Person updated': 'Persona actualizada',
'Person who has actually seen the person/group.': 'Person who has actually seen the person/group.',
'Person who is reporting about the presence.': 'Persona que está reportando acerca de la presencia.',
'Person who observed the presence (if different from reporter).': 'Persona que observó la presencia (si es diferente al que lo reporta).',
'Person/Group': 'Person/Group',
'Personal Data': 'Personal Data',
'Personal Effects': 'Efectos Personales',
'Personal Effects Details': 'Detalles Efectos Personales',
'Personal impact of disaster': 'Personal impact of disaster',
'Personas Deparecidas': 'Personas Desaparecidas',
'Persons': 'Personas',
'Persons per Dwelling': 'Personas por vivienda',
'Persons with disability (mental)': 'Persona con discapacidad (mental)',
'Persons with disability (physical)': 'Persons with disability (physical)',
'Peru': 'Perú',
'Philippines': 'Filipinas',
'Phone': 'Teléfono',
'Phone 1': 'Teléfono 1',
'Phone 2': 'Teléfono 2',
'Phone Number': 'Número de teléfono',
"Phone number to donate to this organization's relief efforts.": 'Número telefónico para donar a los esfuerzos de ayuda de esta organización.',
'Phone/Business': 'Teléfono / Negocios',
'Phone/Emergency': 'Teléfono de emergencia',
'Phone/Exchange': 'Teléfono / Cambio',
'Photo': 'Foto',
'Photo Details': 'Photo Detalles',
'Photo added': 'Foto agregada',
'Photo deleted': 'Photo eliminada',
'Photo updated': 'Foto actualizada',
'Photograph': 'Fotografía',
'Photos': 'Fotos',
'Physical': 'Físico',
'Physical Description': 'Descripción física',
'Picture upload and finger print upload facility': 'Transfer de imagenes y la facilidad de huellas digitales subir',
'Pipe': 'Tubo',
'Place for solid waste disposal': 'Place for solid waste disposal',
'Place of Recovery': 'Lugar de la recuperación',
'Place of find': 'Lugar del hallazgo',
'Places the children have been sent to': 'Lugares donde los niños han sido enviados',
'Planning': 'Planificación',
'Playing': 'Playing',
"Please come back after sometime if that doesn't help.": 'Por favor regrese después si eso no ayuda.',
'Please correct all errors.': 'Please correct all errors.',
'Please enter a First Name': 'Por favor introduzca un primer nombre',
'Please enter a Google Key if you wish to use Google Layers': 'Por favor ingrese una contraseña de Google si desea utilizar capas Google',
'Please enter a Yahoo Key if you wish to use Yahoo Layers': 'Por favor ingrese una contraseña de Yahoo si usted desea utilizar capas Yahoo',
'Please enter a valid email address': 'Por favor ingresar un correo de electrónico valido ',
'Please enter the first few letters of the Person/Group for the autocomplete.': 'Please enter the first few letters of the Person/Group for the autocomplete.',
'Please enter the recipient': 'Por favor introduzca el receptor',
'Please fill this!': 'Por favor, rellene este!',
'Please report here where you are:': 'Please report here where you are:',
'Please select another level': 'Please select another level',
'Please specify any problems and obstacles with the proper handling of the disease, in detail (in numbers, where appropriate). You may also add suggestions the situation could be improved.': 'Please specify any problems and obstacles with the proper handling of the disease, in detail (in numbers, where appropriate). You may also add suggestions the situation could be improved.',
'Please use this field to record any additional information, including a history of the record if it is updated.': 'Please use this field to record any additional information, including a history de the record if it is updated.',
'Please use this field to record any additional information, such as Ushahidi instance IDs. Include a history of the record if it is updated.': 'Please use this field to record any additional information, such as Ushahidi instance IDs. Include a history of the record if it is updated.',
'Pledge': 'Comromiso',
'Pledge Aid': 'Pledge Aid',
'Pledge Aid to match these Requests': 'Promesa de Ayuda que coincide con estas Solicitudes',
'Pledge Status': 'Promesa de Estado',
'Pledge Support': 'Compromiso de Apoyo',
'Pledged': 'Promesas',
'Pledges': 'Promesas de contribución',
'Point': 'Punto',
'Poisoning': 'Poisoning',
'Poisonous Gas': 'Poisonous Gas',
'Poland': 'Polonia',
'Police': 'Police',
'Policy': 'Política',
'Pollution and other environmental': 'Polución y otros ambientes',
'Polygon': 'Polígono',
'Population': 'Population',
'Porridge': 'Mazamorra',
'Port': 'Port',
'Port Closure': 'Puerto Cerrado',
'Portugal': 'Portugal',
'Position Details': 'Posición Detalles',
'Position added': 'Posición añadido',
'Position deleted': 'Posición eliminado',
'Position type': 'Posición tipo',
'Position updated': 'Posición actualización',
'Positions': 'Posiciones',
'Postcode': 'Código postal',
'Poultry': 'Aves de Corral',
'Poultry restocking, Rank': 'Poultry restocking, Rank',
'Pounds': 'Libras',
'Power Failure': 'Fallo de luz',
'Powered by Sahana Eden': 'Desarrollado por Sahana Eden',
'Preferred Name': 'Nombre preferido',
'Pregnant women': 'Mujeres embarazadas ',
'Preliminary': 'Preliminar',
'Presence': 'Presence',
'Presence Condition': 'Presencia Condición',
'Presence Log': 'Presencia Registrarse',
"Press the 'Delete Old' button to have all records which reference this one be repointed at the new one & then the old record will be deleted.": "Oprima el botón de 'Eliminar anterior' para que todos los registros con referencia a éste sean apuntados al nuevo y el registro viejo sea eliminado.",
'Previous': 'Previous',
'Previous View': 'Vista anterior',
'Primary Name': 'Primary Name',
'Principal': 'Principal',
'Priority': 'Prioridad',
'Priority Level': 'Prioridad',
'Private': 'Privado',
'Problem': 'Problema',
'Problem Administration': 'Problema Administrativa',
'Problem Details': 'Problema Detalles',
'Problem Group': 'Problemaa Grupo',
'Problem Title': 'Problema Título',
'Problem added': 'Problema agregado',
'Problem deleted': 'Problema eliminada',
'Problem updated': 'Problema actualizado',
'Problems': 'Problemas',
'Procedure': 'Procedimiento',
'Procurements': 'Contrataciones',
'Product Description': 'Descripción del producto',
'Product Name': 'Nombre del producto',
'Professional Care': 'Professional Care',
'Profile': 'Perfil',
'Project': 'Proyecto',
'Project Activities': 'Actividades del Proyecto ',
'Project Details': 'Detalles del Proyecto',
'Project Management': 'Project Management',
'Project Status': 'Situación del proyecto',
'Project Tracking': 'Seguimiento a Proyectos',
'Project added': 'Proyecto añadido',
'Project deleted': 'Proyecto eliminado',
'Project has no Lat/Lon': 'Project has no Lat/Lon',
'Project updated': 'Proyecto de actualización',
'Projection': 'Proyección',
'Projection Details': 'Detalles de proyección',
'Projection added': 'Proyección añadido',
'Projection deleted': 'Proyección eliminado',
'Projection updated': 'Proyección actualizada',
'Projections': 'Proyecciones',
'Projects': 'Proyectos',
'Projects Report': 'Proyectos de Informe',
'Prominent Adams apple': 'Prominentes Adams manzana',
'Protected resource': 'Protected resource',
'Protection': 'Protection',
'Provide Metadata for your media files': 'Proporcione metadatos para archivos multimedia',
'Provide a password': 'Ofrece una contraseña',
'Province': 'Departamento',
'Proxy-server': 'Servidor Proxy',
'Psychiatrics/Adult': 'Psiquiatría / Adultos',
'Psychiatrics/Pediatric': 'Psiquiatría/Pediátrica',
'Pubic hair, Colour': 'El vello púbico, Color',
'Pubic hair, Extent': 'El vello púbico, el alcance',
'Public': 'Public',
'Public Event': 'Public Event',
'Public and private transportation': 'Public and private transportation',
'Pull tickets from external feed': 'Tome tiquetes desde una fuente externa',
'Punjabi': 'Punjabi',
'Push tickets to external system': 'Push tickets to external system',
'Put a choice in the box': 'Seleccione una opción en la caja',
'Pyroclastic Flow': 'Pyroclastic Flow',
'Pyroclastic Surge': 'Oleadan Piroclástica',
'Python Serial module not available within the running Python - this needs installing to activate the Modem': 'Python Serial module not available within the running Python - this needs installing to activate the Modem',
'Qatar': 'Qatar',
'Quantity': 'Cantidad',
'Quarantine': 'Cuarentena',
'Queries': 'Queries',
'Query': 'Query',
'Query Feature': 'Consulta de características',
'Queryable?': '¿Cuestionable?',
'RECORD A': 'RECORD A',
'RECORD B': 'RECORD B',
'RESPONSE': 'RESPONSE',
'RPC Service URL': 'URL del servicio RPC',
'Race': 'Race',
'Race group': 'Carrera de grupo',
'Race, complexion': 'La raza, la tez',
'Radiological Hazard': 'Riesgo Radiológico',
'Radiology': 'Radiología',
'Railway Accident': 'Accidente de Ferrocarril ',
'Railway Hijacking': 'Railway Hijacking',
'Rain Fall': 'Cantidad de Lluvia',
'Rapid Assessment': 'Asesoría Rápida',
'Rapid Assessment Details': 'Rapid Assessment Details',
'Rapid Assessment added': 'Rapid Assessment added',
'Rapid Assessment deleted': 'Rapid Assessment deleted',
'Rapid Assessment updated': 'Rapid Assessment updated',
'Rapid Assessments': 'Evaluaciones Rápidas',
'Rapid Assessments & Flexible Impact Assessments': 'Rapid Assessments & Flexible Impact Assessments',
'Rapid Close Lead': 'Rápido Close Lead',
'Rating Scale': 'Escala de Calificación',
'Raw Database access': 'Primas de base de datos de acceso',
'Real World Arbitrary Units': 'Real unidades arbitrarias Mundial',
'Receive': 'Recibir',
'Receive Items': 'Receive Items',
'Receive Shipment': 'Receive Shipment',
'Received': 'Received',
'Received By': 'Received By',
'Received Item Details': 'Received Item Details',
'Received Item added': 'Received Item added',
'Received Item deleted': 'Received Item deleted',
'Received Item updated': 'Received Item updated',
'Received Items added to Warehouse Items': 'Received Items added to Warehouse Items',
'Received Shipment Details': 'Received Shipment Details',
'Received Shipment canceled': 'Received Shipment canceled',
'Received Shipment updated': 'Received Shipment updated',
'Received Shipments': 'Received Shipments',
'Recipient': 'Recipient',
'Recipients': 'Destinatarios',
'Record %(id)s created': 'Registro %(id)s creado',
'Record %(id)s updated': 'Registro %(id)s actualizada',
'Record Details': 'Registro de Datos',
'Record ID': 'Número de identificación de registro',
'Record Saved': 'Record Saved',
'Record added': 'Registro añadido',
'Record deleted': 'Registro eliminado',
'Record last updated': 'Registro actualizada por última vez',
'Record not found!': 'Registro no encontrado!',
'Record updated': 'Registro actualizado',
'Records': 'Registros',
'Recovery': 'Recuperación',
'Recovery Reports': 'Recuperar reportes',
'Recovery Request': 'Recovery Requerimiento',
'Recovery Request added': 'Recovery Requerimiento agregado',
'Recovery Request deleted': 'Recovery Requerimiento eliminada',
'Recovery Request updated': 'Petición de Recuperación actualizada',
'Recovery Requests': 'Peticiónes de Recuperación',
'Recovery report added': 'Informe de recuperación añadido',
'Recovery report deleted': 'Informe de recuperación eliminado',
'Recovery report updated': 'Reporte de recuperación actualizado',
'Recurring': 'Periódico',
'Recurring Cost': 'Costo recurrente',
'Recurring cost': 'Costos recurrentes',
'Recurring costs': 'Costos periódicos',
'Reference Document': 'Documento de Referencia ',
'Refers to default syncronization policy adopted if data entry recieved from other machine is already present in your machine.': 'Se refiere a la política por defecto sincronización adoptada si la entrada de datos recibidos de otras máquinas ya está presente en su máquina.',
'Regional': 'Regional',
'Register': 'Registrarse',
'Register Person': 'Persona Registrada',
'Register Person into this Shelter': 'Register Person into this Shelter',
'Register them as a volunteer': 'Register them as a volunteer',
'Registered People': 'Registered People',
'Registered users can': 'Los usuarios registrados pueden ',
'Registering ad-hoc volunteers willing to contribute': 'El registro ad-hoc de voluntarios dispuestos a contribuir',
'Registration': 'Inscripción',
'Registration Details': 'Registration Details',
'Registration added': 'Registration added',
'Registration entry deleted': 'Registration entry deleted',
'Registration key': 'Clave de registro',
'Registration successful': 'Registro satisfactorio',
'Registration updated': 'Registration updated',
'Registro de Organización': 'Registro de Organización',
'Registro de Refugios': 'Registro de Refugios',
'Rehabilitation/Long Term Care': 'Rehabilitación y cuidados a largo plazo',
'Reliable access to sanitation/hygiene items': 'Acceso confiable a los artículos de sanidad/higiene',
'Relief': 'Ayuda',
'Relief Item': 'Artículo de Ayuda',
'Relief Item Catalog': 'Catálogo de Artículos de Ayuda',
'Relief Item added': 'Auxilio Insumo agregado',
'Relief Item updated': 'Auxilio Insumo updated',
'Relief Items': 'Auxilio Insumos',
'Relief Items stored in Inventories in different locations': 'Artículos de ayuda guardados en inventarios de diferentes lugares',
'Relief Team': 'Equipo de Socorro',
'Religion': 'Religión',
'Religious Leader': 'Religious Leader',
'Relocate as instructed in the <instruction>': 'Relocate as instructed in the <instruction>',
'Remove': 'Eliminar',
'Remove Feature: Select the feature you wish to remove & press the delete key': 'Retire Reportaje: Seleccione la función que desea eliminar y presione la tecla de borrar',
'Repeat your password': 'Repita su contraseña',
'Replace': 'Replace',
'Replace All': 'Reemplazar todo',
'Replace if Master': 'Replace if Master',
'Replace if Newer': 'Reemplazar si es mas reciente',
'Replace with Remote': 'Replace with Remote',
'Replace/Master': 'Sustituir/Maestro',
'Replace/Newer': 'Sustituir/Más nuevo',
'Report': 'Informe',
'Report Another Assessment...': 'Report Another Assessment...',
'Report Details': 'Report Detalles',
'Report Resource': 'Informe de Recursos',
'Report Type': 'Tipo de Informe',
'Report Types Include': 'Report Tipos Include',
'Report a Bug': 'Informar de un error',
'Report a Found Person': 'Informe de una persona declarada',
'Report a Missing Person': 'Informe a una persona desaparecida',
'Report a Problem with the Software': 'Informar de un problema con el Software',
'Report added': 'Informe agregado',
'Report deleted': 'Informe eliminado',
'Report my location': 'Report my location',
'Report that person missing': 'Report that person missing',
'Report the contributing factors for the current EMS status.': 'Report the contributing factors for the current EMS status.',
'Report the contributing factors for the current OR status.': 'Informar sobre los factores contribuyentes al status OR en la actualidad.',
'Report the person as found': 'Report the person as found',
'Report them as found': 'Report them as found',
'Report them missing': 'Report them missing',
'Report updated': 'Informe actualizado',
'ReportLab module not available within the running Python - this needs installing for PDF output!': 'ReportLab no disponibles en el módulo de ejecución de Python - esto debe instalar para la salida del pdf!',
'ReportLab module not available within the running Python - this needs installing to do PDF Reporting!': 'ReportLab no disponibles en el módulo de ejecución de Python - esto tiene que ver la instalación de PDF de Información!',
'Reported By': 'Reported By',
'Reporter': 'Reportero',
'Reporter Name': 'Reporter Name',
'Reporter: ': 'Reporter: ',
'Reportes de Incidentes': 'Reportes de Incidentes',
'Reporting on the projects in the region': 'Presentación de informes sobre los proyectos en la región',
'Reports': 'Informes',
'Request': 'Solicitud',
'Request Added': 'Request Added',
'Request Aid': 'Solicitud de ayuda',
'Request Canceled': 'Request Canceled',
'Request Detail': 'Solicitud de Detalle',
'Request Details': 'Detalles solicitud',
'Request Item': 'Request Item',
'Request Item Details': 'Requerimiento Insumo Detalles',
'Request Item added': 'Petición de insumo agregado',
'Request Item deleted': 'Elemento de Petición eliminado',
'Request Item updated': 'Solicitud de insumo actualizado',
'Request Items': 'Requerimiento Insumos',
'Request Management': 'Solicitud de gestión',
'Request Type': 'Tipo de solicitud',
'Request Updated': 'Request Updated',
'Request added': 'Request added',
'Request deleted': 'Request deleted',
'Request for Role Upgrade': 'Petición para ascenso de Rol',
'Request updated': 'Request updated',
'Request, Response & Session': 'Solicitud, Respuesta y sesión',
'Requested': 'Pedido',
'Requested By Location': 'Requested By Location',
'Requested From Warehouse': 'Requested From Warehouse',
'Requested by': 'Requested by',
'Requested on': 'Requested on',
'Requester': 'Requester',
'Requestor': 'Requerimientoor',
'Requests': 'Solicitudes',
'Requests for Item': 'Solicitudes por Artículo',
'Required by other servers.': 'Requerido por otros servidores.',
'Requires Login!': 'Requires Login!',
'Requires login': 'Requiere inicio de sesión',
'Rescue and recovery': 'Rescue and recovery',
'Reset': 'Restablecer',
'Reset Password': 'Reset Password',
'Reset form': 'Limpiar forma',
'Resize Feature: Select the feature you wish to resize & then Drag the associated dot to your desired size': 'Cambiar el tamaño de características: Seleccione la función que desea cambiar el tamaño y luego arrastrar el punto asociado a su tamaño deseado',
'Resolve': 'Resolve',
'Resolve Conflict': 'Resolve Conflict',
'Resolve link brings up a new screen which helps to resolve these duplicate records and update the database.': 'Resolve link brings up a new screen which helps to resolve these duplicate records and update the database.',
'Resource': 'Recurso',
'Resource Details': 'Detalles de recursos',
'Resource added': 'Recursos añadido',
'Resource deleted': 'Recursos eliminado',
'Resource updated': 'Recursos actualización',
'Resources': 'Recursos',
'Respiratory Infections': 'Respiratory Infections',
'Response': 'Respuesta',
'Response Details': 'Response Detalles',
'Response added': 'Response agregado',
'Response deleted': 'Respuesta eliminada',
'Response updated': 'Response updated',
'Responses': 'Responses',
'Restricted Access': 'Acceso restringido',
'Restrictions': 'Restricciones',
'Results': 'Results',
'Resume Sync': 'Resume Sync',
'Retail Crime': 'Delitos menores',
'Retrieve Password': 'Recuperar Contraseña',
'Review the situation on maps.': 'Examen de la situación en los mapas.',
'Rice': 'Rice',
'Right now, your system is set default synchronization scheme. You are currently able to synchronize your server with other servers.': 'En este momento, su sistema está configurado por defecto de un régimen de sincronización. Usted está actualmente en condiciones de sincronizar el servidor con otros servidores.',
'Right-hand headline': 'Right-hand headline',
'Right-to-Left': 'De derecha a izquierda',
'Riot': 'Riot',
'River': 'River',
'River Details': 'Detalles Rio',
'River added': 'River agregado',
'River deleted': 'Rio eliminado',
'River updated': 'Rio actualizado',
'Rivers': 'Rivers',
'Road Accident': 'Accidente en la Via',
'Road Closed': 'Road Closed',
'Road Conditions': 'Road Conditions',
'Road Delay': 'Demora en la carretera',
'Road Hijacking': 'Road Hijacking',
'Road Usage Condition': 'Condiciones de Uso de la Carretera',
'Role': 'Papel',
'Role Details': 'Detalles Rol',
'Role Required': 'Role Required',
'Role Updated': 'Rol Actualizado',
'Role added': 'Papel añadido',
'Role deleted': 'Papel eliminado',
'Role updated': 'Papel actualización',
'Role-based': 'Role-based',
'Roles': 'Roles',
'Roles Permitted': 'Roles Permitted',
'Roman': 'Romano',
'Romania': 'Rumania',
'Roof tile': 'Rode tile',
'Rotate Feature: Select the feature you wish to rotate & then Drag the associated dot to rotate to your desired location': 'Rotación de funciones: Seleccione la función que desea girar y luego arrastrar el punto asociadas a rotar a su localización deseada',
'Row Choices (One Per Line)': 'Row Choices (One Per Line)',
'Rows in table': 'Filas en el cuadro',
'Rows selected': 'Filas seleccionadas',
'Run Functional Tests': 'Ejecutar Pruebas Funcionales',
'Run Interval': 'Run Interval',
'Running Cost': 'Coste de explotación',
'Russia': 'Rusia',
'Rwanda': 'Ruanda',
'Rápido Evaluaciones': 'Evaluaciones Rápidas',
'SITUATION': 'SITUATION',
'SMS': 'SMS',
'SMS Details': 'Detalles de SMS',
'SMS InBox': 'Buzón de SMS',
'SMS OutBox': 'Bandeja de salida de SMS',
'SMS added': 'SMS añadido',
'SMS created': 'SMS crea',
'SMS deleted': 'SMS suprimido',
'SMS updated': 'SMS actualizado',
'Safe environment for vulnerable groups': 'Ambiente seguro para grupos vulnerables',
'Safety of children and women affected by disaster': 'Safety of children and women affected by disaster',
'Sahana Administrator': 'Sahana Administrador',
'Sahana Agasti': 'Sahana Agasti',
'Sahana Blue': 'Sahana Azul',
'Sahana Community Chat': 'Chat de la Comunidad Sahana',
'Sahana Eden': 'Sahana Eden',
'Sahana Eden <= Other sync (Sahana Agasti, Ushahidi, etc.)': 'Sincronización Sahana Eden <= Otro (Sahana Agasti, Ushahidi, etc.)',
'Sahana Eden <=> Other': 'Sahana Eden <=> Otro',
'Sahana Eden <=> Other (Sahana Agasti, Ushahidi, etc.)': 'Sahana Eden <=> Other (Sahana Agasti, Ushahidi, etc.)',
'Sahana Eden <=> Other sync (Sahana Agasti, Ushahidi, etc.)': 'Sahana Eden <=> Other sync (Sahana Agasti, Ushahidi, etc.)',
'Sahana Eden <=> Sahana Eden': 'El Edén Sahana <=> El Edén Sahana',
'Sahana Eden <=> Sahana Eden sync': ' Sahana Eden <=> Sincronización Sahana Eden ',
'Sahana Eden Disaster Management Platform': 'Sahana Eden Disaster Management Platform',
'Sahana Eden Website': 'sitio web de Sahana Eden',
'Sahana Eden is a family of applications that provide solutions to coordination and collaboration for organisations working in disaster management.': 'Sahana Eden es una familia de aplicaciones que provee soluciones a la coordinación y colaboración para organizaciones que trabajan en gestión de desastres.',
'Sahana Eden is a family of applications that provide solutions to coordination and collaboration for organisations working in disaster management. The following modules are available': 'Sahana Eden es una familia de aplicaciones que provee soluciones para la coordinación y colaboración para organizaciones trabajando en manejo de desastres. Los siguientes módulos se encuentran disponibles',
'Sahana FOSS Disaster Management System': 'Sahana FOSS Sistema de Gestión de Desastres',
'Sahana Green': 'Sahana Verde',
'Sahana Login Approval Pending': 'Pendiente la aprobación de Sahana del Inicio de Sesión',
'Sahana Steel': 'Sahana Acero',
'Sahana Website': 'sitio web de Sahana',
'Sahana access granted': 'Sahana access granted',
'Sahana has to hook to a network port other than port being used by website (normally port 80). If your firewall blocks this port you have change it to any other free port. For information on eligible ports, see': 'Sahana tiene que conectar a un puerto de red no sea el puerto utilizado por sitio web (normalmente el puerto 80). Si tu cortafuegos bloquea este puerto tiene cambiarlo por cualquier otro puerto libre. Para obtener información sobre los puertos elegibles, consulte',
'Sahana is a collection of web based disaster management applications that provides solutions to large-scale humanitarian coordination and collaboration in disaster situation and its aftermath. Sahana consists of several modules for following functionalities': 'Sahana es una colección de aplicaciones basadas en web de gestión de desastres que proporciona soluciones para la coordinación humanitaria a gran escala y la colaboración en situación de desastre y sus secuelas. Sahana consta de varios módulos para los siguientes funcionalidades',
'Sahana: new request has been made. Please login to see if you can fulfil the request.': 'Sahana: new request has been made. Please login to see if you can fulfil the request.',
'Saint Kitts and Nevis': 'Saint Kitts y Nevis',
'Saint Lucia': 'Santa Lucía',
'Saint Vincent and the Grenadines': 'San Vicente y las Granadinas',
'Salted Fish': 'Pescado con sal',
'Salvage material usable from destroyed houses': 'Material útil salvado de casas destruidas',
'Salvage material usable from destroyed schools': 'Salvage material usable from destroyed schools',
'Samoa': 'Samoa',
'San Marino': 'San Marino ',
'Sanitation problems': 'Sanitation problems',
'Satellite': 'Satélite',
'Satellite Office': 'Oficina Satélite',
'Saturday': 'Saturday',
'Saudi Arabia': 'Arabia Saudita',
'Save': 'Guardar',
'Save any Changes in the one you wish to keep': 'Guardar cualquier cambio dentro de el que desees guardar',
'Save: Default Lat, Lon & Zoom for the Viewport': 'Guardar: Lat defecto, Lon y zoom para la ventana gráfica',
'Saved.': 'Guardado.',
'Saving...': 'Guardando ...',
'Scale of Results': 'Escala de Resultados',
'Scanned File': 'Escaneadas Archivo',
'Schedule': 'Schedule',
'School': 'Escuela',
'School Closure': 'School Closure',
'School Code': 'School Code',
'School District': 'School District',
'School District Details': 'Detalles Escuela Distrital',
'School District added': 'School District agregado',
'School District deleted': 'School District eliminada',
'School District updated': 'School District updated',
'School Districts': 'School Districts',
'School Lockdown': 'School Lockdown',
'School Report Details': 'School Report Detalles',
'School Report added': 'School Report agregado',
'School Report deleted': 'School Report eliminada',
'School Report updated': 'School Report updated',
'School Reports': 'School Reports',
'School Teacher': 'School Teacher',
'School assistance received/expected': 'Asistencia escolar recibida/esperada',
'School destroyed': 'School destroyed',
'School heavily damaged': 'School heavily damaged',
'School tents received': 'School tents received',
'School tents, source': 'Carpas de colegios, fuente',
'School used for other purpose': 'School used for other purpose',
'School/studying': 'Escuela/estudiando',
'Schools': 'Escuelas ',
'Search': 'Búsqueda',
'Search ': 'Busqueda ',
'Search & List Bin Types': 'Busca y Tipos Lista Bin',
'Search & List Bins': 'Busca y lista Bins',
'Search & List Catalog': 'Busca y Lista de catálogos',
'Search & List Category': 'Busca y Lista de Categoría',
'Search & List Items': 'Busca y Listado de ejemplares',
'Search & List Locations': 'Busca y lista de ubicaciones',
'Search & List Site': 'Busca y lista de la web',
'Search & List Sub-Category': 'Busca y Lista de Subcategoría',
'Search & List Unit': 'Unidad de Búsqueda y Lista',
'Search Activities': 'Busqueda Activities',
'Search Activity Report': 'Informe de Actividad de la búsqueda',
'Search Addresses': 'Busca direcciones',
'Search Aid Requests': 'Búsqueda de Solicitudes de Ayuda',
'Search Assessment Summaries': 'Search Assessment Summaries',
'Search Assessments': 'Busqueda Evaluaciones',
'Search Baseline Type': 'Search Baseline Type',
'Search Baselines': 'Search Baselines',
'Search Budgets': 'Buscar Presupuestos',
'Search Bundles': 'Buscar Paquetes',
'Search Catalog Items': 'Search Catalog Items',
'Search Category<>Sub-Category<>Catalog Relation': 'Categoría Texto buscado:<>Sub-Categoría<>Relación Catálogo>',
'Search Checklists': 'Buscar Listas',
'Search Cluster Subsectors': 'Search Cluster Subsectors',
'Search Clusters': 'Busqueda Clusters',
'Search Configs': 'Buscar Configs',
'Search Contact Information': 'Busqueda Contact Information',
'Search Contacts': 'Buscar Contactos',
'Search Distribution Items': 'Busqueda Distribution Insumos',
'Search Distributions': 'Busqueda Distribuciones',
'Search Documents': 'Busqueda Documents',
'Search Donors': 'Busqueda Donantes',
'Search Email InBox': 'Buscar buzón de correo electrónico',
'Search Email OutBox': 'Buscar Correo electrónico Bandeja de salida',
'Search Feature Class': 'Buscar la clase de funciones',
'Search Feature Groups': 'Buscar en los grupos de funciones',
'Search Feature Layers': 'Busqueda Características',
'Search Find Report': 'Búsqueda Encontrar Informe',
'Search Flood Reports': 'Busqueda Inundación Reports',
'Search Geonames': 'Busqueda Geonames',
'Search Groups': 'Grupos Buscar',
'Search Hospitals': 'Buscar Hospitales',
'Search Identity': 'Búsqueda de identidad',
'Search Images': 'Búsqueda de imágenes',
'Search Impact Type': 'Search Impact Type',
'Search Impacts': 'Search Impacts',
'Search Incident Reports': 'Busqueda Incident Reports',
'Search Incidents': 'Busqueda Incidents',
'Search Inventory Items': 'Busqueda Inventory Insumos',
'Search Inventory Stores': 'Busqueda Inventory Stores',
'Search Item Catalog Category(s)': 'Búsqueda Categoría Catálogo del artículo (s)',
'Search Item Catalog(s)': 'Búsqueda del artículo Catálogo (s)',
'Search Item Categories': 'Busqueda Categorias de Artículos',
'Search Item Packets': 'Search Item Packets',
'Search Item Sub-Category(s)': 'Buscar subtema Categoría (s)',
'Search Items': 'Buscar artículos',
'Search Keys': 'Claves de Búsqueda',
'Search Kits': 'Buscar Juegos',
'Search Layers': 'Capas de la búsqueda',
'Search Locations': 'Ubicaciones de búsqueda',
'Search Log Entry': 'Buscar entrada de registro',
'Search Markers': 'Buscar marcadores',
'Search Member': 'Busqueda Member',
'Search Membership': 'Búsqueda de miembros',
'Search Memberships': 'Buscar Membresías',
'Search Metadata': 'Búsqueda de metadatos',
'Search Need Type': 'Search Need Type',
'Search Needs': 'Search Needs',
'Search Notes': 'Search Notes',
'Search Offices': 'Búsqueda de los servicios',
'Search Organisations': 'Buscar organizaciones',
'Search Organizations': 'Organizaciones de la búsqueda',
'Search Partners': 'Buscar socios',
'Search Peer': 'Search Peer',
'Search Peers': 'Busqueda Peers',
'Search People': 'Buscar personas',
'Search Personal Effects': 'Buscar Efectos Personales',
'Search Persons': 'Buscar personas',
'Search Photos': 'Busqueda Photos',
'Search Positions': 'Puestos disponibles',
'Search Problems': 'Búsqueda Problemas',
'Search Projections': 'Proyecciones de la búsqueda',
'Search Projects': 'Buscar proyectos',
'Search Rapid Assessments': 'Search Rapid Assessments',
'Search Received Items': 'Search Received Items',
'Search Received Shipments': 'Search Received Shipments',
'Search Records': 'Buscar Informes',
'Search Recovery Reports': 'Busqueda Recovery Reports',
'Search Registations': 'Buscar Registros',
'Search Registration Request': 'Search Registration Request',
'Search Report': 'Informe de búsqueda',
'Search Reports': 'Busqueda Reports',
'Search Request': 'Busqueda Solicitud',
'Search Request Items': 'Busqueda Requerimiento Insumos',
'Search Requests': 'Buscar Solicitudes',
'Search Resources': 'Búsqueda de Recursos',
'Search Responses': 'Busqueda Responses',
'Search Rivers': 'Busqueda Rivers',
'Search Roles': 'Buscar Roles',
'Search SMS InBox': 'Buscar Buzón de SMS',
'Search SMS OutBox': 'Buscar SMS Bandeja de salida',
'Search School Districts': 'Busqueda School Districts',
'Search School Reports': 'Busqueda School Reports',
'Search Sections': 'Búsqueda Secciones',
'Search Sectors': 'Sectores de la búsqueda',
'Search Sent Email': 'Buscar Correos Electrónicos Enviados',
'Search Sent Items': 'Search Sent Items',
'Search Sent SMS': 'La búsqueda ha enviado SMS',
'Search Sent Shipments': 'Search Sent Shipments',
'Search Service Profiles': 'Búsqueda de perfiles de servicio',
'Search Settings': 'Configuración de búsqueda',
'Search Shelter Services': 'Busqueda Albergue Services',
'Search Shelter Types': 'Busqueda Albergue Tipos',
'Search Shelters': 'Buscar Refugios',
'Search Shipment Transit Logs': 'Tránsito de la búsqueda envío Registros',
'Search Shipment/Way Bills': 'El envío de la búsqueda / Camino proyectos de ley',
'Search Shipment<>Item Relation': 'El envío de búsqueda<>Tema relación',
'Search Site(s)': 'La búsqueda del sitio (s)',
'Search Skill Types': 'Busqueda Habilidad Tipos',
'Search Skills': 'Busqueda Habilidades',
'Search Solutions': 'Busqueda Solutions',
'Search Sources': 'Fuentes de búsqueda',
'Search Staff': 'Busqueda Staff',
'Search Staff Types': 'Tipos de búsqueda de personal',
'Search Status': 'Search Status',
'Search Storage Bin Type(s)': 'Almacenamiento de la búsqueda Bin Tipo (s)',
'Search Storage Bin(s)': 'Buscar cuba de almacenamiento (s)',
'Search Storage Location(s)': 'Buscar Ubicación de almacenamiento (s)',
'Search Subscriptions': 'Búsqueda Subscripciones',
'Search Tasks': 'Tareas de búsqueda',
'Search Teams': 'Busqueda Teams',
'Search Themes': 'Buscar Temas',
'Search Tickets': 'Busqueda Tickets',
'Search Tracks': 'Buscar Pista',
'Search Twitter Tags': 'Búsqueda por Etiquetas de Twitter',
'Search Units': 'Buscar unidades',
'Search Users': 'Buscar usuario',
'Search Volunteer Registrations': 'Buscar el registro de voluntarios',
'Search Volunteers': 'Search Volunteers',
'Search Warehouse Items': 'Search Warehouse Items',
'Search Warehouses': 'Search Warehouses',
'Search and Edit Group': 'Buscar y Editar grupo',
'Search and Edit Individual': 'Búsqueda y Edición individual',
'Search by ID Tag': 'Busqueda by ID Tag',
'Search by Skill Types': 'Search by Skill Types',
'Search for Items': 'Búsqueda de artículos',
'Search for a Hospital': 'Búsqueda por Hospital',
'Search for a Location': 'Busqueda for a Location',
'Search for a Person': 'Búsqueda por persona',
'Search for a Project': 'Búsqueda por proyecto',
'Search for a Request': 'Búsqueda por solicitud',
'Search here for a person in order to:': 'Search here for a person in order to:',
"Search here for a person's record in order to:": "Search here for a person's record in order to:",
'Search messages': 'Busqueda messages',
'Searching for different groups and individuals': 'Búsqueda por diferentes grupos e individuos',
'Secondary Server (Optional)': 'Secondary Server (Optional)',
'Seconds must be a number between 0 and 60': 'Segundos debe ser un número entre 0 y 60',
'Seconds must be between 0 and 60': 'Segundos deben ser entre 0 y 60',
'Section Details': 'Detalles de Sección',
'Section deleted': 'Section eliminada',
'Section updated': 'Sección actualizado',
'Sections': 'Sections',
'Sector': 'Sector',
'Sector Details': 'Sector Details',
'Sector added': 'Sector added',
'Sector deleted': 'Sector deleted',
'Sector updated': 'Sector updated',
'Sectors': 'Sectors',
'Security': 'Seguridad',
'Security Policy': 'Política de seguridad',
'Security Status': 'De estado de seguridad',
'Security problems': 'Security problems',
'Seen': 'Visto',
'Select 2 potential locations from the dropdowns.': 'Select 2 potential locations from the dropdowns.',
'Select Body': 'Seleccione Cuerpo',
'Select Items from this Warehouse': 'Select Items from this Warehouse',
'Select Photos': 'Seleccionar fotos',
'Select a location': 'Select a location',
"Select a person in charge for status 'assigned'": "Select a person in charge for status 'assigned'",
'Select a question from the list': 'Seleccione una pregunta de la lista',
'Select all that apply': 'Seleccione todo lo que aplique',
'Select an Organisation to see a list of offices': 'Seleccione una organización para conocer una lista de oficinas',
'Select an Organization to see a list of offices': 'Seleccione una Organización para ver una lista de las oficinas',
'Select the overlays for Assessments and Activities relating to each Need to identify the gap.': 'Select the overlays for Assessments and Activities relating to each Need to identify the gap.',
'Select the person assigned to this role for this project.': 'Seleccionar a la persona asignada a este rol para este proyecto.',
'Select the person associated with this scenario.': 'Seleccione la persona asociada con este escenario.',
'Selects whether to use a Modem, Tropo or other Gateway for sending out SMS': 'Selects whether to use a Modem, Tropo or other Gateway for sending out SMS',
'Selects whether to use the gateway or the Modem for sending out SMS': 'Selects whether to use the gateway or the Modem for sending out SMS',
'Self Registration': 'Autorregistro',
'Self-care': 'Cuidado de sí mismo',
'Self-registration': 'Self-registration',
'Send': 'Send',
'Send & Receive Email messages (e.g. for alerting)': 'Enviar y recibir mensajes de correo electrónico (por ejemplo, para alertar a)',
'Send & Receive SMS messages (e.g. for alerting)': 'Enviar y recibir mensajes SMS (por ejemplo, para alertar a)',
'Send Alerts using Email &/or SMS': 'Enviar alertas mediante correo electrónico y / o SMS',
'Send Email': 'Enviar correo',
'Send Mail': 'Send Mail',
'Send Notification': 'Send Notification',
'Send SMS': 'Enviar SMS',
'Send Shipment': 'Send Shipment',
'Send message': 'Enviar mensaje',
'Send new message': 'Enviar nuevo mensaje ',
'Sender': 'Remitente',
'Sends & Receives Alerts via Email & SMS': 'Envía y Recibe Alertas via Email & SMS',
'Senegal': 'Senegal',
'Senior (50+)': 'Mayor (50 +)',
'Sensitivity': 'Sensibilidad',
'Sent': 'Sent',
'Sent Email': 'Enviado Correo electrónico',
'Sent Item Details': 'Sent Item Details',
'Sent Item deleted': 'Sent Item deleted',
'Sent Item updated': 'Sent Item updated',
'Sent SMS': 'SMS enviados',
'Sent Shipment canceled': 'Sent Shipment canceled',
'Sent Shipment updated': 'Sent Shipment updated',
'Separate latrines for women and men': 'Separate latrines for women and men',
'Seraiki': 'Seraiki',
'Serbia': 'Serbia',
'Series': 'Series',
'Server': 'Server',
'Service': 'Servicio',
'Service Catalogue': 'Catálogo de Servicios',
'Service or Facility': 'Servicio',
'Service profile added': 'Perfil de servicio añadido',
'Service profile deleted': 'Perfil de servicio suprimido',
'Service profile updated': 'Perfil de servicio actualizado',
'Services': 'Servicios',
'Services Available': 'Servicios disponibles',
'Setting Details': 'Configuración de los detalles',
'Setting added': 'Configuración añadido',
'Setting deleted': 'Configuración eliminado',
'Setting updated': 'Configuración de actualización',
'Settings': 'Configuración',
'Settings updated': 'Configuración de actualización',
'Settings were reset because authenticating with Twitter failed': 'Settings were reset because authenticating with Twitter failed',
'Severity': 'Severity',
'Severity:': 'Severity:',
'Seychelles': 'Seychelles',
'Share a common Marker (unless over-ridden at the Feature level)': 'Compartir un Marcador común (a menos que prevalezca al nivel de Característica)',
'Shelter': 'Abrigo',
'Shelter & Essential NFIs': 'Albergue & Essential NFIs',
'Shelter Details': 'Albergue Detalles',
'Shelter Manager': 'Administrador de Refugios',
'Shelter Name': 'Nombre de Refugio',
'Shelter Registry': 'Registro de Albergues',
'Shelter Service': 'Servicios del Refugio',
'Shelter Service Details': 'Detalles del Servicio de albergue',
'Shelter Service added': 'Albergue Service agregado',
'Shelter Service deleted': 'Albergue Service eliminada',
'Shelter Service updated': 'Servicio de Refugio actualizado',
'Shelter Services': 'Servicios de Albergues',
'Shelter Type': 'Tipo de Refugio',
'Shelter Type Details': 'Albergue Tipo Detalles',
'Shelter Type added': 'Albergue Tipo agregado',
'Shelter Type deleted': 'Albergue Tipo eliminada',
'Shelter Type updated': 'Albergue Tipo updated',
'Shelter Types': 'Tipos de Refugio',
'Shelter Types and Services': 'Albergue Tipos and Services',
'Shelter added': 'Refugio añadido',
'Shelter deleted': 'Refugio eliminado',
'Shelter updated': 'Refugio actualización',
'Shelter/NFI assistance received/expected': 'Shelter/NFI assistance received/expected',
'Shelters': 'Refugios',
'Shipment Created': 'Shipment Created',
'Shipment Details': 'Shipment Details',
'Shipment Items': 'Shipment Items',
'Shipment Transit Log Details': 'Detalles del envío de Tránsito',
'Shipment Transit Log added': 'El envío de Tránsito agregó',
'Shipment Transit Log deleted': 'El envío de Tránsito eliminado',
'Shipment Transit Log updated': 'El envío de Tránsito actualizado',
'Shipment Transit Logs': 'Registros y tránsito de embarques',
'Shipment/Way Bill added': 'Envío Way Bill añadido',
'Shipment/Way Bills': 'Letras del envío / Camino',
'Shipment/Way Bills Details': 'Envío Detalles de las cartas de porte',
'Shipment/Way Bills deleted': 'Envío cartas de porte eliminado',
'Shipment/Way Bills updated': 'Envío cartas de porte actualización',
'Shipment<>Item Relation added': 'Envío<>Tema Relación añadido',
'Shipment<>Item Relation deleted': 'Envío<>Artículo Relación eliminada',
'Shipment<>Item Relation updated': 'Envío<>Tema Relación actualizada',
'Shipment<>Item Relations': 'Envío<>Relación de Artículos',
'Shipment<>Item Relations Details': 'Envío<>Detalles de Relación de Artículo',
'Shipments': 'Los envíos',
'Shipments To': 'Shipments To',
'Shooting': 'Shooting',
'Short Assessment': 'Short Assessment',
'Short Description': 'Short Description',
'Short Description: ': 'Short Description: ',
'Show Checklist': 'Lista de verificación Mostrar',
'Show on map': 'Mostrar en el mapa',
'Sierra Leone': 'Sierra Leona',
'Sindhi': 'Sindhi',
'Singapore': 'Singapur',
'Site': 'Sitio',
'Site Address': 'Site Address',
'Site Administration': 'Administración del Sitio',
'Site Description': 'Site Description',
'Site Details': 'Detalles del Sitio',
'Site ID': 'Site ID',
'Site Location Description': 'Descripción de la Localización de Lugar',
'Site Location Name': 'Site Location Name',
'Site Manager': 'Administrador del Sitio',
'Site Name': 'Nombre del sitio',
'Site added': 'Locación agregada',
'Site deleted': 'Sitio eliminado',
'Site updated': 'Actualización del sitio web',
'Site/Warehouse': 'Sitio / Galería',
'Sites': 'Sitios',
'Situation Awareness': 'Situación de Conciencia',
'Situation Awareness & Geospatial Analysis': 'Conciencia Situacional & Análisis Geoespacial',
'Situation Map': 'Mapa de situación',
'Sketch': 'Dibujo',
'Skill': 'Habilidad',
'Skill Details': 'Detalles Habilidades',
'Skill Status': 'Skill Status',
'Skill Type Details': 'Habilidad Tipo Detalles',
'Skill Type added': 'Tipo de Habilidad agregado',
'Skill Type deleted': 'Tipo Habilidad eliminada',
'Skill Type updated': 'Habilidad Tipo updated',
'Skill Types': 'Tipos de Habilidad',
'Skill added': 'Habilidad agregado',
'Skill deleted': 'Habilidad eliminada',
'Skill updated': 'Habilidad updated',
'Skills': 'Habilidades',
'Skype ID': 'Skype ID',
'Slovakia': 'Eslovaquia',
'Slovenia': 'Eslovenia',
'Small Trade': 'Small Trade',
'Smoke': 'Smoke',
'Smoking habits': 'Hábito de fumar',
'Snow Fall': 'Snow Fall',
'Snow Squall': 'Tormenta de Nieve ',
'Social': 'Social',
'Solicitudes': 'Requerimientos',
'Solid waste': 'Solid waste',
'Solomon Islands': 'Las Islas Salomón',
'Solution': 'Solution',
'Solution Details': 'Solution Detalles',
'Solution Item': 'Solución del artículo',
'Solution added': 'Solution agregado',
'Solution deleted': 'Solution eliminada',
'Solution updated': 'Solution updated',
'Solutions': 'Solutions',
'Somalia': 'Somalia',
'Somaliland': 'Somalilandia',
'Some': 'Some',
'Sorry - the server has a problem, please try again later.': 'Lo sentimos - el servidor tiene un problema, por favor, inténtelo de nuevo más tarde.',
'Sorry that location appears to be outside the area of the Parent.': 'Sorry that location appears to be outside the area of the Parent.',
'Sorry that location appears to be outside the area supported by this deployment.': 'Sorry that location appears to be outside the area supported by this deployment.',
'Sorry, I could not understand your request': 'Sorry, I could not understand your request',
'Sorry, only users with the MapAdmin role are allowed to edit these locations': 'Lo siento, solo usuarios con el rol MapAdmin tienen permitidos editar esas localizaciones',
'Sorry, something went wrong.': 'Sorry, something went wrong.',
'Sorry, that page is forbidden for some reason.': 'Lo sentimos, por alguna razón esa página está prohibida.',
'Sorry, that service is temporary unavailable.': 'Lo sentimos, este servicio no se encuentra disponible temporalmente.',
'Sorry, there are no addresses to display': 'Sorry, there are no addresses to display',
"Sorry, things didn't get done on time.": "Sorry, things didn't get done on time.",
"Sorry, we couldn't find that page.": "Sorry, we couldn't find that page.",
'Source': 'Fuente',
'Source Details': 'Detalles Fuente',
'Source ID': 'Fuente ID',
'Source Time': 'Fuente Time',
'Source Type': 'Tipo de fuente',
'Source added': 'Fuente añadido',
'Source deleted': 'Fuente eliminado',
'Source of Information': 'Fuente de Información',
'Source updated': 'Fuente actualización',
'Sources': 'Fuentes',
'South Africa': 'Sudáfrica',
'South Ossetia': 'Osetia del Sur',
'Space Debris': 'Space Debris',
'Spain': 'España',
'Spanish': 'Spanish',
'Special Ice': 'Special Ice',
'Special Marine': 'Marino Especial',
'Special needs': 'Necesidades especiales',
'Specialized Hospital': 'Hospital Especializado',
'Specific Area (e.g. Building/Room) within the Location that this Person/Group is seen.': 'Specific Area (e.g. Building/Room) within the Location that this Person/Group is seen.',
'Specific locations need to have a parent of level': 'Specific locations need to have a parent of level',
'Specify a descriptive title for the image.': 'Especificar un título descrptivo para esta imagen.',
'Specify the bed type of this unit.': 'Especifique el tipo de cama de esta unidad.',
'Specify the minimum sustainability in weeks or days.': 'Specify the minimum sustainability in weeks or days.',
'Specify the number of available sets': 'Specify the number of available sets',
'Specify the number of available units (adult doses)': 'Specify the number of available units (adult doses)',
'Specify the number of available units (litres) of Ringer-Lactate or equivalent solutions': 'Specify the number of available units (litres) of Ringer-Lactate or equivalent solutions',
'Specify the number of sets needed per 24h': 'Specify the number of sets needed per 24h',
'Specify the number of units (adult doses) needed per 24h': 'Specify the number of units (adult doses) needed per 24h',
'Specify the number of units (litres) of Ringer-Lactate or equivalent solutions needed per 24h': 'Specify the number of units (litres) of Ringer-Lactate or equivalent solutions needed per 24h',
'Spherical Mercator?': 'Mercator esférica?',
'Spreadsheet Importer': 'Spreadsheet Importer',
'Spreadsheet uploaded': 'Hoja de cálculo subida',
'Spring': 'Spring',
'Squall': 'Squall',
'Sri Lanka': 'Sri Lanka',
'Staff': 'Personal',
'Staff 2': 'Personal 2',
'Staff Details': 'Staff Detalles',
'Staff Type Details': 'Detalles del tipo de personal',
'Staff Type added': 'Tipo de Personal añadido',
'Staff Type deleted': 'Personal Tipo eliminado',
'Staff Type updated': 'Tipo de Personal actualizado',
'Staff Types': 'Tipos de Personal',
'Staff added': 'Staff agregado',
'Staff deleted': 'Staff eliminada',
'Staff present and caring for residents': 'Staff present and caring for residents',
'Staff updated': 'Staff updated',
'Staffing': 'Dotación de personal',
'Start date': 'Fecha de inicio',
'Start of Period': 'Start de Period',
'State': 'State',
'Stationery': 'Papelería',
'Status': 'Estado',
'Status Report': 'Informe de situación',
'Status added': 'Status added',
'Status deleted': 'Status deleted',
'Status of clinical operation of the facility.': 'Status of clinical operation of the facility.',
'Status of general operation of the facility.': 'Status of general operation of the facility.',
'Status of morgue capacity.': 'Status of morgue capacity.',
'Status of operations of the emergency department of this hospital.': 'Estado de operaciones del departamento de emergencia de este hospital.',
'Status of security procedures/access restrictions in the hospital.': 'Estado de los procedimientos de seguridad y restricciones de acceso en el hospital.',
'Status of the operating rooms of this hospital.': 'Estado de las salas de operaciones de este hospital.',
'Status updated': 'Status updated',
'Storage Bin': 'Storage Bin',
'Storage Bin Details': 'Almacenamiento de Datos Bin',
'Storage Bin Number': 'Almacenamiento Número Bin',
'Storage Bin Type': 'Storage Bin Type',
'Storage Bin Type Details': 'Almacenamiento de Datos Bin Tipo',
'Storage Bin Type added': 'Recipiente de almacenamiento Tipo añadido',
'Storage Bin Type deleted': 'Recipiente de almacenamiento Tipo eliminado',
'Storage Bin Type updated': 'Recipiente de almacenamiento Tipo actualización',
'Storage Bin Types': 'Tipos de almacenamiento Bin',
'Storage Bin added': 'Recipiente de almacenamiento añadido',
'Storage Bin deleted': 'Recipiente de almacenamiento eliminado',
'Storage Bin updated': 'Recipiente de almacenamiento actualización',
'Storage Bins': 'Almacenamiento de Contenedores',
'Storage Location': 'Ubicación de almacenamiento',
'Storage Location Details': 'Detalles Ubicación de almacenamiento',
'Storage Location ID': 'Identificación de localización de almacenamiento',
'Storage Location Name': 'Lugar Nombre de almacenamiento',
'Storage Location added': 'Ubicación de almacenamiento añadido',
'Storage Location deleted': 'Ubicación de almacenamiento eliminado',
'Storage Location updated': 'Ubicación de almacenamiento actualización',
'Storage Locations': 'Ubicaciones de almacenamiento',
'Store spreadsheets in the Eden database': 'Store spreadsheets in the Eden database',
'Storm Force Wind': 'Fuertes ráfagas de viento',
'Storm Surge': 'Storm Surge',
'Stowaway': 'Stowaway',
'Street': 'Calle',
'Street (add.)': 'Calle (dirección)',
'Street (continued)': 'Calle (continuada)',
'Street Address': 'Dirección',
'Strong Wind': 'Strong Wind',
'Sub Category': 'Sub Categoría',
'Sub-type': 'Subtipo',
'SubType': 'Subtipo',
'Subject': 'Tema',
'Submission Succesful': 'Presentación exitosa',
'Submission successful - please wait': 'Submission successful - please wait',
'Submission successful - please wait...': 'Presentado con éxito - por favor espere...',
'Submit': 'Enviar',
'Submitting information about the individual such as identification numbers, physical appearance, last seen location, status, etc': 'Envío de información sobre el individuo, tales como números de identificación, el aspecto físico, la ubicación visto por última vez, el estado, etc',
'Subscription Details': 'Subscription Detalles',
'Subscription added': 'Subscription agregado',
'Subscription deleted': 'Subscription eliminada',
'Subscription updated': 'Subscription updated',
'Subscriptions': 'Suscripciones',
'Subsistence Cost': 'Costo de subsistencia',
'Sudan': 'Sudán',
'Sufficient care/assistance for chronically ill': 'Sufficient care/assistance for chronically ill',
'Suggest not changing this field unless you know what you are doing.': 'Se sugiere no cambiar este campo a menos que sepa lo que está haciendo.',
'Summary': 'Summary',
'Sunday': 'Sunday',
'Support Request': 'Solicitud de Soporte',
'Supports the decision making of large groups of Crisis Management Experts by helping the groups create ranked list.': 'Soporta la toma de decisiones de grandes grupos de expertos en administración de crisis ayudando a crear los grupos de clasificación pertinentes',
'Sure you want to delete this object?': '¿Seguro que quieres eliminar este objeto?',
'Surgery': 'Cirugía',
'Suriname': 'Surinam',
'Survey Answer': 'Inspección Answer',
'Survey Answer Details': 'Inspección Answer Detalles',
'Survey Answer added': 'Inspección Answer agregado',
'Survey Answer deleted': 'Respuesta Cuestionario eliminada',
'Survey Answer updated': 'Inspección Answer updated',
'Survey Module': 'Survey Module',
'Survey Name': 'Inspección Name',
'Survey Question': 'Inspección Question',
'Survey Question Details': 'Detalles de preguntas de Cuestionario',
'Survey Question Display Name': 'Inspección Question Display Name',
'Survey Question added': 'Inspección Question agregado',
'Survey Question deleted': 'Inspección Question eliminada',
'Survey Question updated': 'Inspección Question updated',
'Survey Section': 'Inspección Section',
'Survey Section Details': 'Inspección Section Detalles',
'Survey Section Display Name': 'Inspección Section Display Name',
'Survey Section added': 'Inspección Section agregado',
'Survey Section deleted': 'Sección Cuestionario eliminada',
'Survey Section updated': 'Cuestionario de Sección actualizado',
'Survey Series': 'Inspección Series',
'Survey Series Details': 'Inspección Series Detalles',
'Survey Series Name': 'Nombre Serie Cuestionario',
'Survey Series added': 'Inspección Series agregado',
'Survey Series deleted': 'Inspección Series eliminada',
'Survey Series updated': 'Inspección Series updated',
'Survey Template': 'Plantilla de las Encuestas',
'Survey Template Details': 'Inspección Template Detalles',
'Survey Template added': 'Inspección Template agregado',
'Survey Template deleted': 'Inspección Template eliminada',
'Survey Template updated': 'Inspección Template updated',
'Survey Templates': 'Inspección Templates',
'Swaziland': 'Swazilandia',
'Sweden': 'Suecia',
'Switch this on to use individual CSS/Javascript files for diagnostics during development.': 'Switch this on to use individual CSS/Javascript files for diagnostics during development.',
'Switzerland': 'Suiza',
'Symbology': 'Simbología',
'Sync Conflicts': 'Sync Conflicts',
'Sync History': 'Sync Historia',
'Sync Now': 'Sincronizar ahora',
'Sync Partners': 'Sync Socios',
'Sync Partners are instances or peers (SahanaEden, SahanaAgasti, Ushahidi, etc.) that you want to sync information with. Click on the link on the right to go the page where you can add sync partners, search for sync partners and modify them.': 'Socios de Sincronización son instancias o compañeros (SahanaEden, SahanaAgasti, Ushahidi, etc) con las que usted desea sincronizar información. Haga clic en el enlace de la derecha para ir a la página donde usted puede agregar los socios de sincronización, buscar socios de sincronización y modificarlos.',
'Sync Password': 'Sync Password',
'Sync Policy': 'Sync Policy',
'Sync Pools': 'Sync Pools',
'Sync Schedule': 'Sync Schedule',
'Sync Schedules': 'Sync Schedules',
'Sync Settings': 'Ajustes de sincronización',
'Sync Settings updated': 'Ajustes de sincronización actualizada',
'Sync Username': 'Sync Username',
'Sync process already started on ': 'Proceso de sincronización ya iniciado en ',
'Synchronisation': 'Sincronización',
'Synchronisation - Sync Now': 'Synchronisation - Sync Now',
'Synchronisation History': 'Historia de sincronización',
'Synchronization': 'Sincronización',
'Synchronization Conflicts': 'Synchronization Conflicts',
'Synchronization Details': 'Synchronization Details',
'Synchronization History': 'Synchronization History',
'Synchronization Peers': 'Synchronization Peers',
'Synchronization Settings': 'Configuración de Sincronización',
'Synchronization allows you to share data that you have with others and update your own database with latest data from other peers. This page provides you with information about how to use the synchronization features of Sahana Eden': 'Synchronization allows you to share data that you have with others and update your own database with latest data from other peers. This page provides you with information about how to use the synchronization features of Sahana Eden',
'Synchronization not configured': 'Synchronization not configured',
'Synchronization not configured.': 'Synchronization not configured.',
'Synchronization settings updated': 'Synchronization settings updated',
'Syncronisation History': 'Historia de Sincronización',
'Syncronisation Schedules': 'Sincronización de Horarios',
'Syria': 'Siria',
'System allows the tracking & discovery of Items stored in Locations.': 'El sistema permite el seguimiento y localización de artículos almacenados en Ubicaciones.',
'System is a central online repository where all relief organizations, relief workers, government agents and camp sites for displaced personnel can coordinate the supply of aid with their demand. It allows users to allocate the available resources to fulfill the demands effectively and efficiently.': 'Del sistema es un repositorio en línea central donde todas las organizaciones de socorro, personal de socorro, los agentes del gobierno y campamentos para el personal de desplazados puede coordinar el suministro de ayuda con su demanda. Permite a los usuarios asignar los recursos disponibles para satisfacer las demandas de manera eficaz y eficiente.',
'System keeps track of all Volunteers working in the disaster region. It captures not only the places where they are active, but also captures information on the range of services they are providing in each area.': 'El sistema lleva seguimiento de todos los voluntarios trabajando en la zona de desastre. Registra también los lugares donde ellos se encuentran activos y los servicios que proveen en cada área.',
"System's Twitter account updated": "System's Twitter account updated",
'São Tomé and Príncipe': 'Santo Tomé y Príncipe',
'TC': 'TC',
'Table name': 'Nombre de la tabla',
'Tags': 'Etiquetas',
'Taiwan': 'Taiwan',
'Tajikistan': 'Tayikistán',
'Take shelter in place or per <instruction>': 'Tomar refugio en lugar o por <instrucción>',
'Tanzania': 'Tanzania',
'Task Details': 'Tarea Detalles',
'Task List': 'Lista de tareas',
'Task Status': 'Estado de la tarea',
'Task added': 'Tarea añadida',
'Task deleted': 'Acción suprimió',
'Task status': 'Estado de la tarea',
'Task updated': 'Tarea actualización',
'Tasks': 'Tareas',
'Team': 'Equipo ',
'Team Description': 'Equipo Description',
'Team Details': 'Equipo Detalles',
'Team Head': 'Equipo Head',
'Team Id': 'Equipo Id',
'Team Leader': 'Jefe de equipo',
'Team Member added': 'Equipo Member agregado',
'Team Members': 'Equipo Members',
'Team Name': 'Equipo Name',
'Team Type': 'Tipo de Equipo',
'Team added': 'Equipo agregado',
'Team deleted': 'Equipo eliminada',
'Team updated': 'Equipo updated',
'Teams': 'Equipos',
'Technical testing only, all recipients disregard': 'Technical testing only, all recipients disregard',
'Teeth': 'Dientes',
'Teeth, Dentures': 'Dientes, prótesis dentales',
'Teeth, Gaps between front teeth': 'Dientes, Espacios entre los dientes delanteros',
'Teeth, Missing teeth': 'Dientes, dientes que faltan',
'Teeth, Toothless': 'Los dientes, sin dientes',
'Telecommunications': 'Telecommunications',
'Telephone': 'Teléfono',
'Telephony': 'Telefonía',
'Temp folder %s not writable - unable to apply theme!': 'Directorio Temporal %s no se puede escribir - imposible de aplicar el tema!',
'Template file %s not readable - unable to apply theme!': 'Plantilla %s no se puede leer el archivo - imposible de aplicar el tema!',
'Templates': 'Plantillas',
'Terrorism': 'Terrorism',
'Tertiary Server (Optional)': 'Tertiary Server (Optional)',
'Test Results': 'Resultados de pruebas',
'Text': 'Texto',
'Text Colour for Text blocks': 'Color del texto para bloques de texto',
'Text Direction': 'Dirección del texto',
'Text before each Text Field (One per line)': 'Text before each Text Field (One per line)',
'Text in Message': 'El texto de mensaje',
'Text in Message: ': 'Text in Message: ',
'Thailand': 'Tailandia',
'Thanks for your assistance': 'Gracias por su asistencia',
'The': 'El',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1 == db.table2.field2" results in a SQL JOIN.': 'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1 == db.table2.field2" results in a SQL JOIN.',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'El "query" es una condición como "db.table1.field1 == \'valor\'". Algo así como "db.table1.field1 == db.table2.field2" resulta en un JOIN de SQL.',
'The Area which this Site is located within.': 'El Area dentro que este Sitio se encuentra ubicado.',
'The Assessments module allows field workers to send in assessments.': 'El modulo de Evaluaciones permite a los trabajadores de campo ingresar evaluaciones.',
'The Author of this Document (optional)': 'The Author de this Document (optional)',
'The Current Location of the Person/Group, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations.': 'The Current Location of the Person/Group, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations.',
'The District for this Report.': 'The District for this Report.',
"The Donor(s) for this project. Multiple values can be selected by holding down the 'Control' key.": "The Donor(s) for this project. Multiple values can be selected by holding down the 'Control' key.",
'The Gambia': 'Gambia',
'The Group whose members can edit data in this record.': 'El Grupo cuyos miembros pueden editar datos en este registro.',
'The Incident Reporting System allows the General Public to Report Incidents & have these Tracked.': 'El modulo de Reporte de Incidentes permite al público general reportar incidentes y hacerles seguimiento.',
'The Location of this Site, which can be general (for Reporting) or precise (for displaying on a Map).': 'The Location de this Site, which can be general (for Reporting) or precise (for displaying on a Map).',
'The Location the Person has come from, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations.': 'The Location the Person has come from, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations.',
'The Location the Person is going to, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations.': 'The Location the Person is going to, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations.',
'The Media Library provides a catalogue of digital media.': 'El módulo de Documentos y Fotos ofrece un catálogo de los medios digitales.',
'The Messaging Module is the main communications hub of the Sahana system. It is used to send alerts and/or messages using SMS & Email to various groups and individuals before, during and after a disaster.': 'El modulo de Mensajería es el núcleo principal de comunicaciones del sistema Sahana, Es utilizado para enviar alertas y/o mensajes utilizando SMS y correo electrónic a varios grupos e individuos antes, durante y después de un desastre.',
'The Office this record is associated with.': 'The Office this record is associated with.',
'The Organisation which is funding this Activity.': 'La Organización que está financiando esta Actividad.',
'The Organization Registry keeps track of all the relief organizations working in the disaster region. It captures not only the places where they are active, but also captures information on the range of projects they are providing in each area.': 'El Registro de Organizaciones permite un seguimiento de todas las organizaciones de auxilio que están trabajando en la región de desastre. Permite capturar no solamente los lugares donde ellas está activas, sino también información acerca del rango de proyectos que ellas están sirviendo en cada área.',
'The Organization this record is associated with.': 'La Organización con la que esta grabación se encuentra asociada.',
'The Organization which is funding this Activity.': 'La Organización que está financiando esta Actividad.',
'The Project Tracking module allows the creation of Activities to meet Gaps in Needs Assessments.': 'El modulo de Seguimiento de Proyectos permite la creación de actividades para cerrar brechas en las necesidades evaluadas.',
'The Rapid Assessments Module stores structured reports done by Professional Organisations.': 'El Módulo de Evaluaciones Rápidas almacena informes estructurados realizados por Organizaciones Profesionales.',
'The Rapid Assessments Module stores structured reports done by Professional Organizations.': 'The Rapid Assessments Module stores structured reports done by Professional Organizations.',
'The Request this record is associated with.': 'El Requerimiento con el que está asociado este registro.',
'The Role this person plays within this Office/Project.': 'El papel que esta persona desempeña dentro de esta Oficina/Proyecto.',
'The Role this person plays within this hospital.': 'The Role this person plays within this hospital.',
"The Sector(s) this organization works in. Multiple values can be selected by holding down the 'Control' key.": "El sector(es) de esta organización funciona en. Valores múltiplwa pueden ser seleccionadas mientras se unde el teclado de 'Control'.",
'The Shelter Registry tracks all shelters and stores basic details regarding them. It collaborates with other modules to track people associated with a shelter, the services available etc.': 'El módulo de Refugios permite realizar el seguimiento de todos los refugios y almacena detalles básicos acerca de ellos. Colabora con otros módulos para hacer seguimiento a personas asociadas con un refugio, los servicios disponibles, etc.',
'The Shelter this Request is from (optional).': 'The Albergue this Requerimiento nis from (optional).',
'The Source this information came from.': 'The Fuente this information came from.',
'The URL for the GetCapabilities of a WMS Service whose layers you want accessible via the Map.': 'The URL for the GetCapabilities of a WMS Service whose layers you want accessible via the Map.',
"The URL of the image file. If you don't upload an image file, then you must specify its location here.": "The URL of the image file. If you don't upload an image file, then you must specify its location here.",
'The URL of your web gateway without the post parameters': 'The URL of your web gateway without the post parameters',
'The URL to access the service.': 'The URL to access the service.',
'The Unique Identifier (UUID) as assigned to this facility by the government.': 'The Unique Identifier (UUID) as assigned to this facility by the government.',
'The area was ': 'El área fue',
'The attribute within the KML which is used for the title of popups.': 'The attribute within the KML which is used for the title of popups.',
'The attribute(s) within the KML which are used for the body of popups. (Use a space between attributes)': 'The attribute(s) within the KML which are used for the body of popups. (Use a space between attributes)',
'The body height (crown to heel) in cm.': 'The body height (crown to heel) in cm.',
'The category of the Item.': 'La categoría del Artículo',
'The contact person for this organization.': 'La persona de contacto para esta organización. ',
'The country the person usually lives in.': 'The country the person usually lives in.',
'The default policy for data import from this peer.': 'La política por defecto para la importación de datos desde este par.',
'The descriptive name of the peer.': 'Nombre descriptivo del par.',
'The duplicate record will be deleted': 'The duplicate record will be deleted',
'The entered unit links to this unit. For e.g. if you are entering m for meter then choose kilometer(if it exists) and enter the value 0.001 as multiplicator.': 'The entered unit links to this unit. For e.g. if you are entering m for meter then choose kilometer(if it exists) and enter the value 0.001 as multiplicator.',
'The first or only name of the person (mandatory).': 'The first or only name of the person (mandatory).',
'The following modules are available': 'Los siguientes módulos están disponibles',
'The hospital this record is associated with.': 'El hospital al cual está asociado este registro.',
'The item is designated to be sent for specific project, population, village or other earmarking of the donation such as a Grant Code.': 'El artículo es designado a ser enviado para un proyecto específico, pueblo o otro tipo de la donación asi como un Código de Acceso. ',
'The language to use for notifications.': 'The language to use for notifications.',
'The last known location of the missing person before disappearance.': 'The last known location of the missing person before disappearance.',
'The last known location of the missing person.': 'The last known location of the missing person.',
'The length was ': 'La longitud fue de ',
'The list of Item categories are maintained by the Administrators.': 'La lista de categorías del artículo son mantenidos por los administradores.',
'The name to be used when calling for or directly addressing the person (optional).': 'The name to be used when calling for or directly addressing the person (optional).',
'The next screen will allow you to detail the number of people here & their needs.': 'The next screen will allow you to detail the number of people here & their needs.',
'The next screen will allow you to enter a detailed list of items and quantities, if appropriate...': 'La siguiente pantalla te permitirá ingresar a una lista detallada de objetos y cantidades, sea el adecuado...',
'The number of tiles around the visible map to download. Zero means that the 1st page loads faster, higher numbers mean subsequent panning is faster.': 'The number of tiles around the visible map to download. Zero means that the 1st page loads faster, higher numbers mean subsequent panning is faster.',
'The person at the location who is reporting this incident (optional)': 'The person at the location who is reporting this incident (optional)',
'The person reporting about the missing person.': 'La persona que reporta acerca de la persona perdida.',
'The person reporting the missing person.': 'The person reporting the missing person.',
"The person's manager within this Office/Project.": 'El administrador de personas dentro de esta Oficina/Proyecto.',
'The post variable containing the phone number': 'The post variable containing the phone number',
'The post variable on the URL used for sending messages': 'El nombre varia sobre la URL utilizada para enviar mensajes',
'The post variables other than the ones containing the message and the phone number': 'Las variables pos-diferentes a las que contienen el mensaje y el número telefónico',
'The request this record is associated with.': 'La petición con la que esta grabación se encuentra asociada. ',
'The scanned copy of this document.': 'The scanned copy of this document.',
'The serial port at which the modem is connected - /dev/ttyUSB0, etc on linux and com1, com2, etc on Windows': 'The serial port at which the modem is connected - /dev/ttyUSB0, etc on linux and com1, com2, etc on Windows',
'The server did not receive a timely response from another server that it was accessing to fill the request by the browser.': 'El servidor no recibió una respuesta a tiempo de otro servidor que fue accedido para llenar el requerimiento del navegador.',
'The server received an incorrect response from another server that it was accessing to fill the request by the browser.': 'El servidor recibió una respuesta incorrecta de otro servidor que estaba siendo accedido para cumplir la solicitud del explorador.',
'The simple policy allows anonymous users to Read & registered users to Edit. The full security policy allows the administrator to set permissions on individual tables or records - see models/zzz.py.': 'La política simple permite a los usuarios anónimos a Leer y a los usuarios registrados a Editar. La política de seguridad completa permite al administrador establecer permisos sobre tablas individuales o registros - ver models/zzz.py.',
'The subject event no longer poses a threat or concern and any follow on action is described in <instruction>': 'El evento ya no posee un riesgo ni preocupación y cualquier acción se describe como <instrucción>',
'The title of the WMS Browser panel in the Tools panel.': 'The title of the WMS Browser panel in the Tools panel.',
'The token associated with this application on': 'The token associated with this application on',
'The unique identifier of the peer. Leave blank if the peer is no Sahana Eden instance, it will be auto-assigned in that case.': 'The unique identifier of the peer. Leave blank if the peer is no Sahana Eden instance, it will be auto-assigned in that case.',
'The unique identifier which identifies this instance to other instances.': 'El identificador único que identifica este ejemplos a otros ejemplos. ',
'The weight in kg.': 'The weight in kg.',
'Theme': 'Tema',
'Theme Details': 'Tema Detalles',
'Theme added': 'Tema añadido',
'Theme deleted': 'Tema eliminado',
'Theme updated': 'Tema actualizado',
'Themes': 'Temas',
'There are errors': 'There are errors',
'There are multiple records at this location': 'Hay múltiples registros en esta ubicación',
'There are not sufficient items in the store to send this shipment': 'There are not sufficient items in the store to send this shipment',
'There was a problem, sorry, please try again later.': 'Ha habido un problema, lo siento, por favor, inténtelo de nuevo más tarde.',
'These are settings for Inbound Mail.': 'Esas son las configuraciones para el Correo de Entrada.',
'These are the Incident Categories visible to normal End-Users': 'These are the Incident Categories visible to normal End-Users',
'These are the default settings for all users. To change settings just for you, click ': 'These are the default settings for all users. To change settings just for you, click ',
'They': 'Ellos',
'This appears to be a duplicate of ': 'This appears to be a duplicate of ',
'This file already exists on the server as': 'This file already exists on the server as',
'This form allows the administrator to remove a duplicate location by 1st updating all references to it by a different location.': 'Este formulario permite al administrador eliminar una ubicación duplicada actualizando 1ro todas las referencias a él por una ubicación diferente.',
'This form allows the administrator to remove a duplicate location.': 'Este formulario permite al administrador eliminar una ubicación duplicada.',
'This is the way to transfer data between machines as it maintains referential integrity.': 'This is the way to transfer data between machines as it maintains referential integrity.',
'This is the way to transfer data between machines as it maintains referential integrity...duplicate data should be removed manually 1st!': 'This is the way to transfer data between machines as it maintains referential integrity...duplicate data should be removed manually 1st!',
'This might be due to a temporary overloading or maintenance of the server.': 'Esto puede ser debido a una sobrecarga temporal o el mantenimiento del servidor.',
'This module assists the management of fatalities and the identification of the deceased.': 'Este módulo ayuda a la gestión de las muertes y la identificación de los fallecidos.',
'This page provides you with information about how to use the automatic synchronization feature of Sahana': 'Esta página le ofrece información acerca de cómo utilizar la característica de sincronización automática de Sahana',
'This page shows you logs of past syncs. Click on the link below to go to this page.': 'This page shows you logs of past syncs. Click on the link below to go to this page.',
'This screen allows you to upload a collection of photos to the server.': 'Esta pantalla le permite cargar una colección de fotos en el servidor.',
'Thunderstorm': 'Thunderstorm',
'Thursday': 'Thursday',
'Ticket': 'Boleto',
'Ticket Details': 'Detalles Tiquete',
'Ticket added': 'Tiquete agregado',
'Ticket deleted': 'Tiquete eliminado',
'Ticket updated': 'Ticket updated',
'Ticketing Module': 'Módulo de Tickets',
'Tickets': 'Entradas',
'Time Stamp': 'Marca de Fecha',
'Time at which data was exchanged.': 'Hora en que datos fueron intercambiados.',
'Time needed to collect water': 'Time needed to collect water',
'Time of Request': 'Momento de la solicitud',
'Timestamp': 'Timestamp',
'Title': 'Título',
'To Location': 'To Location',
'To access Sahana documentation, go to': 'Para acceder a la documentación Sahana, vaya a',
'To begin the sync process, click the button on the right => ': 'Para iniciar el proceso de sincronización, presionar el botón a la derecha =>',
'To begin the sync process, click this button => ': 'Para empezar el proceso de sincronización, hacer clic en este botón =>',
'To delete': 'To delete',
'To edit OpenStreetMap, you need to edit the OpenStreetMap settings in models/000_config.py': 'To edit OpenStreetMap, you need to edit the OpenStreetMap settings in models/000_config.py',
"To search for a body, enter the ID label of the body. You may use % as wildcard. Press 'Search' without input to list all bodies.": 'Para buscar un cuerpo, entrar en la etiqueta de ID del cuerpo. Usted puede usar % como comodín. Presione "Buscar" sin ninguna entrada para listar todos los cuerpos.',
"To search for a body, enter the ID tag number of the body. You may use % as wildcard. Press 'Search' without input to list all bodies.": "To search for a body, enter the ID tag number of the body. You may use % as wildcard. Press 'Search' without input to list all bodies.",
"To search for a hospital, enter any of the names or IDs of the hospital, separated by spaces. You may use % as wildcard. Press 'Search' without input to list all hospitals.": "To search for a hospital, enter any of the names or IDs of the hospital, separated by spaces. You may use % as wildcard. Press 'Search' without input to list all hospitals.",
"To search for a hospital, enter any part of the name or ID. You may use % as wildcard. Press 'Search' without input to list all hospitals.": 'Para buscar un hospital, ingrese cualquier parte del nombre o de su identifiación. Usted puede usar % como comodín. Si presiona "Buscar" sin ninguna entrada se listarán todos los hospitales.',
"To search for a location, enter the name. You may use % as wildcard. Press 'Search' without input to list all locations.": "To search for a location, enter the name. You may use % as wildcard. Press 'Search' without input to list all locations.",
"To search for a person, enter any of the first, middle or last names and/or an ID number of a person, separated by spaces. You may use % as wildcard. Press 'Search' without input to list all persons.": "Para buscar una persona, ingrese ya sea su nombre o apellido y/o un Número de Identificación de la persona, separados por espacios. Usted puede usar % como comodín. Para listar todas las personas, presione 'Buscar' sin ninguna entrada.",
"To search for a request, enter some of the text that you are looking for. You may use % as wildcard. Press 'Search' without input to list all requests.": "Para buscar una petición, ingrese parte del texto que usted está buscando. Puede utilizar % como comodín. Presione 'Buscar' sin ninguna entrada para listar todas las peticiones.",
'To submit a new job, use the': 'Para presentar un nuevo trabajo, utilice la',
'To variable': 'Para variable',
'Togo': 'Togo',
'Tonga': 'Tonga',
'Tools': 'Instrumentos',
'Tornado': 'Tornado',
'Total # of Beneficiaries Reached ': 'Total # of Beneficiaries Reached ',
'Total # of Target Beneficiaries': 'Total # de Target Beneficiaries',
'Total # of households of site visited': 'Total # de households de site visited',
'Total Beds': 'Camas totales',
'Total Beneficiaries': 'Total Beneficiaries',
'Total Cost per Megabyte': 'Costo total por Megabyte',
'Total Cost per Minute': 'Costo total por minuto',
'Total Households': 'Total Households',
'Total Monthly': 'Total Mensual',
'Total Monthly Cost': 'Costo total mensual',
'Total Monthly Cost: ': 'Costo total mensual:',
'Total No of Affectees (Including Students, Teachers & Others)': 'Total No de Affectees (Including Students, Teachers & Others)',
'Total No of Female Affectees (Including Students, Teachers & Others)': 'Total No de Female Affectees (Including Students, Teachers & Others)',
'Total No of Male Affectees (Including Students, Teachers & Others)': 'Total No de Male Affectees (Including Students, Teachers & Others)',
'Total No of Students (Primary To Higher Secondary) in the Total Affectees': 'Total No de Students (Primary To Higher Secondary) in the Total Affectees',
'Total No of Teachers & Other Govt Servants in the Total Affectees': 'Número Total de Profesores & Otros Servidores Gubernamentales en el Total de Afectados',
'Total One-time Costs': 'Total de gastos no recurrentes',
'Total Persons': 'Total Persons',
'Total Recurring Costs': 'Total de costos recurrentes',
'Total Unit Cost': 'Coste unitario total',
'Total Unit Cost: ': 'Costo total de la unidad:',
'Total Units': 'Unidades totales',
'Total number of beds in this hospital. Automatically updated from daily reports.': 'Total number of beds in this hospital. Automatically updated from daily reports.',
'Total number of houses in the area': 'Número total de casas en el área',
'Total number of schools in affected area': 'Total number de schools in affected area',
'Total population of site visited': 'Total population de site visited',
'Totals for Budget:': 'Los totales para el Presupuesto:',
'Totals for Bundle:': 'Los totales para el paquete:',
'Totals for Kit:': 'Los totales para el Kit:',
'Tourist Group': 'Grupo de Turismo',
'Town': 'Ciudad',
'Traces internally displaced people (IDPs) and their needs': 'Traza personas desplazadas internamente (PDIs) y sus necesidades',
'Tracing': 'Rastreo',
'Track': 'Pista',
'Track Details': 'Detalles de pista',
'Track deleted': 'Pista eliminado',
'Track updated': 'Pista actualizada',
'Track uploaded': 'Pista subida',
'Tracking and Tracing of Persons and Groups': 'Seguimiento y ubicación de las Personas y Grupos',
'Tracking of Projects, Activities and Tasks': 'Tracking of Projects, Activities and Tasks',
'Tracking of basic information on the location, facilities and size of the Shelters': 'Seguimiento de información básica sobre la ubicación, las instalaciones y el tamaño de los albergues',
'Tracks': 'Temas de',
'Tracks requests for aid and matches them against donors who have pledged aid': 'Sigue las peticiones para ayuda y los compara con los donantes que han prometido ayuda',
'Tracks the location, distibution, capacity and breakdown of victims in Shelters': 'Monitorea la ubicación, distribución, capacidad and composición de victimas en refugios',
'Traffic Report': 'Traffic Report',
'Transfer': 'Transfer',
'Transit': 'Tránsito',
'Transition Effect': 'Efecto de transición',
'Transnistria': 'Transnistria',
'Transparent?': 'Transparente?',
'Transport': 'Transporte',
'Transportation assistance, Rank': 'Transportation assistance, Rank',
'Trauma Center': 'Centro de Trauma',
'Travel Cost': 'Costo de viaje',
'Treatments': 'Tratamientos',
'Tree': 'Árbol',
'Trinidad and Tobago': 'Trinidad y Tobago',
'Tropical Storm': 'Tropical Storm',
'Tropo Messaging Token': 'Tropo Messaging Token',
'Tropo Settings': 'Tropo Settings',
'Tropo Voice Token': 'Tropo Voice Token',
'Tropo settings updated': 'Tropo settings updated',
'Truck': 'Camión',
'Try checking the URL for errors, maybe it was mistyped.': 'Intenta chequiar para errores del URL, pudo ser un error de ortografía.',
'Try hitting refresh/reload button or trying the URL from the address bar again.': 'Try hitting refresh/reload button or trying the URL from the address bar again.',
'Try refreshing the page or hitting the back button on your browser.': 'Try refreshing the page or hitting the back button on your browser.',
'Tsunami': 'Tsunami',
'Tuesday': 'Martes',
'Tunisia': 'Túnez',
'Turkey': 'Turquía',
'Turkmenistan': 'Turkmenistán',
'Tuvalu': 'Tuvalu',
'Twitter': 'Gorjeo',
'Twitter ID or #hashtag': 'Twitter ID or #hashtag',
'Twitter Settings': 'Twitter Settings',
'Type': 'Tipo',
'Type of cause': 'Tipo de cause',
'Type of latrines': 'Tipo de letrinas',
'Type of place for defecation': 'Tipo de lugar para la defecación',
'Type of water source before the disaster': 'Tipo de water source before the disaster',
'Types of health services available': 'Tipos de servicios de salud disponibles',
'Types of water storage containers available': 'Types of water storage containers available',
'UID': 'UID',
'UN': 'Naciones Unidas',
'URL': 'URL',
'URL of the Ushahidi instance': 'URL de the Ushahidi instance',
'URL: ': 'URL: ',
'UTC Offset': 'UTC',
'UUID of foreign Sahana server': 'UUID de servidor de extranjeros Sahana',
'Uganda': 'Uganda',
'Ukraine': 'Ucrania',
'Unable to parse CSV file!': 'No es posible analizar archivo CSV!',
'Understaffed': 'Understaffed',
'Unidentified': 'No identificado',
'Union Council': 'Consejo de Unión',
'Unit': 'Unidad',
'Unit Bed Capacity': 'Unidad de Capacidad de camas',
'Unit Cost': 'Costo por unidad',
'Unit Details': 'Unidad de Información',
'Unit Name': 'Nombre de la Unidad ',
'Unit Set': 'Unidad de Juego',
'Unit Short Code for e.g. m for meter.': 'Unit Short Code for e.g. m for meter.',
'Unit added': 'Unidad añadido',
'Unit deleted': 'Unidad eliminada',
'Unit updated': 'Unidad de actualización',
'United Arab Emirates': 'Emiratos Árabes Unidos',
'United Kingdom': 'Reino Unido',
'United States': 'Estados Unidos',
'Units': 'Unidades',
'Units of Measure': 'Unidades de medida',
'Unknown': 'Desconocido',
'Unknown Peer': 'Unknown Peer',
'Unknown type of facility': 'Tipo desconocido de la instalación',
'Unresolved Conflicts': 'Unresolved Conflicts',
'Unselect to disable the modem': 'Unselect to disable the modem',
'Unsent': 'Unsent',
'Unskilled': 'No calificados',
'Unsupported data format!': 'Formato de datos no compatible!',
'Unsupported method!': 'Método no compatible!',
'Update': 'Actualización',
'Update Activity Report': 'Actualización de Informe de Actividades',
'Update Cholera Treatment Capability Information': 'Update Cholera Treatment Capability Information',
'Update Find Report': 'Informe de actualización Buscar',
'Update Import Job': 'Actualización de Empleo de importación',
'Update Request': 'Actualizar Requerimiento',
'Update Service Profile': 'Actualiza tu perfil de servicio',
'Update Task Status': 'Actualización de estado de tareas',
'Update Unit': 'Actualización de la Unidad',
'Update if Master': 'Actualice si es Maestro',
'Update if Newer': 'Update if Newer',
'Update your current ordered list': 'Actualizar your current ordered list',
'Update/Master': 'Actualizar/Master',
'Update/Newer': 'Actualizar/Renovar',
'Upload': 'Subir',
'Upload Photos': 'Subir fotos',
'Upload Spreadsheet': 'Upload Spreadsheet',
'Upload Track': 'Cargar la pista',
'Upload a Spreadsheet': 'Upload a Spreadsheet',
"Upload an image file here. If you don't upload an image file, then you must specify its location in the URL field.": 'Subir imagen de archivo aquí. Si no sube ninguna imagen de archivo entonces debe especificar su ubicación dentro del campo URL.',
'Urban Fire': 'Fuego Urbano',
'Urban area': 'Área urbana',
'Urdu': 'Urdu',
'Urgent': 'Urgent',
'Uruguay': 'Uruguay',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Utilice (...)&(...) para Y, (...)|(...) para O, y ~(...) para NO construir consultas más complejas.',
'Use default': 'Use default',
'Use default from feature class': 'Use default from feature class',
'Use these links to download data that is currently in the database.': 'Utilice estos enlaces para descargar los datos que está actualmente en la base de datos.',
'Use this link to review the situation.': 'Utilice este enlace para revisar la situación.',
'Use this space to add a description about the Bin Type.': 'Utilice este espacio para añadir una descripción sobre el tipo de Contenedor.',
'Use this space to add a description about the site location.': 'Use this space to add a description about the site location.',
'Use this space to add a description about the warehouse/site.': 'Use this space to add a description about the warehouse/site.',
'Use this space to add additional comments and notes about the Site/Warehouse.': 'Utilice este espacio para comentarios y notas adicionales sobre el Sitio/Bodega.',
'Used to import data from spreadsheets into the database': 'Used to import data from spreadsheets into the database',
'User': 'Usuario',
'User %(id)s Logged-in': 'Usuario %(id)s ha ingresado',
'User %(id)s Logged-out': 'Usuario %(id)s ha salido',
'User %(id)s Registered': 'Usuario %(id)s registrado',
'User Details': 'Datos del Usuario',
'User ID': 'Identificación de usuario',
'User Management': 'Gestión de usuarios',
'User Profile': 'User Profile',
'User Requests': 'Solicitudes del Usuario',
'User Updated': 'Usuario Actualizado',
'User added': 'Usuario añadido',
'User already has this role': 'User already has this role',
'User already in Group!': 'El usuario ya se encuentra en el grupo!',
'User deleted': 'Usuario borrado',
'User has no Email address!': 'Usuario no tiene dirección de correo electrónico!',
'User has no SMS address!': 'Usuario no tiene dirección SMS!',
'User updated': 'Usuario actualizado',
'Username': 'Nombre de usuario',
'Username & Password': 'Nombre de Usuario y Contraseña',
'Username for authentication at the peer. Note that only HTTP Basic authentication is supported.': 'Username for authentication at the peer. Note that only HTTP Basic authentication is supported.',
'Users': 'Usuario',
'Users can collaboratively add markers of what is occuring.': 'Los usuarios pueden colaborar para añadir marcadores de lo que está ocurriendo.',
'Users removed': 'Usuario eliminado',
'Ushahidi': 'Ushahidi',
'Usual food sources in the area': 'Usual food sources in the area',
'Utility, telecommunication, other non-transport infrastructure': 'Utility, telecommunication, other non-transport infrastructure',
'Uzbekistan': 'Uzbekistán',
'VHF': 'VHF',
'Vanuatu': 'Vanuatu',
'Various Reporting functionalities': 'Varias funciones de información',
'Vatican City': 'Ciudad del Vaticano',
'Vehicle': 'Vehículo',
'Vehicle Crime': 'Vehicle Crime',
'Vehicle Types': 'Tipos de vehículos',
'Vendor': 'Vendedor',
'Venezuela': 'Venezuela',
'Verified': 'Verificado',
'Verified?': '¿Verificado?',
'Verify Password': 'Verifique su contraseña',
'Verify password': 'Verify password',
'Version': 'Version',
'Very High': 'Muy Alto',
'Victim': 'Víctima',
'Vietnam': 'Vietnam',
'View & Edit Pledges': 'Ver y Editar Promesas',
'View Alerts received using either Email or SMS': 'Ver las alertas recibidas mediante correo electrónico o SMS',
'View Email InBox': 'Ver buzón de correo electrónico',
'View Email OutBox': 'Ver Bandeja de salida de correo electrónico',
'View Fullscreen Map': 'Ver Mapa de tamaño completo',
'View Image': 'Ver imagen',
'View On Map': 'Ver en el Mapa',
'View Outbox': 'Ver Bandeja de salida',
'View Requests & Pledge Aid': 'Ver Peticiones y Promesas de Ayuda',
'View Requests for Aid': 'Ver Solicitudes de Ayuda',
'View SMS InBox': 'Ver Buzón de SMS',
'View SMS OutBox': 'Ver SMS Bandeja de salida',
'View Sent Email': 'Ver Enviado Correo electrónico',
'View Sent SMS': 'Ver SMS enviados',
'View Settings': 'Configuración de vista',
'View Situation Map': 'Ver Mapa de situación',
'View Tickets': 'View Tickets',
"View and/or update details of the person's record": 'Ver y/o actualizar detalles del registro de la persona',
'View and/or update their details': 'View and/or update their details',
'View or update the status of a hospital.': 'Ver o actualizar el estado de un hospital.',
'View pending requests and pledge support.': 'Ver las solicitudes en curso y apoyar el compromiso.',
'View the hospitals on a map.': 'View the hospitals on a map.',
'View/Edit Person Details': 'Ver o modificar datos de la persona',
"View/Edit the Database directly (caution: doesn't respect the framework rules!)": 'Ver o modificar la base de datos directamente (advertencia: no respeta las normas marco!)',
'Village': 'Pueblo',
'Village Leader': 'Líder del Pueblo',
'Visible?': 'Visible?',
'Visual Recognition': 'Reconocimiento Visual',
'Volcanic Ash Cloud': 'Volcanic Ash Cloud',
'Volcanic Event': 'Eventos Volcánicos ',
'Volume - Fluids': 'Volumen - Líquidos',
'Volume - Solids': 'Volumen - Sólidos',
'Volume Capacity': 'Capacidad de Volumen',
'Volume/Dimensions': 'Volumen y Dimensiones',
'Voluntarios': 'Voluntarios ',
'Volunteer': 'Voluntario',
'Volunteer Data': 'Volunteer Data',
'Volunteer Details': 'Volunteer Details',
'Volunteer Management': 'Volunteer Management',
'Volunteer Project': 'Proyecto de Voluntariado',
'Volunteer Registration': 'Registro de Voluntarios',
'Volunteer Registrations': 'Registros de voluntariado',
'Volunteer Registry': 'Registro de Voluntarios',
'Volunteer Request': 'Volunteer Requerimiento',
'Volunteer Status': 'Voluntarios de estado',
'Volunteer added': 'Volunteer added',
'Volunteer deleted': 'Volunteer deleted',
'Volunteer details updated': 'Volunteer details updated',
'Volunteer registration added': 'El registro de voluntarios añadido',
'Volunteer registration deleted': 'El registro de voluntarios eliminado',
'Volunteer registration updated': 'El registro de voluntarios actualización',
'Volunteers': 'Voluntarios',
'Volunteers were notified!': 'Volunteers were notified!',
'Vote': 'Votación',
'Votes': 'Votos',
'W-LAN': 'W-LAN',
'WASH': 'WASH',
'WHIRF': 'WHIRF',
'WMS Browser Name': 'Nombre del Buscador WMS',
'WMS Browser URL': 'WMS Browser URL',
'Walking Only': 'Walking Only',
'Walking time to the health service': 'Walking time to the health service',
'Warehouse': 'Almacén',
'Warehouse Details': 'Warehouse Details',
'Warehouse Item': 'Warehouse Item',
'Warehouse Item Details': 'Warehouse Item Details',
'Warehouse Item added': 'Warehouse Item added',
'Warehouse Item deleted': 'Warehouse Item deleted',
'Warehouse Item updated': 'Warehouse Item updated',
'Warehouse Items': 'Warehouse Items',
'Warehouse Management': 'Gestión de almacenes',
'Warehouse added': 'Warehouse added',
'Warehouse deleted': 'Warehouse deleted',
'Warehouse updated': 'Warehouse updated',
'Warehouse/Sites Registry': 'Galería / Sitios de Registro',
'Warehouses': 'Bodegas',
'WatSan': 'WatSan',
'Water': 'Agua',
'Water Sanitation Hygiene': 'Water Sanitation Hygiene',
'Water gallon': 'Water gallon',
'Water storage containers available for HH': 'Water storage containers available for HH',
'Water storage containers sufficient per HH': 'Water storage containers sufficient per HH',
'Water supply': 'Water supply',
'Waterspout': 'Tromba marina',
'Way Bill(s)': 'Proyecto de Ley Way (s)',
"We have no active problem. That's great!": 'No tenemos ningún problema activo. Eso es genial!',
'We have tried': 'Hemos tratado de',
'Website': 'Sitio web',
'Wednesday': 'Wednesday',
'Weekly': 'Weekly',
'Weight': 'Peso',
'Weight (kg)': 'Peso (kg)',
'Welcome to the Sahana Eden Disaster Management System': 'Bienvenido al Sistema de Manejo de Desastres Sahana Eden',
'Welcome to the Sahana FOSS Disaster Management System': 'Bienvenido al Sistema de Gestión de Desastres Sahana FOSS',
'Welcome to the Sahana Portal at ': 'Bienvenido al Portal Sahana en ',
'Well-Known Text': 'Texto bien conocido',
'Were basic medical supplies available for health services prior to the disaster?': 'Were basic medical supplies available for health services prior to the disaster?',
'Were breast milk substitutes used prior to the disaster?': 'Were breast milk substitutes used prior to the disaster?',
'Were there cases of malnutrition in this area prior to the disaster?': 'Were there cases of malnutrition in this area prior to the disaster?',
'Were there health services functioning for the community prior to the disaster?': 'Were there health services functioning for the community prior to the disaster?',
'Were there reports or evidence of outbreaks of any micronutrient malnutrition disorders before the emergency?': '¿Existen informes o pruebas de los brotes de cualquier trastorno malnutrición de micronutrientes antes de la emergencia?',
'What are the factors affecting school attendance?': 'What are the factors affecting school attendance?',
"What are the people's normal ways to obtain food in this area?": "What are the people's normal ways to obtain food in this area?",
'What are your main sources of cash to restart your business?': 'What are your main sources of cash to restart your business?',
'What are your main sources of income now?': '¿Cuáles son sus principales fuentes de ingresos ahora?',
'What do you spend most of your income on now?': 'What do you spend most of your income on now?',
'What food stocks exist? (main dishes)': 'What food stocks exist? (main dishes)',
'What food stocks exist? (side dishes)': '¿Qué reservas de alimentos existen? (acompañamientos)',
'What is the estimated total number of people in all of these institutions?': '¿Cuál es el número total estimado de personas en todas esas instituciones?',
'What is your major source of clean water for daily use (ex: washing, cooking, bathing)?': '¿Cuál es tu mayor fuente de agua limpia de uso diario (ej: lavar, cocinar, bañarse)?',
'What is your major source of drinking water?': 'What is your major source of drinking water?',
"What should be done to reduce women and children's vulnerability to violence?": "What should be done to reduce women and children's vulnerability to violence?",
'What type of latrines are available in the village/IDP centre/Camp?': 'What type of latrines are available in the village/IDP centre/Camp?',
'What type of salvage material can be used from destroyed houses?': 'What type of salvage material can be used from destroyed houses?',
'What type of salvage material can be used from destroyed schools?': '¿Qué tipo de material recuperado de escuelas destruidas puede ser utilizado?',
'What types of health problems do children currently have?': 'What types of health problems do children currently have?',
'What types of health problems do people currently have?': 'Qué tipos de problemas de salud las personas tienen actualmente?',
'What types of health services are still functioning in the affected area?': 'What types of health services are still functioning in the affected area?',
'What types of household water storage containers are available?': '¿Qué tipos de envases domésticos de almacenamineto de agua estan disponibles?',
'What were your main sources of income before the disaster?': 'Cuales eran sus mayores fuentes de ingresos antes del desastre?',
'Wheat': 'Trigo',
'When a map is displayed that focuses on a collection of points, the map is zoomed to show just the region bounding the points. This value adds a small mount of distance outside the points. Without this, the outermost points would be on the bounding box, and might not be visible.': 'When a map is displayed that focuses on a collection of points, the map is zoomed to show just the region bounding the points. This value adds a small mount of distance outside the points. Without this, the outermost points would be on the bounding box, and might not be visible.',
'When a map is displayed that focuses on a collection of points, the map is zoomed to show just the region bounding the points. This value gives a minimum width and height in degrees for the region shown. Without this, a map showing a single point would not show any extent around that point. After the map is displayed, it can be zoomed as desired.': 'When a map is displayed that focuses on a collection of points, the map is zoomed to show just the region bounding the points. This value gives a minimum width and height in degrees for the region shown. Without this, a map showing a single point would not show any extent around that point. After the map is displayed, it can be zoomed as desired.',
"When syncing data with others, conflicts happen in cases when two (or more) parties want to sync information which both of them have modified, i.e. conflicting information. Sync module tries to resolve such conflicts automatically but in some cases it can't. In those cases, it is up to you to resolve those conflicts manually, click on the link on the right to go to this page.": "When syncing data with others, conflicts happen in cases when two (or more) parties want to sync information which both of them have modified, i.e. conflicting information. Sync module tries to resolve such conflicts automatically but in some cases it can't. In those cases, it is up to you to resolve those conflicts manually, click on the link on the right to go to this page.",
'Where are the alternative places for studying?': 'Where are the alternative places for studying?',
'Where are the separated children originally from?': '¿De dónde son originalmente los niños que fueron separados?',
'Where do the majority of people defecate?': 'Where do the majority of people defecate?',
'Where have the children been sent?': '¿Donde han sido enviados los niños?',
'Where is solid waste disposed in the village/camp?': 'Where is solid waste disposed in the village/camp?',
'Whereabouts': 'Paradero',
'Whether this is a Sahana Eden, Sahana Agasti, Ushahidi or Other instance.': 'Si esta es Sahana Eden, Sahana Agasti, Ushahidi u otra instancia.',
'Which API function was called, it can only have two values: getdata refers to data export operation and putdata refers to data import operation.': '¿Qué función de la API se llamaba, sólo puede tener dos valores: GetData se refiere a la operación de exportación de datos y putdata refiere a los datos operación de importación.',
'Whiskers': 'Bigotes',
'Who is doing what and where': '¿Quién está haciendo qué y dónde',
'Who usually collects water for the family?': 'Who usually collects water for the family?',
'Width': 'Ancho',
'Wig': 'Peluca',
'Wikipedia': 'Wikipedia',
'Wild Fire': 'Wild Fire',
'Wind Chill': 'Wind Chill',
'Window frame': 'Marco de ventana',
'Winter Storm': 'Tormenta Invernal',
'Without mentioning any names or indicating anyone, do you know of any incidents of violence against women or girls occuring since the disaster?': 'Without mentioning any names or indicating anyone, do you know of any incidents of violence against women or girls occuring since the disaster?',
'Women of Child Bearing Age': 'Women de Child Bearing Age',
'Women participating in coping activities': 'Mujeres participando en actividades para afrontar ',
'Women who are Pregnant or in Labour': 'Women who are Pregnant or in Labour',
'Womens Focus Groups': 'Grupos Focales de Mujeres ',
'Wooden plank': 'Tablón de madera',
'Wooden poles': 'Postes de madera',
'Working hours end': 'Horario de trabajo final',
'Working hours start': 'Las horas de trabajo comenzará',
'Working or other to provide money/food': 'Working or other to provide money/food',
'Would you like to display the photos on the map?': '¿Desea mostrar las fotos en el mapa?',
'X-Ray': 'X-Ray',
'XMPP': 'XMPP',
'XSL Template Not Found: ': 'No se encuentra la plantilla XSL:',
'XSL Transformation Error: ': 'XSL Transformación de error:',
'XSLT Template Not Found: ': 'No se ha encontrado plantilla XSLT:',
'XSLT Transformation Error: ': 'XSLT Transformación de error:',
'Yemen': 'Yemen',
'Yes': 'Yes',
'You are attempting to delete your own account - are you sure you want to proceed?': 'You are attempting to delete your own account - are you sure you want to proceed?',
'You are currently reported missing!': 'You are currently reported missing!',
'You can add information about your organization here. It is the information which other servers can read about you.': 'Puede agregar información sobre su organización aquí. Es la información que otros servidores se puede leer acerca de usted.',
'You can change the configuration of synchronization module in the Settings section. This configuration includes your UUID (unique identification number), sync schedules, beacon service and so on. Click the following link to go to the Sync Settings page.': 'You can change the configuration of synchronization module in the Settings section. This configuration includes your UUID (unique identification number), sync schedules, beacon service and so on. Click the following link to go to the Sync Settings page.',
'You can click on the map below to select the Lat/Lon fields:': 'You can click on the map below to select the Lat/Lon fields:',
'You can click on the map to select the Lat/Lon fields. Longitude is West - East (sideways). Latitude is North-South (Up-Down). Latitude is zero on the equator and positive in the northern hemisphere and negative in the southern hemisphere. Longitude is zero on the prime meridian (Greenwich Mean Time) and is positive to the east, across Europe and Asia. Longitude is negative to the west, across the Atlantic and the Americas. This needs to be added in Decimal Degrees.': 'You can click on the map to select the Lat/Lon fields. Longitude is West - East (sideways). Latitude is North-South (Up-Down). Latitude is zero on the equator and positive in the northern hemisphere and negative in the southern hemisphere. Longitude is zero on the prime meridian (Greenwich Mean Time) and is positive to the east, across Europe and Asia. Longitude is negative to the west, across the Atlantic and the Americas. This needs to be added in Decimal Degrees.',
'You can select the Draw tool (': 'You can select the Draw tool (',
'You can set the modem settings for SMS here.': 'Puede establecer la configuración del módem para SMS aquí.',
'You can use the Conversion Tool to convert from either GPS coordinates or Degrees/Minutes/Seconds.': 'You can use the Conversion Tool to convert from either GPS coordinates or Degrees/Minutos/Seconds.',
"You have personalised settings, so changes made here won't be visible to you. To change your personalised settings, click ": 'Has personalizado la configuración, los cambios realizados aquí no seran visibles. Para cambiar configuración personalizada, click',
"You have unsaved changes. Click Cancel now, then 'Save' to save them. Click OK now to discard them.": "You have unsaved changes. Click Cancel now, then 'Save' to save them. Click OK now to discard them.",
"You haven't made any calculations": "You haven't made any calculations",
'You must be logged in to register volunteers.': 'Usted debe abrir una sesión para registrar voluntarios.',
'You must be logged in to report persons missing or found.': 'Debes iniciar sesión para reportar a personas desparaecidas o encontradas.',
'You must provide a series id to proceed.': 'Usted debe proporcionar una serie de identificación para proceder.',
'You should edit Twitter settings in models/000_config.py': 'You should edit Twitter settings in models/000_config.py',
'Your action is required. Please approve user %s asap: ': 'Se requiere su atención. Por favor apruebe al usuario %s tan pronto como sea posible: ',
'Your current ordered list ... (#TODO [String])': 'Su lista actual ordenó ... (# TODO String [])',
'Your current ordered list of solution items is shown below. You can change it by voting again.': 'Su lista ordenada de items de solución actual es mostrada abajo. Usted puede cambiarlo votando nuevamente.',
'Your post was added successfully.': 'Tu contribución ha sido agregada satisfactoriamente.',
'Your system has been assigned a unique identification (UUID), which other computers around you can use to identify you. To view your UUID, you may go to Synchronization -> Sync Settings. You can also see other settings on that page.': 'Your system has been assigned a unique identification (UUID), which other computers around you can use to identify you. To view your UUID, you may go to Synchronization -> Sync Settings. You can also see other settings on that page.',
'Your unique identification key. It is a 16 character word (aka string). Other servers in your organization will recognize you from this.': 'Su clave de identificación única. Es una palabra de 16 caracteres (string aka). Otros servidores en su organización le reconoce de esta.',
'ZIP/Postcode': 'ZIP / Código postal',
'Zambia': 'Zambia',
'Zeroconf Description': 'Descripción Zeroconf',
'Zeroconf Port': 'Zeroconf Puerto',
'Zimbabwe': 'Zimbabue',
'Zinc roof': 'Zinc roof',
'Zoom': 'Ampliar',
'Zoom In: click in the map or use the left mouse button and drag to create a rectangle': 'Acercar: haga clic en el mapa o utiliza el botón izquierdo del ratón y arrastre para crear un rectángulo',
'Zoom Levels': 'Los niveles de zoom',
'Zoom Out: click in the map or use the left mouse button and drag to create a rectangle': 'Alejar: haga clic en el mapa o utiliza el botón izquierdo del ratón y arrastre para crear un rectángulo',
'Zoom to maximum map extent': 'Zoom para asignar la máxima medida',
'accepted': 'aceptado',
'act': 'acto',
'active': 'activo',
'added': 'agregado',
'advanced': 'avanzado',
'all records': 'all records',
'allows a budget to be developed based on staff & equipment costs, including any admin overheads.': 'permite un presupuesto para ser desarrollada por el personal y los costos de equipo, incluidos los gastos generales de administración.',
'allows for creation and management of surveys to assess the damage following a natural disaster.': 'allows for creation and management of inspecciones to assess the damage following a natural disaster.',
'an individual/team to do in 1-2 days': 'para realizar por un individuo/equipo en 1-2 días',
'angular': 'angular',
'approved': 'aprovado',
'arched': 'arqueado',
'are mandatory and must be filled': 'son obligatorios y deben ser llenados',
'artificial': 'artificial',
'assigned': 'asignado',
'average': 'average',
'beginning': 'comienzo',
'belongs to': 'pertenece a',
'bitten short': 'mordida corta',
'black': 'negro',
'blond': 'rubio',
'blue': 'azul',
'box': 'caja',
'braided': 'trenzado',
'broad': 'general',
'brown': 'marrón',
'c/o Name': 'c / o Nombre',
'can be used to extract data from spreadsheets and put them into database tables.': 'can be used to extract data from spreadsheets and put them into database tables.',
'cancelled': 'cancelado',
'caucasoid': 'caucasoides',
'check all': 'revisar todo',
'checked': 'comprobado',
'circular': 'circular',
'clearly receding': 'claro retroceso',
'click for more details': 'click for more details',
'close-set': 'muy juntos',
'collateral event': 'collateral event',
'completed': 'terminado',
'concave': 'cóncavo',
'confirmed': 'confirmed',
'consider': 'considerar',
'constraint_id': 'constraint_id',
'convex': 'convexo',
'criminal intent': 'criminal intent',
'critical': 'crítico',
'cross-eyed': 'bizco',
'crud': 'porquería',
'curly': 'rizado',
'currently registered': 'currently registered',
'daily': 'diario',
'dark': 'oscuro',
'data uploaded': 'datos cargados',
'database': 'base de datos',
'database %s select': 'base de datos %s seleccionada',
'db': 'bd',
'deep': 'profundo',
'defective': 'defectuoso',
'deferred': 'diferido',
'delete': 'borrar',
'delete all checked': 'eliminar todos controlados',
'deleted': 'eliminado',
'denied': 'negado',
'description': 'descripción',
'design': 'diseño',
'diseased': 'enfermos',
'displaced': 'displaced',
'divorced': 'divorciado',
'done!': 'hecho!',
'dyed': 'teñido',
'edit': 'editar',
'editor': 'editor',
'embedded': 'incrustado',
'enclosed area': 'enclosed area',
'export as csv file': 'de exportación como archivo csv',
'fat': 'fat',
'feedback': 'realimentación',
'female': 'femenino',
'final report': 'final report',
'flatfooted': 'pies planos',
'flush latrine with septic tank': 'flush latrine with septic tank',
'follow-up assessment': 'follow-up assessment',
'forehead': 'frente',
'form data': 'los datos del formulario',
'from Twitter': 'from Twitter',
'from_id': 'de_ID',
'full': 'completo',
'getting': 'conseguir',
'green': 'verde',
'grey': 'gris',
'groove': 'ranura',
'heavy': 'pesado',
'here': 'aquí',
'high': 'alto',
'highly critical': 'muy crítico',
'horizontal': 'horizontal',
'hourly': 'hourly',
'households': 'hogares',
'how to deal with duplicate data found between your machine and that particular sahana instance.': 'cómo hacer frente a los datos duplicados encontrados entre su máquina y esa instancia Sahana particular.',
'human error': 'human error',
'identified': 'identified',
'ignore': 'pasar por alto',
'immediately': 'inmediatamente',
'implanted': 'implantados',
'improvement': 'mejora',
'in Deg Min Sec format': 'en formato Grados Minutos Segundos',
'in GPS format': 'in GPS format',
'inactive': 'inactivo',
'initial assessment': 'initial assessment',
'injured': 'injured',
'insert new': 'insertar nuevos',
'insert new %s': 'insertar nuevo %s',
'invalid': 'inválido',
'invalid request': 'petición inválida',
'invalid ticket': 'tiquete no es válido',
'is a central online repository where information on all the disaster victims and families, especially identified casualties, evacuees and displaced people can be stored. Information like name, age, contact number, identity card number, displaced location, and other details are captured. Picture and finger print details of the people can be uploaded to the system. People can also be captured by group for efficiency and convenience.': 'es un repositorio central en línea con información de todas las víctimas de catástrofes y las familias, especialmente identificadas las víctimas, los evacuados y las personas desplazadas pueden ser almacenados. La información como nombre, edad, número de teléfono, número de tarjeta de identidad, ubicación desplazados, y otros detalles son capturados. Foto y detalles de huellas digitales de las personas se pueden cargar en el sistema. Las personas también pueden ser capturados por el grupo de eficiencia y conveniencia.',
'is a central online repository where information on all the people can be stored.': 'es un repositorio central en línea donde puede obtenerse información sobre todas las personas que almacenan.',
'is an online bulletin board of missing and found people. It captures information about the people missing and found, as well as information of the person seeking them, increasing the chances of people finding each other. For example if two members of a family unit is looking for the head of the family, we can use this data at least to connect those two family members.': 'es un tablón de anuncios en línea de los desaparecidos y las personas encontradas. Captura información sobre las personas desaparecidas y encontradas, así como la información de la persona que las solicita, aumentando las posibilidades de que las personas se encuentren. Por ejemplo, si dos miembros de una unidad familiar está buscando la cabeza de la familia, podemos utilizar estos datos como mínimo para conectar los dos miembros de la familia.',
'joining': 'uniendo',
'keeps track of all Volunteers working in the disaster region. It captures not only the places where they are active, but also captures information on the range of services they are providing in each area.': 'realiza un seguimiento de todos los Voluntarios que trabajan en la región del desastre. Captura no sólo los lugares donde actúan, sino que también capta la información sobre la gama de servicios que están prestando en cada área.',
'keeps track of all incoming tickets allowing them to be categorised & routed to the appropriate place for actioning.': 'mantiene nota de todos los tiquetes entrantes permitiendoles ser categorizados & enrutados al lugar apropiado para la acción.',
'keeps track of all the relief organizations working in the disaster region. It captures not only the places where they are active, but also captures information on the range of projects they are providing in each area.': 'realiza un seguimiento de todas las organizaciones de socorro que trabajan en la región del desastre. Captura no sólo los lugares donde actúan, sino que también capta la información sobre la gama de proyectos que están prestando en cada área.',
'kilogram': 'kilogram',
'kit': 'kit',
'large': 'grande',
'latrines': 'latrines',
'left': 'izquierda',
'legend URL': 'leyenda URL',
'light': 'luz',
'liter': 'liter',
'login': 'iniciar una sesión',
'long': 'largo',
'long>12cm': 'largo>12 cm',
'low': 'bajo',
'lower': 'inferior',
'male': 'masculino',
'manicured': 'cuidados',
'manual': 'manual',
'married': 'casado',
'maxExtent': 'maxExtent',
'maxResolution': 'Resolución máxima',
'medium': 'medio',
'medium<12cm': 'media<12cm;',
'menu item': 'elemento de menú',
'message': 'mensaje',
'message_id': 'message_id',
'meter': 'meter',
'meter cubed': 'meter cubed',
'meters': 'metros',
'middle': 'medio',
'misshapen': 'deforme',
'mixed': 'mixto',
'module allows the site administrator to configure various options.': 'módulo permite configurar las diversas opciones al administrador del sitio web.',
'module assists the management of human resources.': 'módulo de ayuda a la gestión de los recursos humanos.',
'module helps monitoring the status of hospitals.': 'módulo de ayuda para supervisar el estado de los hospitales.',
'module provides a mechanism to collaboratively provide an overview of the developing disaster, using online mapping (GIS).': 'module provides a mechanism to collaboratively provide an overview of the developing disaster, using online mapping (GIS).',
'module provides a mechanism to collaboratively provide an overview of the developing disaster, using online mapping (GIS). You can add markers and pictures to pin point incidents on the map.': 'módulo ofrece un mecanismo de colaboración para proporcionar una visión general de la catástrofe en desarrollo, utilizando la cartografía en línea (SIG). Puede agregar marcadores y las imágenes a la clavija incidentes punto en el mapa.',
'mongoloid': 'mongoloide',
'more': 'mas',
'n/a': 'n/a',
'narrow': 'estrecho',
'natural': 'natural',
'natural hazard': 'peligro natural',
'negroid': 'negroide',
'never': 'nunca',
'new': 'nuevo',
'new record inserted': 'nuevo registro insertado',
'next 100 rows': 'próximos 100 filas',
'no': 'no',
'non-critical': 'no críticos',
'none': 'ninguno',
'normal': 'normal',
'not accessible - no cached version available!': 'no accesible - no hay una versión en caché disponible!',
'not accessible - using cached version from': 'no accesible - utilizando la versión en caché de',
'not applicable': 'no aplicable',
'not needed': 'not needed',
'not specified': 'no especificado',
'not writable - unable to cache GeoRSS layers!': 'no se puede escribir - no GeoRSS caché capas!',
'not writable - unable to cache KML layers!': 'no se puede escribir - incapaz de caché KML capas!',
'num Zoom Levels': 'num niveles de zoom',
'once': 'once',
'open defecation': 'open defecation',
'operational intent': 'intención operacional',
'or import from csv file': 'o importar de csv',
'other': 'otro',
'oval': 'oval',
'over': 'encima',
'over one hour': 'over one hour',
'pack of 10': 'pack of 10',
'painted': 'pintado',
'part': 'parte',
'pending': 'pending',
'people': 'personas',
'piece': 'piece',
'pit': 'pit',
'pit latrine': 'pit latrine',
'plucked': 'desplumados',
'pointed': 'señaló',
'pointheaded': 'puntuda',
'postponed': 'aplazado',
'preliminary template or draft, not actionable in its current form': 'preliminary template or draft, not actionable in its current form',
'previous 100 rows': '100 filas anteriores',
'primary incident': 'incidente primario',
'problem connecting to twitter.com - please refresh': 'problem connecting to twitter.com - please refresh',
'pronounced': 'pronunciado',
'protruding': 'saliente',
'pyramidal': 'piramidal',
'pyserial module not available within the running Python - this needs installing for SMS!': 'módulo pyserial no se dispone en el funcionamiento Python - esto debe instalar para SMS!',
'quadrangular': 'cuadrangular',
'receding': 'retroceso',
'record does not exist': 'no hay ningún registro',
'record id': 'identificación de registro',
'records deleted': 'registros eliminados',
'rectangular': 'rectangular',
'red': 'rojo',
'refresh': 'refrescar',
'reported': 'reported',
'reports successfully imported.': 'informes importado correctamente.',
'retired': 'jubilado',
'retry': 'reintento',
'review': 'revisión',
'right': 'derecha',
'river': 'river',
'round': 'ronda',
'sack 20kg': 'sack 20kg',
'sack 50kg': 'sack 50kg',
'secondary effect': 'secondary effect',
'see comment': 'ver comentarios',
'selected': 'seleccionado',
'separated': 'separados',
'separated from family': 'separado de la familia',
'shallow': 'poco profundo',
'shaved': 'afeitado',
'shift_end': 'shift_end',
'shift_start': 'shift_start',
'short': 'corto',
'short<6cm': 'corto<6cm',
'sides': 'lados',
'sign-up now': 'sign-up now',
'simple': 'simple',
'single': 'solo',
'slender': 'esbelto',
'slight': 'leve',
'slightly receding': 'retrocediendo ligeramente',
'slim': 'slim',
'small': 'pequeño',
'squint-eyed': 'bizco',
'state': 'estado',
'straight': 'recto',
'streaked': 'rayado',
'suffered financial losses': 'sufrió pérdidas financieras',
'supports nurses in the field to assess the situation, report on their activities and keep oversight.': 'apoya las enfermeras sobre el terreno para evaluar la situación, informar sobre sus actividades y mantener la supervisión.',
'table': 'tabla',
'table_name': 'table_name',
'tall': 'alto',
'tattooed': 'tatuado',
'technical failure': 'technical failure',
'thick': 'espesor',
'thin': 'delgado',
'this': 'esta',
'time': 'tiempo',
'times and it is still not working. We give in. Sorry.': 'veces y sigue sin funcionar. Nos rendimos. Lo sentimos.',
'to access the system': 'en el sistema',
'to reset your password': 'to reset your password',
'to verify your email': 'para verificar tu correo electrónico',
'to_id': 'to_id',
'ton': 'ton',
'tonsure': 'tonsura',
'total': 'total',
'turned down': 'rechazado',
'turned up': 'se presentó',
'turning grey': 'se vuelve gris',
'tweepy module not available within the running Python - this needs installing for non-Tropo Twitter support!': 'tweepy module not available within the running Python - this needs installing for non-Tropo Twitter support!',
'unable to parse csv file': 'no se puede analizar el archivo csv',
'unapproved': 'no aprobados',
'uncheck all': 'desactive todas las',
'unidentified': 'no identificado',
'uninhabitable = foundation and structure destroyed': 'uninhabitable = foundation and structure destroyed',
'unknown': 'desconocido',
'unspecified': 'sin especificar',
'updated': 'actualizado',
'updates only': 'updates only',
'upper': 'superior',
'upper+lower': '+ Superior inferior',
'urgent': 'urgente',
'vertical': 'vertical',
'view': 'vista',
'vm_action': 'acción_vm',
'wavy': 'ondulado',
'weekly': 'weekly',
'white': 'blanco',
'wide': 'ancho',
'wider area, longer term, usually contain multiple Activities': 'area ámplia, larga duración, usualmente contiene multiples actividades',
'widowed': 'viudo',
'window': 'window',
'windows broken, cracks in walls, roof slightly damaged': 'ventantas rotas, paredes con grietas, techo un poco dañado',
'wish': 'deseo',
'within human habitat': 'en el hábitat humano',
'xlwt module not available within the running Python - this needs installing for XLS output!': 'xlwt no disponibles en el módulo de ejecución de Python - esto debe instalar para la salida XLS!',
'xlwt module not available within the running Python - this needs installing to do XLS Reporting!': 'xlwt no disponibles en el módulo de ejecución de Python - esto tiene que ver la instalación de Reporting XLS!',
'yes': 'sí',
}
| 63.414576 | 1,127 | 0.741261 |
c4953b074826ea3b2d92d4e48c9d4ccb1547c9a1 | 4,662 | py | Python | timesketch/lib/analyzers/yetiindicators.py | rgayon/timesketch | 5b055a580652b85c594b7383ef3c7747ba956b4f | [
"Apache-2.0"
] | 4 | 2018-11-01T16:13:31.000Z | 2022-03-18T12:09:25.000Z | timesketch/lib/analyzers/yetiindicators.py | rgayon/timesketch | 5b055a580652b85c594b7383ef3c7747ba956b4f | [
"Apache-2.0"
] | null | null | null | timesketch/lib/analyzers/yetiindicators.py | rgayon/timesketch | 5b055a580652b85c594b7383ef3c7747ba956b4f | [
"Apache-2.0"
] | 1 | 2021-11-16T00:01:18.000Z | 2021-11-16T00:01:18.000Z | """Index analyzer plugin for Yeti indicators."""
from __future__ import unicode_literals
import re
from flask import current_app
import requests
from timesketch.lib.analyzers import interface
from timesketch.lib.analyzers import manager
from timesketch.lib import emojis
class YetiIndicators(interface.BaseSketchAnalyzer):
    """Index analyzer for Yeti threat intel indicators.

    Fetches regex indicators from a Yeti server, scans every event message
    in the timeline against them, and annotates matching events with tags,
    a skull emoji and a comment listing the related Yeti entities.
    """
    NAME = 'yetiindicators'
    DEPENDENCIES = frozenset(['domain'])

    def __init__(self, index_name, sketch_id):
        """Initialize the Index Analyzer.

        Args:
            index_name: Elasticsearch index name.
            sketch_id: Sketch ID the analyzer operates on.
        """
        super(YetiIndicators, self).__init__(index_name, sketch_id)
        # Maps indicator ID -> indicator JSON object; populated by
        # get_indicators(), each entry gains a 'compiled_regexp' key.
        self.intel = {}
        self.yeti_api_root = current_app.config.get('YETI_API_ROOT')
        self.yeti_api_key = current_app.config.get('YETI_API_KEY')

    def get_neighbors(self, entity_id):
        """Retrieve the list of neighbors associated with a given entity.

        Args:
            entity_id (str): STIX ID of the entity to get associated indicators
                from (typically an Intrusion Set or an Incident).

        Returns:
            A list of JSON objects describing Yeti objects; empty list on any
            non-200 response.
        """
        results = requests.post(
            self.yeti_api_root + '/entities/{0:s}/neighbors/'.format(entity_id),
            headers={'X-Yeti-API': self.yeti_api_key},
        )
        if results.status_code != 200:
            return []
        return list(results.json().get('vertices', {}).values())

    def get_indicators(self, indicator_type):
        """Populate the intel attribute with indicators from Yeti.

        Args:
            indicator_type (str): Yeti indicator type to fetch
                (e.g. 'x-regex').
        """
        results = requests.post(
            self.yeti_api_root + '/indicators/filter/',
            json={'name': '', 'type': indicator_type},
            headers={'X-Yeti-API': self.yeti_api_key},
        )
        if results.status_code != 200:
            return
        # Bug fix: the original parsed results.json() twice and built a dict
        # comprehension that was immediately overwritten by the loop below.
        # Parse the response once and build the dict in a single pass.
        self.intel = {}
        for item in results.json():
            item['compiled_regexp'] = re.compile(item['pattern'])
            self.intel[item['id']] = item

    def mark_event(self, indicator, event, neighbors):
        """Annotate an event with data from indicators and neighbors.

        Tags the event with slugified neighbor names and a skull emoji, then
        adds a comment identifying the matching indicator.
        """
        event.add_emojis([emojis.get_emoji('SKULL')])
        tags = []
        for n in neighbors:
            # Slugify the neighbor name: lowercase, non-alphanumerics to '-',
            # then collapse runs of '-' so tags stay readable.
            slug = re.sub(r'[^a-z0-9]', '-', n['name'].lower())
            slug = re.sub(r'-+', '-', slug)
            tags.append(slug)
        event.add_tags(tags)
        event.commit()
        msg = 'Indicator match: "{0:s}" ({1:s})\n'.format(
            indicator['name'], indicator['id'])
        msg += 'Related entities: {0!s}'.format(
            [n['name'] for n in neighbors])
        event.add_comment(msg)
        event.commit()

    def run(self):
        """Entry point for the analyzer.

        Returns:
            String with summary of the analyzer result.
        """
        if not self.yeti_api_root or not self.yeti_api_key:
            return 'No Yeti configuration settings found, aborting.'

        self.get_indicators('x-regex')

        entities_found = set()
        events = self.event_stream(query_string='*',
                                   return_fields=['message'])
        total_matches = 0
        matching_indicators = set()
        for event in events:
            for _id, indicator in self.intel.items():
                regexp = indicator['compiled_regexp']
                if regexp.search(event.source['message']):
                    total_matches += 1
                    matching_indicators.add(indicator['id'])
                    neighbors = self.get_neighbors(indicator['id'])
                    self.mark_event(indicator, event, neighbors)
                    for n in neighbors:
                        entities_found.add('{0:s}:{1:s}'.format(
                            n['name'], n['type']
                        ))

        if not total_matches:
            return 'No indicators were found in the timeline.'

        for entity in entities_found:
            name, _type = entity.split(':')
            self.sketch.add_view(
                'Indicator matches for {0:s} ({1:s})'.format(name, _type),
                self.NAME,
                query_string='tag:"{0:s}"'.format(name))

        return '{0:d} events matched {1:d} indicators. [{2:s}]'.format(
            total_matches, len(matching_indicators), ', '.join(entities_found))
| 35.318182 | 80 | 0.578936 |
d50ecfa3738fb2256bd18f22e9893a672dccd33c | 14,988 | py | Python | mcpAlgorithm.py | octeufer/Annotate_Optimize | 32d9cecc0159882d3f962990aba07168c4a023f5 | [
"Apache-2.0"
] | null | null | null | mcpAlgorithm.py | octeufer/Annotate_Optimize | 32d9cecc0159882d3f962990aba07168c4a023f5 | [
"Apache-2.0"
] | null | null | null | mcpAlgorithm.py | octeufer/Annotate_Optimize | 32d9cecc0159882d3f962990aba07168c4a023f5 | [
"Apache-2.0"
] | null | null | null | '''
Author:Octeufer
2014/1/10
'''
import random
import hashlib
import numpy as np
class greedymcp:
def __init__(self):
pass
def FindMaxClique(self,graph,maxTime,targetCliqueSize):
clique = list()
time = 0
timeBestClique = 0
timeRestart = 0
nodeToAdd = -1
nodeToDrop = -1
randomNode = random.randint(0,graph.shape[0]-1)
print "Adding node %d" %randomNode
clique.append(randomNode)
bestClique = list()
bestSize = len(bestClique)
timeBestClique = time
possibleAdd = self.MakePossibleAdd(graph,clique)
oneMissing = self.MakeOneMissing(graph,clique)
while time < maxTime and bestSize < targetCliqueSize:
time = time + 1
cliqueChanged = False
if len(possibleAdd) > 0:
nodeToAdd = self.GetNodeToAdd(graph,possibleAdd)
print "Adding node %d" %nodeToAdd
clique.append(nodeToAdd)
clique.sort()
cliqueChanged = True
if len(clique) > bestSize:
bestSize = len(clique)
bestClique = list()
bestClique.extend(clique)
timeBestClique = time
if cliqueChanged == False:
if len(clique) > 0:
nodeToDrop = self.GetNodeToDrop(graph,clique,oneMissing)
print "Dropping node %d" %nodeToDrop
clique.remove(nodeToDrop)
clique.sort()
cliqueChanged = True
restart = 2 * bestSize
if (time - timeBestClique) > restart and (time - timeRestart) > restart:
print "Restart"
timeRestart = time
seedNode = random.randint(0,graph.shape[0]-1)
clique = list()
possibleAdd = self.MakePossibleAdd(graph,clique)
oneMissing = self.MakeOneMissing(graph,clique)
return bestClique
def MakePossibleAdd(self,graph,clique):
def FormsALargerClique(graph,clique,node):
for i in range(len(clique)):
if graph[clique[i],node] == 0:
return False
return True
result = list()
result = [i for i in range(len(graph)) if FormsALargerClique(graph,clique,i) == True]
return result
def GetNodeToAdd(self,graph,possibleAdd):
l = len(possibleAdd)
if l==1:
return possibleAdd[0]
maxDegree = 0
for i in range(l):
currNode = possibleAdd[i]
degreeOfCurrentNode = 0
for j in range(l):
otherNode = possibleAdd[j]
if graph[currNode,otherNode] == 1:
degreeOfCurrentNode = degreeOfCurrentNode + 1
if degreeOfCurrentNode > maxDegree:
maxDegree = degreeOfCurrentNode
candidates = list()
for i in range(l):
currNode = possibleAdd[i]
degreeOfCurrentNode = 0
for j in range(l):
otherNode = possibleAdd[j]
if graph[currNode,otherNode] == 1:
degreeOfCurrentNode = degreeOfCurrentNode + 1
if degreeOfCurrentNode == maxDegree:
candidates.append(currNode)
return candidates[random.randint(0,len(candidates)-1)]
def GetNodeToDrop(self,graph,clique,oneMissing):
lc = len(clique)
lm = len(oneMissing)
if lc == 1:
return clique[0]
maxCount = 0
for i in range(lc):
currCliqueNode = clique[i]
countNotAdjacent = 0
for j in range(lm):
currOneMissingNode = oneMissing[j]
if graph[currCliqueNode,currOneMissingNode] == 0:
countNotAdjacent = countNotAdjacent + 1
if countNotAdjacent > maxCount:
maxCount = countNotAdjacent
candidates = list()
for i in range(lc):
currCliqueNode = clique[i]
countNotAdjacent = 0
for j in range(lm):
currOneMissingNode = oneMissing[j]
if graph[currCliqueNode,currOneMissingNode] == 0:
countNotAdjacent = countNotAdjacent + 1
if countNotAdjacent == maxCount:
candidates.append(currCliqueNode)
return candidates[random.randint(0,len(candidates)-1)]
def MakeOneMissing(self,graph,clique):
count = 0
result = list()
for i in range(graph.shape[0]):
if (graph[i]>0).sum() < len(clique):continue
if i in clique:continue
for j in range(len(clique)):
if graph[i,clique[j]] == 1:
count = count + 1
if count == len(clique) - 1:
result.append(i)
return result
class tabumcp:
    """Tabu-search heuristic for the maximum-clique problem.

    Extends the greedy add/drop scheme with an adaptive prohibit (tabu)
    period: a node moved at time t may not be moved again until
    t + prohibitPeriod.  The period grows when a previously seen clique is
    revisited too soon and shrinks after long stable stretches.
    """

    def __init__(self, graph):
        # graph: square numpy 0/1 adjacency matrix with a zero diagonal.
        self.graph = graph
        self.clique = list()
        self.time = 0
        self.timeBestClique = 0
        self.timeRestart = 0
        self.prohibitPeriod = 1
        self.timeProhibitChanged = 0
        # lastMoved[v] = iteration at which node v was last added/dropped.
        self.lastMoved = np.zeros((graph.shape[0]), np.int32)
        # clique-hash -> CliqueInfo, for detecting revisited cliques.
        self.history = {}

    def FindMaxClique(self, maxTime):
        """Run the search for at most ``maxTime`` iterations.

        :return: best clique found, as a sorted list of node indices
        """
        iter = 0
        # Prefer a seed node that has never been moved yet.
        # Bug fix: the original collected lastMoved *values* (all zeros)
        # instead of node indices, so the seed was effectively always 0.
        unmoved = [i for i in range(len(self.lastMoved)) if self.lastMoved[i] == 0]
        if len(unmoved) > 0:
            seedNode = unmoved[random.randint(0, len(unmoved) - 1)]
        else:
            seedNode = random.randint(0, self.graph.shape[0] - 1)
        self.clique = [seedNode]
        bestClique = list()
        bestSize = len(bestClique)
        self.timeBestClique = self.time
        possibleAdd = self.MakePossibleAdd(self.graph, self.clique)
        oneMissing = self.MakeOneMissing(self.graph, self.clique)
        while iter < maxTime and bestSize < self.graph.shape[0]:
            iter += 1
            self.time += 1
            cliqueChanged = False
            # Try to grow the clique with a non-tabu candidate.
            if len(possibleAdd) > 0:
                allowedAdd = self.SelectAllowedNodes(possibleAdd, self.time, self.prohibitPeriod, self.lastMoved)
                if len(allowedAdd) > 0:
                    nodeToAdd = self.GetNodeToAdd(self.graph, allowedAdd, possibleAdd)
                    self.clique.append(nodeToAdd)
                    self.lastMoved[nodeToAdd] = self.time
                    self.clique.sort()
                    cliqueChanged = True
                    if len(self.clique) > bestSize:
                        bestSize = len(self.clique)
                        bestClique = list(self.clique)
                        self.timeBestClique = self.time
            # Otherwise drop the least useful non-tabu clique member.
            if not cliqueChanged:
                if len(self.clique) > 0:
                    allowedInClique = self.SelectAllowedNodes(self.clique, self.time, self.prohibitPeriod, self.lastMoved)
                    if len(allowedInClique) > 0:
                        nodeToDrop = self.GetNodeToDrop(self.graph, allowedInClique, oneMissing)
                        self.clique.remove(nodeToDrop)
                        self.lastMoved[nodeToDrop] = self.time
                        self.clique.sort()
                        cliqueChanged = True
            # Everything is tabu: drop a random clique node to keep moving.
            if not cliqueChanged:
                if len(self.clique) > 0:
                    nodeToDrop = self.clique[random.randint(0, len(self.clique) - 1)]
                    self.clique.remove(nodeToDrop)
                    self.lastMoved[nodeToDrop] = self.time
                    self.clique.sort()
                    cliqueChanged = True
            restart = 2 * bestSize
            if (self.time - self.timeBestClique) > restart and (self.time - self.timeRestart) > restart:
                self.timeRestart = self.time
                self.prohibitPeriod = 1
                self.timeProhibitChanged = self.time
                self.history = {}
                # Bug fix: the original assigned a stray ``self.seedNode = -1``
                # and, as above, picked from lastMoved values instead of indices.
                unmoved = [i for i in range(len(self.lastMoved)) if self.lastMoved[i] == 0]
                if len(unmoved) > 0:
                    seedNode = unmoved[random.randint(0, len(unmoved) - 1)]
                else:
                    seedNode = random.randint(0, self.graph.shape[0] - 1)
                self.clique = [seedNode]
            # Bug fix: the candidate sets were only rebuilt on restart, so a
            # stale possibleAdd could re-add nodes already in the clique.
            possibleAdd = self.MakePossibleAdd(self.graph, self.clique)
            oneMissing = self.MakeOneMissing(self.graph, self.clique)
            self.prohibitPeriod, self.timeProhibitChanged = self.UpdateProhibitPeriod(
                self.graph, self.clique, bestSize, self.history, self.time,
                self.prohibitPeriod, self.timeProhibitChanged)
        return bestClique

    def MakePossibleAdd(self, graph, clique):
        """Return all nodes adjacent to every member of ``clique``."""
        def FormsALargerClique(graph, clique, node):
            for member in clique:
                if graph[member, node] == 0:
                    return False
            return True
        return [i for i in range(len(graph)) if FormsALargerClique(graph, clique, i)]

    def GetNodeToAdd(self, graph, allowedAdd, possibleAdd):
        """Pick, uniformly at random, one of the allowed candidates adjacent
        to the largest number of nodes in ``possibleAdd``."""
        l = len(allowedAdd)
        lp = len(possibleAdd)
        if l == 1:
            return allowedAdd[0]
        maxDegree = 0
        for i in range(l):
            currNode = allowedAdd[i]
            degreeOfCurrentNode = 0
            for j in range(lp):
                otherNode = possibleAdd[j]
                if graph[currNode, otherNode] == 1:
                    degreeOfCurrentNode += 1
            if degreeOfCurrentNode > maxDegree:
                maxDegree = degreeOfCurrentNode
        candidates = []
        for i in range(l):
            currNode = allowedAdd[i]
            degreeOfCurrentNode = 0
            for j in range(lp):
                otherNode = possibleAdd[j]
                if graph[currNode, otherNode] == 1:
                    degreeOfCurrentNode += 1
            if degreeOfCurrentNode == maxDegree:
                candidates.append(currNode)
        return candidates[random.randint(0, len(candidates) - 1)]

    def GetNodeToDrop(self, graph, allowedInClique, oneMissing):
        """Pick the allowed clique node whose removal lets the most
        ``oneMissing`` nodes join (ties broken at random)."""
        lc = len(allowedInClique)
        lm = len(oneMissing)
        if lc == 1:
            return allowedInClique[0]
        maxCount = 0
        for i in range(lc):
            currCliqueNode = allowedInClique[i]
            countNotAdjacent = 0
            for j in range(lm):
                currOneMissingNode = oneMissing[j]
                if graph[currCliqueNode, currOneMissingNode] == 0:
                    countNotAdjacent += 1
            if countNotAdjacent > maxCount:
                maxCount = countNotAdjacent
        candidates = []
        for i in range(lc):
            currCliqueNode = allowedInClique[i]
            countNotAdjacent = 0
            for j in range(lm):
                currOneMissingNode = oneMissing[j]
                if graph[currCliqueNode, currOneMissingNode] == 0:
                    countNotAdjacent += 1
            if countNotAdjacent == maxCount:
                candidates.append(currCliqueNode)
        return candidates[random.randint(0, len(candidates) - 1)]

    def MakeOneMissing(self, graph, clique):
        """Return nodes adjacent to all but exactly one member of
        ``clique``."""
        result = []
        for i in range(graph.shape[0]):
            # Bug fix: ``count`` was initialised once outside this loop and
            # accumulated across nodes; it must be reset for every node.
            count = 0
            if (graph[i] > 0).sum() < len(clique):
                continue
            if i in clique:
                continue
            for j in range(len(clique)):
                if graph[i, clique[j]] == 1:
                    count += 1
            if count == len(clique) - 1:
                result.append(i)
        return result

    def SelectAllowedNodes(self, listOfNodes, time, prohibitPeriod, lastMoved):
        """Filter out nodes that are still tabu (moved too recently)."""
        result = []
        if len(listOfNodes) == 0:
            return result
        for i in range(len(listOfNodes)):
            currNode = listOfNodes[i]
            if time > lastMoved[currNode] + prohibitPeriod:
                result.append(currNode)
        return result

    def UpdateProhibitPeriod(self, graph, clique, bestSize, history, time, prohibitPeriod, timeProhibitChanged):
        """Adapt the tabu tenure.

        Lengthen it (capped at 2*bestSize) when a known clique is revisited
        too soon; shorten it (floored at 1) after a long stable stretch.
        Returns the (possibly new) period and change timestamp.
        """
        result = prohibitPeriod
        cliqueInfo = CliqueInfo(clique, time)
        key = cliqueInfo.GetHashCode()
        if key in history:  # bug fix: dict.has_key() no longer exists in py3
            ci = history[key]
            intervalSinceLastVisit = time - ci.lastSeen
            ci.lastSeen = time
            if intervalSinceLastVisit < 2 * (graph.shape[0] - 1):
                timeProhibitChanged = time
                if prohibitPeriod + 1 < 2 * bestSize:
                    return prohibitPeriod + 1, timeProhibitChanged
                else:
                    return 2 * bestSize, timeProhibitChanged
        else:
            history[key] = cliqueInfo
        if time - timeProhibitChanged > 10 * bestSize:
            timeProhibitChanged = time
            if prohibitPeriod - 1 > 1:
                return prohibitPeriod - 1, timeProhibitChanged
            else:
                return 1, timeProhibitChanged
        return result, timeProhibitChanged
class CliqueInfo:
    """Snapshot of a clique plus the time it was last visited.

    Used by the tabu search history to detect revisited cliques.
    """

    def __init__(self, clique, lastSeen):
        # Copy so later mutation of the live clique does not alter history.
        self.clique = list(clique)
        self.lastSeen = lastSeen

    def GetHashCode(self):
        """Return the SHA-1 hex digest of the space-separated node list."""
        strc = " ".join(str(node) for node in self.clique)
        # Bug fix: hashlib requires bytes on Python 3; the original passed
        # a str and raised TypeError.  Encoding keeps the py2-era digests.
        return hashlib.sha1(strc.encode("utf-8")).hexdigest()
| 39.03125 | 186 | 0.522618 |
a6d3862b6c91c441db2cc4780ee37ed4552fb11e | 1,702 | py | Python | nexus_constructor/geometry/disk_chopper/chopper_details.py | trnielsen/nexus-constructor | 65efb6eedca30250b75f142dd29a46bc909958df | [
"BSD-2-Clause"
] | null | null | null | nexus_constructor/geometry/disk_chopper/chopper_details.py | trnielsen/nexus-constructor | 65efb6eedca30250b75f142dd29a46bc909958df | [
"BSD-2-Clause"
] | null | null | null | nexus_constructor/geometry/disk_chopper/chopper_details.py | trnielsen/nexus-constructor | 65efb6eedca30250b75f142dd29a46bc909958df | [
"BSD-2-Clause"
] | null | null | null | import numpy as np
from nexus_constructor.unit_utils import (
calculate_unit_conversion_factor,
RADIANS,
METRES,
)
TWO_PI = np.pi * 2
class ChopperDetails:
def __init__(
self,
slits: int,
slit_edges: np.ndarray,
radius: float,
slit_height: float,
angle_units: str,
slit_height_units: str,
radius_units: str,
):
"""
Class for storing the chopper input given by the user.
:param slits: The number of slits in the disk chopper.
:param slit_edges: The list of slit edge angles in the disk chopper.
:param radius: The radius of the slit chopper.
:param slit_height: The slit height.
:param angle_units: The units of the slit edges.
:param slit_height_units: The units for the slit length.
:param radius_units: The units for the radius.
"""
self._slits = slits
self._radius = radius
self._slit_height = slit_height
# Convert the angles to radians and make sure they are all less then two pi
slit_edges_factor = calculate_unit_conversion_factor(angle_units, RADIANS)
self._slit_edges = [(edge * slit_edges_factor) % TWO_PI for edge in slit_edges]
self._slit_height *= calculate_unit_conversion_factor(slit_height_units, METRES)
self._radius *= calculate_unit_conversion_factor(radius_units, METRES)
@property
def slits(self):
return self._slits
@property
def slit_edges(self):
return self._slit_edges
@property
def radius(self):
return self._radius
@property
def slit_height(self):
return self._slit_height
| 28.847458 | 88 | 0.656287 |
df8f11ed59627f70424c789949c107046871e1d0 | 748 | py | Python | cboard/cboard/urls.py | darjeeling/cboard | 2050be5878f3a8e8da4fc6b1a5e196d7a9715123 | [
"Apache-2.0"
] | null | null | null | cboard/cboard/urls.py | darjeeling/cboard | 2050be5878f3a8e8da4fc6b1a5e196d7a9715123 | [
"Apache-2.0"
] | null | null | null | cboard/cboard/urls.py | darjeeling/cboard | 2050be5878f3a8e8da4fc6b1a5e196d7a9715123 | [
"Apache-2.0"
] | null | null | null | """cboard URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
# Project-level route table: only the Django admin is exposed for now.
urlpatterns = [
    path('admin/', admin.site.urls),
]
| 34 | 77 | 0.708556 |
c8a5277f95b8bd079d73a984bfce40fdacec517b | 1,772 | py | Python | aliyun-python-sdk-ccc/aliyunsdkccc/request/v20200701/AddPersonalNumbersToUserRequest.py | yndu13/aliyun-openapi-python-sdk | 12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5 | [
"Apache-2.0"
] | null | null | null | aliyun-python-sdk-ccc/aliyunsdkccc/request/v20200701/AddPersonalNumbersToUserRequest.py | yndu13/aliyun-openapi-python-sdk | 12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5 | [
"Apache-2.0"
] | null | null | null | aliyun-python-sdk-ccc/aliyunsdkccc/request/v20200701/AddPersonalNumbersToUserRequest.py | yndu13/aliyun-openapi-python-sdk | 12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5 | [
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkccc.endpoint import endpoint_data
class AddPersonalNumbersToUserRequest(RpcRequest):
	"""RPC request for the CCC (version 2020-07-01) ``AddPersonalNumbersToUser`` API.

	Auto-generated SDK wrapper: each query parameter has a matching
	getter/setter pair that reads from or writes to the request's
	query-parameter map.
	"""

	def __init__(self):
		RpcRequest.__init__(self, 'CCC', '2020-07-01', 'AddPersonalNumbersToUser')
		self.set_method('POST')
		# Register endpoint data only when the installed core SDK exposes
		# the endpoint_map / endpoint_regional attributes.
		if hasattr(self, "endpoint_map"):
			setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
		if hasattr(self, "endpoint_regional"):
			setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

	def get_UserId(self):
		"""Return the ``UserId`` query parameter."""
		return self.get_query_params().get('UserId')

	def set_UserId(self,UserId):
		"""Set the ``UserId`` query parameter."""
		self.add_query_param('UserId',UserId)

	def get_NumberList(self):
		"""Return the ``NumberList`` query parameter."""
		return self.get_query_params().get('NumberList')

	def set_NumberList(self,NumberList):
		"""Set the ``NumberList`` query parameter."""
		self.add_query_param('NumberList',NumberList)

	def get_InstanceId(self):
		"""Return the ``InstanceId`` query parameter."""
		return self.get_query_params().get('InstanceId')

	def set_InstanceId(self,InstanceId):
		"""Set the ``InstanceId`` query parameter."""
		self.add_query_param('InstanceId',InstanceId)
58ac32faa90819da860b11701e94019fa979a8c3 | 4,080 | py | Python | code/data_processing.py | porcelluscavia/project-callihan-tureski | e0dcd2c4972cf12c03bd4dad7aef9cd0bb6f8587 | [
"MIT"
] | 1 | 2020-03-07T11:16:13.000Z | 2020-03-07T11:16:13.000Z | code/data_processing.py | porcelluscavia/project-callihan-tureski | e0dcd2c4972cf12c03bd4dad7aef9cd0bb6f8587 | [
"MIT"
] | null | null | null | code/data_processing.py | porcelluscavia/project-callihan-tureski | e0dcd2c4972cf12c03bd4dad7aef9cd0bb6f8587 | [
"MIT"
] | 1 | 2020-06-02T09:57:41.000Z | 2020-06-02T09:57:41.000Z | from nltk.corpus import stopwords
from nltk.stem.wordnet import WordNetLemmatizer
import string
from gensim import corpora
import os
import numpy as np
from chinese_processing import ChineseStopwords
import logging
def load_texts_from_directory(path_to_documents, subset=None):
    """
    Loads all files from a directory into parallel lists.

    The first line of each file is expected to hold '|'-separated keywords
    (optionally wrapped in '%%%' markers); the remaining lines are joined
    with spaces to form the document body.

    :param path_to_documents: directory containing the text files
    :param subset: optional (start, end) pair selecting a slice of the
        sorted file list
    :return: (documents, keywords, filenames) parallel lists
    """
    files = sorted(os.listdir(path_to_documents))
    if subset is not None:
        files = files[subset[0]:subset[1]]
    docs = []
    keywords = []
    filenames = []
    for f in files:
        filenames.append(f)
        doc = ''
        # Renamed from ``file`` to avoid shadowing the builtin; the ``with``
        # statement makes the explicit close() the original had redundant.
        with open(os.path.join(path_to_documents, f), 'r', encoding='utf-8') as fh:
            for i, l in enumerate(fh.readlines()):
                if i == 0:  # bug fix: was ``i is 0`` (identity, not equality)
                    # Strip the %%% markers; trailing '|' yields an empty
                    # final element, dropped by the [:-1] slice.
                    keywords.append(l.replace('%%%', '').strip().split('|')[:-1])
                else:
                    doc += l.strip() + ' '
        docs.append(doc)
    return docs, keywords, filenames
def docs2matrix(docs):
    """
    Transforms a list of documents into a bag of words matrix suitable for the LDA model.
    :param docs: list of tokenised documents (each a list of token strings)
    :return bag of words matrix and the gensim term dictionary:
    """
    # [token for doc in docs for token in doc]
    # Map every distinct token across all documents to an integer id.
    term_dictionary = corpora.Dictionary(docs)
    # Each document becomes a sparse list of (token_id, token_count) pairs.
    doc_matrix = [term_dictionary.doc2bow(doc) for doc in docs]
    logging.info("Len of raw corpus: %d | Len of matrix: %d" % (len(docs), len(doc_matrix)))
    return doc_matrix, term_dictionary
class Processing:
    """Text cleaning and LDA cluster-assignment helpers.

    Stopwords are chosen per language at construction time; Chinese uses a
    custom stopword list and skips WordNet lemmatisation.
    """

    def __init__(
            self,
            stopword_lang='english'
    ):
        """
        :param stopword_lang: language name used to select the stopword list
        """
        self.lang = stopword_lang
        if self.lang == 'chinese':
            self.stopwords = set(ChineseStopwords().chinese_stopwords)
        else:
            self.stopwords = set(stopwords.words(self.lang))
        self.punctuation = set(string.punctuation)
        self.lemmatize = WordNetLemmatizer()

    def cleaning(self, document):
        """
        Cleans document of stopwords and punctuation. Stopwords are specified at initialization of Processing.
        Lemmatizes for all languages except Chinese.
        :param document: raw text string
        :return tokenized and cleaned document:
        """
        remove_punct = ''.join(i for i in document.lower() if i not in self.punctuation)
        tokenized = [i for i in remove_punct.split() if i not in self.stopwords]
        # Bug fix: was ``self.lang is not 'chinese'`` — identity comparison
        # with a str literal is implementation-dependent; use inequality.
        if self.lang != 'chinese':
            tokenized = [self.lemmatize.lemmatize(i) for i in tokenized]
        return tokenized

    def clean_docs(self, docs):
        """
        Cleans all documents in a list.
        :param docs: list of raw text strings
        :return list of cleaned documents:
        """
        # Bug fix: the leftover debug ``print(cleaned[0])`` raised IndexError
        # for an empty input list; removed.
        return [self.cleaning(doc) for doc in docs]

    def cluster_data(self, doc_matrix, ldamodel, to_csv=False, keywords=None, filenames=None, num_categories=-1):
        """
        Gets cluster data. Writes to CSV if to_csv=True.
        :param doc_matrix: bag-of-words corpus (one entry per document)
        :param ldamodel: trained gensim LDA model
        :param to_csv: when True (and keywords/filenames/num_categories are
            provided) also write a per-document CSV report
        :param keywords: per-document keyword lists for the CSV report
        :param filenames: per-document file names for the CSV report
        :param num_categories: number of LDA topics (used in the CSV name)
        :return: list with the most probable topic id for each document
        """
        test_clusters = []
        for doc in doc_matrix:
            scores = ldamodel[doc]
            # Keep the topic with the highest probability for this document.
            test_clusters.append(scores[np.argmax([s[1] for s in scores])][0])
        # Bug fix: was ``num_categories is not -1`` (identity check on an
        # int literal); use inequality.
        if to_csv and keywords is not None and filenames is not None and num_categories != -1:
            filename = '%s_%d_categories.csv' % (self.lang, num_categories)
            with open(filename, 'w', encoding='utf-8') as f:
                f.write('file,language,num_categories,cluster,keywords')
                for i, fn in enumerate(filenames):
                    f.write('\n%s,%s,%d,%d,%s' % (
                        fn,
                        self.lang,
                        num_categories,
                        test_clusters[i],
                        '|'.join(keywords[i])
                    ))
        return test_clusters
| 34.871795 | 113 | 0.588725 |
b970bd910601b707fe4f8f5f4bc6bb2dae2ef5de | 667 | py | Python | libs/template_exception/template_exception/rbac.py | 1995chen/python-common-libs | dcf36376ae244426fb8da5b76b96fe8e0c3911af | [
"MIT"
] | null | null | null | libs/template_exception/template_exception/rbac.py | 1995chen/python-common-libs | dcf36376ae244426fb8da5b76b96fe8e0c3911af | [
"MIT"
] | null | null | null | libs/template_exception/template_exception/rbac.py | 1995chen/python-common-libs | dcf36376ae244426fb8da5b76b96fe8e0c3911af | [
"MIT"
] | null | null | null | # -*- coding: UTF-8 -*-
from typing import List
from .base import SSOException
class AuthorizedFailException(SSOException):
    """
    Authentication failed.
    """
    pass


class TokenInvalidException(SSOException):
    """
    Invalid token.
    """
    pass


class SSOServerException(SSOException):
    """
    SSO server error.
    """
    pass


class UserResourceNotFoundException(SSOException):
    """
    User does not exist.
    """
    pass


class PermissionsDenyException(SSOException):
    """
    Insufficient permissions.

    Raised when the user's roles do not include the roles required to
    access a resource; the message records both role lists.
    """
    def __init__(self, user_roles: List[str], require_roles: List[str]):
        # user_roles: roles the current user actually holds
        # require_roles: roles required to access the resource
        message: str = f"need {require_roles}, but provide {user_roles}"
        super().__init__(message)
| 14.822222 | 72 | 0.629685 |
b59c4fd26930fce881c73300486a70cca060f879 | 678 | py | Python | resources/lib/core/zbextension.py | adamprice2/ZattooBox | aae55c23f429bd6c0924a35c0e120b08df8d78f6 | [
"BSD-2-Clause"
] | 37 | 2015-01-22T15:43:36.000Z | 2021-03-03T13:42:24.000Z | resources/lib/core/zbextension.py | adamprice2/ZattooBox | aae55c23f429bd6c0924a35c0e120b08df8d78f6 | [
"BSD-2-Clause"
] | 48 | 2015-01-04T15:23:07.000Z | 2022-02-15T02:28:18.000Z | resources/lib/core/zbextension.py | adamprice2/ZattooBox | aae55c23f429bd6c0924a35c0e120b08df8d78f6 | [
"BSD-2-Clause"
] | 24 | 2015-02-17T12:38:25.000Z | 2021-03-01T11:31:54.000Z | # coding=utf-8
##################################
# ZattooBox extensions
# Base Class
# (c) 2014-2020 Pascal Nançoz
##################################
import os
class ZBExtension(object):
ZapiSession = None
ZBProxy = None
ExtensionsPath = None
def __init__(self, zapiSession, zbProxy):
self.ZapiSession = zapiSession
self.ZBProxy = zbProxy
self.ExtensionsPath = os.path.join(zbProxy.SourcePath, 'resources/data/extensions/')
self.init()
def init(self):
raise NotImplementedError('Not implemented')
def get_items(self):
raise NotImplementedError('Not implemented')
def activate_item(self, title, args):
raise NotImplementedError('Not implemented')
| 20.545455 | 86 | 0.672566 |
05fa40efc53d2b7a0f6ffdaeea9b1b7f65a1938f | 741 | py | Python | vectors/cryptography_vectors/__about__.py | wdscxsj/cryptography | 94590a9aecc9e5ef6fc8eda52bae43643a4c44bd | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | 2 | 2015-10-08T21:28:42.000Z | 2020-08-15T10:03:49.000Z | vectors/cryptography_vectors/__about__.py | wdscxsj/cryptography | 94590a9aecc9e5ef6fc8eda52bae43643a4c44bd | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | 99 | 2021-02-13T23:35:41.000Z | 2022-03-31T03:09:24.000Z | vectors/cryptography_vectors/__about__.py | wdscxsj/cryptography | 94590a9aecc9e5ef6fc8eda52bae43643a4c44bd | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | 3 | 2016-04-11T20:22:38.000Z | 2018-09-20T20:39:54.000Z | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
__all__ = [
"__title__",
"__summary__",
"__uri__",
"__version__",
"__author__",
"__email__",
"__license__",
"__copyright__",
]
__title__ = "cryptography_vectors"
__summary__ = "Test vectors for the cryptography package."
__uri__ = "https://github.com/pyca/cryptography"
__version__ = "35.0.0.dev1"
__author__ = "The Python Cryptographic Authority and individual contributors"
__email__ = "cryptography-dev@python.org"
__license__ = "BSD or Apache License, Version 2.0"
__copyright__ = "Copyright 2013-2021 %s" % __author__
| 26.464286 | 79 | 0.723347 |
2553cf69fd467d6e33065ec1a0e96600b615ff3c | 1,310 | py | Python | python3.7/cookiecutter-aws-sam-eventBridge-python/{{cookiecutter.project_name}}/hello_world/app.py | tgpadua/aws-sam-cli-app-templates | 2feaee402ad7376e1ca0b2f9126831cd67171df5 | [
"Apache-2.0"
] | 1 | 2020-08-14T16:09:58.000Z | 2020-08-14T16:09:58.000Z | python3.7/cookiecutter-aws-sam-eventBridge-python/{{cookiecutter.project_name}}/hello_world/app.py | johnjdailey/aws-sam-cli-app-templates | 58d91ed5820072d8a60c02d0c5b6410f5ab8193b | [
"Apache-2.0"
] | 3 | 2021-06-02T00:50:23.000Z | 2021-06-22T13:00:57.000Z | python3.7/cookiecutter-aws-sam-eventBridge-python/{{cookiecutter.project_name}}/hello_world/app.py | johnjdailey/aws-sam-cli-app-templates | 58d91ed5820072d8a60c02d0c5b6410f5ab8193b | [
"Apache-2.0"
] | null | null | null | # import requests
from model.aws.ec2 import Marshaller
from model.aws.ec2 import AWSEvent
from model.aws.ec2.ec2_instance_state_change_notification import EC2InstanceStateChangeNotification
def lambda_handler(event, context):
    """Sample Lambda handler for EventBridge EC2 state-change events.

    Parameters
    ----------
    event: dict, required
        EventBridge EC2 State Change event payload
        Event doc: https://docs.aws.amazon.com/eventbridge/latest/userguide/event-types.html#ec2-event-type

    context: object, required
        Lambda Context runtime methods and attributes
        Context doc: https://docs.aws.amazon.com/lambda/latest/dg/python-context-object.html

    Returns
    ------
        The event, with an updated detail type, marshalled back to a dict
    """
    # Turn the raw payload into strongly typed model objects.
    aws_event: AWSEvent = Marshaller.unmarshall(event, AWSEvent)
    notification: EC2InstanceStateChangeNotification = aws_event.detail

    # Business logic: report the observed state transition.
    print("Instance " + notification.instance_id + " transitioned to " + notification.state)

    # Record that this function processed the event.
    aws_event.detail_type = "HelloWorldFunction updated event of " + aws_event.detail_type

    # Hand the (updated) event back for further processing.
    return Marshaller.marshall(aws_event)
| 32.75 | 120 | 0.748092 |
a1c35b3bace17b0c8572f259295a7518261ba059 | 540 | py | Python | cat/udl/tennis/ScoreMessage.py | ratchetmdt/internationalization-python | fb33e3ee2d5c7eeca7ae3977b20d27444533aa6d | [
"Apache-2.0"
] | null | null | null | cat/udl/tennis/ScoreMessage.py | ratchetmdt/internationalization-python | fb33e3ee2d5c7eeca7ae3977b20d27444533aa6d | [
"Apache-2.0"
] | null | null | null | cat/udl/tennis/ScoreMessage.py | ratchetmdt/internationalization-python | fb33e3ee2d5c7eeca7ae3977b20d27444533aa6d | [
"Apache-2.0"
] | null | null | null | import gettext
import os
import sys
from gettext import gettext as _
appdir = os.path.dirname(sys.argv[0])
appdir = os.path.abspath(appdir)
localedir = os.path.join(appdir, "locales")
gettext.install('bundle', localedir, "utf-8")
class ScoreMessage(object):
LOVE_ALL = _("Love-All")
FIFTEEN_ALL = _("Fifteen-All")
THIRTY_ALL = _("Thirty-All")
DEUCE = _("Deuce")
ADVANTAGE = _("Advantage ")
WIN_FOR = _("Win for ")
LOVE = _("Love")
FIFTEEN = _("Fifteen")
THIRTY = _("Thirty")
FORTY = _("Forty")
| 20.769231 | 45 | 0.642593 |
43ed3ba014c035f7c5b773fabbd8b5b106ed3103 | 977 | py | Python | Sketches/MPS/Experiments/Likefile2/likefile/TestLikeFile.py | sparkslabs/kamaelia_orig | 24b5f855a63421a1f7c6c7a35a7f4629ed955316 | [
"Apache-2.0"
] | 12 | 2015-10-20T10:22:01.000Z | 2021-07-19T10:09:44.000Z | Sketches/MPS/Experiments/Likefile2/likefile/TestLikeFile.py | sparkslabs/kamaelia_orig | 24b5f855a63421a1f7c6c7a35a7f4629ed955316 | [
"Apache-2.0"
] | 2 | 2015-10-20T10:22:55.000Z | 2017-02-13T11:05:25.000Z | Sketches/MPS/Experiments/Likefile2/likefile/TestLikeFile.py | sparkslabs/kamaelia_orig | 24b5f855a63421a1f7c6c7a35a7f4629ed955316 | [
"Apache-2.0"
] | 6 | 2015-03-09T12:51:59.000Z | 2020-03-01T13:06:21.000Z | #!/usr/bin/python
import time
from background import background
from Kamaelia.UI.Pygame.Text import Textbox, TextDisplayer
from LikeFile import LikeFile
background().start()
import Queue
TD = LikeFile(
TextDisplayer(position=(20, 90),
text_height=36,
screen_width=900,
screen_height=200,
background_color=(130,0,70),
text_color=(255,255,255)
)
).activate()
TB = LikeFile(
Textbox(position=(20, 340),
text_height=36,
screen_width=900,
screen_height=400,
background_color=(130,0,70),
text_color=(255,255,255)
)
).activate()
message = "hello\n"
while 1:
time.sleep(1)
try:
data = TB.get("outbox")
print data
message = data
except Queue.Empty:
pass
TD.put(message, "inbox")
| 24.425 | 58 | 0.520983 |
ab3a7a96598883c484260453190949792c7d9e2d | 951 | py | Python | relogic/logickit/modules/gen_repr.py | Impavidity/relogic | f647106e143cd603b95b63e06ea530cdd516aefe | [
"MIT"
] | 24 | 2019-07-20T02:10:21.000Z | 2022-03-15T07:13:07.000Z | relogic/logickit/modules/gen_repr.py | One-paper-luck/relogic | f647106e143cd603b95b63e06ea530cdd516aefe | [
"MIT"
] | 3 | 2019-11-28T04:19:25.000Z | 2019-11-30T23:29:19.000Z | relogic/logickit/modules/gen_repr.py | One-paper-luck/relogic | f647106e143cd603b95b63e06ea530cdd516aefe | [
"MIT"
] | 5 | 2019-11-27T03:12:07.000Z | 2021-12-08T11:45:43.000Z | import torch.nn as nn
import torch.nn.functional as F
import torch
from relogic.logickit.base import utils
from relogic.logickit.utils import utils
class GenRepr(nn.Module):
    """Mean-pools token features over the unmasked positions of a sequence."""

    def __init__(self, config, task_name, n_classes=None):
        super(GenRepr, self).__init__()
        self.config = config
        self.task_name = task_name

    def forward(self, *inputs, **kwargs):
        """Return the average feature vector per sequence.

        Features come either from ``encoding_results`` (first entry of
        ``selected_non_final_layers_features``) or from the ``features``
        keyword; ``input_mask`` marks the valid token positions.
        """
        encoding_results = kwargs.pop("encoding_results", None)
        if encoding_results is not None and "selected_non_final_layers_features" in encoding_results:
            # Only the first selected layer is used for now.
            features = encoding_results["selected_non_final_layers_features"][0]
        else:
            features = kwargs.pop("features")
        mask = kwargs["input_mask"].float()
        # Zero out padded positions, then sum over the sequence axis.
        masked_sum = (features * mask.unsqueeze(-1)).sum(dim=-2)
        # Number of valid tokens per sequence, used for the average.
        lengths = (kwargs["input_mask"] > 0).sum(dim=-1)
        return masked_sum / lengths.unsqueeze(-1).float()
82b1d5b5e45af1c4921bf3e8ca812c75898f14a5 | 4,225 | py | Python | tests/callbacks/test_save_callback.py | zurutech/ashpy | 16d53100c943abf4b051b27cfd025df0a6e7c8c9 | [
"Apache-2.0"
] | 89 | 2019-07-05T11:57:24.000Z | 2021-11-22T04:25:11.000Z | tests/callbacks/test_save_callback.py | zurutech/ashpy | 16d53100c943abf4b051b27cfd025df0a6e7c8c9 | [
"Apache-2.0"
] | 58 | 2019-07-09T09:59:19.000Z | 2021-05-21T15:29:53.000Z | tests/callbacks/test_save_callback.py | zurutech/ashpy | 16d53100c943abf4b051b27cfd025df0a6e7c8c9 | [
"Apache-2.0"
] | 11 | 2019-07-08T08:39:06.000Z | 2021-08-23T12:15:16.000Z | # Copyright 2019 Zuru Tech HK Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test Save Callback."""
import os
from pathlib import Path
from typing import Tuple
import pytest
from ashpy.callbacks import SaveCallback, SaveFormat, SaveSubFormat
from ashpy.models.gans import ConvDiscriminator, ConvGenerator
from tests.utils.fake_training_loop import FakeAdversarialTraining
# (SaveFormat, SaveSubFormat) pairs that SaveCallback accepts.
COMPATIBLE_FORMAT_AND_SUB_FORMAT = [
    (SaveFormat.WEIGHTS, SaveSubFormat.TF),
    (SaveFormat.WEIGHTS, SaveSubFormat.H5),
    (SaveFormat.MODEL, SaveSubFormat.TF),
]
# Pair that SaveCallback is expected to reject with NotImplementedError.
INCOMPATIBLE_FORMAT_AND_SUB_FORMAT = [(SaveFormat.MODEL, SaveSubFormat.H5)]
@pytest.mark.parametrize("save_format_and_sub_format", COMPATIBLE_FORMAT_AND_SUB_FORMAT)
def test_save_callback_compatible(
    tmpdir,
    save_format_and_sub_format: Tuple[SaveFormat, SaveSubFormat],
    save_dir: Path,
):
    """Test the integration between callbacks and trainer.

    Runs a short adversarial training with a SaveCallback for every
    supported format/sub-format pair and checks the on-disk layout.
    """
    save_format, save_sub_format = save_format_and_sub_format
    _test_save_callback_helper(tmpdir, save_format, save_sub_format, save_dir)
    save_dirs = os.listdir(save_dir)
    # 2 folders: generator and discriminator
    assert len(save_dirs) == 2
    # Each model directory must contain a folder named after the format.
    for model_dir in save_dirs:
        assert save_format.name() in [
            x.split(os.path.sep)[-1] for x in os.listdir(save_dir / model_dir)
        ]
@pytest.mark.parametrize(
    "save_format_and_sub_format", INCOMPATIBLE_FORMAT_AND_SUB_FORMAT
)
def test_save_callback_incompatible(
    tmpdir,
    save_format_and_sub_format: Tuple[SaveFormat, SaveSubFormat],
    save_dir: Path,
):
    """Test the integration between callbacks and trainer.

    Unsupported format/sub-format pairs must raise NotImplementedError
    and must not leave any output directory behind.
    """
    save_format, save_sub_format = save_format_and_sub_format
    with pytest.raises(NotImplementedError):
        _test_save_callback_helper(tmpdir, save_format, save_sub_format, save_dir)
    # assert no folder has been created
    assert not save_dir.exists()
def _test_save_callback_helper(tmpdir, save_format, save_sub_format, save_dir: Path):
    """Run a minimal adversarial training with a SaveCallback attached.

    Builds a small 28x28 convolutional GAN, wires a SaveCallback for both
    models with the given format/sub-format, and runs the fake training
    loop once so the callback gets a chance to write to ``save_dir``.
    """
    image_resolution = (28, 28)
    layer_spec_input_res = (7, 7)
    layer_spec_target_res = (7, 7)
    kernel_size = 5
    channels = 1

    # model definition
    generator = ConvGenerator(
        layer_spec_input_res=layer_spec_input_res,
        layer_spec_target_res=image_resolution,
        kernel_size=kernel_size,
        initial_filters=32,
        filters_cap=16,
        channels=channels,
    )

    discriminator = ConvDiscriminator(
        layer_spec_input_res=image_resolution,
        layer_spec_target_res=layer_spec_target_res,
        kernel_size=kernel_size,
        initial_filters=16,
        filters_cap=32,
        output_shape=1,
    )

    # Save both models with the format/sub-format under test.
    callbacks = [
        SaveCallback(
            models=[generator, discriminator],
            save_dir=save_dir,
            verbose=1,
            save_format=save_format,
            save_sub_format=save_sub_format,
        )
    ]

    FakeAdversarialTraining(
        tmpdir, callbacks=callbacks, generator=generator, discriminator=discriminator,
    )()
def test_save_callback_type_error(save_dir: str,):
    """
    Check that SaveCallback rejects wrongly-typed arguments.

    A TypeError must be raised when either save_format or save_sub_format
    is not an instance of the expected enum type.
    """
    common_kwargs = dict(models=[], save_dir=save_dir, verbose=1)
    # Wrong type for save_format.
    with pytest.raises(TypeError):
        SaveCallback(
            save_format="save_format",
            save_sub_format=SaveSubFormat.TF,
            **common_kwargs,
        )
    # Wrong type for save_sub_format.
    with pytest.raises(TypeError):
        SaveCallback(
            save_format=SaveFormat.WEIGHTS,
            save_sub_format="sub-format",
            **common_kwargs,
        )
| 30.615942 | 88 | 0.707692 |
025cd9bc4966667923ac5933c8fa860fbea6492d | 8,752 | py | Python | term.py | kolyat/chainsyn | a3be6c503f5d707aa6350c8fcb43f399c9092473 | [
"MIT"
] | null | null | null | term.py | kolyat/chainsyn | a3be6c503f5d707aa6350c8fcb43f399c9092473 | [
"MIT"
] | null | null | null | term.py | kolyat/chainsyn | a3be6c503f5d707aa6350c8fcb43f399c9092473 | [
"MIT"
] | null | null | null | # Copyright (c) 2016-2022 Kirill 'Kolyat' Kiselnikov
# This file is the part of chainsyn, released under modified MIT license
# See the file LICENSE.txt included in this distribution
"""Main module of chainsyn"""
import os
import datetime
import curses
import re
import config
from core import processing, tools
def is_file(raw_path):
    """Check if input data is a file name

    :param raw_path: possible path to file

    :return: True if raw_path is an existing regular file
    :return: False otherwise
    """
    # os.path.isfile already returns a bool; no need for an if/else that
    # re-creates the True/False literals.
    return os.path.isfile(os.path.normpath(raw_path))
def generate_chain_info():
    """Build an identifier for a manually entered chain.

    The identifier is 'chainsyn-' followed by the current local
    timestamp formatted as YYYYMMDD-HHMMSS.
    """
    timestamp = datetime.datetime.today().strftime('%Y%m%d-%H%M%S')
    return 'chainsyn-' + timestamp
def selection_mode(screen):
    """Switch to selection mode

    Disables echoing and line buffering so single keypresses are delivered
    immediately, and enables keypad translation for special keys.

    :param screen: main window
    """
    curses.noecho()
    curses.cbreak()
    screen.keypad(True)
def input_mode(screen):
    """Switch to input mode

    Restores echoing and line buffering (the inverse of selection_mode())
    so the user can type a whole line, and disables keypad translation.

    :param screen: main window
    """
    curses.echo()
    curses.nocbreak()
    screen.keypad(False)
def print_results(screen, chain):
    """Print results of processing

    Shows each populated chain (first/second DNA, RNA, protein) one
    section at a time, waiting for a keypress between sections, then
    prints the collected statistics.

    :param screen: main window
    :param chain: Chain object
    """
    screen.clear()
    # Print results
    if chain.dna1:
        screen.addstr('{} - first DNA chain\n\n'.format(chain.info))
        for n in chain.dna1:
            screen.addstr(n, nucleo_color_pattern[n])
        screen.refresh()
        screen.getkey()
        screen.addstr('\n\n\n')
    if chain.dna2:
        screen.addstr('{} - second DNA chain\n\n'.format(chain.info))
        for n in chain.dna2:
            screen.addstr(n, nucleo_color_pattern[n])
        screen.refresh()
        screen.getkey()
        screen.addstr('\n\n\n')
    if chain.rna:
        screen.addstr('{} - RNA chain\n\n'.format(chain.info))
        for n in chain.rna:
            screen.addstr(n, nucleo_color_pattern[n])
        screen.refresh()
        screen.getkey()
        screen.addstr('\n\n\n')
    if chain.protein:
        screen.addstr('{} - protein chain\n\n'.format(chain.info))
        for n in chain.protein:
            screen.addstr(n, abc_color_pattern[n])
        screen.refresh()
        screen.getkey()
        screen.addstr('\n\n\n')
    # Print stats
    if chain.stats.get('nucleotides'):
        screen.addstr('Number of nucleotides: {}\n'
                      ''.format(chain.stats['nucleotides']))
    if chain.stats.get('codons'):
        screen.addstr('Number of codons: {}\n'.format(chain.stats['codons']))
    # Bug fix: the stat is stored and read under 'gc_content'; the old code
    # looked up 'gc-content' (hyphen), so this line could never be printed.
    # Also use isinstance() instead of comparing type() to float.
    if isinstance(chain.stats.get('gc_content'), float):
        screen.addstr('GC-content: {:f} %\n'.format(chain.stats['gc_content']))
    if chain.stats.get('mass'):
        screen.addstr('Protein\'s mass: {}\n'.format(chain.stats['mass']))
    screen.getkey()
def main(screen):
    """Main function

    Initializes curses colors, renders the main menu and dispatches the
    selected process to the inner driver().

    :param screen: main window
    """

    def driver(process):
        """Common function which consists of user input, processing,
        writing to file and results printing.

        :param process: type of process: replication, transcription,
                        translation

        :return True: on success
        :return False: if fails

        :raises tools.RoutineErr: if the process name is unknown
        """
        if process not in menu_items.keys():
            raise tools.RoutineErr('Driver call error: unknown process - {}'
                                   ''.format(process))
        # User input: the prompt names the expected source chain type.
        base = str()
        if process in ('replication', 'transcription'):
            base = 'DNA'
        if process == 'translation':
            base = 'RNA'
        screen.clear()
        screen.addstr('Enter source {} '
                      'or path to source file in FASTA format\n'.format(base))
        screen.addstr('> ')
        screen.refresh()
        input_mode(screen)
        y, x = screen.getyx()
        input_data = screen.getstr(y, x)
        selection_mode(screen)
        screen.addstr('\n')
        # Bug fix: use a raw string for the regex ('\s' is an invalid
        # escape sequence in a plain string literal).
        input_str = re.sub(r'\s+', '', input_data.decode())
        source = dict()
        if is_file(input_str):
            try:
                source.update(tools.from_file(input_str))
            except tools.RoutineErr as err:
                screen.addstr('{}\n'.format(str(err)))
                screen.getkey()
                return False
        else:
            # Treat raw input as a single manually entered chain.
            source.update({generate_chain_info(): input_str.upper()})
        # Process source data
        chains = list()
        for s in source:
            chain = processing.Chain(s, source[s])
            try:
                if process == 'replication':
                    chain.replicate()
                if process == 'transcription':
                    chain.transcribe()
                if process == 'translation':
                    chain.translate()
            except processing.ProcessingErr as err:
                screen.addstr('{}\n'.format(str(err)))
                screen.getkey()
            finally:
                # Stats are collected even for failed chains so partial
                # results can still be shown/exported.
                chain.collect_stats()
                chains.append(chain)
        # Export to text file
        if config.EXPORT_ENABLED:
            for chain in chains:
                try:
                    tools.to_file(config.EXPORT_DIR, chain)
                except tools.RoutineErr as err:
                    screen.addstr('{}\n'.format(str(err)))
                    screen.getkey()
        # Print results
        for chain in chains:
            print_results(screen, chain)
        # Bug fix: the docstring promises True on success, but the function
        # previously fell through and returned None.
        return True

    # Init colors if supported
    if curses.has_colors():
        # Set up dark gray if possible
        if curses.can_change_color():
            curses.init_color(curses.COLOR_WHITE, 70, 70, 70)
        # Init color pairs
        curses.init_pair(1, curses.COLOR_YELLOW, curses.COLOR_BLACK)
        curses.init_pair(2, curses.COLOR_GREEN, curses.COLOR_BLACK)
        curses.init_pair(3, curses.COLOR_CYAN, curses.COLOR_BLACK)
        curses.init_pair(4, curses.COLOR_MAGENTA, curses.COLOR_BLACK)
        curses.init_pair(5, curses.COLOR_WHITE, curses.COLOR_BLACK)
        curses.init_pair(6, curses.COLOR_RED, curses.COLOR_BLACK)
    else:
        # Monochrome fallback: all pairs render as plain white-on-black.
        for i in (1, 2, 3, 4, 5, 6):
            curses.init_pair(i, curses.COLOR_WHITE, curses.COLOR_BLACK)
    # Init color for nucleotides
    nucleo_color_pattern.update({
        'A': curses.color_pair(6),
        'T': curses.color_pair(4),
        'U': curses.color_pair(1),
        'C': curses.color_pair(2),
        'G': curses.color_pair(3)
    })
    # Init color pattern for amino acids
    abc_color_pattern.update({
        'F': curses.color_pair(1),
        'L': curses.color_pair(1),
        'S': curses.color_pair(2),
        'P': curses.color_pair(1),
        'H': curses.color_pair(3),
        'Q': curses.color_pair(2),
        'Y': curses.color_pair(2),
        '*': curses.color_pair(5),
        'C': curses.color_pair(2),
        'W': curses.color_pair(1),
        'R': curses.color_pair(3),
        'I': curses.color_pair(1),
        'M': curses.color_pair(1),
        'T': curses.color_pair(2),
        'N': curses.color_pair(2),
        'K': curses.color_pair(3),
        'V': curses.color_pair(1),
        'A': curses.color_pair(1),
        'D': curses.color_pair(4),
        'E': curses.color_pair(4),
        'G': curses.color_pair(1)
    })
    # Init main window
    screen.scrollok(True)
    selection_mode(screen)
    screen.clear()
    # Main cycle
    menu_items = {
        'replication': '1',
        'transcription': '2',
        'translation': '3',
        'exit': '0'
    }
    while True:
        screen.clear()
        screen.addstr('\n')
        screen.addstr('========\n')
        screen.addstr('chainsyn\n')
        screen.addstr('========\n')
        screen.addstr('\n\n')
        screen.addstr('Main menu\n')
        screen.addstr('\n')
        screen.addstr('{} - Replication (DNA -> DNA)\n'
                      ''.format(menu_items['replication']))
        screen.addstr('{} - Transcription (DNA -> RNA)\n'
                      ''.format(menu_items['transcription']))
        screen.addstr('{} - Translation (RNA -> protein)\n'
                      ''.format(menu_items['translation']))
        screen.addstr('{} - Exit\n'.format(menu_items['exit']))
        screen.addstr('\n')
        screen.refresh()
        item = screen.getkey()
        if item == menu_items['replication']:
            driver('replication')
        if item == menu_items['transcription']:
            driver('transcription')
        if item == menu_items['translation']:
            driver('translation')
        if item == menu_items['exit']:
            break
    input_mode(screen)
    curses.endwin()
if __name__ == '__main__':
    # Module-level color lookup tables: filled in by main() once curses is
    # initialized, and read by print_results().
    nucleo_color_pattern, abc_color_pattern = dict(), dict()
    # curses.wrapper handles terminal setup/teardown even on exceptions.
    curses.wrapper(main)
| 31.941606 | 79 | 0.562614 |
9f9d695a628865ea7d53e3b504399c18db607c13 | 838 | py | Python | app/dbCompressor.py | DepthDeluxe/dot11sniffer | 4c283122e158a854b940136a9675d3e0515ee219 | [
"MIT"
] | 4 | 2016-11-29T16:09:54.000Z | 2021-11-12T10:34:49.000Z | app/dbCompressor.py | DepthDeluxe/dot11sniffer | 4c283122e158a854b940136a9675d3e0515ee219 | [
"MIT"
] | null | null | null | app/dbCompressor.py | DepthDeluxe/dot11sniffer | 4c283122e158a854b940136a9675d3e0515ee219 | [
"MIT"
] | 1 | 2021-11-12T10:34:49.000Z | 2021-11-12T10:34:49.000Z | import zlib
import pickle as cPickle
import pymongo
import bson
class Compress:
def __init__(self):
self.client = pymongo.MongoClient("gouda.bucknell.edu")
self.collection = self.client.cheddar.compress
self.timedb = self.client.cheddar.times
def upload(self,time,data):
pickled = cPickle.dumps(data)
compressed = zlib.compress(pickled,9)
update = {}
update['$set'] = {'data':bson.binary.Binary(compressed)}
self.collection.update({'_id':time},update,upsert=True)
def download(self,time):
compressed = self.collection.find_one({'_id':time})['data']
pickled = zlib.decompress(str(compressed))
data = cPickle.loads(pickled)
return {'_id':time,'data':data}
def remove(self,time):
self.timedb.delete_one({'_id':time})
| 31.037037 | 67 | 0.647971 |
ec39279854858df1d016d98494521a87706033f7 | 9,901 | py | Python | rr/tests/test_serviceprovider.py | UniversityofHelsinki/sp-registry | b1336b89788c076bf93f61b97b5469a99acd902c | [
"MIT"
] | null | null | null | rr/tests/test_serviceprovider.py | UniversityofHelsinki/sp-registry | b1336b89788c076bf93f61b97b5469a99acd902c | [
"MIT"
] | 1 | 2020-08-10T13:16:58.000Z | 2020-08-18T06:30:20.000Z | rr/tests/test_serviceprovider.py | UniversityofHelsinki/sp-registry | b1336b89788c076bf93f61b97b5469a99acd902c | [
"MIT"
] | null | null | null | from django.contrib.auth.models import Group, User
from django.test import RequestFactory, TestCase
from django.urls import reverse
from rr.models.serviceprovider import ServiceProvider
from rr.views.serviceprovider import ServiceProviderList
class ServiceProviderListTestCase(TestCase):
    """Access-control tests for the service provider list view."""

    def setUp(self):
        self.factory = RequestFactory()
        self.user = User.objects.create(username='tester')
        self.superuser = User.objects.create(username="superuser", is_superuser=True)
        self.admin_sp = ServiceProvider.objects.create(
            entity_id='test:entity:1', service_type='saml')
        self.user_sp = ServiceProvider.objects.create(
            entity_id='https://sp2.example.org/sp', service_type='saml',
            production=True)
        self.user_sp.admins.add(self.user)
        self.group = Group.objects.create(name='testgroup')
        self.admin_sp.admin_groups.add(self.group)

    def _list_objects(self, user):
        """Render the list view as *user* and return its object list."""
        request = self.factory.get(reverse('serviceprovider-list'))
        request.user = user
        response = ServiceProviderList.as_view()(request)
        self.assertEqual(response.status_code, 200)
        return response.context_data['object_list']

    def test_sp_view_list_denies_anonymous(self):
        login_url = reverse('login') + '?next=' + reverse('serviceprovider-list')
        for method in (self.client.get, self.client.post):
            response = method(reverse('serviceprovider-list'), follow=True)
            self.assertRedirects(response, login_url)

    def test_sp_view_list_normal_user(self):
        objects = self._list_objects(self.user)
        self.assertTrue(self.admin_sp not in objects)
        self.assertTrue(self.user_sp in objects)

    def test_sp_view_list_normal_user_group_permissions(self):
        # Group membership grants visibility of the group-administered SP.
        self.user.groups.add(self.group)
        objects = self._list_objects(self.user)
        self.assertTrue(self.admin_sp in objects)
        self.assertTrue(self.user_sp in objects)

    def test_sp_view_list_super_user(self):
        objects = self._list_objects(self.superuser)
        self.assertTrue(self.admin_sp in objects)
        self.assertTrue(self.user_sp in objects)
class ServiceProviderDetailTestCase(TestCase):
    """Tests for the service provider summary view and its validation flow."""

    def setUp(self):
        self.factory = RequestFactory()
        self.user = User.objects.create(username='tester')
        self.superuser = User.objects.create(username="superuser", is_superuser=True)
        # admin_sp has no admins; user_sp is administered by self.user.
        self.admin_sp = ServiceProvider.objects.create(entity_id='test:entity:1', service_type='saml')
        self.user_sp = ServiceProvider.objects.create(entity_id='https://sp2.example.org/sp', service_type='saml')
        self.user_sp.admins.add(self.user)

    def test_sp_view_denies_anonymous(self):
        # Both GET and POST must redirect to the login page.
        response = self.client.get(reverse('summary-view', kwargs={'pk': self.user_sp.pk}), follow=True)
        self.assertRedirects(response,
                             reverse('login') + '?next=' + reverse('summary-view', kwargs={'pk': self.user_sp.pk}))
        response = self.client.post(reverse('summary-view', kwargs={'pk': self.user_sp.pk}), follow=True)
        self.assertRedirects(response,
                             reverse('login') + '?next=' + reverse('summary-view', kwargs={'pk': self.user_sp.pk}))

    def test_sp_view_denies_unauthorized_user(self):
        self.client.force_login(self.user)
        # GET on an SP the user does not administer is hidden as 404.
        response = self.client.get(reverse('summary-view', kwargs={'pk': self.admin_sp.pk}))
        self.assertEqual(response.status_code, 404)
        # NOTE(review): POST returns 200 with an error page rather than 404;
        # presumably intentional — confirm against the view implementation.
        response = self.client.post(reverse('summary-view', kwargs={'pk': self.admin_sp.pk}))
        self.assertEqual(response.status_code, 200)
        self.assertIn('You should not be here.', response.content.decode())

    def test_sp_view_summary(self):
        self.client.force_login(self.user)
        response = self.client.get(reverse('summary-view', kwargs={'pk': self.user_sp.pk}))
        self.assertEqual(response.status_code, 200)

    def test_sp_view_validation_message(self):
        # A regular admin sees the pending-validation notice.
        self.client.force_login(self.user)
        response = self.client.get(reverse('summary-view', kwargs={'pk': self.user_sp.pk}))
        self.assertIn('Waiting for validation', response.content.decode())

    def test_sp_view_validation_message_superuser(self):
        # A superuser sees the actionable validation control instead.
        self.client.force_login(self.superuser)
        response = self.client.get(reverse('summary-view', kwargs={'pk': self.user_sp.pk}))
        self.assertIn('Validate changes', response.content.decode())

    def test_sp_view_modify_before_validate(self):
        self.client.force_login(self.superuser)
        # Capture the timestamp, then save() to make it stale: a validation
        # POST with an outdated modified_date must be rejected (the
        # 'Validate changes' control is still shown).
        modified_date = self.user_sp.updated_at.strftime("%Y%m%d%H%M%S%f")
        self.user_sp.save()
        response = self.client.post(reverse('summary-view', kwargs={'pk': self.user_sp.pk}),
                                    {'validate_changes': 'ok', 'modified_date': modified_date},
                                    follow=True)
        self.assertIn('Validate changes', response.content.decode())
        self.assertEqual(response.status_code, 200)

    def test_sp_view_validate(self):
        self.client.force_login(self.superuser)
        # With a current modified_date the validation succeeds and the
        # user_sp gains a validated timestamp (admin_sp stays unvalidated).
        modified_date = self.user_sp.updated_at.strftime("%Y%m%d%H%M%S%f")
        response = self.client.post(reverse('summary-view', kwargs={'pk': self.user_sp.pk}),
                                    {'validate_changes': 'ok', 'modified_date': modified_date},
                                    follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertNotIn('Validate changes', response.content.decode())
        self.assertEqual(ServiceProvider.objects.all().count(), 2)
        self.assertEqual(ServiceProvider.objects.filter(validated=None).count(), 1)
        self.assertIsNotNone(ServiceProvider.objects.get(pk=self.user_sp.pk).validated)
class ServiceProviderBasicInformationTestCase(TestCase):
    """Tests for the basic-information update view (access and validation)."""

    def setUp(self):
        self.factory = RequestFactory()
        self.user = User.objects.create(username='tester')
        self.superuser = User.objects.create(username="superuser", is_superuser=True)
        # admin_sp has no admins; user_sp is administered by self.user.
        self.admin_sp = ServiceProvider.objects.create(entity_id='test:entity:1', service_type='saml')
        self.user_sp = ServiceProvider.objects.create(entity_id='https://sp2.example.org/sp', service_type='saml')
        self.user_sp.admins.add(self.user)

    def test_sp_basic_information_view_denies_anonymous(self):
        # Both GET and POST must redirect to the login page.
        response = self.client.get(reverse('basicinformation-update', kwargs={'pk': self.user_sp.pk}), follow=True)
        self.assertRedirects(response,
                             reverse('login') + '?next=' + reverse('basicinformation-update', kwargs={'pk': self.user_sp.pk}))
        response = self.client.post(reverse('basicinformation-update', kwargs={'pk': self.user_sp.pk}), follow=True)
        self.assertRedirects(response,
                             reverse('login') + '?next=' + reverse('basicinformation-update', kwargs={'pk': self.user_sp.pk}))

    def test_sp_basic_information_view_denies_unauthorized_user(self):
        # Non-admins get 404 for both GET and POST (existence is hidden).
        self.client.force_login(self.user)
        response = self.client.get(reverse('basicinformation-update', kwargs={'pk': self.admin_sp.pk}))
        self.assertEqual(response.status_code, 404)
        response = self.client.post(reverse('basicinformation-update', kwargs={'pk': self.admin_sp.pk}))
        self.assertEqual(response.status_code, 404)

    def test_sp_basic_information_view_summary(self):
        self.client.force_login(self.user)
        response = self.client.get(reverse('basicinformation-update', kwargs={'pk': self.user_sp.pk}))
        self.assertEqual(response.status_code, 200)

    def test_sp_basic_information_admin_field_visiblity(self):
        # Admin-only form fields are hidden from regular users but shown
        # to superusers.
        self.client.force_login(self.user)
        response = self.client.get(reverse('basicinformation-update', kwargs={'pk': self.user_sp.pk}))
        self.assertNotIn('organization', response.content.decode())
        self.assertNotIn('admin_notes', response.content.decode())
        self.client.force_login(self.superuser)
        response = self.client.get(reverse('basicinformation-update', kwargs={'pk': self.user_sp.pk}))
        self.assertIn('organization', response.content.decode())
        self.assertIn('admin_notes', response.content.decode())

    def test_sp_basic_information_name_required(self):
        # At least one of name_fi / name_en must be supplied; either one
        # alone satisfies the form validation.
        self.client.force_login(self.user)
        response = self.client.post(reverse('basicinformation-update', kwargs={'pk': self.user_sp.pk}),
                                    {'name_fi': '', 'name_en': ''})
        self.assertEqual(response.status_code, 200)
        self.assertIn('Name in English or in Finnish is required.', response.content.decode())
        response = self.client.post(reverse('basicinformation-update', kwargs={'pk': self.user_sp.pk}),
                                    {'name_fi': 'abc', 'name_en': ''}, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertNotIn('Name in English or in Finnish is required.', response.content.decode())
        response = self.client.post(reverse('basicinformation-update', kwargs={'pk': self.user_sp.pk}),
                                    {'name_fi': '', 'name_en': 'abc'}, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertNotIn('Name in English or in Finnish is required.', response.content.decode())
15fa31e0db439c8e884fc7137a34d5c7da7dc8e9 | 1,842 | py | Python | proselint/checks/misc/waxed.py | ankita240796/proselint | 50d2a482df8f467737f9c958ace98ba152bec832 | [
"BSD-3-Clause"
] | 4,163 | 2015-10-03T07:37:21.000Z | 2022-03-31T03:52:32.000Z | proselint/checks/misc/waxed.py | ankita240796/proselint | 50d2a482df8f467737f9c958ace98ba152bec832 | [
"BSD-3-Clause"
] | 878 | 2015-09-30T20:03:33.000Z | 2022-03-28T11:06:15.000Z | proselint/checks/misc/waxed.py | ankita240796/proselint | 50d2a482df8f467737f9c958ace98ba152bec832 | [
"BSD-3-Clause"
] | 249 | 2015-10-04T12:21:27.000Z | 2022-02-28T22:13:11.000Z | """Waxed lyrical.
---
layout: post
source: Fowler's Modern English Usage
source_url: bit.ly/1YBG8QJ
title: Waxed lyrical
date: 2016-03-10 14:48:42
categories: writing
---
Fowler's says:
Its primary meaning 'grow larger, increase' (as opposed to 'wane') leads
naturally to the sense 'pass into a specified state or mood, begin to use a
specified tone. In this meaning a following modifier must be an adj. not an
adverb ('He waxed enthusiastic [not enthusiastically] about Australia').
"""
from proselint.tools import memoize, preferred_forms_check
@memoize
def check(text):
    """Suggest the preferred forms."""
    err = "misc.waxed"
    msg = "The modifier following 'waxed' must be an adj.: '{}' is correct"

    waxes = ["wax", "waxes", "waxed", "waxing"]
    # (adjective, adverb) pairs: the adjective form is correct after 'wax'.
    modifiers = [("ebullient", "ebulliently"),
                 ("ecstatic", "ecstatically"),
                 ("eloquent", "eloquently"),
                 ("enthusiastic", "enthusiastically"),
                 ("euphoric", "euphorically"),
                 ("indignant", "indignantly"),
                 ("lyrical", "lyrically"),
                 ("melancholic", "melancholically"),
                 ("metaphorical", "metaphorically"),
                 ("nostalgic", "nostalgically"),
                 ("patriotic", "patriotically"),
                 ("philosophical", "philosophically"),
                 ("poetic", "poetically"),
                 ("rhapsodic", "rhapsodically"),
                 ("romantic", "romantically"),
                 ("sentimental", "sentimentally")
                 ]

    # Cross every wax form with every modifier pair:
    # ["waxed lyrical", ["waxed lyrically"]], ...
    preferred = [[verb + ' ' + adjective, [verb + ' ' + adverb]]
                 for verb in waxes
                 for adjective, adverb in modifiers]

    return preferred_forms_check(text, preferred, err, msg)
67a48c9c9fa7f57b90c676ed012d3675270d6f35 | 545 | py | Python | setup.py | wheeler-microfluidics/logging-helpers | febc80b8204b6986e10ff061d25ab02c2154ae22 | [
"BSD-3-Clause"
] | null | null | null | setup.py | wheeler-microfluidics/logging-helpers | febc80b8204b6986e10ff061d25ab02c2154ae22 | [
"BSD-3-Clause"
] | null | null | null | setup.py | wheeler-microfluidics/logging-helpers | febc80b8204b6986e10ff061d25ab02c2154ae22 | [
"BSD-3-Clause"
] | null | null | null | import sys
import setuptools as st
import versioneer
st.setup(name='logging-helpers',
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
description='Add description here.',
keywords='',
author='Christian Fobel',
author_email='christian@fobel.net',
url='https://github.com/wheeler-microfluidics/logging-helpers',
license='BSD',
packages=['logging_helpers'],
# Install data listed in `MANIFEST.in`
include_package_data=True)
| 28.684211 | 72 | 0.644037 |
19fd412cafef218b13925ee3f65e56fdf5909b42 | 11,591 | py | Python | qiskit/pulse/instruction_schedule_map.py | diego-plan9/qiskit-terra | a4120d70bd631ad2add228fdb1f86706bc5f2339 | [
"Apache-2.0"
] | 1 | 2018-05-29T03:58:03.000Z | 2018-05-29T03:58:03.000Z | qiskit/pulse/instruction_schedule_map.py | diego-plan9/qiskit-terra | a4120d70bd631ad2add228fdb1f86706bc5f2339 | [
"Apache-2.0"
] | 3 | 2018-11-13T17:33:37.000Z | 2018-12-03T09:35:00.000Z | qiskit/pulse/instruction_schedule_map.py | diego-plan9/qiskit-terra | a4120d70bd631ad2add228fdb1f86706bc5f2339 | [
"Apache-2.0"
] | 2 | 2017-12-03T15:48:14.000Z | 2018-03-11T13:08:03.000Z | # -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2019.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""
A convenient way to track reusable subschedules by name and qubit.
This can be used for scheduling circuits with custom definitions, for instance::
inst_map = InstructionScheduleMap()
inst_map.add('new_inst', 0, qubit_0_new_inst_schedule)
sched = schedule(quantum_circuit, backend, inst_map)
An instance of this class is instantiated by Pulse-enabled backends and populated with defaults
(if available)::
inst_map = backend.defaults().instruction_schedule_map
"""
import warnings
import inspect
from collections import defaultdict
from typing import List, Tuple, Iterable, Union, Callable
from .schedule import Schedule, ParameterizedSchedule
from .exceptions import PulseError
class InstructionScheduleMap():
    """Mapping from :py:class:`~qiskit.circuit.QuantumCircuit`
    :py:class:`qiskit.circuit.Instruction` names and qubits to
    :py:class:`~qiskit.pulse.Schedule` s. In particular, the mapping is formatted as type::

        Dict[str, Dict[Tuple[int], Schedule]]

    where the first key is the name of a circuit instruction (e.g. ``'u1'``, ``'measure'``), the
    second key is a tuple of qubit indices, and the final value is a Schedule implementing the
    requested instruction.
    """

    def __init__(self):
        """Initialize a circuit instruction to schedule mapper instance."""
        # The processed and reformatted circuit instruction definitions
        self._map = defaultdict(dict)
        # A backwards mapping from qubit to supported instructions
        self._qubit_instructions = defaultdict(set)

    @property
    def instructions(self) -> List[str]:
        """Return all instructions which have definitions.

        By default, these are typically the basis gates along with other instructions such as
        measure and reset.

        Returns:
            The names of all the circuit instructions which have Schedule definitions in this.
        """
        return list(self._map.keys())

    def qubits_with_instruction(self, instruction: str) -> List[Union[int, Tuple[int]]]:
        """Return a list of the qubits for which the given instruction is defined. Single qubit
        instructions return a flat list, and multiqubit instructions return a list of ordered
        tuples.

        Args:
            instruction: The name of the circuit instruction.

        Returns:
            Qubit indices which have the given instruction defined. This is a list of tuples if the
            instruction has an arity greater than 1, or a flat list of ints otherwise.

        Raises:
            PulseError: If the instruction is not found.
        """
        if instruction not in self._map:
            return []
        # Unwrap 1-tuples to bare ints for single-qubit instructions.
        return [qubits[0] if len(qubits) == 1 else qubits
                for qubits in sorted(self._map[instruction].keys())]

    def qubit_instructions(self, qubits: Union[int, Iterable[int]]) -> List[str]:
        """Return a list of the instruction names that are defined by the backend for the given
        qubit or qubits.

        Args:
            qubits: A qubit index, or a list or tuple of indices.

        Returns:
            All the instructions which are defined on the qubits.

            For 1 qubit, all the 1Q instructions defined. For multiple qubits, all the instructions
            which apply to that whole set of qubits (e.g. ``qubits=[0, 1]`` may return ``['cx']``).
        """
        if _to_tuple(qubits) in self._qubit_instructions:
            return list(self._qubit_instructions[_to_tuple(qubits)])
        return []

    def has(self, instruction: str, qubits: Union[int, Iterable[int]]) -> bool:
        """Is the instruction defined for the given qubits?

        Args:
            instruction: The instruction for which to look.
            qubits: The specific qubits for the instruction.

        Returns:
            True iff the instruction is defined.
        """
        return instruction in self._map and \
            _to_tuple(qubits) in self._map[instruction]

    def assert_has(self, instruction: str, qubits: Union[int, Iterable[int]]) -> None:
        """Error if the given instruction is not defined.

        Args:
            instruction: The instruction for which to look.
            qubits: The specific qubits for the instruction.

        Raises:
            PulseError: If the instruction is not defined on the qubits.
        """
        if not self.has(instruction, _to_tuple(qubits)):
            # Give a more specific message when the instruction exists on
            # other qubits than the ones requested.
            if instruction in self._map:
                raise PulseError("Operation '{inst}' exists, but is only defined for qubits "
                                 "{qubits}.".format(
                                     inst=instruction,
                                     qubits=self.qubits_with_instruction(instruction)))
            raise PulseError("Operation '{inst}' is not defined for this "
                             "system.".format(inst=instruction))

    def get(self,
            instruction: str,
            qubits: Union[int, Iterable[int]],
            *params: Union[int, float, complex],
            **kwparams: Union[int, float, complex]) -> Schedule:
        """Return the defined :py:class:`~qiskit.pulse.Schedule` for the given instruction on
        the given qubits.

        Args:
            instruction: Name of the instruction.
            qubits: The qubits for the instruction.
            *params: Command parameters for generating the output schedule.
            **kwparams: Keyworded command parameters for generating the schedule.

        Returns:
            The Schedule defined for the input.
        """
        self.assert_has(instruction, qubits)
        schedule_generator = self._map[instruction].get(_to_tuple(qubits))
        # A callable entry (e.g. a ParameterizedSchedule or function) is
        # invoked with the given parameters to build the schedule.
        if callable(schedule_generator):
            return schedule_generator(*params, **kwparams)
        # otherwise this is just a Schedule
        return schedule_generator

    def add(self,
            instruction: str,
            qubits: Union[int, Iterable[int]],
            schedule: Union[Schedule, Callable[..., Schedule]]) -> None:
        """Add a new known instruction for the given qubits and its mapping to a pulse schedule.

        Args:
            instruction: The name of the instruction to add.
            qubits: The qubits which the instruction applies to.
            schedule: The Schedule that implements the given instruction.

        Raises:
            PulseError: If the qubits are provided as an empty iterable, or if the schedule
                is neither a Schedule nor a callable returning a schedule.
        """
        qubits = _to_tuple(qubits)
        if qubits == ():
            raise PulseError("Cannot add definition {} with no target qubits.".format(instruction))
        if not (isinstance(schedule, Schedule) or callable(schedule)):
            raise PulseError('Supplied schedule must be either a Schedule, or a '
                             'callable that outputs a schedule.')
        self._map[instruction][qubits] = schedule
        self._qubit_instructions[qubits].add(instruction)

    def remove(self, instruction: str, qubits: Union[int, Iterable[int]]) -> None:
        """Remove the given instruction from the listing of instructions defined in self.

        Args:
            instruction: The name of the instruction to add.
            qubits: The qubits which the instruction applies to.
        """
        qubits = _to_tuple(qubits)
        self.assert_has(instruction, qubits)
        self._map[instruction].pop(qubits)
        self._qubit_instructions[qubits].remove(instruction)
        # Drop empty leaf entries so membership checks stay accurate.
        if not self._map[instruction]:
            self._map.pop(instruction)
        if not self._qubit_instructions[qubits]:
            self._qubit_instructions.pop(qubits)

    def pop(self,
            instruction: str,
            qubits: Union[int, Iterable[int]],
            *params: Union[int, float, complex],
            **kwparams: Union[int, float, complex]) -> Schedule:
        """Remove and return the defined ``Schedule`` for the given instruction on the given
        qubits.

        Args:
            instruction: Name of the instruction.
            qubits: The qubits for the instruction.
            *params: Command parameters for generating the output schedule.
            **kwparams: Keyworded command parameters for generating the schedule.

        Returns:
            The Schedule defined for the input.
        """
        schedule = self.get(instruction, qubits, *params, **kwparams)
        self.remove(instruction, qubits)
        return schedule

    def cmds(self) -> List[str]:
        """Deprecated.

        Returns:
            The names of all the circuit instructions which have Schedule definitions in this.
        """
        warnings.warn("Please use the `instructions` attribute instead of `cmds()`.",
                      DeprecationWarning)
        return self.instructions

    def cmd_qubits(self, cmd_name: str) -> List[Union[int, Tuple[int]]]:
        """Deprecated.

        Args:
            cmd_name: The name of the circuit instruction.

        Returns:
            Qubit indices which have the given instruction defined. This is a list of tuples if
            the instruction has an arity greater than 1, or a flat list of ints otherwise.
        """
        warnings.warn("Please use qubits_with_instruction() instead of cmd_qubits().",
                      DeprecationWarning)
        return self.qubits_with_instruction(cmd_name)

    def get_parameters(self, instruction: str, qubits: Union[int, Iterable[int]]) -> Tuple[str]:
        """Return the list of parameters taken by the given instruction on the given qubits.

        Args:
            instruction: Name of the instruction.
            qubits: The qubits for the instruction.

        Returns:
            The names of the parameters required by the instruction.
        """
        self.assert_has(instruction, qubits)
        schedule_generator = self._map[instruction][_to_tuple(qubits)]
        if isinstance(schedule_generator, ParameterizedSchedule):
            return schedule_generator.parameters
        elif callable(schedule_generator):
            # Plain callables expose their parameter names via introspection.
            return tuple(inspect.signature(schedule_generator).parameters.keys())
        else:
            # A static Schedule takes no parameters.
            return ()

    def __str__(self):
        single_q_insts = "1Q instructions:\n"
        multi_q_insts = "Multi qubit instructions:\n"
        for qubits, insts in self._qubit_instructions.items():
            if len(qubits) == 1:
                single_q_insts += "  q{qubit}: {insts}\n".format(qubit=qubits[0], insts=insts)
            else:
                multi_q_insts += "  {qubits}: {insts}\n".format(qubits=qubits, insts=insts)
        instructions = single_q_insts + multi_q_insts
        return ("<{name}({insts})>"
                "".format(name=self.__class__.__name__, insts=instructions))
def _to_tuple(values: Union[int, Iterable[int]]) -> Tuple[int, ...]:
    """Return the input as a tuple.

    Args:
        values: An integer, or iterable of integers.

    Returns:
        The input values as a tuple, in their original iteration order (a
        bare int becomes a one-element tuple). Note: the values are *not*
        sorted, despite what the previous docstring claimed.
    """
    try:
        return tuple(values)
    except TypeError:
        # *values* is not iterable, i.e. a single int.
        return (values,)
| 39.291525 | 99 | 0.636701 |
10029791e45e77d1ea3ee0e232b2f0451441c919 | 1,315 | py | Python | benchmarks/benchmarks/signal_filtering.py | magnusja/scipy | c4a5a1f984e28840010f20a7e41caa21b8f41979 | [
"FSFAP"
] | 6 | 2016-10-28T02:39:49.000Z | 2019-02-19T21:41:01.000Z | benchmarks/benchmarks/signal_filtering.py | magnusja/scipy | c4a5a1f984e28840010f20a7e41caa21b8f41979 | [
"FSFAP"
] | 4 | 2015-07-05T19:58:44.000Z | 2016-01-24T17:17:02.000Z | benchmarks/benchmarks/signal_filtering.py | magnusja/scipy | c4a5a1f984e28840010f20a7e41caa21b8f41979 | [
"FSFAP"
] | 3 | 2019-11-02T04:25:20.000Z | 2021-02-20T10:43:43.000Z | from __future__ import division, absolute_import, print_function
import numpy as np
try:
from scipy.signal import lfilter, firwin, decimate
except ImportError:
pass
from .common import Benchmark
class Decimate(Benchmark):
    """Benchmark scipy.signal.decimate over decimation factors, filter
    families, and the zero-phase option."""

    param_names = ['q', 'ftype', 'zero_phase']
    params = [
        [2, 10, 30],
        ['iir', 'fir'],
        [True, False],
    ]

    def setup(self, q, ftype, zero_phase):
        # Deterministic setup: fixed seed plus a synthetic two-tone signal
        # (500 Hz and 4 kHz components) sampled at 10 kHz.
        np.random.seed(123456)
        fs = 10000.
        time_axis = np.arange(int(1e6), dtype=np.float64) / fs
        self.sig = np.sin(2 * np.pi * 500 * time_axis) + 0.3 * np.sin(2 * np.pi * 4e3 * time_axis)

    def time_decimate(self, q, ftype, zero_phase):
        decimate(self.sig, q, ftype=ftype, zero_phase=zero_phase)
class Lfilter(Benchmark):
    """Benchmark scipy.signal.lfilter with FIR filters of varying length on
    signals of varying size."""

    param_names = ['n_samples', 'numtaps']
    params = [
        [1e3, 50e3, 1e6],
        [9, 23, 51],
    ]

    def setup(self, n_samples, numtaps):
        # Two-tone test signal (500 Hz + 11 kHz) at a 25 kHz sample rate,
        # filtered by a low-pass FIR with a 3 kHz cutoff.
        np.random.seed(125678)
        fs = 25000.
        time_axis = np.arange(n_samples, dtype=np.float64) / fs
        nyq_rate = fs / 2.
        cutoff_hz = 3000.0
        self.sig = np.sin(2 * np.pi * 500 * time_axis) + 0.3 * np.sin(2 * np.pi * 11e3 * time_axis)
        self.coeff = firwin(numtaps, cutoff_hz / nyq_rate)

    def time_lfilter(self, n_samples, numtaps):
        lfilter(self.coeff, 1.0, self.sig)
53c674eb539f165adbf9c8d0237f0ca1a9cb96e0 | 421 | py | Python | archive_for_wyko/Pyro-3.6/examples/callback/bounce_server.py | ArcetriAdaptiveOptics/plico_interferometer_server | e14e240229c802333b2aa5bf6458a079dc950bd2 | [
"MIT"
] | null | null | null | archive_for_wyko/Pyro-3.6/examples/callback/bounce_server.py | ArcetriAdaptiveOptics/plico_interferometer_server | e14e240229c802333b2aa5bf6458a079dc950bd2 | [
"MIT"
] | 3 | 2022-01-16T01:05:01.000Z | 2022-02-23T14:05:28.000Z | archive_for_wyko/Pyro-3.6/examples/callback/bounce_server.py | ArcetriAdaptiveOptics/plico_interferometer_server | e14e240229c802333b2aa5bf6458a079dc950bd2 | [
"MIT"
] | 1 | 2022-01-14T14:04:07.000Z | 2022-01-14T14:04:07.000Z | #! /usr/bin/env python
import Pyro.naming
import Pyro.core
from Pyro.errors import NamingError
import bouncer
# NOTE(review): this script uses Python 2 syntax (the ``print`` statement)
# and the legacy Pyro 3 API; it will not run under Python 3 as-is.

# Initialize the Pyro server runtime and create the request daemon.
Pyro.core.initServer()
daemon = Pyro.core.Daemon()

# Locate the Pyro name server and attach it so registered objects are
# published under their names.
ns = Pyro.naming.NameServerLocator().getNS()
daemon.useNameServer(ns)

# Make sure the ':test' group exists; ignore the error if it already does.
try:
    ns.createGroup(':test')
except NamingError:
    pass

# Register a Bouncer instance under the name ':test.bouncer'.
daemon.connect(bouncer.Bouncer('Server'),':test.bouncer')

# enter the service loop.
print 'Bouncer started'
daemon.requestLoop()
e4e7b18aa9c9c3b600e067a2e9f58558f1986064 | 503 | py | Python | userbot/plugins/quit.py | RiderFA/Dark_Userbot | 480df539bfeae994d59649a54d2478ed24b445bb | [
"MIT"
] | null | null | null | userbot/plugins/quit.py | RiderFA/Dark_Userbot | 480df539bfeae994d59649a54d2478ed24b445bb | [
"MIT"
] | null | null | null | userbot/plugins/quit.py | RiderFA/Dark_Userbot | 480df539bfeae994d59649a54d2478ed24b445bb | [
"MIT"
] | null | null | null | """
.kickme
"""
import time
from telethon.tl.functions.channels import LeaveChannelRequest
from mafiabot.utils import admin_cmd
@borg.on(admin_cmd("kickme", outgoing=True))
async def leave(e):
    """Leave the current group when the owner sends ``.kickme``.

    Edits the triggering message, pauses briefly, then leaves the chat if
    its id looks like a group/channel id (contains a ``-``).
    """
    import asyncio  # local import keeps the handler self-contained

    if not e.text[0].isalpha() and e.text[0] not in ("/", "#", "@", "!"):
        await e.edit("**I Iz lev this Kensur grp**")
        # Fix: time.sleep(1) blocked the whole event loop inside this
        # ``async def``; use the non-blocking asyncio sleep instead.
        await asyncio.sleep(1)
        if "-" in str(e.chat_id):
            await borg(LeaveChannelRequest(e.chat_id))
        else:
            await e.edit("**Iz this even a grp???**")
6b236a20f3adcd12898e2cd015c9610b7a39acb8 | 1,739 | py | Python | bin/make_video_index.py | eryl/multimodal-dataset | 1daddab3247f5a3a9618197beb93540a437a773f | [
"MIT"
] | null | null | null | bin/make_video_index.py | eryl/multimodal-dataset | 1daddab3247f5a3a9618197beb93540a437a773f | [
"MIT"
] | null | null | null | bin/make_video_index.py | eryl/multimodal-dataset | 1daddab3247f5a3a9618197beb93540a437a773f | [
"MIT"
] | null | null | null | import argparse
import os.path
import re
import glob
from collections import defaultdict
import imageio
import sys
import csv
def main():
    """Build a CSV index of video/subtitle files found under the given
    directories.

    For every ``*_<id>_*.mp4/.srt/.ttml`` file, group paths by ``<id>`` and,
    for mp4 files, record the duration (frames / fps) read via imageio.
    Writes the index to ``--output`` or stdout.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('directories', nargs='+')
    parser.add_argument('--output')
    args = parser.parse_args()

    video_data = defaultdict(dict)
    for directory in args.directories:
        print(directory)
        files = []
        for pattern in ('*.mp4', '*.srt', '*.ttml'):
            files.extend(glob.glob(os.path.join(directory, '**', pattern), recursive=True))
        for file in files:
            m = re.match(r'.*_([a-z0-9]+)_[a-z]+\.(mp4|srt|ttml)', file)
            if not m:
                print("No match for {}".format(file))
                continue
            base_name = m.group(1)
            ext = m.group(2)
            video_data[base_name][ext] = file
            if ext == 'mp4':
                video_reader = imageio.get_reader(file)
                # Fix: close the reader even if metadata extraction raises.
                try:
                    video_metadata = video_reader.get_meta_data()
                    fps = video_metadata['fps']
                    nframes = video_metadata['nframes']
                    duration = nframes / fps
                finally:
                    video_reader.close()
                video_data[base_name]['duration'] = duration

    fp = open(args.output, 'w') if args.output is not None else sys.stdout
    try:
        csv_writer = csv.DictWriter(fp, fieldnames=['mp4', 'srt', 'ttml', 'duration'])
        csv_writer.writeheader()
        csv_writer.writerows(sorted(video_data.values(), key=lambda x: x['mp4']))
    finally:
        # Fix: the output file was previously leaked; never close stdout.
        if fp is not sys.stdout:
            fp.close()


if __name__ == '__main__':
    main()
3f983ce5c84519674beb524ff2d0a5a1c6a3a0bc | 1,074 | py | Python | tests/integrational/native_sync/test_revoke_v3.py | natekspencer/pubnub-python | 453ba34104b4067077546d5d9ba3b206559494d1 | [
"MIT"
] | 146 | 2015-01-05T03:14:53.000Z | 2022-03-16T16:51:52.000Z | tests/integrational/native_sync/test_revoke_v3.py | natekspencer/pubnub-python | 453ba34104b4067077546d5d9ba3b206559494d1 | [
"MIT"
] | 48 | 2015-01-15T15:27:41.000Z | 2022-03-21T14:17:05.000Z | tests/integrational/native_sync/test_revoke_v3.py | natekspencer/pubnub-python | 453ba34104b4067077546d5d9ba3b206559494d1 | [
"MIT"
] | 128 | 2015-01-05T03:40:59.000Z | 2022-03-02T20:50:58.000Z | from pubnub.pubnub import PubNub
from pubnub.models.consumer.v3.channel import Channel
from tests.integrational.vcr_helper import pn_vcr
from tests.helper import pnconf_pam_stub_copy
from pubnub.models.consumer.v3.access_manager import PNGrantTokenResult, PNRevokeTokenResult
# Module-level client configured against the PAM stub keyset used by the
# recorded VCR cassette below.
pubnub = PubNub(pnconf_pam_stub_copy())
pubnub.config.uuid = "test_revoke"


@pn_vcr.use_cassette(
    'tests/integrational/fixtures/native_sync/pam/revoke_token.yaml',
    filter_query_parameters=['uuid', 'seqn', 'pnsdk', 'timestamp', 'signature']
)
def test_grant_and_revoke_token():
    """Grant a fully-permissioned channel token, then revoke it.

    Responses are replayed from a recorded cassette, so the request
    sequence must match the recording exactly.
    """
    # Grant every permission on "test_channel" to authorized uuid "test",
    # with a ttl of 60.
    grant_envelope = pubnub.grant_token()\
        .channels([Channel.id("test_channel").read().write().manage().update().join().delete()])\
        .authorized_uuid("test")\
        .ttl(60)\
        .sync()

    assert isinstance(grant_envelope.result, PNGrantTokenResult)
    token = grant_envelope.result.get_token()
    assert token

    # Revoking the freshly granted token should report HTTP status 200.
    revoke_envelope = pubnub.revoke_token(token).sync()

    assert isinstance(revoke_envelope.result, PNRevokeTokenResult)
    assert revoke_envelope.result.status == 200
1ca0154a8683fcb8304b7bdb7e7df77b4526bcd4 | 8,065 | py | Python | src/lxml/builder.py | skeptycal/lxml | 32ac7a3bdc8faf2104a77787ed18f2096d0a7346 | [
"BSD-3-Clause"
] | null | null | null | src/lxml/builder.py | skeptycal/lxml | 32ac7a3bdc8faf2104a77787ed18f2096d0a7346 | [
"BSD-3-Clause"
] | 1 | 2020-07-07T04:35:41.000Z | 2020-07-07T04:35:41.000Z | src/lxml/builder.py | skeptycal/lxml | 32ac7a3bdc8faf2104a77787ed18f2096d0a7346 | [
"BSD-3-Clause"
] | null | null | null | # cython: language_level=2
#
# Element generator factory by Fredrik Lundh.
#
# Source:
# http://online.effbot.org/2006_11_01_archive.htm#et-builder
# http://effbot.python-hosting.com/file/stuff/sandbox/elementlib/builder.py
#
# --------------------------------------------------------------------
# The ElementTree toolkit is
#
# Copyright (c) 1999-2004 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
"""
The ``E`` Element factory for generating XML documents.
"""
from __future__ import absolute_import
import lxml.etree as ET
from functools import partial
try:
basestring
except NameError:
basestring = str
try:
unicode
except NameError:
unicode = str
class ElementMaker(object):
    """Element generator factory.

    Unlike the ordinary Element factory, the E factory allows you to pass in
    more than just a tag and some optional attributes; you can also pass in
    text and other elements.  The text is added as either text or tail
    attributes, and elements are inserted at the right spot.  Some small
    examples::

        >>> from lxml import etree as ET
        >>> from lxml.builder import E
        >>> ET.tostring(E("tag"))
        '<tag/>'
        >>> ET.tostring(E("tag", "text"))
        '<tag>text</tag>'
        >>> ET.tostring(E("tag", "text", key="value"))
        '<tag key="value">text</tag>'
        >>> ET.tostring(E("tag", E("subtag", "text"), "tail"))
        '<tag><subtag>text</subtag>tail</tag>'

    For simple tags, the factory also allows you to write ``E.tag(...)``
    instead of ``E('tag', ...)``::

        >>> ET.tostring(E.tag())
        '<tag/>'
        >>> ET.tostring(E.tag("text"))
        '<tag>text</tag>'
        >>> ET.tostring(E.tag(E.subtag("text"), "tail"))
        '<tag><subtag>text</subtag>tail</tag>'

    Here's a somewhat larger example; this shows how to generate HTML
    documents, using a mix of prepared factory functions for inline elements,
    nested ``E.tag`` calls, and embedded XHTML fragments::

        # some common inline elements
        A = E.a
        I = E.i
        B = E.b

        def CLASS(v):
            # helper function, 'class' is a reserved word
            return {'class': v}

        page = (
            E.html(
                E.head(
                    E.title("This is a sample document")
                ),
                E.body(
                    E.h1("Hello!", CLASS("title")),
                    E.p("This is a paragraph with ", B("bold"), " text in it!"),
                    E.p("This is another paragraph, with a ",
                        A("link", href="http://www.python.org"), "."),
                    E.p("Here are some reserved characters: <spam&egg>."),
                    ET.XML("<p>And finally, here is an embedded XHTML fragment.</p>"),
                )
            )
        )

        print ET.tostring(page)

    Here's a prettyprinted version of the output from the above script::

        <html>
          <head>
            <title>This is a sample document</title>
          </head>
          <body>
            <h1 class="title">Hello!</h1>
            <p>This is a paragraph with <b>bold</b> text in it!</p>
            <p>This is another paragraph, with <a href="http://www.python.org">link</a>.</p>
            <p>Here are some reserved characters: <spam&egg>.</p>
            <p>And finally, here is an embedded XHTML fragment.</p>
          </body>
        </html>

    For namespace support, you can pass a namespace map (``nsmap``)
    and/or a specific target ``namespace`` to the ElementMaker class::

        >>> E = ElementMaker(namespace="http://my.ns/")
        >>> print(ET.tostring( E.test ))
        <test xmlns="http://my.ns/"/>

        >>> E = ElementMaker(namespace="http://my.ns/", nsmap={'p':'http://my.ns/'})
        >>> print(ET.tostring( E.test ))
        <p:test xmlns:p="http://my.ns/"/>
    """

    def __init__(
        self, typemap=None, namespace=None, nsmap=None, makeelement=None
    ):
        # Pre-wrap the namespace in Clark notation ("{uri}") so tags can be
        # prefixed cheaply in __call__.
        if namespace is not None:
            self._namespace = "{" + namespace + "}"
        else:
            self._namespace = None

        if nsmap:
            self._nsmap = dict(nsmap)
        else:
            self._nsmap = None

        if makeelement is not None:
            assert callable(makeelement)
            self._makeelement = makeelement
        else:
            self._makeelement = ET.Element

        # initialize type map for this element factory
        if typemap:
            typemap = dict(typemap)
        else:
            typemap = {}

        # Append string content: tail of the last child if one exists,
        # otherwise the element's own text.
        def add_text(elem, item):
            try:
                elem[-1].tail = (elem[-1].tail or "") + item
            except IndexError:
                elem.text = (elem.text or "") + item

        # CDATA must be the element's only text content.
        def add_cdata(elem, cdata):
            if elem.text:
                raise ValueError(
                    "Can't add a CDATA section. Element already has some text: %r"
                    % elem.text
                )
            elem.text = cdata

        if str not in typemap:
            typemap[str] = add_text
        if unicode not in typemap:
            typemap[unicode] = add_text
        if ET.CDATA not in typemap:
            typemap[ET.CDATA] = add_cdata

        # Dicts become attributes; non-string values are converted through
        # the typemap first.
        def add_dict(elem, item):
            attrib = elem.attrib
            for k, v in item.items():
                if isinstance(v, basestring):
                    attrib[k] = v
                else:
                    attrib[k] = typemap[type(v)](None, v)

        if dict not in typemap:
            typemap[dict] = add_dict

        self._typemap = typemap

    def __call__(self, tag, *children, **attrib):
        typemap = self._typemap

        # Apply the default namespace unless the tag is already qualified.
        if self._namespace is not None and tag[0] != "{":
            tag = self._namespace + tag
        elem = self._makeelement(tag, nsmap=self._nsmap)
        if attrib:
            typemap[dict](elem, attrib)

        for item in children:
            # Callables are invoked lazily so builders can defer creation.
            if callable(item):
                item = item()
            t = typemap.get(type(item))
            if t is None:
                if ET.iselement(item):
                    elem.append(item)
                    continue
                for basetype in type(item).__mro__:
                    # See if the typemap knows of any of this type's bases.
                    t = typemap.get(basetype)
                    if t is not None:
                        break
                else:
                    raise TypeError(
                        "bad argument type: %s(%r)"
                        % (type(item).__name__, item)
                    )
            # A truthy return value is fed back through the typemap once
            # more (e.g. a converter that produced a string).
            v = t(elem, item)
            if v:
                typemap.get(type(v))(elem, v)

        return elem

    def __getattr__(self, tag):
        # E.tag(...) is sugar for E('tag', ...).
        return partial(self, tag)
# create factory object
# Module-level default factory: no namespace, no nsmap, default typemap.
E = ElementMaker()
| 32.651822 | 92 | 0.550403 |
60acce538e47c0f2e643d8f0fffbfe18a30aa3e3 | 1,424 | py | Python | easy/1748_sum_of_unique_elements.py | niki4/leetcode_py3 | 794f560a09a8950da21bd58ea222e0c74449ffa6 | [
"MIT"
] | null | null | null | easy/1748_sum_of_unique_elements.py | niki4/leetcode_py3 | 794f560a09a8950da21bd58ea222e0c74449ffa6 | [
"MIT"
] | null | null | null | easy/1748_sum_of_unique_elements.py | niki4/leetcode_py3 | 794f560a09a8950da21bd58ea222e0c74449ffa6 | [
"MIT"
] | null | null | null | """
You are given an integer array nums. The unique elements of an array are the elements that appear exactly once in the array.
Return the sum of all the unique elements of nums.
Example 1:
Input: nums = [1,2,3,2]
Output: 4
Explanation: The unique elements are [1,3], and the sum is 4.
"""
import collections
from typing import List
class Solution:
    """
    Count occurrences once with a Counter, then sum the values that
    appear exactly one time.

    Runtime: 44 ms, faster than 6.40% of Python3
    Memory Usage: 14.3 MB, less than 42.72% of Python3
    """

    def sumOfUnique(self, nums: List[int]) -> int:
        counts = collections.Counter(nums)
        return sum(value for value, count in counts.items() if count == 1)
class Solution2:
    """
    Counting-array variant: the problem constraints bound each num to a
    small fixed range (1-100), so a pre-allocated tally array indexed by
    value replaces the hash-based counter.

    Runtime: 44 ms, faster than 6.44% of Python3
    Memory Usage: 14.1 MB, less than 90.38% of Python3
    """

    def sumOfUnique(self, nums: List[int]) -> int:
        tally = [0] * (100 + 1)
        for value in nums:
            tally[value] += 1
        total = 0
        for value, count in enumerate(tally):
            if count == 1:
                total += value
        return total
if __name__ == '__main__':
    # Fix: Solution2 was defined but never exercised; run every
    # implementation against the shared test cases.
    solutions = [Solution(), Solution2()]
    tc = (
        ([1, 2, 3, 2], 4),
        ([1, 1, 1, 1, 1], 0),
        ([1, 2, 3, 4, 5], 15),
    )
    for sol in solutions:
        for inp_nums, exp_sum in tc:
            assert sol.sumOfUnique(inp_nums) == exp_sum
| 25.428571 | 124 | 0.606039 |
372b8639e05549b582ad649f8644d2040235a0b9 | 15,962 | py | Python | lib/sqlalchemy/dialects/postgresql/pg8000.py | pedropozzobon/sqlalchemy | 03989d1dce80999bb9ea1a7d36df3285e5ce4c3b | [
"MIT"
] | null | null | null | lib/sqlalchemy/dialects/postgresql/pg8000.py | pedropozzobon/sqlalchemy | 03989d1dce80999bb9ea1a7d36df3285e5ce4c3b | [
"MIT"
] | null | null | null | lib/sqlalchemy/dialects/postgresql/pg8000.py | pedropozzobon/sqlalchemy | 03989d1dce80999bb9ea1a7d36df3285e5ce4c3b | [
"MIT"
] | null | null | null | # postgresql/pg8000.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors <see AUTHORS
# file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
r"""
.. dialect:: postgresql+pg8000
:name: pg8000
:dbapi: pg8000
:connectstring: postgresql+pg8000://user:password@host:port/dbname[?key=value&key=value...]
:url: https://pypi.org/project/pg8000/
.. versionchanged:: 1.4 The pg8000 dialect has been updated for version
1.16.6 and higher, and is again part of SQLAlchemy's continuous integration
with full feature support.
.. _pg8000_unicode:
Unicode
-------
pg8000 will encode / decode string values between it and the server using the
PostgreSQL ``client_encoding`` parameter; by default this is the value in
the ``postgresql.conf`` file, which often defaults to ``SQL_ASCII``.
Typically, this can be changed to ``utf-8``, as a more useful default::
#client_encoding = sql_ascii # actually, defaults to database
# encoding
client_encoding = utf8
The ``client_encoding`` can be overridden for a session by executing the SQL:
SET CLIENT_ENCODING TO 'utf8';
SQLAlchemy will execute this SQL on all new connections based on the value
passed to :func:`_sa.create_engine` using the ``client_encoding`` parameter::
engine = create_engine(
"postgresql+pg8000://user:pass@host/dbname", client_encoding='utf8')
.. _pg8000_ssl:
SSL Connections
---------------
pg8000 accepts a Python ``SSLContext`` object which may be specified using the
:paramref:`_sa.create_engine.connect_args` dictionary::
import ssl
ssl_context = ssl.create_default_context()
engine = sa.create_engine(
"postgresql+pg8000://scott:tiger@192.168.0.199/test",
connect_args={"ssl_context": ssl_context},
)
If the server uses an automatically-generated certificate that is self-signed
or does not match the host name (as seen from the client), it may also be
necessary to disable hostname checking::
import ssl
ssl_context = ssl.create_default_context()
ssl_context.check_hostname = False
ssl_context.verify_mode = ssl.CERT_NONE
engine = sa.create_engine(
"postgresql+pg8000://scott:tiger@192.168.0.199/test",
connect_args={"ssl_context": ssl_context},
)
.. _pg8000_isolation_level:
pg8000 Transaction Isolation Level
-------------------------------------
The pg8000 dialect offers the same isolation level settings as that
of the :ref:`psycopg2 <psycopg2_isolation_level>` dialect:
* ``READ COMMITTED``
* ``READ UNCOMMITTED``
* ``REPEATABLE READ``
* ``SERIALIZABLE``
* ``AUTOCOMMIT``
.. seealso::
:ref:`postgresql_isolation_level`
:ref:`psycopg2_isolation_level`
""" # noqa
import decimal
import re
from uuid import UUID as _python_UUID
from .array import ARRAY as PGARRAY
from .base import _DECIMAL_TYPES
from .base import _FLOAT_TYPES
from .base import _INT_TYPES
from .base import ENUM
from .base import INTERVAL
from .base import PGCompiler
from .base import PGDialect
from .base import PGExecutionContext
from .base import PGIdentifierPreparer
from .base import UUID
from .json import JSON
from .json import JSONB
from .json import JSONPathType
from ... import exc
from ... import types as sqltypes
from ... import util
from ...engine import processors
from ...sql.elements import quoted_name
class _PGString(sqltypes.String):
    # Render an explicit SQL cast on bound string parameters.
    render_bind_cast = True
class _PGNumeric(sqltypes.Numeric):
    # Render an explicit SQL cast on bound numeric parameters.
    render_bind_cast = True

    def result_processor(self, dialect, coltype):
        """Choose a row-value converter based on the column's PG type OID
        and whether Decimal results were requested; ``None`` means the
        driver's native value is used unchanged."""
        if self.asdecimal:
            if coltype in _FLOAT_TYPES:
                return processors.to_decimal_processor_factory(
                    decimal.Decimal, self._effective_decimal_return_scale
                )
            elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
                # pg8000 returns Decimal natively for 1700
                return None
            else:
                raise exc.InvalidRequestError(
                    "Unknown PG numeric type: %d" % coltype
                )
        else:
            if coltype in _FLOAT_TYPES:
                # pg8000 returns float natively for 701
                return None
            elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
                return processors.to_float
            else:
                raise exc.InvalidRequestError(
                    "Unknown PG numeric type: %d" % coltype
                )
class _PGFloat(_PGNumeric):
    __visit_name__ = "float"
    render_bind_cast = True


class _PGNumericNoBind(_PGNumeric):
    # Returning None disables bind-side conversion; values are passed to
    # the driver as-is.
    def bind_processor(self, dialect):
        return None


class _PGJSON(JSON):
    render_bind_cast = True

    # Returning None means rows come back from the driver unmodified.
    def result_processor(self, dialect, coltype):
        return None


class _PGJSONB(JSONB):
    render_bind_cast = True

    # Returning None means rows come back from the driver unmodified.
    def result_processor(self, dialect, coltype):
        return None
class _PGJSONIndexType(sqltypes.JSON.JSONIndexType):
    # The generic JSON index type should never be asked for a DBAPI type;
    # the int/str variants below are used instead.
    def get_dbapi_type(self, dbapi):
        raise NotImplementedError("should not be here")


class _PGJSONIntIndexType(sqltypes.JSON.JSONIntIndexType):
    __visit_name__ = "json_int_index"
    render_bind_cast = True


class _PGJSONStrIndexType(sqltypes.JSON.JSONStrIndexType):
    __visit_name__ = "json_str_index"
    render_bind_cast = True


class _PGJSONPathType(JSONPathType):
    pass
# DBAPI type 1009
class _PGUUID(UUID):
    render_bind_cast = True

    def bind_processor(self, dialect):
        # When as_uuid is False the Python-side value is a string: parse it
        # into a uuid.UUID before handing it to the driver.  When as_uuid
        # is True no conversion is needed (falls through, returning None).
        if not self.as_uuid:

            def process(value):
                if value is not None:
                    value = _python_UUID(value)
                return value

            return process

    def result_processor(self, dialect, coltype):
        # Mirror of bind_processor: stringify UUID rows unless the caller
        # asked for uuid.UUID objects.
        if not self.as_uuid:

            def process(value):
                if value is not None:
                    value = str(value)
                return value

            return process
class _PGEnum(ENUM):
    def get_dbapi_type(self, dbapi):
        # Enum values are bound using pg8000's UNKNOWN type.
        return dbapi.UNKNOWN
class _PGInterval(INTERVAL):
    render_bind_cast = True

    def get_dbapi_type(self, dbapi):
        return dbapi.INTERVAL

    @classmethod
    def adapt_emulated_to_native(cls, interval, **kw):
        # Map the generic sqltypes.Interval onto the native PG INTERVAL,
        # preserving the requested fractional-second precision.
        return _PGInterval(precision=interval.second_precision)
# The following subclasses only enable ``render_bind_cast`` so bound
# parameters of these types are rendered with an explicit SQL cast.


class _PGTimeStamp(sqltypes.DateTime):
    render_bind_cast = True


class _PGDate(sqltypes.Date):
    render_bind_cast = True


class _PGTime(sqltypes.Time):
    render_bind_cast = True


class _PGInteger(sqltypes.Integer):
    render_bind_cast = True


class _PGSmallInteger(sqltypes.SmallInteger):
    render_bind_cast = True


class _PGNullType(sqltypes.NullType):
    pass


class _PGBigInteger(sqltypes.BigInteger):
    render_bind_cast = True


class _PGBoolean(sqltypes.Boolean):
    render_bind_cast = True


class _PGARRAY(PGARRAY):
    render_bind_cast = True


# Module-wide counter used to build unique server-side cursor names.
_server_side_id = util.counter()
class PGExecutionContext_pg8000(PGExecutionContext):
    def create_server_side_cursor(self):
        """Open a uniquely named server-side cursor on a fresh DBAPI cursor."""
        # The name combines this context's object id with a module-wide
        # counter so concurrent contexts never collide.
        ident = f"c_{id(self):x}_{_server_side_id():x}"
        return ServerSideCursor(self._dbapi_connection.cursor(), ident)

    def pre_exec(self):
        if not self.compiled:
            return
class ServerSideCursor:
    """Adapter that routes a DBAPI cursor's statements through a named
    PostgreSQL server-side cursor (DECLARE ... / FETCH FORWARD ...)."""

    server_side = True

    def __init__(self, cursor, ident):
        self.ident = ident
        self.cursor = cursor

    @property
    def connection(self):
        return self.cursor.connection

    @property
    def rowcount(self):
        return self.cursor.rowcount

    @property
    def description(self):
        return self.cursor.description

    def execute(self, operation, args=(), stream=None):
        # Declare the statement as a cursor rather than executing it
        # directly; rows are retrieved later via FETCH.
        self.cursor.execute(
            f"DECLARE {self.ident} NO SCROLL CURSOR FOR {operation}",
            args,
            stream=stream,
        )
        return self

    def executemany(self, operation, param_sets):
        self.cursor.executemany(operation, param_sets)
        return self

    def fetchone(self):
        self.cursor.execute(f"FETCH FORWARD 1 FROM {self.ident}")
        return self.cursor.fetchone()

    def fetchmany(self, num=None):
        if num is None:
            return self.fetchall()
        self.cursor.execute(f"FETCH FORWARD {int(num)} FROM {self.ident}")
        return self.cursor.fetchall()

    def fetchall(self):
        self.cursor.execute(f"FETCH FORWARD ALL FROM {self.ident}")
        return self.cursor.fetchall()

    def close(self):
        self.cursor.execute(f"CLOSE {self.ident}")
        self.cursor.close()

    def setinputsizes(self, *sizes):
        self.cursor.setinputsizes(*sizes)

    def setoutputsize(self, size, column=None):
        pass
class PGCompiler_pg8000(PGCompiler):
    def visit_mod_binary(self, binary, operator, **kw):
        # pg8000 uses the "format" paramstyle, so a literal % in SQL text
        # must be doubled to survive parameter interpolation.
        lhs = self.process(binary.left, **kw)
        rhs = self.process(binary.right, **kw)
        return f"{lhs} %% {rhs}"
class PGIdentifierPreparer_pg8000(PGIdentifierPreparer):
    def __init__(self, *args, **kwargs):
        PGIdentifierPreparer.__init__(self, *args, **kwargs)
        # Do not double % signs inside quoted identifiers for this driver.
        self._double_percents = False
class PGDialect_pg8000(PGDialect):
    """PostgreSQL dialect implemented on top of the pg8000 DBAPI driver.

    Requires pg8000 1.16.6 or greater (enforced in ``__init__``).
    """

    driver = "pg8000"
    supports_statement_cache = True
    supports_unicode_statements = True
    supports_unicode_binds = True

    default_paramstyle = "format"
    supports_sane_multi_rowcount = True
    execution_ctx_cls = PGExecutionContext_pg8000
    statement_compiler = PGCompiler_pg8000
    preparer = PGIdentifierPreparer_pg8000
    supports_server_side_cursors = True

    render_bind_cast = True

    # reversed as of pg8000 1.16.6. 1.16.5 and lower
    # are no longer compatible
    description_encoding = None
    # description_encoding = "use_encoding"

    colspecs = util.update_copy(
        PGDialect.colspecs,
        {
            sqltypes.String: _PGString,
            sqltypes.Numeric: _PGNumericNoBind,
            sqltypes.Float: _PGFloat,
            sqltypes.JSON: _PGJSON,
            sqltypes.Boolean: _PGBoolean,
            sqltypes.NullType: _PGNullType,
            JSONB: _PGJSONB,
            sqltypes.JSON.JSONPathType: _PGJSONPathType,
            sqltypes.JSON.JSONIndexType: _PGJSONIndexType,
            sqltypes.JSON.JSONIntIndexType: _PGJSONIntIndexType,
            sqltypes.JSON.JSONStrIndexType: _PGJSONStrIndexType,
            UUID: _PGUUID,
            sqltypes.Interval: _PGInterval,
            INTERVAL: _PGInterval,
            # NOTE: this entry was previously listed twice; the duplicate
            # (dead) line has been removed.
            sqltypes.DateTime: _PGTimeStamp,
            sqltypes.Date: _PGDate,
            sqltypes.Time: _PGTime,
            sqltypes.Integer: _PGInteger,
            sqltypes.SmallInteger: _PGSmallInteger,
            sqltypes.BigInteger: _PGBigInteger,
            sqltypes.Enum: _PGEnum,
            sqltypes.ARRAY: _PGARRAY,
        },
    )

    def __init__(self, client_encoding=None, **kwargs):
        PGDialect.__init__(self, **kwargs)
        self.client_encoding = client_encoding

        if self._dbapi_version < (1, 16, 6):
            raise NotImplementedError("pg8000 1.16.6 or greater is required")

    @util.memoized_property
    def _dbapi_version(self):
        # Parse "x.y.z" (tolerating "-" separators) into a comparable tuple;
        # fall back to a sentinel when the driver exposes no version.
        if self.dbapi and hasattr(self.dbapi, "__version__"):
            return tuple(
                [
                    int(x)
                    for x in re.findall(
                        r"(\d+)(?:[-\.]?|$)", self.dbapi.__version__
                    )
                ]
            )
        else:
            return (99, 99, 99)

    @classmethod
    def import_dbapi(cls):
        return __import__("pg8000")

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username="user")
        if "port" in opts:
            opts["port"] = int(opts["port"])
        opts.update(url.query)
        return ([], opts)

    def is_disconnect(self, e, connection, cursor):
        if isinstance(e, self.dbapi.InterfaceError) and "network error" in str(
            e
        ):
            # new as of pg8000 1.19.0 for broken connections
            return True

        # connection was closed normally
        return "connection is closed" in str(e)

    def get_isolation_level_values(self, dbapi_connection):
        return (
            "AUTOCOMMIT",
            "READ COMMITTED",
            "READ UNCOMMITTED",
            "REPEATABLE READ",
            "SERIALIZABLE",
        )

    def set_isolation_level(self, dbapi_connection, level):
        level = level.replace("_", " ")

        if level == "AUTOCOMMIT":
            dbapi_connection.autocommit = True
        else:
            dbapi_connection.autocommit = False
            cursor = dbapi_connection.cursor()
            cursor.execute(
                "SET SESSION CHARACTERISTICS AS TRANSACTION "
                f"ISOLATION LEVEL {level}"
            )
            cursor.execute("COMMIT")
            cursor.close()

    def set_readonly(self, connection, value):
        cursor = connection.cursor()
        try:
            cursor.execute(
                "SET SESSION CHARACTERISTICS AS TRANSACTION %s"
                % ("READ ONLY" if value else "READ WRITE")
            )
            cursor.execute("COMMIT")
        finally:
            cursor.close()

    def get_readonly(self, connection):
        cursor = connection.cursor()
        try:
            cursor.execute("show transaction_read_only")
            val = cursor.fetchone()[0]
        finally:
            cursor.close()

        return val == "on"

    def set_deferrable(self, connection, value):
        cursor = connection.cursor()
        try:
            cursor.execute(
                "SET SESSION CHARACTERISTICS AS TRANSACTION %s"
                % ("DEFERRABLE" if value else "NOT DEFERRABLE")
            )
            cursor.execute("COMMIT")
        finally:
            cursor.close()

    def get_deferrable(self, connection):
        cursor = connection.cursor()
        try:
            cursor.execute("show transaction_deferrable")
            val = cursor.fetchone()[0]
        finally:
            cursor.close()

        return val == "on"

    def _set_client_encoding(self, dbapi_connection, client_encoding):
        # Single quotes in the encoding name are escaped by doubling.
        cursor = dbapi_connection.cursor()
        cursor.execute(
            f"""SET CLIENT_ENCODING TO '{
                client_encoding.replace("'", "''")
            }'"""
        )
        cursor.execute("COMMIT")
        cursor.close()

    def do_begin_twophase(self, connection, xid):
        connection.connection.tpc_begin((0, xid, ""))

    def do_prepare_twophase(self, connection, xid):
        connection.connection.tpc_prepare()

    def do_rollback_twophase(
        self, connection, xid, is_prepared=True, recover=False
    ):
        connection.connection.tpc_rollback((0, xid, ""))

    def do_commit_twophase(
        self, connection, xid, is_prepared=True, recover=False
    ):
        connection.connection.tpc_commit((0, xid, ""))

    def do_recover_twophase(self, connection):
        return [row[1] for row in connection.connection.tpc_recover()]

    def on_connect(self):
        # Collect the per-connection initializers that apply, then return a
        # single callable that runs them in order (or None if none apply).
        fns = []

        def on_connect(conn):
            conn.py_types[quoted_name] = conn.py_types[str]

        fns.append(on_connect)

        if self.client_encoding is not None:

            def on_connect(conn):
                self._set_client_encoding(conn, self.client_encoding)

            fns.append(on_connect)

        if self._json_deserializer:

            def on_connect(conn):
                # json
                conn.register_in_adapter(114, self._json_deserializer)

                # jsonb
                conn.register_in_adapter(3802, self._json_deserializer)

            fns.append(on_connect)

        if len(fns) > 0:

            def on_connect(conn):
                for fn in fns:
                    fn(conn)

            return on_connect
        else:
            return None
dialect = PGDialect_pg8000
| 27.426117 | 95 | 0.630497 |
e0ac67444ccfc7c621863fd4a396cc40f553143e | 320 | py | Python | Modules_import/Turtle.py | vasetousa/Python-fundamentals | 3180c03de28b4f4d36d966221719069a7e18e521 | [
"MIT"
] | null | null | null | Modules_import/Turtle.py | vasetousa/Python-fundamentals | 3180c03de28b4f4d36d966221719069a7e18e521 | [
"MIT"
] | null | null | null | Modules_import/Turtle.py | vasetousa/Python-fundamentals | 3180c03de28b4f4d36d966221719069a7e18e521 | [
"MIT"
] | null | null | null | # import turtle
# import time
#
# turtle.forward(150)
# turtle.right(250)
# turtle.forward(150)
# time.sleep(4)
# import turtle
#
# turtle.forward(150)
# turtle.right(250)
# turtle.forward(150)
#
# turtle.done()
from turtle import forward, right, done, circle
forward(150)
right(250)
circle(80)
forward(150)
done()
| 12.307692 | 47 | 0.7 |
13f303de25e8d10aed8357e4053cac5c24910a38 | 2,998 | py | Python | dimod/reference/composites/roofduality.py | joseppinilla/dimod | e33ca5045e31ee2d9d58515f017fb6be5276cd8e | [
"Apache-2.0"
] | 1 | 2020-11-03T16:42:26.000Z | 2020-11-03T16:42:26.000Z | dimod/reference/composites/roofduality.py | xpin/dimod | 5e399317b0bfaae6ed20e22b9f2ef242f5fa5e6c | [
"Apache-2.0"
] | null | null | null | dimod/reference/composites/roofduality.py | xpin/dimod | 5e399317b0bfaae6ed20e22b9f2ef242f5fa5e6c | [
"Apache-2.0"
] | 1 | 2022-02-01T14:40:31.000Z | 2022-02-01T14:40:31.000Z | # Copyright 2019 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# =============================================================================
"""A composite that uses the roof duality algorithm [#bht]_ [#bh]_ to fix some
variables in the binary quadratic model before passing it on to its child
sampler.
.. [#bht] Boros, E., P.L. Hammer, G. Tavares. Preprocessing of Unconstrained
Quadratic Binary Optimization. Rutcor Research Report 10-2006, April, 2006.
.. [#bh] Boros, E., P.L. Hammer. Pseudo-Boolean optimization. Discrete Applied
Mathematics 123, (2002), pp. 155-225
"""
from dimod.reference.composites.fixedvariable import FixedVariableComposite
from dimod.roof_duality import fix_variables
__all__ = ['RoofDualityComposite']
class RoofDualityComposite(FixedVariableComposite):
    """Composite that pre-assigns variables via roof duality before sampling.

    The :func:`~dimod.roof_duality.fix_variables` function determines
    variable assignments, which are fixed before the child sampler is
    invoked. Returned samples include the fixed variables.

    Args:
        child (:obj:`dimod.Sampler`):
            A dimod sampler. Used to sample the bqm after variables have been
            fixed.

    """
    @property
    def parameters(self):
        # Copy the child's accepted parameters and advertise our own flag.
        accepted = self.child.parameters.copy()
        accepted['sampling_mode'] = []
        return accepted

    def sample(self, bqm, sampling_mode=True, **parameters):
        """Sample from the provided binary quadratic model.

        Uses the :func:`~dimod.roof_duality.fix_variables` function to
        determine which variables to fix.

        Args:
            bqm (:obj:`dimod.BinaryQuadraticModel`):
                Binary quadratic model to be sampled from.

            sampling_mode (bool, optional, default=True):
                In sampling mode, only roof-duality is used. When
                `sampling_mode` is false, strongly connected components are
                used to fix more variables, but in some optimal solutions
                these variables may take different values.

            **parameters:
                Parameters for the child sampler.

        Returns:
            :obj:`dimod.SampleSet`

        """
        # Decide the assignments with roof duality, then delegate to the
        # FixedVariableComposite machinery via the parent class.
        parameters['fixed_variables'] = fix_variables(
            bqm, sampling_mode=sampling_mode)
        return super(RoofDualityComposite, self).sample(bqm, **parameters)
| 37.475 | 87 | 0.667779 |
133da7ccdf1242df11ab9e687e7131c9370011a0 | 2,597 | py | Python | shape-attributes-calculator/shape_area_calculator.py | bruceyu777/code-kata | 3c1c1f7ec24b94a30e91a0d185c6fdb9e497a008 | [
"MIT"
] | null | null | null | shape-attributes-calculator/shape_area_calculator.py | bruceyu777/code-kata | 3c1c1f7ec24b94a30e91a0d185c6fdb9e497a008 | [
"MIT"
] | null | null | null | shape-attributes-calculator/shape_area_calculator.py | bruceyu777/code-kata | 3c1c1f7ec24b94a30e91a0d185c6fdb9e497a008 | [
"MIT"
] | null | null | null | import abc
import logging
import logging.config
import yaml
import pprint
from pprint import pformat
import json
# logging.config.fileConfig('logging.conf')
# with open("logconf.yml", 'r') as f:
# log_config = yaml.safe_load(f)
# logging.config.dictConfig(log_config)
from logging_setting import *
logger = logging.getLogger(__name__)
# Menu of supported shapes shown to the user; 4 is a catch-all placeholder.
shape_number_dict = {1: "rectangle", 2: "circle", 3: "cube", 4: "unknown"}
def calculate_area(name):
    """Prompt for a shape's dimensions and report its area.

    Args:
        name: Shape name ("rectangle", "circle", "cube") or the menu
            number (1-3); matching is case-insensitive.
    """
    # Normalize so both names ("Circle") and menu numbers (2) match below.
    name = str(name).lower()
    if name == "rectangle" or name == "1":
        l = int(input("Enter rectangle's length: "))
        b = int(input("Enter rectangle's breadth: "))
        # calculate area of rectangle
        rect_area = l * b
        logger.info(f"The area of rectangle is {rect_area}.")
    elif name == "circle" or name == "2":
        r = int(input("Enter circle's radius length: "))
        pi = 3.14
        # calculate area of circle
        circ_area = pi * r * r
        # Fixed: message previously said "triangle" for the circle result.
        print(f"The area of circle is {circ_area}.")
    elif name == "cube" or name == "3":
        l = int(input("Enter cube's length: "))
        b = int(input("Enter cube's breadth: "))
        h = int(input("Enter cube's height: "))
        # Surface area of a rectangular cuboid: 2(lb + bh + lh).
        cube_area = (l * b + b * h + l * h) * 2
        # Fixed: message previously said "triangle" for the cube result.
        print(f"The area of cube is {cube_area}.")
    else:
        print("Sorry! This shape is not available")
if __name__ == "__main__":
    # Entry point: show the shape menu, read the user's choice, and
    # compute the chosen shape's area interactively.
    print(f"Calculate Shape Area")
    shape_name = input(
        f"Please choose number of shapes:\n{pformat(shape_number_dict)}\nPlease Type in shape name or shape number:"
    )
    # function calling
    calculate_area(shape_name)
| 28.855556 | 116 | 0.599153 |
308e2aabfd41f3e6dd5e7898163145f42fe12aa6 | 8,037 | py | Python | kubernetes_asyncio/client/models/v1_container_port.py | tomplus/kubernetes-asyncio | 11c3eb4d50ae822545572aa7b8c15f7153f65a1c | [
"Apache-2.0"
] | null | null | null | kubernetes_asyncio/client/models/v1_container_port.py | tomplus/kubernetes-asyncio | 11c3eb4d50ae822545572aa7b8c15f7153f65a1c | [
"Apache-2.0"
] | null | null | null | kubernetes_asyncio/client/models/v1_container_port.py | tomplus/kubernetes-asyncio | 11c3eb4d50ae822545572aa7b8c15f7153f65a1c | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1.23.6
Generated by: https://openapi-generator.tech
"""
try:
from inspect import getfullargspec
except ImportError:
from inspect import getargspec as getfullargspec
import pprint
import re # noqa: F401
import six
from kubernetes_asyncio.client.configuration import Configuration
class V1ContainerPort(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """
    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Python attribute name -> declared OpenAPI type.
    openapi_types = {
        'container_port': 'int',
        'host_ip': 'str',
        'host_port': 'int',
        'name': 'str',
        'protocol': 'str'
    }
    # Python attribute name -> JSON field name used by the Kubernetes API.
    attribute_map = {
        'container_port': 'containerPort',
        'host_ip': 'hostIP',
        'host_port': 'hostPort',
        'name': 'name',
        'protocol': 'protocol'
    }
    def __init__(self, container_port=None, host_ip=None, host_port=None, name=None, protocol=None, local_vars_configuration=None):  # noqa: E501
        """V1ContainerPort - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration.get_default_copy()
        self.local_vars_configuration = local_vars_configuration
        self._container_port = None
        self._host_ip = None
        self._host_port = None
        self._name = None
        self._protocol = None
        self.discriminator = None
        # container_port is the only required field; its setter rejects None
        # when client-side validation is enabled.  Optional fields are only
        # set when explicitly provided so they stay absent from to_dict().
        self.container_port = container_port
        if host_ip is not None:
            self.host_ip = host_ip
        if host_port is not None:
            self.host_port = host_port
        if name is not None:
            self.name = name
        if protocol is not None:
            self.protocol = protocol
    @property
    def container_port(self):
        """Gets the container_port of this V1ContainerPort.  # noqa: E501
        Number of port to expose on the pod's IP address. This must be a valid port number, 0 < x < 65536.  # noqa: E501
        :return: The container_port of this V1ContainerPort.  # noqa: E501
        :rtype: int
        """
        return self._container_port
    @container_port.setter
    def container_port(self, container_port):
        """Sets the container_port of this V1ContainerPort.
        Number of port to expose on the pod's IP address. This must be a valid port number, 0 < x < 65536.  # noqa: E501
        :param container_port: The container_port of this V1ContainerPort.  # noqa: E501
        :type container_port: int
        """
        # Required field: reject None when client-side validation is on.
        if self.local_vars_configuration.client_side_validation and container_port is None:  # noqa: E501
            raise ValueError("Invalid value for `container_port`, must not be `None`")  # noqa: E501
        self._container_port = container_port
    @property
    def host_ip(self):
        """Gets the host_ip of this V1ContainerPort.  # noqa: E501
        What host IP to bind the external port to.  # noqa: E501
        :return: The host_ip of this V1ContainerPort.  # noqa: E501
        :rtype: str
        """
        return self._host_ip
    @host_ip.setter
    def host_ip(self, host_ip):
        """Sets the host_ip of this V1ContainerPort.
        What host IP to bind the external port to.  # noqa: E501
        :param host_ip: The host_ip of this V1ContainerPort.  # noqa: E501
        :type host_ip: str
        """
        self._host_ip = host_ip
    @property
    def host_port(self):
        """Gets the host_port of this V1ContainerPort.  # noqa: E501
        Number of port to expose on the host. If specified, this must be a valid port number, 0 < x < 65536. If HostNetwork is specified, this must match ContainerPort. Most containers do not need this.  # noqa: E501
        :return: The host_port of this V1ContainerPort.  # noqa: E501
        :rtype: int
        """
        return self._host_port
    @host_port.setter
    def host_port(self, host_port):
        """Sets the host_port of this V1ContainerPort.
        Number of port to expose on the host. If specified, this must be a valid port number, 0 < x < 65536. If HostNetwork is specified, this must match ContainerPort. Most containers do not need this.  # noqa: E501
        :param host_port: The host_port of this V1ContainerPort.  # noqa: E501
        :type host_port: int
        """
        self._host_port = host_port
    @property
    def name(self):
        """Gets the name of this V1ContainerPort.  # noqa: E501
        If specified, this must be an IANA_SVC_NAME and unique within the pod. Each named port in a pod must have a unique name. Name for the port that can be referred to by services.  # noqa: E501
        :return: The name of this V1ContainerPort.  # noqa: E501
        :rtype: str
        """
        return self._name
    @name.setter
    def name(self, name):
        """Sets the name of this V1ContainerPort.
        If specified, this must be an IANA_SVC_NAME and unique within the pod. Each named port in a pod must have a unique name. Name for the port that can be referred to by services.  # noqa: E501
        :param name: The name of this V1ContainerPort.  # noqa: E501
        :type name: str
        """
        self._name = name
    @property
    def protocol(self):
        """Gets the protocol of this V1ContainerPort.  # noqa: E501
        Protocol for port. Must be UDP, TCP, or SCTP. Defaults to \"TCP\".  # noqa: E501
        :return: The protocol of this V1ContainerPort.  # noqa: E501
        :rtype: str
        """
        return self._protocol
    @protocol.setter
    def protocol(self, protocol):
        """Sets the protocol of this V1ContainerPort.
        Protocol for port. Must be UDP, TCP, or SCTP. Defaults to \"TCP\".  # noqa: E501
        :param protocol: The protocol of this V1ContainerPort.  # noqa: E501
        :type protocol: str
        """
        self._protocol = protocol
    def to_dict(self, serialize=False):
        """Returns the model properties as a dict"""
        result = {}
        def convert(x):
            # Recursively serialize nested generated models; plain values
            # pass straight through.  Models whose to_dict takes only self
            # (older generator output) are called without arguments.
            if hasattr(x, "to_dict"):
                args = getfullargspec(x.to_dict).args
                if len(args) == 1:
                    return x.to_dict()
                else:
                    return x.to_dict(serialize)
            else:
                return x
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            # When serialize=True, emit JSON field names (attribute_map).
            attr = self.attribute_map.get(attr, attr) if serialize else attr
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: convert(x),
                    value
                ))
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], convert(item[1])),
                    value.items()
                ))
            else:
                result[attr] = convert(value)
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        # Equality is structural: compare the serialized dict forms.
        if not isinstance(other, V1ContainerPort):
            return False
        return self.to_dict() == other.to_dict()
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, V1ContainerPort):
            return True
        return self.to_dict() != other.to_dict()
| 32.538462 | 216 | 0.607689 |
5cbd642f36e84dc9cfaacf3936b6cf8f8f712425 | 6,512 | py | Python | test/modules/md/test_751_sectigo.py | tititiou36/httpd | 1348607c00ba58ce371f2f8ecb08abf610227043 | [
"Apache-2.0"
] | 2,529 | 2015-01-02T11:52:53.000Z | 2022-03-30T19:54:27.000Z | test/modules/md/test_751_sectigo.py | tititiou36/httpd | 1348607c00ba58ce371f2f8ecb08abf610227043 | [
"Apache-2.0"
] | 133 | 2015-04-21T05:50:45.000Z | 2022-03-30T14:23:40.000Z | test/modules/md/test_751_sectigo.py | tititiou36/httpd | 1348607c00ba58ce371f2f8ecb08abf610227043 | [
"Apache-2.0"
] | 1,113 | 2015-01-01T14:47:02.000Z | 2022-03-29T16:47:18.000Z | import os
import re
import time
import pytest
from .md_conf import MDConf
# set the environment variables
# SECTIGO_EAB="$kid $hmac" for
# SECTIGO_TLD="<your registered dns name>"
# these tests to become active
#
# Sectigo demo ACME directory endpoint used by all tests below.
DEMO_ACME = "https://acme.demo.sectigo.com/"
# Registered DNS suffix; populated from SECTIGO_TLD by missing_tld().
DEMO_TLD = None
# EAB credentials; index 0 is a dummy entry, index 1 is appended from
# the SECTIGO_EAB environment variable by missing_eab().
EABS = [
    {'kid': '0123', 'hmac': 'abcdef'},
]
def missing_eab():
    """Return True when no real EAB credentials were supplied via SECTIGO_EAB.

    On first call, parses "<kid> <hmac>" out of the SECTIGO_EAB environment
    variable (if present) and appends it to the module-level EABS list.
    """
    global EABS
    env_value = os.environ.get('SECTIGO_EAB')
    if env_value is not None and len(EABS) == 1:
        match = re.match(r'^\s*(\S+)\s+(\S+)\s*$', env_value)
        if match:
            kid, hmac = match.groups()
            EABS.append({'kid': kid, 'hmac': hmac})
    return len(EABS) == 1
def missing_tld():
    """Return True when SECTIGO_TLD is not configured in the environment.

    Side effect: caches the environment value in the module-level DEMO_TLD.
    """
    global DEMO_TLD
    DEMO_TLD = os.environ.get('SECTIGO_TLD', DEMO_TLD)
    return DEMO_TLD is None
@pytest.mark.skipif(condition=missing_tld(), reason="env var SECTIGO_TLD not set")
@pytest.mark.skipif(condition=missing_eab(), reason="env var SECTIGO_EAB not set")
class TestSectigo:
    """End-to-end mod_md tests against the Sectigo demo ACME endpoint.

    The whole class is skipped unless both SECTIGO_TLD and SECTIGO_EAB are
    set.  EABS[1] holds the credentials parsed from SECTIGO_EAB (EABS[0] is
    a dummy placeholder).
    """
    @pytest.fixture(autouse=True, scope='class')
    def _class_scope(self, env, acme):
        # One-time setup: start the local ACME test server with the 'eab'
        # config and install a fresh base Apache configuration.
        acme.start(config='eab')
        env.check_acme()
        env.clear_store()
        MDConf(env).install()
        assert env.apache_restart() == 0
    @pytest.fixture(autouse=True, scope='function')
    def _method_scope(self, env, request):
        # Per-test setup: wipe the MD store and derive a test domain name.
        env.clear_store()
        self.test_domain = env.get_request_domain(request)
    def test_md_751_001(self, env):
        # valid config, expect cert with correct chain
        domain = f"test1.{DEMO_TLD}"
        domains = [domain]
        conf = MDConf(env)
        conf.start_md(domains)
        conf.add(f"MDCertificateAuthority {DEMO_ACME}")
        conf.add("MDCACertificateFile none")
        conf.add(f"MDExternalAccountBinding {EABS[1]['kid']} {EABS[1]['hmac']}")
        conf.end_md()
        conf.add_vhost(domains=domains)
        conf.install()
        assert env.apache_restart() == 0
        assert env.await_completion(domains)
        # TLS connection must verify against the Sectigo demo root.
        r = env.curl_get(f"https://{domain}:{env.https_port}", options=[
            "--cacert", f"{env.test_dir}/data/sectigo-demo-root.pem"
        ])
        assert r.response['status'] == 200
    def test_md_751_002(self, env):
        # without EAB set
        domain = f"test1.{DEMO_TLD}"
        domains = [domain]
        conf = MDConf(env)
        conf.start_md(domains)
        conf.add(f"MDCertificateAuthority {DEMO_ACME}")
        conf.add("MDCACertificateFile none")
        conf.end_md()
        conf.add_vhost(domains=domains)
        conf.install()
        assert env.apache_restart() == 0
        assert env.await_error(domain)
        # The CA must reject the order for lack of external account binding.
        md = env.get_md_status(domain)
        assert md['renewal']['errors'] > 0
        assert md['renewal']['last']['problem'] == 'urn:ietf:params:acme:error:externalAccountRequired'
    def test_md_751_003(self, env):
        # with wrong EAB set
        domain = f"test1.{DEMO_TLD}"
        domains = [domain]
        conf = MDConf(env)
        conf.start_md(domains)
        conf.add(f"MDCertificateAuthority {DEMO_ACME}")
        conf.add("MDCACertificateFile none")
        conf.add(f"MDExternalAccountBinding xxxxxx aaaaaaaaaaaaasdddddsdasdsadsadsadasdsadsa")
        conf.end_md()
        conf.add_vhost(domains=domains)
        conf.install()
        assert env.apache_restart() == 0
        assert env.await_error(domain)
        # Bogus credentials lead to an 'unauthorized' ACME problem.
        md = env.get_md_status(domain)
        assert md['renewal']['errors'] > 0
        assert md['renewal']['last']['problem'] == 'urn:ietf:params:acme:error:unauthorized'
    def test_md_751_004(self, env):
        # valid config, get cert, add dns name, renew cert
        domain = f"test1.{DEMO_TLD}"
        domain2 = f"test2.{DEMO_TLD}"
        domains = [domain]
        conf = MDConf(env)
        conf.start_md(domains)
        conf.add(f"MDCertificateAuthority {DEMO_ACME}")
        conf.add("MDCACertificateFile none")
        conf.add(f"MDExternalAccountBinding {EABS[1]['kid']} {EABS[1]['hmac']}")
        conf.end_md()
        conf.add_vhost(domains=domains)
        conf.install()
        assert env.apache_restart() == 0
        assert env.await_completion(domains)
        r = env.curl_get(f"https://{domain}:{env.https_port}", options=[
            "--cacert", f"{env.test_dir}/data/sectigo-demo-root.pem"
        ])
        assert r.response['status'] == 200
        # domain2 is not covered yet, so the TLS handshake must fail.
        r = env.curl_get(f"https://{domain2}:{env.https_port}", options=[
            "--cacert", f"{env.test_dir}/data/sectigo-demo-root.pem"
        ])
        assert r.exit_code != 0
        md1 = env.get_md_status(domain)
        acct1 = md1['ca']['account']
        # add the domain2 to the dns names
        domains = [domain, domain2]
        conf = MDConf(env)
        conf.start_md(domains)
        conf.add(f"MDCertificateAuthority {DEMO_ACME}")
        conf.add("MDCACertificateFile none")
        conf.add(f"MDExternalAccountBinding {EABS[1]['kid']} {EABS[1]['hmac']}")
        conf.end_md()
        conf.add_vhost(domains=domains)
        conf.install()
        assert env.apache_restart() == 0
        assert env.await_completion(domains)
        r = env.curl_get(f"https://{domain2}:{env.https_port}", options=[
            "--cacert", f"{env.test_dir}/data/sectigo-demo-root.pem"
        ])
        assert r.response['status'] == 200
        # Renewal with unchanged EAB must reuse the same ACME account.
        md2 = env.get_md_status(domain)
        acct2 = md2['ca']['account']
        assert acct2 == acct1, f"ACME account was not reused: {acct1} became {acct2}"
    def test_md_751_020(self, env):
        # valid config, get cert, check OCSP status
        domain = f"test1.{DEMO_TLD}"
        domains = [domain]
        conf = MDConf(env)
        conf.add("MDStapling on")
        conf.start_md(domains)
        conf.add(f"""
            MDCertificateAuthority {DEMO_ACME}
            MDCACertificateFile none
            MDExternalAccountBinding {EABS[1]['kid']} {EABS[1]['hmac']}
            """)
        conf.end_md()
        conf.add_vhost(domains=domains)
        conf.install()
        assert env.apache_restart() == 0
        assert env.await_completion(domains)
        r = env.curl_get(f"https://{domain}:{env.https_port}", options=[
            "--cacert", f"{env.test_dir}/data/sectigo-demo-root.pem"
        ])
        assert r.response['status'] == 200
        # Give the stapling machinery a moment before polling OCSP status.
        time.sleep(1)
        for domain in domains:
            stat = env.await_ocsp_status(domain,
                                         ca_file=f"{env.test_dir}/data/sectigo-demo-root.pem")
            assert stat['ocsp'] == "successful (0x0)"
            assert stat['verify'] == "0 (ok)"
| 35.78022 | 103 | 0.601505 |
73010555adc53ff910afa0fc3380b899311fdb9e | 28 | py | Python | models/__init__.py | kaylode/mediaeval20-pp | 9c115d8121dbc84d4198ff1d117635a60b177d60 | [
"MIT"
] | 2 | 2021-07-10T14:46:02.000Z | 2021-12-13T01:33:09.000Z | models/__init__.py | kaylode/mediaeval20-pp | 9c115d8121dbc84d4198ff1d117635a60b177d60 | [
"MIT"
] | null | null | null | models/__init__.py | kaylode/mediaeval20-pp | 9c115d8121dbc84d4198ff1d117635a60b177d60 | [
"MIT"
] | null | null | null | from .model import FullModel | 28 | 28 | 0.857143 |
ffd0ecb5ef851cf21011faf29efa5c1f54fb7823 | 2,844 | py | Python | day7.py | zsmoore/Advent-Of-Code-2017 | 895a7fbaa8b8b82a338dac967bccbf97b2092b20 | [
"MIT"
] | null | null | null | day7.py | zsmoore/Advent-Of-Code-2017 | 895a7fbaa8b8b82a338dac967bccbf97b2092b20 | [
"MIT"
] | null | null | null | day7.py | zsmoore/Advent-Of-Code-2017 | 895a7fbaa8b8b82a338dac967bccbf97b2092b20 | [
"MIT"
] | null | null | null | import sys
import pprint
class tree_node:
    """A program in the tower: its name, integer weight, and the names of
    the programs directly above it (children)."""

    def __init__(self, name, weight, children):
        self.name = name
        self.weight = int(weight)
        # Normalize a missing child list to an empty list.
        self.children = [] if children is None else children

    def __repr__(self):
        return '{0} \n\tWeight:{1}\n\tChildren:{2}'.format(self.name, self.weight, self.children)

    def __hash__(self):
        # Nodes are identified solely by name, so hash by name to match __eq__.
        return hash(self.name)

    def __eq__(self, other):
        # Compare names directly instead of comparing hash values: the old
        # hash-based comparison could report two different names as equal
        # on a hash collision.  Plain strings are still accepted so lookups
        # like `node == "name"` (used by weighted_tree.get_node) keep working.
        other_name = other.name if isinstance(other, tree_node) else other
        return self.name == other_name
class weighted_tree:
    """Tower of tree_node objects (Advent of Code 2017 day 7).

    On construction, finds the bottom program (root) and precomputes a
    weight total per node used by check_balance().
    """
    def __init__(self, nodes):
        self.nodes = set(nodes)
        self.find_root()
        self.compute_subs()
    def find_root(self):
        # The root is the only name that never appears as anyone's child.
        total = set([node.name for node in self.nodes])
        children = set([child for node in self.nodes for child in node.children])
        root = total - children
        # root should be a one-element set; this loop extracts that element.
        for node in root:
            self.root = node
    def compute_subs(self):
        # NOTE(review): totalWeights[n] ends up as n.weight plus the weights
        # of n's *direct* children only (each child contributes its own
        # weight once, regardless of the `seen` check), not the full subtree
        # weight -- confirm this is the intended metric for check_balance.
        self.totalWeights = {}
        toCheck = [self.get_node(self.root)]
        seen = set()
        while toCheck != []:
            node = toCheck.pop()
            seen.add(node)
            self.totalWeights[node] = node.weight
            for child in node.children:
                curr = self.get_node(child)
                if curr not in seen:
                    toCheck.append(curr)
                self.totalWeights[node] += curr.weight
    def check_balance(self):
        # Walk the tree from the root and return the first pair of sibling
        # weight totals that disagree, as (expected_total, deviant_total).
        # Returns None implicitly when every node's children are balanced.
        toCheck = [self.get_node(self.root)]
        seen = set()
        while toCheck != []:
            node = toCheck.pop()
            seen.add(node)
            if node.children != []:
                # Use the first child's total as the reference value.
                known = self.get_node(node.children[0])
                for child in node.children:
                    child = self.get_node(child)
                    if self.totalWeights[known] != self.totalWeights[child]:
                        return (self.totalWeights[known], self.totalWeights[child])
                    if child not in seen:
                        toCheck.append(child)
    def get_node(self, toGet):
        # Linear lookup by name; relies on tree_node.__eq__ accepting a
        # plain string on the right-hand side.
        for node in self.nodes:
            if node == toGet:
                return node
def main():
    # Read the puzzle input (one program description per line) from the
    # file named by the first CLI argument, build the weighted tree, and
    # print the first imbalanced sibling-weight pair found.
    in_file = open(sys.argv[1], 'r')
    nodes = []
    for line in in_file.readlines():
        nodes.append(line.strip())
    tree_nodes = initialize(nodes)
    tree = weighted_tree(tree_nodes)
    pprint.pprint(tree.check_balance())
def initialize(nodes):
    """Parse raw input lines into tree_node objects.

    Each line looks like "name (weight)" optionally followed by
    " -> child1, child2, ...".
    """
    clean_nodes = []
    for line in nodes:
        children = None
        if '->' in line:
            # Split off the child list after the arrow.
            line, _, child_part = line.partition('->')
            children = [child.strip() for child in child_part.split(', ')]
        name, raw_weight = line.split()[0], line.split()[1]
        weight = raw_weight.strip('(').strip(')')
        clean_nodes.append(tree_node(name, weight, children))
    return clean_nodes
if __name__ == "__main__":
    # Script entry point.
    main()
| 26.830189 | 97 | 0.540084 |
aaebba0c7823e1e5cc6f80ac4ca0e6bad4f4c3f1 | 538 | py | Python | clustering/config.py | kburnik/naps-clustering | 8ceaad61e7f1c3d76ad9e7c7491b705b936a6f19 | [
"MIT"
] | null | null | null | clustering/config.py | kburnik/naps-clustering | 8ceaad61e7f1c3d76ad9e7c7491b705b936a6f19 | [
"MIT"
] | null | null | null | clustering/config.py | kburnik/naps-clustering | 8ceaad61e7f1c3d76ad9e7c7491b705b936a6f19 | [
"MIT"
] | null | null | null | """Configuration for directories and constants."""
import os
DIR = os.path.dirname(os.path.abspath(__file__))
"""Root directory for the source code."""
DATA_DIR = os.path.join(DIR, '..', 'data')
"""Root directory for the data."""
OUT_DIR = os.path.join(DIR, '..', 'out')
"""Directory for the results."""
DATA_NAPS_ALL = os.path.join(DATA_DIR, 'NAPSA.csv')
"""Path to the NAPS dataset for clustering."""
DATA_NAPS_BE_ALL = os.path.join(DATA_DIR, 'emotion_and_stimuli.csv')
"""Path to the emotion and stimuli dataset for clustering."""
| 28.315789 | 68 | 0.704461 |
9138f1ffda246e2b7a13506095e62c2948060f01 | 19,502 | py | Python | src/static_proxy/static_base.py | Yanivmd/maloss | af85ac202668da88d0b4a885386a1e56703e37c8 | [
"MIT"
] | 1 | 2022-01-29T16:13:06.000Z | 2022-01-29T16:13:06.000Z | src/static_proxy/static_base.py | Yanivmd/maloss | af85ac202668da88d0b4a885386a1e56703e37c8 | [
"MIT"
] | null | null | null | src/static_proxy/static_base.py | Yanivmd/maloss | af85ac202668da88d0b4a885386a1e56703e37c8 | [
"MIT"
] | 1 | 2022-01-29T16:13:07.000Z | 2022-01-29T16:13:07.000Z | import os
import logging
import shutil
import tempfile
import re
from os.path import join, exists, abspath, isdir, isfile, dirname, basename, relpath
import proto.python.ast_pb2 as ast_pb2
from pm_util import get_pm_proxy_for_language, get_pm_proxy
from util.enum_util import LanguageEnum
from util.compress_files import decompress_file, get_file_with_meta
from util.job_util import read_proto_from_file, write_proto_to_file, exec_command
from proto.python.ast_pb2 import PkgAstResults, AstLookupConfig, FileInfo, AstNode
from proto.python.module_pb2 import ModuleStatic
# File extensions the analyzers treat as analyzable input for each
# supported language (compiled-artifact formats included where packages
# ship binaries).
Language2Extensions = {
    LanguageEnum.python: ('.py',),
    LanguageEnum.javascript: ('.js',),
    LanguageEnum.ruby: ('.rb',),
    LanguageEnum.java: ('.java', '.class', '.jar', '.aar', '.war', '.dex', '.apk'),  # java packages are compiled
    LanguageEnum.csharp: ('.cs',),  # c# packages are compiled and are windows binaries/libraries
    LanguageEnum.php: ('.php',)
}
class StaticAnalyzer(object):
    def __init__(self):
        # Concrete language analyzers are expected to set this to a
        # LanguageEnum value; several methods raise when it is still None.
        self.language = None
    def astgen(self, inpath, outfile, root=None, configpath=None, pkg_name=None, pkg_version=None, evaluate_smt=False):
        """
        Detects usage of sensitive APIs.

        Abstract hook: concrete language analyzers override this to scan
        *inpath* and write results to *outfile*.
        """
        pass
    def taint(self, inpath, outfile, configpath=None, pkg_name=None, pkg_version=None):
        """
        Identify data flow from sources to sinks.
        This helps identify stealer, backdoor, and user-controlled sabotage. The returned message is of class module_pb2.ModuleStatic.

        Abstract hook: concrete language analyzers override this.
        """
        pass
    def get_taint_result(self, pm_proxy, pkg_name, outdir, configpath=None, pkg_version=None, cache_only=False):
        """Return the taint result for a package, computing it on cache miss.

        Looks for a cached taint file in *outdir*; otherwise downloads the
        package via *pm_proxy* into a temp dir, runs self.taint() on it,
        and loads the result.  Returns a module_pb2.ModuleStatic, or None
        when the result is unavailable (download/taint failure, or
        cache_only with no cached file).
        """
        taint_fname = pm_proxy.get_taint_fname(pkg_name=pkg_name, pkg_version=pkg_version)
        taint_file = join(outdir, taint_fname)
        taint_result = None
        if exists(taint_file):
            logging.warning("get_taint_result: using cached taint_file %s!", taint_file)
            taint_result = ModuleStatic()
            read_proto_from_file(taint_result, taint_file, binary=False)
        else:
            if cache_only:
                logging.warning("skipping unprocessed pkg %s ver %s due to cache_only!", pkg_name, pkg_version)
                return taint_result
            # download current package and analyze it
            tempdir = tempfile.mkdtemp(prefix='taint-')
            pm_proxy.download(pkg_name=pkg_name, pkg_version=pkg_version, outdir=tempdir)
            tempdir_files = os.listdir(tempdir)
            if len(tempdir_files) == 0:
                logging.error("fail to download pkg %s ver %s", pkg_name, pkg_version)
            else:
                # Analyze the single downloaded artifact.
                pkg_file = join(tempdir, tempdir_files[0])
                self.taint(inpath=pkg_file, outfile=taint_file, configpath=configpath, pkg_name=pkg_name,
                           pkg_version=pkg_version)
                if exists(taint_file):
                    taint_result = ModuleStatic()
                    read_proto_from_file(taint_result, taint_file, binary=False)
                else:
                    logging.error("fail to run taint on downloaded package %s", pkg_file)
            shutil.rmtree(tempdir)
        return taint_result
    def taint_tree(self, pkg_name, outdir, cache_dir=None, configpath=None, pkg_version=None, ignore_dep_version=False,
                   ignore_dep=False):
        """
        Performs static taint analysis on packages and their dependencies, based on sources and sinks.
        This identifies suspicious API calls and flows in packages (i.e. network.read, eval).

        When dependencies are analyzed, their discovered sources/sinks are
        merged into a temporary config used for the main package.
        """
        # sanitize language
        if self.language is None:
            raise Exception("Invoking taint on invalid language: %s" % self.language)
        pm_proxy = get_pm_proxy_for_language(language=self.language, cache_dir=cache_dir, isolate_pkg_info=True)
        # check for cached taint
        taint_fname = pm_proxy.get_taint_fname(pkg_name=pkg_name, pkg_version=pkg_version)
        taint_file = join(outdir, taint_fname)
        if exists(taint_file):
            logging.warning("skipping cached taint_file %s!", taint_file)
            return
        # get flattened dependencies, because each package result only contains module summary for itself (no children),
        # but indirect dependencies can be directly imported.
        if not ignore_dep:
            try:
                flatten_dep_pkgs = pm_proxy.get_dep(pkg_name=pkg_name, pkg_version=pkg_version, flatten=True)
            except Exception as gde:
                logging.error("fail to get_dep on pkg %s ver %s: %s", pkg_name, pkg_version, gde)
                return
            # get the taint results for dependent packages
            dep_taint_results = []
            for dep_name, dep_version in flatten_dep_pkgs.items():
                if ignore_dep_version:
                    dep_version = None
                dep_taint_result = self.get_taint_result(pm_proxy=pm_proxy, pkg_name=dep_name, outdir=outdir,
                                                         configpath=configpath, pkg_version=dep_version)
                if dep_taint_result:
                    dep_taint_results.append(dep_taint_result)
            # based on the taint result of the children, generate the new config file and run taint on current package
            tmp_configpath = self._gen_combined_configpath(configpath=configpath, dep_taint_results=dep_taint_results)
            taint_result = self.get_taint_result(pm_proxy=pm_proxy, pkg_name=pkg_name, outdir=outdir,
                                                 configpath=tmp_configpath, pkg_version=pkg_version)
            # The combined config is a throwaway temp file; clean it up here.
            os.remove(tmp_configpath)
        else:
            taint_result = self.get_taint_result(pm_proxy=pm_proxy, pkg_name=pkg_name, outdir=outdir,
                                                 configpath=configpath, pkg_version=pkg_version)
        if taint_result:
            logging.warning("identified %d flows in %s ver %s", len(taint_result.flows), pkg_name, pkg_version)
    def _gen_combined_configpath(self, configpath, dep_taint_results):
        """Merge dependency sources/sinks into a new temp config file.

        Returns the path of the generated file; the caller is responsible
        for deleting it (taint_tree removes it after use).
        """
        # load the old config
        configpb = AstLookupConfig()
        read_proto_from_file(configpb, configpath, binary=False)
        # iterate through the taint results to update configpb
        num_new_sources = 0
        num_new_sinks = 0
        for dep_taint_result in dep_taint_results:
            # dep_taint_result is of type module_pb2.ModuleStatic
            for new_source in dep_taint_result.sources:
                configpb.apis.append(new_source.node)
                num_new_sources += 1
            for new_sink in dep_taint_result.sinks:
                configpb.apis.append(new_sink.node)
                num_new_sinks += 1
        if num_new_sources + num_new_sinks > 0:
            logging.warning("added %d new sources and %d new sinks!", num_new_sources, num_new_sinks)
        # generate the new config file; delete=False so the file survives
        # this function and can be passed to the taint run.
        outf = tempfile.NamedTemporaryFile(prefix='configpath-', delete=False)
        write_proto_to_file(proto=configpb, filename=outf.name, binary=False)
        return outf.name
    def danger(self, pkg_name, outdir, cache_dir=None, configpath=None, pkg_version=None):
        """
        Identify arguments of sensitive APIs
        http://www0.cs.ucl.ac.uk/staff/M.Harman/exe1.html
        This helps identify hard-coded sabotage and argument-specific dangerous calls.

        Abstract hook: concrete language analyzers override this.
        """
        pass
    def danger_tree(self, pkg_name, outdir, cache_dir=None, configpath=None, pkg_version=None, ignore_dep_version=False,
                    ignore_dep=False):
        """
        Perform static API analysis on packages and their dependencies.
        This identifies suspicious API calls (e.g. rmdir).

        Abstract hook: concrete language analyzers override this.
        """
        pass
    @staticmethod
    def _sanitize_astgen_args(inpath, outfile, root, configpath, language):
        """Normalize astgen inputs and resolve *inpath* to an analyzable path.

        Returns (analyze_path, is_decompress_path, outfile, root, configpath);
        is_decompress_path is True when analyze_path is a temp directory
        created here that the caller must clean up via _cleanup_astgen.
        Raises Exception when inpath is missing or of an unhandled type.
        """
        # get the absolute path
        inpath = abspath(inpath)
        outfile = abspath(outfile)
        if root is not None:
            root = abspath(root)
        if configpath is not None:
            configpath = abspath(configpath)
        # handle the input path
        analyze_path = None
        is_decompress_path = False
        if not exists(inpath):
            raise Exception("inpath %s doesn't exist!" % inpath)
        if isdir(inpath):
            logging.debug("inpath %s is a directory!", inpath)
            analyze_path = inpath
        else:
            logging.debug("inpath %s is a file, checking whether it is a compressed file!", inpath)
            if inpath.endswith(Language2Extensions[language]):
                logging.debug("inpath %s is a single file, directly analyze it!", inpath)
                analyze_path = inpath
            elif inpath.endswith(".gem"):
                # Handle gem file using `gem unpack`
                logging.debug("inpath %s is a gem file, decompress using gem unpack and analyze it!", inpath)
                import tempfile
                analyze_path = tempfile.mkdtemp(prefix='gem-')
                gem_unpack_cmd = ['gem', 'unpack', inpath, '--target', analyze_path]
                exec_command("gem unpack", gem_unpack_cmd)
                is_decompress_path = True
            elif get_file_with_meta(inpath) is not None:
                logging.debug("inpath %s is a compressed file, decompress and analyze it!", inpath)
                analyze_path = decompress_file(inpath)
                is_decompress_path = True
            else:
                raise Exception("inpath %s is unhandled type for language %s!" % (inpath, language))
        return analyze_path, is_decompress_path, outfile, root, configpath
    @staticmethod
    def _cleanup_astgen(analyze_path, is_decompress_path):
        # Remove the temp decompression dir created by _sanitize_astgen_args;
        # caller-supplied paths (is_decompress_path False) are left alone.
        if is_decompress_path:
            shutil.rmtree(analyze_path)
    @staticmethod
    def _pb_text_to_bin(proto, infile, outfile):
        # Re-serialize a text-format protobuf file into binary format,
        # using *proto* as the scratch message instance.
        read_proto_from_file(proto=proto, filename=infile, binary=False)
        write_proto_to_file(proto=proto, filename=outfile, binary=True)
@staticmethod
def _get_infiles(inpath, root, language):
infiles = []
if isfile(inpath):
if root is None:
root = dirname(inpath)
root = abspath(root)
infiles.append(abspath(inpath))
elif isdir(inpath):
if root is None:
root = inpath
root = abspath(root)
for i_root, _, i_files in os.walk(inpath):
for fname in i_files:
if fname.endswith(Language2Extensions[language]):
infiles.append(abspath(join(i_root, fname)))
if len(infiles) == 0:
logging.error("No input files from %s for language %s", inpath, language)
return infiles, root
@staticmethod
def _get_filepb(infile, root):
filepb = FileInfo()
filepb.filename = basename(infile)
filepb.relpath = relpath(dirname(infile), root)
filepb.file = relpath(infile, root)
filepb.directory = root
return filepb
    @staticmethod
    def _get_api_result(base, name, args, source_text, source_range, filepb):
        """Build an AstNode record for a call to API *name* (optionally on *base*)."""
        api_result = AstNode()
        api_result.type = ast_pb2.AstNode.FUNCTION_DECL_REF_EXPR
        api_result.name = name
        if base is None:
            api_result.full_name = name
        else:
            api_result.base_type = base
            api_result.full_name = '%s.%s' % (base, name)
        for arg in args:
            api_result.arguments.append(arg)
        api_result.source = source_text
        # source_range is ((start_row, start_col), (end_row, end_col)).
        source_start, source_end = source_range
        api_result.range.start.row = source_start[0]
        api_result.range.start.column = source_start[1]
        api_result.range.start.file_info.CopyFrom(filepb)
        api_result.range.end.row = source_end[0]
        api_result.range.end.column = source_end[1]
        api_result.range.end.file_info.CopyFrom(filepb)
        return api_result
    def astfilter(self, pkg_name, outdir, cache_dir=None, configpath=None, pkg_version=None, pkg_manager=None,
                  ignore_dep_version=False, ignore_dep=False):
        """
        Filters packages and their dependencies, based on sensitive APIs and their combinations
        This helps narrow down packages for further analysis.

        Runs astgen on the main package (and, unless ignore_dep, on each of its
        flattened dependencies), evaluates the config's SMT formula over the
        combined API usage, records the verdict in the main result's config,
        and writes the annotated result to a text-format protobuf file.

        Args:
            pkg_name: name of the package to analyze.
            outdir: directory where astgen/astfilter result files live.
            cache_dir: optional cache directory for the package-manager proxy.
            configpath: optional text-format AstLookupConfig path; when None,
                the config embedded in the astgen results is used.
            pkg_version: optional package version; None means latest.
            pkg_manager: optional explicit package manager; when None it is
                derived from self.language.
            ignore_dep_version: when True, analyze dependencies at their
                latest version instead of the pinned one.
            ignore_dep: when True, skip dependency analysis entirely.
        """
        # sanitize language
        if self.language is None:
            raise Exception("Invoking astfilter on invalid language: %s" % self.language)
        if pkg_manager is None:
            pm_proxy = get_pm_proxy_for_language(language=self.language, cache_dir=cache_dir, isolate_pkg_info=True)
        else:
            pm_proxy = get_pm_proxy(pm=pkg_manager, cache_dir=cache_dir, isolate_pkg_info=True)
        # check for cached astfilter file; an existing file short-circuits the whole run
        astfilter_fname = pm_proxy.get_astfilter_fname(pkg_name=pkg_name, pkg_version=pkg_version)
        astfilter_file = join(outdir, astfilter_fname)
        if exists(astfilter_file):
            logging.warning("skipping cached astfilter_file %s!", astfilter_file)
            return
        # get the astgen results for the main package as well as its dependent packages
        astgen_results = []
        main_astgen_result = self.get_astgen_result(pm_proxy=pm_proxy, pkg_name=pkg_name, outdir=outdir,
                                                    configpath=configpath, pkg_version=pkg_version)
        if main_astgen_result:
            astgen_results.append(main_astgen_result)
        else:
            # without a main result there is nothing to evaluate or dump
            logging.error("fail to run astfilter on pkg %s ver %s", pkg_name, pkg_version)
            return
        # get flattened dependencies and their astgen results
        if not ignore_dep:
            try:
                flatten_dep_pkgs = pm_proxy.get_dep(pkg_name=pkg_name, pkg_version=pkg_version, flatten=True)
            except Exception as gde:
                # dependency resolution failure aborts the whole run (no partial output)
                logging.error("fail to get_dep on pkg %s ver %s: %s", pkg_name, pkg_version, gde)
                return
            for dep_name, dep_version in flatten_dep_pkgs.items():
                if ignore_dep_version:
                    dep_version = None
                # failed dependency analysis is tolerated: the dep is simply skipped
                dep_astgen_result = self.get_astgen_result(pm_proxy=pm_proxy, pkg_name=dep_name, outdir=outdir,
                                                           configpath=configpath, pkg_version=dep_version)
                if dep_astgen_result:
                    astgen_results.append(dep_astgen_result)
        # check satisfiability of the specified smt formula and dump the corresponding output
        satisfied = StaticAnalyzer._check_smt(astgen_results=astgen_results, configpath=configpath)
        main_astgen_result.pkgs[0].config.smt_satisfied = satisfied
        # TODO: maybe record the suspicious API usage in each dependent package as well
        # dump the astfilter result to file
        write_proto_to_file(proto=main_astgen_result, filename=astfilter_file, binary=False)
def get_astgen_result(self, pm_proxy, pkg_name, outdir, configpath=None, pkg_version=None, cache_only=False):
astgen_fname = pm_proxy.get_astgen_fname(pkg_name=pkg_name, pkg_version=pkg_version)
astgen_file = join(outdir, astgen_fname)
astgen_result = None
if exists(astgen_file):
logging.warning("get_astgen_result: using cached astgen_file %s!", astgen_file)
astgen_result = PkgAstResults()
read_proto_from_file(astgen_result, astgen_file, binary=False)
else:
if cache_only:
logging.warning("skipping unprocessed pkg %s ver %s due to cache_only!", pkg_name, pkg_version)
return astgen_result
# download current package and analyze it
tempdir = tempfile.mkdtemp(prefix='astfilter-')
pm_proxy.download(pkg_name=pkg_name, pkg_version=pkg_version, outdir=tempdir)
tempdir_files = os.listdir(tempdir)
if len(tempdir_files) == 0:
logging.error("fail to download pkg %s ver %s", pkg_name, pkg_version)
else:
pkg_file = join(tempdir, tempdir_files[0])
self.astgen(inpath=pkg_file, outfile=astgen_file, configpath=configpath, pkg_name=pkg_name,
pkg_version=pkg_version)
if exists(astgen_file):
astgen_result = PkgAstResults()
read_proto_from_file(astgen_result, astgen_file, binary=False)
else:
logging.error("fail to run astgen on downloaded package %s", pkg_file)
shutil.rmtree(tempdir)
return astgen_result
@staticmethod
def _get_api_partial_name(ast_node):
if ast_node.full_name == ast_node.name:
return ast_node.name
else:
return "." + ast_node.name
@staticmethod
def _get_partial_name2full_names(ast_nodes):
partial_name2full_name = {}
for ast_node in ast_nodes:
partial_name = StaticAnalyzer._get_api_partial_name(ast_node)
partial_name2full_name.setdefault(partial_name, [])
partial_name2full_name[partial_name].append(ast_node.full_name)
return partial_name2full_name
    @staticmethod
    def _check_smt(astgen_results, configpath=None):
        """Evaluate the config's boolean "smt formula" over the APIs observed.

        Collects the set of APIs used across all astgen results, maps them to
        the numeric API ids from the config, rewrites each numeric literal in
        the formula into a membership test, and evaluates the result.

        Args:
            astgen_results: list of PkgAstResults protobufs (one per package).
            configpath: optional text-format AstLookupConfig path; when None,
                the config embedded in the first result is used.

        Returns:
            bool: whether the formula is satisfied; False for empty input.
        """
        if len(astgen_results) == 0:
            logging.warning("no astgen_results specified, returning False!")
            return False
        # if configpath is not specified, use the config in any of the astgen result, o.w. use configpath
        if configpath:
            configpb = AstLookupConfig()
            read_proto_from_file(configpb, configpath, binary=False)
        else:
            configpb = astgen_results[0].pkgs[0].config
        logging.warning("checking satisfiability of smt formula %s", configpb.smt_formula)
        used_apis = set()
        # FIXME: works if each astgen_result has only one pkg
        # Get the results from the different packages in the astgen results
        for current_package in astgen_results:
            current_package_results = current_package.pkgs[0].api_results
            current_package_config = current_package.pkgs[0].config
            if current_package_results:
                if current_package_config.func_only:
                    # func only match: credit every config API sharing the observed partial name
                    partial_name2full_names = StaticAnalyzer._get_partial_name2full_names(current_package_config.apis)
                    for api_result in current_package_results:
                        partial_name = StaticAnalyzer._get_api_partial_name(api_result)
                        # NOTE(review): raises KeyError if an observed API's partial
                        # name is absent from the config's API list — confirm astgen
                        # only ever reports configured APIs
                        used_apis.update(partial_name2full_names[partial_name])
                else:
                    # full name match
                    for api_result in current_package_results:
                        used_apis.add(api_result.full_name)
        # Transform the names found the astgen results to the numbers used in the formula
        logging.warning("there are %d used apis: %s", len(used_apis), used_apis)
        used_apis_numerical = []
        for current_api in configpb.apis:
            if current_api.full_name in used_apis:
                used_apis_numerical.append(current_api.id)
        # Transform the formula (the variable that will be evaluated is used_apis_numerical):
        # every integer literal N becomes "(N in used_apis_numerical)"
        smt_formula = re.sub(r'(\d+)', r'(\1 in used_apis_numerical)', configpb.smt_formula)
        # SECURITY: eval() executes arbitrary Python — the formula must come only
        # from a trusted config file, never from package/user-controlled input
        satisfied = eval(smt_formula)
        logging.warning("satisfiability = %s", satisfied)
        return satisfied
| 46.767386 | 134 | 0.642344 |
8e91406bcd1b12d509bee48950ca03bd3d9cb67b | 971 | py | Python | External/opencv-2.4.6.1/samples/python2/inpaint.py | simonct/CoreAstro | eafd0aea314c427da616e1707a49aaeaf5ea6991 | [
"OML"
] | 3 | 2015-08-29T06:56:58.000Z | 2016-11-15T10:35:59.000Z | External/opencv-2.4.6.1/samples/python2/inpaint.py | simonct/CoreAstro | eafd0aea314c427da616e1707a49aaeaf5ea6991 | [
"OML"
] | null | null | null | External/opencv-2.4.6.1/samples/python2/inpaint.py | simonct/CoreAstro | eafd0aea314c427da616e1707a49aaeaf5ea6991 | [
"OML"
] | null | null | null | #!/usr/bin/env python
'''
Inpainting sample.
Inpainting repairs damage to images by floodfilling
the damage with surrounding image areas.
Usage:
inpaint.py [<image>]
Keys:
SPACE - inpaint
r - reset the inpainting mask
ESC - exit
'''
import numpy as np
import cv2
from common import Sketcher
if __name__ == '__main__':
    import sys
    # image path from argv[1], falling back to the bundled sample image
    # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit;
    # `except IndexError` would be the precise guard
    try: fn = sys.argv[1]
    except: fn = '../cpp/fruits.jpg'
    print __doc__  # Python 2 print statement (this sample targets Python 2)
    img = cv2.imread(fn)
    img_mark = img.copy()                    # working copy the user draws damage on
    mark = np.zeros(img.shape[:2], np.uint8) # single-channel inpainting mask
    # Sketcher paints white on the image copy and 255 on the mask simultaneously
    sketch = Sketcher('img', [img_mark, mark], lambda : ((255, 255, 255), 255))
    while True:
        ch = 0xFF & cv2.waitKey()
        if ch == 27:  # ESC quits
            break
        if ch == ord(' '):
            # SPACE: inpaint the marked region and show the repaired image
            res = cv2.inpaint(img_mark, mark, 3, cv2.INPAINT_TELEA)
            cv2.imshow('inpaint', res)
        if ch == ord('r'):
            # 'r': restore the original image and clear the mask
            img_mark[:] = img
            mark[:] = 0
            sketch.show()
    cv2.destroyAllWindows()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.