blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
172c104acfeb521fc901ae323095f6c66489f85b | bc47127bf9418d30cd281950c63e1464de2cdbde | /python/server/backends/__init__.py | 7302c710430613194d8931d7bc36a7dfbad4e473 | [
"Apache-2.0"
] | permissive | xiaoyehhuang/searchhub | 96c846458964eefd3a67b11d97f1ad73b242aa55 | b8962ef5ccf4310ced1f6d5ca7f450fa8f97dea5 | refs/heads/master | 2021-01-17T08:56:34.966590 | 2016-08-18T18:00:10 | 2016-08-18T18:00:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,311 | py | from collections import namedtuple
import importlib
from server import app
class Backend(object):
    """Base class for Backend implementations.

    Defines the full interface a searchhub backend must provide: document
    retrieval and search, collection/user/pipeline management, schema
    manipulation, datasource control and schedule management. Every method
    here raises NotImplementedError and must be overridden by a concrete
    subclass (see e.g. server.backends.mock.MockBackend).
    """
    def __init__(self):
        pass
    def toggle_system_metrics(self, enabled=True):
        """Enable or disable collection of system metrics."""
        raise NotImplementedError()
    def set_log_level(self, logLevel="ERROR"):
        """Set the backend's logging verbosity."""
        raise NotImplementedError()
    def send_signal(self, collection_id, payload):
        """
        Send a signal
        :param collection_id: the collection the signal belongs to
        :param payload: the signal body to submit
        """
        raise NotImplementedError()
    def get_document(self, doc_id):
        """
        Fetch a single document from the backend
        :param doc_id: the document's id
        :returns: the document or None
        """
        raise NotImplementedError()
    def find_documents(self, query="*", source=None, author=None, project=None, limit=10, offset=0):
        """
        Filter and/or search for documents in the backend
        :param query: a full text query
        :param source: filter on the document's source field
        :param author: filter on the document's author field
        :param project: filter on the document's project field
        :param limit: limits how many results to return
        :param offset: how deeply into the results to start returning
        :returns: a list of the resulting documents
        """
        raise NotImplementedError()
    def create_collection(self, collection_id, enable_signals=False, enable_search_logs=True, enable_dynamic_schema=True, solr_params=None):
        """Create a collection, optionally enabling signals/search logs/dynamic schema."""
        raise NotImplementedError()
    def create_user(self, user, password):
        """Create a backend user with the given credentials."""
        raise NotImplementedError()
    def create_query_pipeline(self, collection_id, name, pipeline_name):
        """Create a query pipeline for the given collection."""
        raise NotImplementedError()
    def create_taxonomy(self, collection_id, taxonomy):
        """Create a taxonomy on the given collection."""
        raise NotImplementedError()
    def delete_taxonomy(self, collection_id, category=None):
        """Delete a whole taxonomy, or only the given category when specified."""
        raise NotImplementedError()
    def create_or_update_project_pipeline(self, project):
        """Create or update the indexing pipeline associated with a project."""
        raise NotImplementedError()
    def add_field(self, collection_name, name, type="String", required=False, multivalued=False, indexed=True, stored=True, defaultVal=None):
        """Add a field definition to the collection's schema."""
        raise NotImplementedError()
    def add_field_type(self, collection_name, add_field_json):
        """Add a field type (JSON definition) to the collection's schema."""
        raise NotImplementedError()
    def create_or_update_project(self, project):
        """Create or update a project definition."""
        raise NotImplementedError()
    def create_or_update_schedule(self, schedule):
        """Create or update a crawl/indexing schedule."""
        raise NotImplementedError()
    # If schedules is None, then activate all. If specified, only activate
    # those schedules that match.
    def activate_schedules(self, schedules=None, searchHubOnly=True):
        """Activate schedules; all of them when *schedules* is None."""
        raise NotImplementedError()
    def stop_schedules(self, schedules=None, searchHubOnly=True):
        """Stop schedules; all of them when *schedules* is None."""
        raise NotImplementedError()
    def get_role(self, rolename):
        """Fetch a role definition by name."""
        raise NotImplementedError()
    def update_role(self, rolename, data):
        """Update the named role with the given data."""
        raise NotImplementedError()
    def get_datasource(self, id):
        """
        Retrieve a datasource from the backend
        :param id: the datasource's id
        :returns: the datasource or None
        """
        raise NotImplementedError()
    def add_request_handler(self, collection_name, add_req_handler_json):
        """Add a request handler (JSON definition) to the collection."""
        raise NotImplementedError()
    def remove_request_handler(self, collection_name, req_handler_name):
        """Remove the named request handler from the collection."""
        raise NotImplementedError()
    def add_search_component(self, collection_name, add_search_component_json):
        """Add a search component (JSON definition) to the collection."""
        raise NotImplementedError()
    def remove_search_component(self, collection_name, component_name):
        """Remove the named search component from the collection."""
        raise NotImplementedError()
    def set_property(self, collection_name, data):
        """Set a collection-level property."""
        raise NotImplementedError()
    def unset_property(self, collection_name, data):
        """Unset a collection-level property."""
        raise NotImplementedError()
    def stop_datasource(self, id, abort=False):
        """Stop a running datasource; abort the in-flight job when requested."""
        raise NotImplementedError()
    def stop_datasources(self):
        """Stop all running datasources."""
        raise NotImplementedError()
    def update_datasource(self, id, **config):
        """
        Update a datasource in the backend
        :param id: the datasource's id
        :param config: the datasource config as a dictionary
        :returns: the updated datasource
        """
        raise NotImplementedError()
# Some model objects
# Document: immutable record describing a single indexed document, as
# returned by Backend.get_document / Backend.find_documents.
Document = namedtuple("Document", ["id", "author", "source", "project", "content", "created_at", "link"])
def get_backend():
    """Instantiate the Backend implementation named in the app config.

    Reads the dotted class path from ``app.config["BACKEND"]`` (falling back
    to the mock backend), imports the module and returns a fresh instance.
    """
    dotted_path = app.config.get("BACKEND", "server.backends.mock.MockBackend")
    module_name, _, class_name = dotted_path.rpartition('.')
    backend_cls = getattr(importlib.import_module(module_name), class_name)
    return backend_cls()
| [
"gsingers@apache.org"
] | gsingers@apache.org |
3ad3f271e1638aeab5f1a60f9e46cbf4d55b64e0 | a3faf585ac766da428ee896e6c70c39ecc22ce1f | /xy/planner.py | b4be54c6910ff99f946e9c2aa08bc9b5ab70185d | [] | no_license | RolandJuno/xy | dcab6c0682cda79ffd6b5fb6cb8365390421f784 | 1079175b9a2f58c72fd94520908ebbaf81585037 | refs/heads/master | 2020-04-05T04:11:02.909464 | 2019-11-13T22:05:16 | 2019-11-13T22:05:16 | 50,703,647 | 7 | 1 | null | 2016-01-30T01:56:42 | 2016-01-30T01:56:42 | null | UTF-8 | Python | false | false | 5,970 | py | from hashindex import Index
from math import hypot
import anneal
import random
def sort_paths_greedy(paths, reversable=True):
    """Order paths with a nearest-neighbour heuristic.

    Starts from the path whose first point has the largest y coordinate, then
    repeatedly jumps to the path whose endpoint is closest to the end of the
    previously chosen one. When *reversable* is true a path may be traversed
    backwards. NOTE: the starting path is removed from the input list.
    """
    start = max(paths, key=lambda p: p[0][1])
    paths.remove(start)
    ordered = [start]
    endpoints = []
    for candidate in paths:
        # Register the forward entry point, plus the reversed one if allowed.
        endpoints.append((candidate[0][0], candidate[0][1], candidate, False))
        if reversable:
            endpoints.append((candidate[-1][0], candidate[-1][1], candidate, True))
    index = Index(endpoints)
    while index.size:
        # Nearest remaining endpoint to where the pen currently sits.
        _x, _y, candidate, flipped = index.search(ordered[-1][-1])
        index.remove((candidate[0][0], candidate[0][1], candidate, False))
        if reversable:
            index.remove((candidate[-1][0], candidate[-1][1], candidate, True))
        ordered.append(list(reversed(candidate)) if flipped else candidate)
    return ordered
def sort_paths(paths, iterations=100000, reversable=True):
    """Reorder a set of 2D polylines to minimize pen-up travel.

    Re-orders (and, when *reversable* is true, possibly reverses) the given
    paths so the distance from the end of one path to the start of the next
    is small -- useful for plotters, where that gap is wasted movement.
    Simulated annealing is used; more *iterations* improves the result at
    the cost of runtime. A perfect ordering is not required: on random input
    the extra distance typically drops to ~25 percent of its original value.
    """
    state = Model(list(paths), reversable)
    hottest = anneal.get_max_temp(state, 10000)
    coolest = hottest / 1000.0
    annealed = anneal.anneal(state, hottest, coolest, iterations)
    # Materialize the reverse flags by flipping the paths in place.
    for polyline, flipped in zip(annealed.paths, annealed.reverse):
        if flipped:
            polyline.reverse()
    return annealed.paths
def sort_points(points, iterations=100000):
    """Order individual points to minimize travel between them.

    Wraps each point in a one-point path and delegates to sort_paths with
    reversal disabled -- effectively a traveling-salesman optimization.
    """
    singletons = [[point] for point in points]
    return [path[0] for path in sort_paths(singletons, iterations, False)]
class Model(object):
    """Annealing state: an ordering of paths plus per-path reverse flags.

    Maintains distances[i], the pen-up travel between the end of paths[i]
    and the start of paths[i + 1] (honouring each path's reverse flag), and
    their running sum total_distance. Mutations update only the affected
    gaps, so energy() stays O(1) per move.
    """
    def __init__(self, paths, reversable=True, reverse=None, distances=None, total_distance=None):
        self.paths = paths
        self.reversable = reversable
        self.reverse = reverse or [False] * len(self.paths)
        if distances:
            # Restoring from copy(): trust the caller-supplied bookkeeping.
            self.total_distance = total_distance or 0
            self.distances = distances
        else:
            # Fresh model: compute every gap distance from scratch.
            self.total_distance = 0
            self.distances = [0] * (len(paths) - 1)
            self.add_distances(range(len(self.distances)))
    def subtract_distances(self, indexes):
        # Remove the given gaps from the running total. Out-of-range indexes
        # are ignored so callers can pass i-1/i blindly at the list ends.
        n = len(self.distances)
        for i in indexes:
            if i >= 0 and i < n:
                self.total_distance -= self.distances[i]
    def add_distances(self, indexes):
        # Recompute the given gaps and fold them back into the running total.
        n = len(self.distances)
        for i in indexes:
            if i < 0 or i >= n:
                continue
            j = i + 1
            # Effective end point of paths[i]: its first point when reversed.
            if self.reverse[i]:
                x1, y1 = self.paths[i][0]
            else:
                x1, y1 = self.paths[i][-1]
            # Effective start point of paths[j]: its last point when reversed.
            if self.reverse[j]:
                x2, y2 = self.paths[j][-1]
            else:
                x2, y2 = self.paths[j][0]
            self.distances[i] = hypot(x2 - x1, y2 - y1)
            self.total_distance += self.distances[i]
    def energy(self):
        # return the total extra distance for this ordering
        return self.total_distance
    def do_move(self):
        # Apply one random mutation; returns an undo token (mode, i, j).
        if self.reversable and random.random() < 0.25:
            # mutate by reversing a random path
            n = len(self.paths) - 1
            i = random.randint(0, n)
            indexes = [i - 1, i]
            self.subtract_distances(indexes)
            self.reverse[i] = not self.reverse[i]
            self.add_distances(indexes)
            return (1, i, 0)
        else:
            # mutate by swapping two random paths
            n = len(self.paths) - 1
            i = random.randint(0, n)
            j = random.randint(0, n)
            # A set de-duplicates overlapping gaps when i and j are adjacent.
            indexes = set([i - 1, i, j - 1, j])
            self.subtract_distances(indexes)
            self.paths[i], self.paths[j] = self.paths[j], self.paths[i]
            self.add_distances(indexes)
            return (0, i, j)
    def undo_move(self, undo):
        # undo the previous mutation
        mode, i, j = undo
        if mode == 0:
            # Swapping the same two slots again is its own inverse.
            indexes = set([i - 1, i, j - 1, j])
            self.subtract_distances(indexes)
            self.paths[i], self.paths[j] = self.paths[j], self.paths[i]
            self.add_distances(indexes)
        else:
            # Flip the reverse flag back.
            indexes = [i - 1, i]
            self.subtract_distances(indexes)
            self.reverse[i] = not self.reverse[i]
            self.add_distances(indexes)
    def copy(self):
        # make a copy of the model
        return Model(
            list(self.paths), self.reversable, list(self.reverse),
            list(self.distances), self.total_distance)
def test(n_paths, n_iterations, seed=None):
    """Measure optimizer quality on random unit-square segments.

    Returns the optimized extra travel as a percentage of the unoptimized
    total. n_iterations=None exercises the greedy heuristic instead of
    annealing; *seed* makes runs reproducible.
    """
    random.seed(seed)
    segments = []
    for _ in range(n_paths):
        # Four random() calls per segment, in the same order as before.
        start = (random.random(), random.random())
        end = (random.random(), random.random())
        segments.append([start, end])
    before = Model(segments).energy()
    if n_iterations:
        segments = sort_paths(segments, n_iterations)
    else:
        segments = sort_paths_greedy(segments)
    after = Model(segments).energy()
    return 100.0 * after / before
if __name__ == '__main__':
    # test the module
    # Sweep problem sizes and iteration budgets; a budget of None exercises
    # the greedy baseline. Seed 123 keeps runs comparable across budgets.
    for n_paths in [10, 100, 1000, 10000]:
        for n_iterations in [None, 10, 100, 1000, 10000, 100000, 1000000]:
            pct = test(n_paths, n_iterations, 123)
            # Python 2 print statement: this module predates Python 3.
            print n_paths, n_iterations, pct
| [
"fogleman@gmail.com"
] | fogleman@gmail.com |
1b5642582133e3969680bf021cd3ea002273f95a | 4d6f0bf3ad2fc6af32b650a958012ae86b3732b3 | /leetcode_2022/udemy_leetcode/19.validateBST.py | 52be031c6a69e1506506820bc919cedc2c96699a | [] | no_license | andrewskej/algo_data | ec6ebf5c5467e0c2b62c6f1aefcf8cf5409916a5 | 800de3278037531be621377178db3172dc2464f6 | refs/heads/master | 2022-11-14T06:49:49.257614 | 2022-10-31T19:57:18 | 2022-10-31T19:57:18 | 188,797,507 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 894 | py | class Solution(object):
def minElem(self, root):
if not root:
return float('inf')
if not root.left and not root.right:
return root.val
return min(root.val, self.minElem(root.left), self.minElem(root.right))
def maxElem(self, root):
if not root:
return float('-inf')
if not root.left and not root.right:
return root.val
return max(root.val, self.maxElem(root.left), self.maxElem(root.right))
def isValidBST(self, root):
if not root or not(root.left or root.right):
return True
validRoot = root.val > self.maxElem(root.left) and root.val < self.minElem(root.right)
validLeft = self.isValidBST(root.left)
validRight = self.isValidBST(root.right)
return validRoot and validLeft and validRight | [
"andrewskej@gmail.com"
] | andrewskej@gmail.com |
62ae0501c93af87b8ce1095e97f72f13be31114f | f661d8218788742742ab301440c1583a43d79199 | /authors.py | 357cc99ff76b785f4dc4dde9be1ba2589f336b99 | [] | no_license | bartekbrak/wi-git-tools | 35cbd2422668ea544a221847d9f04f26b37c9147 | 5236ba1dbd9df596f6b8be6721ecd5d8bbc4c923 | refs/heads/master | 2020-04-27T11:53:43.784946 | 2015-06-16T06:43:10 | 2015-06-16T06:43:10 | 174,313,113 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,324 | py | """
Display information about commits on remote branches. Useful to detect orphaned
code.
"""
import argparse
import re
from blessings import Terminal
from git import Repo
from git_common import info
# Git repository rooted at the current working directory.
repo = Repo()
# blessings terminal handle (kept at module scope; not referenced below).
t = Terminal()
# Remote refs to skip: symbolic "->" entries, master/develop and HEAD.
WRONG_BRANCH = re.compile('->|/master|HEAD|/develop')
# It would be much more readable and interesting to not use the git formatting
# but retrieve all data and format in python
# git --pretty format: yellow short hash, relative date, bold green author,
# bold subject (%x09 = tab, %C(...) = colour codes).
format_ = (
    '%C(yellow)%h%x09%C(reset)%ar %C(green bold)'
    '%an%x09%x09%C(reset)%C(bold)%s%C(reset)'
)
def parse_args():
    """Build and evaluate the command-line parser.

    The module docstring doubles as the --help description; the raw-text
    formatter keeps its line breaks intact.
    """
    cli = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter,
    )
    return cli.parse_args()
def remote_branches_info():
    """Print each remote branch with its commits missing from origin/master."""
    info('Remote branches:')
    # All remote refs except symbolic "->" entries, master/develop and HEAD.
    remotes = [
        remote.strip()
        for remote
        in repo.git.branch(remotes=True).split('\n')
        if not WRONG_BRANCH.search(remote)
    ]
    for branch in remotes:
        print(branch)
        # Commits reachable from the branch but not from origin/master,
        # excluding merges, rendered with the colourized format_ above.
        print(repo.git.log(
            branch,
            '^origin/master',
            no_merges=True,
            pretty='format:%s' % format_
        ))
        # Python 2 bare print statement: emits a blank separator line.
        print
def main():
    """Entry point: parse CLI options, prune-fetch origin, print the report."""
    # Stored at module level so other helpers could consult the options.
    global args
    args = parse_args()
    info('Working on %s' % repo)
    # p=True -> git fetch --prune: drop refs that were deleted on the remote.
    repo.remotes.origin.fetch(p=True)
    remote_branches_info()
if __name__ == '__main__':
    # Run only when executed as a script, not on import.
    main()
| [
"bartek.r@webinterpret.com"
] | bartek.r@webinterpret.com |
b2800c986371d3fb2bdede2f95c9a609029981f8 | 256600e431b69f18e9d8fc9327087a2fe6bfbea1 | /category/serializers.py | ce481d300eaa1be46ebd48a16a51c72e510ce944 | [] | no_license | IsaacDremaster/practice | 0a142e4dfcac94289c93501b6d114d946754f007 | 392ce827ced0046d2e2a0398953287d7c8ce37c3 | refs/heads/master | 2023-04-12T20:28:12.833315 | 2021-05-08T16:25:45 | 2021-05-08T16:25:45 | 365,560,060 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 199 | py | from .models import Category
from rest_framework import serializers
class CategorySerializer(serializers.ModelSerializer):
    """Serializer exposing every field of the Category model."""
    class Meta:
        model = Category
        # Bug fix: DRF expects the magic string '__all__' itself (or an
        # explicit list/tuple of real field names). The previous value
        # ('__all__',) is a tuple and is interpreted as a field literally
        # named '__all__', which fails when the serializer is used.
        fields = '__all__'
| [
"iskhak.dosmatov@gmail.com"
] | iskhak.dosmatov@gmail.com |
ff053c7af547706471c802a09fb2b03078714f37 | 3a0336f8ba841f6076f412dfb6de1af9ac946efd | /azure/multiapi/storagev2/fileshare/v2019_07_07/_models.py | 1488fcea6ebdc1c95ef6a44eccad5db9309bf5b8 | [
"MIT",
"LicenseRef-scancode-generic-cla"
] | permissive | Azure/azure-multiapi-storage-python | 4291579aa1f47d4b74557267558bd5029e01e12c | 650ef33ad683a5f2aba590c4553f9871bfa0dd93 | refs/heads/master | 2023-09-03T22:27:16.816305 | 2023-06-01T07:37:02 | 2023-06-01T07:37:02 | 94,827,841 | 4 | 17 | MIT | 2023-06-01T07:37:03 | 2017-06-19T22:58:21 | Python | UTF-8 | Python | false | false | 40,637 | py | # -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
# pylint: disable=too-few-public-methods, too-many-instance-attributes
# pylint: disable=super-init-not-called, too-many-lines
from azure.core.paging import PageIterator
from ._parser import _parse_datetime_from_str
from ._shared.response_handlers import return_context_and_deserialized, process_storage_error
from ._shared.models import DictMixin, get_enum_value
from ._generated.models import StorageErrorException
from ._generated.models import Metrics as GeneratedMetrics
from ._generated.models import RetentionPolicy as GeneratedRetentionPolicy
from ._generated.models import CorsRule as GeneratedCorsRule
from ._generated.models import AccessPolicy as GenAccessPolicy
from ._generated.models import DirectoryItem
def _wrap_item(item):
    """Convert a generated listing entry into a plain dict.

    Directory entries carry only a name; file entries also expose their
    content length as 'size'.
    """
    if isinstance(item, DirectoryItem):
        return {'name': item.name, 'is_directory': True}
    return {
        'name': item.name,
        'size': item.properties.content_length,
        'is_directory': False,
    }
class Metrics(GeneratedMetrics):
    """Request statistics, aggregated per hour or minute, for the File service.

    :keyword str version: Version of Storage Analytics to configure
        (defaults to u'1.0').
    :keyword bool enabled: Whether metrics are enabled for the File service
        (defaults to False).
    :keyword bool include_apis: Whether to generate summary statistics for
        called API operations.
    :keyword ~azure.storage.fileshare.RetentionPolicy retention_policy:
        Determines how long the associated data should persist.
    """

    def __init__(self, **kwargs):
        self.version = kwargs.get('version', u'1.0')
        self.enabled = kwargs.get('enabled', False)
        self.include_apis = kwargs.get('include_apis')
        # A missing/falsy policy falls back to a disabled RetentionPolicy.
        self.retention_policy = kwargs.get('retention_policy') or RetentionPolicy()

    @classmethod
    def _from_generated(cls, generated):
        # Map the autorest-generated model onto this public type.
        if not generated:
            return cls()
        policy = RetentionPolicy._from_generated(generated.retention_policy)  # pylint: disable=protected-access
        return cls(
            version=generated.version,
            enabled=generated.enabled,
            include_apis=generated.include_apis,
            retention_policy=policy,
        )
class RetentionPolicy(GeneratedRetentionPolicy):
    """How long metrics/logging data is kept before being deleted.

    :param bool enabled: Whether a retention policy is enabled for the
        storage service.
    :param int days: Number of days to retain data; anything older is
        deleted. Required when *enabled* is true.
    :raises ValueError: If *enabled* is true but *days* was not supplied.
    """

    def __init__(self, enabled=False, days=None):
        self.enabled = enabled
        self.days = days
        if enabled and days is None:
            raise ValueError("If policy is enabled, 'days' must be specified.")

    @classmethod
    def _from_generated(cls, generated):
        if not generated:
            return cls()
        return cls(enabled=generated.enabled, days=generated.days)
class CorsRule(GeneratedCorsRule):
    """A single CORS (cross-origin resource sharing) rule.

    CORS lets a web application in one domain call this service's APIs from
    another (origin) domain despite the browser's same-origin policy.

    :param list(str) allowed_origins: Origin domains allowed via CORS, or
        "*" for all. At least one entry; up to 64 domains of up to 256
        characters each.
    :param list(str) allowed_methods: HTTP methods the origin may execute
        (DELETE, GET, HEAD, MERGE, POST, OPTIONS or PUT). At least one entry.
    :keyword list(str) allowed_headers: Headers allowed in the cross-origin
        request (up to 64 defined + 2 prefixed, 256 chars each). Default [].
    :keyword list(str) exposed_headers: Response headers exposed to CORS
        clients (up to 64 defined + 2 prefixed, 256 chars each). Default [].
    :keyword int max_age_in_seconds: How long the client/browser may cache a
        preflight response. Default 0.
    """

    def __init__(self, allowed_origins, allowed_methods, **kwargs):
        # The service wire format wants comma-delimited strings, not lists.
        join_csv = ','.join
        self.allowed_origins = join_csv(allowed_origins)
        self.allowed_methods = join_csv(allowed_methods)
        self.allowed_headers = join_csv(kwargs.get('allowed_headers', []))
        self.exposed_headers = join_csv(kwargs.get('exposed_headers', []))
        self.max_age_in_seconds = kwargs.get('max_age_in_seconds', 0)

    @classmethod
    def _from_generated(cls, generated):
        # Generated values are already comma-joined strings; wrap each in a
        # single-element list so __init__'s join reproduces them unchanged.
        return cls(
            [generated.allowed_origins],
            [generated.allowed_methods],
            allowed_headers=[generated.allowed_headers],
            exposed_headers=[generated.exposed_headers],
            max_age_in_seconds=generated.max_age_in_seconds,
        )
class AccessPolicy(GenAccessPolicy):
    """A stored access policy for shared access signatures.

    A stored access policy can carry the start time, expiry time and
    permissions of the SAS tokens that reference it. Any of those fields may
    be kept in the policy and omitted from the SAS URL, which allows the
    tokens' behaviour to be changed -- or revoked -- after they are issued;
    alternatively all fields may live on the URL, in which case the policy
    can only revoke, not modify. A field supplied in both places makes the
    request fail with 400 (Bad Request), and together the SAS and its policy
    must provide every field required to authenticate the signature.

    :param permission: Permissions granted to the SAS holder. Required
        unless a referenced stored access policy provides it.
    :type permission: str or ~azure.storage.fileshare.FileSasPermissions or
        ~azure.storage.fileshare.ShareSasPermissions
    :param expiry: When the signature becomes invalid. Required unless a
        referenced stored access policy provides it. Naive datetimes are
        assumed to be UTC.
    :type expiry: ~datetime.datetime or str
    :param start: When the signature becomes valid; defaults to the time the
        service receives the request. Naive datetimes are assumed to be UTC.
    :type start: ~datetime.datetime or str
    """

    def __init__(self, permission=None, expiry=None, start=None):
        self.permission = permission
        self.expiry = expiry
        self.start = start
class LeaseProperties(DictMixin):
    """Lease-related properties of a file.

    :ivar str status: Lease status of the file: locked|unlocked.
    :ivar str state: Lease state: available|leased|expired|breaking|broken.
    :ivar str duration: Whether an acquired lease is of infinite or fixed
        duration.
    """

    def __init__(self, **kwargs):
        # Values arrive as raw x-ms-* response headers.
        self.status = get_enum_value(kwargs.get('x-ms-lease-status'))
        self.state = get_enum_value(kwargs.get('x-ms-lease-state'))
        self.duration = get_enum_value(kwargs.get('x-ms-lease-duration'))

    @classmethod
    def _from_generated(cls, generated):
        props = generated.properties
        instance = cls()
        instance.status = get_enum_value(props.lease_status)
        instance.state = get_enum_value(props.lease_state)
        instance.duration = get_enum_value(props.lease_duration)
        return instance
class ContentSettings(DictMixin):
    """HTTP content settings stored with a file.

    :param str content_type: MIME type; the service defaults to
        application/octet-stream when none was specified.
    :param str content_encoding: Stored Content-Encoding, if previously set.
    :param str content_language: Stored Content-Language, if previously set.
    :param str content_disposition: Conveys how the payload should be
        processed and can carry extra metadata.
    :param str cache_control: Stored Cache-Control, if previously set.
    :param str content_md5: MD5 of the content, for integrity checking.
    """

    def __init__(
            self, content_type=None, content_encoding=None,
            content_language=None, content_disposition=None,
            cache_control=None, content_md5=None, **kwargs):
        # Explicit arguments win; otherwise fall back to raw response headers.
        self.content_type = content_type or kwargs.get('Content-Type')
        self.content_encoding = content_encoding or kwargs.get('Content-Encoding')
        self.content_language = content_language or kwargs.get('Content-Language')
        self.content_md5 = content_md5 or kwargs.get('Content-MD5')
        self.content_disposition = content_disposition or kwargs.get('Content-Disposition')
        self.cache_control = cache_control or kwargs.get('Cache-Control')

    @classmethod
    def _from_generated(cls, generated):
        # Empty strings from the generated model are normalized to None.
        props = generated.properties
        instance = cls()
        instance.content_type = props.content_type or None
        instance.content_encoding = props.content_encoding or None
        instance.content_language = props.content_language or None
        instance.content_md5 = props.content_md5 or None
        instance.content_disposition = props.content_disposition or None
        instance.cache_control = props.cache_control or None
        return instance
class ShareProperties(DictMixin):
    """Properties of a file share.

    :ivar str name: Name of the share.
    :ivar ~datetime.datetime last_modified: When the share was last modified.
    :ivar str etag: ETag value usable for conditional operations.
    :ivar int quota: The allocated quota.
    :ivar dict metadata: Name/value pairs associated with the share.
    :ivar str snapshot: Snapshot identifier of the share, if any.
    """

    def __init__(self, **kwargs):
        # Populated either from raw response headers (here) or from a
        # generated list item (_from_generated below).
        self.name = None
        self.last_modified = kwargs.get('Last-Modified')
        self.etag = kwargs.get('ETag')
        self.quota = kwargs.get('x-ms-share-quota')
        self.next_allowed_quota_downgrade_time = kwargs.get('x-ms-share-next-allowed-quota-downgrade-time')
        self.metadata = kwargs.get('metadata')
        self.snapshot = None
        self.provisioned_egress_mbps = kwargs.get('x-ms-share-provisioned-egress-mbps')
        self.provisioned_ingress_mbps = kwargs.get('x-ms-share-provisioned-ingress-mbps')
        self.provisioned_iops = kwargs.get('x-ms-share-provisioned-iops')

    @classmethod
    def _from_generated(cls, generated):
        gen_props = generated.properties
        instance = cls()
        instance.name = generated.name
        instance.last_modified = gen_props.last_modified
        instance.etag = gen_props.etag
        instance.quota = gen_props.quota
        instance.next_allowed_quota_downgrade_time = gen_props.next_allowed_quota_downgrade_time
        instance.metadata = generated.metadata
        instance.snapshot = generated.snapshot
        instance.provisioned_egress_mbps = gen_props.provisioned_egress_mbps
        instance.provisioned_ingress_mbps = gen_props.provisioned_ingress_mbps
        instance.provisioned_iops = gen_props.provisioned_iops
        return instance
class SharePropertiesPaged(PageIterator):
    """An iterable of Share properties.
    :ivar str service_endpoint: The service URL.
    :ivar str prefix: A file name prefix being used to filter the list.
    :ivar str marker: The continuation token of the current page of results.
    :ivar int results_per_page: The maximum number of results retrieved per API call.
    :ivar str continuation_token: The continuation token to retrieve the next page of results.
    :ivar str location_mode: The location mode being used to list results. The available
        options include "primary" and "secondary".
    :ivar current_page: The current page of listed results.
    :vartype current_page: list(~azure.storage.fileshare.ShareProperties)
    :param callable command: Function to retrieve the next page of items.
    :param str prefix: Filters the results to return only shares whose names
        begin with the specified prefix.
    :param int results_per_page: The maximum number of share names to retrieve per
        call.
    :param str continuation_token: An opaque continuation token.
    """
    def __init__(self, command, prefix=None, results_per_page=None, continuation_token=None):
        # Wire this object's callbacks into the azure-core PageIterator
        # protocol: get_next fetches a raw page, extract_data deserializes it.
        super(SharePropertiesPaged, self).__init__(
            get_next=self._get_next_cb,
            extract_data=self._extract_data_cb,
            continuation_token=continuation_token or ""
        )
        self._command = command
        self.service_endpoint = None
        self.prefix = prefix
        self.marker = None
        self.results_per_page = results_per_page
        self.location_mode = None
        self.current_page = []
    def _get_next_cb(self, continuation_token):
        # Fetch one service page; translate generated-layer errors into the
        # public azure.core exception types.
        try:
            return self._command(
                marker=continuation_token or None,
                maxresults=self.results_per_page,
                prefix=self.prefix,
                cls=return_context_and_deserialized,
                use_location=self.location_mode)
        except StorageErrorException as error:
            process_storage_error(error)
    def _extract_data_cb(self, get_next_return):
        # Unpack (location_mode, deserialized response), cache paging state,
        # and convert each generated share item into a public ShareProperties.
        self.location_mode, self._response = get_next_return
        self.service_endpoint = self._response.service_endpoint
        self.prefix = self._response.prefix
        self.marker = self._response.marker
        self.results_per_page = self._response.max_results
        self.current_page = [ShareProperties._from_generated(i) for i in self._response.share_items] # pylint: disable=protected-access
        return self._response.next_marker or None, self.current_page
class Handle(DictMixin):
    """An open SMB handle on a file or directory.

    :keyword str handle_id: XSMB service handle ID.
    :keyword str path: File or directory name, as a full path from the
        share root.
    :keyword str file_id: Uniquely identifies the file or directory.
    :keyword str parent_id: Uniquely identifies the parent directory of the
        object.
    :keyword str session_id: SMB session in whose context the handle was
        opened.
    :keyword str client_ip: Client IP that opened the handle.
    :keyword ~datetime.datetime open_time: When the session that previously
        opened the handle last reconnected (UTC).
    :keyword ~datetime.datetime last_reconnect_time: When the handle itself
        was last connected to (UTC).
    """

    def __init__(self, **kwargs):
        self.id = kwargs.get('handle_id')
        self.path = kwargs.get('path')
        self.file_id = kwargs.get('file_id')
        self.parent_id = kwargs.get('parent_id')
        self.session_id = kwargs.get('session_id')
        self.client_ip = kwargs.get('client_ip')
        self.open_time = kwargs.get('open_time')
        self.last_reconnect_time = kwargs.get('last_reconnect_time')

    @classmethod
    def _from_generated(cls, generated):
        # Delegate to __init__, which maps handle_id onto the public `id`.
        return cls(
            handle_id=generated.handle_id,
            path=generated.path,
            file_id=generated.file_id,
            parent_id=generated.parent_id,
            session_id=generated.session_id,
            client_ip=generated.client_ip,
            open_time=generated.open_time,
            last_reconnect_time=generated.last_reconnect_time,
        )
class HandlesPaged(PageIterator):
    """An iterable of Handles.
    :ivar str marker: The continuation token of the current page of results.
    :ivar int results_per_page: The maximum number of results retrieved per API call.
    :ivar str continuation_token: The continuation token to retrieve the next page of results.
    :ivar str location_mode: The location mode being used to list results. The available
        options include "primary" and "secondary".
    :ivar current_page: The current page of listed results.
    :vartype current_page: list(~azure.storage.fileshare.Handle)
    :param callable command: Function to retrieve the next page of items.
    :param int results_per_page: The maximum number of handles to retrieve per
        call.
    :param str continuation_token: An opaque continuation token.
    """
    def __init__(self, command, results_per_page=None, continuation_token=None):
        # Wire this object's callbacks into the azure-core PageIterator
        # protocol: get_next fetches a raw page, extract_data deserializes it.
        super(HandlesPaged, self).__init__(
            get_next=self._get_next_cb,
            extract_data=self._extract_data_cb,
            continuation_token=continuation_token or ""
        )
        self._command = command
        self.marker = None
        self.results_per_page = results_per_page
        self.location_mode = None
        self.current_page = []
    def _get_next_cb(self, continuation_token):
        # Fetch one service page; translate generated-layer errors into the
        # public azure.core exception types.
        try:
            return self._command(
                marker=continuation_token or None,
                maxresults=self.results_per_page,
                cls=return_context_and_deserialized,
                use_location=self.location_mode)
        except StorageErrorException as error:
            process_storage_error(error)
    def _extract_data_cb(self, get_next_return):
        # Unpack (location_mode, deserialized response) and convert each
        # generated handle item into a public Handle model.
        self.location_mode, self._response = get_next_return
        self.current_page = [Handle._from_generated(h) for h in self._response.handle_list] # pylint: disable=protected-access
        return self._response.next_marker or None, self.current_page
class DirectoryProperties(DictMixin):
    """Directory's properties class.

    :ivar str name:
        The name of the directory.
    :ivar ~datetime.datetime last_modified:
        A datetime object representing the last time the directory was modified.
    :ivar str etag:
        The ETag contains a value that you can use to perform operations
        conditionally.
    :ivar bool server_encrypted:
        Whether encryption is enabled.
    :keyword dict metadata: A dict with name_value pairs to associate with the
        directory as metadata.
    :ivar change_time: Change time for the file.
    :vartype change_time: str or ~datetime.datetime
    :ivar creation_time: Creation time for the file.
    :vartype creation_time: str or ~datetime.datetime
    :ivar last_write_time: Last write time for the file.
    :vartype last_write_time: str or ~datetime.datetime
    :ivar file_attributes:
        The file system attributes for files and directories.
    :vartype file_attributes: str or :class:`~azure.storage.fileshare.NTFSAttributes`
    :ivar permission_key: Key of the permission to be set for the
        directory/file.
    :vartype permission_key: str
    :ivar file_id: Required. FileId uniquely identifies the file or
        directory.
    :vartype file_id: str
    :ivar parent_id: ParentId uniquely identifies the parent directory of the
        object.
    :vartype parent_id: str
    """
    def __init__(self, **kwargs):
        # name is not passed in kwargs; it is set by _from_generated (or
        # by listing code) after construction.
        self.name = None
        self.last_modified = kwargs.get('Last-Modified')
        self.etag = kwargs.get('ETag')
        self.server_encrypted = kwargs.get('x-ms-server-encrypted')
        self.metadata = kwargs.get('metadata')
        # SMB timestamp headers are parsed from strings into datetimes.
        self.change_time = _parse_datetime_from_str(kwargs.get('x-ms-file-change-time'))
        self.creation_time = _parse_datetime_from_str(kwargs.get('x-ms-file-creation-time'))
        self.last_write_time = _parse_datetime_from_str(kwargs.get('x-ms-file-last-write-time'))
        self.file_attributes = kwargs.get('x-ms-file-attributes')
        self.permission_key = kwargs.get('x-ms-file-permission-key')
        self.file_id = kwargs.get('x-ms-file-id')
        self.parent_id = kwargs.get('x-ms-file-parent-id')

    @classmethod
    def _from_generated(cls, generated):
        # Build from the autogenerated listing item. Only the fields below
        # are populated; timestamps/SMB fields keep their __init__ defaults.
        props = cls()
        props.name = generated.name
        props.last_modified = generated.properties.last_modified
        props.etag = generated.properties.etag
        props.server_encrypted = generated.properties.server_encrypted
        props.metadata = generated.metadata
        return props
class DirectoryPropertiesPaged(PageIterator):
    """An iterable for the contents of a directory.

    This iterable will yield dicts for the contents of the directory. The dicts
    will have the keys 'name' (str) and 'is_directory' (bool).
    Items that are files (is_directory=False) will have an additional 'content_length' key.

    :ivar str service_endpoint: The service URL.
    :ivar str prefix: A file name prefix being used to filter the list.
    :ivar str marker: The continuation token of the current page of results.
    :ivar int results_per_page: The maximum number of results retrieved per API call.
    :ivar str continuation_token: The continuation token to retrieve the next page of results.
    :ivar str location_mode: The location mode being used to list results. The available
        options include "primary" and "secondary".
    :ivar current_page: The current page of listed results.
    :vartype current_page: list(dict(str, Any))

    :param callable command: Function to retrieve the next page of items.
    :param str prefix: Filters the results to return only directories whose names
        begin with the specified prefix.
    :param int results_per_page: The maximum number of share names to retrieve per
        call.
    :param str continuation_token: An opaque continuation token.
    """
    def __init__(self, command, prefix=None, results_per_page=None, continuation_token=None):
        super(DirectoryPropertiesPaged, self).__init__(
            get_next=self._get_next_cb,
            extract_data=self._extract_data_cb,
            continuation_token=continuation_token or ""
        )
        self._command = command
        self.service_endpoint = None
        self.prefix = prefix
        self.marker = None
        self.results_per_page = results_per_page
        self.location_mode = None
        self.current_page = []

    def _get_next_cb(self, continuation_token):
        # Fetch the next raw listing page from the service.
        try:
            return self._command(
                marker=continuation_token or None,
                prefix=self.prefix,
                maxresults=self.results_per_page,
                cls=return_context_and_deserialized,
                use_location=self.location_mode)
        except StorageErrorException as error:
            process_storage_error(error)

    def _extract_data_cb(self, get_next_return):
        # Record paging metadata, then wrap directory items first and
        # file items second — so directories precede files in each page.
        self.location_mode, self._response = get_next_return
        self.service_endpoint = self._response.service_endpoint
        self.prefix = self._response.prefix
        self.marker = self._response.marker
        self.results_per_page = self._response.max_results
        self.current_page = [_wrap_item(i) for i in self._response.segment.directory_items]
        self.current_page.extend([_wrap_item(i) for i in self._response.segment.file_items])
        return self._response.next_marker or None, self.current_page
class FileProperties(DictMixin):
    """File's properties class.

    :ivar str name:
        The name of the file.
    :ivar str path:
        The path of the file.
    :ivar str share:
        The name of share.
    :ivar str snapshot:
        File snapshot.
    :ivar int content_length:
        Size of file in bytes.
    :ivar dict metadata: A dict with name_value pairs to associate with the
        file as metadata.
    :ivar str file_type:
        Type of the file.
    :ivar ~datetime.datetime last_modified:
        A datetime object representing the last time the file was modified.
    :ivar str etag:
        The ETag contains a value that you can use to perform operations
        conditionally.
    :ivar int size:
        Size of file in bytes.
    :ivar str content_range:
        The range of bytes.
    :ivar bool server_encrypted:
        Whether encryption is enabled.
    :ivar copy:
        The copy properties.
    :vartype copy: ~azure.storage.fileshare.CopyProperties
    :ivar content_settings:
        The content settings for the file.
    :vartype content_settings: ~azure.storage.fileshare.ContentSettings
    :ivar lease:
        The lease properties of the file.
    :ivar change_time: Change time for the file (parsed from header string).
    :ivar creation_time: Creation time for the file (parsed from header string).
    :ivar last_write_time: Last write time for the file (parsed from header string).
    :ivar file_attributes: The file system attributes of the file.
    :ivar str permission_key: Key of the permission set for the file.
    :ivar str file_id: FileId uniquely identifying the file.
    :ivar str parent_id: ParentId uniquely identifying the parent directory.
    """
    def __init__(self, **kwargs):
        self.name = kwargs.get('name')
        # path/share/snapshot are filled in by callers after construction.
        self.path = None
        self.share = None
        self.snapshot = None
        self.content_length = kwargs.get('Content-Length')
        self.metadata = kwargs.get('metadata')
        self.file_type = kwargs.get('x-ms-type')
        self.last_modified = kwargs.get('Last-Modified')
        self.etag = kwargs.get('ETag')
        # size mirrors content_length (both read Content-Length).
        self.size = kwargs.get('Content-Length')
        self.content_range = kwargs.get('Content-Range')
        self.server_encrypted = kwargs.get('x-ms-server-encrypted')
        # Sub-models pick out their own keys from the same kwargs dict.
        self.copy = CopyProperties(**kwargs)
        self.content_settings = ContentSettings(**kwargs)
        self.lease = LeaseProperties(**kwargs)
        self.change_time = _parse_datetime_from_str(kwargs.get('x-ms-file-change-time'))
        self.creation_time = _parse_datetime_from_str(kwargs.get('x-ms-file-creation-time'))
        self.last_write_time = _parse_datetime_from_str(kwargs.get('x-ms-file-last-write-time'))
        self.file_attributes = kwargs.get('x-ms-file-attributes')
        self.permission_key = kwargs.get('x-ms-file-permission-key')
        self.file_id = kwargs.get('x-ms-file-id')
        self.parent_id = kwargs.get('x-ms-file-parent-id')

    @classmethod
    def _from_generated(cls, generated):
        # Build from the autogenerated listing item; only a subset of
        # fields is populated.
        props = cls()
        props.name = generated.name
        props.content_length = generated.properties.content_length
        # NOTE(review): metadata is read from generated.properties here,
        # while DirectoryProperties reads generated.metadata — confirm the
        # generated model exposes properties.metadata.
        props.metadata = generated.properties.metadata
        props.lease = LeaseProperties._from_generated(generated)  # pylint: disable=protected-access
        return props
class CopyProperties(DictMixin):
    """File Copy Properties.

    :ivar str id:
        String identifier for the last attempted Copy File operation where this file
        was the destination file. This header does not appear if this file has never
        been the destination in a Copy File operation, or if this file has been
        modified after a concluded Copy File operation.
    :ivar str source:
        URL up to 2 KB in length that specifies the source file used in the last attempted
        Copy File operation where this file was the destination file. This header does not
        appear if this file has never been the destination in a Copy File operation, or if
        this file has been modified after a concluded Copy File operation.
    :ivar str status:
        State of the copy operation identified by Copy ID, with these values:
            success:
                Copy completed successfully.
            pending:
                Copy is in progress. Check copy_status_description if intermittent,
                non-fatal errors impede copy progress but don't cause failure.
            aborted:
                Copy was ended by Abort Copy File.
            failed:
                Copy failed. See copy_status_description for failure details.
    :ivar str progress:
        Contains the number of bytes copied and the total bytes in the source in the last
        attempted Copy File operation where this file was the destination file. Can show
        between 0 and Content-Length bytes copied.
    :ivar datetime completion_time:
        Conclusion time of the last attempted Copy File operation where this file was the
        destination file. This value can specify the time of a completed, aborted, or
        failed copy attempt.
    :ivar str status_description:
        Only appears when x-ms-copy-status is failed or pending. Describes cause of fatal
        or non-fatal copy operation failure.
    :ivar bool incremental_copy:
        Copies the snapshot of the source file to a destination file.
        The snapshot is copied such that only the differential changes between
        the previously copied snapshot are transferred to the destination
    :ivar datetime destination_snapshot:
        Included if the file is incremental copy or incremental copy snapshot,
        if x-ms-copy-status is success. Snapshot time of the last successful
        incremental copy snapshot for this file.
    """
    def __init__(self, **kwargs):
        # Each attribute maps to one response header; missing headers -> None.
        self.id = kwargs.get('x-ms-copy-id')
        self.source = kwargs.get('x-ms-copy-source')
        self.status = get_enum_value(kwargs.get('x-ms-copy-status'))
        self.progress = kwargs.get('x-ms-copy-progress')
        # Bug fix: the REST response header is 'x-ms-copy-completion-time';
        # the original looked up 'x-ms-copy-completion_time' (underscore),
        # so completion_time was always None.
        self.completion_time = kwargs.get('x-ms-copy-completion-time')
        self.status_description = kwargs.get('x-ms-copy-status-description')
        self.incremental_copy = kwargs.get('x-ms-incremental-copy')
        self.destination_snapshot = kwargs.get('x-ms-copy-destination-snapshot')

    @classmethod
    def _from_generated(cls, generated):
        # Build from the autogenerated model; empty/falsy values are
        # normalized to None.
        copy = cls()
        copy.id = generated.properties.copy_id or None
        copy.status = get_enum_value(generated.properties.copy_status) or None
        copy.source = generated.properties.copy_source or None
        copy.progress = generated.properties.copy_progress or None
        copy.completion_time = generated.properties.copy_completion_time or None
        copy.status_description = generated.properties.copy_status_description or None
        copy.incremental_copy = generated.properties.incremental_copy or None
        copy.destination_snapshot = generated.properties.destination_snapshot or None
        return copy
class FileSasPermissions(object):
    """Permission set used when generating a file-level shared access signature.

    :param bool read:
        Read the content, properties, metadata. Use the file as the source of a copy
        operation.
    :param bool create:
        Create a new file or copy a file to a new file.
    :param bool write:
        Create or write content, properties, metadata. Resize the file. Use the file
        as the destination of a copy operation within the same account.
    :param bool delete:
        Delete the file.
    """
    def __init__(self, read=False, create=False, write=False, delete=False):
        self.read = read
        self.create = create
        self.write = write
        self.delete = delete
        # Canonical SAS string: one letter per granted permission, in the
        # fixed order r, c, w, d.
        flags = (('r', read), ('c', create), ('w', write), ('d', delete))
        self._str = ''.join(letter for letter, granted in flags if granted)

    def __str__(self):
        return self._str

    @classmethod
    def from_string(cls, permission):
        """Create a FileSasPermissions from a string.

        To specify read, create, write, or delete permissions you need only to
        include the first letter of the word in the string. E.g. For read and
        create permissions, you would provide a string "rc".

        :param str permission: The string which dictates the read, create,
            write, or delete permissions
        :return: A FileSasPermissions object
        :rtype: ~azure.storage.fileshare.FileSasPermissions
        """
        parsed = cls(
            'r' in permission,
            'c' in permission,
            'w' in permission,
            'd' in permission,
        )
        # Preserve the caller's original string verbatim.
        parsed._str = permission  # pylint: disable = protected-access
        return parsed
class ShareSasPermissions(object):
    """Permission set used with shared access signatures and access policies
    at share scope.

    :param bool read:
        Read the content, properties or metadata of any file in the share. Use any
        file in the share as the source of a copy operation.
    :param bool write:
        For any file in the share, create or write content, properties or metadata.
        Resize the file. Use the file as the destination of a copy operation within
        the same account.
        Note: You cannot grant permissions to read or write share properties or
        metadata with a service SAS. Use an account SAS instead.
    :param bool delete:
        Delete any file in the share.
        Note: You cannot grant permissions to delete a share with a service SAS. Use
        an account SAS instead.
    :param bool list:
        List files and directories in the share.
    """
    def __init__(self, read=False, write=False, delete=False, list=False):  # pylint: disable=redefined-builtin
        self.read = read
        self.write = write
        self.delete = delete
        self.list = list
        # Canonical SAS string: one letter per granted permission, in the
        # fixed order r, w, d, l.
        flags = (('r', read), ('w', write), ('d', delete), ('l', list))
        self._str = ''.join(letter for letter, granted in flags if granted)

    def __str__(self):
        return self._str

    @classmethod
    def from_string(cls, permission):
        """Create a ShareSasPermissions from a string.

        To specify read, write, delete, or list permissions you need only to
        include the first letter of the word in the string. E.g. For read and
        write permissions, you would provide a string "rw".

        :param str permission: The string which dictates the read, write,
            delete, or list permissions
        :return: A ShareSasPermissions object
        :rtype: ~azure.storage.fileshare.ShareSasPermissions
        """
        parsed = cls(
            'r' in permission,
            'w' in permission,
            'd' in permission,
            'l' in permission,
        )
        # Preserve the caller's original string verbatim.
        parsed._str = permission  # pylint: disable = protected-access
        return parsed
class NTFSAttributes(object):
    """Valid set of NTFS attributes to set for a file or directory.

    To set attributes for a directory, 'Directory' should always be enabled
    except when setting 'None' for a directory.

    :ivar bool read_only:
        Enable/disable 'ReadOnly' attribute for DIRECTORY or FILE
    :ivar bool hidden:
        Enable/disable 'Hidden' attribute for DIRECTORY or FILE
    :ivar bool system:
        Enable/disable 'System' attribute for DIRECTORY or FILE
    :ivar bool none:
        Enable/disable 'None' attribute for DIRECTORY or FILE to clear all attributes of FILE/DIRECTORY
    :ivar bool directory:
        Enable/disable 'Directory' attribute for DIRECTORY
    :ivar bool archive:
        Enable/disable 'Archive' attribute for DIRECTORY or FILE
    :ivar bool temporary:
        Enable/disable 'Temporary' attribute for FILE
    :ivar bool offline:
        Enable/disable 'Offline' attribute for DIRECTORY or FILE
    :ivar bool not_content_indexed:
        Enable/disable 'NotContentIndexed' attribute for DIRECTORY or FILE
    :ivar bool no_scrub_data:
        Enable/disable 'NoScrubData' attribute for DIRECTORY or FILE
    """
    def __init__(self, read_only=False, hidden=False, system=False, none=False, directory=False, archive=False,
                 temporary=False, offline=False, not_content_indexed=False, no_scrub_data=False):
        self.read_only = read_only
        self.hidden = hidden
        self.system = system
        self.none = none
        self.directory = directory
        self.archive = archive
        self.temporary = temporary
        self.offline = offline
        self.not_content_indexed = not_content_indexed
        self.no_scrub_data = no_scrub_data
        # Pipe-terminated concatenation of every enabled attribute name;
        # __str__ trims the trailing separator.
        enabled_names = (
            ('ReadOnly', read_only),
            ('Hidden', hidden),
            ('System', system),
            ('None', none),
            ('Directory', directory),
            ('Archive', archive),
            ('Temporary', temporary),
            ('Offline', offline),
            ('NotContentIndexed', not_content_indexed),
            ('NoScrubData', no_scrub_data),
        )
        self._str = ''.join(name + '|' for name, flag in enabled_names if flag)

    def __str__(self):
        return self._str.strip('|')

    @classmethod
    def from_string(cls, string):
        """Create a NTFSAttributes from a string.

        To specify permissions you can pass in a string with the
        desired permissions, e.g. "ReadOnly|Hidden|System"

        :param str string: The string which dictates the permissions.
        :return: A NTFSAttributes object
        :rtype: ~azure.storage.fileshare.NTFSAttributes
        """
        # Membership tests in the same positional order as __init__.
        attribute_names = ("ReadOnly", "Hidden", "System", "None", "Directory",
                           "Archive", "Temporary", "Offline", "NotContentIndexed",
                           "NoScrubData")
        parsed = cls(*(name in string for name in attribute_names))
        # Preserve the caller's original string verbatim.
        parsed._str = string  # pylint: disable = protected-access
        return parsed
def service_properties_deserialize(generated):
    """Deserialize a ServiceProperties object into a plain dict."""
    # pylint: disable=protected-access
    cors_rules = [CorsRule._from_generated(rule) for rule in generated.cors]
    return {
        'hour_metrics': Metrics._from_generated(generated.hour_metrics),
        'minute_metrics': Metrics._from_generated(generated.minute_metrics),
        'cors': cors_rules,
    }
| [
"noreply@github.com"
] | Azure.noreply@github.com |
cadf4a8240aa583d9ad090cc8ab82e29151c3b1d | c8be157a5376314f4ebb93c40fc948cfcc775c1e | /trainer/trainer.py | 5ae333b9e82d3b25512bf872505917f061ab8d78 | [] | no_license | 15926273249/OCR_detection_IC15 | fdf6cc244b0b1e78f1f03977be34c50da82d6519 | a1a7348bc8a6a7e66d364dac14acebbc57572d4c | refs/heads/master | 2023-07-27T04:03:15.561476 | 2021-09-15T07:19:16 | 2021-09-15T07:19:16 | 256,381,621 | 0 | 0 | null | 2020-04-17T02:31:45 | 2020-04-17T02:31:44 | null | UTF-8 | Python | false | false | 5,035 | py | import numpy as np
import torch
from base import BaseTrainer
from utils.bbox import Toolbox
from utils.visualize import Visualizer
class Trainer(BaseTrainer):
    """
    Trainer class

    Note:
        Inherited from BaseTrainer.
        self.optimizer is by default handled by BaseTrainer based on config.
    """

    def __init__(self, model, loss, metrics, resume, config,
                 data_loader, toolbox: Toolbox, valid_data_loader=None, train_logger=None):
        super(Trainer, self).__init__(model, loss, metrics, resume, config, train_logger)
        self.config = config
        self.batch_size = data_loader.batch_size
        self.data_loader = data_loader
        self.valid_data_loader = valid_data_loader
        # Validation runs only when a validation loader was supplied.
        self.valid = True if self.valid_data_loader is not None else False
        # Emit a progress log roughly every sqrt(batch_size) batches.
        self.log_step = int(np.sqrt(self.batch_size))
        self.toolbox = toolbox
        # Visdom dashboard (environment 'FOTS') for live loss curves.
        self.visdom = Visualizer(env='FOTS')

    def _to_tensor(self, *tensors):
        # Move each tensor to the device selected by BaseTrainer
        # (CPU or GPU) and return them as a list.
        t = []
        for __tensors in tensors:
            t.append(__tensors.to(self.device))
        return t

    def _eval_metrics(self, output, target, mask):
        # Evaluate every configured metric on CPU numpy copies of the
        # predictions (argmax over the class axis) and the target.
        acc_metrics = np.zeros(len(self.metrics))
        output = output.cpu().data.numpy()
        target = target.cpu().data.numpy()
        output = np.argmax(output, axis=1)
        for i, metric in enumerate(self.metrics):
            acc_metrics[i] += metric(output, target)
        return acc_metrics

    def _train_epoch(self, epoch):
        """
        Training logic for an epoch

        :param epoch: Current training epoch.
        :return: A log that contains all information you want to save.

        Note:
            If you have additional information to record, for example:
                > additional_log = {"x": x, "y": y}
            merge it with log before return. i.e.
                > log = {**log, **additional_log}
                > return log
            The metrics in log must have the key 'metrics'.
        """
        self.model.train()

        total_loss = 0
        total_metrics = np.zeros(len(self.metrics))
        for batch_idx, gt in enumerate(self.data_loader):
            img, score_map, geo_map, training_mask, transcript = gt
            img, score_map, geo_map, training_mask = self._to_tensor(img, score_map, geo_map, training_mask)
            # No recognition ground truth is supplied in this setup.
            recog_map = None

            self.optimizer.zero_grad()
            pred_score_map, pred_geo_map, pred_recog_map = self.model(img)
            loss = self.loss(score_map, pred_score_map, geo_map, pred_geo_map, pred_recog_map, recog_map, training_mask)
            loss.backward()
            self.optimizer.step()

            total_loss += loss.item()
            #total_metrics += self._eval_metrics(output, target)
            # NOTE(review): metric accumulation is disabled (see the line
            # above); 0 is added so 'metrics' still appears in the log.
            total_metrics += 0

            if self.verbosity >= 2 and batch_idx % self.log_step == 0:
                self.logger.info('Train Epoch: {} [{}/{} ({:.0f}%)] Loss: {:.6f}'.format(
                    epoch,
                    batch_idx * self.data_loader.batch_size,
                    len(self.data_loader) * self.data_loader.batch_size,
                    100.0 * batch_idx / len(self.data_loader),
                    loss.item()))

        # Plot the epoch's mean training loss on the visdom dashboard.
        self.visdom.plot('train_loss', total_loss / len(self.data_loader))
        log = {
            'loss': total_loss / len(self.data_loader),
            'metrics': (total_metrics / len(self.data_loader)).tolist()
        }

        if self.valid:
            val_log = self._valid_epoch()
            log = {**log, **val_log}

        return log

    def _valid_epoch(self):
        """
        Validate after training an epoch

        :return: A log that contains information about validation

        Note:
            The validation metrics in log must have the key 'val_metrics'.
        """
        self.model.eval()
        total_val_loss = 0
        total_val_metrics = np.zeros(len(self.metrics))
        # Gradients are not needed for validation.
        with torch.no_grad():
            for batch_idx, gt in enumerate(self.valid_data_loader):
                img, score_map, geo_map, training_mask, transcript = gt
                img, score_map, geo_map, training_mask = self._to_tensor(img, score_map, geo_map, training_mask)
                recog_map = None

                pred_score_map, pred_geo_map, pred_recog_map = self.model(img)
                loss = self.loss(score_map, pred_score_map, geo_map, pred_geo_map, pred_recog_map, recog_map,
                                 training_mask)

                total_val_loss += loss.item()
                output = (pred_score_map, pred_geo_map, pred_recog_map)
                target = (score_map, geo_map, recog_map)
                #total_val_metrics += self._eval_metrics(output, target, training_mask) #TODO: should add AP metric

        # Plot the epoch's mean validation loss on the visdom dashboard.
        self.visdom.plot('val_loss', total_val_loss / len(self.valid_data_loader))
        return {
            'val_loss': total_val_loss / len(self.valid_data_loader),
            'val_metrics': (total_val_metrics / len(self.valid_data_loader)).tolist()
        }
| [
"Viper_mdl@126.com"
] | Viper_mdl@126.com |
5c0bf1565d794947c5e83c20c6ade13d158fedf3 | 650e9b9ce71aa40abc7dab33fc590a8aefca2155 | /Exercicios/11-20/ex015.py | 270f6418d753daafda951bfc835c895569395987 | [] | no_license | dennisasilva/PythonCourse | 412b22376cff99fc9ece9346ca7ff215f6091127 | 717c0506185e0d2319a082aecda85c8c8163e3cc | refs/heads/master | 2023-01-13T04:41:18.377357 | 2020-11-06T16:13:33 | 2020-11-06T16:13:33 | 264,807,134 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 417 | py | # Desafio 15
# Escreva um programa que pergunte a quantidade de Km percorridos por um carro alugado e a quantidade de dias
# pelos quais ele foi alugado. Calcule o preço a pagar, sabendo que o carro custa R$60 por dia e R$0,15 por Km rodado.
dia = int(input("Quantos dias alugados? "))
km = float(input("Quantos KM rodados? "))
pago = (dia * 60) + (km * 0.15)
f = input("O total a pagar é R${:.2f}".format(pago))
| [
"dennisasilva@gmail.com"
] | dennisasilva@gmail.com |
172d724d6cdeff773fb75953079b08d1d6f65291 | 2835d4c79f38bf4b97534c5b5f6f743473cf5ba9 | /test_app/migrations/0001_initial.py | 8e65a4ad46bb2cf7f04119aef26e276cf385f7c5 | [] | no_license | Darkmor88/my-first-site | dd0325cd5438a531f421b8f0a5348178d7fc0ef8 | 399aa3042321e7924a8536e93f714e8d589b506d | refs/heads/master | 2022-11-19T05:43:51.836536 | 2020-07-10T19:29:02 | 2020-07-10T19:29:02 | 277,367,456 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 986 | py | # Generated by Django 3.0.8 on 2020-07-05 18:13
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    # Initial migration for the app: creates the Post model.

    initial = True

    dependencies = [
        # Posts reference the project's configured user model as author.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=200)),
                ('text', models.TextField()),
                # Creation timestamp defaults to "now"; publication is optional.
                ('created_date', models.DateTimeField(default=django.utils.timezone.now)),
                ('published_date', models.DateTimeField(blank=True, null=True)),
                # Deleting a user cascades to their posts.
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| [
"akormilicin88@gmail.com"
] | akormilicin88@gmail.com |
d01f381c02e80357e531e32a614919713a104070 | caa2414d67547bb9d673bd51cd39cddabffe6e7d | /modalMethods/bin/POD/pod_energy_plot.py | bf90c0c0fc21d97e5ea97e2dbcc2ebaa67be19f9 | [] | no_license | sidShinde/modal-methods | 37c4128e8ebbb8e34c278216ba5098df7a0b386c | 56eb3c7d4a467161ba84a43df48b1b193d1c3ca1 | refs/heads/master | 2021-01-23T03:43:28.129110 | 2017-07-17T14:46:37 | 2017-07-17T14:46:37 | 86,115,381 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,824 | py | import os
import argparse
import numpy as np
import matplotlib
matplotlib.use('PDF')
from matplotlib import pyplot as plt
from modalMethods.readers.reader import *
def pod_energy_plot(configFile):
    '''
    Plot the energy fraction carried by each POD mode and save it as a PDF.

    Input
    -----
    configFile: file with input details (parsed by config_to_dict)

    Output
    ------
    saves a plot of energy v/s modes under postProcessing/POD/
    '''
    configDict, _, _ = config_to_dict(configFile)

    # read data from configDict:
    patchName = configDict['patchName']
    nSnaps = int( configDict['nSnaps'] )

    # Singular values are read from the case's postProcessing directory.
    caseDir = os.getcwd();
    fname = caseDir + '/postProcessing/POD/singVals_' + patchName + '_' + \
            str( nSnaps ) + '.csv'
    singVals = np.loadtxt(fname)

    # The first singular value is excluded from the energy budget; each
    # remaining value is expressed as a percentage of their sum.
    totalEnergy = np.sum( singVals[1:] )
    perEnergy = singVals[1:]*(100/totalEnergy)

    modes = np.linspace(1, nSnaps-1, nSnaps-1)
    # NOTE(review): astype is not in-place — this result is discarded, so
    # `modes` stays float. Harmless for plotting, but likely unintended.
    modes.astype(int)

    print('\n plotting energy ...')
    fig = plt.figure(figsize=(5,5))
    plt.rc('text', usetex=True)
    plt.rc('font', family='serif')

    plt.plot(modes, perEnergy, '-ok', lw=2.0)

    plt.xticks(fontsize=14)
    plt.yticks(fontsize=14)

    # X range is fixed; y range rounds up past the largest energy fraction.
    plt.xlim(0, 30)
    plt.ylim(0, int(perEnergy.max() + 5))

    plt.xlabel(r'$n^{th}$ POD mode', fontsize=16)
    plt.ylabel(r'Energy ratio $(\%)$', fontsize=16)

    fname = caseDir + '/postProcessing/POD/energy_' + \
            patchName + '_' + str( nSnaps ) + '.pdf'
    plt.savefig(fname)
    plt.close(fig)
def main():
    """Command-line entry point: parse arguments and generate the plot."""
    arg_parser = argparse.ArgumentParser(
        description="Save energy v/s modes plot in postProcessing/POD.")
    arg_parser.add_argument('-config',
                            type=str,
                            help='The config file',
                            required=True)
    parsed_args = arg_parser.parse_args()

    # Hand the open config file to the plotting routine.
    config_handle = open(parsed_args.config, mode='r')
    pod_energy_plot(config_handle)
| [
"siddhesh@umich.edu"
] | siddhesh@umich.edu |
adddedcca61a8134c1a899015d0449e39f75ed65 | 5cf8e8f35aec5f7637c40f5baa0af378a69d3a73 | /main.py | e00b2edb02fbb6f1763eb92c5773ff5b818c02d5 | [] | no_license | l7ucian/Flask_BootStrap_sample | 6cc05f8bec1528221b9148aa97cd2bb9b9733265 | 8dba8e2f84a3386928d9217b01ef66deda5ce10b | refs/heads/master | 2021-01-17T14:20:16.753306 | 2017-03-06T14:46:32 | 2017-03-06T14:46:32 | 84,084,909 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 141 | py | from flask import Flask
app = Flask(__name__)
@app.route("/")
def index():
return 'index.html'
if __name__ == "__main__":
app.run() | [
"lucian.andercou@gmail.com"
] | lucian.andercou@gmail.com |
a1ccf0822ee71e89102f179bb0eb5ae1d1b36efe | fb170891e7d10139c3d5bf0d55403abef0196f1f | /src/pipelines/master_pipeline.py | b99f2be6eecfbf810f30be35255fef84854d7a8f | [] | no_license | lihkinVerma/CommunityQuestionAnswering | 759e93bbe94f9f0c157530269cb353719a010dd2 | 682212e3d0575b580c48984f979860ad17d30aa2 | refs/heads/master | 2023-05-12T03:53:50.104699 | 2021-06-03T16:08:06 | 2021-06-03T16:08:06 | 370,103,288 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,603 | py | #!/usr/bin/env python
'''
Controls the pipeline for Pythia.
This module regulates the features and algorithms used in order to detect novelty,
then adminstrates the implementation of the given specifications. It requires a
directory full of JSON files, where each file contains a cluster of documents.
'''
import pdb
import sys
import os
import pickle
import argparse
import logging
from collections import namedtuple
import numpy as np
os.environ["THEANO_FLAGS"] = "mode=FAST_RUN,device=gpu,floatX=float32,allow_gc=True" # Sets flags for use of GPU
from memory_profiler import profile
from src.pipelines import parse_json, preprocess, data_gen, log_reg, svm, xgb, predict, sgd
from src.utils.sampling import sample
from src.mem_net import main_mem_net
from src.utils import hashing
from src.utils.sampling import sample
cache_pickle = "{}.pkl"
cache_dir = ".cache-pythia"
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.DEBUG)
def main(argv):
    '''
    Controls the over-arching implementation of the algorithms.

    argv is a 4-tuple: (directory, features, algorithms, parameters).
    Returns the performance results dict produced by the last selected
    algorithm.
    '''
    directory, features, algorithms, parameters = argv

    # Create a numpy random state
    random_state = np.random.RandomState(parameters['seed'])

    # ---- parsing ----
    print("parsing json data...",file=sys.stderr)
    if parameters['use_cache']:
        # The cache key is a hash of the data directory.
        dir_hash = hashing.dir_hash(directory)
        pickle_path = os.path.join(cache_dir, cache_pickle.format(dir_hash))
        try:
            logger.debug("Trying to use cache")
            with open(pickle_path, 'rb') as f:
                parsed_data = pickle.load(f)
            logger.debug("Using existing cache")
        except:
            # NOTE(review): bare except — any failure (missing or corrupt
            # cache) silently falls back to re-parsing and rewriting it.
            logger.debug("Parsing and writing to cache")
            parsed_data = parse_json.main(directory, parameters)
            os.makedirs(cache_dir, exist_ok=True)
            with open(pickle_path, 'wb') as f:
                pickle.dump(parsed_data, f)
    else:
        parsed_data = parse_json.main(directory, parameters)

    clusters, order, data, test_clusters, test_order, test_data, corpusdict = parsed_data

    # ---- preprocessing: build vocabularies and feature models ----
    print("preprocessing...",file=sys.stderr)
    vocab, full_vocab, encoder_decoder, lda_model, tf_model, w2v_model = preprocess.main(features, parameters, corpusdict, data)

    # ---- featurization ----
    hdf5_path_train=parameters['hdf5_path_train']
    hdf5_path_test=parameters['hdf5_path_test']
    print("generating training data...",file=sys.stderr)
    train_data, train_target, train_ids = data_gen.gen_observations(clusters, order, data, features, parameters, vocab, full_vocab, encoder_decoder, lda_model, tf_model, w2v_model, hdf5_path_train)
    with open('trainingData.pickle', 'wb') as f:
        pickle.dump({'train_data':train_data, 'train_target':train_target, 'train_ids':train_ids}, f)
    print("generating testing data...",file=sys.stderr)
    # NOTE: test_data (raw parsed documents) is rebound here to the
    # featurized observations.
    test_data, test_target, test_ids = data_gen.gen_observations(test_clusters, test_order, test_data, features, parameters, vocab, full_vocab, encoder_decoder, lda_model, tf_model, w2v_model, hdf5_path_test)
    with open('testData.pickle', 'wb') as f:
        pickle.dump({'test_data':test_data, 'test_target':test_target, 'test_ids':test_ids}, f)

    # save training data for separate experimentation and hyperparameter optimization
    if 'saveexperimentdata' in parameters:
        lunchbox = dict()
        lunchbox['directory'] = directory
        lunchbox['features'] = features
        lunchbox['algorithms'] = algorithms
        lunchbox['parameters'] = parameters
        lunchbox['train_data'] = train_data
        lunchbox['train_target'] = train_target
        lunchbox['test_data'] = test_data
        lunchbox['test_target'] = test_target
        pickle.dump(lunchbox, open(parameters['saveexperimentdata']['experimentdatafile'], "wb"))

    # ---- modeling: train each selected classifier, then predict ----
    # If several algorithms are selected, later blocks overwrite the
    # predictions of earlier ones.
    print("running algorithms...",file=sys.stderr)
    if 'log_reg' in algorithms:
        logreg_model = log_reg.main([train_data, train_target, algorithms['log_reg']])
        predicted_labels, perform_results = predict.predicter(logreg_model, test_data, test_target)
    if 'svm' in algorithms:
        svm_model = svm.main([train_data, train_target, algorithms['svm']])
        predicted_labels, perform_results = predict.predicter(svm_model, test_data, test_target)
    if 'xgb' in algorithms:
        xgb_model = xgb.main([train_data, train_target, algorithms['xgb']])
        predicted_labels, perform_results = predict.predicter(xgb_model, test_data, test_target)
    if 'sgd' in algorithms:
        sgd_model = sgd.main(hdf5_path_train, "/data", "/labels", **algorithms['sgd'])
        predicted_labels, perform_results = predict.predicter(sgd_model, test_data, test_target)
    if 'mem_net' in algorithms:
        from src.mem_net import main_mem_net
        mem_net_model, model_name = main_mem_net.run_mem_net(train_data, test_data, **algorithms['mem_net'])
        predicted_labels, perform_results = main_mem_net.test_mem_network(mem_net_model, model_name, **algorithms['mem_net'])

    # ---- results ----
    if "save_results" in parameters:
        perform_results.update({"id":test_ids})
        perform_results.update({"predicted_label":predicted_labels.tolist()})
        perform_results.update({"novelty":test_target})

    return perform_results
def get_args(
        #DIRECTORY
        directory = 'data/stackexchange/anime',
        #FEATURES
        #bag of words
        BOW_APPEND = False,
        BOW_DIFFERENCE = False,
        BOW_PRODUCT = False,
        BOW_COS = False,
        BOW_TFIDF = False,
        BOW_BINARY = True,
        #skipthoughts
        ST_APPEND = False,
        ST_DIFFERENCE = False,
        ST_PRODUCT = False,
        ST_COS = False,
        #lda
        LDA_APPEND = False,
        LDA_DIFFERENCE = False,
        LDA_PRODUCT = False,
        LDA_COS = False,
        LDA_TOPICS = 40,
        #word2vec
        # If AVG, MAX, MIN or ABS are selected, APPEND, DIFFERENCE, PRODUCT or COS must be selected
        W2V_AVG = False,
        W2V_MAX = False,
        W2V_MIN = False,
        W2V_ABS = False,
        # If APPEND, DIFFERENCE, PRODUCT or COS are selected AVG, MAX, MIN or ABS must be selected
        W2V_APPEND = False,
        W2V_DIFFERENCE = False,
        W2V_PRODUCT = False,
        W2V_COS = False,
        W2V_PRETRAINED=False,
        W2V_MIN_COUNT = 5,
        W2V_WINDOW = 5,
        # W2V_SIZE should be set to 300 if using the Google News pretrained word2vec model
        W2V_SIZE = 300,
        W2V_WORKERS = 3,
        #one-hot CNN layer
        CNN_APPEND = False,
        CNN_DIFFERENCE = False,
        CNN_PRODUCT = False,
        CNN_COS = False,
        #The one-hot CNN will use the full_vocab parameters
        # wordonehot (will not play nicely with other featurization methods b/c not
        # vector)
        WORDONEHOT = False,
        #WORDONEHOT_DOCLENGTH = None
        WORDONEHOT_VOCAB = 5000,
        #ALGORITHMS
        #logistic regression
        LOG_REG = False,
        LOG_PENALTY = 'l2',
        LOG_TOL = 1e-4,
        LOG_C = 1e-4,
        #svm
        SVM = False,
        SVM_C = 2000,
        SVM_KERNEL = 'linear',
        SVM_GAMMA = 'auto',
        #xgboost
        XGB = False,
        XGB_LEARNRATE = 0.1,
        XGB_MAXDEPTH = 3,
        XGB_MINCHILDWEIGHT = 1,
        XGB_COLSAMPLEBYTREE = 1,
        # SGD Logistic regression
        SGD = False,
        SGD_LOSS = 'log',
        SGD_ALPHA = 0.0001,
        SGD_PENALTY = 'l2',
        SGD_EPOCHS = 10,
        SGD_BATCH_SIZE = 128,
        #memory network
        MEM_NET = False,
        #The memory network vocab uses Glove which can be 50, 100, 200 or 300 depending on the models you have in /data/glove
        MEM_VOCAB = 50,
        MEM_TYPE = 'dmn_basic',
        MEM_BATCH = 1,
        MEM_EPOCHS = 5,
        MEM_MASK_MODE = 'sentence',
        MEM_EMBED_MODE = "word2vec",
        MEM_ONEHOT_MIN_LEN = 140,
        MEM_ONEHOT_MAX_LEN = 1000,
        #PARAMETERS
        #resampling
        RESAMPLING = False,
        NOVEL_RATIO = None,
        OVERSAMPLING = False,
        REPLACEMENT = False,
        SAVE_RESULTS = False,
        #save training data for experimentation and hyperparameter grid search
        SAVEEXPERIMENTDATA = False,
        EXPERIMENTDATAFILE='data/experimentdatafile.pkl',
        #vocabulary
        VOCAB_SIZE = 10000,
        STEM = False,
        FULL_VOCAB_SIZE = 10000,
        FULL_VOCAB_TYPE = 'character',
        FULL_CHAR_VOCAB = "abcdefghijklmnopqrstuvwxyz0123456789,;.!?:'\"/|_@#$%^&*~`+-=<>()[]{}",
        FULL_VOCAB_STEM = False,
        SEED = 41,
        HDF5_PATH_TRAIN = None,
        HDF5_PATH_TEST = None,
        HDF5_SAVE_FREQUENCY = 100,
        HDF5_USE_EXISTING = True,
        USE_CACHE = False):
    """ Return a parameters data structure with information on how to
    run an experiment. Argument list should match experiments/experiments.py

    Builds and returns a 4-tuple (directory, features, algorithms, parameters):
    - features: dict of enabled featurizers (bow/st/lda/w2v/wordonehot/cnn/mem_net),
      each mapping option names to the flag values passed in.
    - algorithms: dict with exactly one classifier config (log_reg/svm/xgb/sgd/mem_net);
      the function quits if zero or more than one algorithm is requested.
    - parameters: run-level settings (resampling, vocab sizes, seed, HDF5 paths, ...).
    Calls quit() on invalid combinations rather than raising.
    """
    #get features
    bow = None
    st = None
    lda = None
    w2v = None
    wordonehot = None
    cnn = None
    mem_net = None
    # Each feature dict is only created when at least one of its flags is on;
    # option keys are only inserted for truthy values.
    if BOW_APPEND or BOW_DIFFERENCE or BOW_PRODUCT or BOW_COS or BOW_TFIDF:
        bow = dict()
        if BOW_APPEND: bow['append'] = BOW_APPEND
        if BOW_DIFFERENCE: bow['difference'] = BOW_DIFFERENCE
        if BOW_PRODUCT: bow['product'] = BOW_PRODUCT
        if BOW_COS: bow['cos'] = BOW_COS
        if BOW_TFIDF: bow['tfidf'] = BOW_TFIDF
        if BOW_BINARY: bow['binary'] = BOW_BINARY
    if ST_APPEND or ST_DIFFERENCE or ST_PRODUCT or ST_COS:
        st = dict()
        if ST_APPEND: st['append'] = ST_APPEND
        if ST_DIFFERENCE: st['difference'] = ST_DIFFERENCE
        if ST_PRODUCT: st['product'] = ST_PRODUCT
        if ST_COS: st['cos'] = ST_COS
    if LDA_APPEND or LDA_DIFFERENCE or LDA_PRODUCT or LDA_COS:
        lda = dict()
        if LDA_APPEND: lda['append'] = LDA_APPEND
        if LDA_DIFFERENCE: lda['difference'] = LDA_DIFFERENCE
        if LDA_PRODUCT: lda['product'] = LDA_PRODUCT
        if LDA_COS: lda['cos'] = LDA_COS
        if LDA_TOPICS: lda['topics'] = LDA_TOPICS
    if any([W2V_APPEND,W2V_DIFFERENCE,W2V_PRODUCT,W2V_COS]) or any([W2V_AVG,W2V_MAX,W2V_MIN,W2V_ABS]):
        w2v = dict()
        if W2V_AVG: w2v['avg'] = W2V_AVG
        if W2V_MAX: w2v['max'] = W2V_MAX
        if W2V_MIN: w2v['min'] = W2V_MIN
        if W2V_ABS: w2v['abs'] = W2V_ABS
        if W2V_APPEND: w2v['append'] = W2V_APPEND
        if W2V_DIFFERENCE: w2v['difference'] = W2V_DIFFERENCE
        if W2V_PRODUCT: w2v['product'] = W2V_PRODUCT
        if W2V_COS: w2v['cos'] = W2V_COS
        if W2V_PRETRAINED: w2v['pretrained'] = W2V_PRETRAINED
        if W2V_MIN_COUNT: w2v['min_count'] = W2V_MIN_COUNT
        if W2V_WINDOW: w2v['window'] = W2V_WINDOW
        if W2V_SIZE: w2v['size'] = W2V_SIZE
        if W2V_WORKERS: w2v['workers'] = W2V_WORKERS
    if WORDONEHOT:
        wordonehot = dict()
        if WORDONEHOT_VOCAB:
            wordonehot['vocab'] = WORDONEHOT_VOCAB
    if CNN_APPEND or CNN_DIFFERENCE or CNN_PRODUCT or CNN_COS:
        cnn = dict()
        if CNN_APPEND: cnn['append'] = CNN_APPEND
        if CNN_DIFFERENCE: cnn['difference'] = CNN_DIFFERENCE
        if CNN_PRODUCT: cnn['product'] = CNN_PRODUCT
        if CNN_COS: cnn['cos'] = CNN_COS
    if MEM_NET:
        mem_net = dict()
        if MEM_VOCAB: mem_net['word_vector_size'] = MEM_VOCAB
        #if SEED: mem_net['seed'] = SEED
        if MEM_TYPE: mem_net['network'] = MEM_TYPE
        if MEM_BATCH: mem_net['batch_size'] = MEM_BATCH
        if MEM_EPOCHS: mem_net['epochs'] = MEM_EPOCHS
        if MEM_MASK_MODE: mem_net['mask_mode'] = MEM_MASK_MODE
        if MEM_EMBED_MODE : mem_net['embed_mode'] = MEM_EMBED_MODE
        if MEM_ONEHOT_MIN_LEN: mem_net['onehot_min_len'] = MEM_ONEHOT_MIN_LEN
        if MEM_ONEHOT_MAX_LEN: mem_net['onehot_max_len'] = MEM_ONEHOT_MAX_LEN
        #Use the same input params as word2vec
        if W2V_PRETRAINED: mem_net['pretrained'] = W2V_PRETRAINED
        if W2V_MIN_COUNT: mem_net['min_count'] = W2V_MIN_COUNT
        if W2V_WINDOW: mem_net['window'] = W2V_WINDOW
        if W2V_SIZE: mem_net['size'] = W2V_SIZE
        if W2V_WORKERS: mem_net['workers'] = W2V_WORKERS
    # Assemble the features dict from whichever featurizers were enabled
    features = dict()
    if bow:
        features['bow'] = bow
    if st:
        features['st'] = st
    if lda:
        features['lda'] = lda
    if w2v:
        features['w2v'] = w2v
    if wordonehot:
        features['wordonehot'] = wordonehot
    if cnn:
        features['cnn'] = cnn
    if mem_net:
        # mem_net is exclusive: it overrides any other requested features
        if len(features)>0:
            print("Caution!! Only the memory network feature and algorithm will be ran as they have to run alone")
        features['mem_net'] = mem_net
    if len(features) == 0:
        print("Error: At least one feature (ex: Bag of Words, LDA, etc.) must be requested per run.", file=sys.stderr)
        quit()
    # word2vec needs both a vector type (avg/max/min/abs) and an operation
    # (append/difference/product/cos); fill in a default when one side is missing
    w2v_types = [W2V_AVG,W2V_MAX,W2V_MIN,W2V_ABS]
    w2v_ops = [W2V_APPEND,W2V_DIFFERENCE,W2V_PRODUCT,W2V_COS]
    if any(w2v_ops) and not any(w2v_types):
        print("Caution!! A Word2Vec vector type must be selected. Default will be set to average (W2V_AVG)", file=sys.stderr)
        features['w2v']['avg'] = True
    if any(w2v_types) and not any(w2v_ops):
        print("Caution!! A Word2Vec vector operation must be selected. Default will be set to append (W2V_APPEND)", file=sys.stderr)
        features['w2v']['append'] = True
    #get algorithms
    log_reg = None
    svm = None
    xgb = None
    sgd = None
    if LOG_REG:
        log_reg = dict()
        if LOG_PENALTY: log_reg['log_penalty'] = LOG_PENALTY
        if LOG_TOL: log_reg['log_tol'] = LOG_TOL
        if LOG_C: log_reg['log_C'] = LOG_C
    if SVM:
        svm = dict()
        if SVM_C: svm['svm_C'] = SVM_C
        if SVM_KERNEL: svm['svm_kernel'] = SVM_KERNEL
        if SVM_GAMMA: svm['svm_gamma'] = SVM_GAMMA
    if XGB:
        xgb = dict()
        if XGB_LEARNRATE: xgb['x_learning_rate'] = XGB_LEARNRATE
        if XGB_MAXDEPTH: xgb['x_max_depth'] = XGB_MAXDEPTH
        if XGB_COLSAMPLEBYTREE: xgb['x_colsample_bytree'] = XGB_COLSAMPLEBYTREE
        # NOTE(review): XGB_MINCHILDWEIGHT is stored under 'x_colsample_bylevel' —
        # looks like a key mix-up; confirm against the xgb wrapper's expected keys
        if XGB_MINCHILDWEIGHT: xgb['x_colsample_bylevel'] = XGB_MINCHILDWEIGHT
    if SGD:
        sgd = dict()
        sgd['alpha'] = SGD_ALPHA
        sgd['loss'] = SGD_LOSS
        sgd['penalty'] = SGD_PENALTY
        sgd['num_epochs'] = SGD_EPOCHS
        sgd['batch_size'] = SGD_BATCH_SIZE
        sgd['seed'] = SEED
        assert HDF5_PATH_TRAIN is not None, "SGD-based methods should be used with HDF5"
    algorithms = dict()
    if log_reg: algorithms['log_reg'] = log_reg
    if svm: algorithms['svm'] = svm
    if xgb: algorithms['xgb'] = xgb
    if mem_net:
        algorithms['mem_net']=mem_net
    if sgd:
        algorithms['sgd'] = sgd
    logger.debug("Algorithms structure: {}".format(algorithms))
    # Enforce requirement and limitation of one algorithm per run
    if len(algorithms) == 0:
        print("Error: One classification algorithm must be requested per run.", file=sys.stderr)
        quit()
    elif len(algorithms) > 1:
        print("Error: Only one classification can be requested per run.", file=sys.stderr)
        quit()
    #get parameters
    resampling = None
    if RESAMPLING:
        resampling = dict()
        if NOVEL_RATIO:
            resampling['novelToNotNovelRatio'] = NOVEL_RATIO
            logger.warn("NOVEL_RATIO specified but not supported")
        resampling['over'] = OVERSAMPLING
        resampling['replacement'] = REPLACEMENT
    saveexperimentdata = None
    if SAVEEXPERIMENTDATA:
        saveexperimentdata = dict()
        if EXPERIMENTDATAFILE: saveexperimentdata['experimentdatafile'] = EXPERIMENTDATAFILE
    parameters = dict()
    if RESAMPLING: parameters['resampling'] = resampling
    if SAVE_RESULTS: parameters['save_results'] = SAVE_RESULTS
    if SAVEEXPERIMENTDATA: parameters['saveexperimentdata'] = saveexperimentdata
    if VOCAB_SIZE: parameters['vocab'] = VOCAB_SIZE
    if STEM: parameters['stem'] = STEM
    if SEED:
        parameters['seed'] = SEED
    else:
        parameters['seed'] = 41
    if FULL_VOCAB_SIZE: parameters['full_vocab_size'] = FULL_VOCAB_SIZE
    if FULL_VOCAB_TYPE: parameters['full_vocab_type'] = FULL_VOCAB_TYPE
    if FULL_CHAR_VOCAB: parameters['full_char_vocab'] = FULL_CHAR_VOCAB
    if FULL_VOCAB_STEM: parameters['full_vocab_stem'] = FULL_VOCAB_STEM
    # HDF5 and SGD must be enabled together (SGD streams batches from HDF5)
    assert (HDF5_PATH_TRAIN and SGD) or (not HDF5_PATH_TRAIN and not SGD)
    parameters['hdf5_path_test'] = HDF5_PATH_TEST
    parameters['hdf5_path_train'] = HDF5_PATH_TRAIN
    parameters['hdf5_save_frequency'] = HDF5_SAVE_FREQUENCY
    parameters['hdf5_use_existing'] = HDF5_USE_EXISTING
    parameters['use_cache'] = USE_CACHE
    return directory, features, algorithms, parameters
if __name__ == '__main__':
    # NOTE(review): get_args() returns a 4-tuple (directory, features,
    # algorithms, parameters) which is passed to main() as ONE positional
    # argument — presumably main() unpacks it internally; confirm its signature.
    args = get_args()
    print("Algorithm details and Results:", file=sys.stderr)
    print(main(args), file=sys.stdout)
    sys.exit(0)
| [
"nikhil@gmail.com"
] | nikhil@gmail.com |
2bcf76b268dcc14f93c164f38f79c9fac0b642c1 | 93d8f6332992d7f1574666096e956d47a2c23754 | /src/safe.py | 98b34c1ad9ca33b5b925d656a343e2388d310014 | [
"BSD-3-Clause"
] | permissive | aliceafterall/cocomud | d41a5a8964f1af17cacfb0d0dcdd4b5530bb1bc5 | b2b7a7b5f93542b8e94c0eec00c4dcd7bd96cff1 | refs/heads/master | 2023-07-20T09:34:49.410221 | 2017-08-03T15:16:05 | 2017-08-03T15:16:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,816 | py | # Copyright (c) 2016, LE GOFF Vincent
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of ytranslate nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""This file contains the 'safe' system of CocoMUD, ways to crypt/encrypt.
This feature requires:
pbkdf2
Crypto
The module contains a class named 'Safe', that should be insantiated
in order to manipulate the encrypting
/decrypting mechanism. This class requires a passphrase in
argument. You can insantiate it as follows:
>>> from safe import Safe
>>> safe = Safe(file=".passphrase")
>>> # (If the file doesn't exist, it will be created with an auto-generated
>>> # passphrase.)
>>> # Alternatively you can specify the passphrase directly
>>> safe = Safe(passphrase="Dsm18fvdjP9sz801,9DJA.1356gndYJz987v")
>>> # Store encrypted data
>>> safe.store("login", "kredh")
>>> safe.store("password", "YoudWishIToldYou")
>>> # Retrieve the data (can be later)
login = safe.retrieve("login")
password = safe.retrieve("password")
Note that datas that is not a string (like a bool or float) will be
saved as unprotected data. If you want to save it encrypted, you can
convert it to string.
"""
import base64
import os
import pickle
from Crypto.Cipher import AES
from pbkdf2 import PBKDF2
class Safe:
    """A safe object, to encrypt/decrypt information.
    The Safe class requires a passphrase to be created. This is a
    string of characters that adds to the security of encryption.
    Obviously, it needs to remain similar to decrypt information that
    has been encrypted. Other optional parameters are also possible:
    secret: the path of the file in which to store crypted data.

    Values are encrypted with AES-CBC; the AES key is derived from the
    passphrase with PBKDF2 using a per-entry salt derived from the entry's
    key name. The backing store is a pickled dict written to `secret`.
    NOTE: uses `basestring`, so this class is Python 2 code.
    """
    def __init__(self, passphrase=None, file=None, secret="data.crypt",
            load=True):
        # Constant seed mixed into PBKDF2 when deriving per-key salts
        self.salt_seed = 'mkhgts465wef4fwtdd'
        self.passphrase = passphrase
        self.secret = secret
        # All sizes below are in bytes
        self.passphrase_size = 64
        self.key_size = 32
        self.block_size = 16
        self.iv_size = 16
        self.salt_size = 8
        # In-memory store: key -> ciphertext (strings) or raw value (non-strings)
        self.data = {}
        # Prefer a passphrase stored in `file`, if that file exists
        if file and os.path.exists(file):
            with open(file, "r") as pass_file:
                self.passphrase = pass_file.read()
        # No passphrase supplied or read: auto-generate one and persist it
        if not self.passphrase:
            self.passphrase = base64.b64encode(os.urandom(
                    self.passphrase_size))
            if file:
                with open(file, "w") as pass_file:
                    pass_file.write(self.passphrase)
        # Load the secret file
        if load:
            self.load()
    def get_salt_from_key(self, key):
        # Deterministic per-key salt so the same key always maps to the same salt
        return PBKDF2(key, self.salt_seed).read(self.salt_size)
    def encrypt(self, plaintext, salt):
        """Pad plaintext, then encrypt it.
        The encryption occurs with a new, randomly initialised cipher.
        This method will not preserve trailing whitespace in plaintext!.
        """
        # Initialise Cipher Randomly
        init_vector = os.urandom(self.iv_size)
        # Prepare cipher key
        key = PBKDF2(self.passphrase, salt).read(self.key_size)
        cipher = AES.new(key, AES.MODE_CBC, init_vector)
        bs = self.block_size
        # Space-pad to a whole number of AES blocks; IV is prepended so
        # decrypt() can recover it from the ciphertext itself
        return init_vector + cipher.encrypt(plaintext + \
            " " * (bs - (len(plaintext) % bs)))
    def decrypt(self, ciphertext, salt):
        """Reconstruct the cipher object and decrypt.
        This method will not preserve trailing whitespace in the
        retrieved value.
        """
        # Prepare cipher key
        key = PBKDF2(self.passphrase, salt).read(self.key_size)
        # Extract IV
        init_vector = ciphertext[:self.iv_size]
        ciphertext = ciphertext[self.iv_size:]
        cipher = AES.new(key, AES.MODE_CBC, init_vector)
        # Strip the space padding added by encrypt()
        return cipher.decrypt(ciphertext).rstrip(" ")
    def load(self):
        """Load the data from the 'secret' file if exists."""
        if os.path.exists(self.secret):
            with open(self.secret, "rb") as file:
                upic = pickle.Unpickler(file)
                self.data = upic.load()
            if not isinstance(self.data, dict):
                raise ValueError("the data contained in the file " \
                    "'{}' is not a dictionary".format(self.secret))
    def retrieve(self, key, *default):
        """Retrieve and decrypt the specified key.
        If the key isn't present in the dictionary, either
        return default if specified, or raise a KeyError.
        If the value at this location isn't a string, return it as is.
        """
        if key not in self.data:
            if default:
                return default[0]
            raise KeyError(key)
        value = self.data[key]
        if isinstance(value, basestring):
            salt = self.get_salt_from_key(key)
            return self.decrypt(value, salt)
        return value
    def store(self, key, value):
        """Store the key in the file.
        If the key already exists, replaces it.
        If the value is not a string or unicode, it will be stored
        WITHOUT encryption.
        """
        if isinstance(value, basestring):
            salt = self.get_salt_from_key(key)
            crypted = self.encrypt(value, salt)
            self.data[key] = crypted
        else:
            self.data[key] = value
        # Write the new data in the file
        with open(self.secret, "wb") as file:
            pic = pickle.Pickler(file)
            pic.dump(self.data)
| [
"vincent.legoff.srs@gmail.com"
] | vincent.legoff.srs@gmail.com |
a4b8a7c035036e9e0e83c562c498c103c3a7ba94 | 7d72ece1edb0009e2f5dadd96838e6fa4d020c86 | /src/follow_road/MyAlgorithm.py | 78146757492d8d71d43311729f3470639eea528e | [] | no_license | RoboticsLabURJC/2018-phd-luis-caiza | d188a9621c7339349dd32ba3f382010daeb49b95 | 834e93889c8b8aacdf8edee0206341154ef17073 | refs/heads/master | 2020-03-30T02:05:28.334834 | 2019-04-24T19:32:17 | 2019-04-24T19:32:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,509 | py | import threading
import time
from datetime import datetime
import cv2
import numpy as np
import math
# Target period (ms) of the MyAlgorithm control loop
time_cycle = 80
#value_min_HSV = np.array([20, 0, 0]) #for follow road original
#value_max_HSV = np.array([100, 130, 130]) #for follow road original
# HSV threshold bounds used to segment the target colour in both cameras
value_min_HSV=np.array([0, 50, 50]) # red color used in follow a ball
value_max_HSV=np.array([10, 255, 255]) #red color used in follow a ball
# Module-level velocity defaults (execute() assigns its own locals)
vel_front = 0
vel_z = 0
vel_yaw = 0
class MyAlgorithm(threading.Thread):
    """Periodic drone control loop: segments a coloured target in the
    frontal camera image and steers the drone toward it via simple
    proportional control; also publishes filtered debug images for the
    ventral and frontal cameras.
    """
    def __init__(self, drone):
        # drone: interface providing camera images and sendCMDVelocities()
        self.drone = drone
        self.height = 240
        self.width = 320
        self.yaw = 0.0
        # Latest filtered images, shared with the GUI thread (guarded by lock)
        self.imageV=None
        self.imageF =None
        self.stop_event = threading.Event()
        self.kill_event = threading.Event()
        self.lock = threading.Lock()
        threading.Thread.__init__(self, args=self.stop_event)
    def setImageFilteredVentral(self, image):
        self.lock.acquire()
        self.imageV=image
        self.lock.release()
    def getImageFilteredVentral(self):
        self.lock.acquire()
        tempImageV=self.imageV
        self.lock.release()
        return tempImageV
    def setImageFilteredFrontal(self, image):
        self.lock.acquire()
        self.imageF=image
        self.lock.release()
    def getImageFilteredFrontal(self):
        self.lock.acquire()
        tempImageF=self.imageF
        self.lock.release()
        return tempImageF
    def run (self):
        # Main loop: call execute() roughly every time_cycle milliseconds
        # until kill() is requested; stop() pauses without exiting the loop.
        self.stop_event.clear()
        while (not self.kill_event.is_set()):
            start_time = datetime.now()
            if not self.stop_event.is_set():
                self.execute()
            finish_Time = datetime.now()
            dt = finish_Time - start_time
            # Elapsed time in milliseconds
            ms = (dt.days * 24 * 60 * 60 + dt.seconds) * 1000 + dt.microseconds / 1000.0
            if (ms < time_cycle):
                time.sleep((time_cycle - ms) / 1000.0)
    def stop (self):
        self.stop_event.set()
    def play (self):
        # Resume if already running; otherwise start the thread
        if self.is_alive():
            self.stop_event.clear()
        else:
            self.start()
    def kill (self):
        self.kill_event.set()
    def execute(self):
        # Add your code here
        input_imageV = self.drone.getImageVentral().data
        input_imageF = self.drone.getImageFrontal().data
        if input_imageV is not None:
            image_HSV_V = cv2.cvtColor(input_imageV, cv2.COLOR_RGB2HSV)
            #Treshold image
            image_HSV_filtered_V = cv2.inRange(image_HSV_V, value_min_HSV, value_max_HSV)
            #Reducing noise
            opening_V = cv2.morphologyEx(image_HSV_filtered_V, cv2.MORPH_OPEN, np.ones((5,5),np.uint8))
            closing_V = cv2.morphologyEx(opening_V, cv2.MORPH_CLOSE, np.ones((10,10),np.uint8))
            #Filtered image
            image_HSV_filtered_Mask_V = np.dstack((closing_V, closing_V, closing_V))
            #drawing contours
            imgray_V = cv2.cvtColor(image_HSV_filtered_Mask_V, cv2.COLOR_BGR2GRAY)
            ret_V, thresh_V = cv2.threshold(imgray_V, 127, 255, 0)
            _, contours_V, hierarchy_V = cv2.findContours(thresh_V, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
            cv2.drawContours(image_HSV_filtered_Mask_V, contours_V, -1, (0,255,0), 3)
            #Getting the centre of the road
            # NOTE(review): nesting of the frontal branch reconstructed from a
            # whitespace-stripped source; both final setImage* calls need both
            # masks, so the frontal block is assumed nested inside this one.
            if input_imageF is not None:
                image_HSV_F = cv2.cvtColor(input_imageF, cv2.COLOR_RGB2HSV)
                #Treshold image
                image_HSV_filtered_F = cv2.inRange(image_HSV_F, value_min_HSV, value_max_HSV)
                #Reducing noise
                opening_F = cv2.morphologyEx(image_HSV_filtered_F, cv2.MORPH_OPEN, np.ones((5,5),np.uint8))
                image_HSV_filtered_Mask_F = np.dstack((opening_F, opening_F, opening_F))
                #drawing contours
                imgray_F = cv2.cvtColor(image_HSV_filtered_Mask_F, cv2.COLOR_BGR2GRAY)
                ret_F, thresh_F = cv2.threshold(imgray_F, 127, 255, 0)
                _, contours_F, hierarchy_F = cv2.findContours(thresh_F, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
                cv2.drawContours(image_HSV_filtered_Mask_F, contours_F, -1, (0,255,0), 3)
                #Getting the centre of the road
                # Pick the largest of the first two contours (or the only one);
                # if none exist, hover in place and zero the moments
                area = []
                for pic, contour in enumerate(contours_F):
                    area.append(cv2.contourArea(contour))
                if len(area) > 1:
                    if area[0] < area[1]:
                        M = cv2.moments(contours_F[1])
                    else:
                        M = cv2.moments(contours_F[0])
                else:
                    try:
                        M = cv2.moments(contours_F[0])
                    except IndexError:
                        self.drone.sendCMDVelocities(0,0,0,0)
                        M = cv2.moments(0)
                if int(M['m00']) != 0:
                    #print("Road detected")
                    # Centroid of the selected contour
                    cx = int(M['m10']/M['m00'])
                    cy = int(M['m01']/M['m00'])
                    # Proportional control: area error -> forward speed,
                    # vertical error -> climb rate, horizontal error -> yaw
                    vel_front = 0.0001 * (3000 - int(M['m00']))
                    vel_z = 0.01 * (110 - cy)
                    vel_yaw = 0.02 * (140 - cx)
                    self.drone.sendCMDVelocities(0,vel_front,vel_z,vel_yaw)
                    print("cx: " + str(cx) + " cy: " + str(cy) + " area: " + str(M['m00']) + " vel_z " + str(vel_z))
                    self.yaw = int(cx)
                    #drawing the center
                    cv2.circle(image_HSV_filtered_Mask_F, (cx, cy), 7, np.array([255, 0, 0]), -1)
                #printing the filtered image
                self.setImageFilteredVentral(image_HSV_filtered_Mask_V)
                self.setImageFilteredFrontal(image_HSV_filtered_Mask_F)
| [
"lcaiza2012@gmail.com"
] | lcaiza2012@gmail.com |
c38a25da5214e8923f65e45fd2fa196c29475700 | c1df38d160e3c052ae495dd41d0f18db5e9ead96 | /tesisjoha/recomendaciones/admin.py | 5627753334f1f38e2f9c034edc2dc6eb1a5314e0 | [] | no_license | JohannaOrozco/recomendador | eaf858ca26284bd267a71e8a5a0063d2bd5c3136 | e6d745916c05b72b11c3a59e05f4f4a7b2ff93ce | refs/heads/master | 2020-06-11T09:36:11.018559 | 2019-08-24T01:52:30 | 2019-08-24T01:52:30 | 193,918,762 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 700 | py | from django.contrib import admin
from recomendaciones.models import Estudiante, ColegioClasificacion, EstudianteNacionalidad, EstudianteGenero, EstudianteEstadoCivil, EstudianteEstrato, EstudianteTieneFinanciacion, EstudiantePerteneceMinoriaEtnica, EstudianteTieneDiscapacidad
# Register your models here.
# Register every model of the app with the default admin site, in the
# same order as before (order only affects admin registration sequence).
for _model in (
    Estudiante,
    EstudianteNacionalidad,
    EstudianteGenero,
    EstudianteEstadoCivil,
    EstudianteEstrato,
    EstudianteTieneFinanciacion,
    EstudiantePerteneceMinoriaEtnica,
    EstudianteTieneDiscapacidad,
    ColegioClasificacion,
):
    admin.site.register(_model)
| [
"jarorid@gmail.com"
] | jarorid@gmail.com |
a760f514007324210f920f5d5ed401c4529ad032 | 350616248bf4384ef8c11d97a1af628ec64aa427 | /AIRS_stuff.py | ef547189c95db438a9a32438e496257f4c402109 | [] | no_license | jibbals/stations | 474956746ad20094fffc91d1fcb38c066d3997e7 | 557ec0e1d478a9da15e6a108004e8d3a08ce6571 | refs/heads/master | 2021-01-19T06:34:54.065136 | 2017-05-25T08:31:23 | 2017-05-25T08:31:23 | 62,143,927 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,809 | py | # Create images from AIRS of CO total column on Event days
#
# plot library
import matplotlib
# don't display stuff, we are just saving to file:
matplotlib.use('Agg')
import matplotlib.pyplot as plt
# local libraries
import fio as fio
# datetime library
from datetime import datetime
import netCDF4 as nc
import numpy as np
from mpl_toolkits.basemap import Basemap
from glob import glob
def read_AIRS_day(date):
    '''
    Read one day of AIRS CO data.

    Parameters:
        date: datetime for the day to read; used to build the filename glob.
    Returns:
        (lats, lons, totco) arrays from the matching netCDF file, or
        ([-1], [-1], [-1]) when no file exists for that date.
    '''
    airsfolder = "/hpc/data/chemistry/CAC/GEOS_Chem/Satellite/AIRS/CO/daily/"
    # find single file match, eg: "*AIRS.2004.01.01.*"
    pattern=date.strftime("*AIRS.%Y.%m.%d.*")
    filename = glob(airsfolder+pattern)
    if len(filename) == 0:
        print("MISSING DATE: "+pattern)
        return([-1],[-1],[-1])
    filename = filename[0]
    # open file
    fh = nc.Dataset(filename, mode='r')
    try:
        # pull out variables of interest
        lons=fh.variables['Longitude'][:]
        lats=fh.variables['Latitude'][:]
        # Ascending is during the day at 130pm local time
        totco = fh.variables['TotCO_A'][:]
    finally:
        # fix: close the file even if a variable is missing or unreadable
        # (previously an exception here leaked the open dataset handle)
        fh.close()
    return (lats,lons,totco)
def plot_AIRS_day(date):
    '''
    Create a plot from the AIRS dataset for one day.

    Draws the ascending total-column CO field on a Mercator Basemap
    covering roughly lon [-140, 175], lat [-80, 20], with a fixed
    colour scale so daily images are comparable.
    Returns the Basemap instance, or -1 when no data exists for `date`.
    '''
    lats,lons,totco = read_AIRS_day(date)
    # read_AIRS_day signals a missing day with ([-1], [-1], [-1])
    if lats[0]==-1: return (-1)
    # plot stuff
    lon0=lons.mean()
    lat0=lats.mean()
    # width, height in meters,
    #lon = -137.5, 172.5
    #lat = 15.5, -75.5
    m=Basemap(llcrnrlat=-80, urcrnrlat=20,
              llcrnrlon=-140, urcrnrlon=175,
              resolution='l',projection='merc',
              lat_0=lat0, lon_0=lon0)
    # lat lon are 1D, basemap uses 2D mesh
    lon,lat = np.meshgrid(lons,lats)
    xi, yi = m(lon,lat)
    # draw the CO total column onto the map
    cs = m.pcolor(xi,yi,np.squeeze(totco)) # squeeze removes any 1 length dimensions
    # set up consistent colour map (the colour bar)
    cmap = plt.cm.jet # blue to red
    plt.set_cmap(cmap)
    plt.clim(1e18, 3.5e18) # bounds for cmap
    #add coastlines and equator
    m.drawcoastlines()
    m.drawparallels([0], labels=[0,0,0,0])
    #add title, colorbar
    cb=m.colorbar(cs,"right",size="5%", pad="2%")
    cb.set_label('CO')
    #ax.set_title('Total Column Ascending '+str(date))
    plt.title('Total Column CO'+date.strftime("%Y%m%d"))
    return(m)
def plot_all_events_AIRS(test=False):
    '''
    Loop through the sonde sites and save one AIRS CO map per event day,
    with the site location marked. Files go to images/AIRS/<site>_<date>.png;
    days with no AIRS data get an empty plot saved with a ".missing.png" suffix.
    When test=True, return after the first image (smoke test).
    '''
    # Get the site data for each site
    all_sonde_files=[fio.read_sonde(s) for s in range(3)]
    # saving to here:
    imagesfolder="images/AIRS/"
    for sonde in all_sonde_files:
        # sonde.einds indexes the event days within sonde.dates
        for i in sonde.einds:
            date=sonde.dates[i]
            dstr=date.strftime("%Y%m%d")
            outf="%s%s_%s.png"%(imagesfolder,sonde.name,dstr)
            # Set up figure window
            fig=plt.figure(figsize=(12,6))
            # Plotted in seperate function:
            m=plot_AIRS_day(date)
            # if no airs data on this day then save an empty plot
            if (m == -1) :
                plt.savefig(outf+'.missing.png')
                plt.close(fig)
                continue
            # add site marker
            x,y = m(sonde.lon, sonde.lat)
            m.plot(x, y, 'mo', markersize=6 )
            #save
            plt.savefig(outf, bbox_inches='tight')
            print ("Saved "+outf)
            plt.close(fig)
            if test:
                return ()
def check_high_CO(date, site ,radius, threshold=2e18):
    '''
    If there is a column with greather than threshold [molecules/cm2] of CO,
    within radius of site then return True
    '''
    # NOTE(review): unfinished stub — it only unpacks the site coordinates
    # and falls off the end (implicitly returning None, never True/False).
    lat=site.lat
    lon=site.lon
if __name__ == "__main__":
    # Smoke-test mode: render only the first event image, then stop.
    plot_all_events_AIRS(test=True)
| [
"jwg366@uowmail.edu.au"
] | jwg366@uowmail.edu.au |
eed067e68e68bc9403d6958e844746a118bc601f | d6ce2f6bdddef373b9bbdf26d567307ce3667103 | /scripts/utils_specs/convert_spec_csv_to_json.py | 0db7b03c0e21edd6637ca3d51e06b9ffc1e88e4d | [
"MIT"
] | permissive | hezbranch/time_series_prediction | 505007fb248fe09f56943c3ad705a52ce77a193c | 9bffc3f279cbfaa3ec0acc937d15610c19e0975e | refs/heads/master | 2023-01-19T12:27:24.615657 | 2020-10-30T08:59:05 | 2020-10-30T08:59:05 | 296,434,092 | 1 | 0 | MIT | 2020-09-17T20:22:09 | 2020-09-17T20:22:08 | null | UTF-8 | Python | false | false | 2,503 | py | import argparse
import pandas as pd
import json
import copy
import os
if __name__ == '__main__':
    # Expand row/sheet JSON templates against per-sheet CSV specs, writing one
    # JSON schema file per sheet. Template placeholders use {{name}} syntax.
    parser = argparse.ArgumentParser()
    parser.add_argument("--config_json_path", type=str)
    parser.add_argument("--output_dir", type=str)
    parser.add_argument("--row_template_json", type=str, default='row_template.json')
    parser.add_argument("--sheet_template_json", type=str, default='sheet_template.json')
    args = parser.parse_args()
    with open(args.config_json_path, 'r') as f:
        config = json.load(f)
    with open(args.row_template_json, 'r') as f:
        row_template = json.load(f)
    with open(args.sheet_template_json, 'r') as f:
        sheet_template = json.load(f)
    for gid, sheet_name, csv_filename in zip(
            config['spec_gid_list'],
            config['spec_sheet_name_list'],
            config['spec_csv_filename_list']):
        # Fresh copy of the sheet template with its name/path filled in
        sheet = copy.deepcopy(sheet_template)
        sheet['name'] = sheet['name'].replace("{{sheet_name}}", sheet_name)
        sheet['path'] = sheet['path'].replace("{{csv_filename}}", csv_filename)
        out_csv_path = os.path.join(
            args.output_dir,
            config['output_csv_path_pattern'].replace("{{sheet_name}}", sheet_name)
            )
        out_json_path = os.path.join(
            args.output_dir,
            config['output_json_path_pattern'].replace("{{sheet_name}}", sheet_name)
            )
        # Read everything as strings so template substitution is lossless
        csv_df = pd.read_csv(out_csv_path, dtype=str)
        row_list = []
        for rowid, row_df in csv_df.iterrows():
            row = copy.deepcopy(row_template)
            for k, v in row_template.items():
                # Dict-valued fields are substituted via their repr and
                # re-parsed as JSON afterwards
                if isinstance(v, dict):
                    v = v.__repr__()
                    isdict = True
                else:
                    isdict = False
                assert isinstance(v, str)
                # Replace each {{varname}} with the matching CSV column value
                while v.count("{{") > 0:
                    start = v.find("{{")
                    stop = v.find("}}", start)
                    varname = v[start+2:stop]
                    v = v.replace("{{%s}}" % varname, str(row_df[varname]))
                if isdict:
                    # repr() emits single quotes; JSON needs double quotes
                    row[k] = json.loads(v.replace("'", '"'))
                else:
                    row[k] = v
            row_list.append(row)
        sheet['schema']['fields'] = row_list
        sheet = json.dumps(sheet, indent=4, sort_keys=False)
        with open(out_json_path, 'w') as f:
            f.write(sheet)
        print("Wrote to file: %s" % out_json_path)
print("Wrote to file: %s" % out_json_path)
| [
"mike@michaelchughes.com"
] | mike@michaelchughes.com |
bf91e11476896b1f49b83e1efbe0cf283e06f334 | 0d2fc31fd18b0a89fc6e3f2e13b3f651218acb7f | /.local/bin/cxfreeze-quickstart | 62f1c0dea1596cf6d6af3eb0891298c49d6d24c7 | [] | no_license | saad909/dotfiles | 5769da5f43c9a0e4af57ff79c4eb626ea8a6a31e | 6d6389c149330dbaa352e6f5bedaad22ce73cdcf | refs/heads/main | 2023-04-03T04:10:30.641149 | 2021-03-27T19:32:00 | 2021-03-27T19:32:00 | 291,691,462 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 220 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from cx_Freeze.setupwriter import main
if __name__ == '__main__':
    # Strip setuptools' "-script.pyw"/".exe" wrapper suffix from argv[0]
    # before handing control to cx_Freeze's quickstart entry point
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"abuhurraira.saad909@gmail.com"
] | abuhurraira.saad909@gmail.com | |
672c9d2c9b1f1990beb4e2c02173c7fa753b57db | adf39b712b5df8adbe32279b07a7a6513b09ab0b | /set-periodic-raw-logging.py | 8af4582c2d72e70540307e044677fc9ef0bb5a65 | [
"MIT"
] | permissive | jkua/ubx | b6c2cbef0053f8a3defcc440f0e8c9685c5f02c5 | 7876cdbaa9f8f93195db67eb087dec79418b24ea | refs/heads/master | 2023-03-14T10:52:03.767083 | 2021-03-06T01:40:54 | 2021-03-06T01:40:54 | 116,107,208 | 4 | 0 | MIT | 2018-12-21T19:04:19 | 2018-01-03T07:38:33 | Python | UTF-8 | Python | false | false | 2,003 | py | #!/usr/bin/python
# Copyright (C) 2010 Timo Juhani Lindfors <timo.lindfors@iki.fi>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Set periodic reporting of RAW information on or off.
import ubx
import struct
import calendar
import os
import gobject
import logging
import sys
import socket
import time
def callback(ty, packet):
    """Debug hook passed to the UBX parser: print each message type
    together with its decoded payload."""
    received = [ty, packet]
    print("callback {0}".format(repr(received)))
if __name__ == "__main__":
    # Usage: script (on|off) — toggles periodic RXM-RAW / RXM-SFRB output
    assert len(sys.argv) == 2
    t = ubx.Parser(callback)
    if sys.argv[1] == "on":
        # Rate 1: emit the raw-measurement messages every navigation epoch
        t.send("CFG-MSG", 3, {"Class" : ubx.CLIDPAIR["RXM-RAW"][0] , "MsgID" : ubx.CLIDPAIR["RXM-RAW"][1] , "Rate" : 1 })
        t.send("CFG-MSG", 3, {"Class" : ubx.CLIDPAIR["RXM-SFRB"][0] , "MsgID" : ubx.CLIDPAIR["RXM-SFRB"][1] , "Rate" : 1 })
    else:
        # Rate 0: disable the periodic messages
        t.send("CFG-MSG", 3, {"Class" : ubx.CLIDPAIR["RXM-RAW"][0] , "MsgID" : ubx.CLIDPAIR["RXM-RAW"][1] , "Rate" : 0 })
        t.send("CFG-MSG", 3, {"Class" : ubx.CLIDPAIR["RXM-SFRB"][0] , "MsgID" : ubx.CLIDPAIR["RXM-SFRB"][1] , "Rate" : 0 })
| [
"timo.lindfors@iki.fi"
] | timo.lindfors@iki.fi |
90118b22999d0850d70f1bd9e39f9ebafee8e412 | 6188f8ef474da80c9e407e8040de877273f6ce20 | /examples/docs_snippets/docs_snippets/guides/dagster/development_to_production/resources/resources_v1.py | c1339b0fabc7baf6e734f9610d9ced0cb55cf53e | [
"Apache-2.0"
] | permissive | iKintosh/dagster | 99f2a1211de1f3b52f8bcf895dafaf832b999de2 | 932a5ba35263deb7d223750f211c2ddfa71e6f48 | refs/heads/master | 2023-01-24T15:58:28.497042 | 2023-01-20T21:51:35 | 2023-01-20T21:51:35 | 276,410,978 | 1 | 0 | Apache-2.0 | 2020-07-01T15:19:47 | 2020-07-01T15:13:56 | null | UTF-8 | Python | false | false | 655 | py | # start_resource
# resources.py
from typing import Any, Dict, Optional
import requests
class HNAPIClient:
"""
Hacker News client that fetches live data
"""
def fetch_item_by_id(self, item_id: int) -> Optional[Dict[str, Any]]:
"""Fetches a single item from the Hacker News API by item id."""
item_url = f"https://hacker-news.firebaseio.com/v0/item/{item_id}.json"
item = requests.get(item_url, timeout=5).json()
return item
def fetch_max_item_id(self) -> int:
return requests.get(
"https://hacker-news.firebaseio.com/v0/maxitem.json", timeout=5
).json()
# end_resource
| [
"noreply@github.com"
] | iKintosh.noreply@github.com |
cad332858fb916aae94cf392338574f290c1bdce | 1e3cf9c1341083675fa9b716f11c2834e2d18374 | /src/pyphoplacecellanalysis/External/pyqtgraph/examples/VideoSpeedTest.py | bb3ff76da268f9820a6aa048e58f05c851b9e606 | [
"MIT"
] | permissive | CommanderPho/pyPhoPlaceCellAnalysis | a60313c98b3ad2834c2bf101f3463714df092cf5 | 212399d826284b394fce8894ff1a93133aef783f | refs/heads/master | 2023-09-01T20:27:43.792099 | 2023-09-01T03:24:19 | 2023-09-01T03:24:19 | 444,885,155 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,668 | py | """
Tests the speed of image updates for an ImageItem and RawImageWidget.
The speed will generally depend on the type of data being shown, whether
it is being scaled and/or converted by lookup table, and whether OpenGL
is used by the view widget
"""
import argparse
import sys
from time import perf_counter
import numpy as np
import pyphoplacecellanalysis.External.pyqtgraph as pg
from pyphoplacecellanalysis.External.pyqtgraph.Qt import QT_LIB, QtCore, QtGui, QtWidgets
pg.setConfigOption('imageAxisOrder', 'row-major')
import importlib
ui_template = importlib.import_module(f'VideoTemplate_{QT_LIB.lower()}')
try:
import cupy as cp
pg.setConfigOption("useCupy", True)
_has_cupy = True
except ImportError:
cp = None
_has_cupy = False
try:
import numba
_has_numba = True
except ImportError:
numba = None
_has_numba = False
try:
from pyphoplacecellanalysis.External.pyqtgraph.widgets.RawImageWidget import RawImageGLWidget
except ImportError:
RawImageGLWidget = None
parser = argparse.ArgumentParser(description="Benchmark for testing video performance")
parser.add_argument('--cuda', default=False, action='store_true', help="Use CUDA to process on the GPU", dest="cuda")
parser.add_argument('--dtype', default='uint8', choices=['uint8', 'uint16', 'float'], help="Image dtype (uint8, uint16, or float)")
parser.add_argument('--frames', default=3, type=int, help="Number of image frames to generate (default=3)")
parser.add_argument('--image-mode', default='mono', choices=['mono', 'rgb'], help="Image data mode (mono or rgb)", dest='image_mode')
parser.add_argument('--levels', default=None, type=lambda s: tuple([float(x) for x in s.split(',')]), help="min,max levels to scale monochromatic image dynamic range, or rmin,rmax,gmin,gmax,bmin,bmax to scale rgb")
parser.add_argument('--lut', default=False, action='store_true', help="Use color lookup table")
parser.add_argument('--lut-alpha', default=False, action='store_true', help="Use alpha color lookup table", dest='lut_alpha')
parser.add_argument('--size', default='512x512', type=lambda s: tuple([int(x) for x in s.split('x')]), help="WxH image dimensions default='512x512'")
args = parser.parse_args(sys.argv[1:])
if RawImageGLWidget is not None:
# don't limit frame rate to vsync
sfmt = QtGui.QSurfaceFormat()
sfmt.setSwapInterval(0)
QtGui.QSurfaceFormat.setDefaultFormat(sfmt)
app = pg.mkQApp("Video Speed Test Example")
win = QtWidgets.QMainWindow()
win.setWindowTitle('pyqtgraph example: VideoSpeedTest')
ui = ui_template.Ui_MainWindow()
ui.setupUi(win)
win.show()
if RawImageGLWidget is None:
ui.rawGLRadio.setEnabled(False)
ui.rawGLRadio.setText(ui.rawGLRadio.text() + " (OpenGL not available)")
else:
ui.rawGLImg = RawImageGLWidget()
ui.stack.addWidget(ui.rawGLImg)
# read in CLI args
ui.cudaCheck.setChecked(args.cuda and _has_cupy)
ui.cudaCheck.setEnabled(_has_cupy)
ui.numbaCheck.setChecked(_has_numba and pg.getConfigOption("useNumba"))
ui.numbaCheck.setEnabled(_has_numba)
ui.framesSpin.setValue(args.frames)
ui.widthSpin.setValue(args.size[0])
ui.heightSpin.setValue(args.size[1])
ui.dtypeCombo.setCurrentText(args.dtype)
ui.rgbCheck.setChecked(args.image_mode=='rgb')
ui.maxSpin1.setOpts(value=255, step=1)
ui.minSpin1.setOpts(value=0, step=1)
levelSpins = [ui.minSpin1, ui.maxSpin1, ui.minSpin2, ui.maxSpin2, ui.minSpin3, ui.maxSpin3]
if args.cuda and _has_cupy:
xp = cp
else:
xp = np
if args.levels is None:
ui.scaleCheck.setChecked(False)
ui.rgbLevelsCheck.setChecked(False)
else:
ui.scaleCheck.setChecked(True)
if len(args.levels) == 2:
ui.rgbLevelsCheck.setChecked(False)
ui.minSpin1.setValue(args.levels[0])
ui.maxSpin1.setValue(args.levels[1])
elif len(args.levels) == 6:
ui.rgbLevelsCheck.setChecked(True)
for spin,val in zip(levelSpins, args.levels):
spin.setValue(val)
else:
raise ValueError("levels argument must be 2 or 6 comma-separated values (got %r)" % (args.levels,))
ui.lutCheck.setChecked(args.lut)
ui.alphaCheck.setChecked(args.lut_alpha)
#ui.graphicsView.useOpenGL() ## buggy, but you can try it if you need extra speed.
vb = pg.ViewBox()
ui.graphicsView.setCentralItem(vb)
vb.setAspectLocked()
img = pg.ImageItem()
vb.addItem(img)
LUT = None
def updateLUT():
    """Rebuild the global lookup table from the gradient editor widget."""
    global LUT, ui
    dtype = ui.dtypeCombo.currentText()
    # 8-bit data indexes the LUT directly (256 entries); wider integer and
    # float data use a finer 4096-entry table.
    if dtype == 'uint8':
        n = 256
    else:
        n = 4096
    LUT = ui.gradient.getLookupTable(n, alpha=ui.alphaCheck.isChecked())
    if _has_cupy and xp == cp:
        LUT = cp.asarray(LUT)  # keep the LUT on the GPU next to the image data
ui.gradient.sigGradientChanged.connect(updateLUT)
updateLUT()
ui.alphaCheck.toggled.connect(updateLUT)
def updateScale():
    """Enable the per-channel (G/B) level spinboxes only in RGB-levels mode."""
    per_channel = ui.rgbLevelsCheck.isChecked()
    for spin in levelSpins[2:]:
        spin.setEnabled(per_channel)
updateScale()
ui.rgbLevelsCheck.toggled.connect(updateScale)
cache = {}
def mkData():
    """(Re)generate the benchmark image stack for the current UI settings.

    Builds `frames` random frames of the selected dtype/size/colour mode into
    the global `data` array (NumPy or CuPy depending on `xp`), caching one
    result so toggling back and forth does not regenerate.  Ends by refreshing
    the LUT and the size label.
    """
    with pg.BusyCursor():
        global data, cache, ui, xp
        frames = ui.framesSpin.value()
        width = ui.widthSpin.value()
        height = ui.heightSpin.value()
        # Cache key covers everything that affects the generated array.
        cacheKey = (ui.dtypeCombo.currentText(), ui.rgbCheck.isChecked(), frames, width, height)
        if cacheKey not in cache:
            # Pick distribution parameters and the clip ceiling per dtype.
            if cacheKey[0] == 'uint8':
                dt = xp.uint8
                loc = 128
                scale = 64
                mx = 255
            elif cacheKey[0] == 'uint16':
                dt = xp.uint16
                loc = 4096
                scale = 1024
                mx = 2**16 - 1
            elif cacheKey[0] == 'float':
                dt = xp.float32
                loc = 1.0
                scale = 0.1
                mx = 1.0
            else:
                raise ValueError(f"unable to handle dtype: {cacheKey[0]}")
            chan_shape = (height, width)
            if ui.rgbCheck.isChecked():
                frame_shape = chan_shape + (3,)
            else:
                frame_shape = chan_shape
            data = xp.empty((frames,) + frame_shape, dtype=dt)
            # Fill channel-by-channel via a flat (N, H, W) view of the stack.
            view = data.reshape((-1,) + chan_shape)
            for idx in range(view.shape[0]):
                subdata = xp.random.normal(loc=loc, scale=scale, size=chan_shape)
                # note: gaussian filtering has been removed as it slows down array
                # creation greatly.
                if cacheKey[0] != 'float':
                    xp.clip(subdata, 0, mx, out=subdata)
                view[idx] = subdata
            # Burn a fixed bright marker into every frame (orientation check).
            data[:, 10:50, 10] = mx
            data[:, 48, 9:12] = mx
            data[:, 47, 8:13] = mx
            cache = {cacheKey: data} # clear to save memory (but keep one to prevent unnecessary regeneration)
        data = cache[cacheKey]
        updateLUT()
        updateSize()
def updateSize():
    """Refresh the memory-size label and fit the view box to one frame."""
    n_frames = ui.framesSpin.value()
    w = ui.widthSpin.value()
    h = ui.heightSpin.value()
    itemsize = xp.dtype(str(ui.dtypeCombo.currentText())).itemsize
    channels = 3 if ui.rgbCheck.isChecked() else 1
    ui.sizeLabel.setText('%d MB' % (n_frames * w * h * channels * itemsize / 1e6))
    vb.setRange(QtCore.QRectF(0, 0, w, h))
def noticeCudaCheck():
    """Switch the array backend between NumPy and CuPy, then regenerate data."""
    global xp, cache
    cache = {}  # cached frames belong to the previous backend; drop them
    if ui.cudaCheck.isChecked():
        if _has_cupy:
            xp = cp
        else:
            # CuPy is not installed: fall back and untick so the UI is honest.
            xp = np
            ui.cudaCheck.setChecked(False)
    else:
        xp = np
    mkData()
def noticeNumbaCheck():
    """Toggle pyqtgraph's Numba acceleration to match the checkbox, then rebuild."""
    use_numba = _has_numba and ui.numbaCheck.isChecked()
    pg.setConfigOption('useNumba', use_numba)
    mkData()
ui.dtypeCombo.currentIndexChanged.connect(mkData)
ui.rgbCheck.toggled.connect(mkData)
ui.widthSpin.editingFinished.connect(mkData)
ui.heightSpin.editingFinished.connect(mkData)
ui.framesSpin.editingFinished.connect(mkData)
ui.widthSpin.valueChanged.connect(updateSize)
ui.heightSpin.valueChanged.connect(updateSize)
ui.framesSpin.valueChanged.connect(updateSize)
ui.cudaCheck.toggled.connect(noticeCudaCheck)
ui.numbaCheck.toggled.connect(noticeNumbaCheck)
ptr = 0
lastTime = perf_counter()
fps = None
def update():
    """Show the next frame via the selected widget and update the FPS estimate.

    Called from a zero-interval QTimer; each call pushes one frame (cycling
    through `data`) with the LUT/levels/downsampling options currently chosen
    in the UI, then folds the frame time into an exponential moving average.
    """
    global ui, ptr, lastTime, fps, LUT, img
    if ui.lutCheck.isChecked():
        useLut = LUT
    else:
        useLut = None
    downsample = ui.downsampleCheck.isChecked()
    # Levels: either one (min, max) pair or one pair per RGB channel.
    if ui.scaleCheck.isChecked():
        if ui.rgbLevelsCheck.isChecked():
            useScale = [
                [ui.minSpin1.value(), ui.maxSpin1.value()],
                [ui.minSpin2.value(), ui.maxSpin2.value()],
                [ui.minSpin3.value(), ui.maxSpin3.value()]]
        else:
            useScale = [ui.minSpin1.value(), ui.maxSpin1.value()]
    else:
        useScale = None
    # Route the frame to whichever display widget is selected.
    if ui.rawRadio.isChecked():
        ui.rawImg.setImage(data[ptr%data.shape[0]], lut=useLut, levels=useScale)
        ui.stack.setCurrentIndex(1)
    elif ui.rawGLRadio.isChecked():
        ui.rawGLImg.setImage(data[ptr%data.shape[0]], lut=useLut, levels=useScale)
        ui.stack.setCurrentIndex(2)
    else:
        img.setImage(data[ptr%data.shape[0]], autoLevels=False, levels=useScale, lut=useLut, autoDownsample=downsample)
        ui.stack.setCurrentIndex(0)
        #img.setImage(data[ptr%data.shape[0]], autoRange=False)
    ptr += 1
    # Exponential moving average of the instantaneous frame rate.
    now = perf_counter()
    dt = now - lastTime
    lastTime = now
    if fps is None:
        fps = 1.0/dt
    else:
        s = np.clip(dt*3., 0, 1)
        fps = fps * (1-s) + (1.0/dt) * s
    ui.fpsLabel.setText('%0.2f fps' % fps)
    app.processEvents() ## force complete redraw for every plot
timer = QtCore.QTimer()
timer.timeout.connect(update)
timer.start(0)
if __name__ == '__main__':
pg.exec()
| [
"CommanderPho@users.noreply.github.com"
] | CommanderPho@users.noreply.github.com |
6a232e4c96ed1c55af4ffc914b4e01b6b7e15452 | dbc655e10463eacd6c757cb8f7bdea2dbfa7e8f7 | /USB-Cam/bosch_drivers/usb_cam/build/catkin_generated/05.catkin-test-results.sh.develspace.context.py | 1f5b35bd4ffe17a6def65bfdae8bcc649bd21921 | [] | no_license | rsdgroup3/FroboMind | c2df44e2036b87426c987ddf8ed66ff2d3b715af | dfeef2ddcfe25ca7e454c158aca24f41794beac5 | refs/heads/master | 2021-01-01T05:53:34.128993 | 2013-04-30T07:12:58 | 2013-04-30T07:12:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,397 | py | DEVELSPACE = True
INSTALLSPACE = False
CATKIN_DEVEL_PREFIX = '/home/rsd/groovy_workspace/FroboMind-Fuerte/USB-Cam/bosch_drivers/usb_cam/build/devel'
CATKIN_GLOBAL_BIN_DESTINATION = 'bin'
CATKIN_GLOBAL_ETC_DESTINATION = 'etc'
CATKIN_GLOBAL_INCLUDE_DESTINATION = 'include'
CATKIN_GLOBAL_LIB_DESTINATION = 'lib'
CATKIN_GLOBAL_LIBEXEC_DESTINATION = 'lib'
CATKIN_GLOBAL_PYTHON_DESTINATION = 'lib/python2.7/dist-packages'
CATKIN_GLOBAL_SHARE_DESTINATION = 'share'
CATKIN_PACKAGE_BIN_DESTINATION = ''
CATKIN_PACKAGE_ETC_DESTINATION = ''
CATKIN_PACKAGE_INCLUDE_DESTINATION = ''
CATKIN_PACKAGE_LIB_DESTINATION = ''
CATKIN_PACKAGE_LIBEXEC_DESTINATION = ''
CATKIN_PACKAGE_PYTHON_DESTINATION = ''
CATKIN_PACKAGE_SHARE_DESTINATION = ''
CMAKE_BINARY_DIR = '/home/rsd/groovy_workspace/FroboMind-Fuerte/USB-Cam/bosch_drivers/usb_cam/build'
CMAKE_CURRENT_BINARY_DIR = '/home/rsd/groovy_workspace/FroboMind-Fuerte/USB-Cam/bosch_drivers/usb_cam/build'
CMAKE_CURRENT_SOURCE_DIR = '/home/rsd/groovy_workspace/FroboMind-Fuerte/USB-Cam/bosch_drivers/usb_cam'
CMAKE_INSTALL_PREFIX = '/usr/local'
CMAKE_SOURCE_DIR = '/home/rsd/groovy_workspace/FroboMind-Fuerte/USB-Cam/bosch_drivers/usb_cam'
PROJECT_NAME = 'usb_cam'
PROJECT_BINARY_DIR = '/home/rsd/groovy_workspace/FroboMind-Fuerte/USB-Cam/bosch_drivers/usb_cam/build'
PROJECT_SOURCE_DIR = '/home/rsd/groovy_workspace/FroboMind-Fuerte/USB-Cam/bosch_drivers/usb_cam'
| [
"rsdgroup3@gmail.com"
] | rsdgroup3@gmail.com |
01cc7b4d8a0864fd65df11b08bbe02c1f4552b20 | 0b34a51ad87b23ff77e296842967b1c2158c75f6 | /9.py | 9365937bb9b108b0f6adda78724bf762729ad594 | [] | no_license | JeremyBlank/PythonCupofCode | 3ddfc783d351ec107cc0b2626dd9fccbe0ea708b | f3592512bd7e61fab5af029a237d497ee52804f6 | refs/heads/master | 2022-10-29T16:08:14.174138 | 2020-06-19T12:30:52 | 2020-06-19T12:30:52 | 273,487,110 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,014 | py | # Create an array of 5 integers and display array items, access them via index
# Tutorial script: basic operations on the stdlib `array` module
# ('i' = signed int; a typed, compact, C-backed sequence).
from array import *
array_num = array('i', [1,3,5,7,9]) # Any numbers work
# Display every item, then access the first three by index.
for i in array_num:
    print(i)
print('Access first three items individuals')
print(array_num[0])
print(array_num[1])
print(array_num[2])
# Append a new item to end of array
print('Starting array ', array_num)
array_num.append(11) # Adds number 11
print('new array: ', array_num)
# Reverse the order (slicing returns a reversed copy; original unchanged)
print(array_num[::-1])
# Insert a new value before the number 3
print(array_num)
array_num.insert(1,4) # inserts 4 at index 1, i.e. just before the 3
print(array_num)
# Remove an item via index
array_num.pop(3) # default is last item, otherwise add index #(3) is index location
print(array_num)
# Remove the first occurrence of an element
new_array = array('i', [1,3,5,7,3,9,3,11])
print('new array: ', new_array)
new_array.remove(3) # Integer value to remove, not an index
print(new_array)
# convert an array into a list
print(type(new_array))
x = new_array.tolist()
print(type(x))
| [
"jeremy.r.blank@gmail.com"
] | jeremy.r.blank@gmail.com |
93b05c1b91a528f90622d6db2202a79cd8f49ae5 | d9360bdf78c444ca5c858cc6d79a443d54f8da16 | /restModule/models.py | 9fbacc9dfc4ac2b22d6fbaddf7fc8342e23b3c23 | [] | no_license | MykolaKroshko/djangoFirstProject | e7bf4dbf79eee45328fe9ec83ebea7a9fae5a363 | d58442c225df18d9c759ac45601b3b9ee825c801 | refs/heads/master | 2021-09-10T21:26:05.063285 | 2018-04-02T13:33:50 | 2018-04-02T13:33:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 678 | py | from django.db import models
# Create your models here.
class Users(models.Model):
    """Application user record, stored in the legacy ``restUsers`` table."""

    first_name = models.CharField("first_name", max_length=30)            # required
    last_name = models.CharField("last_name", null=True, max_length=30)   # optional
    email = models.EmailField("email", unique=True, max_length=30)        # must be unique
    birth_date = models.DateField("birth_date")
    # Audit timestamps: set once on INSERT / refreshed on every save().
    created_at = models.DateTimeField("created_at", auto_now_add=True)
    updated_at = models.DateTimeField("updated_at", auto_now=True)

    class Meta:
        verbose_name = "User"
        app_label = "restModule"
        verbose_name_plural = "Users"
        db_table = 'restUsers'  # explicit table name instead of the default

    def __str__(self):
        # Human-readable label (used e.g. by the Django admin).
        return self.first_name
| [
"kroshko@codevog.com"
] | kroshko@codevog.com |
8ad101be472852005ccef40f2437ae4aa7277330 | da796d44f1f9f3ad3e08f604afce6ea70202075d | /hw2/release/dos/code8.py | 18d5b6dd34887bc90d732e6d1d5cdaa5e5087b8b | [] | no_license | Andychen3558/CNS2019 | 3a5294274745ff1c1daad5b3d8b16d45121321ec | 349789366080beb2eb9195c0d13823ab95e79a16 | refs/heads/master | 2022-12-09T10:56:46.339386 | 2019-11-17T16:00:20 | 2019-11-17T16:00:20 | 220,145,193 | 0 | 0 | null | 2021-06-02T00:38:40 | 2019-11-07T03:38:40 | Java | UTF-8 | Python | false | false | 1,276 | py | from pwn import *
import time
import hashlib
import binascii
def sha256(content):
    """Return the raw 32-byte SHA-256 digest of *content* (a byte string)."""
    return hashlib.sha256(content).digest()
def challenge():
    """Solve the server's proof-of-work gate.

    The server announces a 6-hex-digit suffix; brute-force a nonce whose
    SHA-256 digest (hex) ends with it, then send the nonce hex-encoded.
    NOTE(review): ``digest.encode('hex')`` only exists on Python 2 strings,
    so this script targets Python 2.
    """
    max_nonce = 2**24
    r.recvuntil('with ')
    randomstring = r.recv().decode()[:-2]  # strip trailing line ending
    #print(randomstring)
    user = '0'
    for nonce in range(max_nonce):
        hash_tmp = sha256(str(nonce).encode()).encode('hex')
        # Zero-pad the announced suffix to 6 digits before comparing.
        if hash_tmp[-6:] == "{:0>6}".format(randomstring):
            #print(nonce, hash_tmp, randomstring)
            user = binascii.hexlify(str(nonce).encode()).decode()
            break
    r.send(user+'\n')
    print(r.recvuntil('\n').decode())
# Hash-flooding DoS against the challenge server's Python dict.
r = remote("140.112.31.97", 10159)
### proof-of-work
challenge()
### dos
r.send('50000\n')  # total number of inputs we will send
### malicious inputs file
f = open('input8.txt', 'w')
d = {}
# Phase 1: walk CPython's open-addressing probe recurrence
# (i = 5*i + perturb + 1, masked to 28 bits) so all 10k keys land on the
# same probe chain in the server-side dict.
i, perturb = 1, 1
for n in range(10000):
    d[i] = 1
    r.send(str(i)+'\n')
    f.write(str(i)+'\n')
    i = ((i<<2) + i + perturb + 1) & 0xfffffff
    perturb >>= 5
# Phase 2: time candidate keys locally against the poisoned dict and keep
# the one whose inserts are slowest (longest probe sequence).
chosen = 0
max_time = 0
for i in range(2**30, 2**30-10000, -1):
    start = time.time()
    for n in range(40000):
        d[i] = 1
    end = time.time()
    if end - start > max_time:
        max_time = end - start
        chosen = i
    del d[i]
#print(chosen)
# Phase 3: hammer the server with the worst-case key 40k times.
for n in range(40000):
    r.send(str(chosen)+'\n')
    f.write(str(chosen)+'\n')
print(r.recvuntil('\n').decode())
print(r.recvuntil('\n').decode())
| [
"andychen3558@gmail.com"
] | andychen3558@gmail.com |
93db36640e286172bee479c27dc086ac4f892ad8 | d90283bff72b5a55dd4d0f90c7325355b00ce7b1 | /p1804/p12/xxxx.py | 1bd3153f8551e4d5a98764db70ac390410388037 | [] | no_license | yuemeiss/p1804daima | f841f52e63081d53d50a199e4d148d4533605bb6 | 6ea08eb9971e42bf4ac535033a006d98ed98bf98 | refs/heads/master | 2020-03-15T23:29:59.691297 | 2018-08-06T02:42:49 | 2018-08-06T02:42:49 | 132,395,078 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 167 | py | tu1 = (1,2,3)
alist=[123,5677,555]
for i in alist:
print(i)
for index,d in enumerate(alist):
print(index,d)
c=0
while c < len(tu1):
print(tu1[c])
c+=1
| [
"1083027306@qq.com"
] | 1083027306@qq.com |
8b09f80a72badcd81065c4921c3e31e1173a1a46 | c5b4d174ace61dd5914ca99fb0f2c710d0182324 | /pypes/tests/test__utils.py | bd9906b228853654176538174cd98e0cfc893330 | [
"Apache-2.0"
] | permissive | erramuzpe/pypes | 636c6b31023747a571af90390fd85b2dd6806dea | 3922d3162dc633b30961c036efdeb5d221ab1bfb | refs/heads/master | 2020-12-24T06:43:15.063955 | 2017-04-05T19:51:05 | 2017-04-05T19:51:05 | 73,461,509 | 0 | 0 | null | 2016-11-11T08:54:15 | 2016-11-11T08:54:14 | null | UTF-8 | Python | false | false | 2,645 | py | # -*- coding: utf-8 -*-
from pypes._utils import format_pair_list
def test_format_pair_list():
    """format_pair_list must substitute {anat} in both members of every pair,
    leaving the rest of each regex/replacement string untouched."""
    anat_fbasename = 'anat_hc'
    regexp_subst = [
        (r"/{anat}_.*corrected_seg8.mat$", "/{anat}_to_mni_affine.mat"),
        (r"/m{anat}.*_corrected.nii$", "/{anat}_biascorrected.nii"),
        (r"/w{anat}.*_biascorrected.nii$", "/{anat}_mni.nii"),
        (r"/y_{anat}.*nii$", "/{anat}_to_mni_field.nii"),
        (r"/iy_{anat}.*nii$", "/{anat}_to_mni_inv_field.nii"),
        (r"/mwc1{anat}.*nii$", "/{anat}_gm_mod_w2tpm.nii"),
        (r"/mwc2{anat}.*nii$", "/{anat}_wm_mod_w2tpm.nii"),
        (r"/mwc3{anat}.*nii$", "/{anat}_csf_mod_w2tpm.nii"),
        (r"/mwc4{anat}.*nii$", "/{anat}_nobrain_mod_w2tpm.nii"),
        (r"/c1{anat}.*nii$", "/{anat}_gm.nii"),
        (r"/c2{anat}.*nii$", "/{anat}_wm.nii"),
        (r"/c3{anat}.*nii$", "/{anat}_csf.nii"),
        (r"/c4{anat}.*nii$", "/{anat}_nobrain.nii"),
        (r"/c5{anat}.*nii$", "/{anat}_nobrain_mask.nii"),
    ]
    result = format_pair_list(regexp_subst, anat=anat_fbasename)
    # Expected: every '{anat}' replaced by 'anat_hc' in pattern and replacement.
    assert(result == [
        (r"/anat_hc_.*corrected_seg8.mat$", "/anat_hc_to_mni_affine.mat"),
        (r"/manat_hc.*_corrected.nii$", "/anat_hc_biascorrected.nii"),
        (r"/wanat_hc.*_biascorrected.nii$", "/anat_hc_mni.nii"),
        (r"/y_anat_hc.*nii$", "/anat_hc_to_mni_field.nii"),
        (r"/iy_anat_hc.*nii$", "/anat_hc_to_mni_inv_field.nii"),
        (r"/mwc1anat_hc.*nii$", "/anat_hc_gm_mod_w2tpm.nii"),
        (r"/mwc2anat_hc.*nii$", "/anat_hc_wm_mod_w2tpm.nii"),
        (r"/mwc3anat_hc.*nii$", "/anat_hc_csf_mod_w2tpm.nii"),
        (r"/mwc4anat_hc.*nii$", "/anat_hc_nobrain_mod_w2tpm.nii"),
        (r"/c1anat_hc.*nii$", "/anat_hc_gm.nii"),
        (r"/c2anat_hc.*nii$", "/anat_hc_wm.nii"),
        (r"/c3anat_hc.*nii$", "/anat_hc_csf.nii"),
        (r"/c4anat_hc.*nii$", "/anat_hc_nobrain.nii"),
        (r"/c5anat_hc.*nii$", "/anat_hc_nobrain_mask.nii"),
    ])
"alexsavio@gmail.com"
] | alexsavio@gmail.com |
d0d86fd7eaeff03e9368f3949046aaef8fd1be17 | 1bf35ccf0d0774924a257b89a854097a33063c58 | /constants.py | 8453b9e3bf23bdfdba660e98f735627d13ae13cf | [] | no_license | lightbulbmeow/metamon | de2995943938dab77ebd68f8213ef96b3a2c58ce | 216a907850fda026e7e92382cdea1667a45bfbd5 | refs/heads/main | 2023-03-20T23:52:41.842672 | 2021-03-05T08:46:27 | 2021-03-05T08:46:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,026 | py | # TYPE_EFFICACY[offensive][defensive]
# Pokémon type tables.
#
# TYPE_EFFICACY[offensive][defensive] -> damage multiplier (0, 0.5, 1 or 2)
# for an attack of type `offensive` hitting a defender of type `defensive`.
# Index 0 of the outer list and of every row is unused padding so that type
# numbers start at 1 (Normal) and end at 18 (Fairy).
TYPE_EFFICACY = [
    None,
    [None, 1, 1, 1, 1, 1, 0.5, 1, 0, 0.5, 1, 1, 1, 1, 1, 1, 1, 1, 1],
    [None, 2, 1, 0.5, 0.5, 1, 2, 0.5, 0, 2, 1, 1, 1, 1, 0.5, 2, 1, 2, 0.5],
    [None, 1, 2, 1, 1, 1, 0.5, 2, 1, 0.5, 1, 1, 2, 0.5, 1, 1, 1, 1, 1],
    [None, 1, 1, 1, 0.5, 0.5, 0.5, 1, 0.5, 0, 1, 1, 2, 1, 1, 1, 1, 1, 2],
    [None, 1, 1, 0, 2, 1, 2, 0.5, 1, 2, 2, 1, 0.5, 2, 1, 1, 1, 1, 1],
    [None, 1, 0.5, 2, 1, 0.5, 1, 2, 1, 0.5, 2, 1, 1, 1, 1, 2, 1, 1, 1],
    [None, 1, 0.5, 0.5, 0.5, 1, 1, 1, 0.5, 0.5, 0.5, 1, 2, 1, 2, 1, 1, 2, 0.5],
    [None, 0, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 2, 1, 1, 0.5, 1],
    [None, 1, 1, 1, 1, 1, 2, 1, 1, 0.5, 0.5, 0.5, 1, 0.5, 1, 2, 1, 1, 2],
    [None, 1, 1, 1, 1, 1, 0.5, 2, 1, 2, 0.5, 0.5, 2, 1, 1, 2, 0.5, 1, 1],
    [None, 1, 1, 1, 1, 2, 2, 1, 1, 1, 2, 0.5, 0.5, 1, 1, 1, 0.5, 1, 1],
    [None, 1, 1, 0.5, 0.5, 2, 2, 0.5, 1, 0.5, 0.5, 2, 0.5, 1, 1, 1, 0.5, 1, 1],
    [None, 1, 1, 2, 1, 0, 1, 1, 1, 1, 1, 2, 0.5, 0.5, 1, 1, 0.5, 1, 1],
    [None, 1, 2, 1, 2, 1, 1, 1, 1, 0.5, 1, 1, 1, 1, 0.5, 1, 1, 0, 1],
    [None, 1, 1, 2, 1, 2, 1, 1, 1, 0.5, 0.5, 0.5, 2, 1, 1, 0.5, 2, 1, 1],
    [None, 1, 1, 1, 1, 1, 1, 1, 1, 0.5, 1, 1, 1, 1, 1, 1, 2, 1, 0],
    [None, 1, 0.5, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 2, 1, 1, 0.5, 0.5],
    [None, 1, 2, 1, 0.5, 1, 1, 1, 1, 0.5, 0.5, 1, 1, 1, 1, 1, 2, 2, 1],
]

# Type names indexed by type number: TYPES[1] == "Normal" ... TYPES[18] == "Fairy".
TYPES = [
    None,
    "Normal",
    "Fighting",
    "Flying",
    "Poison",
    "Ground",
    "Rock",
    "Bug",
    "Ghost",
    "Steel",
    "Fire",
    "Water",
    "Grass",
    "Electric",
    "Psychic",
    "Ice",
    "Dragon",
    "Dark",
    "Fairy",
]

# Reverse lookup: type name -> type number.  Derived from TYPES (instead of
# being written out by hand) so the two tables can never drift out of sync.
TYPE_NUMBERS = {name: number for number, name in enumerate(TYPES) if name is not None}
"dsjong2002@yahoo.com"
] | dsjong2002@yahoo.com |
a28e004cc404c5af1c644f8940fb77d3ab7e34a1 | 6295fa3e7d5f459935974bbe2994017005b2f3a2 | /system/test_elide_attributes.py | a017947b93c772a3f00b4e05bcc728c28a6495d2 | [] | no_license | We-Amp/psol_pytest | 1eed1d9fcd901734a59a94a6cf12e010105271d8 | 9ec4d75e5397fa431dc8963010236974e1793022 | refs/heads/master | 2016-09-06T02:19:04.690702 | 2015-03-03T17:01:01 | 2015-03-03T17:01:01 | 31,080,079 | 0 | 0 | null | 2015-02-27T21:24:05 | 2015-02-20T19:08:49 | Python | UTF-8 | Python | false | false | 312 | py | import config
import test_helpers as helpers
def test_elide_attributes_removes_boolean_and_default_attributes():
    """With the elide_attributes filter enabled, the rewritten example page
    must contain no 'disabled=' attribute values (boolean attrs are shortened)."""
    filter_name = "elide_attributes"
    url = "%s/%s.html?PageSpeedFilters=%s" % (
        config.EXAMPLE_ROOT, filter_name, filter_name)
    assert helpers.fetch(url).body.count("disabled=") == 0
| [
"oschaaf@we-amp.com"
] | oschaaf@we-amp.com |
abc6f50a3ef37732a14b3e9a2ca46269572b017a | 3410f36ad4506c36725d34a1ae0fc5ebec215f4e | /make_and_save_lang_pop_data.py | 4e25f3d646d697a97da37f7af006b38147ab08a4 | [] | no_license | angwhen/mcm-2018b | d5c8b3feb7a65468dd4a510e43642e7c9d041471 | 2219a185da8a7e0927b4a8323d804e07b4186c68 | refs/heads/master | 2021-05-02T05:56:10.875379 | 2018-02-11T17:35:15 | 2018-02-11T17:35:15 | 120,851,086 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,922 | py | import pickle
# Build language -> population-per-period table from two pickled inputs:
#  - countries_pop_dict: country name -> {year: population} (years 1950..2050, step 5)
#  - countries_lang_dict: country name -> primary language
# NOTE(review): Python 2 script (`print` statements, integer division below).
countries_pop_dict = pickle.load(open("countries_pop_dict.p","rb"))
countries_lang_dict = pickle.load(open("country_lang_dict.p","rb"))
print countries_pop_dict["China"]
print countries_lang_dict["China"]
# make language to population/year dict
lang_pop_dict = {} # will have array for each language index 0 is 1950, and every consec index is 5 more years
countries_with_pop = countries_pop_dict.keys()
for country_name in countries_lang_dict.keys():
    orig_country_name = country_name
    # Normalize naming differences between the two datasets.
    country_name = country_name.replace("St.","Saint")
    country_name = country_name.replace(" (proposed state)","")
    country_name = country_name.replace(" (proposed)","")
    if "," in country_name:
        # "Korea, South" -> "South Korea"
        two_parts = country_name.split(",")
        country_name = (two_parts[1]+" "+two_parts[0]).strip()
    if country_name not in countries_with_pop:
        # Fall back to substring match, then a few hand-mapped special cases.
        close_names = [s for s in countries_with_pop if country_name in s]
        if len(close_names) > 0:
            country_pop_name = close_names[0]
        elif country_name == "Sao Tome and Principel":
            country_pop_name = "S\xc3\xa3o Tom\xc3\xa9 and Pr\xc3\xadncipe"
        elif country_name == "Palestinian State":
            country_pop_name = "State of Palestine"
        else:
            print country_name
            continue # can't match
    else:
        country_pop_name = country_name
    curr_lang = countries_lang_dict[orig_country_name]
    # Accumulate each country's population into its language's 21 5-year buckets.
    if curr_lang not in lang_pop_dict:
        lang_pop_dict[curr_lang] = [0]*21
        for year in countries_pop_dict[country_pop_name].keys():
            lang_pop_dict[curr_lang][(year-1950)/5] = countries_pop_dict[country_pop_name][year]
    else:
        for year in countries_pop_dict[country_pop_name].keys():
            lang_pop_dict[curr_lang][(year-1950)/5] += countries_pop_dict[country_pop_name][year]
pickle.dump(lang_pop_dict, open("lang_pop_dict.p","wb"))
pickle.dump(lang_pop_dict, open("lang_pop_dict.p","wb"))
| [
"angelmwen@gmail.com"
] | angelmwen@gmail.com |
1e4ca474dbb9889ef05fd94e9250adda7560ad48 | 3bb04410f48d1f56a0e434abae57eb7b35b8c64e | /RLBotPython/RLBotPythonExample-master/NeatBot/States.py | 34452cf367e7757f22ee89e670fefd85a6665954 | [
"MIT"
] | permissive | Cgreenstreet/RocketLeagueBot | 5838eef452e5bb795c53db084cc0c766df73245a | a040829b82a6c6798e04b31f6f4d41c8717e91c4 | refs/heads/master | 2022-12-15T18:06:32.887111 | 2020-09-09T01:33:53 | 2020-09-09T01:33:53 | 293,970,211 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,895 | py | import math
import time
from rlbot.agents.base_agent import BaseAgent, SimpleControllerState
from util import *
class calcShot:
    """Shot state: drive to a position behind the ball that lines it up with
    the opponent goal, using the goalpost 'cone' to pick an approach angle."""
    def __init__(self):
        # Set True when the state is no longer valid and should be replaced.
        self.expired = False
    def available(self,agent):
        """Usable when the ball is hittable, in play, and roughly in front of us."""
        if ballReady(agent) and abs(agent.ball.location.data[1]) < 5050 and ballProject(agent) > 500 - (distance2D(agent.ball,agent.me)/2):
            return True
        return False
    def execute(self,agent):
        """Compute a target position/speed and feed it to calcController."""
        agent.controller = calcController
        #getting the coordinates of the goalposts
        leftPost = Vector3([-sign(agent.team)*700 , 5100*-sign(agent.team), 200])
        rightPost = Vector3([sign(agent.team)*700, 5100*-sign(agent.team), 200])
        center = Vector3([0, 5150*-sign(agent.team), 200])
        #time stuff that we don't worry about yet
        time_guess = 0
        bloc = future(agent.ball,time_guess)
        #vectors from the goalposts to the ball & to Gosling
        ball_left = angle2(bloc,leftPost)
        ball_right = angle2(bloc,rightPost)
        agent_left = angle2(agent.me,leftPost)
        agent_right = angle2(agent.me,rightPost)
        #determining if we are left/right/inside of cone
        if agent_left > ball_left and agent_right > ball_right:
            goal_target = leftPost
            #rightPost
        elif agent_left > ball_left and agent_right < ball_right:
            goal_target = None
        elif agent_left < ball_left and agent_right < ball_right:
            goal_target = rightPost
            #leftPost
        else:
            goal_target = None
        if goal_target != None:
            #if we are outside the cone, this is the same as Gosling's old code
            goal_to_ball = (agent.ball.location - goal_target).normalize()
            goal_to_agent = (agent.me.location - goal_target).normalize()
            difference = goal_to_ball - goal_to_agent
            error = cap(abs(difference.data[0])+ abs(difference.data[1]),1,10)
        else:
            #if we are inside the cone, our line to follow is a vector from the ball to us (although it's still named 'goal_to_ball')
            goal_to_ball = (agent.me.location - agent.ball.location).normalize()
            error = cap( distance2D(bloc,agent.me) /1000,0,1)
        #this is measuring how fast the ball is traveling away from us if we were stationary
        ball_dpp_skew = cap(abs(dpp(agent.ball.location, agent.ball.velocity, agent.me.location, [0,0,0]))/80, 1,1.5)
        #same as Gosling's old distance calculation, but now we consider dpp_skew which helps us handle when the ball is moving
        target_distance =cap( (40 + distance2D(agent.ball.location,agent.me)* (error**2))/1.8, 0,4000)
        target_location = agent.ball.location + Vector3([(goal_to_ball.data[0]*target_distance) * ball_dpp_skew, goal_to_ball.data[1]*target_distance,0])
        #this also adjusts the target location based on dpp
        ball_something = dpp(target_location,agent.ball.velocity, agent.me,[0,0,0])**2
        if ball_something > 100: #if we were stopped, and the ball is moving 100uu/s away from us
            ball_something = cap(ball_something,0,80)
            correction = agent.ball.velocity.normalize()
            correction = Vector3([correction.data[0]*ball_something,correction.data[1]*ball_something,correction.data[2]*ball_something])
            target_location += correction #we're adding some component of the ball's velocity to the target position so that we are able to hit a faster moving ball better
            #it's important that this only happens when the ball is moving away from us.
        #another target adjustment that applies if the ball is close to the wall
        extra = 4120 - abs(target_location.data[0])
        if extra < 0:
            # we prevent our target from going outside the wall, and extend it so that Gosling gets closer to the wall before taking a shot, makes things more reliable
            target_location.data[0] = cap(target_location.data[0],-4120,4120)
            target_location.data[1] = target_location.data[1] + (-sign(agent.team)*cap(extra,-500,500))
        #getting speed, this would be a good place to modify because it's not very good
        target_local = toLocal(agent.ball.location,agent.me)
        angle_to_target = cap(math.atan2(target_local.data[1], target_local.data[0]),-3,3)
        distance_to_target = distance2D(agent.me, target_location)
        speed= 2000 - (100*(1+angle_to_target)**2)
        #picking our rendered target color based on the speed we want to go
        colorRed = cap(int( (speed/2300) * 255),0,255)
        colorBlue =cap(255-colorRed,0,255)
        #see the rendering tutorial on github about this, just drawing lines from the posts to the ball and one from the ball to the target
        agent.renderer.begin_rendering()
        agent.renderer.draw_line_3d(bloc.data, leftPost.data, agent.renderer.create_color(255,255,0,0))
        agent.renderer.draw_line_3d(bloc.data, rightPost.data, agent.renderer.create_color(255,0,255,0))
        agent.renderer.draw_line_3d(agent.ball.location.data,target_location.data, agent.renderer.create_color(255,colorRed,0,colorBlue))
        agent.renderer.draw_rect_3d(target_location.data, 10,10, True, agent.renderer.create_color(255,colorRed,0,colorBlue))
        agent.renderer.end_rendering()
        # Expire once the ball is no longer hittable or has crossed a goal line.
        if ballReady(agent) == False or abs(agent.ball.location.data[1]) > 5050:
            self.expired = True
        return agent.controller(agent,target_location,speed)
class quickShot:
    """Fallback shot state: aim slightly behind/around the ball toward the
    opponent half; hands off to calcShot as soon as that state is available."""
    def __init__(self):
        # Set True when the state should be replaced.
        self.expired = False
    def available(self,agent):
        """Usable when the ball is not too far behind our facing direction."""
        if ballProject(agent) > -(distance2D(agent.ball,agent.me)/2):
            return True
        return False
    def execute(self,agent):
        """Pick an offset point next to the ball and drive there via shotController."""
        agent.controller = shotController
        left_post = Vector3([sign(agent.team)*GOAL_WIDTH/2,-sign(agent.team)*FIELD_LENGTH/2,100])
        right_post = Vector3([-sign(agent.team)*GOAL_WIDTH/2,-sign(agent.team)*FIELD_LENGTH/2,100])
        ball_left = angle2(agent.ball.location,left_post)
        ball_right = angle2(agent.ball.location,right_post)
        our_left = angle2(agent.me.location,left_post)
        our_right = angle2( agent.me.location,right_post)
        # Offset the aim point around the ball based on its x position,
        # so shots near the sidewall curl back toward the middle.
        offset = (agent.ball.location.data[0] / FIELD_WIDTH) * 3.14
        x = agent.ball.location.data[0] +90 * abs(math.cos(offset)) * sign(offset)
        y = agent.ball.location.data[1] + 90 * abs(math.sin(offset)) * sign(agent.team)
        target_location = toLocation([x,y,agent.ball.location.data[2]])
        location = toLocal(target_location,agent.me)
        angle_to_target = math.atan2(location.data[1],location.data[0])
        distance_to_target = distance2D(agent.me, target_location)
        # Slow down the further we have to turn.
        speedCorrection = ((2+ abs(angle_to_target)**2) * 350)
        speed = 2400 - speedCorrection
        # Expire when no longer valid, or yield to the smarter calcShot state.
        if self.available(agent) == False:
            self.expired = True
        elif calcShot().available(agent) == True:
            self.expired = True
        return agent.controller(agent,target_location, speed)
class wait():
    """Idle state while the ball is airborne: grab boost if low, otherwise
    position under the ball's predicted landing spot."""
    def __init__(self):
        # Set True when the ball becomes hittable again.
        self.expired = False
    def available(self, agent):
        """Usable when the ball won't reach the ground for a while."""
        if timeZ(agent.ball) > 1.5:
            return True
    def execute(self,agent):
        """Drive (frugally, saving boost) toward boost or the landing point."""
        #taking a rough guess at where the ball will be in the future, based on how long it will take to hit the ground
        ball_future = future(agent.ball, timeZ(agent.ball))
        if agent.me.boost < 35: #if we are low on boost, we'll go for boot
            closest = 0
            closest_distance = distance2D(boosts[0], ball_future)
            #going through every large pad to see which one is closest to our ball_future guesstimation
            for i in range(1,len(boosts)):
                if distance2D(boosts[i], ball_future) < closest_distance:
                    closest = i
                    closest_distance = distance2D(boosts[i], ball_future)
            target = boosts[closest]
            speed = 2300
        else:
            #if we have boost, we just go towards the ball_future position, and slow down just like in exampleATBA as we get close
            target = ball_future
            current = velocity2D(agent.me)
            ratio = distance2D(agent.me,target)/(current + 0.01)
            speed = cap(600 * ratio,0,2300)
            if speed <= 100:
                speed = 0
        if ballReady(agent):
            self.expired = True
        return frugalController(agent,target,speed)
def frugalController(agent, target, speed):
    """Boost-free driving controller: steer at ``target``, hold ``speed``,
    and front-flip occasionally for free velocity.

    ``agent.start`` holds the wall-clock time the last dodge began and
    sequences the jump presses below. Returns a SimpleControllerState.
    """
    ctrl = SimpleControllerState()
    local = toLocal(target, agent.me)
    heading = math.atan2(local.data[1], local.data[0])
    ctrl.steer = steer(heading)
    current = velocity2D(agent.me)
    # Bang-bang throttle with a 50 uu/s dead band so we do not oscillate.
    if current < speed:
        ctrl.throttle = 1.0
    elif current - 50 > speed:
        ctrl.throttle = -1.0
    else:
        ctrl.throttle = 0
    since_dodge = time.time() - agent.start
    if since_dodge > 2.2 and distance2D(target, agent.me) > (velocity2D(agent.me) * 2.3) and abs(heading) < 1 and current < speed:
        # Far away, pointed roughly at the target, and below target speed:
        # kick off a new front flip.
        agent.start = time.time()
    elif since_dodge <= 0.1:
        ctrl.jump = True   # first jump press
        ctrl.pitch = -1
    elif since_dodge >= 0.1 and since_dodge <= 0.15:
        ctrl.jump = False  # release between the two presses
        ctrl.pitch = -1
    elif since_dodge > 0.15 and since_dodge < 1:
        ctrl.jump = True   # second press: flip, steering into the turn
        ctrl.yaw = ctrl.steer
        ctrl.pitch = -1
    return ctrl
def calcController(agent, target_object, target_speed):
    """Steer and throttle toward ``target_object``; boost when well below top speed.

    Bug fix: the boost gate compared ``agent.start`` (an absolute
    ``time.time()`` timestamp of the last dodge) against ``2.2`` seconds,
    which is always true. It now checks the elapsed time since the last
    dodge, matching the ``time_difference`` logic used by the other
    controllers in this file, so boost is not requested mid-dodge.
    """
    location = toLocal(target_object, agent.me)
    controller_state = SimpleControllerState()
    angle_to_ball = math.atan2(location.data[1], location.data[0])
    current_speed = velocity2D(agent.me)
    controller_state.steer = steer(angle_to_ball)
    #throttle
    if target_speed > current_speed:
        controller_state.throttle = 1.0
        # Boost only when fast travel is wanted, we are not mid-dodge, and we
        # are below the speed (~2300) where boost stops helping.
        if target_speed > 1400 and time.time() - agent.start > 2.2 and current_speed < 2250:
            controller_state.boost = True
    elif target_speed < current_speed:
        controller_state.throttle = -1.0
    return controller_state
def shotController(agent, target_object, target_speed):
    """Controller for taking a shot: steer and throttle at the target, then
    dodge into the ball flipping toward the opponent goal.

    Bug fix: the boost condition compared ``agent.start`` (an absolute
    ``time.time()`` timestamp) to ``2.5`` seconds, which is always true; it
    now uses the elapsed time since the last dodge so we never boost while
    mid-dodge.
    """
    # Direction to the opponent goal in car-local space, used for the flip direction.
    goal_local = toLocal([0, -sign(agent.team)*FIELD_LENGTH/2, 100], agent.me)
    goal_angle = math.atan2(goal_local.data[1], goal_local.data[0])
    location = toLocal(target_object, agent.me)
    controller_state = SimpleControllerState()
    angle_to_target = math.atan2(location.data[1], location.data[0])
    current_speed = velocity2D(agent.me)
    #steering
    controller_state.steer = steer(angle_to_target)
    time_difference = time.time() - agent.start  # seconds since we last dodged
    #throttle
    if target_speed > 1400 and target_speed > current_speed and time_difference > 2.5 and current_speed < 2250:
        controller_state.boost = True
    if target_speed > current_speed:
        controller_state.throttle = 1.0
    elif target_speed < current_speed:
        controller_state.throttle = 0
    #dodging
    if ballReady(agent) and time_difference > 2.2 and distance2D(target_object, agent.me) <= 270:
        agent.start = time.time()  # begin the shot dodge
    elif time_difference <= 0.1:
        # First jump press, oriented toward the goal.
        controller_state.jump = True
        controller_state.yaw = math.sin(goal_angle)
        controller_state.pitch = -abs(math.cos(goal_angle))
    elif time_difference >= 0.1 and time_difference <= 0.13:
        # Brief release between the two jump presses.
        controller_state.jump = False
        controller_state.yaw = math.sin(goal_angle)
        controller_state.pitch = -abs(math.cos(goal_angle))
    elif time_difference > 0.13 and time_difference < 1:
        # Second press: flip into the ball toward the goal.
        controller_state.jump = True
        controller_state.yaw = math.sin(goal_angle)
        controller_state.pitch = -abs(math.cos(goal_angle))
    return controller_state
class exampleATBA:
    """Always-Toward-Ball Agent: chase the ball, speeding up with distance."""
    def __init__(self):
        self.expired = False
    def execute(self, agent):
        # Target speed grows with separation so the bot closes the gap briskly.
        chase_speed = velocity2D(agent.ball) + (distance2D(agent.ball, agent.me) / 1.5)
        return agent.controller(agent, agent.ball, chase_speed)
def exampleController(agent, target_object, target_speed):
    """Basic chase controller: steer at the target, throttle/boost to match
    ``target_speed``, and front-flip periodically to gain free speed.

    Bug fix: the boost gate compared ``agent.start`` (an absolute
    ``time.time()`` timestamp) against ``2.2`` and was therefore always
    true; it now checks the elapsed time since the last dodge so boost is
    not wasted mid-flip, consistent with the dodge logic below.
    """
    location = toLocal(target_object, agent.me)
    controller_state = SimpleControllerState()
    angle_to_ball = math.atan2(location.data[1], location.data[0])
    current_speed = velocity2D(agent.me)
    time_difference = time.time() - agent.start  # seconds since the last dodge
    #steering
    controller_state.steer = steer(angle_to_ball)
    #throttle
    if target_speed > current_speed:
        controller_state.throttle = 1.0
        if target_speed > 1400 and time_difference > 2.2 and current_speed < 2250:
            controller_state.boost = True
    elif target_speed < current_speed:
        controller_state.throttle = 0
    #dodging
    if time_difference > 2.2 and distance2D(target_object, agent.me) > (velocity2D(agent.me)*2.5) and abs(angle_to_ball) < 1.3:
        agent.start = time.time()  # start a new front flip
    elif time_difference <= 0.1:
        controller_state.jump = True   # first jump press
        controller_state.pitch = -1
    elif time_difference >= 0.1 and time_difference <= 0.15:
        controller_state.jump = False  # release between presses
        controller_state.pitch = -1
    elif time_difference > 0.15 and time_difference < 1:
        controller_state.jump = True   # second press: flip, steering into the turn
        controller_state.yaw = controller_state.steer
        controller_state.pitch = -1
    return controller_state
| [
"noreply@github.com"
] | Cgreenstreet.noreply@github.com |
e51f5a51804a9c4d99ecc7e2839017b94abe5da2 | 274dde0cd7ded9c38d0a71f5af8125c85cb10afe | /scenes/happy_buddhas.py | 20354741a19246883f132db07a73b0d8c48eb3ae | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | theomission/Fujiyama-Renderer | a81572c625421bde63096622d5c4436a505fc80d | e49c074dde1a4ab2abc5e687458487b35a692722 | refs/heads/master | 2020-12-28T22:46:45.517020 | 2015-11-20T19:19:35 | 2015-11-20T19:34:04 | 48,100,567 | 1 | 0 | null | 2015-12-16T09:28:36 | 2015-12-16T09:28:36 | null | UTF-8 | Python | false | false | 14,261 | py | #!/usr/bin/env python
# 16 Happy Buddhas with 32 point lights
# Copyright (c) 2011-2015 Hiroshi Tsubokawa
import fujiyama
si = fujiyama.SceneInterface()
#plugins
# Shader plugins referenced by the materials defined further below.
si.OpenPlugin('PlasticShader')
si.OpenPlugin('GlassShader')
si.OpenPlugin('ConstantShader')
#Camera
# Perspective camera above and to the side of the buddha grid, tilted down at it.
si.NewCamera('cam1', 'PerspectiveCamera')
si.SetProperty3('cam1', 'translate', 5, 4, 5)
si.SetProperty3('cam1', 'rotate', -19.4712206345, 45, 0)
#Light
# 32 point lights scattered at height y=12 above the scene. Each contributes
# intensity 1/32 (0.03125) so the total is 1.0. The (x, z) positions below are
# kept verbatim from the original hand-unrolled light definitions; the loop
# replaces 96 lines of copy-paste with a single data table.
light_positions = [
    (0.900771, 4.09137), (2.02315, 5.28021), (10.69, 13.918), (4.28027, 7.58462),
    (12.9548, 1.19914), (6.55808, 2.31772), (0.169064, 10.9623), (1.25002, 4.51314),
    (2.46758, 5.73382), (3.55644, 6.84334), (4.76112, 8.00264), (13.3267, 9.10333),
    (14.4155, 2.68084), (8.10755, 3.79629), (9.21103, 4.9484), (2.83469, 6.09221),
    (4.00945, 7.18302), (12.6072, 0.832089), (6.21169, 1.98055), (7.39599, 10.5563),
    (8.52421, 4.15086), (9.5891, 5.39715), (3.18967, 13.9542), (4.41432, 0.082813),
    (5.48803, 1.21856), (6.57647, 2.31432), (0.265098, 10.9453), (8.84422, 12.1117),
    (10.0154, 5.67625), (11.0907, 14.4043), (4.71726, 7.98851), (13.3907, 9.08986),
]
for i, (x, z) in enumerate(light_positions):
    name = 'light%d' % i
    si.NewLight(name, 'PointLight')
    si.SetProperty3(name, 'translate', x, 12, z)
    si.SetProperty1(name, 'intensity', 0.03125)
#Texture
# Environment map used by the dome's constant shader.
si.NewTexture('tex1', '../../hdr/uffizi-large.hdr')
#Shader
# One plastic shader per buddha. The diffuse colors are kept verbatim from the
# original hand-unrolled shader definitions; the loop removes the copy-paste.
buddha_diffuse = [
    (0.420094, 0.420094, 0.350488), (0.28069, 0.458229, 0.137373),
    (0.0677194, 0.243452, 0.17638), (0.103483, 0.282905, 0.463173),
    (0.3928, 0.316322, 0.499749), (0.177487, 0.107719, 0.285897),
    (0.464536, 0.145117, 0.0744061), (0.25295, 0.432513, 0.363791),
    (0.0438571, 0.469678, 0.397773), (0.329916, 0.258578, 0.437995),
    (0.114984, 0.29606, 0.224838), (0.153974, 0.084485, 0.262245),
    (0.19063, 0.119706, 0.299113), (0.478574, 0.409711, 0.335123),
    (0.016735, 0.196075, 0.374869), (0.304462, 0.234848, 0.41284),
]
for i, (r, g, b) in enumerate(buddha_diffuse):
    shader = 'buddha_shader%d' % i
    si.NewShader(shader, 'PlasticShader')
    si.SetProperty3(shader, 'diffuse', r, g, b)
si.NewShader('floor_shader', 'PlasticShader')
si.NewShader('dome_shader', 'ConstantShader')
#Mesh
# Geometry: the happy-buddha model, a ground plane, and a sky dome.
si.NewMesh('buddha_mesh', '../../ply/happy.ply')
si.NewMesh('floor_mesh', '../../ply/floor.ply')
si.NewMesh('dome_mesh', '../../ply/dome.ply')
#ObjectInstance
# 16 buddhas on a 4x4 grid with 1.5-unit spacing. Buddha i sits at
# (-1.5*(i//4), 0, -1.5*(i%4)), rotated 30*i degrees about Y, scaled 0.6,
# and shaded with its matching buddha_shader. This reproduces the original
# hand-unrolled instance list exactly.
for i in range(16):
    obj = 'buddha%d' % i
    si.NewObjectInstance(obj, 'buddha_mesh')
    si.SetProperty3(obj, 'translate', -1.5 * (i // 4), 0, -1.5 * (i % 4))
    si.SetProperty3(obj, 'rotate', 0, 30 * i, 0)
    si.SetProperty3(obj, 'scale', 0.6, 0.6, 0.6)
    si.AssignShader(obj, 'DEFAULT_SHADING_GROUP', 'buddha_shader%d' % i)
# Ground plane centered under the grid.
si.NewObjectInstance('floor1', 'floor_mesh')
si.SetProperty3('floor1', 'translate', -2.0, 0, -2.0)
si.AssignShader('floor1', 'DEFAULT_SHADING_GROUP', 'floor_shader')
# Sky dome with the HDR environment texture.
si.NewObjectInstance('dome1', 'dome_mesh')
si.SetProperty3('dome1', 'scale', 0.5, 0.5, 0.5)
si.AssignShader('dome1', 'DEFAULT_SHADING_GROUP', 'dome_shader')
si.AssignTexture('dome_shader', 'texture', 'tex1')
#ObjectGroup
# Group every buddha so they cast shadows on each other and on the floor.
# The loops replace 33 hand-unrolled lines while issuing the same calls in
# the same order: all AddObjectToGroup calls, then all AssignObjectGroup
# calls, then the floor.
si.NewObjectGroup('group1')
for i in range(16):
    si.AddObjectToGroup('group1', 'buddha%d' % i)
for i in range(16):
    si.AssignObjectGroup('buddha%d' % i, 'shadow_target', 'group1')
si.AssignObjectGroup('floor1', 'shadow_target', 'group1')
#FrameBuffer
si.NewFrameBuffer('fb1', 'rgba')
#Renderer
# Hook the camera and framebuffer to the renderer and set the output size.
si.NewRenderer('ren1')
si.AssignCamera('ren1', 'cam1')
si.AssignFrameBuffer('ren1', 'fb1')
si.SetProperty2('ren1', 'resolution', 640, 480)
#si.SetProperty2('ren1', 'resolution', 160, 120)
#Rendering
si.RenderScene('ren1')
#Output
si.SaveFrameBuffer('fb1', '../happy_buddhas.fb')
#Run commands
# Flush the accumulated scene commands to the renderer.
si.Run()
#si.Print()
| [
"hiroshi@fujiyama-renderer.com"
] | hiroshi@fujiyama-renderer.com |
ac9fff064bb63cdba96a2db32fefd5b246561966 | a02fe5443c31df41c391738b5d9fce3d94fb21bc | /bot_ct/models/support.py | 657dcbd36f8c231862a127aebffd6dbe41b0584a | [] | no_license | Ryslan271/bot_ct | 34862a67717cc036a4caebe025dab334ab8bb6f3 | ba0fe8a4c1629fb31955ee701c8b96ffa9936a39 | refs/heads/master | 2023-03-14T01:45:46.409921 | 2021-03-06T18:22:49 | 2021-03-06T18:22:49 | 282,878,207 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,832 | py | from random import choice
import requests
from bs4 import BeautifulSoup as Bs
from models import db_session
from models.start import bot
from models.users import User, Game
from telebot import types
# Open the SQLite database once at import time so the handlers below can query it.
db_session.global_init('sqlite.db')
def answer_not(message):
    """Fallback reply when the bot has no learned answer for ``message.text``.

    Echoes the unknown question back (HTML-formatted) and explains the
    /Обучить "question"="answer" syntax the user can use to teach the bot.
    """
    bot.send_message(message.chat.id, 'Я не знаю что даже ответить на' + ' <b><i>' + message.text + '</i></b> ' + '\n'
                     'Напиши мне вопрос и ответ для него, чтобы я мог научиться так же говорить как '
                     'и людишки\n'
                     'Напиши вот так:\n'
                     '\n'
                     '<b>/Обучить "Тут вопрос"="тут ответ на этот вопрос"</b>',
                     parse_mode='html'
                     )
def Error(message):
    """Send an error sticker plus usage help for the /Обучить command.

    Fix: the sticker file was opened but never closed; a ``with`` block now
    guarantees the handle is released even if sending fails.
    """
    with open('assets/hi2.tgs', 'rb') as sti:
        bot.send_sticker(message.chat.id, sti)
    bot.send_message(message.chat.id,
                     "Ошибка, может вы ввели не правильную команду\n"
                     'Напиши вот так:\n'
                     '\n'
                     '<b>/Обучить (Тут вопрос)=(тут ответ на этот вопрос)</b>',
                     parse_mode='html'
                     )
def start_hct(message):
    """Show the main reply keyboard (/help, /Telegram, /vk) with a short greeting."""
    keyboard = types.ReplyKeyboardMarkup(resize_keyboard=True)
    btn_telegram = types.KeyboardButton("/Telegram")
    btn_vk = types.KeyboardButton("/vk")
    btn_help = types.KeyboardButton("/help")
    # Same button order as before: help first, then Telegram, then vk.
    keyboard.add(btn_help, btn_telegram, btn_vk)
    bot.send_message(message.chat.id,
                     " Чтобы начать общаться просто пиши и обучай этого бота :D",
                     reply_markup=keyboard
                     )
def game_dif(message):
    """Suggest a random game from the DB, attached as an inline URL button.

    Bug fix: the original ``for ... if ... break / else ... break`` loop
    broke out of the loop on the very first row whether it matched or not,
    so whenever the chosen game was not the first DB row the user got the
    generic message instead of the stored answer. We now look up the record
    that actually matches the chosen game.
    """
    session = db_session.create_session()
    records = session.query(Game).all()
    game_all = [rec.game for rec in records]
    print(game_all)
    n = choice(game_all)
    mar = types.InlineKeyboardMarkup()
    mar.add(types.InlineKeyboardButton('игра', url=n))
    try:
        # Find the record for the randomly chosen game URL.
        chosen = next((rec for rec in records if rec.game == n), None)
        if chosen is not None:
            msg = chosen.answer_game.split('|')
            print(msg)
            if len(msg) == 1:
                # Single stored answer: send it with a small header.
                bot.send_message(message.chat.id,
                                 "Поиграй в эту игру\n" + chosen.answer_game,
                                 reply_markup=mar
                                 )
            else:
                # Several '|'-separated answers: pick one at random.
                bot.send_message(message.chat.id, choice(msg), reply_markup=mar)
        else:
            # Should not happen (n comes from the table), kept as a safety net.
            bot.send_message(message.chat.id,
                             "Поиграй в эту игру и напиши сколько набрал *результат (тут счет)",
                             reply_markup=mar
                             )
    except RuntimeError:
        bot.send_message(message.chat.id, 'Ошибка')
        back(message)
        print('error/game_dif/1_try')
def weather_sup(message):
    """Reply with a 7-day forecast scraped from sinoptik.ua for ``/погода <city>``.

    Falls back to ``error_weather`` when the city page has no content or the
    request/parsing fails.

    Fixes: the catch-all was narrowed from ``BaseException`` (which swallowed
    KeyboardInterrupt/SystemExit) to ``Exception``, and the loop variable no
    longer shadows the ``all`` builtin.
    """
    try:
        found = False
        city = message.text.replace('/погода ', '')
        r = requests.get('https://sinoptik.ua/погода-' + city)
        html = Bs(r.content, 'html.parser')
        for _content in html.select('#content'):
            bot.send_message(message.chat.id, 'погода на 7 дней:')
            # Day cards are laid out in blocks #bd1 .. #bd7.
            for i in range(1, 8):
                for el in html.select('#bd' + str(i)):
                    day = el.select('.day-link')[0].text
                    date = el.select('.date')[0].text
                    month = el.select('.month')[0].text
                    t_min = el.select('.temperature .min')[0].text
                    t_max = el.select('.temperature .max')[0].text
                    bot.send_message(
                        message.chat.id,
                        " День недели : *" + str(day) + "*: \n Дата : *" + str(
                            date) + "* \n Месяц : *" + str(month) + "* \n Мин. температура : *" + str(
                            t_min) + "* \n Мах. температура : *" + str(t_max) + "* \n",
                        parse_mode="Markdown"
                    )
        for el in html.select('#content'):
            text = el.select('.description')[0].text
            bot.send_message(message.chat.id, 'Сегодняшний день:'
                                              '\n' + text)
            found = True
        if not found:
            # No #content blocks found: the city page probably does not exist.
            error_weather(message)
    except Exception:
        print('error/weather_sup/1_try')
        error_weather(message)
def error_weather(message):
    """Tell the user the city lookup failed and show the expected /погода syntax."""
    city = message.text.replace('/погода ', '')
    bot.send_message(message.chat.id,
                     'Ошибка в системе\n'
                     'Может нет такого города как' + ' ' + city)
    bot.send_message(message.chat.id,
                     'Вводите вот так:\n'
                     '\n'
                     '/погода (ваш город)')
| [
"noreply@github.com"
] | Ryslan271.noreply@github.com |
1ab320425a4b1a6568c0ae0d930d6c9f420e792d | 168f8546daf36bead1a9b8f32e8a43fdc5d844cf | /Test/python/multiply.py | cec03f2eaf740cf2f1ca1e9f23d4046fa9dd1500 | [] | no_license | whztt07/RenderFish | ea67915a672096254444765347044c6229681d05 | 7d0a4fd6a01a949091ec05ba93c42aa1760b9408 | refs/heads/master | 2020-05-04T14:47:51.215280 | 2015-11-22T16:42:31 | 2015-11-22T16:42:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 432 | py | '''py_class.py - Python source designed to demonstrate'''
'''the use of python embedding'''
class Multiply:
    """Demonstration class for Python embedding: exposes a fixed-operand and a
    parameterised multiplication helper, both printing and returning the product.

    NOTE: the print statements are Python 2 syntax; this module targets a
    Python 2 embedded interpreter.
    """
    def __init__(self):
        # Fixed demo operands used by multiply().
        self.a = 6
        self.b = 5
    def multiply(self):
        # Multiply the two stored operands, print and return the result.
        c = self.a*self.b
        print 'The result of', self.a, 'x', self.b, ':', c
        return c
    def multiply2(self, a, b):
        # Multiply two caller-supplied operands, print and return the result.
        c = a*b
        print 'The result of', a, 'x', b, ':', c
        return c
return c | [
"yushroom@gmail.com"
] | yushroom@gmail.com |
cf0d0270a063fc06e51ed538e2e450ff033cf88a | 236ecf08b3143c43e8999ad8490969258b036413 | /test/test_scripts/v_user.py | 2e871001fbd4d404311109609f6ee581e4492681 | [] | no_license | hulingfeng211/mywork | fb72d964f27070923bf99d53568353db1c5de3d5 | bf3a68ee8353912f112b729d54710d9eb981cc4d | refs/heads/master | 2021-01-21T04:26:25.068377 | 2016-08-02T05:38:26 | 2016-08-02T05:38:26 | 47,739,864 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,441 | py | # -*- coding:utf-8 -*-
import random
import time
import requests
def get_user_session(cookie=""):
    """Return the hard-coded test session cookies, including the XSRF token.

    The returned dict maps cookie names to values; the ``_xsrf`` value is
    additionally duplicated under ``X-Xsrftoken`` because callers send it as
    the CSRF request header. The ``cookie`` argument is kept for backward
    compatibility and is currently unused (previously it was silently
    shadowed by the loop variable).

    Fixes: the loop variable no longer shadows the parameter, and each pair
    is now split on the first ``=`` only, so cookie values containing ``=``
    survive intact. Dead commented-out login code was removed.
    """
    cookies = {}
    cookie_str = "msid=8dc48fd69f80446fa2304786d54053dd; _xsrf=2|0e2212d9|be3a7f3c45c84f0d31f411bd2e67e1d6|1457438251"
    for part in cookie_str.split(';'):
        # Split only on the first '=' so values containing '=' are preserved.
        key, _, value = part.strip().partition('=')
        key = key.strip()
        cookies[key] = value
        if key == "_xsrf":
            cookies['X-Xsrftoken'] = value
    return cookies
class Transaction(object):
    """One load-test transaction: GET /s/onlineuser with session cookies, timing the call."""
    def __init__(self):
        self.custom_timers = {}
    def run(self):
        url = 'http://localhost:10000/s/onlineuser'
        session_cookies = get_user_session()
        started = time.time()
        # The XSRF token travels both as a header and (as a quirk of
        # get_user_session) inside the cookie dict itself.
        response = requests.get(url, headers={
            'content-type': 'application/json',
            'X-Xsrftoken': session_cookies['X-Xsrftoken']
        }, cookies=session_cookies)
        finished = time.time()
        assert response.status_code == 200
        self.custom_timers['Example_Timer'] = finished - started
if __name__ == '__main__':
    # Ad-hoc smoke test: run a single transaction and dump its timer.
    # (Python 2 print statement; the commented lines are a leftover repeat-run loop.)
    trans = Transaction()
    #for i in range(1,1000):
    #    print i
    #time.sleep(1)
    #    trans.run()
    trans.run()
    print trans.custom_timers
| [
"hulingfeng211@163.com"
] | hulingfeng211@163.com |
eb3e1585341debf43f5683ae5d04f1b4cc7345dd | b9be3202a4db8875299d4a123b7c1e3c7d282eaf | /tensorflow/contrib/learn/python/learn/estimators/dnn.py | 63c262800864b7b2cbaf753220f58867cf376f3d | [
"Apache-2.0"
] | permissive | prafullasd/tensorflow | f25d0eb5997af2500d4bd2f7596d103d7028f048 | 2c55490c6d6d361985dbb0565ab08a648c819949 | refs/heads/master | 2021-01-18T13:31:25.223301 | 2016-06-03T20:27:32 | 2016-06-03T20:27:32 | 60,382,471 | 1 | 0 | null | 2016-06-03T22:58:26 | 2016-06-03T22:58:25 | null | UTF-8 | Python | false | false | 8,828 | py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Deep Neural Network estimators."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib import layers
from tensorflow.contrib.learn.python.learn.estimators import _sklearn
from tensorflow.contrib.learn.python.learn.estimators import dnn_linear_combined
from tensorflow.contrib.learn.python.learn.estimators.base import DeprecatedMixin
from tensorflow.python.ops import nn
class DNNClassifier(dnn_linear_combined.DNNLinearCombinedClassifier):
  """A classifier for TensorFlow DNN models.

  Example:

  ```
  installed_app_id = sparse_column_with_hash_bucket("installed_id", 1e6)
  impression_app_id = sparse_column_with_hash_bucket("impression_id", 1e6)

  installed_emb = embedding_column(installed_app_id, dimension=16,
                                   combiner="sum")
  impression_emb = embedding_column(impression_app_id, dimension=16,
                                    combiner="sum")

  estimator = DNNClassifier(
      feature_columns=[installed_emb, impression_emb],
      hidden_units=[1024, 512, 256])

  # Input builders
  def input_fn_train():  # returns x, y
    pass
  estimator.fit(input_fn=input_fn_train)

  def input_fn_eval():  # returns x, y
    pass
  estimator.evaluate(input_fn=input_fn_eval)
  estimator.predict(x)
  ```

  Input of `fit`, `train`, and `evaluate` should have following features,
  otherwise there will be a `KeyError`:
    if `weight_column_name` is not `None`, a feature with
      `key=weight_column_name` whose value is a `Tensor`.
    for each `column` in `feature_columns`:
    - if `column` is a `SparseColumn`, a feature with `key=column.name`
      whose `value` is a `SparseTensor`.
    - if `column` is a `RealValuedColumn`, a feature with `key=column.name`
      whose `value` is a `Tensor`.
    - if `feature_columns` is `None`, then `input` must contain only real
      valued `Tensor`.

  Parameters:
    hidden_units: List of hidden units per layer. All layers are fully
      connected. Ex. [64, 32] means first layer has 64 nodes and second one has
      32.
    feature_columns: An iterable containing all the feature columns used by the
      model. All items in the set should be instances of classes derived from
      `FeatureColumn`. If `None`, real-valued columns are inferred from the
      training input.
    model_dir: Directory to save model parameters, graph and etc.
    n_classes: number of target classes. Default is binary classification.
      It must be greater than 1.
    weight_column_name: A string defining feature column name representing
      weights. It is used to down weight or boost examples during training. It
      will be multiplied by the loss of the example.
    optimizer: An instance of `tf.Optimizer` used to train the model. If `None`,
      will use an Adagrad optimizer.
    activation_fn: Activation function applied to each layer. If `None`, will
      use `tf.nn.relu`.
    dropout: When not None, the probability we will drop out a given coordinate.
  """

  def __init__(self,
               hidden_units,
               feature_columns=None,
               model_dir=None,
               n_classes=2,
               weight_column_name=None,
               optimizer=None,
               activation_fn=nn.relu,
               dropout=None):
    # Configure only the DNN half of the combined linear/DNN estimator.
    # NOTE(review): model_dir is accepted but not forwarded to the base
    # class — confirm whether this is intentional.
    super(DNNClassifier, self).__init__(n_classes=n_classes,
                                        weight_column_name=weight_column_name,
                                        dnn_feature_columns=feature_columns,
                                        dnn_optimizer=optimizer,
                                        dnn_hidden_units=hidden_units,
                                        dnn_activation_fn=activation_fn,
                                        dnn_dropout=dropout)

  def _get_train_ops(self, features, targets):
    """See base class. Lazily infers real-valued columns from `features` if none were given."""
    if self._dnn_feature_columns is None:
      self._dnn_feature_columns = layers.infer_real_valued_columns(features)
    return super(DNNClassifier, self)._get_train_ops(features, targets)

  @property
  def weights_(self):
    # Expose the DNN weights under the generic estimator attribute name.
    return self.dnn_weights_

  @property
  def bias_(self):
    # Expose the DNN biases under the generic estimator attribute name.
    return self.dnn_bias_
class DNNRegressor(dnn_linear_combined.DNNLinearCombinedRegressor):
  """A regressor for TensorFlow DNN models.

  Example:

  ```
  installed_app_id = sparse_column_with_hash_bucket("installed_id", 1e6)
  impression_app_id = sparse_column_with_hash_bucket("impression_id", 1e6)

  installed_emb = embedding_column(installed_app_id, dimension=16,
                                   combiner="sum")
  impression_emb = embedding_column(impression_app_id, dimension=16,
                                    combiner="sum")

  estimator = DNNRegressor(
      feature_columns=[installed_emb, impression_emb],
      hidden_units=[1024, 512, 256])

  # Input builders
  def input_fn_train():  # returns x, y
    pass
  estimator.fit(input_fn=input_fn_train)

  def input_fn_eval():  # returns x, y
    pass
  estimator.evaluate(input_fn=input_fn_eval)
  estimator.predict(x)
  ```

  Input of `fit`, `train`, and `evaluate` should have following features,
  otherwise there will be a `KeyError`:
    if `weight_column_name` is not `None`, a feature with
      `key=weight_column_name` whose value is a `Tensor`.
    for each `column` in `feature_columns`:
    - if `column` is a `SparseColumn`, a feature with `key=column.name`
      whose `value` is a `SparseTensor`.
    - if `column` is a `RealValuedColumn`, a feature with `key=column.name`
      whose `value` is a `Tensor`.
    - if `feature_columns` is `None`, then `input` must contain only real
      valued `Tensor`.

  Parameters:
    hidden_units: List of hidden units per layer. All layers are fully
      connected. Ex. [64, 32] means first layer has 64 nodes and second one has
      32.
    feature_columns: An iterable containing all the feature columns used by the
      model. All items in the set should be instances of classes derived from
      `FeatureColumn`. If `None`, real-valued columns are inferred from the
      training input.
    model_dir: Directory to save model parameters, graph and etc.
    weight_column_name: A string defining feature column name representing
      weights. It is used to down weight or boost examples during training. It
      will be multiplied by the loss of the example.
    optimizer: An instance of `tf.Optimizer` used to train the model. If `None`,
      will use an Adagrad optimizer.
    activation_fn: Activation function applied to each layer. If `None`, will
      use `tf.nn.relu`.
    dropout: When not None, the probability we will drop out a given coordinate.
  """

  def __init__(self,
               hidden_units,
               feature_columns=None,
               model_dir=None,
               weight_column_name=None,
               optimizer=None,
               activation_fn=nn.relu,
               dropout=None):
    # Configure only the DNN half of the combined linear/DNN estimator.
    # NOTE(review): model_dir is accepted but not forwarded to the base
    # class — confirm whether this is intentional.
    super(DNNRegressor, self).__init__(weight_column_name=weight_column_name,
                                       dnn_feature_columns=feature_columns,
                                       dnn_optimizer=optimizer,
                                       dnn_hidden_units=hidden_units,
                                       dnn_activation_fn=activation_fn,
                                       dnn_dropout=dropout)

  def _get_train_ops(self, features, targets):
    """See base class. Lazily infers real-valued columns from `features` if none were given."""
    if self._dnn_feature_columns is None:
      self._dnn_feature_columns = layers.infer_real_valued_columns(features)
    return super(DNNRegressor, self)._get_train_ops(features, targets)

  @property
  def weights_(self):
    # Expose the DNN weights under the generic estimator attribute name.
    return self.dnn_weights_

  @property
  def bias_(self):
    # Expose the DNN biases under the generic estimator attribute name.
    return self.dnn_bias_
# TensorFlowDNNClassifier and TensorFlowDNNRegressor are deprecated.
class TensorFlowDNNClassifier(DeprecatedMixin, DNNClassifier,
                              _sklearn.ClassifierMixin):
  """Deprecated alias: use `DNNClassifier` instead."""
  pass
class TensorFlowDNNRegressor(DeprecatedMixin, DNNRegressor,
                             _sklearn.RegressorMixin):
  """Deprecated alias: use `DNNRegressor` instead."""
  pass
| [
"gardener@tensorflow.org"
] | gardener@tensorflow.org |
a4c1c455cf5fb61154b9d3c2c35d0661314913f2 | 9b9a02657812ea0cb47db0ae411196f0e81c5152 | /repoData/sdiehl-sockjs-gevent/allPythonContent.py | 20f871a6a70bd7b62eafabef19118c5e95d31179 | [] | no_license | aCoffeeYin/pyreco | cb42db94a3a5fc134356c9a2a738a063d0898572 | 0ac6653219c2701c13c508c5c4fc9bc3437eea06 | refs/heads/master | 2020-12-14T14:10:05.763693 | 2016-06-27T05:15:15 | 2016-06-27T05:15:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 175,363 | py | __FILENAME__ = conf
# -*- coding: utf-8 -*-
#
# gevent-sockjs documentation build configuration file, created by
# sphinx-quickstart on Mon Mar 12 20:11:57 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'gevent-sockjs'
copyright = u'2012, Stephen Diehl & John Debs'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = 'dev'
# The full version, including alpha/beta/rc tags.
release = 'dev'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'gevent-sockjsdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'gevent-sockjs.tex', u'gevent-sockjs Documentation',
u'Stephen Diehl \\& John Debs', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'gevent-sockjs', u'gevent-sockjs Documentation',
[u'Stephen Diehl & John Debs'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'gevent-sockjs', u'gevent-sockjs Documentation',
u'Stephen Diehl & John Debs', 'gevent-sockjs', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
########NEW FILE########
__FILENAME__ = devserver
"""
This module is most like what a user would define in their
application, namely the
- Routes
- Connection Handlers
The one's sketched here are the Echo, Disabled Websockets, and
the Close connection handlers which are used by the protocol test
suite.
"""
import gevent.monkey
# Monkey patching stdlib is not a necessity for all use cases
gevent.monkey.patch_all()
from server import SockJSServer
from router import SockJSRouter, SockJSConnection
# Need to moneky patch the threading module to use greenlets
import werkzeug.serving
class Echo(SockJSConnection):
    """Endpoint that echoes every received message back to the client."""
    def on_message(self, message):
        # Re-queue the payload verbatim on the session's outgoing store.
        self.send(message)
class DisabledWebsocket(SockJSConnection):
    """Endpoint with the websocket transport disabled; used by the
    SockJS protocol test suite."""
    # Transports listed here are refused by SockJSConnection.transport_allowed.
    disallowed_transports = ('websocket',)
    def on_message(self, message):
        # Incoming messages are intentionally dropped.
        pass
class Close(SockJSConnection):
    """Endpoint that closes the session as soon as it opens; used by the
    SockJS protocol test suite."""
    # No transports are disallowed for this endpoint.
    disallowed_transports = ()
    def on_open(self, session):
        # Interrupt the freshly opened session immediately.
        self.close()
    def on_message(self, message):
        # Messages are ignored; the session is already closing.
        pass
# Routing table mapping the URL prefix (e.g. /echo/...) to the
# connection class that services it.
router = SockJSRouter({
    'echo': Echo,
    'close': Close,
    'disabled_websocket_echo': DisabledWebsocket,
})
@werkzeug.serving.run_with_reloader
def devel_server():
    """
    A local server with code reload. Should only be used for
    development.
    """
    # Construct the server outside the try block: in the original, a
    # KeyboardInterrupt raised during construction reached the handler
    # with `sockjs` unbound, turning the interrupt into a NameError.
    sockjs = SockJSServer(('localhost', 8081), router, trace=True)
    try:
        sockjs.serve_forever()
    except KeyboardInterrupt:
        # Graceful shutdown: notifies sessions before killing the server.
        sockjs.kill()
if __name__ == '__main__':
devel_server()
########NEW FILE########
__FILENAME__ = errors
class InvalidJSON(Exception):
    """Raised when a client payload cannot be decoded as JSON."""
    pass
class Http404(Exception):
    """HTTP 404 Not Found, optionally carrying a custom message.

    str(exc) is rendered directly as the response body by the handler.
    """

    def __init__(self, message=None):
        # Fall back to a generic body when no message is supplied.
        # The original validated the type with `assert isinstance(...,
        # basestring)` -- asserts are stripped under -O and basestring is
        # Python-2-only, so the check is dropped.
        self.message = message if message else "404: Page Not Found"
        # Chain into Exception so e.args / repr behave conventionally.
        super(Http404, self).__init__(self.message)

    def __str__(self):
        return self.message
class Http405(Exception):
    """HTTP 405 Method Not Allowed; str(exc) is the response body."""
    def __str__(self):
        return '405: Method Not Allowed'
class Http500(Exception):
    """
    Exception for catching exceptions, also has a slot for a
    stack trace string.

    When a formatted traceback is supplied it doubles as the message so
    the handler can render it directly; otherwise a generic body is used.
    """

    def __init__(self, stacktrace=None):
        # `stacktrace` stays None when not given (matching the original's
        # else branch); the message falls back to a generic body. The
        # original's `assert isinstance(..., basestring)` is dropped:
        # asserts are stripped under -O and basestring is Python-2-only.
        self.stacktrace = stacktrace
        self.message = stacktrace if stacktrace else "500: Internal Server Error"
        # Chain into Exception so e.args / repr behave conventionally.
        super(Http500, self).__init__(self.message)

    def __str__(self):
        return self.message
########NEW FILE########
__FILENAME__ = handler
import uuid
import sys
import re
import datetime
import time
import traceback
from Cookie import SimpleCookie
import gevent
from gevent.pywsgi import WSGIHandler
from geventwebsocket.handler import WebSocketHandler
import protocol
from errors import *
class SockJSHandler(WSGIHandler):
    """
    Base request handler for all HTTP derivative transports, will
    switch over to WSHandler in the case of using Websockets.

    The primary purpose of this class is to delegate raw responses
    from the server through the router and handle the low level
    HTTP.
    """

    # Dynamic URLs, urls serving data
    DYNAMIC_FORMAT = re.compile(r"""
        ^/(?P<route>[^/]+)/      # sockjs route, alphanumeric not empty
        (?P<server_id>[^/.]+)/   # load balancer id, alphanumeric not empty, without (.)
        (?P<session_id>[^/.]+)/  # session id, alphanumeric not empty, without (.)
        (?P<transport>[^/.]+)$   # transport string, (Example: xhr | jsonp ... )
        """, re.X)

    # Static URLs, urls serving static pages
    STATIC_FORMAT = re.compile(r"""
        ^/(?P<route>[^/]+)(/)?   # sockjs route, alphanumeric not empty
        (?P<suffix>[^/]+)?$      # url suffix ( Example: / , info, iframe.html )
        """, re.X)

    # Degenerate raw websocket endpoint: /<route>/websocket
    RAW_FORMAT = re.compile(r"""
        ^/(?P<route>[^/]+)/      # sockjs route, alphanumeric not empty
        websocket$               # url suffix ( Example: / , info, iframe.html )
        """, re.X)

    def prep_response(self):
        """
        Prepare the default headers.

        Calling this will overload any existing headers.
        """
        self.time_start = time.time()
        self.status = None
        self.headers = []
        self.headers_sent = False
        self.result = None
        self.response_use_chunked = False
        self.response_length = 0

    def raw_headers(self):
        """
        Return the available headers as a string, used for low
        level socket handeling.
        """
        head = []
        # Protocol, status line
        head.append('%s %s\r\n' % (self.request_version, self.status))
        for header in self.response_headers:
            head.append('%s: %s\r\n' % header)
        head.append('\r\n')
        return ''.join(head)

    def raw_chunk(self, data):
        """
        Return a raw HTTP chunk, hex encoded size.
        """
        return "%x\r\n%s\r\n" % (len(data), data)

    # Raw write actions
    # -----------------

    def write_text(self, text):
        # 200 response with a plain-text body.
        self.content_type = ("Content-Type", "text/plain; charset=UTF-8")
        self.headers += [self.content_type]
        self.start_response("200 OK", self.headers)
        self.result = [text]
        self.process_result()

    def write_js(self, text):
        # 200 response with a javascript body.
        self.content_type = ("Content-Type",
            "application/javascript; charset=UTF-8")
        self.headers += [self.content_type]
        self.start_response("200 OK", self.headers)
        self.result = [text]
        self.process_result()

    def write_json(self, json):
        # 200 response; the payload is serialized via the protocol module.
        self.content_type = ("Content-Type", "application/json; charset=UTF-8")
        self.headers += [self.content_type]
        self.start_response("200 OK", self.headers)
        self.result = [protocol.encode(json)]
        self.log_request()
        self.process_result()

    def write_html(self, html):
        # 200 response with an HTML body.
        content_type = ("Content-Type", "text/html; charset=UTF-8")
        self.headers += [content_type]
        self.start_response("200 OK", self.headers)
        self.result = [html]
        self.process_result()

    def write_options(self, allowed_methods):
        # Reply to a CORS preflight: advertise methods, enable caching,
        # cookies and CORS headers, then send an empty 204.
        self.headers += [
            ('Access-Control-Allow-Methods', (', '.join(allowed_methods)))
        ]
        self.enable_caching()
        self.enable_cookie()
        self.enable_cors()
        self.write_nothing()

    def write_nothing(self):
        # Empty 204 response with whatever headers were accumulated.
        self.start_response("204 NO CONTENT", self.headers)
        self.result = [None]
        self.log_request()
        self.process_result()

    def greeting(self):
        # Body served at the SockJS base URL.
        self.write_text('Welcome to SockJS!\n')

    def do404(self, message=None, cookie=False):
        """
        Do a 404 NOT FOUND, allow for custom messages and the
        optional ability to return a cookie on the page.
        """
        self.prep_response()
        self.content_type = ("Content-Type", "text/plain; charset=UTF-8")
        self.headers += [self.content_type]
        if cookie:
            self.enable_cookie()
        self.start_response("404 NOT FOUND", self.headers)
        if message:
            self.result = [message]
        else:
            self.result = ['404 Error: Page not found']
        self.process_result()
        # Drop any unread request body before logging.
        self.wsgi_input._discard()
        self.time_finish = time.time()
        self.log_request()

    def do500(self, stacktrace=None, message=None):
        """
        Handle 500 errors, if we're in an exception context then
        print the stack trace if SockJSServer has trace=True.
        """
        self.prep_response()
        if self.server.trace and not message:
            # If we get an explicit stack trace use that,
            # otherwise grab it from the current frame.
            if stacktrace:
                pretty_trace = stacktrace
            else:
                exc_type, exc_value, exc_tb = sys.exc_info()
                stack_trace = traceback.format_exception(exc_type, exc_value, exc_tb)
                pretty_trace = str('\n'.join(stack_trace))
            self.start_response("500 INTERNAL SERVER ERROR", self.headers)
            self.result = [pretty_trace]
        else:
            self.content_type = ("Content-Type", "text/plain; charset=UTF-8")
            self.headers += [self.content_type]
            self.start_response("500 INTERNAL SERVER ERROR", self.headers)
            # NOTE(review): typo "Interneal" in the user-facing fallback
            # body; left untouched here since it is runtime output.
            self.result = [message or '500: Interneal Server Error']
        self.process_result()
        self.time_finish = time.time()
        self.log_request()

    # Header Manipulation
    # -------------------

    def enable_cors(self):
        # Echo the request's Origin back (or wildcard) and allow credentials.
        origin = self.environ.get("HTTP_ORIGIN", '*')
        self.headers += [
            ('access-control-allow-origin', origin),
            ('access-control-allow-credentials', 'true')
        ]

    def enable_nocache(self):
        self.headers += [
            ('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0'),
        ]

    def enable_cookie(self, cookies=None):
        """
        Given a list of cookies, add them to the header.
        If not then add a dummy JSESSIONID cookie.
        """
        # An incoming Cookie header takes precedence over the argument.
        if self.environ.get('HTTP_COOKIE'):
            cookies = [SimpleCookie(self.environ.get('HTTP_COOKIE'))]
        if cookies:
            for cookie in cookies:
                for morsel in cookie.values():
                    morsel['path'] = '/'
                # TODO: fixme
                # NOTE(review): splitting the rendered cookie on ':' breaks
                # if any attribute value contains a colon.
                k, v = cookie.output().split(':')[0:2]
                self.headers += [(k,v)]
        else:
            # Placeholder cookie for sticky-session load balancers.
            cookie = SimpleCookie()
            cookie['JSESSIONID'] = 'dummy'
            cookie['JSESSIONID']['path'] = '/'
            k, v = cookie.output().split(':')
            self.headers += [(k,v)]

    def enable_caching(self):
        # Cache for one year.
        d = datetime.datetime.now() + datetime.timedelta(days=365)
        s = datetime.timedelta(days=365).total_seconds()
        self.headers += [
            ('Cache-Control', 'max-age=%d, public' % s),
            # NOTE(review): the Expires value carries no timezone suffix
            # (e.g. "GMT"), which HTTP date parsing normally requires.
            ('Expires', d.strftime('%a, %d %b %Y %H:%M:%S')),
            ('access-control-max-age', int(s)),
        ]

    def handle_websocket(self, tokens, raw=False):
        # Hand the already-accepted socket over to the websocket handler,
        # then copy our parsed request state onto it.
        handle = WSHandler(
            self.socket,
            self.client_address,
            self.server,
            self.rfile,
        )
        handle.tokens = tokens
        handle.raw = raw
        # NOTE(review): this update happens after tokens/raw are set and
        # would clobber them if self ever carried attributes of the same
        # name.
        handle.__dict__.update(self.__dict__)
        return handle.handle_one_response()

    def handle_one_response(self):
        # Main dispatch: classify the URL as raw-websocket, static, or
        # dynamic (session) and route accordingly.
        path = self.environ.get('PATH_INFO')
        meth = self.environ.get("REQUEST_METHOD")
        self.router = self.server.application
        self.session_pool = self.server.session_pool
        # Static URLs
        # -----------
        static_url = self.STATIC_FORMAT.match(path)
        dynamic_url = self.DYNAMIC_FORMAT.match(path)
        raw_url = self.RAW_FORMAT.match(path)
        # The degenerate raw websocket endpoint
        if raw_url:
            tokens = raw_url.groupdict()
            tokens['transport'] = 'rawwebsocket'
            # An ad-hoc session
            tokens['session'] = uuid.uuid4()
            return self.handle_websocket(tokens, raw=True)
        elif static_url:
            tokens = static_url.groupdict()
            route = tokens['route']
            suffix = tokens['suffix']
            try:
                static_serve = self.router.route_static(route, suffix)
                raw_request_data = self.wsgi_input.readline()
                self.wsgi_input._discard()
                self.prep_response()
                static_serve(self, meth, raw_request_data)
            except Http404 as e:
                return self.do404(e.message)
            except Http500 as e:
                return self.do500(e.stacktrace)
        elif dynamic_url:
            tokens = dynamic_url.groupdict()
            route = tokens['route']
            session_uid = tokens['session_id']
            server = tokens['server_id']
            transport = tokens['transport']
            if transport == 'websocket':
                return self.handle_websocket(tokens)
            try:
                # Router determines the downlink route as a
                # function of the given url parameters.
                downlink = self.router.route_dynamic(
                    route,
                    session_uid,
                    server,
                    transport
                )
                # A downlink is some data-dependent connection
                # to the client taken as a result of a request.
                raw_request_data = self.wsgi_input.readline()
                self.prep_response()
                threads = downlink(self, meth, raw_request_data)
                # Block until the transport's greenlets finish.
                gevent.joinall(threads)
            except Http404 as e:
                return self.do404(e.message, cookie=True)
            except Http500 as e:
                return self.do500(e.stacktrace)
            except Exception:
                return self.do500()
        else:
            self.do404()
class WSHandler(WebSocketHandler):
    """
    A WSGI-esque handler but the underlying connection is a
    websocket instead of a HTTP.

    The base SockJS handler will delegate to this in the case of
    using any websocket transport, it will then upgrade to the
    websocket and throw away any existing HTTP information.
    """

    def prep_response(self):
        """
        Prepare the default headers.

        Calling this will overload any existing headers.

        NOTE(review): duplicated verbatim from SockJSHandler; candidate
        for a shared mixin.
        """
        self.time_start = time.time()
        self.status = None
        self.headers = []
        self.headers_sent = False
        self.result = None
        self.response_use_chunked = False
        self.response_length = 0

    def bad_request(self):
        """
        Sent if we have invaild Connection headers.
        """
        self.prep_response()
        self.start_response('400 BAD REQUEST', [
            ("Content-Type", "text/plain; charset=UTF-8")
        ])
        self.result = ['Can "Upgrade" only to "WebSocket".']
        self.process_result()

    def not_allowed(self):
        # Websocket endpoints only accept GET.
        self.prep_response()
        self.start_response('405 NOT ALLOWED', [('allow', True)])
        self.result = []
        self.process_result()

    def handle_one_response(self):
        self.pre_start()
        environ = self.environ
        upgrade = environ.get('HTTP_UPGRADE', '').lower()
        meth = self.environ.get('REQUEST_METHOD')
        if meth != 'GET':
            return self.not_allowed()
        # Upgrade the connect if we have the proper headers
        if upgrade == 'websocket':
            connection = environ.get('HTTP_CONNECTION', '').lower()
            if 'upgrade' in connection:
                return self._handle_websocket()
        # Malformed request
        self.bad_request()

    def _handle_websocket(self):
        """
        Slightly overloaded version of gevent websocket handler,
        delegates the connection to the right protocol and then
        procedes to invoke the router to figure out what to do.
        """
        environ = self.environ
        try:
            try:
                # hybi (RFC 6455) vs the older hixie drafts.
                # NOTE(review): if neither header is present, `result`
                # is referenced below while unbound (NameError).
                if environ.get("HTTP_SEC_WEBSOCKET_VERSION"):
                    result = self._handle_hybi()
                elif environ.get("HTTP_ORIGIN"):
                    result = self._handle_hixie()
            except:
                self.close_connection = True
                raise
            self.result = []
            if not result:
                return
            self.route(environ, None)
            return []
        finally:
            self.log_request()

    def route(self, environ, start_response):
        """
        Route the websocket pipe to its transport handler. Logic
        is more or less identical to HTTP logic instead of
        exposing the WSGI handler we expose the socket.
        """
        self.router = self.server.application
        websocket = environ.get('wsgi.websocket')
        meth = environ.get("REQUEST_METHOD")
        # The only mandatory url token
        route = self.tokens['route']
        session_uid = self.tokens.get('session_id', None)
        server = self.tokens.get('server_id', None)
        transport = self.tokens.get('transport', None)
        # We're no longer dealing with HTTP so throw away
        # anything we received.
        self.wsgi_input._discard()
        downlink = self.router.route_dynamic(
            route,
            session_uid,
            server,
            transport
        )
        #downlink.raw = self.raw
        threads = downlink(websocket, None, None)
        # This is a neat trick ( due to Jeffrey Gellens ), of
        # keeping track of the transport threads at the handler
        # level, this ensures that if this thread is forcefully
        # terminated the transport's actions will subsequently
        # die.
        gevent.joinall(threads)
########NEW FILE########
__FILENAME__ = protocol
import hashlib
from errors import *
from simplejson.decoder import JSONDecodeError
# -----------
# Serializer
# -----------
# Fastest
# TODO:
# Should add some caveats about the unicode compatability
# with ujson...
try:
import ujson
has_ujson = True
except ImportError:
has_ujson = False
# Faster
try:
import simplejson
has_simplejson = True
except ImportError:
has_simplejson = False
# Slowest
try:
import json
has_json = True
except ImportError:
# should never happen
has_json = False
def pick_serializer():
    """
    Return the fastest JSON module that imported successfully:
    ujson > simplejson > stdlib json.

    Raises ImportError if none is available; the original silently
    returned None here, which surfaced later as a cryptic
    AttributeError on the first encode/decode call.
    """
    if has_ujson:
        return ujson
    elif has_simplejson:
        return simplejson
    elif has_json:
        return json
    raise ImportError("No JSON serializer module is available")
json = pick_serializer()
# Frames
# ------
OPEN = "o\n"
CLOSE = "c"
MESSAGE = "a"
HEARTBEAT = "h\n"
# ------------------
IFRAME_HTML = """
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<script>
document.domain = document.domain;
_sockjs_onload = function(){SockJS.bootstrap_iframe();};
</script>
<script src="%s"></script>
</head>
<body>
<h2>Don't panic!</h2>
<p>This is a SockJS hidden iframe. It's used for cross domain magic.</p>
</body>
</html>
""".strip()
IFRAME_MD5 = hashlib.md5(IFRAME_HTML).hexdigest()
HTMLFILE_IFRAME_HTML = r"""
<!doctype html>
<html><head>
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
</head><body><h2>Don't panic!</h2>
<script>
document.domain = document.domain;
var c = parent.%s;
c.start();
function p(d) {c.message(d);};
window.onload = function() {c.stop();};
</script>
""".strip()
def encode(message):
    """
    Serialize a Python value to the JSON wire format ("Python to JSON").

    Strings are wrapped in a single-element array ("abc" -> '["abc"]')
    to match the SockJS framing; everything else is dumped directly
    with compact separators.
    """
    if isinstance(message, basestring):
        # The original concatenated the raw string into a JSON literal
        # ('["' + message + '"]'), which produced invalid JSON whenever
        # the message contained a quote, backslash or control character.
        # Route it through the serializer so it is escaped correctly.
        return json.dumps([message], separators=(',', ':'))
    # The original's `isinstance(message, (object, dict, list))` check was
    # always true (everything is an object), so its ValueError branch was
    # unreachable; any non-string goes straight to the serializer, which
    # raises if the value cannot be represented.
    return json.dumps(message, separators=(',', ':'))
def decode(data):
    """
    Deserialize a JSON payload ("JSON to Python").

    Raises InvalidJSON when the body cannot be parsed.
    """
    messages = []
    # Incoming bytes are always UTF-8 per the SockJS protocol.
    data = data.decode('utf-8')
    # "a['123', 'abc']" -> [123, 'abc']
    try:
        messages = json.loads(data)
    except JSONDecodeError:
        raise InvalidJSON()
    return messages
def close_frame(code, reason, newline=True):
    """Build a SockJS close frame, e.g. 'c[3000,"Go away"]\\n'.

    The trailing newline is required by the streaming transports and
    can be suppressed with newline=False.
    """
    frame = '%s[%d,"%s"]' % (CLOSE, code, reason)
    return frame + '\n' if newline else frame
def message_frame(data):
    """Prefix an already-encoded JSON array with the message frame marker."""
    # Sanity checks: the payload must be a string carrying a JSON array.
    assert isinstance(data, basestring)
    assert '[' in data
    assert ']' in data
    return MESSAGE + data
def enum(*sequential, **named):
    """Create an ad-hoc enumeration type.

    Positional names are numbered 0..n-1 in order; keyword arguments
    supply explicit values.
    """
    members = {name: index for index, name in enumerate(sequential)}
    members.update(named)
    return type('Enum', (), members)
FRAMES = enum( 'CLOSE', 'OPEN', 'MESSAGE', 'HEARTBEAT' )
########NEW FILE########
__FILENAME__ = router
import re
import transports
import static
from errors import *
# Route Tables
# ============
class RegexRouter(object):
    """
    A hybrid hash table / regex matching table.

    Lookups first try an O(1) exact-key hit and fall back to scanning
    the compiled patterns in insertion order, worst case O(n).
    """

    def __init__(self, dct):
        # Instance-level tables. The original declared `_re` and `_dct`
        # as mutable *class* attributes and mutated them here, so every
        # RegexRouter instance shared (and polluted) the same tables.
        self._re = []
        self._dct = {}
        # .items() works on both Python 2 and 3 (iteritems is py2-only).
        for key, value in dct.items():
            try:
                self._re.append((re.compile(key), value))
            except (re.error, TypeError):
                # Key is not a usable pattern (bad regex, or a non-string
                # such as None); it still works as a plain hash key below.
                pass
            self._dct[key] = value

    def __getitem__(self, key):
        if key in self._dct:
            return self._dct[key]
        for pattern, value in self._re:
            if pattern.match(key):
                return value
        raise KeyError(key)
# Exact-or-regex table for the static endpoints: the greeting page,
# /info, and the iframe bootstrap page.
static_routes = RegexRouter({
    None: static.Greeting,
    'info': static.InfoHandler,
    r'iframe[0-9-.a-z_]*.html': static.IFrameHandler,
})

# Transport-name -> transport class table for the session endpoints.
dynamic_routes = {
    # Ajax Transports
    # ==============
    'xhr': transports.XHRPolling,
    'xhr_send': transports.XHRSend,
    'xhr_streaming': transports.XHRStreaming,
    'jsonp': transports.JSONPolling,
    'jsonp_send': transports.JSONPSend,
    # WebSockets
    # ===============
    'websocket': transports.WebSocket,
    'rawwebsocket': transports.RawWebSocket,
    # File Transports
    # ===============
    'eventsource': transports.EventSource,
    'htmlfile': transports.HTMLFile,
    'iframe': transports.IFrame,
}
class SockJSConnection(object):
    """
    User-facing connection object: subclass this and override the
    event callbacks to implement an application endpoint.
    """

    # Transport names this endpoint refuses (see transport_allowed).
    disallowed_transports = tuple()

    def __init__(self, session):
        self.session = session

    @classmethod
    def transport_allowed(cls, transport):
        # True unless the transport is explicitly disallowed by the class.
        return transport not in cls.disallowed_transports

    # Event Callbacks
    # ===============

    def on_open(self, request):
        """Called once when the session is first opened."""
        pass

    def on_message(self, message):
        """Called for each client message; subclasses must override."""
        raise NotImplementedError()

    def on_close(self):
        """Called when the session times out or is closed."""
        pass

    def on_error(self, exception):
        """Called on transport errors; override to handle them."""
        raise NotImplementedError()

    # Server side actions
    # ===================

    def send(self, message):
        # Queue a message on the session's outgoing store.
        if self.session:
            self.session.add_message(message)
        else:
            raise Exception("Tried to send message over closed session")

    def broadcast(self, channel, message):
        raise NotImplementedError()

    def close(self):
        # Client-visible close: interrupt the underlying session.
        if self.session:
            self.session.interrupt()
        else:
            raise Exception("Tried to close closed session")
class SockJSRouter(object):
    """
    Maps SockJS routes to connection classes and builds the transport
    downlink for each request.
    """

    def __init__(self, applications):
        """
        Set up the routing table for the specific routes attached
        to this server.
        """
        # Instance-level table: the original declared `routes = {}` as a
        # class attribute and mutated it here, so every router instance
        # shared the same routing table.
        self.routes = {}
        # .items() works on both Python 2 and 3 (iteritems is py2-only).
        for route, connection in applications.items():
            self.routes[route] = connection

    def route_static(self, route, suffix):
        """Return a static page handler instance for route/suffix."""
        try:
            route_handle = self.routes[route]
        except KeyError:
            raise Http404('No such route')
        try:
            handle_cls = static_routes[suffix]
        except KeyError:
            raise Http404('No such static page ' + str(suffix))
        return handle_cls(route_handle)

    def route_dynamic(self, route, session_uid, server, transport):
        """
        Return the downlink transport to the client resulting
        from request.
        """
        try:
            conn_cls = self.routes[route]
        except KeyError:
            raise Http500('No such route')
        try:
            transport_cls = dynamic_routes[transport]
        except KeyError:
            raise Http500('No such transport')

        # Send-only transports must target an existing session;
        # receive/bidirectional transports may create one.
        if transport_cls.direction == 'send':
            create_if_null = False
        elif transport_cls.direction in ('recv', 'bi'):
            create_if_null = True
        else:
            raise Exception('Could not determine direction')

        session = self.server.get_session(session_uid, create_if_null)
        if not session:
            raise Http404()

        # Initialize the transport and connection; any side-effectful
        # setup is in __init__, the communication is invoked by __call__.
        conn = conn_cls(session)
        downlink = transport_cls(session, conn)

        # is_new is a *method* on Session: the original truth-tested the
        # bound method itself (always truthy), so on_open fired and the
        # on_close callback was re-registered on every single request
        # instead of only the session's first.
        if session.is_new():
            conn.on_open(session)
            session.timeout.rawlink(lambda g: conn.on_close())
        return downlink

    def __call__(self, environ, start_response):
        # NotImplemented is a value, not an exception class; raising it
        # (as the original did) is itself a TypeError.
        raise NotImplementedError()
########NEW FILE########
__FILENAME__ = server
import session
from handler import SockJSHandler
from sessionpool import SessionPool
from gevent.pywsgi import WSGIServer
class SockJSServer(WSGIServer):
    """
    The base SockJS server, subclasses gevent.pywsgi.WSGIServer
    """

    # Pluggable session backend; must implement the Session interface.
    session_backend = session.MemorySession
    handler_class = SockJSHandler

    def __init__(self, *args, **kwargs):
        """
        Initialize the SockJS server

        Options:

            listener : ( address, port )
            application : The SockJS router instance
            trace : Show stack traces on 500 status code

        Example::

            sockjs = SockJSServer(('',8081), router)
            sockjs.serve_forever()
        """
        # Pop before delegating so WSGIServer never sees the kwarg.
        self.trace = kwargs.pop('trace', False)
        super(SockJSServer, self).__init__(*args, **kwargs)
        # Garbage-collected pool of live sessions.
        self.session_pool = SessionPool()
        self.session_pool.start_gc()
        # hack to get the server inside the router
        self.application.server = self

    def del_session(self, uid):
        # NOTE(review): `self.sessions` is never defined on this class,
        # so this raises AttributeError; presumably it should delegate
        # to self.session_pool. TODO confirm against SessionPool's API.
        del self.sessions[uid]

    def get_session(self, session_id='', create_if_null=False):
        """
        Return an existing or initialize a new session with the
        session id passed.
        """
        # Is it an existing session?
        session = self.session_pool.get(session_id)
        # Otherwise let the client choose their session_id, if
        # this transport direction allows
        if create_if_null and session is None:
            session = self.session_backend(self, session_id=session_id)
            self.session_pool.add(session)
        elif session:
            session.incr_hits()
        return session

    def kill(self):
        """
        Shutdown the server, block to inform the sessions that
        they are closing.
        """
        self.session_pool.shutdown()
        super(SockJSServer, self).kill()
########NEW FILE########
__FILENAME__ = session
import uuid
from gevent.queue import Queue, Empty
from gevent.event import Event
from datetime import datetime, timedelta
class Session(object):
    """
    Base class for Session objects. Provides for different
    backends for queueing messages for sessions.

    Subclasses are expected to overload the add_message and
    get_messages to reflect their storage system.
    """

    # Sessions time out after 5 seconds of inactivity unless persisted.
    expires = timedelta(seconds=5)

    def __init__(self, server, session_id=None):
        self.expires_at = datetime.now() + self.expires
        self.expired = False
        self.forever = False

        # Honor a caller-chosen id: the original accepted session_id but
        # ignored it and always generated a fresh uid.
        self.session_id = session_id or self.generate_uid()

        # Counters used by incr_hits()/heartbeat(); the original left
        # these to subclasses, so using Session directly raised
        # AttributeError.
        self.hits = 0
        self.heartbeats = 0

        # Whether this was closed explictly by client vs
        # internally by garbage collection.
        self.interrupted = False

        # When a polling request is closed by a network error - not by
        # server, the session should be automatically closed. When there
        # is a network error - we're in an undefined state. Some messages
        # may have been lost, there is not much we can do about it.
        self.network_error = False

        # Async events, use rawlink to string callbacks
        self.timeout = Event()
        self.locked = Event()

    def generate_uid(self):
        """
        Returns a string of the unique identifier of the session.
        """
        return str(uuid.uuid4())

    def persist(self, extension=None, forever=False):
        """
        Bump the time to live of the session by a given amount,
        or forever.
        """
        self.expired = False
        if forever:
            self.forever = True
            return
        # Slide the expiration time one more expiration interval
        # into the future
        if extension is None:
            self.expires_at = datetime.now() + self.expires
        else:
            self.expires_at = datetime.now() + extension
        self.forever = False

    def post_delete(self):
        """Hook invoked after the session is removed from its pool."""
        pass

    def kill(self):
        """Mark the session killed and expire it."""
        self.killed = True
        self.expire()

    def expire(self):
        """
        Manually expire a session.
        """
        self.expired = True
        self.forever = False

    def incr_hits(self):
        self.hits += 1

    def is_new(self):
        """True until the session has served its first request."""
        return self.hits == 0

    def heartbeat(self):
        """Extend the session lifetime and count the heartbeat."""
        self.persist()
        self.heartbeats += 1
        return self.heartbeats

    def add_message(self, msg):
        # NotImplemented is a value, not an exception class; raising it
        # (as the original did) is itself a TypeError.
        raise NotImplementedError()

    def get_messages(self, **kwargs):
        raise NotImplementedError()

    def is_locked(self):
        return self.locked.is_set()

    def is_network_error(self):
        return self.network_error

    def is_expired(self):
        return self.expired

    def is_interrupted(self):
        return self.interrupted

    def lock(self):
        self.locked.set()

    def unlock(self):
        self.locked.clear()

    def __str__(self):
        # The original body was `pass`, so str(session) raised
        # "TypeError: __str__ returned non-string (type NoneType)".
        return self.session_id
class MemorySession(Session):
    """
    In-memory session backed by an outgoing gevent Queue as the
    message store.
    """

    def __init__(self, server, session_id=None):
        super(MemorySession, self).__init__(server, session_id=session_id)
        # Unlike the base class, honor the caller-supplied id; fall back
        # to a truncated uuid4 when absent.
        self.session_id = session_id or str(uuid.uuid4())[:8]
        self.server = server

        self.queue = Queue()   # outgoing messages awaiting a poll
        self.hits = 0          # how many times the session has been polled
        self.heartbeats = 0
        self.connected = False

    def add_message(self, msg):
        """Queue an outgoing message without blocking."""
        self.queue.put_nowait(msg)

    def get_messages(self, **kwargs):
        """
        Drain queued outgoing messages.

        NOTE(review): when the queue is empty this blocks on ``get`` and
        returns a *single* message (or ``[]`` on timeout), while the
        non-empty path returns a *list* of messages -- callers appear to
        tolerate both shapes; confirm before changing.
        """
        timeout = kwargs.get('timeout', None)
        self.incr_hits()
        if self.queue.empty():
            try:
                # Block until a message arrives or the timeout expires.
                return self.queue.get(**kwargs)
            except Empty:
                return []
        else:
            accum = []
            try:
                while not self.queue.empty():
                    if timeout:
                        accum.append(self.queue.get(timeout=timeout))
                    else:
                        accum.append(self.queue.get_nowait())
            finally:
                # Deliberate: whatever was collected so far is returned,
                # even if a get() raised mid-drain.
                return accum

    def interrupt(self):
        """
        A kill event triggered through a client-accessible endpoint.
        Internal expires will not have is_interrupted() == True.
        """
        self.interrupted = True
        self.kill()

    def kill(self):
        self.connected = False

        # Expire only once
        if not self.expired:
            self.expired = True
            self.timeout.set()
########NEW FILE########
__FILENAME__ = sessionpool
import uuid
import gevent
from heapq import heappush, heappop
from datetime import datetime
class SessionPool(object):
    """
    A garbage collected Session Pool.

    Sessions live both in a dict (by id, for O(1) lookup) and in a
    min-heap ordered by expiry so the collector only has to inspect
    the stalest entries.

    See: https://github.com/sdiehl/greengoop
    """

    gc_cycle = 10.0  # seconds between collector sweeps

    def __init__(self):
        self.sessions = dict()
        self.pool = []
        self.gcthread = gevent.Greenlet(self._gc_sessions)

    def __str__(self):
        return str(self.sessions.items())

    def start_gc(self):
        """
        Start the session pool garbage collector. This is broken
        out into a seperate function to give you more granular
        control on the context this thread is spawned in.
        """
        if not self.gcthread.started:
            self.gcthread.start()
            return self.gcthread
        else:
            # Function-call form of print keeps this valid on both
            # Python 2 and 3 (was a Python 2 print statement whose
            # line continuation embedded stray whitespace in the message).
            print("Rejected attempt to start multiple garbage "
                  "collectors on SessionPool instance.")

    def _gc_sessions(self):
        """Collector loop: sweep the pool every ``gc_cycle`` seconds."""
        while True:
            gevent.sleep(self.gc_cycle)
            self.gc()

    def add(self, session):
        """Register a session for lookup and (unless expired) collection."""
        session.cycle = None
        self.sessions[session.session_id] = session

        if not session.expired:
            heappush(self.pool, session)

    def get(self, session_id):
        """
        Get active sessions by their session id.
        """
        return self.sessions.get(session_id, None)

    def remove(self, session_id):
        """Drop a session from the lookup table. Its heap entry is left
        behind for gc() to discard."""
        session = self.sessions.get(session_id, None)

        if session:
            session.post_delete()
            del self.sessions[session_id]

    def shutdown(self):
        """
        Manually expire all sessions in the pool.
        """
        while self.pool:
            head = heappop(self.pool)
            head.expired = True
            head.timeout.set()

    def __del__(self):
        """
        On Python interpreter garbage collection expire all sessions, not
        guaranteed to run!
        """
        self.shutdown()

    def gc(self):
        """
        Rearrange the heap flagging active sessions with the id
        of this collection iteration. This data-structure is
        time-independent so sessions can be added and removed
        without the need to lock the pool.
        """
        if len(self.pool) == 0:
            return

        current_time = datetime.now()

        while self.pool:
            head = self.pool[0]

            # Every session is fresh
            if head.cycle == current_time or head.expires_at > current_time:
                break

            head = heappop(self.pool)

            # Flag the session with the id of this GC cycle
            head.cycle = current_time

            # Session is to be GC'd immediately.
            # BUG FIX: both removal paths used a bare ``del`` which raised
            # KeyError for sessions already dropped via remove() (remove()
            # leaves the heap entry behind); pop() tolerates that, and
            # post_delete() is only fired if the session was still
            # registered (remove() already called it otherwise).
            if head.expired:
                if self.sessions.pop(head.session_id, None) is not None:
                    head.post_delete()
                continue

            if not head.forever and head.expires_at < current_time:
                if self.sessions.pop(head.session_id, None) is not None:
                    head.post_delete()
            else:
                heappush(self.pool, head)
########NEW FILE########
__FILENAME__ = static
import random
import protocol
from errors import *
class Greeting():
    """Route handler for the base URL: replies with the greeting page."""

    def __init__(self, conn_cls):
        # Kept for parity with the other static handlers; the greeting
        # itself does not depend on the connection class.
        self.conn_cls = conn_cls

    def __call__(self, handler, request_method, raw_request_data):
        # Identical response for every request method.
        handler.greeting()
class InfoHandler():
    """Serves the SockJS ``/info`` endpoint: capability and entropy
    probe used by clients before they pick a transport."""

    def __init__(self, conn_cls):
        self.conn_cls = conn_cls

    def __call__(self, handler, request_method, raw_request_data):
        if request_method == 'OPTIONS':
            handler.write_options(['OPTIONS', 'GET'])
        elif request_method == 'GET':
            payload = {
                'cookie_needed': True,
                'websocket': self.conn_cls.transport_allowed('websocket'),
                'origins': ['*:*'],
                # Per-request randomness the client mixes into its ids.
                'entropy': random.randint(1, 2**32),
                'route': self.conn_cls.__name__,
            }
            handler.enable_nocache()
            handler.enable_cors()
            handler.write_json(payload)
class IFrameHandler():
    """Serves the cacheable SockJS iframe page; honors ETag-based
    conditional requests (If-None-Match -> 304)."""

    def __init__(self, route):
        self.route = route

    def __call__(self, handler, request_method, raw_request_data):
        # Only GET is valid for the iframe page.
        if request_method != 'GET':
            raise Http405()

        cached = handler.environ.get('HTTP_IF_NONE_MATCH')

        # TODO: check this is equal to our MD5
        if cached:
            # Client already holds the page; empty 304 response.
            handler.start_response("304 NOT MODIFIED", handler.headers)
            handler.enable_caching()
            handler.result = [None]
            handler.process_result()
            return

        handler.headers += [
            ('ETag', protocol.IFRAME_MD5),
        ]

        # TODO: actually put this in here
        # NOTE(review): template is interpolated with a literal 'http'
        # placeholder -- confirm what IFRAME_HTML actually expects.
        html = protocol.IFRAME_HTML % ('http',)
        handler.enable_caching()
        handler.write_html(html)
########NEW FILE########
__FILENAME__ = transports
import socket
import gevent
import urllib2
import urlparse
import simplejson as json
from socket import error as socketerror
import protocol
from errors import *
from geventwebsocket.websocket import WebSocketError
class BaseTransport(object):
    """
    Base class for SockJS transports. Holds the session and the
    connection object and wraps the protocol's frame codec.
    """

    def __init__(self, session, conn):
        self.session = session
        self.conn = conn

    def encode(self, data):
        """
        Wrapper around the protocol's frame encoding.
        """
        return protocol.encode(data)

    def decode(self, data):
        """
        Wrapper around the protocol's frame decoding.
        """
        return protocol.decode(data)

    def write_frame(self, data):
        """
        Write the data in a frame specifically for this
        transport. Deals with the edge cases of formatting the
        messages for the transports. Things like \n characters
        and Javascript callback frames.
        """
        # BUG FIX: was ``raise NotImplemented()`` -- NotImplemented is a
        # constant, not callable, so that raised TypeError instead of
        # signalling an abstract method.
        raise NotImplementedError()

    def __call__(self, handler, request_method, raw_request_data):
        """
        Downlink function, action taken as a result of the
        specified route.
        """
        # BUG FIX: same NotImplemented -> NotImplementedError fix as above.
        raise NotImplementedError()
# Receiving Transports
# ====================
#
# Recieve messages from the client, provide them to the session
# object and its callbacks, provide confirmation of any actions
# taken per protocol.
class XHRSend(BaseTransport):
    """Receiving transport: accepts a JSON-encoded message batch
    POSTed by the client and feeds it to the connection."""

    direction = 'send'

    def __call__(self, handler, request_method, raw_request_data):
        # CORS preflight.
        if request_method == 'OPTIONS':
            handler.write_options(['OPTIONS', 'POST'])
            return []

        # An empty body is a protocol violation.
        if raw_request_data == '':
            handler.do500(message='Payload expected.')
            return

        try:
            decoded = self.decode(raw_request_data)
        except InvalidJSON:
            handler.do500(message='Broken JSON encoding.')
            return

        for message in decoded:
            self.conn.on_message(message)

        # Acknowledge with an empty 204-style plain-text response.
        handler.content_type = ("Content-Type", "text/plain; charset=UTF-8")
        handler.headers = [handler.content_type]
        handler.enable_cookie()
        handler.enable_cors()
        handler.write_nothing()
        return []
class JSONPSend(BaseTransport):
    """
    Receiving transport (JSONP flavor of XHRSend): accepts a message
    batch either as the ``d`` form field or as a raw JSON body.
    """

    direction = 'recv'

    def __call__(self, handler, request_method, raw_request_data):
        if request_method == 'OPTIONS':
            handler.write_options(['OPTIONS', 'POST'])
            return []

        qs = urlparse.parse_qs(raw_request_data)
        using_formdata = True

        # Do we have a Payload?
        try:
            # ``'d' in qs`` replaces the Python-2-only qs.has_key('d');
            # identical behavior.
            if 'd' in qs:
                using_formdata = True
                payload = qs['d']
            else:
                using_formdata = False
                payload = raw_request_data
        # todo: more granular exception catching
        except Exception:
            handler.do500(message='Payload expected.')
            return

        # Confirm that this at least looks like a JSON array
        if not using_formdata:
            if not ('[' in payload and ']' in payload):
                handler.do500(message='Payload expected.')
                return

        try:
            if using_formdata:
                messages = self.decode(payload[0])
            else:
                messages = self.decode(payload)
        except InvalidJSON:
            handler.do500(message='Broken JSON encoding.')
            # BUG FIX: without this return, execution fell through to the
            # loop below with ``messages`` unbound, crashing with
            # NameError right after the 500 was written.
            return

        for msg in messages:
            self.conn.on_message(msg)

        handler.content_type = ("Content-Type", "text/plain; charset=UTF-8")
        handler.enable_cookie()
        handler.enable_nocache()
        handler.write_text('ok')
        return []
class PollingTransport(BaseTransport):
    """
    Long polling derivative transports, used for XHRPolling and
    JSONPolling.

    Subclasses overload the write_frame method for their
    respective serialization methods.
    """

    direction = 'recv'
    TIMING = 5.0  # seconds to block waiting for outgoing messages

    def poll(self, handler):
        """
        Spin lock the thread until we have a message on the
        gevent queue, then write it out as a single response.
        """
        messages = self.session.get_messages(timeout=self.TIMING)
        messages = self.encode(messages)

        self.session.unlock()

        handler.start_response("200 OK", [
            ("Access-Control-Allow-Origin", "*"),
            ("Connection", "close"),
            self.content_type,
        ])

        handler.write_text(self.write_frame(messages))

    def __call__(self, handler, request_method, raw_request_data):
        """
        On the first poll, send back the open frame; on
        subsequent calls actually poll the queue.
        """
        if request_method == 'OPTIONS':
            handler.write_options(['OPTIONS', 'POST'])
            return []

        if self.session.is_new():
            handler.enable_cookie()
            handler.enable_cors()
            handler.write_js(protocol.OPEN)
            return []

        elif self.session.is_network_error():
            interrupt_error = protocol.close_frame(1002, "Connection interrupted")
            handler.write_text(interrupt_error)
            return []

        elif self.session.is_expired():
            close_error = protocol.close_frame(3000, "Go away!")
            handler.write_text(close_error)
            return []

        elif self.session.is_locked():
            # Only one receiving connection may poll a session at a time.
            lock_error = protocol.close_frame(2010, "Another connection still open")
            self.session.network_error = True
            handler.write_text(lock_error)
            return []

        else:
            self.session.lock()
            return [gevent.spawn(self.poll, handler)]

    def write_frame(self, data):
        # BUG FIX: was ``raise NotImplemented()`` -- NotImplemented is a
        # constant, not callable, so that raised TypeError instead of
        # signalling an abstract method.
        raise NotImplementedError()
# Polling Transports
# ==================
#
# Poll for new messages on the server.
class XHRPolling(PollingTransport):
    """XHR long-polling: each response carries one newline-terminated
    SockJS message frame."""

    direction = 'recv'
    TIMING = 2
    content_type = ("Content-Type", "text/html; charset=UTF-8")

    def write_frame(self, data):
        frame = protocol.message_frame(data)
        return frame + '\n'
class JSONPolling(PollingTransport):
    """JSONP variant of long polling: frames are wrapped in the
    client-supplied callback so they can be evaluated via a
    script tag."""

    direction = 'recv'
    content_type = ("Content-Type", "text/plain; charset=UTF-8")

    def write_frame(self, data):
        # Frame is JSON-escaped and wrapped in the JSONP callback.
        frame = protocol.json.dumps(protocol.message_frame(data))
        return """%s(%s);\r\n""" % (self.callback, frame)

    def __call__(self, handler, request_method, raw_request_data):
        # The callback name rides in the query string (e.g. ?c=fn).
        # NOTE(review): this takes everything after the first '=' of the
        # raw query string -- breaks if any other parameter precedes the
        # callback; confirm client always sends it first.
        try:
            callback_param = handler.environ.get("QUERY_STRING").split('=')[1]
            self.callback = urllib2.unquote(callback_param)
        except IndexError:
            handler.do500(message='"callback" parameter required')
            return

        if request_method == 'OPTIONS':
            handler.write_options(['OPTIONS', 'POST'])
            return []

        if self.session.is_new():
            handler.enable_nocache()
            handler.enable_cookie()
            handler.enable_cors()
            # Open frame wrapped in the JSONP callback.
            open_frame = '%s("o");\r\n' % self.callback
            handler.write_js(open_frame)
            return []

        elif self.session.is_expired():
            close_error = protocol.close_frame(3000, "Go away!")
            handler.write_text(close_error)
            return []

        elif self.session.is_locked():
            # Only one receiving connection per session.
            lock_error = protocol.close_frame(2010, "Another connection still open")
            handler.write_text(lock_error)
            return []

        else:
            self.session.lock()
            return [gevent.spawn(self.poll, handler)]
class XHRStreaming(PollingTransport):
    """Streaming transport: keeps the HTTP response open and pushes
    chunked frames until ``response_limit`` bytes are written, after
    which the client reconnects."""

    direction = 'recv'
    TIMING = 2
    # THIS NUMBER MAY NOT BE RIGHT. DEEP MAGIC.
    response_limit = 4224
    # 2KB+newline prelude; defeats proxy/browser response buffering.
    prelude = 'h' * 2048 + '\n'
    content_type = ("Content-Type", "application/javascript; charset=UTF-8")

    def write_prelude(self, handler):
        """Write status line, headers and the prelude chunk; returns
        (raw socket writer, bytes written so far)."""
        handler.enable_cookie()
        handler.enable_cors()

        # https://groups.google.com/forum/#!msg/sockjs/bl3af2zqc0A/w-o3OK3LKi8J
        if handler.request_version == 'HTTP/1.1':
            handler.headers += [
                self.content_type,
                ("Transfer-Encoding", "chunked"),
                ('Connection', 'keep-alive'),
            ]
        elif handler.request_version == 'HTTP/1.0':
            # No chunking on 1.0; close terminates the response instead.
            handler.headers += [
                self.content_type,
                ('Connection', 'close'),
            ]

        # Use very low level api here, since we want more granular
        # control over our response
        handler.start_response("200 OK", handler.headers)
        headers = handler.raw_headers()

        try:
            writer = handler.socket.makefile()
            written = 0

            writer.write(headers)
            writer.flush()

            prelude_chunk = handler.raw_chunk(self.prelude)
            writer.write(prelude_chunk)
            writer.flush()
        except socket.error:
            # Peer went away mid-handshake.
            # NOTE(review): if makefile() itself raised, ``writer`` is
            # unbound and the return below would NameError -- confirm.
            self.session.expire()

        return (writer, written)

    def stream(self, handler):
        """Pump message frames down the open response until the byte
        budget is exhausted or the peer disconnects."""
        writer, written = self.write_prelude(handler)

        try:
            open_chunk = handler.raw_chunk('o\n')
            writer.write(open_chunk)
            writer.flush()

            while written < self.response_limit:
                messages = self.session.get_messages(timeout=self.TIMING)
                messages = self.encode(messages)

                frame = protocol.message_frame(messages) + '\n'
                chunk = handler.raw_chunk(frame)

                writer.write(chunk)
                writer.flush()

                written += len(chunk)
        except socket.error:
            self.session.expire()

        # Zero-length chunk terminates the chunked response.
        zero_chunk = handler.raw_chunk('')
        writer.write(zero_chunk)

        self.session.unlock()

    def __call__(self, handler, request_method, raw_request_data):
        """Dispatch on request method and session state; returns the
        list of greenlets servicing this request."""
        if request_method == 'OPTIONS':
            handler.write_options(['OPTIONS', 'POST'])
            return []

        elif self.session.is_network_error():
            # Previous connection died; tell the client and mark again.
            writer, written = self.write_prelude(handler)
            try:
                interrupt_error = protocol.close_frame(1002, "Connection interrupted")
                interrupt_error_chunk = handler.raw_chunk(interrupt_error)
                writer.write(interrupt_error_chunk)
                writer.flush()
            except socket.error:
                self.session.expire()

            zero_chunk = handler.raw_chunk('')
            writer.write(zero_chunk)

            self.session.network_error = True
            return []

        elif self.session.is_locked():
            # Another receiving connection is already streaming.
            writer, written = self.write_prelude(handler)
            try:
                close_error = protocol.close_frame(2010, "Another connection still open")
                close_error_chunk = handler.raw_chunk(close_error)
                writer.write(close_error_chunk)
                writer.flush()
            except socket.error:
                self.session.expire()

            zero_chunk = handler.raw_chunk('')
            writer.write(zero_chunk)

            self.session.network_error = True
            return []

        self.session.lock()
        return [
            gevent.spawn(self.stream, handler),
        ]
def pad(s):
    """Right-pad ``s`` with spaces to the fixed streaming width
    (1024 + 14 == 1038 characters); longer strings pass through."""
    # ljust is a no-op when len(s) >= 1038, exactly like the original
    # ``s + ' ' * (1024 - len(s) + 14)`` whose multiplier went negative.
    return s.ljust(1024 + 14)
class HTMLFile(BaseTransport):
    """Streaming transport for old browsers: frames are delivered as
    script tags inside a hidden iframe document that invokes the
    client-side callback ``p``."""

    direction = 'recv'
    response_limit = 4096

    def write_frame(self, data):
        # Unused: frames are built inline in stream().
        pass

    def stream(self, handler):
        # The callback name rides in the query string.
        # NOTE(review): splits the raw query string on '=' -- breaks if
        # other parameters precede the callback; confirm client behavior.
        try:
            callback_param = handler.environ.get("QUERY_STRING").split('=')[1]
            self.callback = urllib2.unquote(callback_param)
        except IndexError:
            handler.do500(message='"callback" parameter required')
            return

        # Turn on cookie, turn off caching, set headers
        handler.enable_cookie()
        handler.enable_nocache()
        handler.headers += [
            ("Content-Type", "text/html; charset=UTF-8"),
            ("Transfer-Encoding", "chunked"),
            ('Connection', 'keep-alive'),
        ]

        # Start writing
        handler.start_response("200 OK", handler.headers)
        headers = handler.raw_headers()
        writer = handler.socket.makefile()
        writer.write(headers)
        written = 0

        # Send down HTMLFile IFRAME, padded so browsers render it
        # immediately.
        html = protocol.HTMLFILE_IFRAME_HTML % self.callback
        html = pad(html)
        chunk = handler.raw_chunk(html)
        writer.write(chunk)
        writer.flush()
        written += len(chunk)

        # Open frame.
        chunk = '<script>\np("o");\n</script>\r\n'
        chunk = handler.raw_chunk(chunk)
        writer.write(chunk)
        writer.flush()
        written += len(chunk)

        try:
            while written < self.response_limit:
                messages = self.session.get_messages(timeout=5)
                messages = self.encode(messages)

                frame = protocol.message_frame(messages)
                # JSON-escape the frame so it is a valid JS string arg.
                frame = json.dumps(frame)

                chunk = '<script>\np(%s);\n</script>\r\n' % frame
                chunk = handler.raw_chunk(chunk)

                writer.write(chunk)
                writer.flush()

                written += len(chunk)
        except socket.error:
            self.session.expire()

        # Zero-length chunk terminates the chunked response.
        zero_chunk = handler.raw_chunk('')
        writer.write(zero_chunk)
        writer.close()

    def __call__(self, handler, request_method, raw_request_data):
        return [
            gevent.spawn(self.stream, handler),
        ]
class IFrame(BaseTransport):
    # Placeholder transport: the iframe page itself is served by the
    # static IFrameHandler; this class only records the direction.
    direction = 'recv'
class EventSource(BaseTransport):
    """Server-Sent Events transport: frames are written as ``data:``
    lines on a text/event-stream response."""

    direction = 'recv'
    TIMING = 5.0
    response_limit = 4096

    def encode(self, data):
        # TODO: Not using protocol.encode because it doesn't escape
        # things properly here. The other version should be fixed at
        # some point to avoid duplication.
        data = json.dumps(data, separators=(',', ':'))
        # NOTE(review): after the dumps above ``data`` is always a
        # string, so the isinstance chain below always takes the
        # basestring branch -- the elif/else appear to be dead code;
        # confirm before removing.
        if isinstance(data, basestring):
            # Don't bother calling json, since it's simple
            data = '[' + data + ']'
        elif isinstance(data, (object, dict, list)):
            data = json.dumps(data, separators=(',',':'))
        else:
            raise ValueError("Unable to serialize: %s", str(data))
        return protocol.message_frame(data)

    def stream(self, handler):
        handler.enable_cookie()
        handler.enable_nocache()
        handler.headers += [
            ("Content-Type", "text/event-stream; charset=UTF-8"),
        ]

        write = handler.start_response("200 OK", handler.headers)
        write("\r\n")

        if self.session.is_new():
            # SockJS open frame for a fresh session.
            write("data: o\r\n\r\n")

        written = 0
        while written < self.response_limit:
            messages = self.session.get_messages(timeout=self.TIMING)
            if messages:
                messages = self.encode(messages)
            else:
                # Nothing pending within the timeout; keep-alive frame.
                messages = protocol.HEARTBEAT
            messages = "data: %s\r\n\r\n" % messages

            write(messages)
            written += len(messages)

        # Byte budget exhausted; end the response so the client reconnects.
        writer = handler.socket.makefile()
        zero_chunk = handler.raw_chunk('')
        writer.write(zero_chunk)

    def __call__(self, handler, request_method, raw_request_data):
        return [
            gevent.spawn(self.stream, handler),
        ]
# Socket Transports
# ==================
#
# Provides a bidirectional connection to and from the client.
# Sending and receiving are split in two different threads.
class WebSocket(BaseTransport):
    """Bidirectional transport over a SockJS-framed websocket; the
    send (poll) and receive (put) loops run in separate greenlets."""

    direction = 'bi'

    def poll(self, socket):
        """
        Spin lock the thread until we have a message on the
        gevent queue, forwarding each encoded batch to the peer.
        """
        while not self.session.expired:
            messages = self.session.get_messages()
            messages = self.encode(messages)
            socket.send(protocol.message_frame(messages))

        # Session ended: tell the client to go away.
        close_error = protocol.close_frame(3000, "Go away!", newline=False)
        socket.send(close_error)

        # Session expires, so unlock
        socket.close()
        self.session.unlock()

    def put(self, socket):
        """Receive loop: decode inbound frames and hand every message
        to the connection until the peer closes or sends broken JSON."""
        wsprotocol = socket.protocol  # NOTE(review): unused -- confirm

        while not self.session.is_expired():
            try:
                messages = socket.receive() # blocking
            # geventwebsocket doesn't wrap these failure modes
            # into nice exceptions so we have to catch base Python
            # Exceptions. :(

            # Ignore invalid frames
            except ValueError:
                continue
            except TypeError:
                continue
            # Ignore empty frames
            except WebSocketError:
                continue
            # If the peer closes early then a fobj.read attribute
            # won't exist so ignore.
            except AttributeError:
                break
            #except socketerror:
                #break

            # Hybi = Closed
            # Hixie = None
            if messages is None:
                break

            try:
                messages = protocol.decode(messages)
            except InvalidJSON:
                # When user sends broken data - broken JSON for example, the
                # server must terminate the ws connection.
                break

            for msg in messages:
                self.conn.on_message(msg)

            self.session.incr_hits()

        # Session expires, so unlock
        socket.close()
        self.session.unlock()
        self.session.expire()

    def __call__(self, socket, request_method, raw_request_data):
        # Open frame is sent unconditionally, even for expired sessions.
        socket.send('o')

        if self.session.is_expired():
            close_error = protocol.close_frame(3000, "Go away!", newline=False)
            socket.send(close_error)
            socket.close()
            return []

        #elif self.session.is_locked():
            #lock_error = protocol.close_frame(2010, "Another connection still open")
            #socket.send(lock_error)
            #socket.close()
            #return []

        self.session.lock()

        return [
            gevent.spawn(self.poll, socket),
            gevent.spawn(self.put, socket),
        ]
class RawWebSocket(BaseTransport):
    """Bidirectional websocket transport with no SockJS framing;
    messages are plain strings."""

    direction = 'bi'

    def poll(self, socket):
        # Send loop: flush queued outgoing messages to the peer.
        while not self.session.is_expired():
            messages = self.session.get_messages()
            for message in messages:
                # TODO: this is a hack because the rest of the
                # transports actually use framing and this is the
                # one abberation. But it works...
                if len(message) == 1:
                    socket.send(message[0])
                else:
                    socket.send(message)
        socket.close()

    def put(self, socket):
        # Receive loop: hand each raw string to the connection.
        while not self.session.is_expired():
            # Just read atomic strings and do what the connection
            # wants.
            message = socket.receive() # blocking
            if message is None:
                break
            self.conn.on_message([message])
            self.session.incr_hits()
        socket.close()

    def __call__(self, socket, request_method, raw_request_data):
        if self.session.is_expired():
            socket.close()
            return []
        return [
            gevent.spawn(self.poll, socket),
            gevent.spawn(self.put, socket),
        ]
########NEW FILE########
__FILENAME__ = httplib_fork
"""HTTP/1.1 client library
<intro stuff goes here>
<other stuff, too>
HTTPConnection goes through a number of "states", which define when a client
may legally make another request or fetch the response for a particular
request. This diagram details these state transitions:
(null)
|
| HTTPConnection()
v
Idle
|
| putrequest()
v
Request-started
|
| ( putheader() )* endheaders()
v
Request-sent
|
| response = getresponse()
v
Unread-response [Response-headers-read]
|\____________________
| |
| response.read() | putrequest()
v v
Idle Req-started-unread-response
______/|
/ |
response.read() | | ( putheader() )* endheaders()
v v
Request-started Req-sent-unread-response
|
| response.read()
v
Request-sent
This diagram presents the following rules:
-- a second request may not be started until {response-headers-read}
-- a response [object] cannot be retrieved until {request-sent}
-- there is no differentiation between an unread response body and a
partially read response body
Note: this enforcement is applied by the HTTPConnection class. The
HTTPResponse class does not enforce this state machine, which
implies sophisticated clients may accelerate the request/response
pipeline. Caution should be taken, though: accelerating the states
beyond the above pattern may imply knowledge of the server's
connection-close behavior for certain requests. For example, it
is impossible to tell whether the server will close the connection
UNTIL the response headers have been read; this means that further
requests cannot be placed into the pipeline until it is known that
the server will NOT be closing the connection.
Logical State __state __response
------------- ------- ----------
Idle _CS_IDLE None
Request-started _CS_REQ_STARTED None
Request-sent _CS_REQ_SENT None
Unread-response _CS_IDLE <response_class>
Req-started-unread-response _CS_REQ_STARTED <response_class>
Req-sent-unread-response _CS_REQ_SENT <response_class>
"""
from array import array
import os
import socket
from sys import py3kwarning
from urlparse import urlsplit
import warnings
with warnings.catch_warnings():
if py3kwarning:
warnings.filterwarnings("ignore", ".*mimetools has been removed",
DeprecationWarning)
import mimetools
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
__all__ = ["HTTP", "HTTPResponse", "HTTPConnection",
"HTTPException", "NotConnected", "UnknownProtocol",
"UnknownTransferEncoding", "UnimplementedFileMode",
"IncompleteRead", "InvalidURL", "ImproperConnectionState",
"CannotSendRequest", "CannotSendHeader", "ResponseNotReady",
"BadStatusLine", "error", "responses"]
# Default ports for the two URL schemes.
HTTP_PORT = 80
HTTPS_PORT = 443

# Sentinel for response attributes not yet parsed.
_UNKNOWN = 'UNKNOWN'

# connection states
_CS_IDLE = 'Idle'
_CS_REQ_STARTED = 'Request-started'
_CS_REQ_SENT = 'Request-sent'

# status codes (RFC 2616 plus WebDAV/extension codes)
# informational
CONTINUE = 100
SWITCHING_PROTOCOLS = 101
PROCESSING = 102

# successful
OK = 200
CREATED = 201
ACCEPTED = 202
NON_AUTHORITATIVE_INFORMATION = 203
NO_CONTENT = 204
RESET_CONTENT = 205
PARTIAL_CONTENT = 206
MULTI_STATUS = 207
IM_USED = 226

# redirection
MULTIPLE_CHOICES = 300
MOVED_PERMANENTLY = 301
FOUND = 302
SEE_OTHER = 303
NOT_MODIFIED = 304
USE_PROXY = 305
TEMPORARY_REDIRECT = 307

# client error
BAD_REQUEST = 400
UNAUTHORIZED = 401
PAYMENT_REQUIRED = 402
FORBIDDEN = 403
NOT_FOUND = 404
METHOD_NOT_ALLOWED = 405
NOT_ACCEPTABLE = 406
PROXY_AUTHENTICATION_REQUIRED = 407
REQUEST_TIMEOUT = 408
CONFLICT = 409
GONE = 410
LENGTH_REQUIRED = 411
PRECONDITION_FAILED = 412
REQUEST_ENTITY_TOO_LARGE = 413
REQUEST_URI_TOO_LONG = 414
UNSUPPORTED_MEDIA_TYPE = 415
REQUESTED_RANGE_NOT_SATISFIABLE = 416
EXPECTATION_FAILED = 417
UNPROCESSABLE_ENTITY = 422
LOCKED = 423
FAILED_DEPENDENCY = 424
UPGRADE_REQUIRED = 426

# server error
INTERNAL_SERVER_ERROR = 500
NOT_IMPLEMENTED = 501
BAD_GATEWAY = 502
SERVICE_UNAVAILABLE = 503
GATEWAY_TIMEOUT = 504
HTTP_VERSION_NOT_SUPPORTED = 505
INSUFFICIENT_STORAGE = 507
NOT_EXTENDED = 510

# Mapping status codes to official W3C names
responses = {
    100: 'Continue',
    101: 'Switching Protocols',

    200: 'OK',
    201: 'Created',
    202: 'Accepted',
    203: 'Non-Authoritative Information',
    204: 'No Content',
    205: 'Reset Content',
    206: 'Partial Content',

    300: 'Multiple Choices',
    301: 'Moved Permanently',
    302: 'Found',
    303: 'See Other',
    304: 'Not Modified',
    305: 'Use Proxy',
    306: '(Unused)',
    307: 'Temporary Redirect',

    400: 'Bad Request',
    401: 'Unauthorized',
    402: 'Payment Required',
    403: 'Forbidden',
    404: 'Not Found',
    405: 'Method Not Allowed',
    406: 'Not Acceptable',
    407: 'Proxy Authentication Required',
    408: 'Request Timeout',
    409: 'Conflict',
    410: 'Gone',
    411: 'Length Required',
    412: 'Precondition Failed',
    413: 'Request Entity Too Large',
    414: 'Request-URI Too Long',
    415: 'Unsupported Media Type',
    416: 'Requested Range Not Satisfiable',
    417: 'Expectation Failed',

    500: 'Internal Server Error',
    501: 'Not Implemented',
    502: 'Bad Gateway',
    503: 'Service Unavailable',
    504: 'Gateway Timeout',
    505: 'HTTP Version Not Supported',
}

# maximal amount of data to read at one time in _safe_read
MAXAMOUNT = 1048576

# maximal line length when calling readline().
_MAXLINE = 65536
class HTTPMessage(mimetools.Message):
    """Message subclass with HTTP-specific header handling: repeated
    header fields are combined with commas per RFC 2616 sec 4.2."""

    def addheader(self, key, value):
        """Add header for field key handling repeats."""
        prev = self.dict.get(key)
        if prev is None:
            self.dict[key] = value
        else:
            combined = ", ".join((prev, value))
            self.dict[key] = combined

    def addcontinue(self, key, more):
        """Add more field data from a continuation line."""
        prev = self.dict[key]
        self.dict[key] = prev + "\n " + more

    def readheaders(self):
        """Read header lines.

        Read header lines up to the entirely blank line that terminates them.
        The (normally blank) line that ends the headers is skipped, but not
        included in the returned list. If a non-header line ends the headers,
        (which is an error), an attempt is made to backspace over it; it is
        never included in the returned list.

        The variable self.status is set to the empty string if all went well,
        otherwise it is an error message. The variable self.headers is a
        completely uninterpreted list of lines contained in the header (so
        printing them will reproduce the header exactly as it appears in the
        file).

        If multiple header fields with the same name occur, they are combined
        according to the rules in RFC 2616 sec 4.2:

        Appending each subsequent field-value to the first, each separated
        by a comma. The order in which header fields with the same field-name
        are received is significant to the interpretation of the combined
        field value.
        """
        # XXX The implementation overrides the readheaders() method of
        # rfc822.Message. The base class design isn't amenable to
        # customized behavior here so the method here is a copy of the
        # base class code with a few small changes.

        self.dict = {}
        self.unixfrom = ''
        self.headers = hlist = []
        self.status = ''
        headerseen = ""
        firstline = 1
        startofline = unread = tell = None
        # Prefer pushback (unread) when the file object supports it;
        # otherwise remember offsets so a bad line can be seek()'d back.
        if hasattr(self.fp, 'unread'):
            unread = self.fp.unread
        elif self.seekable:
            tell = self.fp.tell
        while True:
            if tell:
                try:
                    startofline = tell()
                except IOError:
                    startofline = tell = None
                    self.seekable = 0
            # Over-long header lines are rejected outright.
            line = self.fp.readline(_MAXLINE + 1)
            if len(line) > _MAXLINE:
                raise LineTooLong("header line")
            if not line:
                self.status = 'EOF in headers'
                break
            # Skip unix From name time lines
            if firstline and line.startswith('From '):
                self.unixfrom = self.unixfrom + line
                continue
            firstline = 0
            if headerseen and line[0] in ' \t':
                # XXX Not sure if continuation lines are handled properly
                # for http and/or for repeating headers
                # It's a continuation line.
                hlist.append(line)
                self.addcontinue(headerseen, line.strip())
                continue
            elif self.iscomment(line):
                # It's a comment. Ignore it.
                continue
            elif self.islast(line):
                # Note! No pushback here! The delimiter line gets eaten.
                break
            headerseen = self.isheader(line)
            if headerseen:
                # It's a legal header line, save it.
                hlist.append(line)
                self.addheader(headerseen, line[len(headerseen)+1:].strip())
                continue
            else:
                # It's not a header line; throw it back and stop here.
                if not self.dict:
                    self.status = 'No headers'
                else:
                    self.status = 'Non-header line where header expected'
                # Try to undo the read.
                if unread:
                    unread(line)
                elif tell:
                    self.fp.seek(startofline)
                else:
                    self.status = self.status + '; bad seek'
                break
class HTTPResponse:
# strict: If true, raise BadStatusLine if the status line can't be
# parsed as a valid HTTP/1.0 or 1.1 status line. By default it is
# false because it prevents clients from talking to HTTP/0.9
# servers. Note that a response with a sufficiently corrupted
# status line will look like an HTTP/0.9 response.
# See RFC 2616 sec 19.6 and RFC 1945 sec 6 for details.
def __init__(self, sock, debuglevel=0, strict=0, method=None, buffering=False):
if buffering:
# The caller won't be using any sock.recv() calls, so buffering
# is fine and recommended for performance.
self.fp = sock.makefile('rb')
else:
# The buffer size is specified as zero, because the headers of
# the response are read with readline(). If the reads were
# buffered the readline() calls could consume some of the
# response, which make be read via a recv() on the underlying
# socket.
self.fp = sock.makefile('rb', 0)
self.debuglevel = debuglevel
self.strict = strict
self._method = method
self.msg = None
# from the Status-Line of the response
self.version = _UNKNOWN # HTTP-Version
self.status = _UNKNOWN # Status-Code
self.reason = _UNKNOWN # Reason-Phrase
self.chunked = _UNKNOWN # is "chunked" being used?
self.chunk_left = _UNKNOWN # bytes left to read in current chunk
self.length = _UNKNOWN # number of bytes left in response
self.will_close = _UNKNOWN # conn will close at end of response
def _read_status(self):
# Initialize with Simple-Response defaults
line = self.fp.readline()
if self.debuglevel > 0:
print "reply:", repr(line)
if not line:
# Presumably, the server closed the connection before
# sending a valid response.
raise BadStatusLine(line)
try:
[version, status, reason] = line.split(None, 2)
except ValueError:
try:
[version, status] = line.split(None, 1)
reason = ""
except ValueError:
# empty version will cause next test to fail and status
# will be treated as 0.9 response.
version = ""
if not version.startswith('HTTP/'):
if self.strict:
self.close()
raise BadStatusLine(line)
else:
# assume it's a Simple-Response from an 0.9 server
self.fp = LineAndFileWrapper(line, self.fp)
return "HTTP/0.9", 200, ""
# The status code is a three-digit number
try:
status = int(status)
if status < 100 or status > 999:
raise BadStatusLine(line)
except ValueError:
raise BadStatusLine(line)
return version, status, reason
def begin(self):
if self.msg is not None:
# we've already started reading the response
return
# read until we get a non-100 response
while True:
version, status, reason = self._read_status()
if status != CONTINUE:
break
# skip the header from the 100 response
while True:
skip = self.fp.readline(_MAXLINE + 1)
if len(skip) > _MAXLINE:
raise LineTooLong("header line")
skip = skip.strip()
if not skip:
break
if self.debuglevel > 0:
print "header:", skip
self.status = status
self.reason = reason.strip()
if version == 'HTTP/1.0':
self.version = 10
elif version.startswith('HTTP/1.'):
self.version = 11 # use HTTP/1.1 code for HTTP/1.x where x>=1
elif version == 'HTTP/0.9':
self.version = 9
else:
raise UnknownProtocol(version)
if self.version == 9:
self.length = None
self.chunked = 0
self.will_close = 1
self.msg = HTTPMessage(StringIO())
return
self.msg = HTTPMessage(self.fp, 0)
if self.debuglevel > 0:
for hdr in self.msg.headers:
print "header:", hdr,
# don't let the msg keep an fp
self.msg.fp = None
# are we using the chunked-style of transfer encoding?
tr_enc = self.msg.getheader('transfer-encoding')
if tr_enc and tr_enc.lower() == "chunked":
self.chunked = 1
self.chunk_left = None
else:
self.chunked = 0
# will the connection close at the end of the response?
self.will_close = self._check_close()
# do we have a Content-Length?
# NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked"
length = self.msg.getheader('content-length')
if length and not self.chunked:
try:
self.length = int(length)
except ValueError:
self.length = None
else:
if self.length < 0: # ignore nonsensical negative lengths
self.length = None
else:
self.length = None
# does the body have a fixed length? (of zero)
if (status == NO_CONTENT or status == NOT_MODIFIED or
100 <= status < 200 or # 1xx codes
self._method == 'HEAD'):
self.length = 0
# if the connection remains open, and we aren't using chunked, and
# a content-length was not provided, then assume that the connection
# WILL close.
if not self.will_close and \
not self.chunked and \
self.length is None:
self.will_close = 1
def _check_close(self):
conn = self.msg.getheader('connection')
if self.version == 11:
# An HTTP/1.1 proxy is assumed to stay open unless
# explicitly closed.
conn = self.msg.getheader('connection')
if conn and "close" in conn.lower():
return True
return False
# Some HTTP/1.0 implementations have support for persistent
# connections, using rules different than HTTP/1.1.
# For older HTTP, Keep-Alive indicates persistent connection.
if self.msg.getheader('keep-alive'):
return False
# At least Akamai returns a "Connection: Keep-Alive" header,
# which was supposed to be sent by the client.
if conn and "keep-alive" in conn.lower():
return False
# Proxy-Connection is a netscape hack.
pconn = self.msg.getheader('proxy-connection')
if pconn and "keep-alive" in pconn.lower():
return False
# otherwise, assume it will close
return True
    def close(self):
        """Release the response's underlying file object, if still held."""
        if self.fp:
            self.fp.close()
            self.fp = None
    def isclosed(self):
        """Return True once the response's file object has been released."""
        # NOTE: it is possible that we will not ever call self.close(). This
        # case occurs when will_close is TRUE, length is None, and we
        # read up to the last byte, but NOT past it.
        #
        # IMPLIES: if will_close is FALSE, then self.close() will ALWAYS be
        # called, meaning self.isclosed() is meaningful.
        return self.fp is None
# XXX It would be nice to have readline and __iter__ for this, too.
    def read(self, amt=None):
        """Read and return the response body, or up to `amt' bytes of it.

        Returns '' once the body is exhausted (and immediately for a HEAD
        response, which carries no body).  Dispatches to _read_chunked()
        for chunked transfer encoding; otherwise tracks self.length so a
        bounded read never runs past the end of this response.
        """
        if self.fp is None:
            return ''
        # A response to a HEAD request has no body regardless of headers;
        # release the file object right away.
        if self._method == 'HEAD':
            self.close()
            return ''
        if self.chunked:
            return self._read_chunked(amt)
        if amt is None:
            # unbounded read
            if self.length is None:
                s = self.fp.read()
            else:
                s = self._safe_read(self.length)
                self.length = 0
            self.close() # we read everything
            return s
        if self.length is not None:
            if amt > self.length:
                # clip the read to the "end of response"
                amt = self.length
        # we do not use _safe_read() here because this may be a .will_close
        # connection, and the user is reading more bytes than will be provided
        # (for example, reading in 1k chunks)
        s = self.fp.read(amt)
        if self.length is not None:
            self.length -= len(s)
            if not self.length:
                self.close()
        return s
    def _read_chunked(self, amt):
        """Read the body of a chunked-transfer-encoded response.

        Reads up to `amt' bytes (the whole body when `amt' is None),
        decoding the chunk framing as it goes.  self.chunk_left carries
        the number of unread bytes of the current chunk across calls.
        Raises IncompleteRead if the stream dies mid-body, LineTooLong
        for an oversized chunk-size or trailer line.
        """
        assert self.chunked != _UNKNOWN
        chunk_left = self.chunk_left
        value = []
        while True:
            if chunk_left is None:
                # At a chunk boundary: parse the next chunk-size line.
                line = self.fp.readline(_MAXLINE + 1)
                if len(line) > _MAXLINE:
                    raise LineTooLong("chunk size")
                i = line.find(';')
                if i >= 0:
                    line = line[:i] # strip chunk-extensions
                try:
                    chunk_left = int(line, 16)
                except ValueError:
                    # close the connection as protocol synchronisation is
                    # probably lost
                    self.close()
                    raise IncompleteRead(''.join(value))
                if chunk_left == 0:
                    # A zero-sized chunk terminates the body.
                    break
            if amt is None:
                value.append(self._safe_read(chunk_left))
            elif amt < chunk_left:
                # The caller's quota ends inside this chunk: remember how
                # much of it remains for the next call.
                value.append(self._safe_read(amt))
                self.chunk_left = chunk_left - amt
                return ''.join(value)
            elif amt == chunk_left:
                value.append(self._safe_read(amt))
                self._safe_read(2)  # toss the CRLF at the end of the chunk
                self.chunk_left = None
                return ''.join(value)
            else:
                value.append(self._safe_read(chunk_left))
                amt -= chunk_left
            # we read the whole chunk, get another
            self._safe_read(2)      # toss the CRLF at the end of the chunk
            chunk_left = None
        # read and discard trailer up to the CRLF terminator
        ### note: we shouldn't have any trailers!
        while True:
            line = self.fp.readline(_MAXLINE + 1)
            if len(line) > _MAXLINE:
                raise LineTooLong("trailer line")
            if not line:
                # a vanishingly small number of sites EOF without
                # sending the trailer
                break
            if line == '\r\n':
                break
        # we read everything; close the "file"
        self.close()
        return ''.join(value)
def _safe_read(self, amt):
"""Read the number of bytes requested, compensating for partial reads.
Normally, we have a blocking socket, but a read() can be interrupted
by a signal (resulting in a partial read).
Note that we cannot distinguish between EOF and an interrupt when zero
bytes have been read. IncompleteRead() will be raised in this
situation.
This function should be used when <amt> bytes "should" be present for
reading. If the bytes are truly not available (due to EOF), then the
IncompleteRead exception can be used to detect the problem.
"""
# NOTE(gps): As of svn r74426 socket._fileobject.read(x) will never
# return less than x bytes unless EOF is encountered. It now handles
# signal interruptions (socket.error EINTR) internally. This code
# never caught that exception anyways. It seems largely pointless.
# self.fp.read(amt) will work fine.
s = []
while amt > 0:
chunk = self.fp.read(min(amt, MAXAMOUNT))
if not chunk:
raise IncompleteRead(''.join(s), amt)
s.append(chunk)
amt -= len(chunk)
return ''.join(s)
    def fileno(self):
        """Return the file descriptor of the response's file object."""
        return self.fp.fileno()
def getheader(self, name, default=None):
if self.msg is None:
raise ResponseNotReady()
return self.msg.getheader(name, default)
def getheaders(self):
"""Return list of (header, value) tuples."""
if self.msg is None:
raise ResponseNotReady()
return self.msg.items()
class HTTPConnection:
    """A client connection to an HTTP server, reusable across requests.

    Requests are driven either step by step through putrequest() /
    putheader() / endheaders() / getresponse(), or all at once through
    request().  The private __state attribute tracks the _CS_IDLE ->
    _CS_REQ_STARTED -> _CS_REQ_SENT cycle.
    """
    _http_vsn = 11
    _http_vsn_str = 'HTTP/1.1'
    response_class = HTTPResponse
    default_port = HTTP_PORT
    auto_open = 1       # reconnect automatically when send() finds no socket
    debuglevel = 0
    strict = 0
    def __init__(self, host, port=None, strict=None,
                 timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None):
        """Record the target host/port; no socket is opened until needed.

        `strict', when set, is forwarded to the response class so that
        malformed status lines are rejected; `source_address' is passed
        to socket.create_connection by HTTPSConnection.connect().
        """
        self.timeout = timeout
        self.source_address = source_address
        self.sock = None
        self._buffer = []           # buffered request lines, see _output()
        self.__response = None      # pending HTTPResponse, if any
        self.__state = _CS_IDLE     # request/response state machine
        self._method = None         # verb of the request in flight
        self._tunnel_host = None
        self._tunnel_port = None
        self._tunnel_headers = {}
        self._set_hostport(host, port)
        if strict is not None:
            self.strict = strict
    def set_tunnel(self, host, port=None, headers=None):
        """ Sets up the host and the port for the HTTP CONNECT Tunnelling.
        The headers argument should be a mapping of extra HTTP headers
        to send with the CONNECT request.
        """
        self._tunnel_host = host
        self._tunnel_port = port
        if headers:
            self._tunnel_headers = headers
        else:
            self._tunnel_headers.clear()
    def _set_hostport(self, host, port):
        """Split an optional :port suffix off `host' and store both."""
        if port is None:
            i = host.rfind(':')
            j = host.rfind(']') # ipv6 addresses have [...]
            if i > j:
                try:
                    port = int(host[i+1:])
                except ValueError:
                    raise InvalidURL("nonnumeric port: '%s'" % host[i+1:])
                host = host[:i]
            else:
                port = self.default_port
            if host and host[0] == '[' and host[-1] == ']':
                # Strip the RFC 2732 brackets from an IPv6 literal.
                host = host[1:-1]
        self.host = host
        self.port = port
    def set_debuglevel(self, level):
        """Set the debug tracing level (0 silences the print output)."""
        self.debuglevel = level
    def _tunnel(self):
        """Issue a CONNECT to the proxy and consume its response headers."""
        self._set_hostport(self._tunnel_host, self._tunnel_port)
        self.send("CONNECT %s:%d HTTP/1.0\r\n" % (self.host, self.port))
        for header, value in self._tunnel_headers.iteritems():
            self.send("%s: %s\r\n" % (header, value))
        self.send("\r\n")
        response = self.response_class(self.sock, strict = self.strict,
                                       method = self._method)
        (version, code, message) = response._read_status()
        if code != 200:
            self.close()
            raise socket.error("Tunnel connection failed: %d %s" % (code,
                                                                    message.strip()))
        # Discard the proxy's response headers up to the blank line.
        while True:
            line = response.fp.readline(_MAXLINE + 1)
            if len(line) > _MAXLINE:
                raise LineTooLong("header line")
            if line == '\r\n': break
    def connect(self):
        """Connect to the host and port specified in __init__."""
        self.sock = socket.create_connection((self.host,self.port),
                                             self.timeout)
        if self._tunnel_host:
            self._tunnel()
    def close(self):
        """Close the connection to the HTTP server."""
        if self.sock:
            self.sock.close()   # close it manually... there may be other refs
            self.sock = None
        if self.__response:
            self.__response.close()
            self.__response = None
        self.__state = _CS_IDLE
    def send(self, data):
        """Send `data' to the server."""
        if self.sock is None:
            if self.auto_open:
                self.connect()
            else:
                raise NotConnected()
        if self.debuglevel > 0:
            print "send:", repr(data)
        blocksize = 8192
        # Anything with a read() method (except array, which sendall()
        # accepts directly) is streamed out in blocksize pieces.
        if hasattr(data,'read') and not isinstance(data, array):
            if self.debuglevel > 0: print "sendIng a read()able"
            datablock = data.read(blocksize)
            while datablock:
                self.sock.sendall(datablock)
                datablock = data.read(blocksize)
        else:
            self.sock.sendall(data)
    def _output(self, s):
        """Add a line of output to the current request buffer.
        Assumes that the line does *not* end with \\r\\n.
        """
        self._buffer.append(s)
    def _send_output(self, message_body=None):
        """Send the currently buffered request and clear the buffer.
        Appends an extra \\r\\n to the buffer.
        A message_body may be specified, to be appended to the request.
        """
        self._buffer.extend(("", ""))
        msg = "\r\n".join(self._buffer)
        del self._buffer[:]
        # If msg and message_body are sent in a single send() call,
        # it will avoid performance problems caused by the interaction
        # between delayed ack and the Nagle algorithm.
        if isinstance(message_body, str):
            msg += message_body
            message_body = None
        self.send(msg)
        if message_body is not None:
            #message_body was not a string (i.e. it is a file) and
            #we must run the risk of Nagle
            self.send(message_body)
    def putrequest(self, method, url, skip_host=0, skip_accept_encoding=0):
        """Send a request to the server.
        `method' specifies an HTTP request method, e.g. 'GET'.
        `url' specifies the object being requested, e.g. '/index.html'.
        `skip_host' if True does not add automatically a 'Host:' header
        `skip_accept_encoding' if True does not add automatically an
        'Accept-Encoding:' header
        """
        # if a prior response has been completed, then forget about it.
        if self.__response and self.__response.isclosed():
            self.__response = None
        # in certain cases, we cannot issue another request on this connection.
        # this occurs when:
        #   1) we are in the process of sending a request. (_CS_REQ_STARTED)
        #   2) a response to a previous request has signalled that it is going
        #      to close the connection upon completion.
        #   3) the headers for the previous response have not been read, thus
        #      we cannot determine whether point (2) is true. (_CS_REQ_SENT)
        #
        # if there is no prior response, then we can request at will.
        #
        # if point (2) is true, then we will have passed the socket to the
        # response (effectively meaning, "there is no prior response"), and
        # will open a new one when a new request is made.
        #
        # Note: if a prior response exists, then we *can* start a new request.
        #       We are not allowed to begin fetching the response to this new
        #       request, however, until that prior response is complete.
        #
        if self.__state == _CS_IDLE:
            self.__state = _CS_REQ_STARTED
        else:
            raise CannotSendRequest()
        # Save the method we use, we need it later in the response phase
        self._method = method
        if not url:
            url = '/'
        hdr = '%s %s %s' % (method, url, self._http_vsn_str)
        self._output(hdr)
        if self._http_vsn == 11:
            # Issue some standard headers for better HTTP/1.1 compliance
            if not skip_host:
                # this header is issued *only* for HTTP/1.1
                # connections. more specifically, this means it is
                # only issued when the client uses the new
                # HTTPConnection() class. backwards-compat clients
                # will be using HTTP/1.0 and those clients may be
                # issuing this header themselves. we should NOT issue
                # it twice; some web servers (such as Apache) barf
                # when they see two Host: headers
                # If we need a non-standard port, include it in the
                # header.  If the request is going through a proxy,
                # use the host of the actual URL, not the host of the
                # proxy.
                netloc = ''
                if url.startswith('http'):
                    nil, netloc, nil, nil, nil = urlsplit(url)
                if netloc:
                    try:
                        netloc_enc = netloc.encode("ascii")
                    except UnicodeEncodeError:
                        netloc_enc = netloc.encode("idna")
                    self.putheader('Host', netloc_enc)
                else:
                    try:
                        host_enc = self.host.encode("ascii")
                    except UnicodeEncodeError:
                        host_enc = self.host.encode("idna")
                    # Wrap the IPv6 Host Header with [] (RFC 2732)
                    if host_enc.find(':') >= 0:
                        host_enc = "[" + host_enc + "]"
                    if self.port == self.default_port:
                        self.putheader('Host', host_enc)
                    else:
                        self.putheader('Host', "%s:%s" % (host_enc, self.port))
            # note: we are assuming that clients will not attempt to set these
            #       headers since *this* library must deal with the
            #       consequences. this also means that when the supporting
            #       libraries are updated to recognize other forms, then this
            #       code should be changed (removed or updated).
            # we only want a Content-Encoding of "identity" since we don't
            # support encodings such as x-gzip or x-deflate.
            if not skip_accept_encoding:
                self.putheader('Accept-Encoding', 'identity')
            # we can accept "chunked" Transfer-Encodings, but no others
            # NOTE: no TE header implies *only* "chunked"
            #self.putheader('TE', 'chunked')
            # if TE is supplied in the header, then it must appear in a
            # Connection header.
            #self.putheader('Connection', 'TE')
        else:
            # For HTTP/1.0, the server will assume "not chunked"
            pass
    def putheader(self, header, *values):
        """Send a request header line to the server.
        For example: h.putheader('Accept', 'text/html')
        """
        if self.__state != _CS_REQ_STARTED:
            raise CannotSendHeader()
        hdr = '%s: %s' % (header, '\r\n\t'.join([str(v) for v in values]))
        self._output(hdr)
    def endheaders(self, message_body=None):
        """Indicate that the last header line has been sent to the server.
        This method sends the request to the server.  The optional
        message_body argument can be used to pass message body
        associated with the request.  The message body will be sent in
        the same packet as the message headers if possible.  The
        message_body should be a string.
        """
        if self.__state == _CS_REQ_STARTED:
            self.__state = _CS_REQ_SENT
        else:
            raise CannotSendHeader()
        self._send_output(message_body)
    def request(self, method, url, body=None, headers={}):
        """Send a complete request to the server."""
        self._send_request(method, url, body, headers)
    def _set_content_length(self, body):
        # Set the content-length based on the body.
        thelen = None
        try:
            thelen = str(len(body))
        except TypeError, te:
            # If this is a file-like object, try to
            # fstat its file descriptor
            try:
                thelen = str(os.fstat(body.fileno()).st_size)
            except (AttributeError, OSError):
                # Don't send a length if this failed
                if self.debuglevel > 0: print "Cannot stat!!"
        if thelen is not None:
            self.putheader('Content-Length', thelen)
    def _send_request(self, method, url, body, headers):
        """Assemble and transmit a whole request: line, headers and body."""
        # Honor explicitly requested Host: and Accept-Encoding: headers.
        header_names = dict.fromkeys([k.lower() for k in headers])
        skips = {}
        if 'host' in header_names:
            skips['skip_host'] = 1
        if 'accept-encoding' in header_names:
            skips['skip_accept_encoding'] = 1
        self.putrequest(method, url, **skips)
        if body and ('content-length' not in header_names):
            self._set_content_length(body)
        for hdr, value in headers.iteritems():
            self.putheader(hdr, value)
        self.endheaders(body)
    def getresponse(self, buffering=False):
        "Get the response from the server."
        # if a prior response has been completed, then forget about it.
        if self.__response and self.__response.isclosed():
            self.__response = None
        #
        # if a prior response exists, then it must be completed (otherwise, we
        # cannot read this response's header to determine the connection-close
        # behavior)
        #
        # note: if a prior response existed, but was connection-close, then the
        # socket and response were made independent of this HTTPConnection
        # object since a new request requires that we open a whole new
        # connection
        #
        # this means the prior response had one of two states:
        #   1) will_close: this connection was reset and the prior socket and
        #      response operate independently
        #   2) persistent: the response was retained and we await its
        #      isclosed() status to become true.
        #
        if self.__state != _CS_REQ_SENT or self.__response:
            raise ResponseNotReady()
        args = (self.sock,)
        kwds = {"strict":self.strict, "method":self._method}
        if self.debuglevel > 0:
            args += (self.debuglevel,)
        if buffering:
            #only add this keyword if non-default, for compatibility with
            #other response_classes.
            kwds["buffering"] = True;
        response = self.response_class(*args, **kwds)
        response.begin()
        assert response.will_close != _UNKNOWN
        self.__state = _CS_IDLE
        if response.will_close:
            # this effectively passes the connection to the response
            self.close()
        else:
            # remember this, so we can tell when it is complete
            self.__response = response
        return response
class HTTP:
    "Compatibility class with httplib.py from 1.5."
    _http_vsn = 10
    _http_vsn_str = 'HTTP/1.0'
    debuglevel = 0
    _connection_class = HTTPConnection
    def __init__(self, host='', port=None, strict=None):
        "Provide a default host, since the superclass requires one."
        # some joker passed 0 explicitly, meaning default port
        if port == 0:
            port = None
        # Note that we may pass an empty string as the host; this will throw
        # an error when we attempt to connect. Presumably, the client code
        # will call connect before then, with a proper host.
        self._setup(self._connection_class(host, port, strict))
    def _setup(self, conn):
        """Adopt `conn' and re-export its methods on this object."""
        self._conn = conn
        # set up delegation to flesh out interface
        self.send = conn.send
        self.putrequest = conn.putrequest
        self.putheader = conn.putheader
        self.endheaders = conn.endheaders
        self.set_debuglevel = conn.set_debuglevel
        conn._http_vsn = self._http_vsn
        conn._http_vsn_str = self._http_vsn_str
        self.file = None
    def connect(self, host=None, port=None):
        "Accept arguments to set the host/port, since the superclass doesn't."
        if host is not None:
            self._conn._set_hostport(host, port)
        self._conn.connect()
    def getfile(self):
        "Provide a getfile, since the superclass' does not use this concept."
        return self.file
    def getreply(self, buffering=False):
        """Compat definition since superclass does not define it.
        Returns a tuple consisting of:
        - server status code (e.g. '200' if all goes well)
        - server "reason" corresponding to status code
        - any RFC822 headers in the response from the server
        """
        try:
            if not buffering:
                response = self._conn.getresponse()
            else:
                #only add this keyword if non-default for compatibility
                #with other connection classes
                response = self._conn.getresponse(buffering)
        except BadStatusLine, e:
            ### hmm. if getresponse() ever closes the socket on a bad request,
            ### then we are going to have problems with self.sock
            ### should we keep this behavior? do people use it?
            # keep the socket open (as a file), and return it
            self.file = self._conn.sock.makefile('rb', 0)
            # close our socket -- we want to restart after any protocol error
            self.close()
            self.headers = None
            return -1, e.line, None
        self.headers = response.msg
        self.file = response.fp
        return response.status, response.reason, response.msg
    def close(self):
        """Close the connection and drop the reference to the response file."""
        self._conn.close()
        # note that self.file == response.fp, which gets closed by the
        # superclass. just clear the object ref here.
        ### hmm. messy. if status==-1, then self.file is owned by us.
        ### well... we aren't explicitly closing, but losing this ref will
        ### do it
        self.file = None
try:
    import ssl
except ImportError:
    # SSL support is optional; HTTPSConnection is only defined when the
    # interpreter was built with the ssl module available.
    pass
else:
    class HTTPSConnection(HTTPConnection):
        "This class allows communication via SSL."
        default_port = HTTPS_PORT
        def __init__(self, host, port=None, key_file=None, cert_file=None,
                     strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
                     source_address=None):
            HTTPConnection.__init__(self, host, port, strict, timeout,
                                    source_address)
            # Client certificate/key, forwarded to ssl.wrap_socket().
            self.key_file = key_file
            self.cert_file = cert_file
        def connect(self):
            "Connect to a host on a given (SSL) port."
            sock = socket.create_connection((self.host, self.port),
                                            self.timeout, self.source_address)
            if self._tunnel_host:
                # Establish the CONNECT tunnel on the plain socket before
                # wrapping it in SSL.
                self.sock = sock
                self._tunnel()
            self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file)
    __all__.append("HTTPSConnection")
class HTTPS(HTTP):
    """Compatibility with 1.5 httplib interface
    Python 1.5.2 did not have an HTTPS class, but it defined an
    interface for sending http requests that is also useful for
    https.
    """
    _connection_class = HTTPSConnection
    def __init__(self, host='', port=None, key_file=None, cert_file=None,
                 strict=None):
        # provide a default host, pass the X509 cert info
        # urf. compensate for bad input.
        if port == 0:
            port = None
        self._setup(self._connection_class(host, port, key_file,
                                           cert_file, strict))
        # we never actually use these for anything, but we keep them
        # here for compatibility with post-1.5.2 CVS.
        self.key_file = key_file
        self.cert_file = cert_file
def FakeSocket(sock, sslobj):
    """Deprecated shim: emit a DeprecationWarning and hand back `sslobj'.

    Historically this wrapped a plain socket and an SSL object; the
    result of ssl.wrap_socket() should now be used directly.
    """
    message = ("FakeSocket is deprecated, and won't be in 3.x. "
               "Use the result of ssl.wrap_socket() directly instead.")
    warnings.warn(message, DeprecationWarning, stacklevel=2)
    return sslobj
class HTTPException(Exception):
    """Root of the exception hierarchy raised by this httplib module."""
    # Subclasses that define an __init__ must call Exception.__init__
    # or define self.args.  Otherwise, str() will fail.
    pass
class NotConnected(HTTPException):
    """Raised by send() when there is no socket and auto_open is off."""
    pass
class InvalidURL(HTTPException):
    """Raised by _set_hostport() for a non-numeric port in the host string."""
    pass
class UnknownProtocol(HTTPException):
    """Raised when the status line advertises an unsupported HTTP version."""
    def __init__(self, version):
        # Keep the offending version both in args (for str()) and as an
        # attribute for programmatic access.
        self.args = version,
        self.version = version
class UnknownTransferEncoding(HTTPException):
    """Raised for an unsupported Transfer-Encoding header."""
    pass
class UnimplementedFileMode(HTTPException):
    """Raised for a file mode the library does not implement."""
    pass
class IncompleteRead(HTTPException):
    """Raised when the connection dies before the full body was received.

    `partial' holds the bytes that did arrive; `expected', when known,
    is how many more bytes were still due.
    """
    def __init__(self, partial, expected=None):
        self.args = partial,
        self.partial = partial
        self.expected = expected
    def __repr__(self):
        if self.expected is None:
            suffix = ''
        else:
            suffix = ', %i more expected' % self.expected
        return 'IncompleteRead(%i bytes read%s)' % (len(self.partial), suffix)
    def __str__(self):
        return repr(self)
class ImproperConnectionState(HTTPException):
    """Base for errors caused by calling methods in the wrong state."""
    pass
class CannotSendRequest(ImproperConnectionState):
    """Raised by putrequest() when the connection is not idle."""
    pass
class CannotSendHeader(ImproperConnectionState):
    """Raised by putheader()/endheaders() when no request has been started."""
    pass
class ResponseNotReady(ImproperConnectionState):
    """Raised when a response (or its headers) is requested prematurely."""
    pass
class BadStatusLine(HTTPException):
    """Raised when the server replies with a malformed status line."""
    def __init__(self, line):
        # repr() an empty line so the error message shows '' rather
        # than nothing at all.
        if not line:
            line = repr(line)
        self.args = line,
        self.line = line
class LineTooLong(HTTPException):
    """Raised when a protocol line exceeds the _MAXLINE safety limit."""
    def __init__(self, line_type):
        HTTPException.__init__(self, "got more than %d bytes when reading %s"
                                     % (_MAXLINE, line_type))
# for backwards compatibility: older callers catch httplib.error
error = HTTPException
class LineAndFileWrapper:
    """A limited file-like object for HTTP/0.9 responses.

    The status-line parser consumes one readline() that, for an HTTP/0.9
    server, was really the first line of the body.  This wrapper replays
    that buffered line first and then hands all reads through to the
    underlying file object.
    """
    def __init__(self, line, file):
        self._line = line
        self._file = file
        self._line_consumed = 0
        self._line_offset = 0
        self._line_left = len(line)
    def __getattr__(self, attr):
        # Anything not defined here is handled by the wrapped file.
        return getattr(self._file, attr)
    def _done(self):
        # The buffered line is exhausted: from now on every read method
        # delegates directly to the underlying file object.
        self._line_consumed = 1
        self.read = self._file.read
        self.readline = self._file.readline
        self.readlines = self._file.readlines
    def read(self, amt=None):
        if self._line_consumed:
            return self._file.read(amt)
        assert self._line_left
        if amt is not None and amt <= self._line_left:
            # Served entirely out of the buffered line.
            start = self._line_offset
            end = start + amt
            piece = self._line[start:end]
            self._line_offset = end
            self._line_left -= amt
            if self._line_left == 0:
                self._done()
            return piece
        # Drain what is left of the line, then read the rest from the file.
        head = self._line[self._line_offset:]
        self._done()
        if amt is None:
            return head + self._file.read()
        return head + self._file.read(amt - len(head))
    def readline(self):
        if self._line_consumed:
            return self._file.readline()
        assert self._line_left
        head = self._line[self._line_offset:]
        self._done()
        return head
    def readlines(self, size=None):
        if self._line_consumed:
            return self._file.readlines(size)
        assert self._line_left
        lines = [self._line[self._line_offset:]]
        self._done()
        if size is None:
            return lines + self._file.readlines()
        return lines + self._file.readlines(size)
########NEW FILE########
__FILENAME__ = test_extended
#!/usr/bin/env python
"""
"""
import os
import time
import json
import re
import unittest2 as unittest
from utils import GET, GET_async, POST, POST_async, OPTIONS
from utils import WebSocket8Client
import uuid
import nose
# Base URL
# ========
# Target server under test; overridable via the SOCKJS_URL environment
# variable.
test_top_url = os.environ.get('SOCKJS_URL', 'http://localhost:8081')
base_url = test_top_url + '/echo'
close_base_url = test_top_url + '/close'
wsoff_base_url = test_top_url + '/disabled_websocket_echo'
class Test(unittest.TestCase):
    """Shared response-verification helpers for the SockJS test cases."""
    # We are going to test several `404/not found` pages. We don't
    # define a body or a content type.
    def verify404(self, r, cookie=False):
        self.assertEqual(r.status, 404)
        if cookie is False:
            self.verify_no_cookie(r)
        elif cookie is True:
            self.verify_cookie(r)
    # In some cases `405/method not allowed` is more appropriate.
    def verify405(self, r):
        self.assertEqual(r.status, 405)
        self.assertFalse(r['content-type'])
        self.assertTrue(r['allow'])
        self.assertFalse(r.body)
    # Multiple transport protocols need to support OPTIONS method. All
    # responses to OPTIONS requests must be cacheable and contain
    # appropriate headers.
    def verify_options(self, url, allowed_methods):
        for origin in [None, 'test']:
            h = {}
            if origin:
                h['Origin'] = origin
            r = OPTIONS(url, headers=h)
            self.assertEqual(r.status, 204)
            self.assertTrue(re.search('public', r['Cache-Control']))
            self.assertTrue(re.search('max-age=[1-9][0-9]{6}', r['Cache-Control']),
                            "max-age must be large, one year (31536000) is best")
            self.assertTrue(r['Expires'])
            self.assertTrue(int(r['access-control-max-age']) > 1000000)
            self.assertEqual(r['Access-Control-Allow-Methods'], allowed_methods)
            self.assertFalse(r.body)
            self.verify_cors(r, origin)
            self.verify_cookie(r)
    # All transports except WebSockets need sticky session support
    # from the load balancer. Some load balancers enable that only
    # when they see `JSESSIONID` cookie. For all the session urls we
    # must set this cookie.
    def verify_cookie(self, r):
        self.assertEqual(r['Set-Cookie'].split(';')[0].strip(),
                         'JSESSIONID=dummy')
        self.assertEqual(r['Set-Cookie'].split(';')[1].lower().strip(),
                         'path=/')
    def verify_no_cookie(self, r):
        self.assertFalse(r['Set-Cookie'])
    # Most of the XHR/Ajax based transports do work with CORS if proper
    # headers are set.
    def verify_cors(self, r, origin=None):
        self.assertEqual(r['access-control-allow-origin'], origin or '*')
        # In order to get cookies (`JSESSIONID` mostly) flying, we
        # need to set `allow-credentials` header to true.
        self.assertEqual(r['access-control-allow-credentials'], 'true')
    # Sometimes, due to transports limitations we need to request
    # private data using GET method. In such case it's very important
    # to disallow any caching.
    def verify_not_cached(self, r, origin=None):
        self.assertEqual(r['Cache-Control'],
                         'no-store, no-cache, must-revalidate, max-age=0')
        self.assertFalse(r['Expires'])
        self.assertFalse(r['Last-Modified'])
    @classmethod
    def tearDownClass(cls):
        """
        Wait five seconds for the current sessions to expire.
        """
        time.sleep(5)
# Footnote
# ========
# Make this script runnable: nose discovers and runs the test classes above.
if __name__ == '__main__':
    nose.main()
########NEW FILE########
__FILENAME__ = test_protocol
#!/usr/bin/env python
"""
[**SockJS-protocol**](https://github.com/sockjs/sockjs-protocol) is an
effort to define a protocol between in-browser
[SockJS-client](https://github.com/sockjs/sockjs-client) and its
server-side counterparts, like
[SockJS-node](https://github.com/sockjs/sockjs-client). This should
help others to write alternative server implementations.
This protocol definition is also a runnable test suite, do run it
against your server implementation. Supporting all the tests doesn't
guarantee that SockJS client will work flawlessly, end-to-end tests
using real browsers are always required.
"""
import os
import time
import json
import re
import unittest2 as unittest
from utils import GET, GET_async, POST, POST_async, OPTIONS
from utils import WebSocket8Client
import uuid
import nose
from nose.tools import timed
# Base URL
# ========
"""
The SockJS server provides one or more SockJS services. The services
are usually exposed with a simple url prefixes, like:
`http://localhost:8000/echo` or
`http://localhost:8000/broadcast`. We'll call this kind of url a
`base_url`. There is nothing wrong with base url being more complex,
like `http://localhost:8000/a/b/c/d/echo`. Base url should
never end with a slash.
Base url is the url that needs to be supplied to the SockJS client.
All paths under base url are controlled by SockJS server and are
defined by SockJS protocol.
SockJS protocol can be using either http or https.
To run this tests server pointed by `base_url` needs to support
following services:
- `echo` - responds with identical data as received
- `disabled_websocket_echo` - identical to `echo`, but with websockets disabled
- `close` - server immediately closes the session
These tests should not be run more often than once every five seconds -
many tests operate on the same (named) sessions and they need to have
enough time to time out.
"""
# Target server under test; overridable via the SOCKJS_URL environment
# variable (see the services described in the module docstring above).
test_top_url = os.environ.get('SOCKJS_URL', 'http://localhost:8081')
base_url = test_top_url + '/echo'
close_base_url = test_top_url + '/close'
wsoff_base_url = test_top_url + '/disabled_websocket_echo'
# Static URLs
# ===========
class Test(unittest.TestCase):
    """Shared response-verification helpers for the SockJS protocol tests."""
    # We are going to test several `404/not found` pages. We don't
    # define a body or a content type.
    def verify404(self, r, cookie=False):
        self.assertEqual(r.status, 404)
        if cookie is False:
            self.verify_no_cookie(r)
        elif cookie is True:
            self.verify_cookie(r)
    # In some cases `405/method not allowed` is more appropriate.
    def verify405(self, r):
        self.assertEqual(r.status, 405)
        self.assertFalse(r['content-type'])
        self.assertTrue(r['allow'])
        self.assertFalse(r.body)
    # Multiple transport protocols need to support OPTIONS method. All
    # responses to OPTIONS requests must be cacheable and contain
    # appropriate headers.
    def verify_options(self, url, allowed_methods):
        for origin in [None, 'test']:
            h = {}
            if origin:
                h['Origin'] = origin
            r = OPTIONS(url, headers=h)
            self.assertEqual(r.status, 204)
            self.assertTrue(re.search('public', r['Cache-Control']))
            self.assertTrue(re.search('max-age=[1-9][0-9]{6}', r['Cache-Control']),
                            "max-age must be large, one year (31536000) is best")
            self.assertTrue(r['Expires'])
            self.assertTrue(int(r['access-control-max-age']) > 1000000)
            self.assertEqual(r['Access-Control-Allow-Methods'], allowed_methods)
            self.assertFalse(r.body)
            self.verify_cors(r, origin)
            self.verify_cookie(r)
    # All transports except WebSockets need sticky session support
    # from the load balancer. Some load balancers enable that only
    # when they see `JSESSIONID` cookie. For all the session urls we
    # must set this cookie.
    def verify_cookie(self, r):
        self.assertEqual(r['Set-Cookie'].split(';')[0].strip(),
                         'JSESSIONID=dummy')
        self.assertEqual(r['Set-Cookie'].split(';')[1].lower().strip(),
                         'path=/')
    def verify_no_cookie(self, r):
        self.assertFalse(r['Set-Cookie'])
    # Most of the XHR/Ajax based transports do work with CORS if proper
    # headers are set.
    def verify_cors(self, r, origin=None):
        self.assertEqual(r['access-control-allow-origin'], origin or '*')
        # In order to get cookies (`JSESSIONID` mostly) flying, we
        # need to set `allow-credentials` header to true.
        self.assertEqual(r['access-control-allow-credentials'], 'true')
    # Sometimes, due to transports limitations we need to request
    # private data using GET method. In such case it's very important
    # to disallow any caching.
    def verify_not_cached(self, r, origin=None):
        self.assertEqual(r['Cache-Control'],
                         'no-store, no-cache, must-revalidate, max-age=0')
        self.assertFalse(r['Expires'])
        self.assertFalse(r['Last-Modified'])
    @classmethod
    def tearDownClass(cls):
        """
        Wait five seconds for the current sessions to expire.
        """
        time.sleep(5)
# Greeting url: `/`
# ----------------
class BaseUrlGreeting(Test):
    """Checks for the greeting served at the SockJS base url (`/`)."""
    # The most important part of the url scheme, is without doubt, the
    # top url. Make sure the greeting is valid.
    def test_greeting(self):
        for url in [base_url, base_url + '/']:
            r = GET(url)
            self.assertEqual(r.status, 200)
            self.assertEqual(r['content-type'], 'text/plain; charset=UTF-8')
            self.assertEqual(r.body, 'Welcome to SockJS!\n')
            self.verify_no_cookie(r)
    # Other simple requests should return 404.
    def test_notFound(self):
        for suffix in ['/a', '/a.html', '//', '///', '/a/a', '/a/a/', '/a',
                       '/a/']:
            self.verify404(GET(base_url + suffix))
# IFrame page: `/iframe*.html`
# ----------------------------
class IframePage(Test):
"""
Some transports don't support cross domain communication
(CORS). In order to support them we need to do a cross-domain
    trick: on the remote (server) domain we serve a simple html page,
that loads back SockJS client javascript and is able to
communicate with the server within the same domain.
"""
iframe_body = re.compile('''
^<!DOCTYPE html>
<html>
<head>
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<script>
document.domain = document.domain;
_sockjs_onload = function\(\){SockJS.bootstrap_iframe\(\);};
</script>
<script src="(?P<sockjs_url>[^"]*)"></script>
</head>
<body>
<h2>Don't panic!</h2>
<p>This is a SockJS hidden iframe. It's used for cross domain magic.</p>
</body>
</html>$
'''.strip())
# SockJS server must provide this html page.
def test_simpleUrl(self):
self.verify(base_url + '/iframe.html')
# To properly utilize caching, the same content must be served
# for request which try to version the iframe. The server may want
# to give slightly different answer for every SockJS client
# revision.
def test_versionedUrl(self):
for suffix in ['/iframe-a.html', '/iframe-.html', '/iframe-0.1.2.html',
'/iframe-0.1.2abc-dirty.2144.html']:
self.verify(base_url + suffix)
# In some circumstances (`devel` set to true) client library
# wants to skip caching altogether. That is achieved by
# supplying a random query string.
def test_queriedUrl(self):
for suffix in ['/iframe-a.html?t=1234', '/iframe-0.1.2.html?t=123414',
'/iframe-0.1.2abc-dirty.2144.html?t=qweqweq123']:
self.verify(base_url + suffix)
# Malformed urls must give 404 answer.
def test_invalidUrl(self):
for suffix in ['/iframe.htm', '/iframe', '/IFRAME.HTML', '/IFRAME',
'/iframe.HTML', '/iframe.xml', '/iframe-/.html']:
r = GET(base_url + suffix)
self.verify404(r)
# The '/iframe.html' page and its variants must give `200/ok` and be
# served with 'text/html' content type.
def verify(self, url):
r = GET(url)
self.assertEqual(r.status, 200)
self.assertEqual(r['content-type'], 'text/html; charset=UTF-8')
# The iframe page must be strongly cacheable, supply
# Cache-Control, Expires and Etag headers and avoid
# Last-Modified header.
self.assertTrue(re.search('public', r['Cache-Control']))
self.assertTrue(re.search('max-age=[1-9][0-9]{6}', r['Cache-Control']),
"max-age must be large, one year (31536000) is best")
self.assertTrue(r['Expires'])
self.assertTrue(r['ETag'])
self.assertFalse(r['last-modified'])
# Body must be exactly as specified, with the exception of
# `sockjs_url`, which should be configurable.
match = self.iframe_body.match(r.body.strip())
self.assertTrue(match)
# `Sockjs_url` must be a valid url and should utilize caching.
sockjs_url = match.group('sockjs_url')
self.assertTrue(sockjs_url.startswith('/') or
sockjs_url.startswith('http'))
self.verify_no_cookie(r)
return r
# The iframe page must be strongly cacheable. ETag headers must
# not change too often. Server must support 'if-none-match'
# requests.
def test_cacheability(self):
r1 = GET(base_url + '/iframe.html')
r2 = GET(base_url + '/iframe.html')
self.assertEqual(r1['etag'], r2['etag'])
self.assertTrue(r1['etag']) # Let's make sure ETag isn't None.
r = GET(base_url + '/iframe.html', headers={'If-None-Match': r1['etag']})
self.assertEqual(r.status, 304)
self.assertFalse(r['content-type'])
self.assertFalse(r.body)
# Info test: `/info`
# ------------------
#
# Warning: this is a replacement of `/chunking_test` functionality
# from SockJS 0.1.
class InfoTest(Test):
    """Tests for the pre-session capability check url (`/info`)."""

    # On Python 2, `json` decodes numbers larger than `sys.maxint` as
    # `long` - entropy goes up to 2^32, which overflows a 32-bit `int` -
    # so integer checks must accept both types. The conditional only
    # evaluates the `long` branch on Python 2 (where `str is bytes`),
    # keeping the module importable on Python 3 as well.
    integer_types = (int, long) if str is bytes else (int,)

    # This url is called before the client starts the session. It's
    # used to check server capabilities (websocket support, cookies
    # requirement) and to get the value of the "origin" setting
    # (currently not used).
    #
    # But more importantly, the call to this url is used to measure
    # the roundtrip time between the client and the server. So, please,
    # do respond to this url in a timely fashion.
    def test_basic(self):
        r = GET(base_url + '/info')
        self.assertEqual(r.status, 200)
        self.assertEqual(r['content-type'],
                         'application/json; charset=UTF-8')
        self.verify_no_cookie(r)
        self.verify_not_cached(r)
        self.verify_cors(r)

        data = json.loads(r.body)
        # Are websockets enabled on the server?
        self.assertEqual(data['websocket'], True)
        # Do transports need to support cookies (ie: for load
        # balancing purposes). Test server must have the `cookie_needed`
        # option enabled.
        self.assertEqual(data['cookie_needed'], True)
        # List of allowed origins. Currently ignored.
        self.assertEqual(data['origins'], ['*:*'])
        # Source of entropy for random number generator.
        self.assertTrue(isinstance(data['entropy'], self.integer_types))

    # As browsers don't have a good entropy source, the server must
    # help with that. Info url must supply a good, unpredictable random
    # number from the range 0..2^32 to feed the browser.
    def test_entropy(self):
        r1 = GET(base_url + '/info')
        data1 = json.loads(r1.body)
        r2 = GET(base_url + '/info')
        data2 = json.loads(r2.body)
        self.assertTrue(isinstance(data1['entropy'], self.integer_types))
        self.assertTrue(isinstance(data2['entropy'], self.integer_types))
        self.assertNotEqual(data1['entropy'], data2['entropy'])

    # Info url must support CORS.
    def test_options(self):
        self.verify_options(base_url + '/info', 'OPTIONS, GET')

    # The 'disabled_websocket_echo' service should have websockets
    # disabled.
    def test_disabled_websocket(self):
        r = GET(wsoff_base_url + '/info')
        self.assertEqual(r.status, 200)
        data = json.loads(r.body)
        self.assertEqual(data['websocket'], False)
# Session URLs
# ============
# Top session URL: `/<server>/<session>`
# --------------------------------------
#
# The session between the client and the server is always initialized
# by the client. The client chooses `server_id`, which should be a
# three digit number: 000 to 999. It can be supplied by user or
# randomly generated. The main reason for this parameter is to make it
# easier to configure load balancer - and enable sticky sessions based
# on first part of the url.
#
# Second parameter `session_id` must be a random string, unique for
# every session.
#
# It is undefined what happens when two clients share the same
# `session_id`. It is a client responsibility to choose identifier
# with enough entropy.
#
# Neither server nor client API's can expose `session_id` to the
# application. This field must be protected from the app.
class SessionURLs(Test):

    # Any value must be accepted in both the `server` and the
    # `session` part of the url.
    def test_anyValue(self):
        for part in ('/a/a', '/_/_', '/1/1',
                     '/abcdefgh_i-j%20/abcdefg_i-j%20'):
            self.verify(part)

    # Session urls are exercised through the `xhr-polling` transport
    # facilities.
    def verify(self, session_part):
        response = POST(base_url + session_part + '/xhr')
        self.assertEqual(response.status, 200)
        self.assertEqual(response.body, 'o\n')

    # Empty segments, segments containing dots, and paths with too few
    # or too many parts must all be rejected.
    def test_invalidPaths(self):
        for suffix in ('//', '/a./a', '/a/a.', '/./.', '/', '///'):
            self.verify404(GET(base_url + suffix + '/xhr'))
            self.verify404(POST(base_url + suffix + '/xhr'))

    # Only `session_id` identifies a session; `server_id` exists for
    # the benefit of load balancers and must be ignored by the server.
    def test_ignoringServerId(self):
        session_id = str(uuid.uuid4())

        # Open the session under server id '000'.
        opened = POST(base_url + '/000/' + session_id + '/xhr')
        self.assertEqual(opened.status, 200)
        self.assertEqual(opened.body, 'o\n')

        # Publish a message, still through '000'.
        sent = POST(base_url + '/000/' + session_id + '/xhr_send',
                    body='["a"]')
        self.assertEqual(sent.status, 204)
        self.assertFalse(sent.body)

        # Poll through a different server id - same session answers.
        polled = POST(base_url + '/999/' + session_id + '/xhr')
        self.assertEqual(polled.status, 200)
        self.assertEqual(polled.body, 'a["a"]\n')
# Protocol and framing
# --------------------
#
# SockJS tries to stay API-compatible with WebSockets, but not on the
# network layer. For technical reasons SockJS must introduce custom
# framing and simple custom protocol.
#
# ### Framing accepted by the client
#
# SockJS client accepts following frames:
#
# * `o` - Open frame. Every time a new session is established, the
# server must immediately send the open frame. This is required, as
# some protocols (mostly polling) can't distinguish between a
# properly established connection and a broken one - we must
# convince the client that it is indeed a valid url and it can be
# expecting further messages in the future on that url.
#
# * `h` - Heartbeat frame. Most loadbalancers have arbitrary timeouts
# on connections. In order to keep connections from breaking, the
# server must send a heartbeat frame every now and then. The typical
# delay is 25 seconds and should be configurable.
#
# * `a` - Array of json-encoded messages. For example: `a["message"]`.
#
# * `c` - Close frame. This frame is sent to the browser every time
#     the client asks for data on a closed connection. This may happen
# multiple times. Close frame contains a code and a string explaining
# a reason of closure, like: `c[3000,"Go away!"]`.
#
# ### Framing accepted by the server
#
# SockJS server does not have any framing defined. All incoming data
# is treated as incoming messages, either single json-encoded messages
# or an array of json-encoded messages, depending on transport.
#
# ### Tests
#
# To explain the protocol we'll use `xhr-polling` transport
# facilities.
class Protocol(Test):
    # When the server receives a request with an unknown `session_id`
    # it must recognize that as a request for a new session. When the
    # server opens a new session it must immediately send a frame
    # containing the letter `o`.
    def test_simpleSession(self):
        trans_url = base_url + '/000/' + str(uuid.uuid4())
        r = POST(trans_url + '/xhr')
        "New line is a frame delimiter specific for xhr-polling"
        self.assertEqual(r.status, 200)
        self.assertEqual(r.body, 'o\n')

        # After a session was established the server needs to accept
        # requests for sending messages.
        "Xhr-polling accepts messages as a list of JSON-encoded strings."
        payload = '["a"]'
        r = POST(trans_url + '/xhr_send', body=payload)
        self.assertEqual(r.status, 204)
        self.assertFalse(r.body)

        '''We're using an echo service - we'll receive our message
        back. The message is encoded as an array 'a'.'''
        r = POST(trans_url + '/xhr')
        self.assertEqual(r.status, 200)
        self.assertEqual(r.body, 'a["a"]\n')

        # Sending messages to non-existing sessions is invalid.
        payload = '["a"]'
        r = POST(base_url + '/000/bad_session/xhr_send', body=payload)
        self.verify404(r, cookie=True)

        # The session must time out after 5 seconds of not having a
        # receiving connection. The server must send a heartbeat frame
        # every 25 seconds. The heartbeat frame contains a single `h`
        # character. This delay may be configurable.
        pass
        # The server must not allow two receiving connections to wait
        # on a single session. In such case the server must send a
        # close frame to the new connection.
        r1 = POST_async(trans_url + '/xhr', load=False)
        r2 = POST(trans_url + '/xhr')
        r1.close()
        self.assertEqual(r2.body, 'c[2010,"Another connection still open"]\n')
        self.assertEqual(r2.status, 200)

    # The server may terminate the connection, passing error code and
    # message.
    def test_closeSession(self):
        trans_url = close_base_url + '/000/' + str(uuid.uuid4())
        r = POST(trans_url + '/xhr')
        self.assertEqual(r.status, 200)
        self.assertEqual(r.body, 'o\n')

        r = POST(trans_url + '/xhr')
        self.assertEqual(r.status, 200)
        self.assertEqual(r.body, 'c[3000,"Go away!"]\n')

        # Until the timeout occurs, the server must constantly serve
        # the close message.
        r = POST(trans_url + '/xhr')
        self.assertEqual(r.status, 200)
        self.assertEqual(r.body, 'c[3000,"Go away!"]\n')
# WebSocket protocols: `/*/*/websocket`
# -------------------------------------
# The third-party `websocket` client library drives the WebSocket
# transport tests below; cap blocking socket operations at five
# seconds so a stuck server fails the test instead of hanging the run.
import websocket
websocket.setdefaulttimeout(5)
# The most important feature of SockJS is to support native WebSocket
# protocol. A decent SockJS server should support at least the
# following variants:
#
# - hixie-75 (Chrome 4, Safari 5.0.0)
# - hixie-76/hybi-00 (Chrome 6, Safari 5.0.1)
# - hybi-07 (Firefox 6)
# - hybi-10 (Firefox 7, Chrome 14)
#
class WebsocketHttpErrors(Test):
    """Plain http requests to the websocket endpooint must fail cleanly.

    Note: "endpoint" — checks that non-upgrade traffic gets a 4xx with
    a descriptive body rather than a hung or upgraded connection.
    """

    # Normal requests to websocket should not succeed.
    def test_httpMethod(self):
        r = GET(base_url + '/0/0/websocket')
        self.assertEqual(r.status, 400)
        self.assertTrue('Can "Upgrade" only to "WebSocket".' in r.body)

    # Server should be able to reject connections if origin is
    # invalid.
    def test_verifyOrigin(self):
        #r = GET(base_url + '/0/0/websocket', {'Upgrade': 'WebSocket',
        #                                      'Origin': 'VeryWrongOrigin'})
        #self.assertEqual(r.status, 400)
        #self.assertEqual(r.body, 'Unverified origin.')
        pass

    # Some proxies and load balancers can rewrite the 'Connection'
    # header; in such case we must refuse the connection.
    def test_invalidConnectionHeader(self):
        r = GET(base_url + '/0/0/websocket', headers={'Upgrade': 'WebSocket',
                                                      'Connection': 'close'})
        self.assertEqual(r.status, 400)
        # Bug fix: `assertTrue(<string>, r.body)` treated the literal as
        # the condition (always truthy) and `r.body` as the failure
        # message, so the check never actually inspected the response.
        # Test body membership, as test_httpMethod does.
        self.assertTrue('"Connection" must be "Upgrade".' in r.body)

    # WebSocket must only accept the GET method.
    def test_invalidMethod(self):
        for h in [{'Upgrade': 'WebSocket', 'Connection': 'Upgrade'},
                  {}]:
            r = POST(base_url + '/0/0/websocket', headers=h)
            self.verify405(r)
# Support WebSocket Hixie-76 protocol
class WebsocketHixie76(Test):
    """Tests for the Hixie-76 / Hybi-00 WebSocket transport."""

    # Basic echo roundtrip over the framed websocket transport.
    def test_transport(self):
        ws_url = 'ws:' + base_url.split(':',1)[1] + \
                 '/000/' + str(uuid.uuid4()) + '/websocket'
        ws = websocket.create_connection(ws_url)
        self.assertEqual(ws.recv(), u'o')
        ws.send(u'["a"]')
        self.assertEqual(ws.recv(), u'a["a"]')
        ws.close()

    # A closing service sends the close frame right after the open frame.
    def test_close(self):
        ws_url = 'ws:' + close_base_url.split(':',1)[1] + \
                 '/000/' + str(uuid.uuid4()) + '/websocket'
        ws = websocket.create_connection(ws_url)
        self.assertEqual(ws.recv(), u'o')
        self.assertEqual(ws.recv(), u'c[3000,"Go away!"]')

        # The connection should be closed after the close frame.
        with self.assertRaises(websocket.ConnectionClosedException):
            ws.recv()
        ws.close()

    # Empty frames must be ignored by the server side.
    def test_empty_frame(self):
        ws_url = 'ws:' + base_url.split(':',1)[1] + \
                 '/000/' + str(uuid.uuid4()) + '/websocket'
        ws = websocket.create_connection(ws_url)
        self.assertEqual(ws.recv(), u'o')
        # Server must ignore empty messages.
        ws.send(u'')
        ws.send(u'"a"')
        self.assertEqual(ws.recv(), u'a["a"]')
        ws.close()

    # For WebSockets, as opposed to other transports, it is valid to
    # reuse `session_id`. The lifetime of a SockJS WebSocket session is
    # defined by the lifetime of the underlying WebSocket connection. It
    # is correct to have two separate sessions sharing the same
    # `session_id` at the same time.
    def test_reuseSessionId(self):
        # Python 2 tuple-parameter lambda syntax; fails the test if the
        # connection is ever closed by the server.
        on_close = lambda(ws): self.assertFalse(True)

        ws_url = 'ws:' + base_url.split(':',1)[1] + \
                 '/000/' + str(uuid.uuid4()) + '/websocket'
        ws1 = websocket.create_connection(ws_url, on_close=on_close)
        self.assertEqual(ws1.recv(), u'o')

        ws2 = websocket.create_connection(ws_url, on_close=on_close)
        self.assertEqual(ws2.recv(), u'o')

        ws1.send(u'"a"')
        self.assertEqual(ws1.recv(), u'a["a"]')

        ws2.send(u'"b"')
        self.assertEqual(ws2.recv(), u'a["b"]')

        ws1.close()
        ws2.close()

        # It is correct to reuse the same `session_id` after closing a
        # previous connection.
        ws1 = websocket.create_connection(ws_url)
        self.assertEqual(ws1.recv(), u'o')
        ws1.send(u'"a"')
        self.assertEqual(ws1.recv(), u'a["a"]')
        ws1.close()

    # Verify WebSocket headers sanity. Due to HAProxy design the
    # websocket server must support writing response headers *before*
    # receiving the -76 nonce. In other words, the websocket code must
    # work like that:
    #
    # * Receive request headers.
    # * Write response headers.
    # * Receive request nonce.
    # * Write response nonce.
    def test_headersSanity(self):
        url = base_url.split(':',1)[1] + \
              '/000/' + str(uuid.uuid4()) + '/websocket'
        ws_url = 'ws:' + url
        http_url = 'http:' + url
        origin = '/'.join(http_url.split('/')[:3])
        h = {'Upgrade': 'WebSocket',
             'Connection': 'Upgrade',
             'Origin': origin,
             'Sec-WebSocket-Key1': '4 @1 46546xW%0l 1 5',
             'Sec-WebSocket-Key2': '12998 5 Y3 1 .P00'
             }

        r = GET_async(http_url, headers=h)
        self.assertEqual(r.status, 101)
        self.assertEqual(r['sec-websocket-location'], ws_url)
        self.assertEqual(r['connection'].lower(), 'upgrade')
        self.assertEqual(r['upgrade'].lower(), 'websocket')
        self.assertEqual(r['sec-websocket-origin'], origin)
        self.assertFalse(r['content-length'])
        r.close()

    # When the user sends broken data - broken JSON for example - the
    # server must terminate the ws connection.
    @timed(1)
    def test_broken_json(self):
        ws_url = 'ws:' + base_url.split(':',1)[1] + \
                 '/000/' + str(uuid.uuid4()) + '/websocket'
        ws = websocket.create_connection(ws_url)
        self.assertEqual(ws.recv(), u'o')
        ws.send(u'"a')
        with self.assertRaises(websocket.ConnectionClosedException):
            ws.recv()
        ws.close()
# The server must support Hybi-10 protocol
class WebsocketHybi10(Test):
    """Tests for the Hybi-10 (and Hybi-07 / RFC 6455) WebSocket transport."""

    # Basic echo roundtrip; empty frames must be ignored.
    def test_transport(self):
        trans_url = base_url + '/000/' + str(uuid.uuid4()) + '/websocket'
        ws = WebSocket8Client(trans_url)

        self.assertEqual(ws.recv(), 'o')
        # Server must ignore empty messages.
        ws.send(u'')
        ws.send(u'"a"')
        self.assertEqual(ws.recv(), 'a["a"]')
        ws.close()

    # A closing service sends the close frame and drops the connection.
    def test_close(self):
        trans_url = close_base_url + '/000/' + str(uuid.uuid4()) + '/websocket'
        ws = WebSocket8Client(trans_url)
        self.assertEqual(ws.recv(), u'o')
        self.assertEqual(ws.recv(), u'c[3000,"Go away!"]')
        with self.assertRaises(ws.ConnectionClosedException):
            ws.recv()
        ws.close()

    # Verify WebSocket headers sanity. Server must support both
    # Hybi-07 and Hybi-10.
    def test_headersSanity(self):
        for version in ['7', '8', '13']:
            url = base_url.split(':',1)[1] + \
                  '/000/' + str(uuid.uuid4()) + '/websocket'
            ws_url = 'ws:' + url
            http_url = 'http:' + url
            origin = '/'.join(http_url.split('/')[:3])
            h = {'Upgrade': 'websocket',
                 'Connection': 'Upgrade',
                 'Sec-WebSocket-Version': version,
                 'Sec-WebSocket-Origin': 'http://asd',
                 'Sec-WebSocket-Key': 'x3JJHMbDL1EzLkh9GBhXDw==',
                 }

            r = GET_async(http_url, headers=h)
            self.assertEqual(r.status, 101)
            # SHA1-based accept token for the fixed key above.
            self.assertEqual(r['sec-websocket-accept'], 'HSmrc0sMlYUkAGmm5OPpG2HaGWk=')
            self.assertEqual(r['connection'].lower(), 'upgrade')
            self.assertEqual(r['upgrade'].lower(), 'websocket')
            self.assertFalse(r['content-length'])
            r.close()

    # When the user sends broken data - broken JSON for example - the
    # server must terminate the ws connection.
    def test_broken_json(self):
        ws_url = 'ws:' + base_url.split(':',1)[1] + \
                 '/000/' + str(uuid.uuid4()) + '/websocket'
        ws = WebSocket8Client(ws_url)
        self.assertEqual(ws.recv(), u'o')
        ws.send(u'"a')
        with self.assertRaises(ws.ConnectionClosedException):
            ws.recv()
        ws.close()

    # As a fun part, Firefox 6.0.2 supports Websockets protocol '7'. But,
    # it doesn't send a normal 'Connection: Upgrade' header. Instead it
    # sends: 'Connection: keep-alive, Upgrade'. Brilliant.
    def test_firefox_602_connection_header(self):
        url = base_url.split(':',1)[1] + \
              '/000/' + str(uuid.uuid4()) + '/websocket'
        ws_url = 'ws:' + url
        http_url = 'http:' + url
        origin = '/'.join(http_url.split('/')[:3])
        h = {'Upgrade': 'websocket',
             'Connection': 'keep-alive, Upgrade',
             'Sec-WebSocket-Version': '7',
             'Sec-WebSocket-Origin': 'http://asd',
             'Sec-WebSocket-Key': 'x3JJHMbDL1EzLkh9GBhXDw==',
             }
        r = GET_async(http_url, headers=h)
        self.assertEqual(r.status, 101)
# XhrPolling: `/*/*/xhr`, `/*/*/xhr_send`
# ---------------------------------------
#
# The server must support xhr-polling.
class XhrPolling(Test):
    """Tests for the `xhr-polling` transport (`/xhr`, `/xhr_send`)."""

    # The transport must support CORS requests, and answer correctly
    # to OPTIONS requests.
    def test_options(self):
        for suffix in ['/xhr', '/xhr_send']:
            self.verify_options(base_url + '/abc/abc' + suffix,
                                'OPTIONS, POST')

    # Test the transport itself.
    def test_transport(self):
        url = base_url + '/000/' + str(uuid.uuid4())
        r = POST(url + '/xhr')

        self.assertEqual(r.status, 200)
        self.assertEqual(r.body, 'o\n')
        self.assertEqual(r['content-type'],
                         'application/javascript; charset=UTF-8')
        self.verify_cookie(r)
        self.verify_cors(r)

        # Xhr transports receive a json-encoded array of messages.
        r = POST(url + '/xhr_send', body='["x"]')
        self.assertEqual(r.status, 204)
        self.assertFalse(r.body)

        # The content type of `xhr_send` must be set to `text/plain`,
        # even though the response code is `204`. This is due to
        # Firefox/Firebug behaviour - it assumes that the content type
        # is xml and shouts about it.
        self.assertEqual(r['content-type'], 'text/plain; charset=UTF-8')
        self.verify_cookie(r)
        self.verify_cors(r)

        r = POST(url + '/xhr')
        self.assertEqual(r.status, 200)
        self.assertEqual(r.body, 'a["x"]\n')

    # Publishing messages to a non-existing session must result in
    # a 404 error.
    def test_invalid_session(self):
        url = base_url + '/000/' + str(uuid.uuid4())
        r = POST(url + '/xhr_send', body='["x"]')
        self.verify404(r, cookie=None)

    # The server must behave when invalid json data is sent or when no
    # json data is sent at all.
    def test_invalid_json(self):
        url = base_url + '/000/' + str(uuid.uuid4())
        r = POST(url + '/xhr')
        self.assertEqual(r.status, 200)
        self.assertEqual(r.body, 'o\n')

        r = POST(url + '/xhr_send', body='["x')
        self.assertEqual(r.status, 500)
        self.assertTrue("Broken JSON encoding." in r.body)

        r = POST(url + '/xhr_send', body='')
        self.assertEqual(r.status, 500)
        self.assertTrue("Payload expected." in r.body)

        r = POST(url + '/xhr_send', body='["a"]')
        self.assertFalse(r.body)
        self.assertEqual(r.status, 204)

        r = POST(url + '/xhr')
        self.assertEqual(r.body, 'a["a"]\n')
        self.assertEqual(r.status, 200)

    # The server must accept messages sent with different content
    # types.
    def test_content_types(self):
        url = base_url + '/000/' + str(uuid.uuid4())
        r = POST(url + '/xhr')
        self.assertEqual(r.body, 'o\n')

        ctypes = ['text/plain', 'T', 'application/json', 'application/xml', '',
                  'application/json; charset=utf-8', 'text/xml; charset=utf-8',
                  'text/xml']
        for ct in ctypes:
            r = POST(url + '/xhr_send', body='["a"]', headers={'Content-Type': ct})
            self.assertEqual(r.status, 204)
            self.assertFalse(r.body)

        # Every send above must have been accepted and queued.
        r = POST(url + '/xhr')
        self.assertEqual(r.status, 200)
        self.assertEqual(r.body, 'a[' + (',').join(['"a"']*len(ctypes)) +']\n')

    # JSESSIONID cookie must be set by default.
    def test_jsessionid(self):
        url = base_url + '/000/' + str(uuid.uuid4())
        r = POST(url + '/xhr')
        self.assertEqual(r.status, 200)
        self.assertEqual(r.body, 'o\n')
        self.verify_cookie(r)

        # And must be echoed back if it's already set.
        url = base_url + '/000/' + str(uuid.uuid4())
        r = POST(url + '/xhr', headers={'Cookie': 'JSESSIONID=abcdef'})
        self.assertEqual(r.status, 200)
        self.assertEqual(r.body, 'o\n')
        self.assertEqual(r['Set-Cookie'].split(';')[0].strip(),
                         'JSESSIONID=abcdef')
        self.assertEqual(r['Set-Cookie'].split(';')[1].lower().strip(),
                         'path=/')
# XhrStreaming: `/*/*/xhr_streaming`
# ----------------------------------
class XhrStreaming(Test):
    """Tests for the `xhr-streaming` transport (`/xhr_streaming`)."""

    def test_options(self):
        self.verify_options(base_url + '/abc/abc/xhr_streaming',
                            'OPTIONS, POST')

    def test_transport(self):
        url = base_url + '/000/' + str(uuid.uuid4())
        r = POST_async(url + '/xhr_streaming')
        self.assertEqual(r.status, 200)
        self.assertEqual(r['Content-Type'],
                         'application/javascript; charset=UTF-8')
        self.verify_cookie(r)
        self.verify_cors(r)

        # The transport must first send 2KiB of `h` bytes as prelude.
        self.assertEqual(r.read(), 'h' * 2048 + '\n')

        self.assertEqual(r.read(), 'o\n')

        r1 = POST(url + '/xhr_send', body='["x"]')
        self.assertEqual(r1.status, 204)
        self.assertFalse(r1.body)

        self.assertEqual(r.read(), 'a["x"]\n')
        r.close()

    def test_response_limit(self):
        # A single streaming request will buffer all data until
        # closed. In order to remove (garbage collect) old messages
        # from the browser memory we should close the connection every
        # now and then. By default a streaming request should be closed
        # after every 128KiB of messages sent. The test server should
        # have this limit decreased to 4096B.
        url = base_url + '/000/' + str(uuid.uuid4())
        r = POST_async(url + '/xhr_streaming')
        self.assertEqual(r.status, 200)
        self.assertTrue(r.read()) # prelude
        self.assertEqual(r.read(), 'o\n')

        # Test server should gc the streaming session after 4096 bytes
        # were sent (including framing).
        msg = '"' + ('x' * 128) + '"'
        for i in range(31):
            r1 = POST(url + '/xhr_send', body='[' + msg + ']')
            self.assertEqual(r1.status, 204)
            self.assertEqual(r.read(), 'a[' + msg + ']\n')

        # The connection should be closed after enough data was
        # delivered.
        self.assertFalse(r.read())
# EventSource: `/*/*/eventsource`
# -------------------------------
#
# For details of this protocol framing read the spec:
#
# * [http://dev.w3.org/html5/eventsource/](http://dev.w3.org/html5/eventsource/)
#
# Beware leading spaces.
class EventSource(Test):
    """Tests for the EventSource transport (`/eventsource`)."""

    def test_transport(self):
        url = base_url + '/000/' + str(uuid.uuid4())
        r = GET_async(url + '/eventsource')
        self.assertEqual(r.status, 200)
        self.assertEqual(r['Content-Type'],
                         'text/event-stream; charset=UTF-8')
        # As EventSource is requested using GET we must be very
        # careful not to allow it being cached.
        self.verify_not_cached(r)
        self.verify_cookie(r)

        # The transport must first send a new line prelude, due to a
        # bug in Opera.
        self.assertEqual(r.read(), '\r\n')

        self.assertEqual(r.read(), 'data: o\r\n\r\n')

        r1 = POST(url + '/xhr_send', body='["x"]')
        self.assertFalse(r1.body)
        self.assertEqual(r1.status, 204)

        self.assertEqual(r.read(), 'data: a["x"]\r\n\r\n')

        # This protocol doesn't allow binary data and we need to
        # specially treat leading space, new lines and things like
        # \x00. But, now the protocol json-encodes everything, so
        # there is no way to trigger this case.
        r1 = POST(url + '/xhr_send', body=r'[" \u0000\n\r "]')
        self.assertFalse(r1.body)
        self.assertEqual(r1.status, 204)

        self.assertEqual(r.read(),
                         'data: a[" \\u0000\\n\\r "]\r\n\r\n')

        r.close()

    def test_response_limit(self):
        # A single streaming request should be closed after enough data
        # was delivered (by default 128KiB, but 4KiB for the test server).
        # Although the EventSource transport is better, and in theory may
        # not need this mechanism, there are some bugs in the browsers
        # that actually prevent the automatic GC.
        url = base_url + '/000/' + str(uuid.uuid4())
        r = GET_async(url + '/eventsource')
        self.assertEqual(r.status, 200)
        self.assertTrue(r.read()) # prelude
        self.assertEqual(r.read(), 'data: o\r\n\r\n')

        # Test server should gc the streaming session after 4096 bytes
        # were sent (including framing).
        msg = '"' + ('x' * 4096) + '"'
        r1 = POST(url + '/xhr_send', body='[' + msg + ']')
        self.assertEqual(r1.status, 204)
        self.assertEqual(r.read(), 'data: a[' + msg + ']\r\n\r\n')

        # The connection should be closed after enough data was
        # delivered.
        self.assertFalse(r.read())
# HtmlFile: `/*/*/htmlfile`
# -------------------------
#
# Htmlfile transport is based on research done by Michael Carter. It
# requires a famous `document.domain` trick. Read on:
#
# * [http://stackoverflow.com/questions/1481251/what-does-document-domain-document-domain-do](http://stackoverflow.com/questions/1481251/what-does-document-domain-document-domain-do)
# * [http://cometdaily.com/2007/11/18/ie-activexhtmlfile-transport-part-ii/](http://cometdaily.com/2007/11/18/ie-activexhtmlfile-transport-part-ii/)
#
class HtmlFile(Test):
    """Tests for the htmlfile transport (`/htmlfile`)."""

    # Expected page head; `%s` is substituted with the callback name.
    head = r'''
<!doctype html>
<html><head>
  <meta http-equiv="X-UA-Compatible" content="IE=edge" />
  <meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
</head><body><h2>Don't panic!</h2>
  <script>
    document.domain = document.domain;
    var c = parent.%s;
    c.start();
    function p(d) {c.message(d);};
    window.onload = function() {c.stop();};
  </script>
'''.strip()

    def test_transport(self):
        url = base_url + '/000/' + str(uuid.uuid4())
        # `%63allback` is a url-encoded way of saying 'callback'.
        r = GET_async(url + '/htmlfile?c=%63allback')
        self.assertEqual(r.status, 200)
        self.assertEqual(r['Content-Type'],
                         'text/html; charset=UTF-8')
        # As HtmlFile is requested using GET we must be very careful
        # not to allow it being cached.
        self.verify_not_cached(r)
        self.verify_cookie(r)

        d = r.read()
        self.assertEqual(d.strip(), self.head % ('callback',))
        # The page must be padded past 1KiB so browsers render it eagerly.
        self.assertGreater(len(d), 1024)
        self.assertEqual(r.read(),
                         '<script>\np("o");\n</script>\r\n')

        r1 = POST(url + '/xhr_send', body='["x"]')
        self.assertFalse(r1.body)
        self.assertEqual(r1.status, 204)

        self.assertEqual(r.read(),
                         '<script>\np("a[\\"x\\"]");\n</script>\r\n')
        r.close()

    def test_no_callback(self):
        r = GET(base_url + '/a/a/htmlfile')
        self.assertEqual(r.status, 500)
        self.assertTrue('"callback" parameter required' in r.body)

    def test_response_limit(self):
        # A single streaming request should be closed after enough data
        # was delivered (by default 128KiB, but 4KiB for the test server).
        url = base_url + '/000/' + str(uuid.uuid4())
        r = GET_async(url + '/htmlfile?c=callback')
        self.assertEqual(r.status, 200)
        self.assertTrue(r.read()) # prelude
        self.assertEqual(r.read(),
                         '<script>\np("o");\n</script>\r\n')

        # Test server should gc the streaming session after 4096 bytes
        # were sent (including framing).
        msg = ('x' * 4096)
        r1 = POST(url + '/xhr_send', body='["' + msg + '"]')
        self.assertEqual(r1.status, 204)
        self.assertEqual(r.read(),
                         '<script>\np("a[\\"' + msg + '\\"]");\n</script>\r\n')

        # The connection should be closed after enough data was
        # delivered.
        self.assertFalse(r.read())
# JsonpPolling: `/*/*/jsonp`, `/*/*/jsonp_send`
# ---------------------------------------------
class JsonPolling(Test):
    """Tests for the jsonp-polling transport (`/jsonp`, `/jsonp_send`)."""

    def test_transport(self):
        url = base_url + '/000/' + str(uuid.uuid4())
        # `%63allback` is a url-encoded way of saying 'callback'.
        r = GET(url + '/jsonp?c=%63allback')
        self.assertEqual(r.status, 200)
        self.assertEqual(r['Content-Type'],
                         'application/javascript; charset=UTF-8')
        # As JsonPolling is requested using GET we must be very
        # careful not to allow it being cached.
        self.verify_not_cached(r)
        self.verify_cookie(r)

        self.assertEqual(r.body, 'callback("o");\r\n')

        # `d=%5B%22x%22%5D` is the form-encoded payload `d=["x"]`.
        r = POST(url + '/jsonp_send', body='d=%5B%22x%22%5D',
                 headers={'Content-Type': 'application/x-www-form-urlencoded'})
        # Konqueror does weird things on 204. As a workaround we need
        # to respond with something - let it be the string `ok`.
        self.assertEqual(r.body, 'ok')
        self.assertEqual(r.status, 200)
        self.assertEqual(r['Content-Type'], 'text/plain; charset=UTF-8')
        self.verify_cookie(r)

        r = GET(url + '/jsonp?c=%63allback')
        self.assertEqual(r.status, 200)
        self.assertEqual(r.body, 'callback("a[\\"x\\"]");\r\n')

    def test_no_callback(self):
        r = GET(base_url + '/a/a/jsonp')
        self.assertEqual(r.status, 500)
        self.assertTrue('"callback" parameter required' in r.body)

    # The server must behave when invalid json data is sent or when no
    # json data is sent at all.
    def test_invalid_json(self):
        url = base_url + '/000/' + str(uuid.uuid4())
        r = GET(url + '/jsonp?c=x')
        self.assertEqual(r.body, 'x("o");\r\n')

        r = POST(url + '/jsonp_send', body='d=%5B%22x',
                 headers={'Content-Type': 'application/x-www-form-urlencoded'})
        self.assertEqual(r.status, 500)
        self.assertTrue("Broken JSON encoding." in r.body)

        for data in ['', 'd=', 'p=p']:
            r = POST(url + '/jsonp_send', body=data,
                     headers={'Content-Type': 'application/x-www-form-urlencoded'})
            self.assertEqual(r.status, 500)
            self.assertTrue("Payload expected." in r.body)

        r = POST(url + '/jsonp_send', body='d=%5B%22b%22%5D',
                 headers={'Content-Type': 'application/x-www-form-urlencoded'})
        self.assertEqual(r.body, 'ok')

        r = GET(url + '/jsonp?c=x')
        self.assertEqual(r.status, 200)
        self.assertEqual(r.body, 'x("a[\\"b\\"]");\r\n')

    # The server must accept messages sent with different content
    # types.
    def test_content_types(self):
        url = base_url + '/000/' + str(uuid.uuid4())
        r = GET(url + '/jsonp?c=x')
        self.assertEqual(r.body, 'x("o");\r\n')

        r = POST(url + '/jsonp_send', body='d=%5B%22abc%22%5D',
                 headers={'Content-Type': 'application/x-www-form-urlencoded'})
        self.assertEqual(r.body, 'ok')
        # Plain-text bodies must be passed through verbatim - the `%61`
        # here is deliberately NOT url-decoded.
        r = POST(url + '/jsonp_send', body='["%61bc"]',
                 headers={'Content-Type': 'text/plain'})
        self.assertEqual(r.body, 'ok')

        r = GET(url + '/jsonp?c=x')
        self.assertEqual(r.status, 200)
        self.assertEqual(r.body, 'x("a[\\"abc\\",\\"%61bc\\"]");\r\n')
# Raw WebSocket url: `/websocket`
# -------------------------------
#
# SockJS protocol defines a bit of higher level framing. This is okay
# when the browser using SockJS-client establishes the connection, but
# it's not really appropriate when the connection is being established
# from another program. Although SockJS focuses on server-browser
# communication, it should be straightforward to connect to SockJS
# from the command line or from any programming language.
#
# In order to make writing command-line clients easier, we define this
# `/websocket` entry point. This entry point is special and doesn't
# use any additional custom framing, no open frame, no
# heartbeats. Only raw WebSocket protocol.
class RawWebsocket(Test):
    # The raw endpoint is a plain echo: whatever goes in must come
    # back unframed, including non-ascii code points.
    def test_transport(self):
        connection = WebSocket8Client(base_url + '/websocket')
        message = u'Hello world!\uffff'
        connection.send(message)
        self.assertEqual(connection.recv(), message)
        connection.close()

    # A server-initiated close surfaces as a closed-connection error
    # on the very first read.
    def test_close(self):
        connection = WebSocket8Client(close_base_url + '/websocket')
        with self.assertRaises(connection.ConnectionClosedException):
            connection.recv()
        connection.close()
# JSON Unicode Encoding
# =====================
#
# SockJS takes the responsibility of encoding Unicode strings for the
# user. The idea is that SockJS should properly deliver any valid
# string from the browser to the server and back. This is actually
# quite hard, as browsers do some magical character
# translations. Additionally there are some valid characters from
# JavaScript point of view that are not valid Unicode, called
# surrogates (JavaScript uses UCS-2, which is not really Unicode).
#
# Dealing with unicode surrogates (0xD800-0xDFFF) is quite special. If
# possible we should make sure that server does escape decode
# them. This makes sense for SockJS servers that support UCS-2
# (SockJS-node), but can't really work for servers supporting unicode
# properly (Python).
#
# The browser must escape quite a list of chars, this is due to
# browser mangling outgoing chars on transports like XHR.
escapable_by_client = re.compile(u"[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u2000-\u20ff\ufeff\ufff0-\uffff\x00-\x1f\ufffe\uffff\u0300-\u0333\u033d-\u0346\u034a-\u034c\u0350-\u0352\u0357-\u0358\u035c-\u0362\u0374\u037e\u0387\u0591-\u05af\u05c4\u0610-\u0617\u0653-\u0654\u0657-\u065b\u065d-\u065e\u06df-\u06e2\u06eb-\u06ec\u0730\u0732-\u0733\u0735-\u0736\u073a\u073d\u073f-\u0741\u0743\u0745\u0747\u07eb-\u07f1\u0951\u0958-\u095f\u09dc-\u09dd\u09df\u0a33\u0a36\u0a59-\u0a5b\u0a5e\u0b5c-\u0b5d\u0e38-\u0e39\u0f43\u0f4d\u0f52\u0f57\u0f5c\u0f69\u0f72-\u0f76\u0f78\u0f80-\u0f83\u0f93\u0f9d\u0fa2\u0fa7\u0fac\u0fb9\u1939-\u193a\u1a17\u1b6b\u1cda-\u1cdb\u1dc0-\u1dcf\u1dfc\u1dfe\u1f71\u1f73\u1f75\u1f77\u1f79\u1f7b\u1f7d\u1fbb\u1fbe\u1fc9\u1fcb\u1fd3\u1fdb\u1fe3\u1feb\u1fee-\u1fef\u1ff9\u1ffb\u1ffd\u2000-\u2001\u20d0-\u20d1\u20d4-\u20d7\u20e7-\u20e9\u2126\u212a-\u212b\u2329-\u232a\u2adc\u302b-\u302c\uaab2-\uaab3\uf900-\ufa0d\ufa10\ufa12\ufa15-\ufa1e\ufa20\ufa22\ufa25-\ufa26\ufa2a-\ufa2d\ufa30-\ufa6d\ufa70-\ufad9\ufb1d\ufb1f\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40-\ufb41\ufb43-\ufb44\ufb46-\ufb4e]")
#
# The server is able to send much more chars verbatim. But, it can't
# send Unicode surrogates over Websockets, also various \u2xxxx chars
# get mangled. Additionally, if the server is capable of handling
# UCS-2 (ie: 16 bit character size), it should be able to deal with
# Unicode surrogates 0xD800-0xDFFF:
# http://en.wikipedia.org/wiki/Mapping_of_Unicode_characters#Surrogates
escapable_by_server = re.compile(u"[\x00-\x1f\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufff0-\uffff]")
client_killer_string_esc = '"' + ''.join([
r'\u%04x' % (i) for i in range(65536)
if escapable_by_client.match(unichr(i))]) + '"'
server_killer_string_esc = '"' + ''.join([
r'\u%04x'% (i) for i in range(255, 65536)
if escapable_by_server.match(unichr(i))]) + '"'
class JSONEncoding(Test):
def test_xhr_server_encodes(self):
# Make sure that server encodes at least all the characters
# it's supposed to encode.
trans_url = base_url + '/000/' + str(uuid.uuid4())
r = POST(trans_url + '/xhr')
self.assertEqual(r.body, 'o\n')
self.assertEqual(r.status, 200)
payload = '["' + json.loads(server_killer_string_esc) + '"]'
r = POST(trans_url + '/xhr_send', body=payload)
self.assertEqual(r.status, 204)
r = POST(trans_url + '/xhr')
self.assertEqual(r.status, 200)
# skip framing, quotes and parenthesis
recv = r.body.strip()[2:-1]
# Received string is indeed what we send previously, aka - escaped.
self.assertEqual(recv, server_killer_string_esc)
def test_xhr_server_decodes(self):
# Make sure that server decodes the chars we're customly
# encoding.
trans_url = base_url + '/000/' + str(uuid.uuid4())
r = POST(trans_url + '/xhr')
self.assertEqual(r.body, 'o\n')
self.assertEqual(r.status, 200)
payload = '[' + client_killer_string_esc + ']' # Sending escaped
r = POST(trans_url + '/xhr_send', body=payload)
self.assertEqual(r.status, 204)
r = POST(trans_url + '/xhr')
self.assertEqual(r.status, 200)
# skip framing, quotes and parenthesis
recv = r.body.strip()[2:-1]
# Received string is indeed what we send previously. We don't
# really need to know what exactly got escaped and what not.
a = json.loads(recv)
b = json.loads(client_killer_string_esc)
self.assertEqual(a, b)
# Handling close
# ==============
#
# Dealing with session closure is quite complicated part of the
# protocol. The exact details here don't matter that much to the
# client side, but it's good to have a common behaviour on the server
# side.
#
# This is less about defining the protocol and more about sanity
# checking implementations.
class HandlingClose(Test):
# When server is closing session, it should unlink current
# request. That means, if a new request appears, it should receive
# an application close message rather than "Another connection
# still open" message.
def test_close_frame(self):
url = close_base_url + '/000/' + str(uuid.uuid4())
r1 = POST_async(url + '/xhr_streaming')
r1.read() # prelude
self.assertEqual(r1.read(), 'o\n')
self.assertEqual(r1.read(), 'c[3000,"Go away!"]\n')
r2 = POST_async(url + '/xhr_streaming')
r2.read() # prelude
self.assertEqual(r2.read(), 'c[3000,"Go away!"]\n')
# HTTP streaming requests should be automatically closed after
# close.
self.assertEqual(r1.read(), None)
self.assertEqual(r2.read(), None)
def test_close_request(self):
url = base_url + '/000/' + str(uuid.uuid4())
r1 = POST_async(url + '/xhr_streaming')
r1.read() # prelude
self.assertEqual(r1.read(), 'o\n')
r2 = POST_async(url + '/xhr_streaming')
r2.read() # prelude
self.assertEqual(r2.read(), 'c[2010,"Another connection still open"]\n')
# HTTP streaming requests should be automatically closed after
# getting the close frame.
self.assertEqual(r2.read(), None)
# When a polling request is closed by a network error - not by
# server, the session should be automatically closed. When there
# is a network error - we're in an undefined state. Some messages
# may have been lost, there is not much we can do about it.
def test_abort_xhr_streaming(self):
url = base_url + '/000/' + str(uuid.uuid4())
r1 = POST_async(url + '/xhr_streaming')
r1.read() # prelude
self.assertEqual(r1.read(), 'o\n')
# Can't do second polling request now.
r2 = POST_async(url + '/xhr_streaming')
r2.read() # prelude
self.assertEqual(r2.read(), 'c[2010,"Another connection still open"]\n')
self.assertEqual(r2.read(), None)
r1.close()
# Polling request now, after we aborted previous one, should
# trigger a connection closure. Implementations may close
# the session and forget the state related. Alternatively
# they may return a 1002 close message.
r3 = POST_async(url + '/xhr_streaming')
r3.read() # prelude
self.assertTrue(r3.read() in ['o\n', 'c[1002,"Connection interrupted"]\n'])
r3.close()
# The same for polling transports
def test_abort_xhr_polling(self):
url = base_url + '/000/' + str(uuid.uuid4())
r1 = POST(url + '/xhr')
self.assertEqual(r1.body, 'o\n')
r1 = POST_async(url + '/xhr', load=False)
# Can't do second polling request now.
r2 = POST(url + '/xhr')
self.assertEqual(r2.body, 'c[2010,"Another connection still open"]\n')
r1.close()
# Polling request now, after we aborted previous one, should
# trigger a connection closure. Implementations may close
# the session and forget the state related. Alternatively
# they may return a 1002 close message.
r3 = POST(url + '/xhr')
self.assertTrue(r3.body in ['o\n', 'c[1002,"Connection interrupted"]\n'])
# Footnote
# ========
# Make this script runnable.
if __name__ == '__main__':
nose.main()
########NEW FILE########
__FILENAME__ = utils
import urlparse
import httplib_fork as httplib
from ws4py.client.threadedclient import WebSocketClient
import Queue
import logging
class HttpResponse:
def __init__(self, method, url,
headers={}, body=None, async=False, load=True):
headers = headers.copy()
u = urlparse.urlparse(url)
kwargs = {'timeout': None if async else 1.0}
if u.scheme == 'http':
conn = httplib.HTTPConnection(u.netloc, **kwargs)
elif u.scheme == 'https':
conn = httplib.HTTPSConnection(u.netloc, **kwargs)
else:
assert False, "Unsupported scheme " + u.scheme
assert u.fragment == ''
path = u.path + ('?' + u.query if u.query else '')
self.conn = conn
if not body:
if method is 'POST':
# The spec says: "Applications SHOULD use this field
# to indicate the transfer-length of the message-body,
# unless this is prohibited by the rules in section
# 4.4."
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.13
# While httplib sets it only if there is body.
headers['Content-Length'] = 0
conn.request(method, path, headers=headers)
else:
if isinstance(body, unicode):
body = body.encode('utf-8')
conn.request(method, path, headers=headers, body=body)
if load:
if not async:
self._load()
else:
self._async_load()
@property
def status(self):
if self.res.status == 500 and hasattr(self, 'body'):
logging.error(self.body)
return self.res.status
def __getitem__(self, key):
return self.headers.get(key.lower())
def _load(self):
self.res = self.conn.getresponse()
self.headers = dict( (k.lower(), v) for k, v in self.res.getheaders() )
self.body = self.res.read()
self.close()
def close(self):
if self.conn:
self.conn.close()
self.conn = None
def _async_load(self):
self.res = self.conn.getresponse()
self.headers = dict( (k.lower(), v) for k, v in self.res.getheaders() )
def read(self):
data = self.res.read(10240)
if data:
return data
else:
self.close()
return None
def GET(url, **kwargs):
try:
return HttpResponse('GET', url, **kwargs)
except Exception as e:
logging.error(url)
raise e
def GET_async(url, **kwargs):
try:
return HttpResponse('GET', url, async=True, **kwargs)
except Exception as e:
logging.error(url)
raise e
def POST(url, **kwargs):
try:
return HttpResponse('POST', url, **kwargs)
except Exception as e:
logging.error(url)
raise e
def POST_async(url, **kwargs):
try:
return HttpResponse('POST', url, async=True, **kwargs)
except Exception as e:
logging.error(url)
raise e
def OPTIONS(url, **kwargs):
try:
return HttpResponse('OPTIONS', url, **kwargs)
except Exception as e:
logging.error(url)
raise e
class WebSocket8Client(object):
class ConnectionClosedException(Exception): pass
def __init__(self, url):
queue = Queue.Queue()
self.queue = queue
class IntWebSocketClient(WebSocketClient):
def received_message(self, m):
queue.put(unicode(str(m), 'utf-8'))
def read_from_connection(self, amount):
r = super(IntWebSocketClient, self).read_from_connection(amount)
if not r:
queue.put(Ellipsis)
return r
self.client = IntWebSocketClient(url)
self.client.connect()
def close(self):
if self.client:
self.client.running = False
self.client.close()
self.client._th.join()
self.client = None
def send(self, data):
self.client.send(data)
def recv(self):
try:
r = self.queue.get(timeout=1.0)
if r is Ellipsis:
raise self.ConnectionClosedException()
return r
except:
self.close()
raise
########NEW FILE########
| [
"dyangUCI@github.com"
] | dyangUCI@github.com |
5204c3876a1a013ce7e2785ae90635262b21aa4e | 23a7d005e15cda232f0132c7ca73ea029bfaa27c | /django rest framework/api/migrations/0001_initial.py | 2a6c9f23b0e70bc08dd1f23154135e57e0f51c43 | [
"MIT"
] | permissive | Festorz/Task-manager-with-Django-Rest-Framework | a36b7b029c70134e7ed14b0119a7400f75a6cb89 | 9a7244afeeb7ace3c0dffc752dea8c49deb69bb9 | refs/heads/main | 2023-08-16T20:16:29.138183 | 2021-10-08T10:48:03 | 2021-10-08T10:48:03 | 414,940,584 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 555 | py | # Generated by Django 3.2.5 on 2021-09-11 12:36
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Task',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=200)),
('completed', models.BooleanField(default=False)),
],
),
]
| [
"festaskirui@gmail.com"
] | festaskirui@gmail.com |
5eb7335b61d89785000b594be7c1cfc92e90602d | f3d728e0f7417f9632c6e4560f9156714f7915d0 | /apps/api/ObjectDict.py | 7910904b14f8da0ec94dba7a9f30423f7fc6be18 | [] | no_license | zhengjiali/easyapi | 3861b29c3379204eb81a81ee65da840494cf04b8 | 9677691a864c3adf0a8ff024b3b702ae530ecac5 | refs/heads/master | 2022-12-13T03:06:42.640217 | 2019-05-27T14:25:36 | 2019-05-27T14:25:36 | 127,411,271 | 0 | 1 | null | 2022-09-13T22:24:01 | 2018-03-30T09:45:41 | HTML | UTF-8 | Python | false | false | 274 | py | class ObjectDict(dict):
def __init__(self,*args,**kwargs):
super(ObjectDict, self).__init__(*args,**kwargs)
def __getattr__(self, name):
value = self[name]
if isinstance(value,dict):
value = ObjectDict(value)
return value | [
"zhengjiali@zhengjialideMacBook-Air.local"
] | zhengjiali@zhengjialideMacBook-Air.local |
fb2680d88d18ce600486ee37dc8a9555d684e11b | ca74d50774cfe7e52e0da1ad9c4b4abd58f44770 | /gat_layers.py | eb715fb7b7415fbe67ae6aace935e52782887f1f | [] | no_license | TachiChan/IJCAI2019_HGAT | 777adcae71618d9ab335e47d77a876e77988cff5 | 69bbe6e67af56f94f71479620ded1e6ef2627b60 | refs/heads/master | 2021-12-14T19:55:03.185907 | 2021-12-14T13:33:40 | 2021-12-14T13:33:40 | 219,287,300 | 56 | 13 | null | null | null | null | UTF-8 | Python | false | false | 1,928 | py | import torch
import torch.nn as nn
import torch.nn.init as init
import torch.nn.functional as F
from torch.nn.parameter import Parameter
class BatchMultiHeadGraphAttention(nn.Module):
def __init__(self, n_head, f_in, f_out, attn_dropout, bias=True):
super(BatchMultiHeadGraphAttention, self).__init__()
self.n_head = n_head
self.w = Parameter(torch.Tensor(n_head, f_in, f_out))
self.a_src = Parameter(torch.Tensor(n_head, f_out, 1))
self.a_dst = Parameter(torch.Tensor(n_head, f_out, 1))
self.leaky_relu = nn.LeakyReLU(negative_slope=0.2)
self.softmax = nn.Softmax(dim=-1)
self.dropout = nn.Dropout(attn_dropout)
if bias:
self.bias = Parameter(torch.Tensor(f_out))
init.constant_(self.bias, 0)
else:
self.register_parameter('bias', None)
init.xavier_uniform_(self.w)
init.xavier_uniform_(self.a_src)
init.xavier_uniform_(self.a_dst)
def forward(self, h, adj):
bs, n = h.size()[:2]
# bs x n x f_in
h_prime = torch.matmul(h.unsqueeze(1), self.w)
# bs x n_head x n x f_out
attn_src = torch.matmul(torch.tanh(h_prime), self.a_src)
# bs x n_head x n x 1
attn_dst = torch.matmul(torch.tanh(h_prime), self.a_dst)
# bs x n_head x n x 1
attn = attn_src.expand(-1, -1, -1, n) + \
attn_dst.expand(-1, -1, -1, n).permute(0, 1, 3, 2)
# bs x n_head x n x n
attn = self.leaky_relu(attn)
mask = 1 - adj.unsqueeze(1)
# bs x 1 x n x n
attn.data.masked_fill_(mask, float("-inf"))
attn = self.softmax(attn)
# bs x n_head x n x n
attn = self.dropout(attn)
output = torch.matmul(attn, h_prime)
# bs x n_head x n x f_out
if self.bias is not None:
return output + self.bias
else:
return output
| [
"noreply@github.com"
] | TachiChan.noreply@github.com |
429db81a0758bdafedfa8c259f04ed1d63ab61b1 | 151f05ddc58d90db858ac5ac22236d30bded103e | /src/pokedex/forms.py | 392400ea52ec4c58650b1c1e5c14cdd8e1b9df4a | [] | no_license | franzcruspero/mugna_exam | c25174682329e581d3784bca59f86db9878db35f | 9d49c1b1b24509cabbdf1af926501c73f4dc791e | refs/heads/master | 2022-12-14T23:34:10.601584 | 2020-03-16T05:12:20 | 2020-03-16T05:12:20 | 246,623,384 | 1 | 0 | null | 2022-12-08T03:47:35 | 2020-03-11T16:29:54 | JavaScript | UTF-8 | Python | false | false | 405 | py | from django import forms
from .models import Pokemon
class PokemonForm(forms.ModelForm):
class Meta:
model = Pokemon
fields = "__all__"
exclude = ["pokedex_id", "evolution_id", "evolution_order", "slug"]
class UpdatePokemonForm(forms.ModelForm):
class Meta:
model = Pokemon
fields = "__all__"
exclude = ["evolution_id", "evolution_order", "slug"] | [
"franz.cruspero@gmail.com"
] | franz.cruspero@gmail.com |
f1318351ae4716d2341351aa7ba537219924a05b | 444a9480bce2035565332d4d4654244c0b5cd47b | /research/nlp/mass/src/language_model/masked_language_model.py | 52aed8d53ed7b0a0eae8a67d7231364bbf913a00 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-proprietary-license"
] | permissive | mindspore-ai/models | 7ede9c6454e77e995e674628204e1c6e76bd7b27 | eab643f51336dbf7d711f02d27e6516e5affee59 | refs/heads/master | 2023-07-20T01:49:34.614616 | 2023-07-17T11:43:18 | 2023-07-17T11:43:18 | 417,393,380 | 301 | 92 | Apache-2.0 | 2023-05-17T11:22:28 | 2021-10-15T06:38:37 | Python | UTF-8 | Python | false | false | 4,698 | py | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Masked language model."""
import numpy as np
from .base import LanguageModel
class MaskedLanguageModel(LanguageModel):
"""
Do mask operation on sentence.
If k is assigned, then mask sentence with length k.
Otherwise, use mask_ratio.
Args:
k (int): Length of fragment.
mask_ratio (float): Mask ratio.
"""
def __init__(self, k: int = None, mask_ratio=0.5,
mask_all_prob=None):
super(MaskedLanguageModel, self).__init__()
self.mask_ratio = mask_ratio
self._k = k
self._threshold = mask_all_prob
def emit(self, sentence: np.ndarray, vocabulary):
"""
Mask mono source sentence.
A sample used to train model is processed with following step:
encoder input (source): [x1, x2, x3, x4, x5, x6, x7, x8, </eos>]
masked encoder input: [x1, x2, _, _, _, x6, x7, x8, </eos>]
decoder input: [ _, x3, x4]
| | |
V V V
decoder output: [ x3, x4, x5]
Notes:
A simple rule is made that source sentence starts without <BOS>
but end with <EOS>.
Args:
vocabulary (Dictionary): Vocabulary.
sentence (np.ndarray): Raw sentence instance.
Returns:
dict, an example.
"""
encoder_input = sentence.copy()
seq_len = encoder_input.shape[0]
# If v=0, then u must equal to 0. [u, v)
u, v = self._get_masked_interval(len(encoder_input),
self._k, self._threshold)
if u == 0:
_len = v - u if v - u != 0 else seq_len
decoder_input = np.array([vocabulary.mask_index] * _len, dtype=np.int32)
decoder_input[1:] = encoder_input[:_len - 1].copy()
else:
decoder_input = np.array([vocabulary.mask_index] * (v - u), dtype=np.int32)
decoder_input[1:] = encoder_input[u:v - 1].copy()
if v == 0:
decoder_output = encoder_input.copy()
encoder_input[:] = vocabulary.mask_index
else:
decoder_output = encoder_input[u:v].copy()
encoder_input[np.arange(start=u, stop=v)] = vocabulary.mask_index
if u != v and u > 0:
padding = np.array([vocabulary.padding_index] * u, dtype=np.int32)
decoder_input = np.concatenate((padding, decoder_input))
decoder_output = np.concatenate((padding, decoder_output))
assert decoder_input.shape[0] == decoder_output.shape[0], "seq len must equal."
return {
"sentence_length": seq_len,
"tgt_sen_length": decoder_output.shape[0],
"encoder_input": encoder_input, # end with </eos>
"decoder_input": decoder_input,
"decoder_output": decoder_output # end with </eos>
}
def _get_masked_interval(self, length, fix_length=None,
threshold_to_mask_all=None):
"""
Generate a sequence length according to length and mask_ratio.
Args:
length (int): Sequence length.
Returns:
Tuple[int, int], [start position, end position].
"""
# Can not larger than sequence length.
# Mask_length belongs to [0, length].
if fix_length is not None:
interval_length = min(length, fix_length)
else:
interval_length = min(length, round(self.mask_ratio * length))
_magic = np.random.random()
if threshold_to_mask_all is not None and _magic <= threshold_to_mask_all:
return 0, length
# If not sequence to be masked, then return 0, 0.
if interval_length == 0:
return 0, 0
# Otherwise, return start position and interval length.
start_pos = np.random.randint(low=0, high=length - interval_length + 1)
return start_pos, start_pos + interval_length
| [
"chenhaozhe1@huawei.com"
] | chenhaozhe1@huawei.com |
6e19f99fdcbedb91b77dcc3f941deabd3fd7e1ff | 34cf9bdb1f6d966df64a3c93a0f386fbb5f9ed37 | /a3/resultslast.txt | d4c671d5813c88690bcd02d0156ae3c0efca0760 | [] | no_license | rdagnoletto/ML-Assignments | 7d25aafd396d051644281ef754a1893c98991cd0 | f01729fb4c556047982abd64b88a1baba31a3f7b | refs/heads/master | 2021-05-21T05:58:51.925958 | 2020-04-02T22:25:39 | 2020-04-02T22:25:39 | 252,575,712 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,406 | txt | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 26 13:04:43 2019
@author: ragnoletto
"""
D = 100, K=5
def gMM():
X = tf.placeholder(tf.float32,[None, dim], name="X")
mu = tf.get_variable('mean',dtype = tf.float32,shape = [K,dim], initializer = tf.truncated_normal_initializer(stddev=2))
phi = tf.get_variable('stdDev',dtype = tf.float32,shape = [K,1],initializer = tf.truncated_normal_initializer(mean=4,stddev=0.5))
#sigma = tf.pow(phi,2)
sigma = tf.abs(phi)
psi = tf.get_variable('logPiProb',dtype = tf.float32,shape = [K,1], initializer = tf.truncated_normal_initializer(mean=1,stddev=0.25))
log_pi = hlp.logsoftmax(psi)
log_PDF = log_GaussPDF(X, mu, sigma)
log_rnj = log_posterior(log_PDF, log_pi)
lossfunc = neg_log_likelihood(log_PDF, log_pi)
belong = tf.arg_max(log_rnj,dimension = 1)
optimizer = tf.train.AdamOptimizer(learning_rate = 0.05, beta1=0.9, beta2=0.99, epsilon=1e-5)
train = optimizer.minimize(loss=lossfunc)
return X,mu,sigma,lossfunc,log_pi,log_PDF,log_rnj,train,belong
K=5
Counter({0: 2215, 3: 1778, 4: 1367, 1: 1307})
Counter({0: 1161, 3: 846, 1: 693, 4: 633})
Final Training Loss: 30155.678
Final Validation Loss: 15119.109
Final sigma: [[24.648119 ]
[ 8.03342 ]
[ 6.0991817]
[12.8815565]
[ 9.100573 ]]
Final log pi: [[-0.6612251]
[-1.8864878]
[-7.6394854]
[-1.7485799]
[-1.8472118]]
K=10
Counter({1: 2021, 6: 1367, 5: 1322, 8: 1307, 9: 650})
Counter({1: 979, 8: 693, 5: 678, 6: 633, 9: 350})
Final Training Loss: 29394.38
Final Validation Loss: 14726.586
Final sigma: [[ 6.6478353]
[22.40433 ]
[ 5.778818 ]
[12.90002 ]
[ 5.66792 ]
[18.0566 ]
[ 7.0670586]
[ 6.037612 ]
[12.178116 ]
[ 4.429267 ]]
Final log pi: [[-7.641652 ]
[-1.1315677]
[-7.7463417]
[-6.220504 ]
[-9.217827 ]
[-1.6502082]
[-1.6255999]
[-8.085298 ]
[-1.6588047]
[-2.353931 ]]
K=15
Counter({9: 2021, 7: 1367, 0: 1322, 8: 1307, 2: 650})
Counter({9: 979, 8: 693, 0: 678, 7: 633, 2: 350})
Final Training Loss: 29239.518
Final Validation Loss: 14648.619
Final sigma: [[23.120388 ]
[ 8.954913 ]
[ 6.138508 ]
[ 5.68269 ]
[ 8.213345 ]
[ 9.041283 ]
[ 5.737563 ]
[ 9.149303 ]
[11.76718 ]
[20.312317 ]
[ 6.22716 ]
[ 7.7068715]
[ 5.5133624]
[ 8.862666 ]
[ 6.5244875]]
Final log pi: [[-1.568387 ]
[-6.4941792]
[-2.3657823]
[-7.581526 ]
[-8.356365 ]
[-4.9403906]
[-9.373861 ]
[-1.6634154]
[-1.7037067]
[-1.1764455]
[-8.240028 ]
[-8.180656 ]
[-9.415679 ]
[-4.966434 ]
[-7.1499686]]
K=20
Counter({0: 2020, 8: 1367, 14: 1323, 5: 1307, 11: 650})
Counter({0: 978, 5: 693, 14: 679, 8: 633, 11: 350})
Final Training Loss: 29278.252
Final Validation Loss: 14649.309
Final sigma: [[10.040725 ]
[ 3.356871 ]
[12.626341 ]
[ 5.5524993]
[ 1.2704089]
[12.817027 ]
[ 5.6152296]
[ 8.032126 ]
[ 9.033022 ]
[13.367916 ]
[ 5.427177 ]
[ 1.2772119]
[ 6.678755 ]
[ 3.4861107]
[27.274075 ]
[ 5.7169247]
[ 5.3119693]
[ 3.4362798]
[ 5.6319413]
[ 6.188081 ]]
Final log pi: [[-1.5198736]
[-9.449995 ]
[-8.339245 ]
[-7.75984 ]
[-6.111004 ]
[-1.7968259]
[-9.940735 ]
[-8.631561 ]
[-1.7689257]
[-8.611637 ]
[-9.882305 ]
[-2.4673033]
[-7.7179394]
[-9.723809 ]
[-1.0339785]
[-8.860258 ]
[-8.625879 ]
[-9.759966 ]
[-9.250551 ]
[-9.066065 ]]
K=30
Counter({2: 2021, 7: 1367, 26: 1322, 4: 1307, 3: 650})
Counter({2: 979, 4: 693, 26: 678, 7: 633, 3: 350})
Final Training Loss: 28717.031
Final Validation Loss: 14374.426
Final sigma: [[ 6.1045113]
[13.29596 ]
[23.42971 ]
[ 1.5929362]
[10.38726 ]
[ 8.453452 ]
[ 5.521256 ]
[ 9.144945 ]
[ 5.6249676]
[ 6.9695635]
[ 7.037581 ]
[ 8.208211 ]
[ 8.176843 ]
[ 5.569924 ]
[ 7.384004 ]
[ 7.395604 ]
[ 6.182825 ]
[ 5.862272 ]
[ 6.392672 ]
[11.87933 ]
[10.575838 ]
[ 7.208489 ]
[ 7.436039 ]
[10.304189 ]
[ 6.6217146]
[ 5.7177844]
[27.85846 ]
[12.62668 ]
[ 5.502292 ]
[ 5.6618896]]
Final log pi: [[-10.005434 ]
[ -6.897786 ]
[ -1.1882257]
[ -2.387189 ]
[ -1.7208242]
[ -9.669098 ]
[-11.708347 ]
[ -1.7061167]
[ -8.639264 ]
[-11.276788 ]
[-10.982132 ]
[-10.001139 ]
[ -9.625512 ]
[ -9.183496 ]
[ -5.8127427]
[ -9.189085 ]
[-11.404862 ]
[-10.835509 ]
[-11.653503 ]
[ -7.5719414]
[ -9.170695 ]
[ -9.645347 ]
[ -8.149894 ]
[ -9.875679 ]
[ -9.843362 ]
[-10.313959 ]
[ -1.4430718]
[ -7.065337 ]
[-11.539928 ]
[-11.517614 ]] | [
"noreply@github.com"
] | rdagnoletto.noreply@github.com |
9cbab2ee223ec135dca3a29d86775736fcd0e4dd | 70e4234519613fa6d680691ebe99f5174df660d3 | /Fabio02_P02/F2_P2_Q10_NOTASALUNOS.py | eb53b10600aa2a069bbd27a5ec3a5e0d73fd6ee0 | [] | no_license | erickcarvalho1/ifpi-ads-algoritmos2020 | 061a39454f487adb9be82879b9270379486498ba | 26258c1457fe53a086e206c129e5bcd93edf1113 | refs/heads/master | 2021-04-15T02:02:16.074706 | 2020-03-22T22:48:16 | 2020-03-22T22:48:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 946 | py | #Leia as duas notas parciais obtidas por um aluno numa disciplina ao longo de um semestre, e calcule a sua média.
p_nota = float(input('Digite a primeira nota: '))
s_nota = float(input('Digite a segunda nota: '))
media = ( p_nota + s_nota) / 2
if media >= 9:
print('Suas notas são',p_nota,'e',s_nota,'sua média ficou',media,'e seu conceito é A,portanto, aprovado.')
if 9 > media >= 7.5:
print('Suas notas são',p_nota,'e',s_nota,'sua média ficou',media,'e seu conceito é B,portanto, aprovado.')
if 7.5 > media >= 6:
print('Suas notas são',p_nota,'e',s_nota,'sua média ficou',media,'e seu conceito é C,portanto, aprovado.')
if 6 > media >= 4:
print('Suas notas são',p_nota,'e',s_nota,'sua média ficou',media,'e seu conceito é D,portanto, reprovado.')
if 4 > media >= 0:
print('Suas notas são',p_nota,'e',s_nota,'sua média ficou',media,'e seu conceito é E,portanto, reprovado.')
| [
"noreply@github.com"
] | erickcarvalho1.noreply@github.com |
5c7b64acaf3a6e83d5ba5d2c7d70b23b1b53f3fc | ed3f052100d437c57c52b737d4c0f91d3b20c0af | /Python_Scripts/LSTM/LSTM.py | f0caa3174ddf356205b1ba08d808e63c11a62ae1 | [] | no_license | timschott/dmp | 216f1b21a95dce67f55097fccf1dce599fd7884b | 13859bda6e9adefc1ebbf26a98a299c457b4468e | refs/heads/master | 2021-10-14T11:49:54.435887 | 2021-10-05T02:59:56 | 2021-10-05T02:59:56 | 156,625,032 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,021 | py | # General imports
from string import maketrans
import numpy as np
import pandas as pd
import sqlite3
import tensorflow as tf
from sklearn.model_selection import train_test_split
# Keras
import keras
from keras.layers import *
from keras.models import *
from keras.engine.topology import Layer
from keras_preprocessing.text import *
from keras.layers import Input, Dense, merge
from keras import backend as K
from AttentionWithContext import AttentionWithContext
# In this file I will construct an LSTM with the goal of identifying particularly
# Consequential and important words across my corpus of 50 novels.
# This function pulls all the words out of my database and
# Returns them in a 50 element list with each element containing all the words of a single book
# So, element 2 in this list is all the words from Blood Meridian as a single (enormous!) string
# At the moment, I'm having difficulties getting the 50 book version to run, so I'm testing it
# Out with a smaller, 4 book list. Thus the performance of the RNN is pretty hampered
# Currently but that is okay as I'm really just interested in the output of the model
# Itself not the quality of the results
def get_data():
    """Load the word-level corpus from SQLite and slice it into books.

    Pulls every row of type 'word' from textTable (in corpus order), then
    carves the flat word list into one sub-list per novel using hard-coded
    index ranges.

    Returns:
        A 3-tuple of lists of strings:
          - small_string_list: 4 books (the current test subset),
          - big_string_list: all 50 books,
          - quite_small: a 99-word snippet for quick smoke tests.
        Each element is str(list_of_words), i.e. the Python repr of the
        word list for one book, which is later re-tokenized by Keras.
    """
    conn = sqlite3.connect('../../textTable.sqlite')
    cursor = conn.cursor()
    words = []
    for row in cursor.execute('''SELECT Unit FROM textTable WHERE Type='word' '''):
        # Py2: encode the unicode DB value to a utf-8 byte string.
        words.append((row[0]).encode('utf-8').strip())
    conn.close()
    # indices of each book
    # NOTE(review): each slice below starts at the previous slice's stop + 1
    # (e.g. [0:39084] then [39085:...]), so the single word at each boundary
    # index (39084, 97786, ...) is dropped -- confirm whether that is
    # intentional or an off-by-one.
    heartOfDarkness = words[0:39084]
    theRoad = words[39085:97786]
    theGreatGatsby = words[97787:146604]
    womenInLove = words[146605:329261]
    portraitOfTheArtist = words[329262:414188]
    lolita = words[414189:526381]
    theRainbow = words[526382:713940]
    mobyDick = words[713941:928123]
    theSerpent = words[928124:1100808]
    pym = words[1100809:1201762]
    underTheVolcano = words[1201763:1340374]
    orlando = words[1340375:1419921]
    toTheLighthouse = words[1419922:1489826]
    eureka = words[1489827:1528392]
    paleFire = words[1528393:1596524]
    billyBudd = words[1596525:1627267]
    theSoundAndTheFury = words[1627268:1723739]
    thePedersenKid = words[1723740:1747441]
    theAngelOfTerror = words[1747442:1810705]
    lifeAndTimesOfMichaelK = words[1810706:1877059]
    absalomAbsalom = words[1877060: 2009727]
    bloodMeridian = words[2009728:2124927]
    mrsDalloway = words[2124928:2189194]
    somethingHappened = words[2189195:2379429]
    theMoonstone = words[2379430: 2576798]
    theSecretAdversary = words[2576799:2652844]
    theScarhavenKeep = words[2652845:2727453]
    theRedThumbMark = words[2727454:2802062]
    theParadiseMystery = words[2802063:2878931]
    theRaynerSladeAmalgamation = words[2878932:2958661]
    theLeavenworthCase = words[2958662: 3068879]
    theOldManInTheCorner = words[3068880:3138823]
    theMoonRock = words[3138824:3246627]
    theHandInTheDark = words[3246628: 3354187]
    theDaffodilMystery = words[3354188:3422566]
    theInnocenceOfFatherBrown = words[3422567:3502004]
    theBrandOfSilence = words[3502005:3561098]
    theCircularStaircase = words[3561099:3631729]
    theAshielMystery = words[3631730:3719130]
    theMysteryOfRoom75 = words[3719131:3767884]
    theLadyInBlue = words[3767885:3846509]
    theMaxwellMystery = words[3846510:3906798]
    aStudyInScarlet = words[3906799:3950699]
    theBigSleep = words[3950700:4017610]
    theShriekingPit = words[4017611:4117616]
    thePictureOfDorianGray = words[4117617:4196905]
    theSignOfFour = words[4196906:4240350]
    wideSargassoSea = words[4240351:4287699]
    gravitysRainbow = words[4287700: 4618050]
    theSpiralStaircase = words[4618051:4689006]
    # thank you paste0 -- > with love, from R! paste0(titles, collapse=",")
    # str(book) turns each word list into one big string (its repr),
    # which is what the downstream Keras text pipeline consumes.
    small_string_list = [str(billyBudd), str(bloodMeridian),
                         str(theShriekingPit), str(theSignOfFour)]
    big_string_list = [str(absalomAbsalom), str(billyBudd), str(bloodMeridian), str(eureka), str(gravitysRainbow),
                       str(heartOfDarkness), str(lifeAndTimesOfMichaelK), str(lolita), str(mobyDick), str(mrsDalloway),
                       str(orlando), str(paleFire), str(portraitOfTheArtist), str(pym), str(somethingHappened),
                       str(theGreatGatsby), str(thePedersenKid), str(thePictureOfDorianGray), str(theRainbow),
                       str(theRoad), str(theSerpent), str(theSoundAndTheFury), str(toTheLighthouse),
                       str(underTheVolcano), str(wideSargassoSea), str(womenInLove), str(aStudyInScarlet),
                       str(theAngelOfTerror), str(theAshielMystery), str(theBigSleep), str(theBrandOfSilence),
                       str(theCircularStaircase), str(theDaffodilMystery), str(theHandInTheDark),
                       str(theInnocenceOfFatherBrown), str(theLadyInBlue), str(theLeavenworthCase),
                       str(theMaxwellMystery), str(theMoonRock), str(theMoonstone), str(theMysteryOfRoom75),
                       str(theOldManInTheCorner), str(theParadiseMystery), str(theRaynerSladeAmalgamation),
                       str(theRedThumbMark), str(theScarhavenKeep), str(theSecretAdversary), str(theShriekingPit),
                       str(theSignOfFour), str(theSpiralStaircase)]
    # First 99 words of Eureka -- a tiny fixture for fast iteration.
    quite_small = [str(words[1489827:1489926])]
    return small_string_list, big_string_list, quite_small
# In order to pipe the words into a Keras tokenizer we have to make sure they're properly
# Formatted in Unicode
# https://github.com/keras-team/keras/issues/1072
def text_to_word_sequence(text, filters='!"#$%&()*+,-./:;<=>?@[\\]^_`{|}~\t\n', split=" "):
    """Strip punctuation/control chars from *text* and return it as a list of words.

    Python 2 only: relies on the builtin ``unicode`` type and on ``maketrans``
    (presumably ``string.maketrans`` imported earlier in the file -- TODO confirm).
    Every character in *filters* is replaced by *split* before splitting.
    See https://github.com/keras-team/keras/issues/1072 for why this exists.
    """
    if type(text) == unicode:
        # unicode.translate expects a {codepoint: codepoint} mapping
        translate_table = {ord(c): ord(t) for c, t in zip(filters, split * len(filters))}
    else:
        # byte strings use a 256-character translation table instead
        translate_table = maketrans(filters, split * len(filters))
    text = text.translate(translate_table)
    seq = text.split(split)
    # drop the empty strings produced by consecutive separators
    return [i for i in seq if i]
# In this function I hand in a well-formatted, Keras compliant string
# It returns a sequence of integers that maps each unique word in the text to a number
# This way it's readable by the LSTM
def encode(encoded_list):
    """Fit a Keras Tokenizer on *encoded_list* and return it as integer sequences.

    Each unique word is mapped to an integer id so the text can be fed
    to the LSTM.
    """
    word_indexer = Tokenizer(lower=True, split=' ')
    word_indexer.fit_on_texts(encoded_list)
    return word_indexer.texts_to_sequences(encoded_list)
# This function calls encode() for every book in my corpus
# It saved it as a .npy object so I only need to call it once
def make_padded_list(word_strings):
    """Tokenize every book in *word_strings*, zero-pad, and save the result.

    Side effect: writes ``padded_small_keras_list.npy`` to the working
    directory.  Returns None; intended to be run once per corpus.
    maxlen=115200 is hard-coded to the longest book of the *small* subset --
    TODO confirm it matches whichever corpus is actually passed in.
    """
    encoded_bucket = []
    for text in word_strings:
        clean = text_to_word_sequence(text)
        numbers_now = encode(clean)
        encoded_bucket.append(numbers_now)
    # pad 0's up to longest book length, gravity's rainbow
    # since I'm working with a smaller subset right now, max length is just
    # the number of words in the biggest book of the small set
    padded = keras.preprocessing.sequence.pad_sequences(encoded_bucket, maxlen=115200)
    np.save('padded_small_keras_list', padded)
    return
# Instantiates a vanilla LSTM
# Takes advantage of word embeddings
# https://keras.io/layers/embeddings/
# https://towardsdatascience.com/understanding-lstm-and-its-quick-implementation-in-keras-for-sentiment-analysis-af410fd85b47
def create_LSTM():
    """Build and compile the single-layer ('vanilla') LSTM with word embeddings.

    Vocab size 69230, input sequence length 330351.
    NOTE(review): the final Dense has units=330351 (the sequence length, not a
    class count) -- looks suspicious for a classifier; confirm intended.
    """
    embed_dim = 128
    lstm_out = 200    # NOTE(review): unused -- the LSTM below is built with units=128
    batch_size = 32   # NOTE(review): unused here; batch size is chosen at fit() time
    model = Sequential()
    model.add(Embedding(69230, embed_dim, input_length=330351, mask_zero=True))
    model.add(keras.layers.SpatialDropout1D(.2))
    model.add(LSTM(units=128))
    model.add(Dense(units=330351))
    model.add(Activation('softmax'))
    model.compile(optimizer='adam', loss='sparse_categorical_crossentropy')
    # print(model.summary())
    return model
def train_test_division(padded_list, y_labels):
    """Split the padded sequences 75/25 (fixed seed) and return X parts as arrays."""
    frame = pd.DataFrame(data=np.matrix(padded_list))
    train_x, test_x, train_y, test_y = train_test_split(
        frame, y_labels, test_size=0.25, random_state=21)
    return train_x.values, test_x.values, train_y, test_y
# Runs and evaluates my vanilla LSTM
def train_and_test_vanilla_model(X_train, X_test, Y_train, Y_test, model):
    """Fit the vanilla LSTM for 3 epochs, then predict and score on the test set.

    Returns ``(model_history, predictions)``.
    NOTE(review): fit() is called without validation_data, so
    ``history['val_loss']`` / ``history['val_acc']`` below should raise
    KeyError -- confirm, and either pass validation_data or read
    'loss'/'acc' instead.  (Python 2 print statements throughout.)
    """
    model_history = model.fit(X_train, Y_train, batch_size=64, epochs=3, verbose=1)
    predictions = model.predict(X_test)
    score = model.evaluate(X_test, Y_test, verbose=0)
    print model.summary()
    val_loss_history = model_history.history['val_loss']
    val_acc_history = model_history.history['val_acc']
    print('Val loss: ', sum(val_loss_history) / len(val_loss_history))
    print('Val accuracy: ', sum(val_acc_history) / len(val_acc_history))
    print('Vanilla Model Score: ',score)
    return model_history, predictions
# Runs and evaluates my Attentive LSTM
def train_and_test_attentive_model(X_train, X_test, Y_train, Y_test, model):
    """Fit the attentive LSTM for 1 epoch and dump the fit history to hist.json.

    Returns the Keras History object.  ``attentions`` is computed but unused;
    the commented-out lines are earlier evaluation attempts kept for reference.
    """
    model_history = model.fit(X_train, Y_train, batch_size=64, epochs=1, verbose=1)
    # predictions = model.predict_classes(X_test)
    # score = model.evaluate(X_test, Y_test, verbose=0)
    attentions = model.predict(X_test, batch_size=64)
    #val_loss_history = model_history.history['val_loss']
    #val_acc_history = model_history.history['val_acc']
    # print('Val loss: ', sum(val_loss_history) / len(val_loss_history))
    # print('Val accuracy: ', sum(val_acc_history) / len(val_acc_history))
    # print('Attentive Model Score: ', score)
    # persist the training history for later inspection
    with open('hist.json', 'w') as f:
        json.dump(model_history.history, f)
    return model_history
def cal_att_weights(output, att_w):
    """Recompute attention weights from a layer's output and its weight list.

    output: list whose first element is the pre-attention activations.
    att_w:  [W, b, u] attention weights as returned by get_weights().
    Returns exp-scores normalized over the WHOLE array (global softmax).
    """
    scores = np.tanh(output[0].dot(att_w[0]) + att_w[1]).dot(att_w[2])
    scores = scores.reshape((scores.shape[0], scores.shape[1]))
    exp_scores = np.exp(scores)
    return exp_scores / exp_scores.sum()
def important_lstm_take_2():
    """Build and compile the attentive Bi-LSTM (4-way softmax classifier).

    Relies on the custom ``AttentionWithContext`` layer imported elsewhere in
    the file.  Vocab size 18211, padded sequence length 115200.
    """
    # 1
    model = Sequential()
    # 2
    # input length is the longest number of words
    # first argument is howe many unique words there are (i think)
    model.add(Embedding(18211, 64, input_length=115200))
    # 3
    model.add(Bidirectional(LSTM(units=64, activation='relu', return_sequences=True)))
    # 3
    model.add(Dropout(0.1))
    # 4
    model.add(TimeDistributed(Dense(128)))
    # 5
    model.add(AttentionWithContext())
    # 6
    model.add(Dropout(0.25))
    # 7
    # model.add(Flatten())
    model.add(Dense(4, activation="softmax"))
    # Comp.
    model.compile(optimizer='adam', loss='sparse_categorical_crossentropy')
    return model
if __name__ == '__main__':
    # Driver script (Python 2): loads the pre-padded sequences, trains the
    # attentive model on the small 4-book subset, and extracts the attention
    # weights for the test split.  Commented-out lines are earlier experiments.
    print 'hello world'
    # stringed_words = get_data()
    # make_padded_list(stringed_words)
    # small_string, big_string, quite_small = get_data()
    # make_padded_list(small_string)
    # Load List
    # pads = np.load('padded_keras_list.npy')
    small_pads = np.load('padded_small_keras_list.npy')
    # Make labels array (one label per book: 1 = first class, 0 = second)
    print small_pads
    small_y_labels = np.asarray([1,1,0,0])
    #y_labels = np.asarray(
    #    [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    #     0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
    # Split Data
    # x_train, x_test, y_train, y_test = train_test_division(pads, y_labels)
    # pandas dfs.
    small_x_train, small_x_test, small_y_train, small_y_test = train_test_division(small_pads, small_y_labels)
    print(type(small_x_train))
    print(small_x_train.shape)
    print(type(small_x_test))
    print(small_x_test.shape)
    print(type(small_y_train))
    print(small_y_train.shape)
    print(type(small_y_test))
    print(small_y_test.shape)
    # print(small_x_test.shape)
    # Create Models
    #lstm = create_LSTM()
    # lstm_2, attentive = important_lstm()
    # Small Models
    # vanilla_history, predictions = train_and_test_vanilla_model(small_x_train, small_x_test, small_y_train, small_y_test, lstm)
    # hist = train_and_test_attentive_model(small_x_train, small_x_test, small_y_train, small_y_test, lstm_2)
    # rough = rough_lstm()
    #model1 = train_and_test_attentive_model(small_x_train, small_x_test, small_y_train, small_y_test, rough)
    # small_pads = small_pads.T
    #vanilla_history = train_and_test_vanilla_model(x_train, x_test, y_train, y_test, lstm)
    #train_and_test_attentive_model(x_train, x_test, y_train, y_test, attentive)
    # from stack
    model1 = important_lstm_take_2()
    print(model1.summary())
    model_history = model1.fit(small_x_train, small_y_train, batch_size=32, epochs=1, verbose=1)
    # layer 4 is AttentionWithContext -- its weights feed cal_att_weights()
    sent_att_w = model1.layers[4].get_weights()
    test_seq = small_x_test
    print(np.shape(test_seq))
    print(np.count_nonzero(test_seq))
    print test_seq
    # Keras backend function: input layer -> output of the layer *before* attention
    sent_before_att_2 = K.function([model1.layers[0].input, K.learning_phase()], [model1.layers[3].output])
    out = sent_before_att_2([test_seq, 0])
    test_weight = sent_att_w[0]
    weights = cal_att_weights(out, sent_att_w)
    print weights
    # dataframe = pd.DataFrame(data=weights.astype(float))
    # dataframe.to_csv('outfile.csv', sep=' ', header=False, float_format='%.2f', index=False)
    np.savetxt('test.txt', weights, delimiter=',', fmt='%f')
| [
"tcs9pk@virginia.edu"
] | tcs9pk@virginia.edu |
133b4e75b57bd573d0b6d6f9c0e8caaaca1f9fae | 4e409e9c4175717e8c39e4896842391b7828f7e7 | /python/buildRF.py | a8be235268134883022f7c5a9e9116b552018ea5 | [] | no_license | oreluk/tagClassification | 5b6957f071a660e1fea302fd81b405357cbd1550 | ac903c8fdbac79e0d3265a328422de3bade03e1b | refs/heads/master | 2021-01-15T12:55:01.313268 | 2016-05-13T00:22:31 | 2016-05-13T00:22:31 | 58,685,148 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,956 | py | # Final Project - Competition 1 - Team 4
# Stat154 Spring 2015
# Jim Oreluk
import time
import os
import numpy as np
import re
#####################################################
### Import Data ###
#####################################################
t = time.time() #tic: start of the wall-clock timer (paired with the "toc" after loading)
def loadX():
    """Load the feature matrix from ``powerFeatures2.csv``.

    The header row and the empty element left by the trailing newline are
    dropped; cells are kept as strings.  Returns a 2-D numpy array.
    """
    # Context manager closes the handle even on error (the original
    # opened the file and never closed it).
    with open('powerFeatures2.csv', 'r') as fh:
        pf = fh.read()
    rows = [line.split(',') for line in pf.split('\n')]
    # rows[0] is the CSV header; rows[-1] is the '' after the final newline
    return np.asarray(rows[1:-1])
def loadY():
    """Load the training labels from ``yTrain.csv`` (one label per line).

    Drops the last split element -- the empty string left by a trailing
    newline.  NOTE: if the file does NOT end with a newline the final label is
    silently dropped (behaviour kept from the original).
    """
    # Context manager fixes the leaked file handle of the original.
    with open('yTrain.csv', 'r') as fh:
        labels = fh.read().split('\n')
    return np.asarray(labels[0:-1])
#####################################################
### Random Forest ###
#####################################################
# NOTE(review): sklearn.cross_validation and sklearn.externals.joblib only
# exist in old scikit-learn (< 0.20 / < 0.23); pin the dependency or port to
# sklearn.model_selection / the standalone joblib package.
from sklearn.ensemble import RandomForestClassifier
from sklearn import cross_validation
xTr = loadX()
yTr = loadY()
elapsed = time.time() - t #toc
print("The total time elasped was:" + str(elapsed))
# 5-fold shuffled CV; per-fold accuracy is computed from predicted
# probabilities assuming binary string labels '0'/'1'.
cv = cross_validation.KFold(len(xTr), n_folds=5, shuffle=True)
rf = RandomForestClassifier(n_estimators=1000, criterion='gini', \
    max_depth=None, min_samples_split=2, max_features='sqrt', \
    bootstrap=True, oob_score=False, n_jobs=-1, verbose=1)
score = []
for traincv, testcv in cv:
    probs = rf.fit(xTr[traincv], yTr[traincv]).predict_proba(xTr[testcv])
    predLabel = []
    for i in range(0,len(probs)):
        # pick the class with the higher predicted probability
        if probs[i][0] < probs[i][1]:
            predLabel.append('1')
        else:
            predLabel.append('0')
    predLabel = np.asarray(predLabel)
    # fraction of correct predictions on this fold
    cc = sum(predLabel == yTr[testcv]) / float(len(probs))
    score.append(cc)
    print('Completed Fold.')
np.savetxt('cvScore.csv', score, fmt="%s")
# Export Model: refit on ALL data, then pickle to disk
from sklearn.externals import joblib
fit = rf.fit(xTr,yTr)
joblib.dump(fit, 'rfModel.pkl', compress=9)
| [
"jim.oreluk@gmail.com"
] | jim.oreluk@gmail.com |
5c3dda335336b3b644e37fe7f8f4f46f4fd0ee86 | 60ce73bf2f86940438e5b7fecaaccad086888dc5 | /working_scrapers/Illinois_dekalb.py | d04843c1e230207cd3080ec2535d4860593519dd | [] | no_license | matthewgomies/jailcrawl | 22baf5f0e6dc66fec1b1b362c26c8cd2469dcb0d | 9a9ca7e1328ae549860ebeea9b149a785f152f39 | refs/heads/master | 2023-02-16T06:39:42.107493 | 2021-01-15T16:37:57 | 2021-01-15T16:37:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,161 | py | #!/usr/bin/python
'''
This is a template script
MG
'''
from urllib.request import urlopen, Request
import pandas as pd
import os
import time
import numpy as np
from datetime import datetime
import datetime as dt
import sys
from io import StringIO
from joblib import Parallel, delayed
import requests
from jailscrape.common import save_to_s3, get_browser, get_logger, record_error, save_pages_array
from jailscrape import crawlers
# jailscrape.common is a file that is part of the project which keeps
# most common boilerplate code out of this file
from selenium.webdriver.common.keys import Keys
import watchtower
from bs4 import BeautifulSoup
import re
import math
# NOTE: These are imports. They ideally don't change very often.
# It's OK to have a large, maximal set here and to bulk-edit files to add to these.
# MG - Extra imports
import selenium as sm
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
ROW_INDEX = 171 # Change this for each scraper. This references the row
# of the main jailcrawl spreadsheet. This index will be used to look up
# the URL as well as state/county info

# main() cross-checks these against the roster row and aborts on mismatch.
THIS_STATE = 'illinois' # Change the current state/county information.
THIS_COUNTY = 'dekalb'
def main(roster_row):
    """Scrape the DeKalb County, IL jail roster and save each results page to S3.

    roster_row: one row of the jailcrawl spreadsheet (dict-like); must match
    THIS_STATE/THIS_COUNTY.  On any failure the error is recorded via
    record_error() and the process exits with status 1.
    """
    try:
        logger = get_logger(roster_row)  # Get a standard logger

        browser = get_browser()  # Get a standard browser
        urlAddress = roster_row['Working Link']  # Main URL from the spreadsheet
        page_index = 0  # Used to separate output pages in S3
        logger.info('Set working link to _%s_', urlAddress)

        ####################################
        # Begin core specific scraping code
        if roster_row['State'].lower() != THIS_STATE or roster_row['County'].lower() != THIS_COUNTY:
            raise Exception("Expected county definition info from _%s, %s_, but found info: _%s_" % (THIS_COUNTY, THIS_STATE, roster_row))

        # Load the roster landing page and save it as page 0
        browser.get(urlAddress)
        time.sleep(np.random.uniform(7,10,1))
        store_source = browser.page_source
        save_to_s3(store_source, page_index, roster_row)
        logger.info('Saved page _%s_', page_index)

        # Work out how many result pages there are (10 inmates per page)
        soup = BeautifulSoup(store_source, 'lxml')
        page = 0
        for link in soup.findAll("div", {"class": "loca-search-head text-center"}):
            page = str(link.text)
            page = re.sub(' Results for "_"', "", page)
            page = math.ceil(int(page) / 10)

        # Crawl the remaining pages.  The pager's "next" anchor sits at
        # li[i+1] for pages 2-4 and stays at li[6] from page 5 onward
        # (the branches in the original were identical except for this index).
        for i in range(2, page + 1):
            if i > 30:
                # Hard cap: only the first 30 pages (300 inmates) are saved.
                print("Exceeds 300 inmates")
                continue
            li_index = i + 1 if i <= 4 else 6
            elem = browser.find_element_by_xpath(
                '/html/body/div/div/div/div[2]/div[3]/div[12]/ul/li[%d]/a' % li_index)
            elem.click()
            time.sleep(np.random.uniform(3,5,1))
            store_source = browser.page_source
            page_index = i - 1
            save_to_s3(store_source, page_index, roster_row)
            logger.info('Saved page _%s_', page_index)

        # End core specific scraping code
        ####################################

        logger.info('complete!')

    except Exception as errorMessage:
        # Record error in S3; include the browser (screenshot) if still alive
        try:
            browser.close()
            record_error(message=str(errorMessage), roster_row=roster_row, browser=browser)
        except:
            record_error(message=str(errorMessage), roster_row=roster_row)
        # Log error
        logger.error('Error: %s', errorMessage)
        sys.exit(1)
if __name__ == "__main__":
    #This will load in the current jail roster list
    #Select the index of the roster this script is for:
    #Write the name of the county and state
    # Runs the scraper for the single roster row selected by ROW_INDEX above.
    roster = pd.read_csv('/opt/jail_roster_final_rmDuplicates.csv',encoding = "utf-8")
    main(roster[roster['index'] == ROW_INDEX].iloc[0])
| [
"jborowitz@gmail.com"
] | jborowitz@gmail.com |
3d73e7b277e9fa32045273a746968aae4f013902 | d50c9e58f52e3aac3a55f8ff65013810c7f6ee01 | /1020.py | ba1cb6dc78c018cef26467eb5690f6bf33f91ecb | [] | no_license | Miicchhel/uri-exercicios | 7eea6068414d7aadb310ddb6f34e3778593d4284 | db1f9294555561c316e4e545943624edbf864f4d | refs/heads/master | 2022-09-23T08:23:27.285570 | 2020-06-03T08:38:05 | 2020-06-03T08:38:05 | 268,389,662 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 261 | py | '''
URI Online Judge | 1020
autor: Michel Melo
'''
# Convert a total number of days into years / months / days,
# using 365-day years and 30-day months as the problem defines.
dias_totais = int(input())
anos, resto = divmod(dias_totais, 365)
meses, dias = divmod(resto, 30)
print('{} ano(s)\n{} mes(es)\n{} dia(s)'.format(anos, meses, dias))
| [
"michel.ferreira.melo@gmail.com"
] | michel.ferreira.melo@gmail.com |
90e2a1876a8a58fbb729d51a5a369911b976686e | bf79091e5465ed2f8c8df652b9a592a18adb638e | /automation-tests/venv/bin/easy_install-3.8 | da73ab2fcb4001fdfc89fa0fd5480f95fe61bc93 | [] | no_license | marcin-pasiewicz/python | 9cb8dffef1e48533660279920305f6ef9a0d90eb | fc7e6ab32cb656dcb5ecc84855c99ec172d0633d | refs/heads/master | 2023-03-28T01:16:05.783615 | 2020-04-02T07:45:56 | 2020-04-02T07:45:56 | 250,227,326 | 0 | 0 | null | 2021-03-20T03:13:25 | 2020-03-26T10:27:42 | Python | UTF-8 | Python | false | false | 299 | 8 | #!/Users/marcin_pasiewicz/PycharmProjects/mypython/automation-tests/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys

from setuptools.command.easy_install import main

if __name__ == '__main__':
    # setuptools-generated console script: strip the "-script.py"/".exe"
    # wrapper suffix from argv[0] so easy_install sees its canonical name,
    # then delegate to it and propagate its exit status.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"marcin_pasiewicz@epam.com"
] | marcin_pasiewicz@epam.com |
63245acb528b36b71c34653867d875b948fc0ca8 | fb178669d247edc9f4df737285dcf0283eb0d49f | /code_hello_world.py | 0ca8ce474d8db0dca6e42c57106d74ff36aa85c8 | [] | no_license | Joey-Marcotte/ICS3U-FP-Lesson-1 | f8fdf753032035844fa58c3cdce3ef3d96b36170 | 8380cedc33617b6fc13b21afb2f6d9a2372bed43 | refs/heads/master | 2020-07-28T08:01:42.029678 | 2019-09-18T17:33:47 | 2019-09-18T17:33:47 | 209,358,801 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 406 | py | #!/usr/bin/env python3
# Created by: Joey Marcotte
# Created on: Sep 2019
# This program is the "Hello, World!" program on the PyBadge
def main():
    """Print the greeting to the PyBadge screen/serial console, then idle."""
    print("\n\n\n")
    print("Hello, World!")
    # Spin forever so the text stays on screen until the badge is powered off.
    while True:
        pass


if __name__ == "__main__":
    main()
| [
"ubuntu@ip-172-31-27-176.ec2.internal"
] | ubuntu@ip-172-31-27-176.ec2.internal |
52653aba24afa2f912822861c64e6be326d87c8b | cc36eb9b7fd062b3b39b3b3c8ad92c4a7a4b6664 | /detectors/MaskDetector/MaskRCN/parallel_model.py | 9d633e6b36a542c8510dfb7c7a7045b003b559ff | [] | no_license | JosephSanjaya/PersonDetectors | e0766b375b751da5b4bfe0b8d5bbbbb2d7fae5c6 | 2cd4ac7c8c125654ca44e4efa6d46b3f1fb58210 | refs/heads/master | 2020-07-31T02:48:06.248033 | 2018-05-15T21:44:15 | 2018-05-15T21:44:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,951 | py | """
Mask R-CNN
Multi-GPU Support for Keras.
Copyright (c) 2017 Matterport, Inc.
Licensed under the MIT License (see LICENSE for details)
Written by Waleed Abdulla
Ideas and a small code snippets from these sources:
https://github.com/fchollet/keras/issues/2436
https://medium.com/@kuza55/transparent-multi-gpu-training-on-tensorflow-with-keras-8b0016fd9012
https://github.com/avolkov1/keras_experiments/blob/master/keras_exp/multigpu/
https://github.com/fchollet/keras/blob/master/keras/utils/training_utils.py
"""
import tensorflow as tf
import keras.backend as K
import keras.layers as KL
import keras.models as KM
class ParallelModel(KM.Model):
    """Subclasses the standard Keras Model and adds multi-GPU support.
    It works by creating a copy of the trained_model on each GPU. Then it slices
    the inputs and sends a slice to each copy of the trained_model, and then
    merges the outputs together and applies the loss on the combined
    outputs.
    """

    def __init__(self, keras_model, gpu_count):
        """Class constructor.
        keras_model: The Keras trained_model to parallelize
        gpu_count: Number of GPUs. Must be > 1
        """
        self.inner_model = keras_model
        self.gpu_count = gpu_count
        merged_outputs = self.make_parallel()
        super(ParallelModel, self).__init__(inputs=self.inner_model.inputs,
                                            outputs=merged_outputs)

    def __getattribute__(self, attrname):
        """Redirect loading and saving methods to the inner trained_model. That's where
        the weights are stored.

        NOTE(review): the substring match redirects ANY attribute whose name
        contains 'load' or 'save', not just the save/load methods.
        """
        if 'load' in attrname or 'save' in attrname:
            return getattr(self.inner_model, attrname)
        return super(ParallelModel, self).__getattribute__(attrname)

    def summary(self, *args, **kwargs):
        """Override summary() to display summaries of both, the wrapper
        and inner models."""
        super(ParallelModel, self).summary(*args, **kwargs)
        self.inner_model.summary(*args, **kwargs)

    def make_parallel(self):
        """Creates a new wrapper trained_model that consists of multiple replicas of
        the original trained_model placed on different GPUs.

        Returns the merged output tensors (one per inner-model output).
        """
        # Slice inputs. Slice inputs on the CPU to avoid sending a copy
        # of the full inputs to all GPUs. Saves on bandwidth and memory.
        input_slices = {name: tf.split(x, self.gpu_count)
                        for name, x in zip(self.inner_model.input_names,
                                           self.inner_model.inputs)}

        output_names = self.inner_model.output_names
        outputs_all = []
        for i in range(len(self.inner_model.outputs)):
            outputs_all.append([])

        # Run the trained_model call() on each GPU to place the ops there
        for i in range(self.gpu_count):
            with tf.device('/gpu:%d' % i):
                with tf.name_scope('tower_%d' % i):
                    # Run a slice of inputs through this replica
                    zipped_inputs = zip(self.inner_model.input_names,
                                        self.inner_model.inputs)
                    inputs = [
                        KL.Lambda(lambda s: input_slices[name][i],
                                  output_shape=lambda s: (None,) + s[1:])(tensor)
                        for name, tensor in zipped_inputs]

                    # Create the trained_model replica and get the outputs
                    outputs = self.inner_model(inputs)
                    if not isinstance(outputs, list):
                        outputs = [outputs]

                    # Save the outputs for merging back together later
                    for l, o in enumerate(outputs):
                        outputs_all[l].append(o)

        # Merge outputs on CPU
        with tf.device('/cpu:0'):
            merged = []
            for outputs, name in zip(outputs_all, output_names):
                # If outputs are numbers without dimensions, add a batch dim.
                def add_dim(tensor):
                    """Add a dimension to tensors that don't have any."""
                    if K.int_shape(tensor) == ():
                        return KL.Lambda(lambda t: K.reshape(t, [1, 1]))(tensor)
                    return tensor
                outputs = list(map(add_dim, outputs))

                # Concatenate
                merged.append(KL.Concatenate(axis=0, name=name)(outputs))
        return merged
if __name__ == "__main__":
    # Testing code below. It creates a simple trained_model to train on MNIST and
    # tries to run it on 2 GPUs. It saves the graph so it can be viewed
    # in TensorBoard. Run it as:
    #
    # python3 parallel_model.py
    # (Downloads the MNIST dataset on first run.)

    import os
    import numpy as np
    import keras.optimizers
    from keras.datasets import mnist
    from keras.preprocessing.image import ImageDataGenerator

    GPU_COUNT = 2

    # Root directory of the project
    ROOT_DIR = os.getcwd()

    # Directory to save logs and trained trained_model
    MODEL_DIR = os.path.join(ROOT_DIR, "logs/parallel")

    def build_model(x_train, num_classes):
        # Reset default graph. Keras leaves old ops in the graph,
        # which are ignored for execution but clutter graph
        # visualization in TensorBoard.
        tf.reset_default_graph()

        inputs = KL.Input(shape=x_train.shape[1:], name="input_image")
        x = KL.Conv2D(32, (3, 3), activation='relu', padding="same",
                      name="conv1")(inputs)
        x = KL.Conv2D(64, (3, 3), activation='relu', padding="same",
                      name="conv2")(x)
        x = KL.MaxPooling2D(pool_size=(2, 2), name="pool1")(x)
        x = KL.Flatten(name="flat1")(x)
        x = KL.Dense(128, activation='relu', name="dense1")(x)
        x = KL.Dense(num_classes, activation='softmax', name="dense2")(x)
        return KM.Model(inputs, x, "digit_classifier_model")

    # Load MNIST Data and scale pixel values to [0, 1]
    (x_train, y_train), (x_test, y_test) = mnist.load_data()
    x_train = np.expand_dims(x_train, -1).astype('float32') / 255
    x_test = np.expand_dims(x_test, -1).astype('float32') / 255

    print('x_train shape:', x_train.shape)
    print('x_test shape:', x_test.shape)

    # Build data generator and trained_model
    datagen = ImageDataGenerator()
    model = build_model(x_train, 10)

    # Add multi-GPU support.
    model = ParallelModel(model, GPU_COUNT)

    optimizer = keras.optimizers.SGD(lr=0.01, momentum=0.9, clipnorm=5.0)
    model.compile(loss='sparse_categorical_crossentropy',
                  optimizer=optimizer, metrics=['accuracy'])

    model.summary()

    # Train
    model.fit_generator(
        datagen.flow(x_train, y_train, batch_size=64),
        steps_per_epoch=50, epochs=10, verbose=1,
        validation_data=(x_test, y_test),
        callbacks=[keras.callbacks.TensorBoard(log_dir=MODEL_DIR,
                                               write_graph=True)]
    )
| [
"madhawavidanapathirana@gmail.com"
] | madhawavidanapathirana@gmail.com |
6f409ce181ccfacc565feea9433e652a11fe88ae | c6939d3e5d5628673d44d29ef38b0511556a83aa | /new_shangmi/shangmi/apis_v1.py | a76dd4e4cbce2576231baab2a614b4a0b49d8b0d | [] | no_license | General-Coder/shangmiteam | a536867a7e03f33eec3d2c55c0f55a1cb7ae1b85 | a628e38a545ffc36caa4c05d2fb5b73398a26ac1 | refs/heads/master | 2020-04-11T06:03:28.468625 | 2018-12-11T09:33:14 | 2018-12-11T09:33:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,462 | py | import json
import requests
from django.conf import settings
from django.forms import model_to_dict
from django.http import JsonResponse, HttpResponse
from django.views.generic import View
from .utils import *
from .models import *
from django.core.cache import caches
from .getqr import *
import uuid
user_cache = caches['user']  # named Django cache holding login token -> user id (see LoginAPI)
class LoginAPI(View):
    """WeChat mini-program login: exchange a js code for an openid, issue a token."""

    def post(self, request):
        params = request.POST
        code = params.get('code')
        avatar = params.get('avatar')
        # gender = params.get('gender')
        nick_name = params.get('name')
        mini_type = params.get('mini_type')
        token = params.get("token")
        user_id = user_cache.get(token)
        if user_id:
            # Token still valid: refresh its TTL and return it unchanged.
            user_cache.set(token, user_id, settings.LOGIN_TIMEOUT)
            return JsonResponse({'code': 0, 'data': {'token': token, "uid": user_id}})
        # Pick the appid/secret of the calling mini program.
        # SECURITY(review): app secrets are hard-coded in source; move them to
        # settings/environment and rotate the exposed values.
        if mini_type == 'background':
            appid = 'wx4a8c99d5d8b43556'
            secret = '014ad578b31357e53b61b9ab69db0761'
        elif mini_type == 'customer':
            appid = 'wx8b50ab8fa813a49e'
            secret = 'b32f63c36ea123710173c4c9d4b15e8b'
        else:
            appid = 'wxebd828458f8b2b38'
            secret = 'a40cb9c5ecb1f4f5c0f31b75829fed03'
        # Presumably WeChat's code2session endpoint -- URL comes from settings.
        url = settings.SMALL_WEIXIN_OPENID_URL
        params = {"appid": appid,
                  "secret": secret,
                  "js_code": code,
                  "grant_type": 'authorization_code'
                  }
        response = requests.get(url, params=params)
        data = json.loads(response.content.decode())
        if 'openid' in data:
            openid = data.get('openid')
            # Create the user on first login, then refresh profile fields.
            user = ShangmiUser.objects.get_or_create(openid=openid)[0]
            # token = generate_validate_token(str(user.id))
            token = uuid.uuid4().hex
            user_cache.set(token, user.id, settings.LOGIN_TIMEOUT)
            user.nick_name = nick_name
            user.icon = avatar
            user.source = mini_type
            user.save()
            return HttpResponse(json.dumps({'code': 0, 'data': {'token': token, "uid": user.id}}),
                                content_type='application/json')
        else:
            # WeChat rejected the code (expired/invalid).
            return HttpResponse(json.dumps({'code': 1, 'msg': 'failed'}),
                                content_type='application/json')
class ActivesAPI(View):
    """List enabled activities, split into fast and non-fast, with completion %.

    Response: {"code": 1, "msg": "ok", "data": {"fast": [...], "unfast": [...]}}
    (code 1 on success is kept as-is for existing clients).
    """

    def get(self, req):
        def serialize(active):
            # model fields plus a "percent" completion string, e.g. "50.0%";
            # guard against division by zero when need_num is 0.
            entry = model_to_dict(active)
            if active.need_num == 0:
                entry["percent"] = "0%"
            else:
                entry["percent"] = str((active.complete_num / active.need_num) * 100) + "%"
            return entry

        # The original built fast_data/unfast_data twice; the first pair of
        # list comprehensions was dead code and has been removed.
        actives = Active.objects.filter(is_active=True)
        fast_data = [serialize(a) for a in actives.filter(is_fast=True)]
        unfast_data = [serialize(a) for a in actives.filter(is_fast=False)]
        result = {
            "code": 1,
            "msg": "ok",
            "data": {
                "fast": fast_data,
                "unfast": unfast_data
            }
        }
        return JsonResponse(result)
class AdvAPI(View):
    """List all advertisements currently enabled (is_used=True)."""

    def get(self, req):
        banners = Advertise.objects.filter(is_used=True)
        payload = [model_to_dict(banner) for banner in banners]
        return JsonResponse({"code": 1, "msg": "ok", "data": payload})
class IndexAPI(View):
    """Personal-centre summary: pending/approved task counts and point balance."""

    # @login_req
    def get(self, req):
        # NOTE(review): an invalid/expired token makes user_cache.get() return
        # None and int(None) raise TypeError -- confirm upstream guarantees.
        user = ShangmiUser.objects.get(pk=int(user_cache.get(req.GET.get("token"))))
        actives = UserActiveLog.objects.filter(user=user)
        # status 0: submitted, awaiting review
        doing_count = actives.filter(status=0).count()
        # status 1: approved
        finish_count = actives.filter(status=1).count()
        # A new user may have no Balance row yet; default to 0 points.
        # (Was a bare ``except`` -- narrowed so real errors aren't swallowed.)
        try:
            money = Balance.objects.get(user=user).money
        except Balance.DoesNotExist:
            money = 0
        data = {
            "code": 0,
            "data": {
                'money': money,
                'doing_count': doing_count,
                'finish_count': finish_count
            }
        }
        return JsonResponse(data)
# 用户参加活动明细
class UserActiveLogAPI(View):
    """Activities the user joined (approved entries only), newest first."""

    def get(self, req):
        user = ShangmiUser.objects.get(
            pk=int(user_cache.get(req.GET.get("token"))))
        approved = UserActiveLog.objects.filter(
            user=user, status=1).order_by("-create_time")
        entries = []
        for log in approved:
            entry = model_to_dict(log)
            entry['create_time'] = log.create_time.strftime("%Y年%m月%d日 %H:%M")
            entry["status"] = log.get_status_display()
            entry["active_msg"] = model_to_dict(log.active)
            entry["type"] = log.get_type_display()
            entries.append(entry)
        return JsonResponse({"code": 0, "data": entries})
# 付款明细
class UserPayLogAPI(View):
    """Payment history of the logged-in user (successful payments only)."""

    def get(self, req):
        user = ShangmiUser.objects.get(
            pk=int(user_cache.get(req.GET.get("token"))))
        payments = UserPayLog.objects.filter(
            user=user, status=1).order_by("-create_time")
        records = []
        for payment in payments:
            record = model_to_dict(payment)
            record['create_time'] = payment.create_time.strftime("%Y年%m月%d日 %H:%M:%S")
            record["store_name"] = payment.store.name
            # stored values are hundredths; expose them as whole units
            record["money"] = payment.money / 100
            record["integral"] = payment.integral / 100
            records.append(record)
        return JsonResponse({"code": 0, "data": records})
# 任务明细
class TaskDetailAPI(View):
    """All task (activity) records of the user, any status, newest first."""

    def get(self, req):
        user = ShangmiUser.objects.get(
            pk=int(user_cache.get(req.GET.get("token"))))
        logs = UserActiveLog.objects.filter(user=user).order_by("-create_time")
        details = []
        for log in logs:
            item = model_to_dict(log)
            item['create_time'] = log.create_time.strftime("%Y年%m月%d日 %H:%M")
            item["status"] = log.get_status_display()
            item["active_msg"] = model_to_dict(log.active)
            item["type"] = log.get_type_display()
            details.append(item)
        return JsonResponse({"code": 0, "data": details})
class ActiveAPI(View):
    """Fetch a single activity by its id (GET parameter ``id``)."""

    def get(self, req):
        active = Active.objects.get(pk=int(req.GET.get("id")))
        return JsonResponse({"code": 0, "data": model_to_dict(active)})
class ShareGetMoneyAPI(View):
    # NOTE(review): dead/incomplete stub -- ``user_cache.get()`` is missing its
    # key argument (TypeError if ever called), ``share_uid`` is unused, and the
    # method returns nothing.  Share rewards are actually granted inside
    # JoinActiveAPI below; confirm whether this endpoint can be removed.
    def post(self, req):
        token = req.POST.get("token")
        share_uid = req.POST.get("uid")
        user = user_cache.get()
class JoinActiveAPI(View):
    """Join an activity: award points to the joiner and, optionally, the sharer.

    POST params: token (login token), id (activity id), uid (sharer user id,
    "-1" when the page was not opened from a share link).
    """

    def post(self, req):
        user = ShangmiUser.objects.get(pk=int(user_cache.get(
            req.POST.get("token")
        )))
        uid = req.POST.get("uid")
        id = req.POST.get("id")
        active = Active.objects.get(id=id)
        if active.is_active == False:
            # activity already closed
            data = {
                "code": 3,
                "data": "活动已结束"
            }
            return JsonResponse(data)
        # First check whether this user has already participated.
        # NOTE(review): the filter is by user only, NOT by (user, active) --
        # as written, a user who joined ANY activity can never join another.
        # Confirm whether ``active_id=id`` should be part of this filter.
        if UserActiveLog.objects.filter(user_id=user.id).exists():
            data = {
                "code": 2,
                "data": "您已参加,想赚更多可分享"
            }
            return JsonResponse(data)
        log = UserActiveLog.objects.create(
            active_id=id,
            user_id=user.id,
            integral=active.give_money,
            type="join",
            status=1
        )
        active.complete_num += 1
        active.save()
        # Update the user's balance.
        # NOTE(review): read-modify-write without a transaction or F() --
        # racy under concurrent joins.
        user_balance = Balance.objects.get_or_create(user_id=user.id)[0]
        user_balance.money += active.give_money
        user_balance.save()
        if int(uid) != -1 and int(uid) != user.id:
            # Credit the sharer as well (self-shares are ignored).
            UserActiveLog.objects.create(
                active_id=id,
                user_id=uid,
                integral=active.share_give_money,
                type="share",
                status=1
            )
            # Update the sharer's point balance.
            share_user_balance = Balance.objects.get(user_id=uid)
            share_user_balance.money += active.share_give_money
            share_user_balance.save()
        data = {
            "code": 0,
            "data": "参与成功,积分已发放到个人中心"
        }
        return JsonResponse(data)
class QrcodeAPI(View):
    """Render the mini-program QR code (PNG) pointing at an activity's join page."""

    def get(self, request):
        active_id = int(request.GET.get('active_id'))
        join_path = 'pages/join/join?uid=-1&aid=%s' % active_id
        return HttpResponse(get_qrcode(join_path), content_type="image/png")
class StoreAPI(View):
    """Store detail: store fields, owner name/icon, and the caller's balance."""

    def get(self, req):
        user = ShangmiUser.objects.get(
            pk=int(user_cache.get(req.GET.get("token"))))
        # get_or_create keeps this endpoint from raising DoesNotExist for
        # users without a Balance row yet (matches JoinActiveAPI's pattern).
        balance = Balance.objects.get_or_create(user_id=user.id)[0]
        store = Store.objects.get(id=int(req.GET.get("sid")))
        if not store.is_active:
            # store is not participating at the moment
            return JsonResponse({"code": 2, "data": "该店暂不参与"})
        store_dict = model_to_dict(store)
        store_dict["boss_name"] = store.boss.nick_name
        store_dict["boss_icon"] = store.boss.icon
        # stored value is in hundredths; expose as whole units
        store_dict["user_balance"] = balance.money / 100
        return JsonResponse({"code": 0, "data": store_dict})
"1625211623@qq.com"
] | 1625211623@qq.com |
e3421447a8225cc4e8464a1815d43de78d1715f1 | 30a1b285ff4aab39eebe342c5dbca255a69b454c | /full-problems/maxDiff.py | 347a657be99ca517cd6ae0e9e6234e8672f61c47 | [
"Apache-2.0"
] | permissive | vikas-t/practice-problems | cd5852ea112421a2a39db31ae9092c6a148b2af8 | ea654d1cad5374c824c52da9d3815a9546eb43fa | refs/heads/master | 2021-10-27T14:08:42.724019 | 2019-04-17T18:26:23 | 2019-04-17T18:26:23 | 170,156,225 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 442 | py | #!/usr/bin/python3
# https://practice.geeksforgeeks.org/problems/maximum-difference/0
def sol(arr, n=None):
    """Return the largest value of ``arr[j] - arr[i]`` over pairs ``j > i``.

    Single O(n) pass that tracks the index of the minimum element seen
    so far (the classic "maximum difference" problem).

    Args:
        arr: sequence of numbers (the original problem uses integers).
        n: number of leading elements of ``arr`` to consider; defaults
           to ``len(arr)`` so callers may now omit it (backward
           compatible generalization).

    Returns:
        The best difference, floored at -1 — so arrays with fewer than
        two elements, or strictly decreasing arrays, yield -1.
    """
    if n is None:
        n = len(arr)
    best = -1           # result floor mandated by the problem statement
    min_i = 0           # index of the minimum usable for differences at i
    min_till_here = 0   # index of the minimum over the scanned prefix
    for i in range(1, n):
        if arr[i] < arr[min_till_here]:
            min_till_here = i
        # Adopt the new minimum only once it lies strictly before i,
        # so arr[min_i] always precedes arr[i].
        if min_till_here != min_i and min_till_here < i:
            min_i = min_till_here
        best = max(best, arr[i] - arr[min_i])
    return best
# Sample run from the problem statement; expected output: 11 (14 - 3).
values = [5, 15, 3, 4, 5, 14]
print(sol(values, len(values)))
"vikas@optumsoft.com"
] | vikas@optumsoft.com |
250a3a51f3343b4685807270982766fbc9d80694 | 999ab5b1c1b259e0f224d7c2f249db4ff39168e7 | /Course_2_DS/2.3.py | 39400c8af3c088183fbc8cae7fb7ed774fb7398f | [] | no_license | medha130101/py4e_Coursera | 87601a5683af560ace4130797ea12e7f537e0010 | 335e03dfc61a093c9f4b5c5ddaf3dd2fb72334a3 | refs/heads/master | 2022-08-01T01:06:44.222696 | 2020-05-25T07:32:42 | 2020-05-25T07:32:42 | 266,498,647 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 358 | py | # Use the file name mbox-short.txt as the file name
# Average the X-DSPAM-Confidence values found in a mailbox file
# (e.g. mbox-short.txt).
fname = input("Enter file name: ")
count = 0
total = 0.0
# Use a context manager so the file is always closed (the original
# left the handle open); also avoid shadowing the builtin `str`.
with open(fname) as fh:
    for line in fh:
        if not line.startswith("X-DSPAM-Confidence:"):
            continue
        count += 1
        # The numeric value starts right after the 19-character tag.
        total += float(line[19:])
print('Average spam confidence:', total / count)
| [
"medha130101@gmail.com"
] | medha130101@gmail.com |
c00c93c6d1b42848ce17ef71d6a8a741b5185e74 | 5d01334b9eaa86889f5ebf8903906d1539f23d16 | /validation/bilevel/bilevel_mibs2.py | 016716acfb08a68939caa7e3fb96aae53a127edc | [
"MIT"
] | permissive | toulbar2/toulbar2 | 07b322821b932431c344663abef517ad15c2d137 | abfcb59daf8eec8d2ae4f3c83792050ff68359c7 | refs/heads/master | 2023-08-17T23:19:16.417151 | 2023-08-09T14:01:02 | 2023-08-09T14:01:02 | 132,434,975 | 56 | 17 | MIT | 2023-06-01T14:05:02 | 2018-05-07T09:03:53 | C++ | UTF-8 | Python | false | false | 2,995 | py |
import pytoulbar2 as tb2
# Bilevel problem built with toulbar2's bilevel mode in four phases,
# switching cfn.Option.bilevel to 1 (restricted leader), 2 (follower),
# 3 (negated follower copy) and 4 (solve).  After each build phase the
# current lower bound and negative-cost offset are snapshotted into
# initialLbBLP / negCostBLP and reset to zero.
# NOTE(review): this bookkeeping mirrors toulbar2's internal bilevel
# protocol -- confirm against the pytoulbar2 docs before changing it.
cfn = tb2.CFN(ubinit = 1000, verbose = 0)  # initial upper bound 1000, quiet
cfn.NoPreprocessing()
cfn.Option.btdMode = 1  # solve on a tree decomposition
cfn.Option.hbfs = 0  # disable hybrid best-first search
# Phase 1: create restricted leader problem.
# Binary leader variables x0..x2 tied by one knapsack-style constraint.
cfn.Option.bilevel = 1
cfn.AddVariable('x0',range(2))
cfn.AddVariable('x1',range(2))
cfn.AddVariable('x2',range(2))
cfn.AddLinearConstraint([7,5,2],['x0','x1','x2'],'<=',9)  # 7*x0 + 5*x1 + 2*x2 <= 9
# Snapshot and reset the leader sub-problem's bound bookkeeping.
cfn.Option.initialLbBLP = cfn.Option.initialLbBLP + [cfn.CFN.wcsp.getLb()]
cfn.CFN.wcsp.setLb(0)
cfn.Option.negCostBLP = cfn.Option.negCostBLP + [cfn.CFN.wcsp.getNegativeLb()]
cfn.CFN.wcsp.decreaseLb(-cfn.CFN.wcsp.getNegativeLb())
# Phase 2: create follower problem.
# Follower variables C0..C2 carry negative unary costs (-8, -12, -3 per
# unit), i.e. minimizing them maximizes 8*C0 + 12*C1 + 3*C2, under a
# capacity constraint; each Ci is capped by the leader's complementary
# choice (forbidden combinations get cost 1000000).
cfn.Option.bilevel = 2
cfn.AddVariable('C0',range(4))
cfn.AddVariable('C1',range(3))
cfn.AddVariable('C2',range(5))
cfn.AddFunction(['C0','C1','C2'], [(0 if (11 * v0 + 4 * v1 + 6 * v2 <= 50) else 1000000) for v0 in range(4) for v1 in range(3) for v2 in range(5)])
cfn.AddFunction(['x0','C0'], [(0 if v0 <= 3*(1-x0) else 1000000) for x0 in range(2) for v0 in range(4)])
cfn.AddFunction(['x1','C1'], [(0 if v1 <= 2*(1-x1) else 1000000) for x1 in range(2) for v1 in range(3)])
cfn.AddFunction(['x2','C2'], [(0 if v2 <= 4*(1-x2) else 1000000) for x2 in range(2) for v2 in range(5)])
cfn.AddFunction(['C0'], [-8 * v0 for v0 in range(4)])
cfn.AddFunction(['C1'], [-12 * v1 for v1 in range(3)])
cfn.AddFunction(['C2'], [-3 * v2 for v2 in range(5)])
# Snapshot and reset the follower sub-problem's bound bookkeeping.
cfn.Option.initialLbBLP = cfn.Option.initialLbBLP + [cfn.CFN.wcsp.getLb()]
cfn.CFN.wcsp.setLb(0)
cfn.Option.negCostBLP = cfn.Option.negCostBLP + [cfn.CFN.wcsp.getNegativeLb()]
cfn.CFN.wcsp.decreaseLb(-cfn.CFN.wcsp.getNegativeLb())
# Phase 3: create negative form of follower problem.
# Mirror copy (C0neg..C2neg) with the follower objective's sign flipped;
# presumably combined with costMultiplierBLP = -1 below so the solver can
# evaluate the follower's best response -- confirm with pytoulbar2 docs.
cfn.Option.bilevel = 3
cfn.AddVariable('C0neg',range(4))
cfn.AddVariable('C1neg',range(3))
cfn.AddVariable('C2neg',range(5))
cfn.AddFunction(['C0neg','C1neg','C2neg'], [(8 * v0 + 12 * v1 + 3 * v2 if (11 * v0 + 4 * v1 + 6 * v2 <= 50) else 1000000) for v0 in range(4) for v1 in range(3) for v2 in range(5)])
cfn.AddFunction(['x0','C0neg'], [(0 if v0 <= 3*(1-x0) else 1000000) for x0 in range(2) for v0 in range(4)])
cfn.AddFunction(['x1','C1neg'], [(0 if v1 <= 2*(1-x1) else 1000000) for x1 in range(2) for v1 in range(3)])
cfn.AddFunction(['x2','C2neg'], [(0 if v2 <= 4*(1-x2) else 1000000) for x2 in range(2) for v2 in range(5)])
cfn.Option.initialLbBLP = cfn.Option.initialLbBLP + [cfn.CFN.wcsp.getLb()]
cfn.CFN.wcsp.setLb(0)
cfn.Option.negCostBLP = cfn.Option.negCostBLP + [cfn.CFN.wcsp.getNegativeLb()]
cfn.CFN.wcsp.decreaseLb(-cfn.CFN.wcsp.getNegativeLb())
# Phase 4: configure the three sub-problems' scaling and solve.
cfn.Option.bilevel = 4
cfn.Option.decimalPointBLP = [0,0,0]  # integer costs in all three parts
cfn.Option.costMultiplierBLP = [1.,1.,-1.]  # negate the mirrored follower part
cfn.Option.initialUbBLP = [tb2.tb2.MAX_COST,tb2.tb2.MAX_COST,tb2.tb2.MAX_COST]
print(cfn.Option.negCostBLP)
print(cfn.Option.initialLbBLP)
# Restore the combined leader (index 0) + mirrored-follower (index 2)
# lower bound and negative-cost offset before solving.
cfn.CFN.wcsp.setLb(cfn.Option.initialLbBLP[0] + cfn.Option.initialLbBLP[2])
cfn.CFN.wcsp.decreaseLb(cfn.Option.negCostBLP[0] + cfn.Option.negCostBLP[2])
# Cluster/variable ordering for the decomposition (one line per cluster).
cfn.Option.setVarOrder('0 -1 0 1 2\n1 0 0 1 2\n2 0 0 1 2 3 4 5\n3 0 0 1 2 6 7 8\n')
cfn.Solve(showSolutions=3)
| [
"degivry@toulouse.inra.fr"
] | degivry@toulouse.inra.fr |
4d8efb14bfc9d763266281293a83734cd6351144 | 6c7d51637785b55f7d4e6e4450740729132dc1b6 | /colliers_scraper/spiders/CBRE.py | 8bd76083a3d88a612af615ba159026852fe8e5ad | [] | no_license | bradstone52/rescraper | fe813207dfc5e93d54df122d4abeae3caf4fdb3b | 71e3900be736b8a6ce7ac232d170908cadf4c56e | refs/heads/master | 2023-04-03T00:45:57.706935 | 2021-04-13T05:43:52 | 2021-04-13T05:43:52 | 355,763,584 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 958 | py | import scrapy
from scrapy_splash import SplashRequest
class CbreSpider(scrapy.Spider):
    """Scrape industrial for-sale listings from the CBRE Canada site.

    Pages go through Splash because the listing content is rendered
    client-side by JavaScript.
    """

    name = 'CBRE'
    base_url = 'http://www.commerciallistings.cbre.ca'

    def start_requests(self):
        """Open the industrial-listings results page."""
        url = 'http://www.commerciallistings.cbre.ca/en-CA/listings/industrial/results?aspects=isSale/'
        yield SplashRequest(url=url, callback=self.parse, args={'wait': 10})

    def parse(self, response):
        """Follow every listing card to its detail page."""
        # Each card carries a site-relative href.
        for path in response.xpath("//a[@class='card_content']/@href").extract():
            yield SplashRequest(url=self.base_url + path, callback=self.parse_details, args={'wait': 5})

    def parse_details(self, response):
        """Extract the size and description fields from a detail page."""
        item = {
            "Size": response.xpath("//div[@class='cbre_table__cell col-xs-6 col-sm-5 col-lg-4'][1]/span/text()").get(),
            "Property Description": response.xpath("//h1[@class='cbre_h1']/div/span/span/text()").get(),
        }
        yield item
"bradstone52@gmail.com"
] | bradstone52@gmail.com |
3b44fec9bb31af3be10a66a4eea127ebec478e4c | 57085cfc580c68262631166bb28585a2946d2b2e | /Project Kivy/SimpleDrawingApp/main.py | bd564c0515a1b96809cbcee47f44dc805bff67b3 | [] | no_license | izzatii/kivyapp | 589a2f94286d388bf237e310b92debdae4379d59 | b35f526f0e93325dfb997347df59966a4ec0e7f3 | refs/heads/main | 2023-04-10T22:22:07.844136 | 2021-04-24T01:44:15 | 2021-04-24T01:44:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 950 | py | import kivy
from kivy.app import App
from kivy.uix.widget import Widget
from kivy.graphics import Rectangle
from kivy.graphics import Color
from kivy.graphics import Line
class Touch(Widget):
    """Widget that draws a static polyline and a draggable rectangle."""

    def __init__(self, **kwargs):
        super(Touch, self).__init__(**kwargs)
        with self.canvas:
            Color(0, 1, 0, 0.5, mode='rgba')  # translucent green for the lines
            Line(points=(20, 30, 400, 500, 60, 500))
            Color(1, 0, 0, 0.5, mode='rgba')  # translucent red for the marker
            self.rect = Rectangle(pos=(0, 0), size=(50, 50))  # initial position

    def on_touch_down(self, touch):
        """Jump the rectangle to the touch location."""
        print("Mouse Down", touch)
        self.rect.pos = touch.pos

    def on_touch_move(self, touch):
        """Drag the rectangle along with the touch."""
        print("Mouse Move", touch)
        self.rect.pos = touch.pos
class MyApp(App):
    """Application class whose root widget is the drawing surface."""

    def build(self):
        root = Touch()
        return root
if __name__ == "__main__":
    # Start the Kivy event loop only when run as a script.
    app = MyApp()
    app.run()
| [
"noreply@github.com"
] | izzatii.noreply@github.com |
550316b6e7933d5b70efdcae7eb58b238ce71293 | 4fc21c3f8dca563ce8fe0975b5d60f68d882768d | /neoOkpara/Phase-1/Day6/parseNumber.py | 3b2a14cf121441929c33c9acc1d732d222ef962d | [
"MIT"
] | permissive | Uche-Clare/python-challenge-solutions | 17e53dbedbff2f33e242cf8011696b3059cd96e9 | 49ede6204ee0a82d5507a19fbc7590a1ae10f058 | refs/heads/master | 2022-11-13T15:06:52.846937 | 2020-07-10T20:59:37 | 2020-07-10T20:59:37 | 266,404,840 | 1 | 0 | MIT | 2020-05-23T19:24:56 | 2020-05-23T19:24:55 | null | UTF-8 | Python | false | false | 53 | py | n = "938.968"
# Parse the decimal string once, then show both float and int forms.
parsed = float(n)
print(parsed)
print(int(parsed))
| [
"emmox55@gmail.com"
] | emmox55@gmail.com |
3f6f3c98379f75bbddb482857cf7d063368b608d | c26c554bca72ccee89d9fdc6bd74655817689eef | /venv/bin/pip3.8 | ff65fc3acc730c1299d9241b56b8604bf3735192 | [] | no_license | moriya1232/ex1-bina | 1a30ada9f0484fcf412d94d106eabc1b91b80536 | 810cbedc61c52295e52de5b5a7305200f57a82b2 | refs/heads/master | 2023-01-21T16:57:06.602531 | 2020-11-26T09:51:16 | 2020-11-26T09:51:16 | 310,950,771 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 262 | 8 | #!/home/moriya1232/PycharmProjects/ex1-bina/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    # Console-script shim generated by pip/setuptools: strip any
    # "-script.pyw" or ".exe" launcher suffix from argv[0] so pip sees a
    # clean program name, then exit with pip's own return code.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"moriya1232@gmail.com"
] | moriya1232@gmail.com |
9c970c775b50c67a5493ab12714e64a33cb90e85 | 302095ab88572a975f1000a2a53aa496c10c27dc | /tshirt.py | 3a78ba3cf92286114de45204a4e8a89bd9657e5d | [] | no_license | mtj6/class_project | b621da39f58e1507e5b444202c842b1e372afa39 | 9e39b19052be580b573da204fb553ad7717121da | refs/heads/master | 2020-04-08T11:05:29.434312 | 2018-11-27T07:26:22 | 2018-11-27T07:26:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 203 | py | def make_shirt(size, text):
"""Make a t-shirt."""
print('Make a ' + size + ' shirt that says ' + text + '.')
make_shirt('small', '"no bad days"')
make_shirt(text= '"no bad days"', size= 'small') | [
"mtj6@uw.edu"
] | mtj6@uw.edu |
7c9cf96fec5ae82a69dbd79a6ac1f9dca77d98e0 | 7ebac949324b6e22b5a0b0e9c2452e89548a056e | /autocomplete/data_generation.py | ede344e9b2d76847e71609e53b30fb5fd70288de | [
"MIT"
] | permissive | yudhik11/Artify | 69c2cea98cfa565732caca66a358afc76436ae58 | 745d4aa29a368a313bcd1b86f4680a7146a4d790 | refs/heads/master | 2022-04-14T21:20:35.881899 | 2020-03-15T11:29:32 | 2020-03-15T11:29:32 | 247,047,709 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,967 | py | import cv2
import numpy as np
import matplotlib.pyplot as plt
import os
import math
from PIL import Image
from PIL import ImageOps
from tqdm import tqdm
import argparse
parser = argparse.ArgumentParser(description='Input Scribble Directory!')
parser.add_argument('--scribble_dir', type=str, help='Scribble Directory')
parser.add_argument('--how_many_per_size', type=int, default =1, help='Number of images per size')
args = parser.parse_args()
def fill(im,points):
filler = cv2.convexHull(points)
im=cv2.fillConvexPoly(im, filler, (255,255,255))
return im
scribble_dir = args.scribble_dir #'../data/cartoonset10k/scribbles/'
occ_sizes=[64,128,192]
how_many_per_size = args.how_many_per_size
img_size = 256
for scribble_class in os.listdir(scribble_dir):
out_dir = os.path.join(os.getcwd(),'autocomplete',scribble_class)
if not os.path.exists(out_dir):
os.mkdir(out_dir)
for scribble_img in tqdm(os.listdir(os.path.join(scribble_dir,scribble_class))):
scribble_path = os.path.join(scribble_dir,scribble_class,scribble_img)
if not (scribble_path.endswith('.png') or scribble_path.endswith('.jpg')):
continue
img = cv2.imread(scribble_path)
img = cv2.resize(img,(img_size,img_size))
rows,cols,ch = img.shape
img_copy = img.copy()
for occ_size in occ_sizes:
max_start=rows-occ_size
# start in range (0,191)
for i in range(how_many_per_size):
start= np.random.randint(max_start, size=2)
points=np.array(((start[0],start[1]),(start[0]+occ_size,start[1]),(start[0],start[1]+occ_size),(start[0]+occ_size,start[1]+occ_size)))
img=img_copy.copy()
im = fill(img,points)
scribble_img_id = scribble_img.split('.')[0]
save_path=os.path.join(out_dir,scribble_img_id + '_' + str(occ_size) + '_' + str(i)+'.png')
cv2.imwrite(save_path,im)
| [
"yudhik100@gmail.com"
] | yudhik100@gmail.com |
55780545078e76b5427b82bb60b0e3d3965821a7 | 6e74fee5a861b6aa9816419cbbcc1e325bd763e9 | /repomgmt/management/commands/repo-connect-to-node.py | 0e7a85718021b725dcc1847e5df3ddd7e5dba5f2 | [
"Apache-2.0"
] | permissive | kuggaa/python-django-repomgmt | cc4ea29e09cc6ea2b85dfd4666ee11361c79666b | 35fb793c9f9543feeef1e6088ab3c3e6aebcf159 | refs/heads/master | 2020-05-29T17:28:25.658425 | 2013-08-15T11:53:32 | 2013-08-15T11:53:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,002 | py | #
# Copyright 2012 Cisco Systems, Inc.
#
# Author: Soren Hansen <sorhanse@cisco.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from django.core.management.base import BaseCommand
from repomgmt.models import BuildNode
class Command(BaseCommand):
args = '<build_node_name>'
help = 'Connect interactively to the given build node'
def handle(self, build_node_name, **options):
bn = BuildNode.objects.get(name=build_node_name)
bn.interactive_ssh()
| [
"sorhanse@cisco.com"
] | sorhanse@cisco.com |
bc88b7b4b853da1e17e893e1cf5a65046e3bb74a | b3c75530c67b3da5a0f886d7279ecce86c2a15be | /add_media_content.sikuli/add_media_content.py | f8b8c4817249d1a09a534e5685865103eba88148 | [] | no_license | abalanuk-okta/Sikuli_automation | d413644e6216f94bdaa0597e01834ff6f4f73dac | 88cfdf778f81a64298a63e39a73f6fe09b0695bc | refs/heads/master | 2021-03-12T20:28:35.410650 | 2015-05-13T19:34:56 | 2015-05-13T19:34:56 | 33,612,578 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,074 | py | from sikuli.Sikuli import *
import unittest
import HTMLTestRunner
from Config import Config
import Helpers
import base_cases
reload(base_cases)
project_name = Helpers.generate_project_name()
class AddMediaContentTestCase(unittest.TestCase):
def testAddMediaContent(self):
app = base_cases.BaseMyHeritageTestCase()
app.openApp()
app.loginApp()
app.fullScreenApp()
app.addPhotos()
while not exists("1430789093915.png"):
wait(1)
print "New foto was added"
#app.closeApp()
base_cases.BaseMyHeritageTestCase.cleanUserData()
#Config.init()
suite = unittest.TestSuite()
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(AddMediaContentTestCase))
#suite.addTest(unittest.TestLoader().loadTestsFromTestCase(sikuli1.CreatingProjectTest))
outfile = open(Config.get_reports_path() + "/%s.html" % (project_name), "w")
runner = HTMLTestRunner.HTMLTestRunner(stream=outfile, title=' Report Title', description='desc..' )
runner.run(suite)
outfile.close() | [
"abalanuk@lohika.com"
] | abalanuk@lohika.com |
fbd47d76359cf0b1d4f72a8a455910eec8d70a14 | a5e06360397a51a499974c24b587e39ef98b12cb | /4进程/4启动大量子进程.py | 60a761c42f3e330c998f63b6be4377390dda6709 | [] | no_license | songjiabin/PyDemo | 3c89b03f009a6f72813099e61c1a9e2d5d16fb87 | afc2d98521b2d158ef2b54cf887502a3b1568aec | refs/heads/master | 2022-10-29T00:18:31.887522 | 2018-09-12T16:23:09 | 2018-09-12T16:23:09 | 142,775,346 | 0 | 1 | null | 2022-10-22T18:33:14 | 2018-07-29T16:06:41 | Python | UTF-8 | Python | false | false | 689 | py | from multiprocessing import Pool
import os
import time
def run(name):
print("启动子进程%d,id是%s"%(name, str(os.getpid())))
time.sleep(0.5)
print("结束子进程%d,id是%s"%(name, str(os.getpid())))
pass
def main():
print("启动主进程")
# 创建多个进程
# 进程池
# 表示可以同时执行的进程数量 默认为CPU的核数
pool = Pool(2)
for i in range(5):
# 创建进程,放入进程池统一管理
pool.apply_async(run, args=(i,))
# 注意在进行join之前必须先close
# 一旦colse就不能添加新的进程了
pool.close()
pool.join()
if __name__ == '__main__':
main()
| [
"1796254117@qq.com"
] | 1796254117@qq.com |
b298869f7dc5f7a2e2768feabbc8a3758fdcedd7 | 5c2e0fe391f7c720d0a6c117a64f4c8e89fece93 | /research/object_detection/models/faster_rcnn_inception_v2_feature_extractor.py | d2681127b2706faec7433bb8adbcfb619375bd4a | [
"Apache-2.0"
] | permissive | lyltencent/tf_models_v15 | e3bed9dfee42685118b0f3d21bb9de37d58cf500 | 0081dbe36831342051c09a2f94ef9ffa95da0e79 | refs/heads/master | 2022-10-20T20:00:26.594259 | 2020-09-19T05:37:22 | 2020-09-19T05:37:22 | 161,750,047 | 0 | 1 | Apache-2.0 | 2021-03-31T21:04:01 | 2018-12-14T07:47:33 | Python | UTF-8 | Python | false | false | 12,050 | py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Inception V2 Faster R-CNN implementation.
See "Rethinking the Inception Architecture for Computer Vision"
https://arxiv.org/abs/1512.00567
"""
import tensorflow as tf
from object_detection.meta_architectures import faster_rcnn_meta_arch
from nets import inception_v2
slim = tf.contrib.slim
def _batch_norm_arg_scope(list_ops,
use_batch_norm=True,
batch_norm_decay=0.9997,
batch_norm_epsilon=0.001,
batch_norm_scale=False,
train_batch_norm=False):
"""Slim arg scope for InceptionV2 batch norm."""
if use_batch_norm:
batch_norm_params = {
'is_training': train_batch_norm,
'scale': batch_norm_scale,
'decay': batch_norm_decay,
'epsilon': batch_norm_epsilon
}
normalizer_fn = slim.batch_norm
else:
normalizer_fn = None
batch_norm_params = None
return slim.arg_scope(list_ops,
normalizer_fn=normalizer_fn,
normalizer_params=batch_norm_params)
class FasterRCNNInceptionV2FeatureExtractor(
faster_rcnn_meta_arch.FasterRCNNFeatureExtractor):
"""Faster R-CNN Inception V2 feature extractor implementation."""
def __init__(self,
is_training,
first_stage_features_stride,
batch_norm_trainable=False,
reuse_weights=None,
weight_decay=0.0,
depth_multiplier=1.0,
min_depth=16):
"""Constructor.
Args:
is_training: See base class.
first_stage_features_stride: See base class.
batch_norm_trainable: See base class.
reuse_weights: See base class.
weight_decay: See base class.
depth_multiplier: float depth multiplier for feature extractor.
min_depth: minimum feature extractor depth.
Raises:
ValueError: If `first_stage_features_stride` is not 8 or 16.
"""
if first_stage_features_stride != 8 and first_stage_features_stride != 16:
raise ValueError('`first_stage_features_stride` must be 8 or 16.')
self._depth_multiplier = depth_multiplier
self._min_depth = min_depth
super(FasterRCNNInceptionV2FeatureExtractor, self).__init__(
is_training, first_stage_features_stride, batch_norm_trainable,
reuse_weights, weight_decay)
def preprocess(self, resized_inputs):
"""Faster R-CNN Inception V2 preprocessing.
Maps pixel values to the range [-1, 1].
Args:
resized_inputs: a [batch, height, width, channels] float tensor
representing a batch of images.
Returns:
preprocessed_inputs: a [batch, height, width, channels] float tensor
representing a batch of images.
"""
return (2.0 / 255.0) * resized_inputs - 1.0
def _extract_proposal_features(self, preprocessed_inputs, scope):
"""Extracts first stage RPN features.
Args:
preprocessed_inputs: A [batch, height, width, channels] float32 tensor
representing a batch of images.
scope: A scope name.
Returns:
rpn_feature_map: A tensor with shape [batch, height, width, depth]
Raises:
InvalidArgumentError: If the spatial size of `preprocessed_inputs`
(height or width) is less than 33.
ValueError: If the created network is missing the required activation.
"""
preprocessed_inputs.get_shape().assert_has_rank(4)
shape_assert = tf.Assert(
tf.logical_and(tf.greater_equal(tf.shape(preprocessed_inputs)[1], 33),
tf.greater_equal(tf.shape(preprocessed_inputs)[2], 33)),
['image size must at least be 33 in both height and width.'])
with tf.control_dependencies([shape_assert]):
with tf.variable_scope('InceptionV2',
reuse=self._reuse_weights) as scope:
with _batch_norm_arg_scope([slim.conv2d, slim.separable_conv2d],
batch_norm_scale=True,
train_batch_norm=self._train_batch_norm):
_, activations = inception_v2.inception_v2_base(
preprocessed_inputs,
final_endpoint='Mixed_4e',
min_depth=self._min_depth,
depth_multiplier=self._depth_multiplier,
scope=scope)
return activations['Mixed_4e']
def _extract_box_classifier_features(self, proposal_feature_maps, scope):
"""Extracts second stage box classifier features.
Args:
proposal_feature_maps: A 4-D float tensor with shape
[batch_size * self.max_num_proposals, crop_height, crop_width, depth]
representing the feature map cropped to each proposal.
scope: A scope name (unused).
Returns:
proposal_classifier_features: A 4-D float tensor with shape
[batch_size * self.max_num_proposals, height, width, depth]
representing box classifier features for each proposal.
"""
net = proposal_feature_maps
depth = lambda d: max(int(d * self._depth_multiplier), self._min_depth)
trunc_normal = lambda stddev: tf.truncated_normal_initializer(0.0, stddev)
data_format = 'NHWC'
concat_dim = 3 if data_format == 'NHWC' else 1
with tf.variable_scope('InceptionV2', reuse=self._reuse_weights):
with slim.arg_scope(
[slim.conv2d, slim.max_pool2d, slim.avg_pool2d],
stride=1,
padding='SAME',
data_format=data_format):
with _batch_norm_arg_scope([slim.conv2d, slim.separable_conv2d],
batch_norm_scale=True,
train_batch_norm=self._train_batch_norm):
with tf.variable_scope('Mixed_5a'):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(
net, depth(128), [1, 1],
weights_initializer=trunc_normal(0.09),
scope='Conv2d_0a_1x1')
branch_0 = slim.conv2d(branch_0, depth(192), [3, 3], stride=2,
scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(
net, depth(192), [1, 1],
weights_initializer=trunc_normal(0.09),
scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, depth(256), [3, 3],
scope='Conv2d_0b_3x3')
branch_1 = slim.conv2d(branch_1, depth(256), [3, 3], stride=2,
scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_2'):
branch_2 = slim.max_pool2d(net, [3, 3], stride=2,
scope='MaxPool_1a_3x3')
net = tf.concat([branch_0, branch_1, branch_2], concat_dim)
with tf.variable_scope('Mixed_5b'):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, depth(352), [1, 1],
scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(
net, depth(192), [1, 1],
weights_initializer=trunc_normal(0.09),
scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, depth(320), [3, 3],
scope='Conv2d_0b_3x3')
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(
net, depth(160), [1, 1],
weights_initializer=trunc_normal(0.09),
scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(branch_2, depth(224), [3, 3],
scope='Conv2d_0b_3x3')
branch_2 = slim.conv2d(branch_2, depth(224), [3, 3],
scope='Conv2d_0c_3x3')
with tf.variable_scope('Branch_3'):
branch_3 = slim.avg_pool2d(net, [3, 3], scope='AvgPool_0a_3x3')
branch_3 = slim.conv2d(
branch_3, depth(128), [1, 1],
weights_initializer=trunc_normal(0.1),
scope='Conv2d_0b_1x1')
net = tf.concat([branch_0, branch_1, branch_2, branch_3],
concat_dim)
with tf.variable_scope('Mixed_5c'):
with tf.variable_scope('Branch_0'):
branch_0 = slim.conv2d(net, depth(352), [1, 1],
scope='Conv2d_0a_1x1')
with tf.variable_scope('Branch_1'):
branch_1 = slim.conv2d(
net, depth(192), [1, 1],
weights_initializer=trunc_normal(0.09),
scope='Conv2d_0a_1x1')
branch_1 = slim.conv2d(branch_1, depth(320), [3, 3],
scope='Conv2d_0b_3x3')
with tf.variable_scope('Branch_2'):
branch_2 = slim.conv2d(
net, depth(192), [1, 1],
weights_initializer=trunc_normal(0.09),
scope='Conv2d_0a_1x1')
branch_2 = slim.conv2d(branch_2, depth(224), [3, 3],
scope='Conv2d_0b_3x3')
branch_2 = slim.conv2d(branch_2, depth(224), [3, 3],
scope='Conv2d_0c_3x3')
with tf.variable_scope('Branch_3'):
branch_3 = slim.max_pool2d(net, [3, 3], scope='MaxPool_0a_3x3')
branch_3 = slim.conv2d(
branch_3, depth(128), [1, 1],
weights_initializer=trunc_normal(0.1),
scope='Conv2d_0b_1x1')
proposal_classifier_features = tf.concat(
[branch_0, branch_1, branch_2, branch_3], concat_dim)
return proposal_classifier_features
| [
"yxl7245@eng-4150-nix03.main.ad.rit.edu"
] | yxl7245@eng-4150-nix03.main.ad.rit.edu |
b67b6cbba18d5bc6a25ed1226dcfd281f5da559e | 7522593c5e69892f1f8c0c9816c001445d49b9c7 | /python/python/section 5/code/app.py | c417136b3efa9c1ea68e5de253bfa7f2b6c77574 | [] | no_license | omaraham94/My-Learnings | a3241d968d1560a0676fc6c43cca1dda211a7d52 | 719ec5c3557fef936daca9e14ef71ebfc4fcb25a | refs/heads/master | 2023-01-24T17:09:56.682753 | 2020-01-04T20:22:53 | 2020-01-04T20:22:53 | 231,823,541 | 0 | 0 | null | 2023-01-07T13:24:00 | 2020-01-04T20:23:57 | Python | UTF-8 | Python | false | false | 2,625 | py | from flask import Flask, request
from flask_restful import Resource, Api, reqparse
from flask_jwt import JWT, jwt_required
from security import authenticate, identity
# JWT - stands Json web token. It is used for authentication of data.
app = Flask(__name__)
app.secret_key = 'jose';
# Api works with resources and every Resource has to be a class.
api = Api(app)
jwt = JWT(app, authenticate, identity) # /auth
items = []
print(type(reqparse))
# Here we dont need to do jsonify as Flask-Restful does it for us.
class Item(Resource):
@jwt_required()
def get(self, name): # this is the get of the http protocol
item = next(filter(lambda x: x["name"] == name, items), None) # next gives the items that match in the iterator
# if more than 1 item match, calling next again gives the next
# item. But if none of the items match, it will return an error
# hence , None is given.
return {"item":item}, 200 if item else 404
# not all apis will accept something other than dictionary.
# since this is an error make sure to add 404 other wise 200 will be reported.
# 404 page not found
# 200 success
# 401 => unauthorized
def post(self, name):
if next(filter(lambda x: x["name"] == name, items), None) is not None:
return {"message" : "item " + name + " already exists"}, 400
# 400 is bad request
data = request.get_json(); # force=true => the data is accepted even though it is not in json
# and silent=true => if the data is not in json, it doesnot do any thing, just
# returns None.
item = {'name':name,
'price' : data["price"]}
items.append(item, )
return item, 201 # 201 stands for created
def delete(self, name) :
global items
items = list(filter(lambda x: x["name"] != name, items))
return {"message" : "items deleted"}
def put(self, name) :
data = request.get_json()
item = next(filter(lambda x : x['name'] == name, items), None)
if item is None:
item = {"name" : name, 'price' : data.price}
items.append(item)
else:
item.update(data)
return item
class ItemList(Resource):
def get(self):
return {'items' : items}
api.add_resource(Item, '/item/<string:name>') # http://127.0.0.1:5000/student/Atul
api.add_resource(ItemList, '/items')
app.run(port=5000, debug=False) # debug = true -> gives lot of info as to where the error is pointing to. | [
"Atul.Golchha@go-mmt.com"
] | Atul.Golchha@go-mmt.com |
b6c8582e27830c87d8baddbf1ebed41b4789d50e | ca55dcaa64ea9db4068e13091321cfebecc0ff41 | /codeUp/codeUp100/1022.py | ce1b4c02ce9f4e7fb71c474a37ec6ab7fc90b694 | [] | no_license | gomtinQQ/algorithm-python | 8fb8343594b945099ae2a4dfa794ecb47e54ab0b | 751562922b66e335f621d366bb73dacdc7125140 | refs/heads/master | 2022-12-07T23:05:44.535593 | 2020-08-21T12:29:58 | 2020-08-21T12:29:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 219 | py | '''
1022 : [기초-입출력] 문장 1개 입력받아 그대로 출력하기(설명)
공백 문자가 포함되어 있는 문장을 입력받고 그대로 출력하는 연습을 해보자.
'''
str = input()
print(str) | [
"minhyeonlee1@gmail.com"
] | minhyeonlee1@gmail.com |
83f702f40210def83db43b117c01fb32c0afec26 | f0f4a0f24b3a7cc8bf0366cf329923e9bd5b00c7 | /activity/activity_DepositDigestIngestAssets.py | 515d8398361d40ebc252d28d7bed3993d5a6e601 | [
"MIT"
] | permissive | elifesciences/elife-bot | 45c79993d13bacb37f59ba57462179dd7c6f1e2e | 2324e26943f805a0602ea3251ff0f6a5db27f1a0 | refs/heads/develop | 2023-08-17T15:25:42.170870 | 2023-08-14T16:47:02 | 2023-08-14T16:47:02 | 7,503,542 | 21 | 10 | MIT | 2023-09-07T19:50:30 | 2013-01-08T15:09:54 | Python | UTF-8 | Python | false | false | 4,579 | py | import os
import json
from S3utility.s3_notification_info import parse_activity_data
from provider.storage_provider import storage_context
from provider import digest_provider, download_helper
import provider.utils as utils
from activity.objects import Activity
"""
DepositDigestIngestAssets.py activity
"""
class activity_DepositDigestIngestAssets(Activity):
def __init__(self, settings, logger, client=None, token=None, activity_task=None):
super(activity_DepositDigestIngestAssets, self).__init__(
settings, logger, client, token, activity_task
)
self.name = "DepositDigestIngestAssets"
self.pretty_name = "Deposit Digest Ingest Assets"
self.version = "1"
self.default_task_heartbeat_timeout = 30
self.default_task_schedule_to_close_timeout = 60 * 5
self.default_task_schedule_to_start_timeout = 30
self.default_task_start_to_close_timeout = 60 * 5
self.description = "Deposit Assets for a Digest (Pre-Ingest)"
# Track some values
self.input_file = None
self.digest = None
self.dest_resource = None
# Local directory settings
self.directories = {
"TEMP_DIR": os.path.join(self.get_tmp_dir(), "tmp_dir"),
"INPUT_DIR": os.path.join(self.get_tmp_dir(), "input_dir"),
}
# Track the success of some steps
self.build_status = None
def do_activity(self, data=None):
"do the work"
if self.logger:
self.logger.info("data: %s" % json.dumps(data, sort_keys=True, indent=4))
# Create output directories
self.make_activity_directories()
# parse the data with the digest_provider
real_filename, bucket_name, bucket_folder = parse_activity_data(data)
# Download from S3
self.input_file = download_helper.download_file_from_s3(
self.settings,
real_filename,
bucket_name,
bucket_folder,
self.directories.get("INPUT_DIR"),
)
# Parse input and build digest
digest_config = digest_provider.digest_config(
self.settings.digest_config_section, self.settings.digest_config_file
)
self.build_status, self.digest = digest_provider.build_digest(
self.input_file,
self.directories.get("TEMP_DIR"),
self.logger,
digest_config,
)
if not self.build_status:
self.logger.info(
"Failed to build the Digest in Deposit Digest Ingest Assets for %s",
real_filename,
)
return self.ACTIVITY_PERMANENT_FAILURE
# check if there is an image and if not return True
if not digest_provider.has_image(self.digest):
self.logger.info(
"Digest for file %s has no images to deposit", real_filename
)
return self.ACTIVITY_SUCCESS
# bucket name
cdn_bucket_name = (
self.settings.publishing_buckets_prefix + self.settings.digest_cdn_bucket
)
# deposit the image file to S3
self.deposit_digest_image(self.digest, cdn_bucket_name)
return self.ACTIVITY_SUCCESS
def image_dest_resource(self, digest, cdn_bucket_name):
"concatenate the S3 bucket object path we copy the file to"
msid = utils.msid_from_doi(digest.doi)
article_id = utils.pad_msid(msid)
# file name from the digest image file
file_name = digest.image.file.split(os.sep)[-1]
new_file_name = digest_provider.new_file_name(file_name, msid)
storage_provider = self.settings.storage_provider + "://"
dest_resource = (
storage_provider + cdn_bucket_name + "/" + article_id + "/" + new_file_name
)
return dest_resource
    def deposit_digest_image(self, digest, cdn_bucket_name):
        """Deposit the image file from the digest to the bucket.

        Side effect: stores the computed destination path on
        ``self.dest_resource`` before uploading.

        :param digest: digest object whose ``image.file`` is uploaded
        :param cdn_bucket_name: destination bucket name
        :return: True unconditionally; upload failures propagate as
            exceptions from the storage provider
        """
        self.dest_resource = self.image_dest_resource(digest, cdn_bucket_name)
        storage = storage_context(self.settings)
        self.logger.info("Depositing digest image to S3 key %s", self.dest_resource)
        # set the bucket object resource from the local file
        metadata = {"ContentType": utils.content_type_from_file_name(digest.image.file)}
        storage.set_resource_from_filename(
            self.dest_resource, digest.image.file, metadata
        )
        self.logger.info("Deposited digest image %s to S3", digest.image.file)
        return True
| [
"gnott@starglobal.ca"
] | gnott@starglobal.ca |
bd6a7d150cf3eb9fac42f5a543f377ad8356ad67 | 27691e5ef8e49fb29189b01dd76a1dc3720e7ae8 | /AC/ABC-TDD/180/c.py | 76f7581f37b0ab7ec2b1fda1f0887f7b32dc1463 | [] | no_license | oshou/procon | 61e5f5bc819e0fe5ab29749fc2f894fe6f3b1d07 | 3d000c64b5917c65b51ed7da5b90cb79892d5909 | refs/heads/master | 2023-05-10T23:56:50.861468 | 2021-09-23T06:07:29 | 2021-09-23T06:07:29 | 116,886,484 | 1 | 0 | null | 2023-05-05T02:28:41 | 2018-01-10T00:21:38 | Go | UTF-8 | Python | false | false | 257 | py | n = int(input())
# Collect every divisor of n in O(sqrt(n)) time: each i <= sqrt(n) that
# divides n contributes the pair (i, n // i).  (The original kept an unused
# `counts = len(ans)` local, removed here.)
divisors = []
i = 1
while i * i <= n:
    if n % i == 0:
        divisors.append(i)
        if i != n // i:
            divisors.append(n // i)
    i += 1
# Print the divisors in ascending order, one per line.
for num in sorted(divisors):
    print(num)
| [
"adf1985adf@gmail.com"
] | adf1985adf@gmail.com |
ce70b53036c39b6bde2864a036057fe984e141ac | c8e2f350b54acb24b599e37d012696d8a97f7d08 | /env/env/lib/python3.8/site-packages/celery/backends/redis.py | 8904ee0bca5947c5e324666ff5f05fcb3a9449e2 | [] | no_license | nargo0o/geekshop | 9c2dc00e2d91dd3671975a61ea21b9246015f311 | c30837ea4d2ff699a633bff8f5f2f55f03bdde6f | refs/heads/master | 2023-06-22T10:44:53.514629 | 2021-07-18T14:38:14 | 2021-07-18T14:38:14 | 379,321,818 | 1 | 0 | null | 2021-07-13T14:56:22 | 2021-06-22T15:43:50 | Python | UTF-8 | Python | false | false | 25,877 | py | """Redis result store backend."""
import time
from contextlib import contextmanager
from functools import partial
from ssl import CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED
from urllib.parse import unquote
from kombu.utils.functional import retry_over_time
from kombu.utils.objects import cached_property
from kombu.utils.url import _parse_url, maybe_sanitize_url
from celery import states
from celery._state import task_join_will_block
from celery.canvas import maybe_signature
from celery.exceptions import (BackendStoreError, ChordError,
ImproperlyConfigured)
from celery.result import GroupResult, allow_join_result
from celery.utils.functional import _regen, dictfilter
from celery.utils.log import get_logger
from celery.utils.time import humanize_seconds
from .asynchronous import AsyncBackendMixin, BaseResultConsumer
from .base import BaseKeyValueStoreBackend
try:
import redis.connection
from kombu.transport.redis import get_redis_error_classes
except ImportError: # pragma: no cover
redis = None # noqa
get_redis_error_classes = None # noqa
try:
import redis.sentinel
except ImportError:
pass
__all__ = ('RedisBackend', 'SentinelBackend')
E_REDIS_MISSING = """
You need to install the redis library in order to use \
the Redis result store backend.
"""
E_REDIS_SENTINEL_MISSING = """
You need to install the redis library with support of \
sentinel in order to use the Redis result store backend.
"""
W_REDIS_SSL_CERT_OPTIONAL = """
Setting ssl_cert_reqs=CERT_OPTIONAL when connecting to redis means that \
celery might not validate the identity of the redis broker when connecting. \
This leaves you vulnerable to man in the middle attacks.
"""
W_REDIS_SSL_CERT_NONE = """
Setting ssl_cert_reqs=CERT_NONE when connecting to redis means that celery \
will not validate the identity of the redis broker when connecting. This \
leaves you vulnerable to man in the middle attacks.
"""
E_REDIS_SSL_PARAMS_AND_SCHEME_MISMATCH = """
SSL connection parameters have been provided but the specified URL scheme \
is redis://. A Redis SSL connection URL should use the scheme rediss://.
"""
E_REDIS_SSL_CERT_REQS_MISSING_INVALID = """
A rediss:// URL must have parameter ssl_cert_reqs and this must be set to \
CERT_REQUIRED, CERT_OPTIONAL, or CERT_NONE
"""
E_LOST = 'Connection to Redis lost: Retry (%s/%s) %s.'
E_RETRY_LIMIT_EXCEEDED = """
Retry limit exceeded while trying to reconnect to the Celery redis result \
store backend. The Celery application must be restarted.
"""
logger = get_logger(__name__)
class ResultConsumer(BaseResultConsumer):
    """Consume task results pushed over Redis pub/sub.

    Keeps one pub/sub connection subscribed to the result key of every
    pending task, and replays missed state changes after a reconnect.
    """

    #: lazily-created redis pub/sub handle (None until first use)
    _pubsub = None

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # cache backend callables/attributes used on every message
        self._get_key_for_task = self.backend.get_key_for_task
        self._decode_result = self.backend.decode_result
        self._ensure = self.backend.ensure
        self._connection_errors = self.backend.connection_errors
        # set of result keys (bytes/str) we are currently subscribed to
        self.subscribed_to = set()

    def on_after_fork(self):
        """Reset the inherited connection pool after a process fork."""
        try:
            self.backend.client.connection_pool.reset()
            if self._pubsub is not None:
                self._pubsub.close()
        except KeyError as e:
            logger.warning(str(e))
        super().on_after_fork()

    def _reconnect_pubsub(self):
        """Rebuild the pub/sub connection and resubscribe after an outage."""
        self._pubsub = None
        self.backend.client.connection_pool.reset()
        # task state might have changed when the connection was down so we
        # retrieve meta for all subscribed tasks before going into pubsub mode
        metas = self.backend.client.mget(self.subscribed_to)
        metas = [meta for meta in metas if meta]
        for meta in metas:
            self.on_state_change(self._decode_result(meta), None)
        self._pubsub = self.backend.client.pubsub(
            ignore_subscribe_messages=True,
        )
        self._pubsub.subscribe(*self.subscribed_to)

    @contextmanager
    def reconnect_on_error(self):
        """Context manager retrying a pub/sub reconnect on connection errors."""
        try:
            yield
        except self._connection_errors:
            try:
                self._ensure(self._reconnect_pubsub, ())
            except self._connection_errors:
                # retries exhausted; the app must be restarted
                logger.critical(E_RETRY_LIMIT_EXCEEDED)
                raise

    def _maybe_cancel_ready_task(self, meta):
        # once a task reaches a ready state there will be no more updates,
        # so drop its subscription
        if meta['status'] in states.READY_STATES:
            self.cancel_for(meta['task_id'])

    def on_state_change(self, meta, message):
        super().on_state_change(meta, message)
        self._maybe_cancel_ready_task(meta)

    def start(self, initial_task_id, **kwargs):
        """Create the pub/sub handle and subscribe to the first task."""
        self._pubsub = self.backend.client.pubsub(
            ignore_subscribe_messages=True,
        )
        self._consume_from(initial_task_id)

    def on_wait_for_pending(self, result, **kwargs):
        # replay any meta already stored so waiters see current state
        for meta in result._iter_meta(**kwargs):
            if meta is not None:
                self.on_state_change(meta, None)

    def stop(self):
        if self._pubsub is not None:
            self._pubsub.close()

    def drain_events(self, timeout=None):
        """Poll the pub/sub connection for one message (or sleep)."""
        if self._pubsub:
            with self.reconnect_on_error():
                message = self._pubsub.get_message(timeout=timeout)
                if message and message['type'] == 'message':
                    self.on_state_change(self._decode_result(message['data']), message)
        elif timeout:
            # no pub/sub connection yet: just honour the timeout
            time.sleep(timeout)

    def consume_from(self, task_id):
        if self._pubsub is None:
            return self.start(task_id)
        self._consume_from(task_id)

    def _consume_from(self, task_id):
        key = self._get_key_for_task(task_id)
        if key not in self.subscribed_to:
            self.subscribed_to.add(key)
            with self.reconnect_on_error():
                self._pubsub.subscribe(key)

    def cancel_for(self, task_id):
        """Unsubscribe from a task's result key."""
        key = self._get_key_for_task(task_id)
        self.subscribed_to.discard(key)
        if self._pubsub:
            with self.reconnect_on_error():
                self._pubsub.unsubscribe(key)
class RedisBackend(BaseKeyValueStoreBackend, AsyncBackendMixin):
    """Redis task result store.

    It makes use of the following commands:
    GET, MGET, DEL, INCRBY, EXPIRE, SET, SETEX
    """

    ResultConsumer = ResultConsumer

    #: :pypi:`redis` client module.
    redis = redis
    connection_class_ssl = redis.SSLConnection if redis else None

    #: Maximum number of connections in the pool.
    max_connections = None

    supports_autoexpire = True
    supports_native_join = True

    #: Maximal length of string value in Redis.
    #: 512 MB - https://redis.io/topics/data-types
    _MAX_STR_VALUE_SIZE = 536870912

    def __init__(self, host=None, port=None, db=None, password=None,
                 max_connections=None, url=None,
                 connection_pool=None, **kwargs):
        """Build connection parameters from app config, kwargs and ``url``.

        Precedence (lowest to highest): defaults, ``redis_*`` app settings,
        the ``redis_backend_use_ssl`` dict, then the URL and its query string.
        """
        super().__init__(expires_type=int, **kwargs)
        _get = self.app.conf.get
        if self.redis is None:
            raise ImproperlyConfigured(E_REDIS_MISSING.strip())

        # a "host" that is really a URL takes the URL code path below
        if host and '://' in host:
            url, host = host, None

        self.max_connections = (
            max_connections or
            _get('redis_max_connections') or
            self.max_connections)
        self._ConnectionPool = connection_pool

        socket_timeout = _get('redis_socket_timeout')
        socket_connect_timeout = _get('redis_socket_connect_timeout')
        retry_on_timeout = _get('redis_retry_on_timeout')
        socket_keepalive = _get('redis_socket_keepalive')
        health_check_interval = _get('redis_backend_health_check_interval')

        self.connparams = {
            'host': _get('redis_host') or 'localhost',
            'port': _get('redis_port') or 6379,
            'db': _get('redis_db') or 0,
            'password': _get('redis_password'),
            'max_connections': self.max_connections,
            'socket_timeout': socket_timeout and float(socket_timeout),
            'retry_on_timeout': retry_on_timeout or False,
            'socket_connect_timeout':
                socket_connect_timeout and float(socket_connect_timeout),
        }

        username = _get('redis_username')
        if username:
            # We're extra careful to avoid including this configuration value
            # if it wasn't specified since older versions of py-redis
            # don't support specifying a username.
            # Only Redis>6.0 supports username/password authentication.

            # TODO: Include this in connparams' definition once we drop
            #       support for py-redis<3.4.0.
            self.connparams['username'] = username

        if health_check_interval:
            self.connparams["health_check_interval"] = health_check_interval

        # absent in redis.connection.UnixDomainSocketConnection
        if socket_keepalive:
            self.connparams['socket_keepalive'] = socket_keepalive

        # "redis_backend_use_ssl" must be a dict with the keys:
        # 'ssl_cert_reqs', 'ssl_ca_certs', 'ssl_certfile', 'ssl_keyfile'
        # (the same as "broker_use_ssl")
        ssl = _get('redis_backend_use_ssl')
        if ssl:
            self.connparams.update(ssl)
            self.connparams['connection_class'] = self.connection_class_ssl

        if url:
            self.connparams = self._params_from_url(url, self.connparams)

        # If we've received SSL parameters via query string or the
        # redis_backend_use_ssl dict, check ssl_cert_reqs is valid. If set
        # via query string ssl_cert_reqs will be a string so convert it here
        if ('connection_class' in self.connparams and
                issubclass(self.connparams['connection_class'], redis.SSLConnection)):
            ssl_cert_reqs_missing = 'MISSING'
            ssl_string_to_constant = {'CERT_REQUIRED': CERT_REQUIRED,
                                      'CERT_OPTIONAL': CERT_OPTIONAL,
                                      'CERT_NONE': CERT_NONE,
                                      'required': CERT_REQUIRED,
                                      'optional': CERT_OPTIONAL,
                                      'none': CERT_NONE}
            ssl_cert_reqs = self.connparams.get('ssl_cert_reqs', ssl_cert_reqs_missing)
            ssl_cert_reqs = ssl_string_to_constant.get(ssl_cert_reqs, ssl_cert_reqs)
            if ssl_cert_reqs not in ssl_string_to_constant.values():
                raise ValueError(E_REDIS_SSL_CERT_REQS_MISSING_INVALID)

            # insecure cert policies are allowed but loudly warned about
            if ssl_cert_reqs == CERT_OPTIONAL:
                logger.warning(W_REDIS_SSL_CERT_OPTIONAL)
            elif ssl_cert_reqs == CERT_NONE:
                logger.warning(W_REDIS_SSL_CERT_NONE)
            self.connparams['ssl_cert_reqs'] = ssl_cert_reqs

        self.url = url

        self.connection_errors, self.channel_errors = (
            get_redis_error_classes() if get_redis_error_classes
            else ((), ()))
        self.result_consumer = self.ResultConsumer(
            self, self.app, self.accept,
            self._pending_results, self._pending_messages,
        )

    def _params_from_url(self, url, defaults):
        """Merge connection parameters parsed from ``url`` over ``defaults``."""
        scheme, host, port, username, password, path, query = _parse_url(url)
        connparams = dict(
            defaults, **dictfilter({
                'host': host, 'port': port, 'username': username,
                'password': password, 'db': query.pop('virtual_host', None)})
        )

        if scheme == 'socket':
            # use 'path' as path to the socket… in this case
            # the database number should be given in 'query'
            connparams.update({
                'connection_class': self.redis.UnixDomainSocketConnection,
                'path': '/' + path,
            })

            # host+port are invalid options when using this connection type.
            connparams.pop('host', None)
            connparams.pop('port', None)
            connparams.pop('socket_connect_timeout')
        else:
            connparams['db'] = path

        ssl_param_keys = ['ssl_ca_certs', 'ssl_certfile', 'ssl_keyfile',
                          'ssl_cert_reqs']

        if scheme == 'redis':
            # If connparams or query string contain ssl params, raise error
            if (any(key in connparams for key in ssl_param_keys) or
                    any(key in query for key in ssl_param_keys)):
                raise ValueError(E_REDIS_SSL_PARAMS_AND_SCHEME_MISMATCH)

        if scheme == 'rediss':
            connparams['connection_class'] = redis.SSLConnection
            # The following parameters, if present in the URL, are encoded. We
            # must add the decoded values to connparams.
            for ssl_setting in ssl_param_keys:
                ssl_val = query.pop(ssl_setting, None)
                if ssl_val:
                    connparams[ssl_setting] = unquote(ssl_val)

        # db may be string and start with / like in kombu.
        db = connparams.get('db') or 0
        db = db.strip('/') if isinstance(db, str) else db
        connparams['db'] = int(db)

        for key, value in query.items():
            if key in redis.connection.URL_QUERY_ARGUMENT_PARSERS:
                query[key] = redis.connection.URL_QUERY_ARGUMENT_PARSERS[key](
                    value
                )

        # Query parameters override other parameters
        connparams.update(query)
        return connparams

    @cached_property
    def retry_policy(self):
        """Retry policy, optionally overridden via transport options."""
        retry_policy = super().retry_policy
        if "retry_policy" in self._transport_options:
            retry_policy = retry_policy.copy()
            retry_policy.update(self._transport_options['retry_policy'])

        return retry_policy

    def on_task_call(self, producer, task_id):
        # subscribe eagerly unless joining would block this process anyway
        if not task_join_will_block():
            self.result_consumer.consume_from(task_id)

    def get(self, key):
        return self.client.get(key)

    def mget(self, keys):
        return self.client.mget(keys)

    def ensure(self, fun, args, **policy):
        """Call ``fun(*args)`` retrying on connection errors per policy."""
        retry_policy = dict(self.retry_policy, **policy)
        max_retries = retry_policy.get('max_retries')
        return retry_over_time(
            fun, self.connection_errors, args, {},
            partial(self.on_connection_error, max_retries),
            **retry_policy)

    def on_connection_error(self, max_retries, exc, intervals, retries):
        """Log a lost connection and return the time to sleep before retry."""
        tts = next(intervals)
        logger.error(
            E_LOST.strip(),
            retries, max_retries or 'Inf', humanize_seconds(tts, 'in '))
        return tts

    def set(self, key, value, **retry_policy):
        """Store ``value`` at ``key``, guarding against Redis' 512 MB limit."""
        if isinstance(value, str) and len(value) > self._MAX_STR_VALUE_SIZE:
            raise BackendStoreError('value too large for Redis backend')
        return self.ensure(self._set, (key, value), **retry_policy)

    def _set(self, key, value):
        # SET/SETEX and PUBLISH atomically: pub/sub consumers get the update
        # while polling consumers read the stored key
        with self.client.pipeline() as pipe:
            if self.expires:
                pipe.setex(key, self.expires, value)
            else:
                pipe.set(key, value)
            pipe.publish(key, value)
            pipe.execute()

    def forget(self, task_id):
        super().forget(task_id)
        self.result_consumer.cancel_for(task_id)

    def delete(self, key):
        self.client.delete(key)

    def incr(self, key):
        return self.client.incr(key)

    def expire(self, key, value):
        return self.client.expire(key, value)

    def add_to_chord(self, group_id, result):
        # bump the group's total-count key ('.t') for this extra member
        self.client.incr(self.get_key_for_group(group_id, '.t'), 1)

    def _unpack_chord_result(self, tup, decode,
                             EXCEPTION_STATES=states.EXCEPTION_STATES,
                             PROPAGATE_STATES=states.PROPAGATE_STATES):
        """Decode one stashed chord member result, raising on failed deps."""
        _, tid, state, retval = decode(tup)
        if state in EXCEPTION_STATES:
            retval = self.exception_to_python(retval)
        if state in PROPAGATE_STATES:
            raise ChordError(f'Dependency {tid} raised {retval!r}')
        return retval

    def set_chord_size(self, group_id, chord_size):
        # '.s' key holds the expected number of chord header results
        self.set(self.get_key_for_group(group_id, '.s'), chord_size)

    def apply_chord(self, header_result_args, body, **kwargs):
        # If any of the child results of this chord are complex (ie. group
        # results themselves), we need to save `header_result` to ensure that
        # the expected structure is retained when we finish the chord and pass
        # the results onward to the body in `on_chord_part_return()`. We don't
        # do this is all cases to retain an optimisation in the common case
        # where a chord header is comprised of simple result objects.
        if not isinstance(header_result_args[1], _regen):
            header_result = self.app.GroupResult(*header_result_args)
            if any(isinstance(nr, GroupResult) for nr in header_result.results):
                header_result.save(backend=self)

    @cached_property
    def _chord_zset(self):
        # ordered (sorted-set) chord storage is the default; a plain list is
        # used when 'result_chord_ordered' is set to False
        return self._transport_options.get('result_chord_ordered', True)

    @cached_property
    def _transport_options(self):
        return self.app.conf.get('result_backend_transport_options', {})

    def on_chord_part_return(self, request, state, result,
                             propagate=None, **kwargs):
        """Record one finished chord header task; fire the body when all done.

        Keys per group: '.j' holds the member results (zset or list),
        '.t' the count adjustment from ``add_to_chord``, '.s' the expected
        chord size.  All three are deleted once the callback is dispatched.
        """
        app = self.app
        tid, gid, group_index = request.id, request.group, request.group_index
        if not gid or not tid:
            return
        if group_index is None:
            group_index = '+inf'

        client = self.client
        jkey = self.get_key_for_group(gid, '.j')
        tkey = self.get_key_for_group(gid, '.t')
        skey = self.get_key_for_group(gid, '.s')
        result = self.encode_result(result, state)
        encoded = self.encode([1, tid, state, result])
        with client.pipeline() as pipe:
            pipeline = (
                pipe.zadd(jkey, {encoded: group_index}).zcount(jkey, "-inf", "+inf")
                if self._chord_zset
                else pipe.rpush(jkey, encoded).llen(jkey)
            ).get(tkey).get(skey)
            if self.expires:
                pipeline = pipeline \
                    .expire(jkey, self.expires) \
                    .expire(tkey, self.expires) \
                    .expire(skey, self.expires)
            _, readycount, totaldiff, chord_size_bytes = pipeline.execute()[:4]

        totaldiff = int(totaldiff or 0)

        if chord_size_bytes:
            try:
                callback = maybe_signature(request.chord, app=app)
                total = int(chord_size_bytes) + totaldiff
                if readycount == total:
                    header_result = GroupResult.restore(gid)
                    if header_result is not None:
                        # If we manage to restore a `GroupResult`, then it must
                        # have been complex and saved by `apply_chord()` earlier.
                        #
                        # Before we can join the `GroupResult`, it needs to be
                        # manually marked as ready to avoid blocking
                        header_result.on_ready()
                        # We'll `join()` it to get the results and ensure they are
                        # structured as intended rather than the flattened version
                        # we'd construct without any other information.
                        join_func = (
                            header_result.join_native
                            if header_result.supports_native_join
                            else header_result.join
                        )
                        with allow_join_result():
                            resl = join_func(
                                timeout=app.conf.result_chord_join_timeout,
                                propagate=True
                            )
                    else:
                        # Otherwise simply extract and decode the results we
                        # stashed along the way, which should be faster for large
                        # numbers of simple results in the chord header.
                        decode, unpack = self.decode, self._unpack_chord_result
                        with client.pipeline() as pipe:
                            if self._chord_zset:
                                pipeline = pipe.zrange(jkey, 0, -1)
                            else:
                                pipeline = pipe.lrange(jkey, 0, total)
                            resl, = pipeline.execute()
                        resl = [unpack(tup, decode) for tup in resl]
                    try:
                        callback.delay(resl)
                    except Exception as exc:  # pylint: disable=broad-except
                        logger.exception(
                            'Chord callback for %r raised: %r', request.group, exc)
                        return self.chord_error_from_stack(
                            callback,
                            ChordError(f'Callback error: {exc!r}'),
                        )
                    finally:
                        # clean up the bookkeeping keys whether or not the
                        # callback dispatch succeeded
                        with client.pipeline() as pipe:
                            pipe \
                                .delete(jkey) \
                                .delete(tkey) \
                                .delete(skey) \
                                .execute()
            except ChordError as exc:
                logger.exception('Chord %r raised: %r', request.group, exc)
                return self.chord_error_from_stack(callback, exc)
            except Exception as exc:  # pylint: disable=broad-except
                logger.exception('Chord %r raised: %r', request.group, exc)
                return self.chord_error_from_stack(
                    callback,
                    ChordError(f'Join error: {exc!r}'),
                )

    def _create_client(self, **params):
        return self._get_client()(
            connection_pool=self._get_pool(**params),
        )

    def _get_client(self):
        return self.redis.StrictRedis

    def _get_pool(self, **params):
        return self.ConnectionPool(**params)

    @property
    def ConnectionPool(self):
        # falls back to the library default unless one was injected in __init__
        if self._ConnectionPool is None:
            self._ConnectionPool = self.redis.ConnectionPool
        return self._ConnectionPool

    @cached_property
    def client(self):
        return self._create_client(**self.connparams)

    def __reduce__(self, args=(), kwargs=None):
        # pickle only the URL and expiry; the client is rebuilt on unpickle
        kwargs = {} if not kwargs else kwargs
        return super().__reduce__(
            (self.url,), {'expires': self.expires},
        )
# Define the Sentinel+TLS connection class only when the installed redis-py
# exposes the optional ``redis.sentinel`` module (imported best-effort above).
if getattr(redis, "sentinel", None):

    class SentinelManagedSSLConnection(
            redis.sentinel.SentinelManagedConnection,
            redis.SSLConnection):
        """Connect to a Redis server using Sentinel + TLS.

        Use Sentinel to identify which Redis server is the current master
        to connect to and when connecting to the Master server, use an
        SSL Connection.
        """
class SentinelBackend(RedisBackend):
    """Redis sentinel task result store."""

    # URL looks like `sentinel://0.0.0.0:26347/3;sentinel://0.0.0.0:26348/3`
    _SERVER_URI_SEPARATOR = ";"

    sentinel = getattr(redis, "sentinel", None)
    connection_class_ssl = SentinelManagedSSLConnection if sentinel else None

    def __init__(self, *args, **kwargs):
        if self.sentinel is None:
            raise ImproperlyConfigured(E_REDIS_SENTINEL_MISSING.strip())

        super().__init__(*args, **kwargs)

    def as_uri(self, include_password=False):
        """
        Return the server addresses as URIs, sanitizing the password or not.
        """
        # Allow superclass to do work if we don't need to force sanitization
        if include_password:
            return super().as_uri(
                include_password=include_password,
            )
        # Otherwise we need to ensure that all components get sanitized rather
        # by passing them one by one to the `kombu` helper
        uri_chunks = (
            maybe_sanitize_url(chunk)
            for chunk in (self.url or "").split(self._SERVER_URI_SEPARATOR)
        )
        # Similar to the superclass, strip the trailing slash from URIs with
        # all components empty other than the scheme
        return self._SERVER_URI_SEPARATOR.join(
            uri[:-1] if uri.endswith(":///") else uri
            for uri in uri_chunks
        )

    def _params_from_url(self, url, defaults):
        """Parse each ';'-separated sentinel URL into a ``hosts`` list."""
        chunks = url.split(self._SERVER_URI_SEPARATOR)
        connparams = dict(defaults, hosts=[])
        for chunk in chunks:
            data = super()._params_from_url(
                url=chunk, defaults=defaults)
            connparams['hosts'].append(data)
        # per-host values replace the scalar connection parameters
        for param in ("host", "port", "db", "password"):
            connparams.pop(param)

        # Adding db/password in connparams to connect to the correct instance
        for param in ("db", "password"):
            if connparams['hosts'] and param in connparams['hosts'][0]:
                connparams[param] = connparams['hosts'][0].get(param)
        return connparams

    def _get_sentinel_instance(self, **params):
        """Create a ``Sentinel`` instance from the parsed host list."""
        connparams = params.copy()

        hosts = connparams.pop("hosts")
        min_other_sentinels = self._transport_options.get("min_other_sentinels", 0)
        sentinel_kwargs = self._transport_options.get("sentinel_kwargs", {})

        sentinel_instance = self.sentinel.Sentinel(
            [(cp['host'], cp['port']) for cp in hosts],
            min_other_sentinels=min_other_sentinels,
            sentinel_kwargs=sentinel_kwargs,
            **connparams)

        return sentinel_instance

    def _get_pool(self, **params):
        """Return the connection pool of the current master for master_name."""
        sentinel_instance = self._get_sentinel_instance(**params)

        master_name = self._transport_options.get("master_name", None)

        return sentinel_instance.master_for(
            service_name=master_name,
            redis_class=self._get_client(),
        ).connection_pool
| [
"nargo4535@gmail.com"
] | nargo4535@gmail.com |
cc3f0e7308c542e5a31f837c3e3e8816120e76ef | 9df4cce1240c298e899ca836e36cf7419d9256bb | /myname.py | 1976902d70a3dc698e3cbf8411e91c0e927e1718 | [] | no_license | anshulshakya/PracticeRepo | 4753525ca7e6dffa2321d3b659e9166167c04f50 | 75201c7c11109325645a5f89db06fa552b07ffa3 | refs/heads/master | 2023-03-06T16:55:36.214710 | 2021-02-02T20:09:34 | 2021-02-02T20:09:34 | 259,899,074 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10 | py | print(2+6) | [
"an1999shakya@gmail.com"
] | an1999shakya@gmail.com |
315ab7aa2ef9d0579f0d045b6dfb17919ba8530a | c741f04141784a2571d2d27d95e0d994e4584ab1 | /learning/py3/0-1/21-模块-包-4.py | d70f489fbe8852df7919744087de49fb955d0899 | [] | no_license | haodonghui/python | bbdece136620bc6f787b4942d6e1760ed808afd4 | 365062ba54297c81093b7f378742e76d438658b7 | refs/heads/master | 2022-02-03T23:52:37.288503 | 2022-01-27T05:23:25 | 2022-01-27T05:23:25 | 191,729,797 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,361 | py | from package1 import *
p.prt(4, '从一个包中导入*')
'''
4、
设想一下,如果我们使用 from sound.effects import *会发生什么
Python 会进入文件系统,找到这个包里面所有的子模块,一个一个的把它们都导入进来。
Windows是一个大小写不区分的系统。
在这类平台上,没有人敢担保一个叫做 ECHO.py 的文件导入为模块 echo 还是 Echo 甚至 ECHO。
为了解决这个问题,只能烦劳包作者提供一个精确的包的索引了。
导入语句遵循如下规则:
如果包定义文件 __init__.py 存在一个叫做 __all__ 的列表变量,
那么在使用 from package import * 的时候就把这个列表中的所有名字作为包内容导入。
作为包的作者,可别忘了在更新包之后保证 __all__ 也更新了啊。你说我就不这么做,我就不使用导入*这种用法,好吧,没问题,谁让你是老板呢
'''
def package_example():
    """Demonstrate which submodules ``from package1 import *`` exposes.

    NOTE(review): the second call requires ``p2`` to be listed in
    ``package1/__init__.py``'s ``__all__``; with ``__all__ = ['p']`` only,
    ``p2`` would be undefined here -- confirm against the package source.
    """
    p.prt(4,
          'learning/py3/0-1/package1/__init__.py存在 __all__ = [\'p\'],顶部使用from package1 import * ,只导入了 package1包下的p模块')
    p2.prt(4,
           'learning/py3/0-1/package1/__init__.py存在 __all__ = [\'p\',\'p2\'],顶部使用from package1 import * ,只导入了 package1包下的p模块')
package_example()
| [
"haodonghui@yestae.com"
] | haodonghui@yestae.com |
2a2c97ba51b70bab6a24312c393fbea8c3b66af5 | e1ada8f254c272bf59a7b2d5b6b25f7986bccdd6 | /bintest.py | 549b1ca31a153fed210a5afb0744a1196bc977da | [] | no_license | shmunb/DES | d7ad67b6a2ec5124da73ae3eccff2c817b9d686e | ee05897ea0c931cd063464cf8a6350de379e6952 | refs/heads/master | 2020-12-16T21:55:13.478214 | 2020-01-21T07:53:11 | 2020-01-21T07:53:11 | 235,276,468 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 530 | py | import des_lib as lib
# Round-trip exercise for the DES implementation on a binary file:
# encrypt tests/binary_input.txt into binary_encrypted.txt, then decrypt it.
input_binary = 'C:/Users/mi/Desktop/прога/des/tests/binary_input.txt'
encrypted_binary_file = "C:/Users/mi/Desktop/прога/des/tests/binary_encrypted.txt"
decrypted_binary_file = "C:/Users/mi/Desktop/прога/des/tests/binary_decrypted.txt"
# 56-bit key literal -- presumably the 56 effective DES key bits without
# parity; confirm the expected key format against des_lib.DES.
test_key = 0b11101001010101010011001010101100101010101111001001010101
# 'bin' appears to select binary input mode -- see des_lib.DES's 4th argument.
test = lib.DES(test_key, input_binary, encrypted_binary_file, 'bin')
test.encrypt()
test = lib.DES(test_key, encrypted_binary_file, decrypted_binary_file)
test.decrypt(41) | [
"noreply@github.com"
] | shmunb.noreply@github.com |
62d7db44a7f7613ecdc9a3d3d14553c99253d5c7 | 2f046d174e08849d2b1004de28ded78340670c2d | /reid/evaluator/partialcrfevaluator.py | 80df56c922329c58aa0485ad188e4b8b1ffa39de | [] | no_license | danxuhk/crf_affinity | 743f52d4e053caf4ed389af6ed800be381620523 | 05141910dbeabade256077baa1cfb3d42590c64c | refs/heads/master | 2020-04-03T23:55:31.096114 | 2019-01-13T17:21:02 | 2019-01-13T17:21:02 | 155,634,628 | 2 | 0 | null | 2018-10-31T23:11:04 | 2018-10-31T23:11:04 | null | UTF-8 | Python | false | false | 9,768 | py | from __future__ import print_function, absolute_import
import time
from collections import OrderedDict
import torch
from torch.autograd import Variable
import numpy as np
from reid.evaluator import cmc, mean_ap
from reid.feature_extraction import extract_cnn_feature
from reid.utils.meters import AverageMeter
from reid.utils import to_torch
import torch.nn.functional as F
def to_numpy(tensor):
    """Return *tensor* as a numpy array.

    Torch tensors are moved to the CPU and converted; numpy arrays pass
    through unchanged; any other type raises ``ValueError``.
    """
    if torch.is_tensor(tensor):
        return tensor.cpu().numpy()
    if type(tensor).__module__ == 'numpy':
        return tensor
    raise ValueError("Cannot convert {} to numpy array".format(type(tensor)))
class PartialCRFEvaluator(object):
def __init__(self, cnnmodel, classifier, crfmodel):
self.cnnmodel = cnnmodel
self.classifier = classifier
self.crfmodel = crfmodel
self.select_size = 50
def evaluate_1st(self, distmat, query=None, gallery=None):
if query is not None and gallery is not None:
query_ids = [pid for _, pid, _ in query]
gallery_ids = [pid for _, pid, _ in gallery]
query_cams = [cam for _, _, cam in query]
gallery_cams = [cam for _, _, cam in gallery]
else:
raise RuntimeError('please provide the query and gallery information')
distmat = to_numpy(distmat)
mAP = mean_ap(distmat, query_ids, gallery_ids, query_cams, gallery_cams)
print('Mean AP: {:4.1%}'.format(mAP))
indices = np.argsort(distmat, axis=1)
indices = np.argsort(indices, axis=1)
mask = (indices < self.select_size).astype(float)
cmc_configs = {
# 'allshots': dict(separate_camera_set=False,
# single_gallery_shot=False,
# first_match_break=False),
# 'cuhk03': dict(separate_camera_set=True,
# single_gallery_shot=True,
# first_match_break=False),
'market1501': dict(separate_camera_set=False,
single_gallery_shot=False,
first_match_break=True)}
cmc_scores = {name: cmc(distmat, query_ids, gallery_ids,
query_cams, gallery_cams, **params)
for name, params in cmc_configs.items()}
return [cmc_scores['market1501'][k] for k in [0, 4, 9, 19]], mask
def evaluate_all(self, distmat, query=None, gallery=None):
if query is not None and gallery is not None:
query_ids = [pid for _, pid, _ in query]
gallery_ids = [pid for _, pid, _ in gallery]
query_cams = [cam for _, _, cam in query]
gallery_cams = [cam for _, _, cam in gallery]
else:
raise RuntimeError('please provide the query and gallery information')
# Compute mean AP
mAP = mean_ap(distmat, query_ids, gallery_ids, query_cams, gallery_cams)
print('Mean AP: {:4.1%}'.format(mAP))
# Compute all kinds of CMC scores
cmc_configs = {
# 'allshots': dict(separate_camera_set=False,
# single_gallery_shot=False,
# first_match_break=False),
# 'cuhk03': dict(separate_camera_set=True,
# single_gallery_shot=True,
# first_match_break=False),
'market1501': dict(separate_camera_set=False,
single_gallery_shot=False,
first_match_break=True)}
cmc_scores = {name: cmc(distmat, query_ids, gallery_ids,
query_cams, gallery_cams, **params)
for name, params in cmc_configs.items()}
# print('CMC Scores{:>12}{:>12}{:>12}'
# .format('allshots', 'cuhk03', 'market1501'))
# for k in cmc_topk:
# print(' top-{:<4}{:12.1%}{:12.1%}{:12.1%}'
# .format(k, cmc_scores['allshots'][k - 1],
# cmc_scores['cuhk03'][k - 1],
# cmc_scores['market1501'][k - 1]))
# Use the allshots cmc top-1 score for validation criterion
return [cmc_scores['market1501'][k] for k in [0, 4, 9, 19]]
def extractfeature(self, data_loader):
## print
print_freq = 50
batch_time = AverageMeter()
data_time = AverageMeter()
end = time.time()
queryfeatures = 0
preimgs = 0
for i, (imgs, fnames, pids, _) in enumerate(data_loader):
data_time.update(time.time() - end)
imgs = Variable(imgs, volatile=True)
if i == 0:
query_feat = self.cnnmodel(imgs)
queryfeatures = query_feat
preimgs = imgs
elif imgs.size(0) < data_loader.batch_size:
flaw_batchsize = imgs.size(0)
cat_batchsize = data_loader.batch_size - flaw_batchsize
imgs = torch.cat((imgs, preimgs[0:cat_batchsize]), 0)
query_feat = self.cnnmodel(imgs)
query_feat = query_feat[0:flaw_batchsize]
queryfeatures = torch.cat((queryfeatures, query_feat), 0)
else:
query_feat = self.cnnmodel(imgs)
queryfeatures = torch.cat((queryfeatures, query_feat), 0)
batch_time.update(time.time() - end)
end = time.time()
if (i + 1) % print_freq == 0:
print('Extract Features: [{}/{}]\t'
'Time {:.3f} ({:.3f})\t'
'Data {:.3f} ({:.3f})\t'
.format(i + 1, len(data_loader),
batch_time.val, batch_time.avg,
data_time.val, data_time.avg))
return queryfeatures
def sim_computation(self, galleryloader, query_features):
batch_time = AverageMeter()
data_time = AverageMeter()
end = time.time()
print_freq = 50
simmat = 0
for i, (imgs, _, pids, _) in enumerate(galleryloader):
data_time.update(time.time() - end)
imgs = Variable(imgs, volatile=True)
if i == 0:
gallery_feat = self.cnnmodel(imgs)
preimgs = imgs
elif imgs.size(0) < galleryloader.batch_size:
flaw_batchsize = imgs.size(0)
cat_batchsize = galleryloader.batch_size - flaw_batchsize
imgs = torch.cat((imgs, preimgs[0:cat_batchsize]), 0)
gallery_feat = self.cnnmodel(imgs)
gallery_feat = gallery_feat[0:flaw_batchsize]
else:
gallery_feat = self.cnnmodel(imgs)
batch_cls_encode = self.classifier(query_features, gallery_feat)
batch_cls_size = batch_cls_encode.size()
batch_cls_encode = batch_cls_encode.view(-1, 2)
batch_cls_encode = F.softmax(batch_cls_encode)
batch_cls_encode = batch_cls_encode.view(batch_cls_size[0], batch_cls_size[1], 2)
batch_similarity = batch_cls_encode[:, :, 1]
if i == 0:
simmat = batch_similarity
else:
simmat = torch.cat((simmat, batch_similarity), 1)
batch_time.update(time.time() - end)
end = time.time()
if (i + 1) % print_freq == 0:
print('Extract Features: [{}/{}]\t'
'Time {:.3f} ({:.3f})\t'
'Data {:.3f} ({:.3f})\t'
.format(i + 1, len(galleryloader),
batch_time.val, batch_time.avg,
data_time.val, data_time.avg))
return simmat
def partial_crf(self, probescore, galleryscore, mask):
## 换成 numpy 进行计算
## probescore, galleryscore torch
## mask numpy nd arrary
pairwise_mat = galleryscore - np.diag(np.diag(galleryscore))
pairwise_mat = pairwise_mat / (self.select_size-1)
softmax_weights = F.softmax(self.crfmodel.weights).data
alphas = softmax_weights[0:self.crfmodel.Unarynum].cpu().numpy()
betas = softmax_weights[self.crfmodel.Unarynum:self.crfmodel.Unarynum + self.crfmodel.Pairnum].cpu().numpy()
norm_simsum = np.dot(mask, pairwise_mat)
normalizes = alphas + norm_simsum*betas
mu = probescore * mask
for i in range(self.crfmodel.layernum):
mu = (probescore * alphas + np.dot(mu, pairwise_mat*betas)) / normalizes
mu = mu * mask
return mu
def evaluate(self, queryloader, galleryloader, query, gallery):
    """Score every query against the gallery and run the standard evaluation.

    Puts all sub-models in eval mode, extracts query features, computes the
    query-gallery similarity matrix, and hands (2 - similarity) — a
    distance-like matrix — to evaluate_all.
    """
    self.cnnmodel.eval()
    self.classifier.eval()
    self.crfmodel.eval()
    query_features = self.extractfeature(queryloader)
    # gallery_features = self.extractfeature(galleryloader)
    simmat = self.sim_computation(galleryloader, query_features)
    # --- CRF re-ranking experiment, currently disabled ---
    # top0, mask = self.evaluate_1st(1 - simmat.data, query = query, gallery = gallery)
    # print(top0)
    # gallerymat = self.sim_computation(galleryloader, gallery_features)
    # ### partial crf model
    # simmat = simmat.data.cpu().numpy()
    # gallerymat = gallerymat.data.cpu().numpy()
    # print(np.amax(simmat))
    # print(np.amin(simmat))
    #
    # scores = self.partial_crf(simmat, gallerymat, mask)
    # final_scores = (scores+1)*mask + simmat*(1-mask)
    # print(np.amax(final_scores))
    # print(np.amin(final_scores))
    final_scores = simmat.data.cpu().numpy()
    # 2 - similarity turns similarities into distances; evaluate_all is
    # presumed to expect a distance matrix — confirm against its definition.
    return self.evaluate_all(2-final_scores, query = query, gallery = gallery)
| [
"liyan@pn-206-72.itsc.cuhk.edu.hk"
] | liyan@pn-206-72.itsc.cuhk.edu.hk |
d82a29d09c5b5543449e1e42daea3c2696b3d592 | 48e204dadffadbb2afbb658a24901650808a4081 | /config/settings.py | a2f6ef2932fccdba14313a4bfa427d245e7d25ec | [] | no_license | fbotero1956/FSDI_112 | ffbd0efa35f4962eb9993f83628c07214da5d9da | b4aab43385519dea5953dcc48ce0bfc04de276a9 | refs/heads/main | 2023-05-23T23:18:32.984824 | 2021-06-08T10:45:04 | 2021-06-08T10:45:04 | 372,275,220 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,328 | py | """
Django settings for config project.
Generated by 'django-admin startproject' using Django 3.2.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path

# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; rotate it and load
# it from the environment before any production deployment.
SECRET_KEY = 'django-insecure-fyg4^gbi8evt$kol+m3=eo_-vca36@^#so821h)p4ts=kbirmm'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

# Heroku app domains plus the local development hosts.
ALLOWED_HOSTS = [".herokuapp.com", "localhost", "127.0.0.1"]

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'pages'  # local app serving the site's pages
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'config.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # Project-level template directory in addition to per-app templates.
        'DIRS': [str(BASE_DIR.joinpath('templates'))],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'config.wsgi.application'

# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}

# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/

STATIC_URL = '/static/'

# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field

DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| [
"felipe.botero.botero@gmail.com"
] | felipe.botero.botero@gmail.com |
c64d4a116649878ddb94e95e66e3c58c114e7155 | fb39e15da72e41cf1903aa3670833e35b668edad | /main.py | 37961bed2279b1b12f56a1ef46d4b9588b8717ea | [] | no_license | jackfrostwillbeking/script_test | e9b8c91babc2c9d6ed111a77b5156f3624683a1e | f47370f05632e1a76cbcacd24737ec370d2faf58 | refs/heads/master | 2021-04-06T17:11:36.479236 | 2018-03-09T10:21:19 | 2018-03-09T10:21:19 | 124,380,417 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 104 | py | import subprocess
import sys

# Run the companion shell script via bash; its exit status lands in `res`.
res = subprocess.call(["/bin/bash","-c","./test_script.sh"])
# NOTE(review): sys.exit(0) discards `res`, so this wrapper always reports
# success even when test_script.sh fails — confirm this is intended
# (sys.exit(res) would propagate the status).
sys.exit(0)
| [
"jackfrostwillbeking@gmail.com"
] | jackfrostwillbeking@gmail.com |
7d49e538d0c4d130c6964156f419d4cf1a992c6c | b939cbae7229b7792e573aa3d7552d20c7706d59 | /3/3-1.py | 173ccc5666eca436dc7af694362665b2815bbe4b | [] | no_license | dlittleton/aoc2020 | ce2d93fcf9a1ecfce4a31cdde5d68424b5f819fe | ab09cfdc47691256de45bec21e1a543458b70a82 | refs/heads/main | 2023-02-06T00:49:53.160763 | 2020-12-24T23:54:31 | 2020-12-24T23:54:31 | 318,066,875 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 203 | py | import sys
def count_trees(grid, right=3, down=1):
    """Count '#' cells hit while tobogganing down a repeating grid.

    Starting at the top-left cell, repeatedly move `right` columns and
    `down` rows; the grid tiles infinitely to the right. The starting cell
    itself is never examined (matching the original script, which skipped
    the first line).

    :param grid: list of equal-width strings forming the map
    :param right: columns moved per step (default 3, as in the original)
    :param down: rows moved per step (default 1, as in the original)
    :return: number of '#' cells encountered
    """
    trees = 0
    col = 0
    for row in range(down, len(grid), down):
        col += right
        line = grid[row]
        # Wrap horizontally because the pattern repeats to the right.
        if line[col % len(line)] == '#':
            trees += 1
    return trees


if __name__ == '__main__':
    lines = [l.rstrip() for l in sys.stdin.readlines()]
    print(count_trees(lines))
"dlittleton87@gmail.com"
] | dlittleton87@gmail.com |
c57945669f9a7b5e20711b37a26bbc7ef4f4f9fc | f0cf7d744e4bbc363c694fe18a3af0f987779eac | /penny_twit1.py | f069ed8d5f786e9a033016800afa5f01267d9bc7 | [] | no_license | iswdp/penny_twit | de12fac5125d514d4cf04351a6613df29137e0c8 | c219baaf2b7851ed639078c212bac2a04d8039fb | refs/heads/master | 2021-03-12T19:18:55.151473 | 2014-08-23T11:24:42 | 2014-08-23T11:24:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,745 | py | from twython import TwythonStreamer
from pandas import DataFrame
import pandas as pd
import re, dateutil, datetime
def write_data(new_data, data):
    """Increment today's mention counts for the given ticker symbols.

    Python 2 code. If the last row of `data` is not dated today, a fresh
    all-zero row for today is appended first. Any symbol not yet tracked
    gets a new zero-filled column. The updated frame is persisted to
    'data.csv' as a side effect.

    :param new_data: list of ticker symbols seen in the current tweet
    :param data: pandas DataFrame with a 'Date' column and one column per symbol
    """
    today = datetime.datetime.now().strftime('%m/%d/%Y')
    print today
    # Start a new daily row when the date has rolled over.
    if data['Date'][len(data)-1].strftime('%m/%d/%Y') != today:
        zero_list = []
        for j in range(len(data.T)):
            zero_list.append(0)
        zero_list[0] = dateutil.parser.parse(today)
        temp = DataFrame(zero_list).T
        temp.columns = data.columns
        data = pd.concat([data,temp]).reset_index(drop=True)
    for i in new_data:
        # Unseen symbols become new columns initialised to zero.
        if i not in list(data.columns):
            data[i] = 0
        data[i][len(data)-1] += 1
    # Persist after every update so a crash loses at most one tweet.
    data.to_csv('data.csv', sep=',', index=False)
    print data.ix[len(data)-1,:].tail()
class MyStreamer(TwythonStreamer):
    """Twitter stream handler that tallies stock-ticker mentions.

    Python 2 code. Relies on attributes attached externally before the
    stream starts (`counter`, `data`) and on the module-level `symbols`
    list of known OTCBB tickers.
    """

    def on_success(self, data):
        # Called once per received tweet.
        self.counter += 1
        print self.counter
        if 'text' in data:
            # Candidate tickers: runs of 2-5 capital letters.
            regex = '[A-Z]{2,5}'
            pattern = re.compile(regex)
            temp = re.findall(pattern, data['text'].encode('utf-8'))
            new_data = []
            for i in range(len(temp)):
                # Keep only matches that are known tickers (module-level list).
                if temp[i] in symbols:
                    new_data.append(temp[i])
            if len(new_data) > 0:
                print new_data
                write_data(new_data, self.data)

    def on_error(self, status_code, data):
        # Just report HTTP errors from the streaming API.
        print status_code
# Load the universe of known OTCBB ticker symbols, one per line.
fi = open('OTCBB.txt')
symbols = []
for i in fi:
    symbols.append(i.strip())
fi.close()

# Resume from the existing daily-count CSV, parsing its Date column back
# into datetime objects.
data = pd.read_csv('data.csv')
date_list = []
for i in data['Date']:
    date_list.append(dateutil.parser.parse(i))
data['Date'] = date_list

# NOTE(review): `stream` is used below but never created in this file — a
# `stream = MyStreamer(<API credentials>)` line appears to be missing
# (possibly removed along with the credentials).
stream.symbols = symbols
stream.data = data
stream.counter = 0
stream.statuses.filter(track='stock')
"iswdp@hotmail.com"
] | iswdp@hotmail.com |
a7e5132842dfb7272babf3b1172fde553a309d82 | 54e9ebd9204914a5addc3a4da2456a079f0deacb | /misc.py | 22a767478bd293e2414f9ba01df1a871342782fd | [] | no_license | cosmin-paunica/Monochrome-Dreams-Classification | b8fbecc1b5d117cc120427c001cdfa42b6bb0878 | 7fcf20d1d8ad4a3efbe627523a08997b01993b82 | refs/heads/main | 2023-06-06T06:42:55.553216 | 2021-07-02T10:38:46 | 2021-07-02T10:47:40 | 382,312,963 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,472 | py | import numpy as np
import imageio
NUM_TRAIN_IMAGES = 30001
NUM_VALIDATION_IMAGES = 5000
NUM_TEST_IMAGES = 5000
IMG_WIDTH, IMG_HEIGHT = 32, 32
NUM_CLASSES = 9
# Image loading helpers.
# Call with read_labels==True for the training and validation images.
# Call with read_labels==False for the test images.
# for_CNN==True reads the images as three-dimensional arrays (two image dimensions plus one for the colour channels).
# for_CNN==False reads the images as one-dimensional arrays, with an image's pixels laid out one after another.
def read_images(file, num_images, path, read_labels=True, for_CNN=False):
    """Load `num_images` grayscale images listed (one per line) in `file`.

    When read_labels is True each line is '<name>,<label>' and a label array
    is returned alongside the images; otherwise each line is just the image
    file name. With for_CNN=True images keep their 2-D shape plus a single
    channel axis; otherwise each image is flattened to a vector of
    IMG_WIDTH * IMG_HEIGHT pixels.

    :return: images array, plus an int label array when read_labels is True
    """
    if for_CNN:
        images = np.zeros((num_images, IMG_WIDTH, IMG_HEIGHT, 1))
    else:
        images = np.zeros((num_images, IMG_WIDTH * IMG_HEIGHT))
    labels = np.zeros(num_images, 'int') if read_labels else None
    with open(file) as f_in:
        for i, img_string in enumerate(f_in):
            if read_labels:
                img_info = img_string.split(',')
                img_file = img_info[0]
                labels[i] = int(img_info[1])
            else:
                img_file = img_string.split('\n')[0]
            pixels = np.asarray(imageio.imread(f'{path}/{img_file}'))
            if for_CNN:
                images[i] = pixels.reshape((IMG_WIDTH, IMG_HEIGHT, 1))
            else:
                images[i] = np.reshape(pixels, IMG_WIDTH * IMG_HEIGHT)
    return (images, labels) if read_labels else images
def read_all(for_CNN=False):
    """Load the train/validation sets (with labels) and the unlabeled test set."""
    train_images, train_labels = read_images(
        './data/train.txt', NUM_TRAIN_IMAGES, './data/train', for_CNN=for_CNN)
    validation_images, validation_labels = read_images(
        './data/validation.txt', NUM_VALIDATION_IMAGES, './data/validation', for_CNN=for_CNN)
    # Test images carry no labels, hence read_labels=False.
    test_images = read_images(
        './data/test.txt', NUM_TEST_IMAGES, './data/test', False, for_CNN=for_CNN)
    return train_images, train_labels, validation_images, validation_labels, test_images
def write_predictions(file, predictions):
    """Write predictions as a Kaggle-style CSV with an 'id,label' header.

    Image ids continue from 035001.png (i.e. 35000 + row index + 1).
    Generalized: writes exactly len(predictions) rows instead of the
    original hard-coded 5000, so shorter or longer prediction lists work
    unchanged (identical output for the original 5000-element case).

    :param file: output CSV path
    :param predictions: sequence of integer class labels, in test-set order
    """
    with open(file, 'w') as f_out:
        f_out.write('id,label\n')
        for i, label in enumerate(predictions):
            f_out.write(f'0{35000 + i + 1}.png,{label}\n')
| [
"cosmin_paunica@yahoo.com"
] | cosmin_paunica@yahoo.com |
a86314c6dbb75fbe34b73dfc167af86fb091d514 | a1a565991a00918ff7196bcbb09f39c0742e2b56 | /swagger_client/models/trip_request_response_journey_fare_zone.py | 45af2629a1fd446d3312ec78c7911b32a9382e07 | [] | no_license | joeyabouharb/opendata-swagger | 1b08821f1f82bcd8ea82e19d29a89f209cf60c31 | adbd714778503337a863d4d9f80b7a4266b7e367 | refs/heads/master | 2023-03-15T06:07:49.138370 | 2019-10-05T02:41:38 | 2019-10-05T02:41:38 | 209,513,317 | 0 | 0 | null | 2023-02-15T22:55:01 | 2019-09-19T09:24:02 | Python | UTF-8 | Python | false | false | 5,784 | py | # coding: utf-8
"""
Trip Planner
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class TripRequestResponseJourneyFareZone(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Declared attribute types, consumed by to_dict() and client serializers.
    swagger_types = {
        'net': 'str',
        'to_leg': 'int',
        'from_leg': 'int',
        'neutral_zone': 'str'
    }

    # Python attribute name -> JSON field name in the API payload.
    attribute_map = {
        'net': 'net',
        'to_leg': 'toLeg',
        'from_leg': 'fromLeg',
        'neutral_zone': 'neutralZone'
    }

    def __init__(self, net=None, to_leg=None, from_leg=None, neutral_zone=None):  # noqa: E501
        """TripRequestResponseJourneyFareZone - a model defined in Swagger"""  # noqa: E501
        self._net = None
        self._to_leg = None
        self._from_leg = None
        self._neutral_zone = None
        self.discriminator = None
        # Assign through the property setters so any validation they gain
        # later automatically applies to constructor arguments too.
        if net is not None:
            self.net = net
        if to_leg is not None:
            self.to_leg = to_leg
        if from_leg is not None:
            self.from_leg = from_leg
        if neutral_zone is not None:
            self.neutral_zone = neutral_zone

    @property
    def net(self):
        """Gets the net of this TripRequestResponseJourneyFareZone.  # noqa: E501

        Not currently used.  # noqa: E501

        :return: The net of this TripRequestResponseJourneyFareZone.  # noqa: E501
        :rtype: str
        """
        return self._net

    @net.setter
    def net(self, net):
        """Sets the net of this TripRequestResponseJourneyFareZone.

        Not currently used.  # noqa: E501

        :param net: The net of this TripRequestResponseJourneyFareZone.  # noqa: E501
        :type: str
        """
        self._net = net

    @property
    def to_leg(self):
        """Gets the to_leg of this TripRequestResponseJourneyFareZone.  # noqa: E501

        Not currently used.  # noqa: E501

        :return: The to_leg of this TripRequestResponseJourneyFareZone.  # noqa: E501
        :rtype: int
        """
        return self._to_leg

    @to_leg.setter
    def to_leg(self, to_leg):
        """Sets the to_leg of this TripRequestResponseJourneyFareZone.

        Not currently used.  # noqa: E501

        :param to_leg: The to_leg of this TripRequestResponseJourneyFareZone.  # noqa: E501
        :type: int
        """
        self._to_leg = to_leg

    @property
    def from_leg(self):
        """Gets the from_leg of this TripRequestResponseJourneyFareZone.  # noqa: E501

        Not currently used.  # noqa: E501

        :return: The from_leg of this TripRequestResponseJourneyFareZone.  # noqa: E501
        :rtype: int
        """
        return self._from_leg

    @from_leg.setter
    def from_leg(self, from_leg):
        """Sets the from_leg of this TripRequestResponseJourneyFareZone.

        Not currently used.  # noqa: E501

        :param from_leg: The from_leg of this TripRequestResponseJourneyFareZone.  # noqa: E501
        :type: int
        """
        self._from_leg = from_leg

    @property
    def neutral_zone(self):
        """Gets the neutral_zone of this TripRequestResponseJourneyFareZone.  # noqa: E501

        Not currently used.  # noqa: E501

        :return: The neutral_zone of this TripRequestResponseJourneyFareZone.  # noqa: E501
        :rtype: str
        """
        return self._neutral_zone

    @neutral_zone.setter
    def neutral_zone(self, neutral_zone):
        """Sets the neutral_zone of this TripRequestResponseJourneyFareZone.

        Not currently used.  # noqa: E501

        :param neutral_zone: The neutral_zone of this TripRequestResponseJourneyFareZone.  # noqa: E501
        :type: str
        """
        self._neutral_zone = neutral_zone

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Recursively serialize nested swagger models, lists and dicts.
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(TripRequestResponseJourneyFareZone, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, TripRequestResponseJourneyFareZone):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"joeyabouharb@gmail.com"
] | joeyabouharb@gmail.com |
a316a002441fd12b147ad79df548c801fcb04d82 | 77c7b74f401f7ba95257b62e8aaee682bd5756a5 | /rnn_class/srn_language.py | 9df25ec96d2b7af6c24ee30148b3b61ff666ea86 | [] | no_license | fortiema/udemy_lazyprog | 1387841c182b60a4f8711b3bb709b1fd10b9b94f | c9d76d6d6b38e76bb92dd47695cfdbd40401ad48 | refs/heads/master | 2018-12-05T11:19:17.869876 | 2018-11-04T11:42:43 | 2018-11-04T11:42:43 | 109,830,844 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,937 | py | import numpy as np
import matplotlib.pyplot as plt
from sklearn.utils import shuffle
import theano
import theano.tensor as T
from util import init_weight, get_robert_frost
class SimpleLMRNN:
    """A simple Language Model using SRN"""
    def __init__(self, D, M, V):
        # D: embedding size, M: hidden-layer size, V: vocabulary size.
        self.D = D
        self.M = M
        self.V = V

    def fit(self, X, epochs=200, lr=10e-1, mu=0.99, reg=1.0, act=T.tanh, show_fig=False):
        """Train the SRN with momentum SGD on token-id sequences X.

        Each sequence is framed with START (id 0) prefixed to the input and
        END (id 1) appended to the target, so the model learns to begin and
        end lines. NOTE(review): `reg` is accepted but never used; `lr=10e-1`
        is simply 1.0.
        """
        N = len(X)
        D = self.D
        M = self.M
        V = self.V
        # Parameter initialisation: embedding, input/recurrent/output weights.
        We = init_weight(V, D)
        Wx = init_weight(D, M)
        Wh = init_weight(M, M)
        bh = np.zeros(M)
        h0 = np.zeros(M)
        Wo = init_weight(M, V)
        bo = np.zeros(V)
        self.f = act
        self.We = theano.shared(We)
        self.Wx = theano.shared(Wx)
        self.Wh = theano.shared(Wh)
        self.bh = theano.shared(bh)
        self.h0 = theano.shared(h0)
        self.Wo = theano.shared(Wo)
        self.bo = theano.shared(bo)
        self.params = [self.We, self.Wx, self.Wh, self.bh, self.h0, self.Wo, self.bo]

        thX = T.ivector('X')
        Ei = self.We[thX]
        thY = T.ivector('Y')

        # One SRN step: new hidden state plus softmax over the vocabulary.
        def recurrence(x_t, h_t1):
            h_t = self.f(x_t.dot(self.Wx) + h_t1.dot(self.Wh) + self.bh)
            y_t = T.nnet.softmax(h_t.dot(self.Wo) + self.bo)
            return h_t, y_t

        [h, y], _ = theano.scan(
            fn=recurrence,
            outputs_info=[self.h0, None],
            sequences=Ei,
            n_steps=Ei.shape[0]
        )

        py_x = y[:, 0, :]
        pred = T.argmax(py_x, axis=1)
        # Mean negative log-likelihood of the target tokens.
        cost = -T.mean(T.log(py_x[T.arange(thY.shape[0]), thY]))
        grads = T.grad(cost, self.params)
        dparams = [theano.shared(p.get_value()*0) for p in self.params]
        # Momentum SGD: parameter step plus velocity update.
        updates = [
            (p, p + mu*dp - lr*g) for p, dp, g in zip(self.params, dparams, grads)
        ] + [
            (dp, mu*dp - lr*g) for dp, g in zip(dparams, grads)
        ]

        self.pred_op = theano.function(inputs=[thX], outputs=pred, allow_input_downcast=True)
        self.train_op = theano.function(
            inputs=[thX, thY],
            outputs=[cost, pred],
            updates=updates
        )

        costs = []
        # +1 per sequence accounts for the appended END token.
        n_total = sum((len(seq) + 1) for seq in X)
        for i in range(epochs):
            X = shuffle(X)
            n_correct = 0
            cost = 0
            for j in range(N):
                input_seq = [0] + X[j]
                output_seq = X[j] + [1]
                c, p = self.train_op(input_seq, output_seq)
                cost += c
                for pj, xj in zip(p, output_seq):
                    if pj == xj:
                        n_correct += 1
            print(f'i: {i}; cost: {cost}; accuracy: {float(n_correct) / n_total}')
            costs.append(cost)

        if show_fig:
            plt.plot(costs)
            plt.show()

    def save(self, filename):
        """Persist all shared parameters to an .npz archive (positional order)."""
        np.savez(filename, *[p.get_value() for p in self.params])

    @staticmethod
    def load(filename, act):
        """Rebuild a model from an .npz archive saved by save().

        :param act: activation function to use (must match training time)
        """
        npz = np.load(filename)
        We = npz['arr_0']
        Wx = npz['arr_1']
        Wh = npz['arr_2']
        bh = npz['arr_3']
        h0 = npz['arr_4']
        Wo = npz['arr_5']
        bo = npz['arr_6']
        V, D = We.shape
        _, M = Wx.shape
        model = SimpleLMRNN(D, M, V)
        model.set(We, Wx, Wh, bh, h0, Wo, bo, act)
        return model

    def set(self, We, Wx, Wh, bh, h0, Wo, bo, act):
        """Install pre-trained weights and rebuild the prediction graph.

        NOTE(review): this method contains several defects preserved below:
        recurrence() lacks a `return h_t, y_t`, the `dparams - [...]` line
        uses `-` where `=` was intended (and the whole cost/grads/updates
        section references `mu`/`lr`, which are undefined here). Only
        `self.pred_op` is actually needed for generation.
        """
        self.f = act
        self.We = theano.shared(We)
        self.Wx = theano.shared(Wx)
        self.Wh = theano.shared(Wh)
        self.bh = theano.shared(bh)
        self.h0 = theano.shared(h0)
        self.Wo = theano.shared(Wo)
        self.bo = theano.shared(bo)
        self.params = [self.We, self.Wx, self.Wh, self.bh, self.h0, self.Wo, self.bo]

        thX = T.ivector('X')
        Ei = self.We[thX]
        thY = T.ivector('Y')

        def recurrence(x_t, h_t1):
            h_t = self.f(x_t.dot(self.Wx) + h_t1.dot(self.Wh) + self.bh)
            y_t = T.nnet.softmax(h_t.dot(self.Wo) + self.bo)

        [h, y], _ = theano.scan(
            fn=recurrence,
            outputs_info=[self.h0, None],
            sequences=Ei,
            n_steps=Ei.shape[0]
        )

        py_x = y[:, 0, :]
        pred = T.argmax(py_x, axis=1)
        cost = -T.mean(T.log(py_x[T.arange(thY.shape[0]), thY]))
        grads = T.grad(cost, self.params)
        dparams - [theano.shared(p.get_value()*0) for p in self.params]
        updates = [
            (p, p + mu*dp - lr*g) for p, dp, g in zip(self.params, dparams, grads)
        ] + [
            (dp, mu*dp - lr*g) for dp, g in zip(dparams, grads)
        ]

        self.pred_op = theano.function(inputs=[thX], outputs=pred, allow_input_downcast=True)

    def generate(self, pi, word2idx):
        """Sample four lines of text, seeding each line's first word from pi.

        NOTE(review): np.random.choice takes the distribution via keyword
        `p`, not `pi` (TypeError as written); and `P = P[-1]` indexes what
        the preceding line already reduced to a scalar — both preserved
        here for review.
        """
        idx2word = {v:k for k, v in word2idx.items()}
        V = len(pi)

        n_lines = 0
        X = [ np.random.choice(V, pi=pi) ]
        print(idx2word[X[0]] + '\n')

        while n_lines < 4:
            P = self.pred_op(X)[-1]
            X += [P]
            P = P[-1]
            if P > 1:
                word = idx2word[P]
                print(word + '\n')
            elif P == 1:
                # END token closes the current line.
                n_lines += 1
                if n_lines < 4:
                    X = [ np.random.choice(V, pi=pi) ]
                    print(idx2word[X[0]] + '\n')
def train_poetry():
    """Fit an SRN language model on the Robert Frost corpus and save it."""
    sentences, word2idx = get_robert_frost()
    vocab_size = len(word2idx)
    lm = SimpleLMRNN(30, 30, vocab_size)
    lm.fit(sentences, lr=10e-5, show_fig=True, act=T.nnet.relu, epochs=2000)
    lm.save('RNN_D30_M30_epochs2000_relu.npz')
def generate_poetry():
    """Load the trained SRN model and sample poetry from it.

    `pi` is the empirical distribution of sentence-initial words, used by
    the model to pick the first word of each generated line.
    """
    sents, w2i = get_robert_frost()
    model = SimpleLMRNN.load('RNN_D30_M30_epochs2000_relu.npz', T.nnet.relu)
    V = len(w2i)
    pi = np.zeros(V)  # fixed: np.zeroes does not exist (AttributeError)
    for sent in sents:
        pi[sent[0]] += 1
    pi /= pi.sum()
    model.generate(pi, w2i)
if __name__ == '__main__':
    # Train first, then sample from the freshly saved model.
    train_poetry()
    generate_poetry()
"fortiema@gmail.com"
] | fortiema@gmail.com |
245d25c51fcb9630b9b8fb3531f028b5c462a945 | 225bf8285be2b4fee7577d1177e92c10ac912b6e | /mth314/instruct/auto_meta_nbgrader.py | 55558854e312c82ce6293c292cf3ba1854d2127b | [] | no_license | ninja542/autograde-testing | d485d010ce0a52f09cecebf4c9e768cf5239c92b | 81fa354ea0613bbfaadb8ba99232a3e99c3eaf67 | refs/heads/master | 2020-12-08T22:15:24.360006 | 2020-02-24T19:25:22 | 2020-02-24T19:25:22 | 233,109,875 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 831 | py | # add nbgrader metadata
import json
import numpy as np
import sys
def process_comment(line):
    """Strip '#' characters from a comment line and split it into tokens.

    :param line: raw comment line from a notebook cell
    :return: list of whitespace-separated tokens with '#' removed
    """
    command = line.replace("#", "")
    command = command.strip()
    # if command[:3] == "AGA":
    return [token.strip() for token in command.split()]


def is_command(line):
    """Return True when either of the first two tokens mentions 'ANSWER'.

    Fixes two defects of the original: the second check looked for the typo
    'AMSWER', and indexing token [1] raised IndexError for comments with
    fewer than two tokens.

    :param line: raw comment line from a notebook cell
    """
    tokens = process_comment(line)
    return any("ANSWER" in token for token in tokens[:2])
# Path of the instructor notebook to process, e.g. 'HW1-INST.ipynb'.
ASSIGNMENT = sys.argv[1]
ind = ASSIGNMENT.index("INST")
ext = ASSIGNMENT.index(".ipynb")
# NEW_ASSIGNMENT = ASSIGNMENT[:ind] + "STUDENT" + ASSIGNMENT[ext:]
with open(ASSIGNMENT, 'r+', encoding="utf-8") as file:
    data = json.load(file)
    print(data)
    for cell in data['cells']:
        # A cell is an "answer" cell when its first source line carries ANSWER.
        if is_command(cell['source'][0]) == True:
            # NOTE(review): process_comment expects a string but receives the
            # whole cell['source'] list here (list has no .replace, so this
            # raises); cell['source'][0] was probably intended.
            cell_type = process_comment(cell['source'])
            if cell_type[0] == "AA":
                cell["metadata"]["nbgrader"]["grade"] = False
# NOTE(review): the notebook is opened 'r+' but the modified JSON is never
# written back — confirm whether a json.dump / file.write step is missing.
"theroboticcyborg@gmail.com"
] | theroboticcyborg@gmail.com |
5bf1c9b97dbb63e6b93677013796382b79d3a38b | 9daa01cc39c9b76bb40e8d968b163022521c55bc | /func_for_mult_spectr.py | d3a5a1b3fcbdf32b90c0ff45e1c5b13a8f097662 | [] | no_license | Pugavkomm/nonlinear | d61109f0d71b51ff54255b6d47ad420f98713eb0 | d2876320a8ea3da4f072df2af2fa39ac86dccecb | refs/heads/master | 2021-09-06T07:11:33.443508 | 2018-02-03T16:22:28 | 2018-02-03T16:22:28 | 103,822,984 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 178 | py | import numpy as np
def mutipl_all(N, alpha):
    """Return the N coefficients (1 - alpha) / (1 + z_i), where
    z_i = cos(i * pi / (N + 1)) for i = 1..N, as a float64 array."""
    z = np.cos(np.arange(1, N + 1) * np.pi / (N + 1))
    return (1 - alpha) / (1 + z)
"slavapugavko2@gmail.com"
] | slavapugavko2@gmail.com |
a7012e26515d2e214c34f1a948756e9af8cff489 | 5837fd85b18b56d23612de1e36d79b5a06827542 | /sniterator.py | d5e3435d3bd0924383507459b0e3f279464d9c66 | [
"MIT"
] | permissive | ChristopherWilks/snaptron | 75e33c4f25a65f3093555a7bf235ab69865f7086 | 75903c30d54708b19d91772142013687c74d88d8 | refs/heads/master | 2023-02-19T01:38:57.343293 | 2023-02-11T21:47:52 | 2023-02-11T21:47:52 | 45,953,724 | 26 | 7 | NOASSERTION | 2022-06-17T21:10:44 | 2015-11-11T02:03:37 | Python | UTF-8 | Python | false | false | 3,341 | py | #!/usr/bin/env python2.7
# This file is part of Snaptron.
#
# Snaptron is free software: you can redistribute it and/or modify
# it under the terms of the
#
# The MIT License
#
# Copyright (c) 2016- by Christopher Wilks <broadsword@gmail.com>
# and Ben Langmead <langmea@cs.jhu.edu>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
import subprocess
import shlex
class SnaptronServerIterator():
    """Iterator over the stdout lines of one or more external commands.

    Commands run sequentially (not in parallel) so that results come back in
    order and each query spawns a bounded number of processes at a time.
    """

    def __init__(self, cmds, stdout=subprocess.PIPE, shell=False, bufsize=-1, direct_output=False):
        """
        :param cmds: list of commands, each suitable for subprocess.Popen
        :param stdout: where child stdout goes; ignored when direct_output
        :param shell: passed through to Popen
        :param bufsize: passed through to Popen
        :param direct_output: if True, children write straight to this
            process's stdout (avoids Python-level line handling) and are run
            to completion immediately, one after another
        """
        self.cmds = cmds
        self.stdout = stdout
        # Performance trick: pipe subprocess output directly to this process's
        # output to avoid the cost of python line processing.
        if direct_output:
            self.stdout = sys.stdout
        self.shell = shell
        self.bufsize = bufsize
        if direct_output:
            for cmd in self.cmds:
                extern_proc = subprocess.Popen(cmd, shell=self.shell, bufsize=self.bufsize)
                extern_proc.wait()
        else:
            self.extern_procs = [subprocess.Popen(cmd, stdout=self.stdout, shell=self.shell, bufsize=self.bufsize) for cmd in self.cmds]
        self.idx = 0

    def __iter__(self):
        return self

    # Only used when direct_output is False (output goes through a pipe).
    def next(self):
        """Return the next output line across all child processes.

        Fixes the original behavior of returning a spurious empty string
        when a child produced no further output: exhausted children are now
        skipped until a line is found or every child is done.

        :raises RuntimeError: if a child exits with a non-zero status
        :raises StopIteration: when all children are exhausted
        """
        while True:
            line = self.extern_procs[self.idx].stdout.readline()
            if line:
                return line
            exitc = self.extern_procs[self.idx].wait()
            if exitc != 0:
                raise RuntimeError("%s returned non-0 exit code\n" % (self.cmds[self.idx]))
            self.idx += 1
            if self.idx >= len(self.extern_procs):
                raise StopIteration

    # Backward-compatible addition: Python 3 iterator protocol.
    __next__ = next
| [
"broadsword@gmail.com"
] | broadsword@gmail.com |
9549acb29d9a0c5bf134052cccc04c0ca9a590e6 | f5f538edf999d5a7eb265b90efa4599a81367489 | /ptranking/metric/adhoc_metric.py | c2e41ba50be9154df1b89366b12914dfe3e440f0 | [
"MIT"
] | permissive | ii-metric/ptranking | ad4db16e5a995b11103b04af46aed099e525af82 | fd4fe1373fd2dfd7c6342eb666f36e34b71e8298 | refs/heads/master | 2023-03-24T03:18:16.414348 | 2021-03-19T06:06:43 | 2021-03-19T06:06:43 | 328,522,824 | 0 | 1 | MIT | 2021-03-19T06:06:44 | 2021-01-11T02:02:01 | Python | UTF-8 | Python | false | false | 18,252 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Description
The widely used IR evaluation metrics, such as AP (average precision), nDCG and ERR
Note: commonly the metric-computation is not conducted on gpu
"""
import torch
import numpy as np
from ptranking.data.data_utils import LABEL_TYPE
""" Precision """
def torch_precision_at_k(batch_sys_sorted_labels, k=None, gpu=False):
    """Precision@k for batched, system-sorted relevance labels.

    :param batch_sys_sorted_labels: [batch_size, ranking_size] labels sorted
        by the system's predicted scores in descending order
    :param k: cutoff value (capped at the ranking size)
    :param gpu: place the rank-position tensor on GPU when True
    :return: [batch_size, 1] precision at the (possibly capped) cutoff
    """
    cutoff = min(batch_sys_sorted_labels.size(1), k)
    top_k_labels = batch_sys_sorted_labels[:, 0:cutoff]
    # Graded labels -> binary relevance.
    binary_rels = torch.clamp(top_k_labels, min=0, max=1)
    cum_rels = torch.cumsum(binary_rels, dim=1)
    # 1-based rank positions, broadcast over the batch.
    if gpu:
        positions = torch.arange(cutoff).type(torch.cuda.FloatTensor).expand_as(cum_rels) + 1.0
    else:
        positions = torch.arange(cutoff).expand_as(cum_rels) + 1.0
    rankwise_precision = cum_rels / positions
    return rankwise_precision[:, cutoff - 1:cutoff]
def torch_precision_at_ks(batch_sys_sorted_labels, ks=None, gpu=False):
    """Precision at several cutoffs for batched, system-sorted labels.

    Cutoffs exceeding the ranking size are reported as 0 (zero padding),
    matching the original behavior.

    :param batch_sys_sorted_labels: [batch_size, ranking_size] labels sorted
        by the system's predicted scores in descending order
    :param ks: iterable of cutoff values
    :param gpu: place the rank-position tensor on GPU when True
    :return: [batch_size, len(ks)]
    """
    ranking_size = batch_sys_sorted_labels.size(1)
    need_padding = max(ks) > ranking_size
    usable_ks = [k for k in ks if k <= ranking_size] if need_padding else ks
    cutoff = max(usable_ks)
    # 0-based column indices of the requested (usable) cutoffs.
    pick = torch.from_numpy(np.asarray(usable_ks) - 1)
    binary_rels = torch.clamp(batch_sys_sorted_labels[:, 0:cutoff], min=0, max=1)
    cum_rels = torch.cumsum(binary_rels, dim=1)
    if gpu:
        positions = torch.arange(cutoff).type(torch.cuda.FloatTensor).expand_as(cum_rels) + 1.0
    else:
        positions = torch.arange(cutoff).expand_as(cum_rels) + 1.0
    rankwise_precision = cum_rels / positions
    p_at_ks = rankwise_precision[:, pick]
    if not need_padding:
        return p_at_ks
    padded = torch.zeros(batch_sys_sorted_labels.size(0), len(ks))
    padded[:, 0:len(usable_ks)] = p_at_ks
    return padded
""" Average Precision """
def torch_ap_at_k(batch_sys_sorted_labels, batch_ideal_sorted_labels, k=None, gpu=False):
    """AP (average precision) at cutoff k.

    :param batch_sys_sorted_labels: [batch_size, ranking_size] labels sorted
        by the system's predicted scores in descending order
    :param batch_ideal_sorted_labels: [batch_size, ranking_size] ideally
        sorted labels
    :param k: cutoff value (capped at the ranking size)
    :param gpu: place the rank-position tensor on GPU when True
    :return: [batch_size, 1]
    """
    cutoff = min(batch_sys_sorted_labels.size(1), k)
    binary_rels = torch.clamp(batch_sys_sorted_labels[:, 0:cutoff], min=0, max=1)
    cum_rels = torch.cumsum(binary_rels, dim=1)
    if gpu:
        positions = torch.arange(cutoff).type(torch.cuda.FloatTensor).expand_as(cum_rels) + 1.0
    else:
        positions = torch.arange(cutoff).expand_as(cum_rels) + 1.0
    rankwise_precision = cum_rels / positions
    # Only ranks holding a relevant document contribute to the AP sum.
    contrib = torch.cumsum(rankwise_precision * binary_rels, dim=1)
    # NOTE: the denominator is the cumulative sum of the raw (graded) ideal
    # labels, not a binarized count — preserved from the original.
    ideal_cum = torch.cumsum(batch_ideal_sorted_labels, dim=1)
    rankwise_ap = contrib / ideal_cum[:, 0:cutoff]
    return rankwise_ap[:, cutoff - 1:cutoff]
def torch_ap_at_ks(batch_sys_sorted_labels, batch_ideal_sorted_labels, ks=None, gpu=False):
    """AP (average precision) at several cutoffs.

    Cutoffs exceeding the ranking size are reported as 0 (zero padding),
    matching the original behavior.

    :param batch_sys_sorted_labels: [batch_size, ranking_size] labels sorted
        by the system's predicted scores in descending order
    :param batch_ideal_sorted_labels: [batch_size, ranking_size] ideally
        sorted labels
    :param ks: iterable of cutoff values
    :param gpu: place the rank-position tensor on GPU when True
    :return: [batch_size, len(ks)]
    """
    ranking_size = batch_sys_sorted_labels.size(1)
    need_padding = max(ks) > ranking_size
    usable_ks = [k for k in ks if k <= ranking_size] if need_padding else ks
    cutoff = max(usable_ks)
    pick = torch.from_numpy(np.asarray(usable_ks) - 1)
    binary_rels = torch.clamp(batch_sys_sorted_labels[:, 0:cutoff], min=0, max=1)
    cum_rels = torch.cumsum(binary_rels, dim=1)
    if gpu:
        positions = torch.arange(cutoff).type(torch.cuda.FloatTensor).expand_as(cum_rels) + 1.0
    else:
        positions = torch.arange(cutoff).expand_as(cum_rels) + 1.0
    rankwise_precision = cum_rels / positions
    # Only ranks holding a relevant document contribute to the AP sum.
    contrib = torch.cumsum(rankwise_precision * binary_rels, dim=1)
    # Denominator keeps the raw (graded) ideal labels, as in the original.
    ideal_cum = torch.cumsum(batch_ideal_sorted_labels, dim=1)
    rankwise_ap = contrib / ideal_cum[:, 0:cutoff]
    ap_at_ks = rankwise_ap[:, pick]
    if not need_padding:
        return ap_at_ks
    padded = torch.zeros(batch_sys_sorted_labels.size(0), len(ks))
    padded[:, 0:len(usable_ks)] = ap_at_ks
    return padded
""" NERR """
def torch_rankwise_err(batch_sorted_labels, max_label=None, k=10, point=True, gpu=False):
    '''
    Expected reciprocal rank (ERR) under the cascade user model.
    :param batch_sorted_labels: [batch_size, ranking_size] labels in some ranked order
    :param max_label: normalizing maximum relevance level (query- or corpus-level)
    :param k: cutoff position
    :param point: if True return only ERR@k, otherwise ERR at every rank up to k
    '''
    assert batch_sorted_labels.size(1) >= k
    assert max_label is not None  # it is either query-level or corpus-level
    top_labels = batch_sorted_labels[:, 0:k]
    # Probability that a document with this graded label satisfies the user.
    satis_probs = (torch.pow(2.0, top_labels) - 1.0) / torch.pow(2.0, max_label)
    unsatis_probs = torch.ones_like(top_labels) - satis_probs
    cum_unsatis_probs = torch.cumprod(unsatis_probs, dim=1)
    if gpu:
        ranks = torch.arange(k).type(torch.cuda.FloatTensor).expand_as(top_labels) + 1.0
    else:
        ranks = torch.arange(k).expand_as(top_labels) + 1.0
    reciprocal_ranks = 1.0 / ranks
    # Probability of reaching each position: cumulative "unsatisfied" products
    # shifted right by one (position 1 is always reached).
    reach_probs = torch.ones_like(reciprocal_ranks)
    reach_probs[:, 1:k] = cum_unsatis_probs[:, 0:k - 1]
    expt_satis_ranks = reciprocal_ranks * satis_probs * reach_probs
    if point:  # a specific position
        return torch.sum(expt_satis_ranks, dim=1, keepdim=True)
    return torch.cumsum(expt_satis_ranks, dim=1)
def torch_nerr_at_k(batch_sys_sorted_labels, batch_ideal_sorted_labels, k=None, gpu=False, label_type=LABEL_TYPE.MultiLabel):
    '''Normalized ERR at a single cutoff: system ERR@k divided by ideal ERR@k.'''
    cutoff = min(batch_sys_sorted_labels.size(1), k)
    if LABEL_TYPE.MultiLabel != label_type:
        raise NotImplementedError
    max_label = torch.max(batch_ideal_sorted_labels)
    sys_err_at_k = torch_rankwise_err(batch_sys_sorted_labels, max_label=max_label, k=cutoff, point=True, gpu=gpu)
    ideal_err_at_k = torch_rankwise_err(batch_ideal_sorted_labels, max_label=max_label, k=cutoff, point=True, gpu=gpu)
    return sys_err_at_k / ideal_err_at_k
def torch_nerr_at_ks(batch_sys_sorted_labels, batch_ideal_sorted_labels, ks=None, gpu=False, label_type=LABEL_TYPE.MultiLabel):
    '''
    Normalized ERR at several cutoff values.
    :param batch_sys_sorted_labels: [batch_size, ranking_size] standard labels sorted by predicted scores (descending)
    :param ks: iterable of cutoff positions
    :return: [batch_size, len(ks)]; cutoffs beyond the ranking size are zero-padded
    '''
    ranking_size = batch_sys_sorted_labels.size(1)
    need_padding = max(ks) > ranking_size
    used_ks = [k for k in ks if k <= ranking_size] if need_padding else ks
    max_label = torch.max(batch_ideal_sorted_labels)
    top_k = max(used_ks)
    cutoff_inds = torch.from_numpy(np.asarray(used_ks) - 1)
    if LABEL_TYPE.MultiLabel != label_type:
        raise NotImplementedError
    sys_rankwise_err = torch_rankwise_err(batch_sys_sorted_labels, max_label=max_label, k=top_k, point=False, gpu=gpu)
    ideal_rankwise_err = torch_rankwise_err(batch_ideal_sorted_labels, max_label=max_label, k=top_k, point=False, gpu=gpu)
    nerr_at_ks = (sys_rankwise_err / ideal_rankwise_err)[:, cutoff_inds]
    if not need_padding:
        return nerr_at_ks
    padded = torch.zeros(batch_sys_sorted_labels.size(0), len(ks))
    padded[:, 0:len(used_ks)] = nerr_at_ks
    return padded
""" nDCG """
def torch_dcg_at_k(batch_sorted_labels, cutoff=None, label_type=LABEL_TYPE.MultiLabel, gpu=False):
    '''
    Exponential-gain DCG at a single cutoff (this variant places stronger
    emphasis on retrieving highly relevant documents).
    :param batch_sorted_labels: [batch_size, ranking_size] ranked labels (standard, or predicted by a system)
    :param cutoff: cutoff position; None means the whole list
    :param label_type: multi-level relevance, or listwise int-values (e.g., MQ2007-list)
    :return: [batch_size, 1] cumulative gain at the cutoff
    '''
    if cutoff is None:  # use the whole list
        cutoff = batch_sorted_labels.size(1)
    if LABEL_TYPE.MultiLabel == label_type:  # the common multi-level-label case
        gains = torch.pow(2.0, batch_sorted_labels[:, 0:cutoff]) - 1.0
    elif LABEL_TYPE.Permutation == label_type:  # listwise case: relevance is (n - rank_position)
        gains = batch_sorted_labels[:, 0:cutoff]
    else:
        raise NotImplementedError
    if gpu:
        discounts = torch.log2(torch.arange(cutoff).type(torch.cuda.FloatTensor).expand_as(gains) + 2.0)
    else:
        discounts = torch.log2(torch.arange(cutoff).expand_as(gains) + 2.0)
    return torch.sum(gains / discounts, dim=1, keepdim=True)
def torch_dcg_at_ks(batch_sorted_labels, max_cutoff, label_type=LABEL_TYPE.MultiLabel, gpu=False):
    '''
    Exponential-gain DCG at every rank position up to max_cutoff.
    :param batch_sorted_labels: [batch_size, ranking_size] ranked labels (standard, or predicted by a system)
    :param max_cutoff: the maximum cutoff value
    :return: [batch_size, max_cutoff] cumulative gain at each rank position
    '''
    if LABEL_TYPE.MultiLabel == label_type:  # the common multi-level-label case
        gains = torch.pow(2.0, batch_sorted_labels[:, 0:max_cutoff]) - 1.0
    elif LABEL_TYPE.Permutation == label_type:  # listwise case: relevance is (n - rank_position)
        gains = batch_sorted_labels[:, 0:max_cutoff]
    else:
        raise NotImplementedError
    if gpu:
        discounts = torch.log2(torch.arange(max_cutoff).type(torch.cuda.FloatTensor).expand_as(gains) + 2.0)
    else:
        discounts = torch.log2(torch.arange(max_cutoff).expand_as(gains) + 2.0)
    return torch.cumsum(gains / discounts, dim=1)  # DCG w.r.t. each position
def torch_nDCG_at_k(batch_sys_sorted_labels, batch_ideal_sorted_labels, k=None, gpu=False, label_type=LABEL_TYPE.MultiLabel):
    '''Normalized DCG at a single cutoff: system DCG@k over ideal DCG@k.'''
    sys_dcg = torch_dcg_at_k(batch_sys_sorted_labels, cutoff=k, label_type=label_type, gpu=gpu)
    ideal_dcg = torch_dcg_at_k(batch_ideal_sorted_labels, cutoff=k, label_type=label_type, gpu=gpu)
    return sys_dcg / ideal_dcg
def torch_nDCG_at_ks(batch_sys_sorted_labels, batch_ideal_sorted_labels, ks=None, gpu=False, label_type=LABEL_TYPE.MultiLabel):
    '''Normalized DCG at several cutoffs; cutoffs beyond the ranking size are zero-padded.'''
    ranking_size = batch_sys_sorted_labels.size(1)
    need_padding = ranking_size < max(ks)
    used_ks = [k for k in ks if k <= ranking_size] if need_padding else ks
    cutoff_inds = torch.from_numpy(np.asarray(used_ks) - 1)
    top_k = max(used_ks)
    # Cumulative gains, picked out at the requested rank positions.
    sys_dcg_at_ks = torch_dcg_at_ks(batch_sys_sorted_labels, max_cutoff=top_k, label_type=label_type, gpu=gpu)[:, cutoff_inds]
    ideal_dcg_at_ks = torch_dcg_at_ks(batch_ideal_sorted_labels, max_cutoff=top_k, label_type=label_type, gpu=gpu)[:, cutoff_inds]
    ndcg_at_ks = sys_dcg_at_ks / ideal_dcg_at_ks
    if not need_padding:
        return ndcg_at_ks
    padded = torch.zeros(batch_sys_sorted_labels.size(0), len(ks))
    padded[:, 0:len(used_ks)] = ndcg_at_ks
    return padded
""" Kendall'tau Coefficient """
def torch_kendall_tau(sys_ranking, natural_ascending_as_reference=True):
    '''
    Kendall's tau without tie handling:
    tau = 1.0 - 2*S(pi, delta) / (N(N-1)/2), cf. 2006-Automatic Evaluation of
    Information Ordering: Kendall's Tau. Implemented by counting inversions
    against a natural ascending (or descending) reference order, normalized
    by n(n-1)/2; the underlying assumption is that the reference order is the
    ideal one.
    Fix: removed a stray debug print left in the descending branch (it
    re-announced the discordant count on every call).
    :param sys_ranking: 1-D tensor whose entries can be predicted values, labels, etc.
    :return: scalar tensor in [-1, 1]
    '''
    assert 1 == len(sys_ranking.size())  # one-dimension vector
    ranking_size = sys_ranking.size(0)
    pair_diffs = sys_ranking.view(-1, 1) - sys_ranking.view(1, -1)
    # NOTE(review): clamp only fully binarizes pair differences >= 1 in
    # magnitude (fine for integer labels); fractional score gaps contribute
    # fractionally -- confirm callers pass integer-valued rankings.
    if natural_ascending_as_reference:
        # Pairs (i < j) with sys_ranking[i] > sys_ranking[j] are discordant.
        bi_pair_diffs = torch.clamp(pair_diffs, min=0, max=1)
        bi_pair_diffs_triu1 = torch.triu(bi_pair_diffs, diagonal=1)
        tau = 1.0 - 4 * torch.sum(bi_pair_diffs_triu1) / (ranking_size * (ranking_size - 1))
    else:  # i.e., natural descending as the reference
        bi_pair_diffs = torch.clamp(pair_diffs, min=-1, max=0)
        bi_pair_diffs_triu1 = torch.triu(bi_pair_diffs, diagonal=1)
        tau = 1.0 + 4 * torch.sum(bi_pair_diffs_triu1) / (ranking_size * (ranking_size - 1))
    return tau
def rele_gain(rele_level, gain_base=2.0):
    '''Exponential gain for a graded relevance level: gain_base**level - 1.'''
    return np.power(gain_base, rele_level) - 1.0
def np_metric_at_ks(ranker=None, test_Qs=None, ks=[1, 5, 10], label_type=LABEL_TYPE.MultiLabel, max_rele_level=None, gpu=False, device=None):
    '''
    Average nDCG / nERR / AP / precision at the given cutoffs over a set of
    test queries, plus the per-query metric vectors.
    There is no check based on the assumption (say light_filtering() is called)
    that each test instance Q includes at least k (k=max(ks)) documents, and at
    least one relevant document. Or there will be errors.
    Fixes: metric helpers were called with keyword names that do not exist in
    their signatures (sys_sorted_labels=... vs batch_sys_sorted_labels), which
    raised TypeError; and the 1-D per-query label vectors are now reshaped to
    [1, ranking_size] as the batch-oriented torch_* helpers expect.
    '''
    cnt = 0
    sum_ndcg_at_ks = torch.zeros(len(ks))
    sum_err_at_ks = torch.zeros(len(ks))
    sum_ap_at_ks = torch.zeros(len(ks))
    sum_p_at_ks = torch.zeros(len(ks))
    list_ndcg_at_ks_per_q = []
    list_err_at_ks_per_q = []
    list_ap_at_ks_per_q = []
    list_p_at_ks_per_q = []
    for entry in test_Qs:
        # Remove the size-1 dim=0 that the loader adds to the label vector.
        tor_test_ranking, tor_test_std_label_vec = entry[1], torch.squeeze(entry[2], dim=0)
        if gpu:
            tor_rele_pred = ranker.predict(tor_test_ranking.to(device))
            tor_rele_pred = torch.squeeze(tor_rele_pred)
            tor_rele_pred = tor_rele_pred.cpu()
        else:
            tor_rele_pred = ranker.predict(tor_test_ranking)
            tor_rele_pred = torch.squeeze(tor_rele_pred)
        _, tor_sorted_inds = torch.sort(tor_rele_pred, descending=True)
        # Re-add the batch dimension for the batch-oriented metric functions.
        batch_sys_sorted_labels = tor_test_std_label_vec[tor_sorted_inds].view(1, -1)
        ideal_sorted, _ = torch.sort(tor_test_std_label_vec, descending=True)
        batch_ideal_sorted_labels = ideal_sorted.view(1, -1)
        # All helpers are called positionally to match their actual signatures.
        ndcg_at_ks_per_query = torch.squeeze(torch_nDCG_at_ks(batch_sys_sorted_labels, batch_ideal_sorted_labels, ks=ks, label_type=label_type), dim=0)
        sum_ndcg_at_ks = torch.add(sum_ndcg_at_ks, ndcg_at_ks_per_query)
        list_ndcg_at_ks_per_q.append(ndcg_at_ks_per_query.numpy())
        err_at_ks_per_query = torch.squeeze(torch_nerr_at_ks(batch_sys_sorted_labels, batch_ideal_sorted_labels, ks=ks, label_type=label_type), dim=0)
        sum_err_at_ks = torch.add(sum_err_at_ks, err_at_ks_per_query)
        list_err_at_ks_per_q.append(err_at_ks_per_query.numpy())
        ap_at_ks_per_query = torch.squeeze(torch_ap_at_ks(batch_sys_sorted_labels, batch_ideal_sorted_labels, ks=ks), dim=0)
        sum_ap_at_ks = torch.add(sum_ap_at_ks, ap_at_ks_per_query)
        list_ap_at_ks_per_q.append(ap_at_ks_per_query.numpy())
        p_at_ks_per_query = torch.squeeze(torch_precision_at_ks(batch_sys_sorted_labels, ks=ks), dim=0)
        sum_p_at_ks = torch.add(sum_p_at_ks, p_at_ks_per_query)
        list_p_at_ks_per_q.append(p_at_ks_per_query.numpy())
        cnt += 1
    ndcg_at_ks = sum_ndcg_at_ks / cnt
    err_at_ks = sum_err_at_ks / cnt
    ap_at_ks = sum_ap_at_ks / cnt
    p_at_ks = sum_p_at_ks / cnt
    return ndcg_at_ks.numpy(), err_at_ks.numpy(), ap_at_ks.numpy(), p_at_ks.numpy(), list_ndcg_at_ks_per_q, list_err_at_ks_per_q, list_ap_at_ks_per_q, list_p_at_ks_per_q
def np_stable_softmax_e(histogram):
    '''
    Numerically stable softmax over a 1-D histogram.
    Fix: the original used torch-style keywords on NumPy calls
    (np.max/np.sum take `axis`, not `dim`) and tuple-unpacked np.max, which
    raised TypeError at runtime.
    '''
    histogram = np.asarray(histogram, dtype=np.float64)
    # Shift by the maximum so exp() never overflows.
    max_v = np.max(histogram, axis=0)
    hist = histogram - max_v
    hist_exped = np.exp(hist)
    probs = np.divide(hist_exped, np.sum(hist_exped, axis=0))
    return probs
def eval_cost_mat_group(sorted_std_labels, group_div_cost=np.e, margin_to_non_rele=100.0, rele_gain_base=4.0):
    '''
    Pairwise cost matrix over ranked labels: zero on the diagonal, a fixed
    cost for equally-labeled pairs, and otherwise the absolute gain
    difference, plus a margin whenever a non-relevant (0) label is involved.
    '''
    size_ranking = len(sorted_std_labels)
    cost_mat = np.zeros(shape=(size_ranking, size_ranking), dtype=np.float64)
    for i in range(size_ranking):
        level_i = sorted_std_labels[i]
        for j in range(size_ranking):
            if i == j:
                continue  # diagonal stays at the zero-initialized value
            level_j = sorted_std_labels[j]
            if level_i == level_j:
                cost_mat[i, j] = group_div_cost
                continue
            cost = np.abs(rele_gain(level_i, gain_base=rele_gain_base) - rele_gain(level_j, gain_base=rele_gain_base))
            if 0 == level_i or 0 == level_j:
                cost += margin_to_non_rele
            cost_mat[i, j] = cost
    return cost_mat
| [
"gengshen.research@gmail.com"
] | gengshen.research@gmail.com |
a48250482c63d19a2f0d213540103fe86d917bf1 | 946d156e778c71602d1059d1bdac0b388557f6d8 | /plot_c.py | 5d4ff51334854a22ea09ca8ed990926f3449b15e | [] | no_license | spohngellert-o/Pytorch-EWC-Replication | a57a34428770b09b8e36338eab9b6e6527394297 | 0f17676ad13ceb83c8e3362cda38ac0eac310405 | refs/heads/main | 2023-04-08T13:30:06.155748 | 2021-04-23T14:36:42 | 2021-04-23T14:36:42 | 359,985,686 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,674 | py | import numpy as np
import torch
import torchvision
import matplotlib.pyplot as plt
from time import time
from torchvision import datasets, transforms
from torch import nn, optim
from tqdm import tqdm
import pickle
import pdb
from utils import *
class Net(nn.Module):
    """Fully connected MNIST classifier: flatten to 784 features, one
    784->100 layer plus six 100->100 hidden layers (all ReLU), then a
    100->10 logit head."""

    def __init__(self):
        super(Net, self).__init__()
        layers = [nn.Flatten(), nn.Linear(784, 100), nn.ReLU()]
        for _ in range(6):
            layers.append(nn.Linear(100, 100))
            layers.append(nn.ReLU())
        layers.append(nn.Linear(100, 10))
        self.model = nn.Sequential(*layers)

    def forward(self, x):
        # Cast to float so byte/int image tensors are accepted unchanged.
        return self.model(x.float())
def train(size):
    # Sequentially train one network on two scrambled-MNIST tasks, using the
    # EWC criterion to penalize drift from previous-task weights, then pickle
    # the accumulated Fisher information to '<size>_c.pickle'.
    # NOTE(review): requires CUDA and a pre-downloaded ./files MNIST copy
    # (datasets.MNIST is called without download=True).
    # NOTE(review): the source formatting was mangled; statement nesting below
    # is reconstructed from semantics (per-task Fisher snapshot inside the
    # task loop, pickle after both tasks) -- confirm against the original.
    net = Net()
    net.cuda()
    pnets = []   # frozen network copies, one per finished task (EWC anchors)
    fishers = [] # Fisher information matrices, one per finished task
    train_loaders = []
    perts = [ScrambleSize(size) for i in range(2)]  # one pixel scramble per task
    for i, pert in enumerate(perts):
        print("Starting task {}".format(i + 1))
        cur_trset = datasets.MNIST('./files', train=True, transform=transforms.Compose([
            pert,
            transforms.ToTensor()
        ]))
        cur_tset = datasets.MNIST('./files', train=False, transform=transforms.Compose([
            pert,
            transforms.ToTensor()
        ]))
        train_loaders.append(torch.utils.data.DataLoader(
            cur_trset,
            batch_size=5,
            num_workers=2,
            drop_last=False,
            shuffle=True))
        test_loader = torch.utils.data.DataLoader(
            cur_tset,
            batch_size=50,
            num_workers=2,
            drop_last=False)
        # EWC loss anchored to earlier tasks' Fishers/weights (lambda = 0.01);
        # for the first task both lists are empty, so this is plain loss.
        criterion = EWC(fishers, pnets, 0.01)
        optimizer = torch.optim.SGD(net.parameters(), lr=10 ** -3)
        for epoch in range(100):  # loop over the dataset multiple times
            print("Starting epoch {}".format(epoch))
            # Evaluate on the current task's test split before each epoch.
            net.eval()
            tot = 0
            correct = 0
            with torch.no_grad():
                for item in test_loader:
                    ims = item[0].cuda()
                    labs = item[1].cuda()
                    preds = net(ims)
                    preds = torch.sigmoid(preds).cpu().detach().numpy()
                    right = preds.argmax(axis=1) == labs.cpu().detach().numpy()
                    tot += len(right)
                    correct += sum(right)
            print("Current test acc: {}%".format(round(100 * correct / tot, 3)))
            net.train()
            for batch_n, data in enumerate(train_loaders[-1]):
                # get the inputs; data is a list of [inputs, labels]
                rl = 0.0  # NOTE(review): reset per batch, so the print below
                          # reports a single batch's loss divided by 1000.
                inputs, labels = data
                inputs = inputs.cuda()
                labels = labels.cuda()
                optimizer.zero_grad()
                outputs = net(inputs)
                loss = criterion(outputs, labels, net)
                rl += loss.item()
                loss.backward()
                optimizer.step()
                if batch_n % 200 == 199:
                    print("Loss: {}".format(round(rl / 1000, 6)))
        # Task finished: snapshot its Fisher information and a frozen copy of
        # the weights for the EWC penalty of the next task.
        fishers.append(get_fisher(net, cur_trset))
        copy_net = Net()
        copy_net.load_state_dict(net.state_dict())
        pnets.append(copy_net.cuda())
    with open('{}_c.pickle'.format(size), 'wb') as f:
        pickle.dump(fishers, f)
if __name__ == '__main__':
    # Run the two scrambled-MNIST experiments back to back. Guarded so that
    # importing this module (e.g. to reuse Net/train) no longer kicks off
    # hours of GPU training as an import side effect; `python plot_c.py`
    # behaves exactly as before.
    for size in [26, 8]:
        print("---- Starting size {} ----".format(size))
        train(size)
| [
"spohngellert.o@northeastern.edu"
] | spohngellert.o@northeastern.edu |
a5d16bd864bf44e50730265aadbb84335c3ebed6 | e6d9e395c9062237e7679f0e3ca1265b84820eee | /Ch5BitMnpltn/57oddEvnSwp.py | 2e31a39ebd2bcce463b86c14af24174b24aa4606 | [] | no_license | kalyan-ch/SltnsCrkdCpp | ffad8998b1f4319ee0d092fb927d64b50c559b66 | 09bb8a416b3306d67dcb6dfcd6e18518306fe39b | refs/heads/master | 2021-09-08T08:21:59.530786 | 2018-03-08T16:37:17 | 2018-03-08T16:37:17 | 106,737,537 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 133 | py |
def oddEvenSwap(num):
    """Swap each pair of adjacent bits of a 32-bit value: odd-position bits
    move right by one, even-position bits move left by one (CtCI 5.7).

    Fix: the original masks (0xaaaaaa / 0x555555) covered only 24 bits and
    silently dropped bits 24-31; full 32-bit masks handle the whole word.
    Results are unchanged for every input below 2**24.
    """
    return ((num & 0xAAAAAAAA) >> 1) | ((num & 0x55555555) << 1)
if __name__ == '__main__':
    # Fix: `print oddEvenSwap(658)` was a Python-2-only print statement
    # (SyntaxError on Python 3); the call form works on both.
    print(oddEvenSwap(658))
"kalyan.chaval123@gmail.com"
] | kalyan.chaval123@gmail.com |
# Minimal packaging script for the skeleton project: prefer setuptools and
# fall back to the stdlib distutils when it is unavailable.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

# Package metadata, handed verbatim to setup() below.
config = dict(
    description='Base project',
    author='José Raddaoui Marín',
    url='',
    download_url='',
    author_email='raddaouimarin@gmail.com',
    version='0.1',
    install_requires=['nose'],
    packages=['NAME'],
    scripts=['bin/script.py'],
    name='base-project',
)

setup(**config)
| [
"raddaouimarin@gmail.com"
] | raddaouimarin@gmail.com |
e2afe9a20ddf90de814e09676cddd9998e3aee9f | d89080dc044b4e707fdddf3c757a972612712adf | /StoryList/migrations/0003_auto_20210626_1338.py | ebad15ae34298f41080e6ea88a4440be2e6fa127 | [] | no_license | magbatofrenzy/storylist | d72dd5acab195bed10ca6689ac76100c4aee8288 | 5c2e4045e22297060f3a740531936695d3edae85 | refs/heads/main | 2023-06-09T10:02:16.791571 | 2021-06-27T22:39:28 | 2021-06-27T22:39:28 | 373,690,640 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 793 | py | # Generated by Django 3.1.6 on 2021-06-26 13:38
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated by Django 3.1.6 (makemigrations): renames fields on the
    ``list`` and ``reader`` models and adds ``reader.nName``.

    NOTE(review): machine-generated -- once applied anywhere, prefer a
    follow-up migration over hand-editing these operations.
    """

    # Must run after the previous migration of the StoryList app.
    dependencies = [
        ('StoryList', '0002_auto_20210616_1538'),
    ]

    operations = [
        # list.nread -> list.Newreader
        migrations.RenameField(
            model_name='list',
            old_name='nread',
            new_name='Newreader',
        ),
        # reader.Gender -> reader.nAge
        # NOTE(review): old/new names suggest different meanings -- confirm
        # this rename (and the next one) is intentional.
        migrations.RenameField(
            model_name='reader',
            old_name='Gender',
            new_name='nAge',
        ),
        # reader.Name -> reader.nGender (see note above)
        migrations.RenameField(
            model_name='reader',
            old_name='Name',
            new_name='nGender',
        ),
        # New text column; existing rows get the empty string.
        migrations.AddField(
            model_name='reader',
            name='nName',
            field=models.TextField(default=''),
        ),
    ]
| [
"magbatofrenzycamille@gmail.com"
] | magbatofrenzycamille@gmail.com |
3ec3d5b1352761d5f27a18618daf8406ada59f0f | 240ccb555b946d9506caabc775f46ea560f4f12b | /assignment_1/crawl.py | 5a9ef293658bc2866ecd2b80ab16c7ad212f8e1e | [] | no_license | eling22/data_science_class | 86350555b5b9e9383aeaac68533b94d94e339645 | 731e9039c35231e61a1c05e95a1ddbdc6c9b9bf4 | refs/heads/master | 2020-03-28T21:16:44.462634 | 2019-07-06T04:45:37 | 2019-07-06T04:45:37 | 149,143,619 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,467 | py | import requests
import time
import tool
from bs4 import BeautifulSoup
def main():
    # Crawl PTT Beauty board index pages 1992-2340 (inclusive), writing one
    # "date,title,url" line per article to all_articles.txt and duplicating
    # articles marked 爆 ("explosive" popularity) into all_popular.txt.
    # NOTE(review): source formatting was mangled; loop nesting and the
    # placement of time.sleep() are reconstructed from semantics -- confirm
    # against the original indentation.
    t_start = time.time()
    # Article pages known to 404; skipped outright below.
    not_exist_list = ["https://www.ptt.cc/bbs/Beauty/M.1490936972.A.60D.html","https://www.ptt.cc/bbs/Beauty/M.1494776135.A.50A.html","https://www.ptt.cc/bbs/Beauty/M.1503194519.A.F4C.html","https://www.ptt.cc/bbs/Beauty/M.1504936945.A.313.html","https://www.ptt.cc/bbs/Beauty/M.1505973115.A.732.html","https://www.ptt.cc/bbs/Beauty/M.1507620395.A.27E.html","https://www.ptt.cc/bbs/Beauty/M.1510829546.A.D83.html","https://www.ptt.cc/bbs/Beauty/M.1512141143.A.D31.html"]
    file_ar = tool.OpenWriteFile("all_articles.txt")
    file_po = tool.OpenWriteFile("all_popular.txt")
    #1992-2340(include)
    for ptt_index in range(1992,2341):
        print ( str(ptt_index-1991) + "/" +str(2341-1992+1) )  # progress: n/349
        url = "https://www.ptt.cc/bbs/Beauty/index"+ str(ptt_index) +".html"
        content = requests.get(url).text
        soup = BeautifulSoup(content,'html.parser')
        ptt_list = soup.find_all(class_="r-ent")  # one block per listed article
        for part in ptt_list :
            author = part.find(class_="author")
            if author.string == "-" :  # placeholder row for a deleted article
                continue
            date = part.find(class_="date").string.replace('/','').strip()  # "M/DD" -> "MDD"
            # Drop articles outside the crawl window on the two boundary
            # index pages (month-day compared numerically).
            if ptt_index == 1992 and int(date) > 1000 :
                continue
            if ptt_index == 2340 and int(date) < 1000 :
                continue
            title = part.find(class_="title").find('a')
            if title == None :
                continue
            if title.string == None :
                # Title holds a Cloudflare-obfuscated email: decode the
                # data-cfemail hex payload (first byte is the XOR key) and
                # splice the address back over the "[email protected]" stub.
                fp = title.find('span').get('data-cfemail')
                r = int(fp[:2],16)
                email = ''.join([chr(int(fp[i:i+2], 16) ^ r) for i in range(2, len(fp), 2)])
                # NOTE(review): rstrip() strips a *character set*, not a
                # suffix string -- presumably works for these titles, but
                # verify it cannot eat trailing title characters.
                title = title.text.rstrip("[email protected]").rstrip().rstrip("[email protected]") + email
            else :
                title = title.string
            if title[1:3] == "公告" :  # skip board announcements
                continue
            url = "https://www.ptt.cc" + part.find('a').get('href')
            if url in not_exist_list :
                continue
            file_ar.write(date + ',' + title + ',' + url + '\n')
            if part.find(class_="nrec").string == "爆" :
                file_po.write(date + ',' + title + ',' + url + '\n')
        time.sleep(0.5)  # politeness delay between index pages
    file_ar.close()
    file_po.close()
    t_end = time.time()
    print ("It cost %f sec" % (t_end - t_start))
| [
"lai.eling22@gmail.com"
] | lai.eling22@gmail.com |
ef1b7a1250f227e8a591ba8de8b063fb4241c940 | 62ff5fd135bf295172c5f78a0e4c2f1573dbf8d7 | /.venv_heat/bin/distro3 | 31f4f5842c2ee612bc68203f9bea0cb2af06024f | [] | no_license | javershal/heat | 3e6664a7b7c0d2c0e6f04d774231876f5f8529d4 | 699b7aa7bd81518ef4d3a23324d0e4080c77a68a | refs/heads/master | 2023-02-04T04:43:30.698702 | 2020-12-26T20:27:58 | 2020-12-26T20:27:58 | 324,629,507 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 236 | #!/home/jacob/projects/heat/.venv_heat/bin/python
# -*- coding: utf-8 -*-
# Console-script shim generated by pip/setuptools for the `distro` package:
# normalizes argv[0] (dropping the "-script.pyw"/".exe" suffix that Windows
# launchers append) and delegates to distro.main().
# NOTE(review): generated file -- regenerate via pip rather than editing.
import re
import sys
from distro import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"jza22092@gmail.com"
] | jza22092@gmail.com | |
d0d4e162ef7d61ee8837d30178c97b450bb885cf | f2f892a85c3e1bed1affb3fea1d03da96537565a | /http/__init__.py | 71d7a1bba4ad007a428e04a8a1bbfd7550897ae4 | [
"MIT"
] | permissive | williamty/guahao1 | 08a954dd2d414ba2155b86957b87c9f9dba2cbe7 | 577eff04bd619d458480aa6a33098338342b379a | refs/heads/master | 2021-01-21T16:43:35.695787 | 2014-07-09T06:01:33 | 2014-07-09T06:01:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 25 | py | __author__ = 'zhijieliu'
| [
"meajhlp@gmail.com"
] | meajhlp@gmail.com |
10495a50f058e69c7dd0be9fe19abbebbbbbb848 | c9d42d98b0e30cf6f097b0a1b128eb5e9a679c32 | /tictactoe.py | a3c7083cf22c1d58de6e6746bb46d4c3a5a3f058 | [] | no_license | KrishnaSreeraj/TicTacToe | fa5a8bc288bc1ea844499d8f3a36ded46128728e | e7d4e1d598e6728601f776f2631b40f1984f044e | refs/heads/master | 2022-04-20T22:59:06.693157 | 2020-04-23T07:29:28 | 2020-04-23T07:29:28 | 258,126,740 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,008 | py | # -*- coding: utf-8 -*-
"""
Created on Sat Apr 18 18:35:58 2020
@author: acer
"""
import numpy
# Shared 3x3 game state: '-' marks an empty cell; all helpers below mutate
# or read this module-level board.
board = numpy.array([['-','-','-'],['-','-','-'],['-','-','-']])
p1s = 'X'  # player 1 symbol
p2s = 'O'  # player 2 symbol
print(numpy.matrix(board))  # show the empty board before play starts
def entry(symbol):
    """Keep prompting for a 1-3 row/column pair until an empty in-range cell
    is chosen, then place `symbol` there on the shared board."""
    while True:
        row = int(input("Enter the row : "))
        col = int(input("Enter the column : "))
        in_range = 0 < row < 4 and 0 < col < 4
        if in_range and board[row - 1][col - 1] == '-':
            board[row - 1][col - 1] = symbol
            return
        print("Invalid entry !! Re-enter row and column !!")
def check_row(symbol):
    """Return True (announcing the win) if any row holds three `symbol` marks."""
    for r in range(3):
        matches = sum(1 for c in range(3) if board[r][c] == symbol)
        if matches == 3:
            print(symbol, 'won')
            return True
    return False
def check_col(symbol):
    """Return True (announcing the win) if any column holds three `symbol` marks."""
    for c in range(3):
        matches = sum(1 for r in range(3) if board[r][c] == symbol)
        if matches == 3:
            print(symbol, 'won')
            return True
    return False
def diagonal(symbol):
    """Return True (announcing the win) if `symbol` fills either diagonal.

    Fix: the anti-diagonal check previously tested cells (0,1), (1,1), (0,2)
    instead of the actual anti-diagonal (0,2), (1,1), (2,0), so that win was
    never detected (and some non-wins were)."""
    # Main diagonal: (0,0), (1,1), (2,2)
    if board[0][0] == board[1][1] and board[1][1] == board[2][2] and board[1][1] == symbol:
        print(symbol, 'won')
        return True
    # Anti-diagonal: (0,2), (1,1), (2,0)
    if board[0][2] == board[1][1] and board[1][1] == board[2][0] and board[1][1] == symbol:
        print(symbol, 'won')
        return True
    return False
def won(symbol):
    """True if `symbol` completed any row, column, or diagonal (short-circuits
    in that order, announcing the win via the helper that finds it)."""
    return any(check(symbol) for check in (check_row, check_col, diagonal))
def play():
    """Run one interactive game: up to 9 alternating turns (X first), stopping
    early on a win; prints "Draw" if the board fills with no winner.

    Fix: the original re-evaluated won() for both players after the loop,
    which re-printed the "<symbol> won" message and rescanned the board; a
    flag now records whether the loop ended with a winner."""
    winner_found = False
    for turn in range(9):
        if turn % 2 == 0:
            print("X turn : ")
            symbol = p1s
        else:
            print("O Turn : ")
            symbol = p2s
        entry(symbol)
        print(numpy.matrix(board))
        if won(symbol):
            winner_found = True
            break
    if not winner_found:
        print("Draw")
if __name__ == '__main__':
    # Start the interactive game only when run as a script, so the module can
    # be imported (e.g. to test the helpers) without blocking on input().
    play()
| [
"noreply@github.com"
] | KrishnaSreeraj.noreply@github.com |
52cea66c3660ae01964ef82ef14f9ad6d96b4ebf | cf19b96eb917eca1208627edc27c698d249e859b | /build/ball_chaser/cmake/ball_chaser-genmsg-context.py | 8e93c919234312cf0e50d657db23bd143beb78cd | [] | no_license | Shubham-Tandon/ROBOND_WhereAmI | 82865453acd7f345e4890770728de8b66cbaf183 | c18fa676b7cdf3cc2e7f0381631b276b3da48b65 | refs/heads/master | 2022-07-31T04:00:14.208578 | 2020-05-22T21:29:55 | 2020-05-22T21:29:55 | 265,763,953 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 528 | py | # generated from genmsg/cmake/pkg-genmsg.context.in
# Auto-generated genmsg context for the ball_chaser ROS package (template:
# pkg-genmsg.context.in); consumed by catkin message generation at build time.
# NOTE(review): machine-written by CMake -- regenerate rather than hand-edit.
messages_str = ""
services_str = "/home/workspace/ROBOND_WhereAmI/src/ball_chaser/srv/DriveToTarget.srv"
pkg_name = "ball_chaser"
dependencies_str = "std_msgs"
langs = "gencpp;geneus;genlisp;gennodejs;genpy"
dep_include_paths_str = "std_msgs;/opt/ros/kinetic/share/std_msgs/cmake/../msg"
PYTHON_EXECUTABLE = "/usr/bin/python2"
package_has_static_sources = '' == 'TRUE'  # evaluates to False: no static sources
genmsg_check_deps_script = "/opt/ros/kinetic/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py"
| [
"standon@ncsu.edu"
] | standon@ncsu.edu |
2a570dac5eae17f271138addb65d53d8a6c2b94c | 1642c2ab1a57fd24aefc6b4e073981c36376d6c5 | /binary_search_tree/bst_lecture.py | 83396ab0f3ccd684eb5b5846301d0fb2d1f4f613 | [] | no_license | asherkobin/Data-Structures | e8b6fe1d6cf4501b299333448505065deb85f35d | 50c07920087f1ced7218cbd8c020379e77604028 | refs/heads/master | 2022-11-17T09:58:42.506649 | 2020-07-14T02:41:35 | 2020-07-14T02:41:35 | 263,117,554 | 0 | 0 | null | 2020-07-14T02:41:36 | 2020-05-11T17:56:42 | null | UTF-8 | Python | false | false | 1,298 | py | # LEFT child value less than parent value
# RIGHT child value is greater (or equal if dups are allowed) than parent value
class BSTNode:
    """Binary search tree node: values less than self.value live in the left
    subtree, values greater or equal (duplicates, if allowed) in the right.

    Fix: insert(), get_max() and for_each() were unimplemented `pass` stubs;
    contains() is unchanged."""

    def __init__(self, value):
        self.value = value
        self.left = None
        self.right = None

    # Insert the given value into the tree
    def insert(self, value):
        if value < self.value:
            if self.left is None:
                self.left = BSTNode(value)
            else:
                self.left.insert(value)
        else:  # duplicates go right, matching the class invariant above
            if self.right is None:
                self.right = BSTNode(value)
            else:
                self.right.insert(value)

    # Return True if the tree contains the value
    # False if it does not
    def contains(self, target):
        if self.value == target:
            return True
        if target < self.value:
            if self.left is None:
                return False
            return self.left.contains(target)
        if target > self.value:
            if self.right is None:
                return False
            return self.right.contains(target)

    # Return the maximum value found in the tree
    def get_max(self):
        # The maximum is the right-most node.
        node = self
        while node.right is not None:
            node = node.right
        return node.value

    # Call the function `fn` on the value of each node
    def for_each(self, fn):
        fn(self.value)  # pre-order traversal
        if self.left is not None:
            self.left.for_each(fn)
        if self.right is not None:
            self.right.for_each(fn)
# Lecture demo: wire a small tree by hand (rather than via insert()) and
# probe membership at several nodes -- each print should show True.
bst = BSTNode(20)
bst.left = BSTNode(10)
bst.right = BSTNode(34)
bst.left.left = BSTNode(4)
bst.left.right = BSTNode(19)
bst.right.left = BSTNode(21)
bst.right.right = BSTNode(100)
print(bst.contains(20))
print(bst.contains(10))
print(bst.contains(34))
print(bst.contains(4))
print(bst.contains(19))
print(bst.contains(21))
print(bst.contains(100)) | [
"asherkobin@gmail.com"
] | asherkobin@gmail.com |
898dd1834baff47904063ee551f79d48ed66f821 | e444d4ff921b38597cb42bb15e7f4ccaf1336923 | /scripts/check_membership.py | 5c47ede1547066eb2fdf808fea0b6961161a3ddc | [
"Apache-2.0"
] | permissive | syspimp/puppet | 37d1b79ff87def8f7594cfb7e77f6ed30d933998 | 4747baf3cbe9a63c87942e5ffad4746b7ba245dc | refs/heads/master | 2021-01-13T02:16:22.598405 | 2014-06-19T17:49:32 | 2014-06-19T17:49:32 | 2,885,883 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,854 | py | #!/usr/bin/env python
import sys
import ldap
import adconfig
from pprint import pprint
from optparse import OptionParser
# CLI definition: -t/--target names the account to look up; -u (default) and
# -c choose whether it is treated as a user or a computer object.
parser = OptionParser()
parser.add_option("-t", "--target", type="string", dest="TARGET", help="Target to search for", default=False)
parser.add_option("-u", "--user", action="store_true", dest="USER", help="Search for a user (default)", default=True)
parser.add_option("-c", "--computer", action="store_true", dest="COMPUTER", help="Search for a computer", default=False)
def perform_search(Filter=None,target=None,Base=adconfig.Base,SearchAttrs=adconfig.SearchAttrs):
    # Run an LDAPv3 subtree search against the AD server configured in
    # adconfig and print the requested attributes of the first matching entry.
    # NOTE(review): Python 2 code (print statements, `except Exception,e`).
    # NOTE(review): Base/SearchAttrs defaults are captured once at import
    # time, so check_user()/check_computer() mutating adconfig.SearchAttrs
    # changes what is *printed* below (adconfig.SearchAttrs is re-read) but
    # NOT the attribute list passed to l.search() -- confirm this is intended.
    try:
        l = ldap.initialize(adconfig.Server)
        Scope = ldap.SCOPE_SUBTREE
        l.protocol_version = 3
        l.set_option(ldap.OPT_REFERRALS, 0)  # do not chase AD referrals
        l.simple_bind_s(adconfig.username, adconfig.secret)
        r = l.search(Base, Scope, Filter, SearchAttrs)
        Type,user = l.result(r,60)  # wait up to 60s for the async search
        Name,Attrs = user[0]  # first matching entry only
        for attr in adconfig.SearchAttrs:
            if attr in Attrs:
                print Attrs[attr]
    except Exception,e:
        print "Exception!"
        for i in e:
            print i
def check_user(target=None):
    # Look up a user object by sAMAccountName; request displayName/memberOf.
    user_filter = "(&(objectClass=user)(sAMAccountName=" + target + "))"
    adconfig.SearchAttrs = ["displayName", "memberOf"]
    perform_search(user_filter, target)
def check_computer(target=None):
    # Computer accounts carry a trailing "$" in sAMAccountName.
    computer_filter = "(&(objectClass=computer)(sAMAccountName=" + target + "$))"
    adconfig.SearchAttrs = ["memberOf"]
    perform_search(computer_filter, target)
def mainloop():
    # Dispatch on the parsed CLI flags: -c searches a computer object,
    # otherwise -u (the default) searches a user; -t is required either way.
    # NOTE(review): Python 2 print statements below.
    (options, args) = parser.parse_args()
    if options.COMPUTER and options.TARGET:
        check_computer(options.TARGET)
    elif options.USER and options.TARGET:
        check_user(options.TARGET)
    else:
        print "I need a target! Example: thisscript.py -u -t dtaylor"
        print "Nothing to do!"
if __name__=='__main__':
    # Top-level guard: run the CLI and report any unhandled error.
    try:
        mainloop()
    except Exception as e:
        # NOTE(review): iterating an exception (`for i in e`) is a
        # Python-2-ism that prints the exception's args one per line.
        print "Exception!"
        for i in e:
            print i
        sys.exit()
| [
"root@jump2.maskedadmins.com"
] | root@jump2.maskedadmins.com |
df1df6a26e3f3cc1f589f8df742440b2e90f4901 | 8af2d242582f981be3a983c81c3e1da4728f8823 | /python-code/45-Jump Game-II.py | a29b19c4a05ad91038e1edab3f77be352e179a49 | [] | no_license | Yingminzhou/leetcode | eff49184bfa61f305293da1522aad857fa829857 | ba264d6d218afefc0af385b036840ae451b4d714 | refs/heads/master | 2020-03-22T11:44:37.195126 | 2019-02-28T00:22:42 | 2019-02-28T00:22:42 | 139,993,163 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 863 | py | class Solution(object):
def jump(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
if len(nums) <= 1:
return 0
next_jump = [0]
check_dict = dict()
return self.jumpN(nums, next_jump, check_dict)
def jumpN(self, nums, next_jump, check_dict):
now_jump = list()
for now_idx in next_jump:
check_dict[now_idx] = 1
cur_num = nums[now_idx]
if cur_num + now_idx >= len(nums) - 1:
return 1
for i in range(1, cur_num+1):
next = now_idx + i
if next not in check_dict:
now_jump.append(next)
if len(now_jump) == 0:
return 0
return 1 + self.jumpN(nums, now_jump, check_dict)
# Smoke test: [0, 1, 2] starts on a zero, so the end is unreachable -> 0.
# Fix: `print s.jump(nums)` was a Python-2-only print statement
# (SyntaxError on Python 3); the call form works on both.
s = Solution()
nums = [0,1,2]
print(s.jump(nums))
"yingminzhou@sohu-inc.com"
] | yingminzhou@sohu-inc.com |
443843671835ee8dd936fe399ad7da7b414c55c5 | cdcb940e39c9459f737d1796a365c62bdef4d707 | /tests/test_dependency.py | 74409c753870bb7bc5537c97b48cf45f783f7504 | [
"MIT"
] | permissive | dizballanze/dependency | 3ff8526ad267768b2ea67f88d50aec91d867aa21 | 158bab9004aedf69a40ff9bd2cd5735c28933ddb | refs/heads/master | 2020-05-23T14:37:15.592583 | 2019-05-15T13:09:57 | 2019-05-15T13:09:57 | 186,808,235 | 0 | 0 | null | 2019-05-15T10:57:29 | 2019-05-15T10:57:28 | null | UTF-8 | Python | false | false | 3,240 | py | import asyncio
import pytest
from aiomisc import Service, entrypoint
from aiomisc_dependency import freeze, consumer, inject, dependency
async def test_register_dependency():
    """Dependencies may depend on each other by parameter name and are
    resolved for consumers after freeze().

    Fix: the final line was a bare comparison (`await consume() == ...`) that
    discarded its result, so this test could never fail."""
    @dependency
    async def foo():
        return 'Foo'

    @dependency
    async def spam(foo):
        return foo * 3

    freeze()

    @consumer
    async def consume(spam):
        return spam

    assert await consume() == 'FooFooFoo'
async def test_inject_dependencies():
    """inject() resolves the named dependencies as attributes onto an
    arbitrary object."""
    @dependency
    async def foo():
        return 'Foo'

    @dependency
    async def bar():
        return 'Bar'

    class Target:
        ...

    target = Target()
    await inject(target, ('foo', 'bar'))
    assert target.foo == 'Foo'
    assert target.bar == 'Bar'
def test_dependency_injection():
    """Generator dependencies declared via __dependencies__ are injected into
    a Service when the entrypoint starts."""
    @dependency
    async def foo():
        yield 'Foo'

    @dependency
    async def bar():
        yield 'Bar'

    class TestService(Service):
        __dependencies__ = ('foo', 'bar')

        async def start(self):
            ...

    service = TestService()
    with entrypoint(service):
        assert service.foo == 'Foo'
        assert service.bar == 'Bar'
def test_missed_dependency_exception():
    """Starting a service whose declared dependency was never registered
    raises RuntimeError."""
    class TestService(Service):
        __dependencies__ = ('spam',)

        async def start(self):
            ...

    with pytest.raises(RuntimeError):
        with entrypoint(TestService()):
            ...
def test_graceful_dependency_shutdown():
    """The code after `yield` in a generator dependency runs on entrypoint
    shutdown (here it empties the shared resource list)."""
    @dependency
    async def spam():
        resource = ['spam'] * 3
        yield resource
        resource.clear()  # teardown: executed when the entrypoint exits

    class TestService(Service):
        __dependencies__ = ('spam',)

        async def start(self):
            ...

    service = TestService()
    resource = None
    with entrypoint(service):
        resource = service.spam
        assert resource == ['spam'] * 3
    assert resource == []
def test_set_dependency_in_init():
    """A value passed to the Service constructor overrides the registered
    dependency of the same name."""
    @dependency
    async def answer():
        yield 777

    class TestService(Service):
        __dependencies__ = ('answer',)

        async def start(self):
            ...

    service = TestService(answer=42)
    with entrypoint(service):
        assert service.answer == 42
def test_coroutine_function_dependency():
    """Plain coroutine dependencies (return instead of yield) are awaited and
    injected just like generator dependencies."""
    @dependency
    async def foo():
        await asyncio.sleep(0.1)
        return 'Foo'

    @dependency
    async def bar():
        return 'Bar'

    class TestService(Service):
        __dependencies__ = ('foo', 'bar',)

        async def start(self):
            ...

    service = TestService()
    with entrypoint(service):
        assert service.foo == 'Foo'
        assert service.bar == 'Bar'
def test_dependencies_for_depndencies():
    """A dependency may itself require another registered dependency."""
    # NOTE(review): "depndencies" in the function name is a typo for
    # "dependencies"; kept as-is so the collected test id stays stable.
    @dependency
    async def foo():
        return 'Foo'

    # ``spam`` receives ``foo`` by parameter-name lookup.
    @dependency
    async def spam(foo):
        return foo * 3

    class Consumer(Service):
        __dependencies__ = ('spam',)

        async def start(self):
            pass

    svc = Consumer()
    with entrypoint(svc):
        assert svc.spam == 'FooFooFoo'
def test_loop_dependency():
    """The entrypoint's event loop is injectable via the ``loop`` parameter name."""
    captured = None

    # ``need_loop`` and its ``loop`` parameter are injection keys — unchanged.
    @dependency
    def need_loop(loop):
        nonlocal captured
        captured = loop

    with entrypoint() as loop:
        assert loop == captured
| [
"dizballanze@yandex-team.ru"
] | dizballanze@yandex-team.ru |
6aa1ce107d8e3d7a2c5cbf05e78d16249bcbd0de | 403d1f4acae8edc4ac24bc4830c6f1dababfecaa | /attacks/__init__.py | 623e8b5c5f30b44c67efec8a5cc289c666ddcce8 | [] | no_license | coleary9/RockBrawl | b5539b01670f5aad3c86f245df346b74bc37bdfa | 1bc7d983481a62ee178ba4774f445e37b16d3b0c | refs/heads/master | 2021-01-01T15:55:36.569836 | 2014-04-09T17:49:00 | 2014-04-09T17:49:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 213 | py | # BitCity Studios:
# Cameron O'Leary <coleary9@jhu.edu>
# Steve Griffin <sgriff27@jhu.edu>
# Jeremy Dolinko <j.dolinko@gmail.com>
# Jonathan Riveria <jriver21@jhu.edu>
# Michael Shavit <shavitmichael@gmail.com>
| [
"coleary9@jhu.edu"
] | coleary9@jhu.edu |
13f3589b2c19e818b9382fc185423b21aba9ddab | 3ed14ff2300f5f52d06f55458bffea921820e451 | /empty_intersection.py | 56c9f3c7ee7dfecd71f07e48696d9669ed2a6116 | [] | no_license | LSparkzwz/HLL-and-MinHash-BML-tests | c352cafc21891c297f30ef9b8650288ad8f7eea4 | 6316fe2824bba427c95a95e6ede2bebebf707415 | refs/heads/master | 2023-02-14T17:41:51.300031 | 2021-01-07T14:35:21 | 2021-01-07T14:35:21 | 272,054,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 242,700 | py | data1 = ["418265688","418265689","418265690","418265691","418265692","418265693","418265694","418265695","418265696","418265697","418265698","418265699","418265700","418265701","418265702","418265703","418265704","418265705","418265706","418265707","418265708","418265709","418265710","418265711","418265712","418265713","418265714","418265715","418265716","418265717","418265718","418265719","418265720","418265721","418265722","418265723","418265724","418265725","418265726","418265727","418265728","418265729","418265730","418265731","418265732","418265733","418265734","418265735","418265736","418265737","418265738","418265739","418265740","418265741","418265742","418265743","418265744","418265745","418265746","418265747","418265748","418265749","418265750","418265751","418265752","418265753","418265754","418265755","418265756","418265757","418265758","418265759","418265760","418265761","418265762","418265763","418265764","418265765","418265766","418265767","418265768","418265769","418265770","418265771","418265772","418265773","418265774","418265775","418265776","418265777","418265778","418265779","418265780","418265781","418265782","418265783","418265784","418265785","418265786","418265787","418265788","418265789","418265790","418265791","418265792","418265793","418265794","418265795","418265796","418265797","418265798","418265799","418265800","418265801","418265802","418265803","418265804","418265805","418265806","418265807","418265808","418265809","418265810","418265811","418265812","418
265813","418265814","418265815","418265816","418265817","418265818","418265819","418265820","418265821","418265822","418265823","418265824","418265825","418265826","418265827","418265828","418265829","418265830","418265831","418265832","418265833","418265834","418265835","418265836","418265837","418265838","418265839","418265840","418265841","418265842","418265843","418265844","418265845","418265846","418265847","418265848","418265849","418265850","418265851","418265852","418265853","418265854","418265855","418265856","418265857","418265858","418265859","418265860","418265861","418265862","418265863","418265864","418265865","418265866","418265867","418265868","418265869","418265870","418265871","418265872","418265873","418265874","418265875","418265876","418265877","418265878","418265879","418265880","418265881","418265882","418265883","418265884","418265885","418265886","418265887","418265888","418265889","418265890","418265891","418265892","418265893","418265894","418265895","418265896","418265897","418265898","418265899","418265900","418265901","418265902","418265903","418265904","418265905","418265906","418265907","418265908","418265909","418265910","418265911","418265912","418265913","418265914","418265915","418265916","418265917","418265918","418265919","418265920","418265921","418265922","418265923","418265924","418265925","418265926","418265927","418265928","418265929","418265930","418265931","418265932","418265933","418265934","418265935","418265936","418265937","418265938","418265939","418265940","418265941","418265942","418265943","418265944","418265945","418265946","418265947","418265948","418265949","418265950","418265951","418265952","418265953","418265954","418265955","418265956","418265957","418265958","418265959","418265960","418265961","418265962","418265963","418265964","418265965","418265966","418265967","418265968","418265969","418265970","418265971","418265972","418265973","418265974","418265975","418265976","418265977","418265978","418265979",
"418265980","418265981","418265982","418265983","418265984","418265985","418265986","418265987","418265988","418265989","418265990","418265991","418265992","418265993","418265994","418265995","418265996","418265997","418265998","418265999","418266000","418266001","418266002","418266003","418266004","418266005","418266006","418266007","418266008","418266009","418266010","418266011","418266012","418266013","418266014","418266015","418266016","418266017","418266018","418266019","418266020","418266021","418266022","418266023","418266024","418266025","418266026","418266027","418266028","418266029","418266030","418266031","418266032","418266033","418266034","418266035","418266036","418266037","418266038","418266039","418266040","418266041","418266042","418266043","418266044","418266045","418266046","418266047","418266048","418266049","418266050","418266051","418266052","418266053","418266054","418266055","418266056","418266057","418266058","418266059","418266060","418266061","418266062","418266063","418266064","418266065","418266066","418266067","418266068","418266069","418266070","418266071","418266072","418266073","418266074","418266075","418266076","418266077","418266078","418266079","418266080","418266081","418266082","418266083","418266084","418266085","418266086","418266087","418266088","418266089","418266090","418266091","418266092","418266093","418266094","418266095","418266096","418266097","418266098","418266099","418266100","418266101","418266102","418266103","418266104","418266105","418266106","418266107","418266108","418266109","418266110","418266111","418266112","418266113","418266114","418266115","418266116","418266117","418266118","418266119","418266120","418266121","418266122","418266123","418266124","418266125","418266126","418266127","418266128","418266129","418266130","418266131","418266132","418266133","418266134","418266135","418266136","418266137","418266138","418266139","418266140","418266141","418266142","418266143","418266144","418266145","4182661
46","418266147","418266148","418266149","418266150","418266151","418266152","418266153","418266154","418266155","418266156","418266157","418266158","418266159","418266160","418266161","418266162","418266163","418266164","418266165","418266166","418266167","418266168","418266169","418266170","418266171","418266172","418266173","418266174","418266175","418266176","418266177","418266178","418266179","418266180","418266181","418266182","418266183","418266184","418266185","418266186","418266187","418266188","418266189","418266190","418266191","418266192","418266193","418266194","418266195","418266196","418266197","418266198","418266199","418266200","418266201","418266202","418266203","418266204","418266205","418266206","418266207","418266208","418266209","418266210","418266211","418266212","418266213","418266214","418266215","418266216","418266217","418266218","418266219","418266220","418266221","418266222","418266223","418266224","418266225","418266226","418266227","418266228","418266229","418266230","418266231","418266232","418266233","418266234","418266235","418266236","418266237","418266238","418266239","418266240","418266241","418266242","418266243","418266244","418266245","418266246","418266247","418266248","418266249","418266250","418266251","418266252","418266253","418266254","418266255","418266256","418266257","418266258","418266259","418266260","418266261","418266262","418266263","418266264","418266265","418266266","418266267","418266268","418266269","418266270","418266271","418266272","418266273","418266274","418266275","418266276","418266277","418266278","418266279","418266280","418266281","418266282","418266283","418266284","418266285","418266286","418266287","418266288","418266289","418266290","418266291","418266292","418266293","418266294","418266295","418266296","418266297","418266298","418266299","418266300","418266301","418266302","418266303","418266304","418266305","418266306","418266307","418266308","418266309","418266310","418266311","418266312","418
266313","418266314","418266315","418266316","418266317","418266318","418266319","418266320","418266321","418266322","418266323","418266324","418266325","418266326","418266327","418266328","418266329","418266330","418266331","418266332","418266333","418266334","418266335","418266336","418266337","418266338","418266339","418266340","418266341","418266342","418266343","418266344","418266345","418266346","418266347","418266348","418266349","418266350","418266351","418266352","418266353","418266354","418266355","418266356","418266357","418266358","418266359","418266360","418266361","418266362","418266363","418266364","418266365","418266366","418266367","418266368","418266369","418266370","418266371","418266372","418266373","418266374","418266375","418266376","418266377","418266378","418266379","418266380","418266381","418266382","418266383","418266384","418266385","418266386","418266387","418266388","418266389","418266390","418266391","418266392","418266393","418266394","418266395","418266396","418266397","418266398","418266399","418266400","418266401","418266402","418266403","418266404","418266405","418266406","418266407","418266408","418266409","418266410","418266411","418266412","418266413","418266414","418266415","418266416","418266417","418266418","418266419","418266420","418266421","418266422","418266423","418266424","418266425","418266426","418266427","418266428","418266429","418266430","418266431","418266432","418266433","418266434","418266435","418266436","418266437","418266438","418266439","418266440","418266441","418266442","418266443","418266444","418266445","418266446","418266447","418266448","418266449","418266450","418266451","418266452","418266453","418266454","418266455","418266456","418266457","418266458","418266459","418266460","418266461","418266462","418266463","418266464","418266465","418266466","418266467","418266468","418266469","418266470","418266471","418266472","418266473","418266474","418266475","418266476","418266477","418266478","418266479",
"418266480","418266481","418266482","418266483","418266484","418266485","418266486","418266487","418266488","418266489","418266490","418266491","418266492","418266493","418266494","418266495","418266496","418266497","418266498","418266499","418266500","418266501","418266502","418266503","418266504","418266505","418266506","418266507","418266508","418266509","418266510","418266511","418266512","418266513","418266514","418266515","418266516","418266517","418266518","418266519","418266520","418266521","418266522","418266523","418266524","418266525","418266526","418266527","418266528","418266529","418266530","418266531","418266532","418266533","418266534","418266535","418266536","418266537","418266538","418266539","418266540","418266541","418266542","418266543","418266544","418266545","418266546","418266547","418266548","418266549","418266550","418266551","418266552","418266553","418266554","418266555","418266556","418266557","418266558","418266559","418266560","418266561","418266562","418266563","418266564","418266565","418266566","418266567","418266568","418266569","418266570","418266571","418266572","418266573","418266574","418266575","418266576","418266577","418266578","418266579","418266580","418266581","418266582","418266583","418266584","418266585","418266586","418266587","418266588","418266589","418266590","418266591","418266592","418266593","418266594","418266595","418266596","418266597","418266598","418266599","418266600","418266601","418266602","418266603","418266604","418266605","418266606","418266607","418266608","418266609","418266610","418266611","418266612","418266613","418266614","418266615","418266616","418266617","418266618","418266619","418266620","418266621","418266622","418266623","418266624","418266625","418266626","418266627","418266628","418266629","418266630","418266631","418266632","418266633","418266634","418266635","418266636","418266637","418266638","418266639","418266640","418266641","418266642","418266643","418266644","418266645","4182666
46","418266647","418266648","418266649","418266650","418266651","418266652","418266653","418266654","418266655","418266656","418266657","418266658","418266659","418266660","418266661","418266662","418266663","418266664","418266665","418266666","418266667","418266668","418266669","418266670","418266671","418266672","418266673","418266674","418266675","418266676","418266677","418266678","418266679","418266680","418266681","418266682","418266683","418266684","418266685","418266686","418266687","418266688","418266689","418266690","418266691","418266692","418266693","418266694","418266695","418266696","418266697","418266698","418266699","418266700","418266701","418266702","418266703","418266704","418266705","418266706","418266707","418266708","418266709","418266710","418266711","418266712","418266713","418266714","418266715","418266716","418266717","418266718","418266719","418266720","418266721","418266722","418266723","418266724","418266725","418266726","418266727","418266728","418266729","418266730","418266731","418266732","418266733","418266734","418266735","418266736","418266737","418266738","418266739","418266740","418266741","418266742","418266743","418266744","418266745","418266746","418266747","418266748","418266749","418266750","418266751","418266752","418266753","418266754","418266755","418266756","418266757","418266758","418266759","418266760","418266761","418266762","418266763","418266764","418266765","418266766","418266767","418266768","418266769","418266770","418266771","418266772","418266773","418266774","418266775","418266776","418266777","418266778","418266779","418266780","418266781","418266782","418266783","418266784","418266785","418266786","418266787","418266788","418266789","418266790","418266791","418266792","418266793","418266794","418266795","418266796","418266797","418266798","418266799","418266800","418266801","418266802","418266803","418266804","418266805","418266806","418266807","418266808","418266809","418266810","418266811","418266812","418
266813","418266814","418266815","418266816","418266817","418266818","418266819","418266820","418266821","418266822","418266823","418266824","418266825","418266826","418266827","418266828","418266829","418266830","418266831","418266832","418266833","418266834","418266835","418266836","418266837","418266838","418266839","418266840","418266841","418266842","418266843","418266844","418266845","418266846","418266847","418266848","418266849","418266850","418266851","418266852","418266853","418266854","418266855","418266856","418266857","418266858","418266859","418266860","418266861","418266862","418266863","418266864","418266865","418266866","418266867","418266868","418266869","418266870","418266871","418266872","418266873","418266874","418266875","418266876","418266877","418266878","418266879","418266880","418266881","418266882","418266883","418266884","418266885","418266886","418266887","418266888","418266889","418266890","418266891","418266892","418266893","418266894","418266895","418266896","418266897","418266898","418266899","418266900","418266901","418266902","418266903","418266904","418266905","418266906","418266907","418266908","418266909","418266910","418266911","418266912","418266913","418266914","418266915","418266916","418266917","418266918","418266919","418266920","418266921","418266922","418266923","418266924","418266925","418266926","418266927","418266928","418266929","418266930","418266931","418266932","418266933","418266934","418266935","418266936","418266937","418266938","418266939","418266940","418266941","418266942","418266943","418266944","418266945","418266946","418266947","418266948","418266949","418266950","418266951","418266952","418266953","418266954","418266955","418266956","418266957","418266958","418266959","418266960","418266961","418266962","418266963","418266964","418266965","418266966","418266967","418266968","418266969","418266970","418266971","418266972","418266973","418266974","418266975","418266976","418266977","418266978","418266979",
"418266980","418266981","418266982","418266983","418266984","418266985","418266986","418266987","418266988","418266989","418266990","418266991","418266992","418266993","418266994","418266995","418266996","418266997","418266998","418266999","418267000","418267001","418267002","418267003","418267004","418267005","418267006","418267007","418267008","418267009","418267010","418267011","418267012","418267013","418267014","418267015","418267016","418267017","418267018","418267019","418267020","418267021","418267022","418267023","418267024","418267025","418267026","418267027","418267028","418267029","418267030","418267031","418267032","418267033","418267034","418267035","418267036","418267037","418267038","418267039","418267040","418267041","418267042","418267043","418267044","418267045","418267046","418267047","418267048","418267049","418267050","418267051","418267052","418267053","418267054","418267055","418267056","418267057","418267058","418267059","418267060","418267061","418267062","418267063","418267064","418267065","418267066","418267067","418267068","418267069","418267070","418267071","418267072","418267073","418267074","418267075","418267076","418267077","418267078","418267079","418267080","418267081","418267082","418267083","418267084","418267085","418267086","418267087","418267088","418267089","418267090","418267091","418267092","418267093","418267094","418267095","418267096","418267097","418267098","418267099","418267100","418267101","418267102","418267103","418267104","418267105","418267106","418267107","418267108","418267109","418267110","418267111","418267112","418267113","418267114","418267115","418267116","418267117","418267118","418267119","418267120","418267121","418267122","418267123","418267124","418267125","418267126","418267127","418267128","418267129","418267130","418267131","418267132","418267133","418267134","418267135","418267136","418267137","418267138","418267139","418267140","418267141","418267142","418267143","418267144","418267145","4182671
46","418267147","418267148","418267149","418267150","418267151","418267152","418267153","418267154","418267155","418267156","418267157","418267158","418267159","418267160","418267161","418267162","418267163","418267164","418267165","418267166","418267167","418267168","418267169","418267170","418267171","418267172","418267173","418267174","418267175","418267176","418267177","418267178","418267179","418267180","418267181","418267182","418267183","418267184","418267185","418267186","418267187","418267188","418267189","418267190","418267191","418267192","418267193","418267194","418267195","418267196","418267197","418267198","418267199","418267200","418267201","418267202","418267203","418267204","418267205","418267206","418267207","418267208","418267209","418267210","418267211","418267212","418267213","418267214","418267215","418267216","418267217","418267218","418267219","418267220","418267221","418267222","418267223","418267224","418267225","418267226","418267227","418267228","418267229","418267230","418267231","418267232","418267233","418267234","418267235","418267236","418267237","418267238","418267239","418267240","418267241","418267242","418267243","418267244","418267245","418267246","418267247","418267248","418267249","418267250","418267251","418267252","418267253","418267254","418267255","418267256","418267257","418267258","418267259","418267260","418267261","418267262","418267263","418267264","418267265","418267266","418267267","418267268","418267269","418267270","418267271","418267272","418267273","418267274","418267275","418267276","418267277","418267278","418267279","418267280","418267281","418267282","418267283","418267284","418267285","418267286","418267287","418267288","418267289","418267290","418267291","418267292","418267293","418267294","418267295","418267296","418267297","418267298","418267299","418267300","418267301","418267302","418267303","418267304","418267305","418267306","418267307","418267308","418267309","418267310","418267311","418267312","418
267313","418267314","418267315","418267316","418267317","418267318","418267319","418267320","418267321","418267322","418267323","418267324","418267325","418267326","418267327","418267328","418267329","418267330","418267331","418267332","418267333","418267334","418267335","418267336","418267337","418267338","418267339","418267340","418267341","418267342","418267343","418267344","418267345","418267346","418267347","418267348","418267349","418267350","418267351","418267352","418267353","418267354","418267355","418267356","418267357","418267358","418267359","418267360","418267361","418267362","418267363","418267364","418267365","418267366","418267367","418267368","418267369","418267370","418267371","418267372","418267373","418267374","418267375","418267376","418267377","418267378","418267379","418267380","418267381","418267382","418267383","418267384","418267385","418267386","418267387","418267388","418267389","418267390","418267391","418267392","418267393","418267394","418267395","418267396","418267397","418267398","418267399","418267400","418267401","418267402","418267403","418267404","418267405","418267406","418267407","418267408","418267409","418267410","418267411","418267412","418267413","418267414","418267415","418267416","418267417","418267418","418267419","418267420","418267421","418267422","418267423","418267424","418267425","418267426","418267427","418267428","418267429","418267430","418267431","418267432","418267433","418267434","418267435","418267436","418267437","418267438","418267439","418267440","418267441","418267442","418267443","418267444","418267445","418267446","418267447","418267448","418267449","418267450","418267451","418267452","418267453","418267454","418267455","418267456","418267457","418267458","418267459","418267460","418267461","418267462","418267463","418267464","418267465","418267466","418267467","418267468","418267469","418267470","418267471","418267472","418267473","418267474","418267475","418267476","418267477","418267478","418267479",
"418267480","418267481","418267482","418267483","418267484","418267485","418267486","418267487","418267488","418267489","418267490","418267491","418267492","418267493","418267494","418267495","418267496","418267497","418267498","418267499","418267500","418267501","418267502","418267503","418267504","418267505","418267506","418267507","418267508","418267509","418267510","418267511","418267512","418267513","418267514","418267515","418267516","418267517","418267518","418267519","418267520","418267521","418267522","418267523","418267524","418267525","418267526","418267527","418267528","418267529","418267530","418267531","418267532","418267533","418267534","418267535","418267536","418267537","418267538","418267539","418267540","418267541","418267542","418267543","418267544","418267545","418267546","418267547","418267548","418267549","418267550","418267551","418267552","418267553","418267554","418267555","418267556","418267557","418267558","418267559","418267560","418267561","418267562","418267563","418267564","418267565","418267566","418267567","418267568","418267569","418267570","418267571","418267572","418267573","418267574","418267575","418267576","418267577","418267578","418267579","418267580","418267581","418267582","418267583","418267584","418267585","418267586","418267587","418267588","418267589","418267590","418267591","418267592","418267593","418267594","418267595","418267596","418267597","418267598","418267599","418267600","418267601","418267602","418267603","418267604","418267605","418267606","418267607","418267608","418267609","418267610","418267611","418267612","418267613","418267614","418267615","418267616","418267617","418267618","418267619","418267620","418267621","418267622","418267623","418267624","418267625","418267626","418267627","418267628","418267629","418267630","418267631","418267632","418267633","418267634","418267635","418267636","418267637","418267638","418267639","418267640","418267641","418267642","418267643","418267644","418267645","4182676
46","418267647","418267648","418267649","418267650","418267651","418267652","418267653","418267654","418267655","418267656","418267657","418267658","418267659","418267660","418267661","418267662","418267663","418267664","418267665","418267666","418267667","418267668","418267669","418267670","418267671","418267672","418267673","418267674","418267675","418267676","418267677","418267678","418267679","418267680","418267681","418267682","418267683","418267684","418267685","418267686","418267687","418267688","418267689","418267690","418267691","418267692","418267693","418267694","418267695","418267696","418267697","418267698","418267699","418267700","418267701","418267702","418267703","418267704","418267705","418267706","418267707","418267708","418267709","418267710","418267711","418267712","418267713","418267714","418267715","418267716","418267717","418267718","418267719","418267720","418267721","418267722","418267723","418267724","418267725","418267726","418267727","418267728","418267729","418267730","418267731","418267732","418267733","418267734","418267735","418267736","418267737","418267738","418267739","418267740","418267741","418267742","418267743","418267744","418267745","418267746","418267747","418267748","418267749","418267750","418267751","418267752","418267753","418267754","418267755","418267756","418267757","418267758","418267759","418267760","418267761","418267762","418267763","418267764","418267765","418267766","418267767","418267768","418267769","418267770","418267771","418267772","418267773","418267774","418267775","418267776","418267777","418267778","418267779","418267780","418267781","418267782","418267783","418267784","418267785","418267786","418267787","418267788","418267789","418267790","418267791","418267792","418267793","418267794","418267795","418267796","418267797","418267798","418267799","418267800","418267801","418267802","418267803","418267804","418267805","418267806","418267807","418267808","418267809","418267810","418267811","418267812","418
267813","418267814","418267815","418267816","418267817","418267818","418267819","418267820","418267821","418267822","418267823","418267824","418267825","418267826","418267827","418267828","418267829","418267830","418267831","418267832","418267833","418267834","418267835","418267836","418267837","418267838","418267839","418267840","418267841","418267842","418267843","418267844","418267845","418267846","418267847","418267848","418267849","418267850","418267851","418267852","418267853","418267854","418267855","418267856","418267857","418267858","418267859","418267860","418267861","418267862","418267863","418267864","418267865","418267866","418267867","418267868","418267869","418267870","418267871","418267872","418267873","418267874","418267875","418267876","418267877","418267878","418267879","418267880","418267881","418267882","418267883","418267884","418267885","418267886","418267887","418267888","418267889","418267890","418267891","418267892","418267893","418267894","418267895","418267896","418267897","418267898","418267899","418267900","418267901","418267902","418267903","418267904","418267905","418267906","418267907","418267908","418267909","418267910","418267911","418267912","418267913","418267914","418267915","418267916","418267917","418267918","418267919","418267920","418267921","418267922","418267923","418267924","418267925","418267926","418267927","418267928","418267929","418267930","418267931","418267932","418267933","418267934","418267935","418267936","418267937","418267938","418267939","418267940","418267941","418267942","418267943","418267944","418267945","418267946","418267947","418267948","418267949","418267950","418267951","418267952","418267953","418267954","418267955","418267956","418267957","418267958","418267959","418267960","418267961","418267962","418267963","418267964","418267965","418267966","418267967","418267968","418267969","418267970","418267971","418267972","418267973","418267974","418267975","418267976","418267977","418267978","418267979",
"418267980","418267981","418267982","418267983","418267984","418267985","418267986","418267987","418267988","418267989","418267990","418267991","418267992","418267993","418267994","418267995","418267996","418267997","418267998","418267999","418268000","418268001","418268002","418268003","418268004","418268005","418268006","418268007","418268008","418268009","418268010","418268011","418268012","418268013","418268014","418268015","418268016","418268017","418268018","418268019","418268020","418268021","418268022","418268023","418268024","418268025","418268026","418268027","418268028","418268029","418268030","418268031","418268032","418268033","418268034","418268035","418268036","418268037","418268038","418268039","418268040","418268041","418268042","418268043","418268044","418268045","418268046","418268047","418268048","418268049","418268050","418268051","418268052","418268053","418268054","418268055","418268056","418268057","418268058","418268059","418268060","418268061","418268062","418268063","418268064","418268065","418268066","418268067","418268068","418268069","418268070","418268071","418268072","418268073","418268074","418268075","418268076","418268077","418268078","418268079","418268080","418268081","418268082","418268083","418268084","418268085","418268086","418268087","418268088","418268089","418268090","418268091","418268092","418268093","418268094","418268095","418268096","418268097","418268098","418268099","418268100","418268101","418268102","418268103","418268104","418268105","418268106","418268107","418268108","418268109","418268110","418268111","418268112","418268113","418268114","418268115","418268116","418268117","418268118","418268119","418268120","418268121","418268122","418268123","418268124","418268125","418268126","418268127","418268128","418268129","418268130","418268131","418268132","418268133","418268134","418268135","418268136","418268137","418268138","418268139","418268140","418268141","418268142","418268143","418268144","418268145","4182681
46","418268147","418268148","418268149","418268150","418268151","418268152","418268153","418268154","418268155","418268156","418268157","418268158","418268159","418268160","418268161","418268162","418268163","418268164","418268165","418268166","418268167","418268168","418268169","418268170","418268171","418268172","418268173","418268174","418268175","418268176","418268177","418268178","418268179","418268180","418268181","418268182","418268183","418268184","418268185","418268186","418268187","418268188","418268189","418268190","418268191","418268192","418268193","418268194","418268195","418268196","418268197","418268198","418268199","418268200","418268201","418268202","418268203","418268204","418268205","418268206","418268207","418268208","418268209","418268210","418268211","418268212","418268213","418268214","418268215","418268216","418268217","418268218","418268219","418268220","418268221","418268222","418268223","418268224","418268225","418268226","418268227","418268228","418268229","418268230","418268231","418268232","418268233","418268234","418268235","418268236","418268237","418268238","418268239","418268240","418268241","418268242","418268243","418268244","418268245","418268246","418268247","418268248","418268249","418268250","418268251","418268252","418268253","418268254","418268255","418268256","418268257","418268258","418268259","418268260","418268261","418268262","418268263","418268264","418268265","418268266","418268267","418268268","418268269","418268270","418268271","418268272","418268273","418268274","418268275","418268276","418268277","418268278","418268279","418268280","418268281","418268282","418268283","418268284","418268285","418268286","418268287","418268288","418268289","418268290","418268291","418268292","418268293","418268294","418268295","418268296","418268297","418268298","418268299","418268300","418268301","418268302","418268303","418268304","418268305","418268306","418268307","418268308","418268309","418268310","418268311","418268312","418
268313","418268314","418268315","418268316","418268317","418268318","418268319","418268320","418268321","418268322","418268323","418268324","418268325","418268326","418268327","418268328","418268329","418268330","418268331","418268332","418268333","418268334","418268335","418268336","418268337","418268338","418268339","418268340","418268341","418268342","418268343","418268344","418268345","418268346","418268347","418268348","418268349","418268350","418268351","418268352","418268353","418268354","418268355","418268356","418268357","418268358","418268359","418268360","418268361","418268362","418268363","418268364","418268365","418268366","418268367","418268368","418268369","418268370","418268371","418268372","418268373","418268374","418268375","418268376","418268377","418268378","418268379","418268380","418268381","418268382","418268383","418268384","418268385","418268386","418268387","418268388","418268389","418268390","418268391","418268392","418268393","418268394","418268395","418268396","418268397","418268398","418268399","418268400","418268401","418268402","418268403","418268404","418268405","418268406","418268407","418268408","418268409","418268410","418268411","418268412","418268413","418268414","418268415","418268416","418268417","418268418","418268419","418268420","418268421","418268422","418268423","418268424","418268425","418268426","418268427","418268428","418268429","418268430","418268431","418268432","418268433","418268434","418268435","418268436","418268437","418268438","418268439","418268440","418268441","418268442","418268443","418268444","418268445","418268446","418268447","418268448","418268449","418268450","418268451","418268452","418268453","418268454","418268455","418268456","418268457","418268458","418268459","418268460","418268461","418268462","418268463","418268464","418268465","418268466","418268467","418268468","418268469","418268470","418268471","418268472","418268473","418268474","418268475","418268476","418268477","418268478","418268479",
"418268480","418268481","418268482","418268483","418268484","418268485","418268486","418268487","418268488","418268489","418268490","418268491","418268492","418268493","418268494","418268495","418268496","418268497","418268498","418268499","418268500","418268501","418268502","418268503","418268504","418268505","418268506","418268507","418268508","418268509","418268510","418268511","418268512","418268513","418268514","418268515","418268516","418268517","418268518","418268519","418268520","418268521","418268522","418268523","418268524","418268525","418268526","418268527","418268528","418268529","418268530","418268531","418268532","418268533","418268534","418268535","418268536","418268537","418268538","418268539","418268540","418268541","418268542","418268543","418268544","418268545","418268546","418268547","418268548","418268549","418268550","418268551","418268552","418268553","418268554","418268555","418268556","418268557","418268558","418268559","418268560","418268561","418268562","418268563","418268564","418268565","418268566","418268567","418268568","418268569","418268570","418268571","418268572","418268573","418268574","418268575","418268576","418268577","418268578","418268579","418268580","418268581","418268582","418268583","418268584","418268585","418268586","418268587","418268588","418268589","418268590","418268591","418268592","418268593","418268594","418268595","418268596","418268597","418268598","418268599","418268600","418268601","418268602","418268603","418268604","418268605","418268606","418268607","418268608","418268609","418268610","418268611","418268612","418268613","418268614","418268615","418268616","418268617","418268618","418268619","418268620","418268621","418268622","418268623","418268624","418268625","418268626","418268627","418268628","418268629","418268630","418268631","418268632","418268633","418268634","418268635","418268636","418268637","418268638","418268639","418268640","418268641","418268642","418268643","418268644","418268645","4182686
46","418268647","418268648","418268649","418268650","418268651","418268652","418268653","418268654","418268655","418268656","418268657","418268658","418268659","418268660","418268661","418268662","418268663","418268664","418268665","418268666","418268667","418268668","418268669","418268670","418268671","418268672","418268673","418268674","418268675","418268676","418268677","418268678","418268679","418268680","418268681","418268682","418268683","418268684","418268685","418268686","418268687","418268688","418268689","418268690","418268691","418268692","418268693","418268694","418268695","418268696","418268697","418268698","418268699","418268700","418268701","418268702","418268703","418268704","418268705","418268706","418268707","418268708","418268709","418268710","418268711","418268712","418268713","418268714","418268715","418268716","418268717","418268718","418268719","418268720","418268721","418268722","418268723","418268724","418268725","418268726","418268727","418268728","418268729","418268730","418268731","418268732","418268733","418268734","418268735","418268736","418268737","418268738","418268739","418268740","418268741","418268742","418268743","418268744","418268745","418268746","418268747","418268748","418268749","418268750","418268751","418268752","418268753","418268754","418268755","418268756","418268757","418268758","418268759","418268760","418268761","418268762","418268763","418268764","418268765","418268766","418268767","418268768","418268769","418268770","418268771","418268772","418268773","418268774","418268775","418268776","418268777","418268778","418268779","418268780","418268781","418268782","418268783","418268784","418268785","418268786","418268787","418268788","418268789","418268790","418268791","418268792","418268793","418268794","418268795","418268796","418268797","418268798","418268799","418268800","418268801","418268802","418268803","418268804","418268805","418268806","418268807","418268808","418268809","418268810","418268811","418268812","418
268813","418268814","418268815","418268816","418268817","418268818","418268819","418268820","418268821","418268822","418268823","418268824","418268825","418268826","418268827","418268828","418268829","418268830","418268831","418268832","418268833","418268834","418268835","418268836","418268837","418268838","418268839","418268840","418268841","418268842","418268843","418268844","418268845","418268846","418268847","418268848","418268849","418268850","418268851","418268852","418268853","418268854","418268855","418268856","418268857","418268858","418268859","418268860","418268861","418268862","418268863","418268864","418268865","418268866","418268867","418268868","418268869","418268870","418268871","418268872","418268873","418268874","418268875","418268876","418268877","418268878","418268879","418268880","418268881","418268882","418268883","418268884","418268885","418268886","418268887","418268888","418268889","418268890","418268891","418268892","418268893","418268894","418268895","418268896","418268897","418268898","418268899","418268900","418268901","418268902","418268903","418268904","418268905","418268906","418268907","418268908","418268909","418268910","418268911","418268912","418268913","418268914","418268915","418268916","418268917","418268918","418268919","418268920","418268921","418268922","418268923","418268924","418268925","418268926","418268927","418268928","418268929","418268930","418268931","418268932","418268933","418268934","418268935","418268936","418268937","418268938","418268939","418268940","418268941","418268942","418268943","418268944","418268945","418268946","418268947","418268948","418268949","418268950","418268951","418268952","418268953","418268954","418268955","418268956","418268957","418268958","418268959","418268960","418268961","418268962","418268963","418268964","418268965","418268966","418268967","418268968","418268969","418268970","418268971","418268972","418268973","418268974","418268975","418268976","418268977","418268978","418268979",
"418268980","418268981","418268982","418268983","418268984","418268985","418268986","418268987","418268988","418268989","418268990","418268991","418268992","418268993","418268994","418268995","418268996","418268997","418268998","418268999","418269000","418269001","418269002","418269003","418269004","418269005","418269006","418269007","418269008","418269009","418269010","418269011","418269012","418269013","418269014","418269015","418269016","418269017","418269018","418269019","418269020","418269021","418269022","418269023","418269024","418269025","418269026","418269027","418269028","418269029","418269030","418269031","418269032","418269033","418269034","418269035","418269036","418269037","418269038","418269039","418269040","418269041","418269042","418269043","418269044","418269045","418269046","418269047","418269048","418269049","418269050","418269051","418269052","418269053","418269054","418269055","418269056","418269057","418269058","418269059","418269060","418269061","418269062","418269063","418269064","418269065","418269066","418269067","418269068","418269069","418269070","418269071","418269072","418269073","418269074","418269075","418269076","418269077","418269078","418269079","418269080","418269081","418269082","418269083","418269084","418269085","418269086","418269087","418269088","418269089","418269090","418269091","418269092","418269093","418269094","418269095","418269096","418269097","418269098","418269099","418269100","418269101","418269102","418269103","418269104","418269105","418269106","418269107","418269108","418269109","418269110","418269111","418269112","418269113","418269114","418269115","418269116","418269117","418269118","418269119","418269120","418269121","418269122","418269123","418269124","418269125","418269126","418269127","418269128","418269129","418269130","418269131","418269132","418269133","418269134","418269135","418269136","418269137","418269138","418269139","418269140","418269141","418269142","418269143","418269144","418269145","4182691
46","418269147","418269148","418269149","418269150","418269151","418269152","418269153","418269154","418269155","418269156","418269157","418269158","418269159","418269160","418269161","418269162","418269163","418269164","418269165","418269166","418269167","418269168","418269169","418269170","418269171","418269172","418269173","418269174","418269175","418269176","418269177","418269178","418269179","418269180","418269181","418269182","418269183","418269184","418269185","418269186","418269187","418269188","418269189","418269190","418269191","418269192","418269193","418269194","418269195","418269196","418269197","418269198","418269199","418269200","418269201","418269202","418269203","418269204","418269205","418269206","418269207","418269208","418269209","418269210","418269211","418269212","418269213","418269214","418269215","418269216","418269217","418269218","418269219","418269220","418269221","418269222","418269223","418269224","418269225","418269226","418269227","418269228","418269229","418269230","418269231","418269232","418269233","418269234","418269235","418269236","418269237","418269238","418269239","418269240","418269241","418269242","418269243","418269244","418269245","418269246","418269247","418269248","418269249","418269250","418269251","418269252","418269253","418269254","418269255","418269256","418269257","418269258","418269259","418269260","418269261","418269262","418269263","418269264","418269265","418269266","418269267","418269268","418269269","418269270","418269271","418269272","418269273","418269274","418269275","418269276","418269277","418269278","418269279","418269280","418269281","418269282","418269283","418269284","418269285","418269286","418269287","418269288","418269289","418269290","418269291","418269292","418269293","418269294","418269295","418269296","418269297","418269298","418269299","418269300","418269301","418269302","418269303","418269304","418269305","418269306","418269307","418269308","418269309","418269310","418269311","418269312","418
269313","418269314","418269315","418269316","418269317","418269318","418269319","418269320","418269321","418269322","418269323","418269324","418269325","418269326","418269327","418269328","418269329","418269330","418269331","418269332","418269333","418269334","418269335","418269336","418269337","418269338","418269339","418269340","418269341","418269342","418269343","418269344","418269345","418269346","418269347","418269348","418269349","418269350","418269351","418269352","418269353","418269354","418269355","418269356","418269357","418269358","418269359","418269360","418269361","418269362","418269363","418269364","418269365","418269366","418269367","418269368","418269369","418269370","418269371","418269372","418269373","418269374","418269375","418269376","418269377","418269378","418269379","418269380","418269381","418269382","418269383","418269384","418269385","418269386","418269387","418269388","418269389","418269390","418269391","418269392","418269393","418269394","418269395","418269396","418269397","418269398","418269399","418269400","418269401","418269402","418269403","418269404","418269405","418269406","418269407","418269408","418269409","418269410","418269411","418269412","418269413","418269414","418269415","418269416","418269417","418269418","418269419","418269420","418269421","418269422","418269423","418269424","418269425","418269426","418269427","418269428","418269429","418269430","418269431","418269432","418269433","418269434","418269435","418269436","418269437","418269438","418269439","418269440","418269441","418269442","418269443","418269444","418269445","418269446","418269447","418269448","418269449","418269450","418269451","418269452","418269453","418269454","418269455","418269456","418269457","418269458","418269459","418269460","418269461","418269462","418269463","418269464","418269465","418269466","418269467","418269468","418269469","418269470","418269471","418269472","418269473","418269474","418269475","418269476","418269477","418269478","418269479",
"418269480","418269481","418269482","418269483","418269484","418269485","418269486","418269487","418269488","418269489","418269490","418269491","418269492","418269493","418269494","418269495","418269496","418269497","418269498","418269499","418269500","418269501","418269502","418269503","418269504","418269505","418269506","418269507","418269508","418269509","418269510","418269511","418269512","418269513","418269514","418269515","418269516","418269517","418269518","418269519","418269520","418269521","418269522","418269523","418269524","418269525","418269526","418269527","418269528","418269529","418269530","418269531","418269532","418269533","418269534","418269535","418269536","418269537","418269538","418269539","418269540","418269541","418269542","418269543","418269544","418269545","418269546","418269547","418269548","418269549","418269550","418269551","433393113","433393114","433393115","433393116","433393117","433393118","433393119","433393120","433393121","433393122","433393123","433393124","433393125","433393126","433393127","433393128","433393129","433393130","433393131","433393132","433393133","449404833","449404834","449404835","449404836","449404837","449404838","449404839","449404840","449404841","449404842","449404843","449404844","449404845","449404846","449404847","449404848","449404849","449404850","449404851","449404852","449404853","449404854","449404855","449404856","449404857","449404858","449404859","449404860","449404861","449404862","449404863","449404864","449404865","449404866","449404867","449404868","449404869","449404870","449404871","449404872","449404873","449404874","449404875","449404876","449404877","449404878","449404879","449404880","449404881","449404882","449404883","449404884","449404885","449404886","449404887","449404888","449404889","449404890","449404891","449404892","449404893","473755747","473755748","473755749","473755750","473755751","473755752","473755753","473755754","473755755","473755756","473755757","473755758","4737557
59","473755760","474791546","474791547","474791548","474791549","474791550","474791551","474791552","474791553","474791554","474791555","474791556","474791557","474791558","474791559","474791560","474791561","474791562","474791563","474791564","474791565","474791566","474791567","474791568","474791569","474791570","474791571","474791572","474791573","474791574","474791575","474791576","474791577","474791578","474791579","474791580","474791581","474791582","474791583","474791584","474791585","474791586","474791587","474791588","474791589","474791590","474791591","474791592","474791593","474791594","474791595","474791596","474791597","474791598","474791599","474791600","474791601","474791602","474791603","474791604","474791605","474791606","474791607","474791608","474791609","474791610","474791611","474791612","474791613","474791614","474791615","474791616","474791617","474791618","474791619","474791620","474791621","474791622","474791623","474791624","474791625","474791626","474791627","474791628","474791629","474791630","474791631","474791632","474791633","474791634","474791635","474791636","474791637","474791638","474791639","474791640","474791641","474791642","474791643","474791644","474791645","474791646","474791647","474791648","474791649","474791650","474791651","474791652","474791653","474791654","474791655","474791656","474791657","474791658","474791659","474791660","474791661","474791662","474791663","474791664","474791665","474791666","474791667","474791668","474791669","474791670","474791671","474791672","474791673","474791674","474791675","474791676","474791677","474791678","474791679","474791680","474791681","474791682","474791683","474791684","474791685","474791686","474791687","474791688","474791689","474791690","474791691","474791692","474791693","474791694","474791695","474791696","474791697","474791698","474791699","474791700","474791701","474791702","474791703","474791704","474791705","474791706","474791707","474791708","474791709","474791710","474
791711","474791712","474791713","474791714","474791715","474791716","474791717","474791718","474791719","474791720","474791721","474791722","474791723","474791724","474791725","474791726","474791727","474791728","474791729","474791730","474791731","474791732","474791733","474791734","474791735","474791736","474791737","474791738","474791739","474791740","474791741","474791742","474791743","474791744","474791745","474791746","474791747","474791748","474791749","474791750","474791751","474791752","474791753","474791754","474791755","474791756","474791757","474791758","474791759","474791760","474791761","474791762","474791763","474791764","474791765","474791766","474791767","474791768","474791769","474791770","474791771","474791772","474791773","474791774","474791775","474791776","474791777","474791778","474791779","474791780","474791781","474791782","474791783","474791784","474791785","474791786","474791787","474791788","474791789","474791790","474791791","474791792","474791793","474791794","474791795","474791796","474791797","474791798","474791799","474791800","474791801","474791802","474791803","474791804","474791805","474791806","474791807","474791808","474791809","474791810","474791811","474791812","474791813","474791814","474791815","474791816","474791817","474791818","474791819","474791820","474791821","474791822","474791823","474791824","474791825","474791826","474791827","474791828","474791829","474791830","474791831","474791832","474791833","474791834","474791835","474791836","474791837","474791838","474791839","474791840","474791841","474791842","474791843","474791844","474791845","474791846","474791847","474791848","474791849","474791850","474791851","474791852","474791853","474791854","474791855","474791856","474791857","474791858","474791859","474791860","474791861","474791862","474791863","474791864","474791865","474791866","474791867","474791868","474791869","474791870","474791871","474791872","474791873","474791874","474791875","474791876","474791877",
"474791878","474791879","474791880","474791881","474791882","474791883","474791884","474791885","474791886","474791887","474791888","474791889","474791890","474791891","474791892","474791893","474791894","474791895","474791896","474791897","474791898","474791899","474791900","474791901","474791902","474791903","474791904","474791905","474791906","474791907","474791908","474791909","474791910","474791911","474791912","474791913","474791914","474791915","474791916","474791917","474791918","474791919","474791920","474791921","474791922","474791923","474791924","474791925","474791926","474791927","474791928","474791929","474791930","474791931","474791932","474791933","474791934","474791935","474791936","474791937","474791938","474791939","474791940","474791941","474791942","474791943","474791944","474791945","474791946","474791947","474791948","474791949","474791950","474791951","474791952","474791953","474791954","474791955","474791956","474791957","474791958","474791959","474791960","474791961","474791962","474791963","474791964","474791965","474791966","474791967","474791968","474791969","474791970","474791971","474791972","474791973","474791974","474791975","474791976","474791977","474791978","474791979","474791980","474791981","474791982","474791983","474791984","474791985","474791986","474791987","474791988","474791989","474791990","474791991","474791992","474791993","474791994","474791995","474791996","474791997","474791998","474791999","474792000","474792001","474792002","474792003","474792004","474792005","474792006","474792007","474792008","474792009","474792010","474792011","474792012","474792013","474792014","474792015","474792016","474792017","474792018","474792019","474792020","474792021","474792022","474792023","474792024","474792025","474792026","474792027","474792028","474792029","474792030","474792031","474792032","474792033","474792034","474792035","474792036","474792037","474792038","474792039","474792040","474792041","474792042","474792043","4747920
44","474792045","474792046","474792047","474792048","474792049","474792050","474792051","474792052","474792053","474792054","474792055","474792056","474792057","474792058","474792059","474792060","474792061","474792062","474792063","474792064","474792065","474792066","474792067","474792068","474792069","474792070","474792071","474792072","474792073","474792074","474792075","474792076","474792077","474792078","474792079","474792080","474792081","474792082","474792083","474792084","474792085","474792086","474792087","474792088","474792089","474792090","474792091","474792092","474792093","474792094","474792095","474792096","474792097","474792098","474792099","474792100","474792101","474792102","474792103","474792104","474792105","474792106","474792107","474792108","474792109","474792110","474792111","474792112","474792113","474792114","474792115","474792116","474792117","474792118","474792119","474792120","474792121","474792122","474792123","474792124","474792125","474792126","474792127","474792128","474792129","474792130","474792131","474792132","474792133","474792134","474792135","474792136","474792137","474792138","474792139","474792140","474792141","474792142","474792143","474792144","474792145","474792146","474792147","474792148","474792149","474792150","474792151","474792152","474792153","474792154","474792155","474792156","474792157","474792158","474792159","474792160","474792161","474792162","474792163","474792164","474792165","474792166","474792167","474792168","474792169","474792170","474792171","474792172","474792173","474792174","474792175","474792176","474792177","474792178","474792179","474792180","474792181","474792182","474792183","474792184","474792185","474792186","474792187","474792188","474792189","474792190","474792191","474792192","474792193","474792194","474792195","474792196","474792197","474792198","474792199","474792200","474792201","474792202","474792203","474792204","474792205","474792206","474792207","474792208","474792209","474792210","474
792211","474792212","474792213","474792214","474792215","474792216","474792217","474792218","474792219","474792220","474792221","474792222","474792223","474792224","474792225","474792226","474792227","474792228","474792229","474792230","474792231","474792232","474792233","474792234","474792235","474792236","474792237","474792238","474792239","474792240","474792241","474792242","474792243","474792244","474792245","474792246","474792247","474792248","474792249","474792250","474792251","474792252","474792253","474792254","474792255","474792256","474792257","474792258","474792259","474792260","474792261","474792262","474792263","474792264","474792265","474792266","474792267","474792268","474792269","474792270","474792271","474792272","474792273","474792274","474792275","474792276","474792277","474792278","474792279","474792280","474792281","474792282","474792283","474792284","474792285","474792286","474792287","474792288","474792289","474792290","474792291","474792292","474792293","474792294","474792295","474792296","474792297","474792298","474792299","474792300","474792301","474792302","474792303","474792304","474792305","474792306","474792307","474792308","474792309","474792310","474792311","474792312","474792313","474792314","474792315","474792316","474792317","474792318","474792319","474792320","474792321","474792322","474792323","474792324","474792325","474792326","474792327","474792328","474792329","474792330","474792331","474792332","474792333","474792334","474792335","474792336","474792337","474792338","474792339","474792340","474792341","474792342","474792343","474792344","474792345","474792346","474792347","474792348","474792349","474792350","474792351","474792352","474792353","474792354","474792355","474792356","474792357","474792358","474792359","474792360","474792361","474792362","474792363","474792364","474792365","474792366","474792367","474792368","474792369","474792370","474792371","474792372","474792373","474792374","474792375","474792376","474792377",
"474792378","474792379","474792380","474792381","474792382","474792383","474792384","474792385","474792386","474792387","474792388","474792389","474792390","474792391","474792392","474792393","474792394","474792395","474792396","474792397","474792398","474792399","474792400","474792401","474792402","474792403","474792404","474792405","474792406","474792407","474792408","474792409","474792410","474792411","474792412","474792413","474792414","474792415","474792416","474792417","474792418","474792419","474792420","474792421","474792422","474792423","474792424","474792425","474792426","474792427","474792428","474792429","474792430","474792431","474792432","474792433","474792434","474792435","474792436","474792437","474792438","474792439","474792440","474792441","474792442","474792443","474792444","474792445","474792446","474792447","474792448","474792449","474792450","474792451","474792452","474792453","474792454","474792455","474792456","474792457","474792458","474792459","474792460","474792461","474792462","474792463","474792464","474792465","474792466","474792467","474792468","474792469","474792470","474792471","474792472","474792473","474792474","474792475","474792476","474792477","474792478","474792479","474792480","474792481","474792482","474792483","474792484","474792485","474792486","474792487","474792488","474792489","474792490","474792491","474792492","474792493","474792494","474792495","474792496","474792497","474792498","474792499","474792500","474792501","474792502","474792503","474792504","474792505","474792506","474792507","474792508","474792509","474792510","474792511","474792512","474792513","474792514","474792515","474792516","474792517","474792518","474792519","474792520","474792521","474792522","474792523","474792524","474792525","474792526","474792527","474792528","474792529","474792530","474792531","474792532","474792533","474792534","474792535","474792536","474792537","474792538","474792539","474792540","474792541","474792542","474792543","4747925
44","474792545","474792546","474792547","474792548","474792549","474792550","474792551","474792552","474792553","474792554","474792555","474792556","474792557","474792558","474792559","474792560","474792561","474792562","474792563","474792564","474792565","474792566","474792567","474792568","474792569","474792570","474792571","474792572","474792573","474792574","474792575","474792576","474792577","474792578","474792579","474792580","474792581","474792582","474792583","474792584","474792585","474792586","474792587","474792588","474792589","474792590","474792591","474792592","474792593","474792594","474792595","474792596","474792597","474792598","474792599","474792600","474792601","474792602","474792603","474792604","474792605","474792606","474792607","474792608","474792609","474792610","474792611","474792612","474792613","474792614","474792615","474792616","474792617","474792618","474792619","474792620","474792621","474792622","474792623","474792624","474792625","474792626","474792627","474792628","474792629","474792630","474792631","474792632","474792633","474792634","474792635","474792636","474792637","474792638","474792639","474792640","474792641","474792642","474792643","474792644","474792645","474792646","474792647","474792648","474792649","474792650","474792651","474792652","474792653","474792654","474792655","474792656","474792657","474792658","474792659","474792660","474792661","474792662","474792663","474792664","474792665","474792666","474792667","474792668","474792669","474792670","474792671","474792672","474792673","474792674","474792675","474792676","474792677","474792678","474792679","474792680","474792681","474792682","474792683","474792684","474792685","474792686","474792687","474792688","474792689","474792690","474792691","474792692","474792693","474792694","474792695","474792696","474792697","474792698","474792699","474792700","474792701","474792702","474792703","474792704","474792705","474792706","474792707","474792708","474792709","474792710","474
792711","474792712","474792713","474792714","474792715","474792716","474792717","474792718","474792719","474792720","474792721","474792722","474792723","474792724","474792725","474792726","474792727","474792728","474792729","474792730","474792731","474792732","474792733","474792734","474792735","474792736","474792737","474792738","474792739","474792740","474792741","474792742","474792743","474792744","474792745","474792746","474792747","474792748","474792749","474792750","474792751","474792752","474792753","474792754","474792755","474792756","474792757","474792758","474792759","474792760","474792761","474792762","474792763","474792764","474792765","474792766","474792767","474792768","474792769","474792770","474792771","474792772","474792773","474792774","474792775","474792776","474792777","474792778","474792779","474792780","474792781","474792782","474792783","474792784","474792785","474792786","474792787","474792788","474792789","474792790","474792791","474792792","474792793","474792794","474792795","474792796","474792797","474792798","474792799","474792800","474792801","474792802","474792803","474792804","474792805","474792806","474792807","474792808","474792809","474792810","474792811","474792812","474792813","474792814","474792815","474792816","474792817","474792818","474792819","474792820","474792821","474792822","474792823","474792824","474792825","474792826","474792827","474792828","474792829","474792830","474792831","474792832","474792833","474792834","474792835","474792836","474792837","474792838","474792839","474792840","474792841","474792842","474792843","474792844","474792845","474792846","474792847","474792848","474792849","474792850","474792851","475096671","475096672","475096673","475096674","475096675","475096676","491104519","491104520","491104521","491104522","491104523","491104524","491104525","491104526","491104527","491104528","491104529","491104530","491104531","491104532","491104533","492227066","492227067","492227068","492227069","492227070",
"492227071","492227072","492227073","492227074","492227075","492227076","492227077","492227078","492227079","492227080","492227081","492227082","492227083","492227084","492227085","492227086","492227087","492227088","492227089","492227090","492227091","492227092","492227093","492227094","492227095","492227096","492227097","492227098","492227099","492227100","492227101","492227102","492227103","494590765","494590766","494590767","494590768","494675500","494864911","495095306","495095307","495479430","495479431","495479432","495479433","495479434","495884481","495884482","495884483","495892289","495892290","495892291","495892292","495892293","495892294","495892295","496078570","496078571","496091826","496091827","496297112","498058959","498058960","498058961","498058962","498058963","498058964","498058965","498058966","498058967","498058968","498058969","498058970","498058971","498058972","498058973","498058974","498058975","498058976","498058977","498058978","498058979","498058980","498058981","498058982","498058983","498058984","498058985","498058986","498446420","498446421","498534467","498538065","498730293","498748750","498748751","498822009","498822010","498822011","498822012","498895826","499099490","499099491","499302135","499302136","499302137","499702437","499702438","499702439","499702440","499702441","499702442","499702443","499702444","499888653","499888654","499888655","499888656","499888657","499888658","499888659","499888660","499888661","499888662","499888663","499888664","499888665","499888666","499888667","499888668","499898894","499898895","499898896","499898897","499898898","499898899","499898900","499898901","499898902","499898903","499898904","499898905","499898906","499898907","499898908","499898909","499898910","499898911","499898912","499898913","499898914","499981367","499981368","500273031","500273032","500279494","500467837","500467838","500725081","500725082","500725083","500725084","500968953","500968954","500968955","500968956","5009689
57","500968958","500968959","500968960","500968961","500968962","501002242","501002243","501068710","501134453","501134454","501506238","501596443","501847591","501847592","501847593","501905275","502121900","502121901","502121902","502121903","502121904","502121905","502121906","502121907","502121908","502121909","502121910","502121911","502121912","502121913","502121914","502140661","502140662","502140663","502140664","502632650","502632651","503101606","504233520","504233521","504507322","504507323","504507324","504507325","504507326","504507327","504507328","504507329","504507330","504507331","504507332","504507333","504507334","504507335","504507336","504507337","504507338","504507339","504507340","504507341","504507342","504544547","504834810","504834811","506357700","506428115","506428116","507372109","507372110","507440228","507440229","507440230","507440231","508090550","509757745","509808273","509808274","509808275","509808276","509808277","509808278","509808279","509808280","509808281","509808282","509808283","509808284","509808285","509808286","509808287","509808288","509808289","509808290","509808291","510003115","510440936","510440937","510440938","510440939","510440940","510552051","511179432","511179433","511237367","511833671","511833672","511833673","511833674","511833675","511833676","511833677","511833678","511833679","511833680","511833681","511833682","511833683","511833684","511833685","511833686","511833687","511833688","511833689","511833690","511833691","511833692","511833693","511833694","511833695","511833696","511833697","511833698","511833699","511833700","511833701","511833702","511833703","511833704","511833705","511833706","511833707","511833708","511833709","511833710","511833711","511833712","511833713","511833714","511833715","511833716","511833717","511833718","511833719","511833720","511833721","511833722","511833723","511833724","511833725","511833726","511833727","511833728","511833729","511833730","511833731","511833732","511
833733","511833734","511833735","511833736","511833737","511833738","511833739","511833740","511833741","511833742","511833743","511833744","511833745","511833746","511833747","511833748","511833749","511833750","511833751","511833752","511833753","511833754","511833755","511833756","511833757","511833758","511833759","511833760","511833761","511833762","511833763","511833764","511833765","511833766","511833767","511833768","511833769","511833770","511833771","511833772","511833773","511833774","511833775","511833776","511833777","511833778","511833779","511833780","511833781","511833782","511833783","511833784","511833785","511833786","511833787","511833788","511833789","511833790","511833791","511833792","511833793","511833794","511833795","511833796","511833797","511833798","511833799","511833800","511833801","511833802","511833803","511833804","511833805","511833806","511833807","511833808","511833809","511833810","511833811","511833812","511833813","512326367","512326368","512326369","512396813","512456896","512456897","512492916","512492917","512492918","512492919","512492920","512492921","512492922","512492923","512922618","512922619","512922620","512922621","512922622","512922623","512922624","512922625","512922626","512937161","512957490","512957491","512991243","512997360","512997361","512997362","512997363","512997364","512997365","512997366","512997367","512997368","513012584","513159485","513159486","513159487","513159488","513159489","513159490","513159491","513159492","513159493","513159494","513159495","513159496","513159497","513159498","513159499","513159500","513159501","513159502","513159503","513276793","513276794","513276795","514315783","514339720","514339721","514339722","514451358","514693511","514693512","514693513","514874199","514874200","514874201","514874202","514874203","514874204","514874205","514874206","514874207","514874208","514874209","514874210","516318924","516318925","516351975","516351976","516351977","516754146","516754147",
"516754148","516754149","516754150","516754151","516754152","516754153","516754154","516754155","516754156","516754157","516754158","516754159","516754160","516754161","516754162","516754163","516754164","516754165","516754166","516754167","516754168","516754169","516754170","516754171","516754172","516754173","516754174","516754175","516754176","516754177","516754178","516754179","516754180","516754181","516754182","516754183","516754184","516754185","516784033","516819792","516819793","516819794","516819795","516819796","516994019","517318019","517318020","518732901","518795365","518795366","526580972","529973144","537140029","538343315","540618445"]
data2 = ["72183369","72183370","72183371","72183372","72183373","72183374","72183375","72183376","72183377","72183378","72183379","72183380","72183381","72183382","72183383","72183384","72183385","72183386","72183387","72183388","72183389","72183390","72183391","72183392","72183393","72183394","72183395","72183396","72183397","72183398","72183399","72183400","72183401","72183402","72183403","72183404","72183405","72183406","72183407","72183408","72183409","72183410","72183411","72183412","72183413","72183414","72183415","72183416","72183417","72183418","72183419","72183420","72183421","72183422","72183423","72183424","72183425","72183426","72183427","72183428","72183429","72183430","72183431","72183432","72183433","72183434","72183435","72183436","72183437","72183438","72183439","72183440","72183441","72183442","72183443","72183444","72183445","72183446","72183447","72183448","72183449","72183450","72183451","72183452","72183453","72183454","72183455","72183456","72183457","72183458","72183459","72183460","72183461","72183462","72183463","72183464","72183465","72183466","72183467","72183468","72183469","72183470","72183471","72183472","72183473","72183474","72183475","72183476","72183477","72183478","72183479","72183480","72183481","72183482","72183483","72183484","72183485","72183486","72183487","72183488","72183489","72183490","72183491","72183492","72183493","72183494","72183495","72183496","72183497","72183498","72183499","72183500","72183501","72183502","72183503","72183504","72183505","72183506","72183507","72183508","72183509","72183510","72183511","72183512","72183513","72183514","72183515","72183516","72183517","72183518","72183519","72183520","72183521","72183522","72183523","72183524","72183525","72183526","72183527","72183528","72183529","72183530","72183531","72183532","72183533","72183534","72183535","72183536","72183537","72183538","72183539","72183540","72183541","72183542","72183543","72183544","72183545","72183546","72183547","72183548","72183549",
"72183550","72183551","72183552","72183553","72183554","72183555","72183556","72183557","72183558","72183559","72183560","72183561","72183562","72183563","72183564","72183565","72183566","72183567","72183568","72183569","72183570","72183571","72183572","72183573","72183574","72183575","72183576","72183577","72183578","72183579","72183580","72183581","72183582","72183583","72183584","72183585","72183586","72183587","72183588","72183589","72183590","72183591","72183592","72183593","72183594","72183595","72183596","72183597","72183598","72183599","72183600","72183601","72183602","72183603","72183604","72183605","72183606","72183607","72183608","72183609","72183610","72183611","72183612","72183613","72183614","72183615","72183616","72183617","72183618","72183619","72183620","72183621","72183622","72183623","72183624","72183625","72183626","72183627","72183628","72183629","72183630","72183631","72183632","72183633","72183634","72183635","72183636","72183637","72183638","72183639","72183640","72183641","72183642","72183643","72183644","72183645","72183646","72183647","72183648","72183649","72183650","72183651","72183652","72183653","72183654","72183655","72183656","72183657","72183658","72183659","72183660","72183661","72183662","72183663","72183664","72183665","72183666","72183667","72183668","72183669","72183670","72183671","72183672","72183673","72183674","72183675","72183676","72183677","72183678","72183679","72183680","72183681","72183682","72183683","72183684","72183685","72183686","72183687","72183688","72183689","72183690","72183691","72183692","72183693","72183694","72183695","72183696","72183697","72183698","72183699","72183700","72183701","72183702","72183703","72183704","72183705","72183706","72183707","72183708","72183709","72183710","72183711","72183712","72183713","72183714","72183715","72183716","72183717","72183718","72183719","72183720","72183721","72183722","72183723","72183724","72183725","72183726","72183727","72183728","72183729","72183730","72183731
","72183732","72183733","72183734","72183735","72183736","72183737","72183738","72183739","72183740","72183741","72183742","72183743","72183744","72183745","72183746","72183747","72183748","72183749","72183750","72183751","72183752","72183753","72183754","72183755","72183756","72183757","72183758","72183759","72183760","72183761","72183762","72183763","72183764","72183765","72183766","72183767","72183768","72183769","72183770","72183771","72183772","72183773","72183774","72183775","72183776","72183777","72183778","72183779","72183780","72183781","72183782","72183783","72183784","72183785","72183786","72183787","72183788","72183789","72183790","72183791","72183792","72183793","72183794","72183795","72183796","72183797","72183798","72183799","72183800","72183801","72183802","72183803","72183804","72183805","72183806","72183807","72183808","72183809","72183810","72183811","72183812","72183813","72183814","72183815","72183816","72183817","72183818","72183819","72183820","72183821","72183822","72183823","72183824","72183825","72183826","72183827","72183828","72183829","72183830","72183831","72183832","72183833","72183834","72183835","72183836","72183837","72183838","72183839","72183840","72183841","72183842","72183843","72183844","72183845","72183846","72183847","72183848","72183849","72183850","72183851","72183852","72183853","72183854","72183855","72183856","72183857","72183858","72183859","72183860","72183861","72183862","72183863","72183864","72183865","72183866","72183867","72183868","72183869","72183870","72183871","72183872","72183873","72183874","72183875","72183876","72183877","72183878","72183879","72183880","72183881","72183882","72183883","72183884","72183885","72183886","72183887","72183888","72183889","72183890","72183891","72183892","72183893","72183894","72183895","72183896","72183897","72183898","72183899","72183900","72183901","72183902","72183903","72183904","72183905","72183906","72183907","72183908","72183909","72183910","72183911","72183912","721839
13","72183914","72183915","72183916","72183917","72183918","72183919","72183920","72183921","72183922","72183923","72183924","72183925","72183926","72183927","72183928","72183929","72183930","72183931","72183932","72183933","72183934","72183935","72183936","72183937","72183938","72183939","72183940","72183941","72183942","72183943","72183944","72183945","72183946","72183947","72183948","72183949","72183950","72183951","72183952","72183953","72183954","72183955","72183956","72183957","72183958","72183959","72183960","72183961","72183962","72183963","72183964","72183965","72183966","72183967","72183968","72183969","72183970","72183971","72183972","72183973","72183974","72183975","72183976","72183977","72183978","72183979","72183980","72183981","72183982","72183983","72183984","72183985","72183986","72183987","72183988","72183989","72183990","72183991","72183992","72183993","72183994","72183995","72183996","72183997","72183998","72183999","72184000","72184001","72184002","72184003","72184004","72184005","72184006","72184007","72184008","72184009","72184010","72184011","72184012","72184013","72184014","72184015","72184016","72184017","72184018","72184019","72184020","72184021","72184022","72184023","72184024","72184025","72184026","72184027","72184028","72184029","72184030","72184031","72184032","72184033","72184034","72184035","72184036","72184037","72184038","72184039","72184040","72184041","72184042","72184043","72184044","72184045","72184046","72184047","72184048","72184049","72184050","72184051","72184052","72184053","72184054","72184055","72184056","72184057","72184058","72184059","72184060","72184061","72184062","72184063","72184064","72184065","72184066","72184067","72184068","72184069","72184070","72184071","72184072","72184073","72184074","72184075","72184076","72184077","72184078","72184079","72184080","72184081","72184082","72184083","72184084","72184085","72184086","72184087","72184088","72184089","72184090","72184091","72184092","72184093","72184094","7218
4095","72184096","72184097","72184098","72184099","72184100","72184101","72184102","72184103","72184104","72184105","72184106","72184107","72184108","72184109","72184110","72184111","72184112","72184113","72184114","72184115","72184116","72184117","72184118","72184119","72184120","72184121","72184122","72184123","72184124","72184125","72184126","72184127","72184128","72184129","72184130","72184131","72184132","72184133","72184134","72184135","72184136","72184137","72184138","72184139","72184140","72184141","72184142","72184143","72184144","72184145","72184146","72184147","72184148","72184149","72184150","72184151","72184152","72184153","72184154","72184155","72184156","72184157","72184158","72184159","72184160","72184161","72184162","72184163","72184164","72184165","72184166","72184167","72184168","72184169","72184170","72184171","72184172","72184173","72184174","72184175","72184176","72184177","72184178","72184179","72184180","72184181","72184182","72184183","72184184","72184185","72184186","72184187","72184188","72184189","72184190","72184191","72184192","72184193","72184194","72184195","72184196","72184197","72184198","72184199","72184200","72184201","72184202","72184203","72184204","72184205","72184206","72184207","72184208","72184209","72184210","72184211","72184212","72184213","72184214","72184215","72184216","72184217","72184218","72184219","72184220","72184221","72184222","72184223","72184224","72184225","72184226","72184227","72184228","72184229","72184230","72184231","72184232","72184233","72184234","72184235","72184236","72184237","72184238","72184239","72184240","72184241","72184242","72184243","72184244","72184245","72184246","72184247","72184248","72184249","72184250","72184251","72184252","72184253","72184254","72184255","72184256","72184257","72184258","72184259","72184260","72184261","72184262","72184263","72184264","72184265","72184266","72184267","72184268","72184269","72184270","72184271","72184272","72184273","72184274","72184275","72184276","72
184277","72184278","72184279","72184280","72184281","72184282","72184283","72184284","72184285","72184286","72184287","72184288","72184289","72184290","72184291","72184292","72184293","72184294","72184295","72184296","72184297","72184298","72184299","72184300","72184301","72184302","72184303","72184304","72184305","72184306","72184307","72184308","72184309","72184310","72184311","72184312","72184313","72184314","72184315","72184316","72184317","72184318","72184319","72184320","72184321","72184322","72184323","72184324","72184325","72184326","72184327","72184328","72184329","72184330","72184331","72184332","72184333","72184334","72184335","72184336","72184337","72184338","72184339","72184340","72184341","72184342","72184343","72184344","72184345","72184346","72184347","72184348","72184349","72184350","72184351","72184352","72184353","72184354","72184355","72184356","72184357","72184358","72184359","72184360","72184361","72184362","72184363","72184364","72184365","72184366","72184367","72184368","72184369","72184370","72184371","72184372","72184373","72184374","72184375","72184376","72184377","72184378","72184379","72184380","72184381","72184382","72184383","72184384","72184385","72184386","72184387","72184388","72184389","72184390","72184391","72184392","72184393","72184394","72184395","72184396","72184397","72184398","72184399","72184400","72184401","72184402","72184403","72184404","72184405","72184406","72184407","72184408","72184409","72184410","72184411","72184412","72184413","72184414","72184415","72184416","72184417","72184418","72184419","72184420","72184421","72184422","72184423","72184424","72184425","72184426","72184427","72184428","72184429","72184430","72184431","72184432","72184433","72184434","72184435","72184436","72184437","72184438","72184439","72184440","72184441","72184442","72184443","72184444","72184445","72184446","72184447","72184448","72184449","72184450","72184451","72184452","72184453","72184454","72184455","72184456","72184457","72184458","
72184459","72184460","72184461","72184462","72184463","72184464","72184465","72184466","72184467","72184468","72184469","72184470","72184471","72184472","72184473","72184474","72184475","72184476","72184477","72184478","72184479","72184480","72184481","72184482","72184483","72184484","72184485","72184486","72184487","72184488","72184489","72184490","72184491","72184492","72184493","72184494","72184495","72184496","72184497","72184498","72184499","72184500","72184501","72184502","72184503","72184504","72184505","72184506","72184507","72184508","72184509","72184510","72184511","72184512","72184513","72184514","72184515","72184516","72184517","72184518","72184519","72184520","72184521","72184522","72184523","72184524","72184525","72184526","72184527","72184528","72184529","72184530","72184531","72184532","72184533","72184534","72184535","72184536","72184537","72184538","72184539","72184540","72184541","72184542","72184543","72184544","72184545","72184546","72184547","72184548","72184549","72184550","72184551","72184552","72184553","72184554","72184555","72184556","72184557","72184558","72184559","72184560","72184561","72184562","72184563","72184564","72184565","72184566","72184567","72184568","72184569","72184570","72184571","72184572","72184573","72184574","72184575","72184576","72184577","72184578","72184579","72184580","72184581","72184582","72184583","72184584","72184585","72184586","72184587","72184588","72184589","72184590","72184591","72184592","72184593","72184594","72184595","72184596","72184597","72184598","72184599","72184600","72184601","72184602","72184603","72184604","72184605","72184606","72184607","72184608","72184609","72184610","72184611","72184612","72184613","72184614","72184615","72184616","72184617","72184618","72184619","72184620","72184621","72184622","72184623","72184624","72184625","72184626","72184627","72184628","72184629","72184630","72184631","72184632","72184633","72184634","72184635","72184636","72184637","72184638","72184639","72184640"
,"72184641","72184642","72184643","72184644","72184645","72184646","72184647","72184648","72184649","72184650","72184651","72184652","72184653","72184654","72184655","72184656","72184657","72184658","72184659","72184660","72184661","72184662","72184663","72184664","72184665","72184666","72184667","72184668","72184669","72184670","72184671","72184672","72184673","72184674","72184675","72184676","72184677","72184678","72184679","72184680","72184681","72184682","72184683","72184684","72184685","72184686","72184687","72184688","72184689","72184690","72184691","72184692","72184693","72184694","72184695","72184696","72184697","72184698","72184699","72184700","72184701","72184702","72184703","72184704","72184705","72184706","72184707","72184708","72184709","72184710","72184711","72184712","72184713","72184714","72184715","72184716","72184717","72184718","72184719","72184720","72184721","72184722","72184723","72184724","72184725","72184726","72184727","72184728","72184729","72184730","72184731","72184732","72184733","72184734","72184735","72184736","72184737","72184738","72184739","72184740","72184741","72184742","72184743","72184744","72184745","72184746","72184747","72184748","72184749","72184750","72184751","72184752","72184753","72184754","72184755","72184756","72184757","72184758","72184759","72184760","72184761","72184762","72184763","72184764","72184765","72184766","72184767","72184768","72184769","72184770","72184771","72184772","72184773","72184774","72184775","72184776","72184777","72184778","72184779","72184780","72184781","72184782","72184783","72184784","72184785","72184786","72184787","72184788","72184789","72184790","72184791","72184792","72184793","72184794","72184795","72184796","72184797","72184798","72184799","72184800","72184801","72184802","72184803","72184804","72184805","72184806","72184807","72184808","72184809","72184810","72184811","72184812","72184813","72184814","72184815","72184816","72184817","72184818","72184819","72184820","72184821","7218482
2","72184823","72184824","72184825","72184826","72184827","72184828","72184829","72184830","72184831","72184832","72184833","72184834","72184835","72184836","72184837","72184838","72184839","72184840","72184841","72184842","72184843","72184844","72184845","72184846","72184847","72184848","72184849","72184850","72184851","72184852","72184853","72184854","72184855","72184856","72184857","72184858","72184859","72184860","72184861","72184862","72184863","72184864","72184865","72184866","72184867","72184868","72184869","72184870","72184871","72184872","72184873","72184874","72184875","72184876","72184877","72184878","72184879","72184880","72184881","72184882","72184883","72184884","72184885","72184886","72184887","72184888","72184889","72184890","72184891","72184892","72184893","72184894","72184895","72184896","72184897","72184898","72184899","72184900","72184901","72184902","72184903","72184904","72184905","72184906","72184907","72184908","72184909","72184910","72184911","72184912","72184913","72184914","72184915","72184916","72184917","72184918","72184919","72184920","72184921","72184922","72184923","72184924","72184925","72184926","72184927","72184928","72184929","72184930","72184931","72184932","72184933","72184934","72184935","72184936","72184937","72184938","72184939","72184940","72184941","72184942","72184943","72184944","72184945","72184946","72184947","72184948","72184949","72184950","72184951","72184952","72184953","72184954","72184955","72184956","72184957","72184958","72184959","72184960","72184961","72184962","72184963","72184964","72184965","72184966","72184967","72184968","72184969","72184970","72184971","72184972","72184973","72184974","72184975","72184976","72184977","72184978","72184979","72184980","72184981","72184982","72184983","72184984","72184985","72184986","72184987","72184988","72184989","72184990","72184991","72184992","72184993","72184994","72184995","72184996","72184997","72184998","72184999","72185000","72185001","72185002","72185003","72185
004","72185005","72185006","358146085","358146086","358146087","358146088","358146089","358146090","358146091","358146092","358146093","358146094","358146095","358146096","358146097","358146098","358146099","358146100","358146101","358146102","358146103","364227232","364227233","364227234","364227235","364227236","364227237","364227238","364227239","364227240","364227241","364227242","364227243","364227244","364227245","364227246","364227247","364227248","364227249","364227250","364227251","371114239","371114240","371858824","371858825","371858826","371858827","371858828","371858829","371858830","371858831","371858832","371858833","371858834","371858835","371858836","371858837","371858838","371858839","371858840","371858841","371858842","371858843","371858844","371858845","371858846","371858847","371858848","371858849","371858850","371858851","371858852","371858853","371858854","371858855","371858856","371858857","371858858","371858859","371858860","371858861","371858862","371858863","371858864","371858865","371858866","371858867","371858868","371858869","371858870","371858871","371858872","371858873","371858874","371858875","371858876","371858877","371858878","371858879","371858880","371858881","371858882","371858883","371858884","371858885","371858886","371858887","371858888","371858889","371858890","371858891","371858892","371858893","371858894","371858895","371858896","371858897","371858898","371858899","371858900","371858901","371858902","371858903","371858904","371858905","371858906","371858907","371858908","371858909","371858910","371858911","371858912","371858913","371858914","371858915","371858916","371858917","371858918","371858919","371858920","371858921","371858922","371858923","371858924","371858925","371858926","371858927","371858928","371858929","371858930","371858931","371858932","371858933","371858934","371858935","371858936","371858937","371858938","371858939","371858940","371858941","371858942","371858943","371858944","371858945","371858946","3718
58947","371858948","371858949","371858950","371858951","371858952","371858953","371858954","371858955","371858956","371858957","371858958","371858959","371858960","371858961","371858962","371858963","371858964","371858965","371858966","371858967","371858968","371858969","371858970","371858971","371858972","371858973","371858974","371858975","371858976","371858977","371858978","371858979","371858980","371858981","371858982","371858983","371858984","371858985","371858986","371858987","371858988","371858989","371858990","371858991","371858992","371858993","371858994","371858995","371858996","371858997","371858998","371858999","371859000","371859001","371859002","371859003","371859004","371859005","371859006","371859007","371859008","371859009","371859010","371859011","371859012","371859013","371859014","371859015","371859016","371859017","371859018","371859019","371859020","371859021","371859022","371859023","371859024","371859025","371859026","371859027","371859028","371859029","371859030","371859031","371859032","371859033","371859034","371859035","371859036","371859037","371859038","371859039","371859040","371859041","371859042","371859043","371859044","371859045","371859046","371859047","371859048","371859049","371859050","371859051","371859052","371859053","371859054","371859055","371859056","371859057","371859058","371859059","371859060","371859061","371859062","371859063","371859064","371859065","371859066","371859067","371859068","371859069","371859070","371859071","371859072","371859073","371859074","371859075","371859076","371859077","371859078","371859079","371859080","371859081","371859082","371859083","371859084","371859085","371859086","371859087","371859088","371859089","371859090","371859091","371859092","371859093","371859094","371859095","371859096","371859097","371859098","371859099","371859100","371859101","371859102","371859103","371859104","371859105","371859106","371859107","371859108","371859109","371859110","371859111","371859112","371859113","
371859114","371859115","371859116","371859117","371859118","371859119","371859120","371859121","371859122","371859123","371859124","371859125","371859126","371859127","371859128","371859129","371859130","371859131","371859132","371859133","371859134","371859135","371859136","371859137","371859138","371859139","371859140","371859141","371859142","371859143","371859144","371859145","371859146","371859147","371859148","371859149","371859150","371859151","371859152","371859153","371859154","371859155","371859156","371859157","371859158","371859159","371859160","371859161","371859162","371859163","371859164","371859165","371859166","371859167","371859168","371859169","371859170","371859171","371859172","371859173","371859174","371859175","371859176","371859177","371859178","371859179","371859180","371859181","371859182","371859183","371859184","371859185","371859186","371859187","371859188","371859189","371859190","371859191","371859192","371859193","371859194","371859195","371859196","371859197","371859198","371859199","371859200","371859201","371859202","371859203","371859204","371859205","371859206","371859207","371859208","371859209","371859210","371859211","371859212","371859213","371859214","371859215","371859216","371859217","371859218","371859219","371859220","371859221","371859222","371859223","371859224","371859225","371859226","371859227","371859228","371859229","371859230","371859231","371859232","371859233","371859234","371859235","371859236","371859237","372055158","372055159","372055160","372055161","374629671","374629672","374629673","374629674","374629675","374629676","374629677","374629678","374629679","374629680","374629681","374629682","374629683","374629684","374629685","374629686","374629687","374629688","374629689","374629690","374629691","374629692","374629693","374629694","374629695","374629696","374629697","374629698","374629699","374629700","374629701","374629702","374629703","374629704","374629705","374629706","374629707","374629708","37462970
9","374629710","374629711","374629712","374629713","374629714","374629715","374629716","374629717","374629718","374629719","374629720","374629721","374629722","374629723","374629724","374629725","374629726","374629727","374629728","374629729","374629730","374629731","374629732","374629733","374629734","374629735","374629736","374629737","374629738","374629739","374629740","374629741","374629742","374629743","374629744","374629745","374629746","374629747","374629748","374629749","374629750","374629751","374629752","374629753","374629754","374629755","374629756","374629757","374629758","374629759","374629760","374629761","374629762","374629763","374629764","374629765","374629766","374629767","374629768","374629769","374629770","374629771","374629772","374629773","374629774","374629775","374629776","374629777","374629778","374629779","374629780","374629781","374629782","374629783","374629784","374629785","374629786","374629787","374629788","374629789","374629790","374629791","374629792","374629793","374629794","374629795","374629796","374629797","374629798","374629799","374629800","374629801","374629802","374629803","374629804","374629805","374629806","374629807","374629808","374629809","374629810","374629811","374629812","374629813","374629814","374629815","374629816","374629817","374629818","374629819","374629820","374629821","374629822","374629823","374629824","374629825","374629826","374629827","374629828","374629829","374629830","374629831","374629832","374629833","374629834","374629835","374629836","374629837","374629838","374629839","374629840","374629841","374629842","374629843","374629844","374629845","374629846","374629847","374629848","374629849","374629850","374629851","374629852","374629853","374629854","374629855","374629856","374629857","374629858","374629859","385164656","385164657","385164658","385164659","385164660","385164661","385164662","385164663","389392480","389392481","389392482","389392483","389392484","418265654","418265655","418265656","4182
65657","418265658","418265659","418265660","418265661","418265662","418265663","418265664","418265665","418265666","418265667","418265668","418265669","418265670","418265671","418265672","418265673","418265674","418265675","418265676","418265677","418265678","418265679","418265680","418265681","418265682","418265683","418265684","418265685","418265686","418265687","264764401","264764402","264764403","264764404","264764405","264764406","264764407","264764408","264764409","264764410","264764411","264764412","264764413","264764414","264764415","264764416","264764417","264764418","264764419","264764420","264764421","264764422","264764423","264764424","264764425","264764426","264764427","264764428","264764429","264764430","264764431","264764432","264764433","264764434","264764435","264764436","264764437","264764438","264764439","264764440","264764441","264764442","264764443","264764444","264764445","264764446","264764447","264764448","264764449","264764450","264764451","264764452","264764453","264764454","264764455","264764456","264764457","264764458","264764459","264764460","264764461","264764462","264764463","264764464","264764465","264764466","264764467","264764468","264764469","264764470","264764471","264764472","264764473","264764474","264764475","264764476","264764477","264764478","264764479","264764480","264764481","264764482","264764483","264764484","264764485","264764486","264764487","264764488","264764489","264764490","264764491","264764492","264764493","264764494","264764495","264764496","264764497","264764498","264764499","264764500","264764501","264764502","264764503","264764504","264764505","264764506","264764507","264764508","264764509","264764510","264764511","264764512","264764513","264764514","264764515","264764516","264764517","264764518","264764519","264764520","264764521","264764522","264764523","264764524","264764525","264764526","264764527","264764528","264764529","264764530","264764531","264764532","264764533","264764534","264764535","264764536","
264764537","264764538","264764539","264764540","264764541","264764542","264764543","264764544","264764545","264764546","264764547","264764548","264764549","264764550","264764551","264764552","264764553","264764554","264764555","264764556","264764557","264764558","264764559","264764560","264764561","264764562","264764563","264764564","264764565","264764566","264764567","264764568","264764569","264764570","264764571","264764572","264764573","264764574","264764575","264764576","264764577","264764578","264764579","264764580","264764581","264764582","264764583","264764584","264764585","264764586","264764587","264764588","264764589","264764590","264764591","264764592","264764593","264764594","264764595","264764596","264764597","264764598","264764599","264764600","264764601","264764602","264764603","264764604","264764605","264764606","264764607","264764608","264764609","264764610","264764611","264764612","264764613","264764614","264764615","264764616","264764617","264764618","264764619","264764620","264764621","264764622","264764623","264764624","264764625","264764626","264764627","264764628","264764629","264764630","264764631","264764632","264764633","264764634","264764635","264764636","264764637","264764638","264764639","264764640","264764641","264764642","264764643","264764644","264764645","264764646","264764647","264764648","264764649","264764650","264764651","264764652","264764653","264764654","264764655","264764656","264764657","264764658","264764659","264764660","264764661","264764662","264764663","264764664","264764665","264764666","264764667","264764668","264764669","264764670","264764671","264764672","264764673","264764674","264764675","264764676","264764677","264764678","264764679","264764680","264764681","264764682","264764683","264764684","264764685","264764686","264764687","264764688","264764689","264764690","264764691","264764692","264764693","264764694","264764695","264764696","264764697","264764698","264764699","264764700","264764701","264764702","26476470
3","264764704","264764705","264764706","264764707","264764708","264764709","264764710","264764711","264764712","264764713","264764714","264764715","264764716","264764717","264764718","264764719","264764720","264764721","264764722","264764723","264764724","264764725","264764726","264764727","264764728","264764729","264764730","264764731","264764732","264764733","264764734","264764735","264764736","264764737","264764738","264764739","264764740","264764741","264764742","264764743","264764744","264764745","264764746","264764747","264764748","264764749","264764750","264764751","264764752","264764753","264764754","264764755","264764756","264764757","264764758","264764759","264764760","264764761","264764762","264764763","264764764","264764765","264764766","264764767","264764768","264764769","264764770","264764771","264764772","264764773","264764774","264764775","264764776","264764777","264764778","264764779","264764780","264764781","264764782","264764783","264764784","264764785","264764786","264764787","264764788","264764789","264764790","264764791","264764792","264764793","264764794","264764795","264764796","264764797","264764798","264764799","264764800","264764801","264764802","264764803","264764804","264764805","264764806","264764807","264764808","264764809","264764810","264764811","264764812","264764813","264764814","264764815","264764816","264764817","264764818","264764819","264764820","264764821","264764822","264764823","264764824","264764825","264764826","264764827","264764828","264764829","264764830","264764831","264764832","264764833","264764834","264764835","264764836","264764837","264764838","264764839","264764840","264764841","264764842","264764843","264764844","264764845","264764846","264764847","264764848","264764849","264764850","264764851","264764852","264764853","264764854","264764855","264764856","264764857","264764858","264764859","264764860","264764861","264764862","264764863","264764864","264764865","264764866","264764867","264764868","264764869","2647
64870","264764871","264764872","264764873","264764874","264764875","264764876","264764877","264764878","264764879","264764880","264764881","264764882","264764883","264764884","264764885","264764886","264764887","264764888","264764889","264764890","264764891","264764892","264764893","264764894","264764895","264764896","264764897","264764898","264764899","264764900","264764901","264764902","264764903","264764904","264764905","264764906","264764907","264764908","264764909","264764910","264764911","264764912","264764913","264764914","264764915","264764916","264764917","264764918","264764919","264764920","264764921","264764922","264764923","264764924","264764925","264764926","264764927","264764928","264764929","264764930","264764931","264764932","264764933","264764934","264764935","264764936","264764937","264764938","264764939","264764940","264764941","264764942","264764943","264764944","264764945","264764946","264764947","264764948","264764949","264764950","264764951","264764952","264764953","264764954","264764955","264764956","264764957","264764958","264764959","264764960","264764961","264764962","264764963","264764964","264764965","264764966","264764967","264764968","264764969","264764970","264764971","264764972","264764973","264764974","264764975","264764976","264764977","264764978","264764979","264764980","264764981","264764982","264764983","264764984","264764985","264764986","264764987","264764988","264764989","264764990","264764991","264764992","264764993","264764994","264764995","264764996","264764997","264764998","264764999","264765000","264765001","264765002","264765003","264765004","264765005","264765006","264765007","264765008","264765009","264765010","264765011","264765012","264765013","264765014","264765015","264765016","264765017","264765018","264765019","264765020","264765021","264765022","264765023","264765024","264765025","264765026","264765027","264765028","264765029","264765030","264765031","264765032","264765033","264765034","264765035","264765036","
264765037","264765038","264765039","264765040","264765041","264765042","264765043","264765044","264765045","264765046","264765047","264765048","264765049","264765050","264765051","264765052","264765053","264765054","264765055","264765056","264765057","264765058","264765059","264765060","264765061","264765062","264765063","264765064","264765065","264765066","264765067","264765068","264765069","264765070","264765071","264765072","264765073","264765074","264765075","264765076","264765077","264765078","264765079","264765080","264765081","264765082","264765083","264765084","264765085","264765086","264765087","264765088","264765089","264765090","264765091","264765092","264765093","264765094","264765095","264765096","264765097","264765098","264765099","264765100","264765101","264765102","264765103","264765104","264765105","264765106","264765107","264765108","264765109","264765110","264765111","264765112","264765113","264765114","264765115","264765116","264765117","264765118","264765119","264765120","264765121","264765122","264765123","264765124","264765125","264765126","264765127","264765128","264765129","264765130","264765131","264765132","264765133","264765134","264765135","264765136","264765137","264765138","264765139","264765140","264765141","264765142","264765143","264765144","264765145","264765146","264765147","264765148","264765149","264765150","264765151","264765152","264765153","264765154","264765155","264765156","264765157","264765158","264765159","264765160","264765161","264765162","264765163","264765164","264765165","264765166","264765167","264765168","264765169","264765170","264765171","264765172","264765173","264765174","264765175","264765176","264765177","264765178","264765179","264765180","264765181","264765182","264765183","264765184","264765185","264765186","264765187","264765188","264765189","264765190","264765191","264765192","264765193","264765194","264765195","264765196","264765197","264765198","264765199","264765200","264765201","264765202","26476520
3","264765204","264765205","264765206","264765207","264765208","264765209","264765210","264765211","264765212","264765213","264765214","264765215","264765216","264765217","264765218","264765219","264765220","264765221","264765222","264765223","264765224","264765225","264765226","264765227","264765228","264765229","264765230","264765231","264765232","264765233","264765234","264765235","264765236","264765237","264765238","264765239","264765240","264765241","264765242","264765243","264765244","264765245","264765246","264765247","264765248","264765249","264765250","264765251","264765252","264765253","264765254","264765255","264765256","264765257","264765258","264765259","264765260","264765261","264765262","264765263","264765264","264765265","264765266","264765267","264765268","264765269","264765270","264765271","264765272","264765273","264765274","264765275","264765276","264765277","264765278","264765279","264765280","264765281","264765282","264765283","264765284","264765285","264765286","264765287","264765288","264765289","264765290","264765291","264765292","264765293","264765294","264765295","264765296","264765297","264765298","264765299","264765300","264765301","264765302","264765303","264765304","264765305","264765306","264765307","264765308","264765309","264765310","264765311","264765312","264765313","264765314","264765315","264765316","264765317","264765318","264765319","264765320","264765321","264765322","264765323","264765324","264765325","264765326","264765327","264765328","264765329","264765330","264765331","264765332","264765333","264765334","264765335","264765336","264765337","264765338","264765339","264765340","264765341","264765342","264765343","264765344","264765345","264765346","264765347","264765348","264765349","264765350","264765351","264765352","264765353","264765354","264765355","264765356","264765357","264765358","264765359","264765360","264765361","264765362","264765363","264765364","264765365","264765366","264765367","264765368","264765369","2647
65370","264765371","264765372","264765373","264765374","264765375","264765376","264765377","264765378","264765379","264765380","264765381","264765382","264765383","264765384","264765385","264765386","264765387","264765388","264765389","264765390","264765391","264765392","264765393","264765394","264765395","264765396","264765397","264765398","264765399","264765400","264765401","264765402","264765403","264765404","264765405","264765406","264765407","264765408","264765409","264765410","264765411","264765412","264765413","264765414","264765415","264765416","264765417","264765418","264765419","264765420","264765421","264765422","264765423","264765424","264765425","264765426","264765427","264765428","264765429","264765430","264765431","264765432","264765433","264765434","264765435","264765436","264765437","264765438","264765439","264765440","264765441","264765442","264765443","264765444","264765445","264765446","264765447","264765448","264765449","264765450","264765451","264765452","264765453","264765454","264765455","264765456","264765457","264765458","264765459","264765460","264765461","264765462","264765463","264765464","264765465","264765466","264765467","264765468","264765469","264765470","264765471","264765472","264765473","264765474","264765475","264765476","264765477","264765478","264765479","264765480","264765481","264765482","264765483","264765484","264765485","264765486","264765487","264765488","264765489","264765490","264765491","264765492","264765493","264765494","264765495","264765496","264765497","264765498","264765499","264765500","264765501","264765502","264765503","264765504","264765505","264765506","264765507","264765508","264765509","264765510","264765511","264765512","264765513","264765514","264765515","264765516","264765517","264765518","264765519","264765520","264765521","264765522","264765523","264765524","264765525","264765526","264765527","264765528","264765529","264765530","264765531","264765532","264765533","264765534","264765535","264765536","
264765537","264765538","264765539","264765540","264765541","264765542","264765543","264765544","264765545","264765546","264765547","264765548","264765549","264765550","264765551","264765552","264765553","264765554","264765555","264765556","264765557","264765558","264765559","264765560","264765561","264765562","264765563","264765564","264765565","264765566","264765567","264765568","264765569","264765570","264765571","264765572","264765573","264765574","264765575","264765576","264765577","264765578","264765579","264765580","264765581","264765582","264765583","264765584","264765585","264765586","264765587","264765588","264765589","264765590","264765591","264765592","264765593","264765594","264765595","264765596","264765597","264765598","264765599","264765600","264765601","264765602","264765603","264765604","264765605","264765606","264765607","264765608","264765609","264765610","264765611","264765612","264765613","264765614","264765615","264765616","264765617","264765618","264765619","264765620","264765621","264765622","264765623","264765624","264765625","264765626","264765627","264765628","264765629","264765630","264765631","264765632","264765633","264765634","264765635","264765636","264765637","264765638","264765639","264765640","264765641","264765642","264765643","264765644","264765645","264765646","264765647","264765648","264765649","264765650","264765651","264765652","264765653","264765654","264765655","264765656","264765657","264765658","264765659","264765660","264765661","264765662","264765663","264765664","264765665","264765666","264765667","264765668","264765669","264765670","264765671","264765672","264765673","264765674","264765675","264765676","264765677","264765678","264765679","264765680","264765681","264765682","264765683","264765684","264765685","264765686","264765687","264765688","264765689","264765690","264765691","264765692","264765693","264765694","264765695","264765696","264765697","264765698","264765699","264765700","264765701","264765702","26476570
3","264765704","264765705","264765706","264765707","264765708","264765709","264765710","264765711","264765712","264765713","264765714","264765715","264765716","264765717","264765718","264765719","264765720","264765721","264765722","264765723","264765724","264765725","264765726","264765727","264765728","264765729","264765730","264765731","264765732","264765733","264765734","264765735","264765736","264765737","264765738","264765739","264765740","264765741","264765742","264765743","264765744","264765745","264765746","264765747","264765748","264765749","264765750","264765751","264765752","264765753","264765754","264765755","264765756","264765757","264765758","264765759","264765760","264765761","264765762","264765763","264765764","264765765","264765766","264765767","264765768","264765769","264765770","264765771","264765772","264765773","264765774","264765775","264765776","264765777","264765778","264765779","264765780","264765781","264765782","264765783","264765784","264765785","264765786","264765787","264765788","264765789","264765790","264765791","264765792","264765793","264765794","264765795","264765796","264765797","264765798","264765799","264765800","264765801","264765802","264765803","264765804","264765805","264765806","264765807","264765808","264765809","264765810","264765811","264765812","264765813","264765814","264765815","264765816","264765817","264765818","264765819","264765820","264765821","264765822","264765823","264765824","264765825","264765826","264765827","264765828","264765829","264765830","264765831","264765832","264765833","264765834","264765835","264765836","264765837","264765838","264765839","264765840","264765841","264765842","264765843","264765844","264765845","264765846","264765847","264765848","264765849","264765850","264765851","264765852","264765853","264765854","264765855","264765856","264765857","264765858","264765859","264765860","264765861","264765862","264765863","264765864","264765865","264765866","264765867","264765868","264765869","2647
65870","264765871","264765872","264765873","264765874","264765875","264765876","264765877","264765878","264765879","264765880","264765881","264765882","264765883","264765884","264765885","264765886","264765887","264765888","264765889","264765890","264765891","264765892","264765893","264765894","264765895","264765896","264765897","264765898","264765899","264765900","264765901","264765902","264765903","264765904","264765905","264765906","264765907","264765908","264765909","264765910","264765911","264765912","264765913","264765914","264765915","264765916","264765917","264765918","264765919","264765920","264765921","264765922","264765923","264765924","264765925","264765926","264765927","264765928","264765929","264765930","264765931","264765932","264765933","264765934","264765935","264765936","264765937","264765938","264765939","264765940","264765941","264765942","264765943","264765944","264765945","264765946","264765947","264765948","264765949","264765950","264765951","264765952","264765953","264765954","264765955","264765956","264765957","264765958","264765959","264765960","264765961","264765962","264765963","264765964","264765965","264765966","264765967","264765968","264765969","264765970","264765971","264765972","264765973","264765974","264765975","264765976","264765977","264765978","264765979","264765980","264765981","264765982","264765983","264765984","264765985","264765986","264765987","264765988","264765989","264765990","264765991","264765992","264765993","264765994","264765995","264765996","264765997","264765998","264765999","264766000","264766001","264766002","264766003","264766004","264766005","264766006","264766007","264766008","264766009","264766010","264766011","264766012","264766013","264766014","264766015","264766016","264766017","264766018","264766019","264766020","264766021","264766022","264766023","264766024","264766025","264766026","264766027","264766028","264766029","264766030","264766031","264766032","264766033","264766034","264766035","264766036","
264766037","264766038","264766039","264766040","264766041","264766042","264766043","264766044","264766045","264766046","264766047","264766048","264766049","264766050","264766051","264766052","264766053","264766054","264766055","264766056","264766057","264766058","264766059","264766060","264766061","264766062","264766063","264766064","264766065","264766066","264766067","264766068","264766069","264766070","264766071","264766072","264766073","264766074","264766075","264766076","264766077","264766078","264766079","264766080","264766081","264766082","264766083","264766084","264766085","264766086","264766087","264766088","264766089","264766090","264766091","264766092","264766093","264766094","264766095","264766096","264766097","264766098","264766099","264766100","264766101","264766102","264766103","264766104","264766105","264766106","264766107","264766108","264766109","264766110","264766111","264766112","264766113","264766114","264766115","264766116","264766117","264766118","264766119","264766120","264766121","264766122","264766123","264766124","264766125","264766126","264766127","264766128","264766129","264766130","264766131","264766132","264766133","264766134","264766135","264766136","264766137","264766138","264766139","264766140","264766141","264766142","264766143","264766144","264766145","264766146","264766147","264766148","264766149","264766150","264766151","264766152","264766153","264766154","264766155","264766156","264766157","264766158","264766159","264766160","264766161","264766162","264766163","264766164","264766165","264766166","264766167","264766168","264766169","264766170","264766171","264766172","264766173","264766174","264766175","264766176","264766177","264766178","264766179","264766180","264766181","264766182","264766183","264766184","264766185","264766186","264766187","264766188","264766189","264766190","264766191","264766192","264766193","264766194","264766195","264766196","264766197","264766198","264766199","264766200","264766201","264766202","26476620
3","264766204","264766205","264766206","264766207","264766208","264766209","264766210","264766211","264766212","264766213","264766214","264766215","264766216","264766217","264766218","264766219","264766220","264766221","264766222","264766223","264766224","264766225","264766226","264766227","264766228","264766229","264766230","264766231","264766232","264766233","264766234","264766235","264766236","264766237","264766238","264766239","264766240","264766241","264766242","264766243","264766244","264766245","264766246","264766247","264766248","264766249","264766250","264766251","264766252","264766253","264766254","264766255","264766256","264766257","264766258","264766259","264766260","264766261","264766262","264766263","264766264","264766265","264766266","264766267","264766268","264766269","264766270","264766271","264766272","264766273","264766274","264766275","264766276","264766277","264766278","264766279","264766280","264766281","264766282","264766283","264766284","264766285","264766286","264766287","264766288","264766289","264766290","264766291","264766292","264766293","264766294","264766295","264766296","264766297","264766298","264766299","264766300","264766301","264766302","264766303","264766304","264766305","264766306","264766307","264766308","264766309","264766310","264766311","264766312","264766313","264766314","264766315","264766316","264766317","264766318","264766319","264766320","264766321","264766322","264766323","264766324","264766325","264766326","264766327","264766328","264766329","264766330","264766331","264766332","264766333","264766334","264766335","264766336","264766337","264766338","264766339","264766340","264766341","264766342","264766343","264766344","264766345","264766346","264766347","264766348","264766349","264766350","264766351","264766352","264766353","264766354","264766355","264766356","264766357","264766358","264766359","264766360","264766361","264766362","264766363","264766364","264766365","264766366","264766367","264766368","264766369","2647
66370","264766371","264766372","264766373","264766374","264766375","264766376","264766377","264766378","264766379","264766380","264766381","264766382","264766383","264766384","264766385","264766386","264766387","264766388","264766389","264766390","264766391","264766392","264766393","264766394","264766395","264766396","264766397","264766398","264766399","264766400","264766401","264766402","264766403","264766404","264766405","264766406","264766407","264766408","264766409","264766410","264766411","264766412","264766413","264766414","264766415","264766416","264766417","264766418","264766419","264766420","264766421","264766422","264766423","264766424","264766425","264766426","264766427","264766428","264766429","264766430","264766431","264766432","264766433","264766434","264766435","264766436","264766437","264766438","264766439","264766440","264766441","264766442","264766443","264766444","264766445","264766446","264766447","264766448","264766449","264766450","264766451","264766452","264766453","264766454","264766455","264766456","264766457","264766458","264766459","264766460","264766461","264766462","264766463","264766464","264766465","264766466","264766467","264766468","264766469","264766470","264766471","264766472","264766473","264766474","264766475","264766476","264766477","264766478","264766479","264766480","264766481","264766482","264766483","264766484","264766485","264766486","264766487","264766488","264766489","264766490","264766491","264766492","264766493","264766494","264766495","264766496","264766497","264766498","264766499","264766500","264766501","264766502","264766503","264766504","264766505","264766506","264766507","264766508","264766509","264766510","264766511","264766512","264766513","264766514","264766515","264766516","264766517","264766518","264766519","264766520","264766521","264766522","264766523","264766524","264766525","264766526","264766527","264766528","264766529","264766530","264766531","264766532","264766533","264766534","264766535","264766536","
264766537","264766538","264766539","264766540","264766541","264766542","264766543","264766544","264766545","264766546","264766547","264766548","264766549","264766550","264766551","264766552","264766553","264766554","264766555","264766556","264766557","264766558","264766559","264766560","264766561","264766562","264766563","264766564","264766565","264766566","264766567","264766568","264766569","264766570","264766571","264766572","264766573","264766574","264766575","264766576","264766577","264766578","264766579","264766580","264766581","264766582","264766583","264766584","264766585","264766586","264766587","264766588","264766589","264766590","264766591","264766592","264766593","264766594","264766595","264766596","264766597","264766598","264766599","264766600","264766601","264766602","264766603","264766604","264766605","264766606","264766607","264766608","264766609","264766610","264766611","264766612","264766613","264766614","264766615","264766616","264766617","264766618","264766619","264766620","264766621","264766622","264766623","264766624","264766625","264766626","264766627","264766628","264766629","264766630","264766631","264766632","264766633","264766634","264766635","264766636","264766637","264766638","264766639","264766640","264766641","264766642","264766643","264766644","264766645","264766646","264766647","264766648","264766649","264766650","264766651","264766652","264766653","264766654","264766655","264766656","264766657","264766658","264766659","264766660","264766661","264766662","264766663","264766664","264766665","264766666","264766667","264766668","264766669","264766670","264766671","264766672","264766673","264766674","264766675","264766676","264766677","264766678","264766679","264766680","264766681","264766682","264766683","264766684","264766685","264766686","264766687","264766688","264766689","264766690","264766691","264766692","264766693","264766694","264766695","264766696","264766697","264766698","264766699","264766700","264766701","264766702","26476670
3","264766704","264766705","264766706","264766707","264766708","264766709","264766710","264766711","264766712","264766713","264766714","264766715","264766716","264766717","264766718","264766719","264766720","264766721","264766722","264766723","264766724","264766725","264766726","264766727","264766728","264766729","264766730","264766731","264766732","264766733","264766734","264766735","264766736","264766737","264766738","264766739","264766740","264766741","264766742","264766743","264766744","264766745","264766746","264766747","264766748","264766749","264766750","264766751","264766752","264766753","264766754","264766755","264766756","264766757","264766758","264766759","264766760","264766761","264766762","264766763","264766764","264766765","264766766","264766767","264766768","264766769","264766770","264766771","264766772","264766773","264766774","264766775","264766776","264766777","264766778","264766779","264766780","264766781","264766782","264766783","264766784","264766785","264766786","264766787","264766788","264766789","264766790","264766791","264766792","264766793","264766794","264766795","264766796","264766797","264766798","264766799","264766800","264766801","264766802","264766803","264766804","264766805","264766806","264766807","264766808","264766809","264766810","264766811","264766812","264766813","264766814","264766815","264766816","264766817","264766818","264766819","264766820","264766821","264766822","264766823","264766824","264766825","264766826","264766827","264766828","264766829","264766830","264766831","264766832","264766833","264766834","264766835","264766836","264766837","264766838","264766839","264766840","264766841","264766842","264766843","264766844","264766845","264766846","264766847","264766848","264766849","264766850","264766851","264766852","264766853","264766854","264766855","264766856","264766857","264766858","264766859","264766860","264766861","264766862","264766863","264766864","264766865","264766866","264766867","264766868","264766869","2647
66870","264766871","264766872","264766873","264766874","264766875","264766876","264766877","264766878","264766879","264766880","264766881","264766882","264766883","264766884","264766885","264766886","264766887","264766888","264766889","264766890","264766891","264766892","264766893","264766894","264766895","264766896","264766897","264766898","264766899","264766900","264766901","264766902","264766903","264766904","264766905","264766906","264766907","264766908","264766909","264766910","264766911","264766912","264766913","264766914","264766915","264766916","264766917","264766918","264766919","264766920","264766921","264766922","264766923","264766924","264766925","264766926","264766927","264766928","264766929","264766930","264766931","264766932","264766933","264766934","264766935","264766936","264766937","264766938","264766939","264766940","264766941","264766942","264766943","264766944","264766945","264766946","264766947","264766948","264766949","264766950","264766951","264766952","264766953","264766954","264766955","264766956","264766957","264766958","264766959","264766960","264766961","264766962","264766963","264766964","264766965","264766966","264766967","264766968","264766969","264766970","264766971","264766972","264766973","264766974","264766975","264766976","264766977","264766978","264766979","264766980","264766981","264766982","264766983","264766984","264766985","264766986","264766987","264766988","264766989","264766990","264766991","264766992","264766993","264766994","264766995","264766996","264766997","264766998","264766999","264767000","264767001","264767002","264767003","264767004","264767005","264767006","264767007","264767008","264767009","264767010","264767011","264767012","264767013","264767014","264767015","264767016","264767017","264767018","264767019","264767020","264767021","264767022","264767023","264767024","264767025","264767026","264767027","264767028","264767029","264767030","264767031","264767032","264767033","264767034","264767035","264767036","
264767037","264767038","264767039","264767040","264767041","264767042","264767043","264767044","264767045","264767046","264767047","264767048","264767049","264767050","264767051","264767052","264767053","264767054","264767055","264767056","264767057","264767058","264767059","264767060","264767061","264767062","264767063","264767064","264767065","264767066","264767067","264767068","264767069","264767070","264767071","264767072","264767073","264767074","264767075","264767076","264767077","264767078","264767079","264767080","264767081","264767082","264767083","264767084","264767085","264767086","264767087","264767088","264767089","264767090","264767091","264767092","264767093","264767094","264767095","264767096","264767097","264767098","264767099","264767100","264767101","264767102","264767103","264767104","264767105","264767106","264767107","264767108","264767109","264767110","264767111","264767112","264767113","264767114","264767115","264767116","264767117","264767118","264767119","264767120","264767121","264767122","264767123","264767124","264767125","264767126","264767127","264767128","264767129","264767130","264767131","264767132","264767133","264767134","264767135","264767136","264767137","264767138","264767139","264767140","264767141","264767142","264767143","264767144","264767145","264767146","264767147","264767148","264767149","264767150","264767151","264767152","264767153","264767154","264767155","264767156","264767157","264767158","264767159","264767160","264767161","264767162","264767163","264767164","264767165","264767166","264767167","264767168","264767169","264767170","264767171","264767172","264767173","264767174","264767175","264767176","264767177","264767178","264767179","264767180","264767181","264767182","264767183","264767184","264767185","264767186","264767187","264767188","264767189","264767190","264767191","264767192","264767193","264767194","264767195","264767196","264767197","264767198","264767199","264767200","264767201","264767202","26476720
3","264767204","264767205","264767206","264767207","264767208","264767209","264767210","264767211","264767212","264767213","264767214","264767215","264767216","264767217","264767218","264767219","264767220","264767221","264767222","264767223","264767224","264767225","264767226","264767227","264767228","264767229","264767230","264767231","264767232","264767233","264767234","264767235","264767236","264767237","264767238","264767239","264767240","264767241","264767242","264767243","264767244","264767245","264767246","264767247","264767248","264767249","264767250","264767251","264767252","264767253","264767254","264767255","264767256","264767257","264767258","264767259","264767260","264767261","264767262","264767263","264767264","264767265","264767266","264767267","264767268","264767269","264767270","264767271","264767272","264767273","264767274","264767275","264767276","264767277","264767278","264767279","264767280","264767281","264767282","264767283","264767284","264767285","264767286","264767287","264767288","264767289","264767290","264767291","264767292","264767293","264767294","264767295","264767296","264767297","264767298","264767299","264767300","264767301","264767302","264767303","264767304","264767305","264767306","264767307","264767308","264767309","264767310","264767311","264767312","264767313","264767314","264767315","264767316","264767317","264767318","264767319","264767320","264767321","264767322","264767323","264767324","264767325","264767326","264767327","264767328","264767329","264767330","264767331","264767332","264767333","264767334","264767335","264767336","264767337","264767338","264767339","264767340","264767341","264767342","264767343","264767344","264767345","264767346","264767347","264767348","264767349","264767350","264767351","264767352","264767353","264767354","264767355","264767356","264767357","264767358","264767359","264767360","264767361","264767362","264767363","264767364","264767365","264767366","264767367","264767368","264767369","2647
67370","264767371","264767372","264767373","264767374","264767375","264767376","264767377","264767378","264767379","264767380","264767381","264767382","264767383","264767384","264767385","264767386","264767387","264767388","264767389","264767390","264767391","264767392","264767393","264767394","264767395","264767396","264767397","264767398","264767399","264767400","264767401","264767402","264767403","264767404","264767405","264767406","264767407","264767408","264767409","264767410","264767411","264767412","264767413","264767414","264767415","264767416","264767417","264767418","264767419","264767420","264767421","264767422","264767423","264767424","264767425","264767426","264767427","264767428","264767429","264767430","264767431","264767432","264767433","264767434","264767435","264767436","264767437","264767438","264767439","264767440","264767441","264767442","264767443","264767444","264767445","264767446","264767447","264767448","264767449","264767450","264767451","264767452","264767453","264767454","264767455","264767456","264767457","264767458","264767459","264767460","264767461","264767462","264767463","264767464","264767465","264767466","264767467","264767468","264767469","264767470","264767471","264767472","264767473","264767474","264767475","264767476","264767477","264767478","264767479","264767480","264767481","264767482","264767483","264767484","264767485","264767486","264767487","264767488","264767489","264767490","264767491","264767492","264767493","264767494","264767495","264767496","264767497","264767498","264767499","264767500","264767501","264767502","264767503","264767504","264767505","264767506","264767507","264767508","264767509","264767510","264767511","264767512","264767513","264767514","264767515","264767516","264767517","264767518","264767519","264767520","264767521","264767522","264767523","264767524","264767525","264767526","264767527","264767528","264767529","264767530","264767531","264767532","264767533","264767534","264767535","264767536","
264767537","264767538","264767539","264767540","264767541","264767542","264767543","264767544","264767545","264767546","264767547","264767548","264767549","264767550","264767551","264767552","264767553","264767554","264767555","264767556","264767557","264767558","264767559","264767560","264767561","264767562","264767563","264767564","264767565","264767566","264767567","264767568","264767569","264767570","264767571","264767572","264767573","264767574","264767575","264767576","264767577","264767578","264767579","264767580","264767581","264767582","264767583","264767584","264767585","264767586","264767587","264767588","264767589","264767590","264767591","264767592","264767593","264767594","264767595","264767596","264767597","264767598","264767599","264767600","264767601","264767602","264767603","264767604","264767605","264767606","264767607","264767608","264767609","264767610","264767611","264767612","264767613","264767614","264767615","264767616","264767617","264767618","264767619","264767620","264767621","264767622","264767623","264767624","264767625","264767626","264767627","264767628","264767629","264767630","264767631","264767632","264767633","264767634","264767635","264767636","264767637","264767638","264767639","264767640","264767641","264767642","264767643","264767644","264767645","264767646","264767647","264767648","264767649","264767650","264767651","264767652","264767653","264767654","264767655","264767656","264767657","264767658","264767659","264767660","264767661","264767662","264767663","264767664","264767665","264767666","264767667","264767668","264767669","264767670","264767671","264767672","264767673","264767674","264767675","264767676","264767677","264767678","264767679","264767680","264767681","264767682","264767683","264767684","264767685","264767686","264767687","264767688","264767689","264767690","264767691","264767692","264767693","264767694","264767695","264767696","264767697","264767698","264767699","264767700","264767701","264767702","26476770
3","264767704","264767705","264767706","264767707","264767708","264767709","264767710","264767711","264767712","264767713","264767714","264767715","264767716","264767717","264767718","264767719","264767720","264767721","264767722","264767723","264767724","264767725","264767726","264767727","264767728","264767729","264767730","264767731","264767732","264767733","264767734","264767735","264767736","264767737","264767738","264767739","264767740","264767741","264767742","264767743","264767744","264767745","264767746","264767747","264767748","264767749","264767750","264767751","264767752","264767753","264767754","264767755","264767756","264767757","264767758","264767759","264767760","264767761","264767762","264767763","264767764","264767765","264767766","264767767","264767768","264767769","264767770","264767771","264767772","264767773","264767774","264767775","264767776","264767777","264767778","264767779","264767780","264767781","264767782","264767783","264767784","264767785","264767786","264767787","264767788","264767789","264767790","264767791","264767792","264767793","264767794","264767795","264767796","264767797","264767798","264767799","264767800","264767801","264767802","264767803","264767804","264767805","264767806","264767807","264767808","264767809","264767810","264767811","264767812","264767813","264767814","264767815","264767816","264767817","264767818","264767819","264767820","264767821","264767822","264767823","264767824","264767825","264767826","264767827","264767828","264767829","264767830","264767831","264767832","264767833","264767834","264767835","264767836","264767837","264767838","264767839","264767840","264767841","264767842","264767843","264767844","264767845","264767846","264767847","264767848","264767849","264767850","264767851","264767852","264767853","264767854","264767855","264767856","264767857","264767858","264767859","264767860","264767861","264767862","264767863","264767864","264767865","264767866","264767867","264767868","264767869","2647
67870","264767871","264767872","264767873","264767874","264767875","264767876","264767877","264767878","264767879","264767880","264767881","264767882","264767883","264767884","264767885","264767886","264767887","264767888","264767889","264767890","264767891","264767892","264767893","264767894","264767895","264767896","264767897","264767898","264767899","264767900","264767901","264767902","264767903","264767904","264767905","264767906","264767907","264767908","264767909","264767910","264767911","264767912","264767913","264767914","264767915","264767916","264767917","264767918","264767919","264767920","264767921","264767922","264767923","264767924","264767925","264767926","264767927","264767928","264767929","264767930","264767931","264767932","264767933","264767934","264767935","264767936","264767937","264767938","264767939","264767940","264767941","264767942","264767943","264767944","264767945","264767946","264767947","264767948","264767949","264767950","264767951","264767952","264767953","264767954","264767955","264767956","264767957","264767958","264767959","264767960","264767961","264767962","264767963","264767964","264767965","264767966","264767967","264767968","264767969","264767970","264767971","264767972","264767973","264767974","264767975","264767976","264767977","264767978","264767979","264767980","264767981","264767982","264767983","264767984","264767985","264767986","264767987","264767988","264767989","264767990","264767991","264767992","264767993","264767994","264767995","264767996","264767997","264767998","264767999","264768000","264768001","264768002","264768003","264768004","264768005","264768006","264768007","264768008","264768009","264768010","264768011","264768012","264768013","264768014","264768015","264768016","264768017","264768018","264768019","264768020","264768021","264768022","264768023","264768024","264768025","264768026","264768027","264768028","264768029","264768030","264768031","264768032","264768033","264768034","264768035","264768036","
264768037","264768038","264768039","264768040","264768041","264768042","264768043","264768044","264768045","264768046","264768047","264768048","264768049","264768050","264768051","264768052","264768053","264768054","264768055","264768056","264768057","264768058","264768059","264768060","264768061","264768062","264768063","264768064","264768065","264768066","264768067","264768068","264768069","264768070","264768071","264768072","264768073","264768074","264768075","264768076","264768077","264768078","264768079","264768080","264768081","264768082","264768083","264768084","264768085","264768086","264768087","264768088","264768089","264768090","264768091","264768092","264768093","264768094","264768095","264768096","264768097","264768098","264768099","264768100","264768101","264768102","264768103","264768104","264768105","264768106","264768107","264768108","264768109","264768110","264768111","264768112","264768113","264768114","264768115","264768116","264768117","264768118","264768119","264768120","264768121","264768122","264768123","264768124","264768125","264768126","264768127","264768128","264768129","264768130","264768131","264768132","264768133","264768134","264768135","264768136","264768137","264768138","264768139","264768140","264768141","264768142","264768143","264768144","264768145","264768146","264768147","264768148","264768149","264768150","264768151","264768152","264768153","264768154","264768155","264768156","264768157","264768158","264768159","264768160","264768161","264768162","264768163","264768164","264768165","264768166","264768167","264768168","264768169","264768170","264768171","264768172","264768173","264768174","264768175","264768176","264768177","264768178","264768179","264768180","264768181","264768182","264768183","264768184","264768185","264768186","264768187","264768188","264768189","264768190","264768191","264768192","264768193","264768194","264768195","264768196","264768197","264768198","264768199","264768200","264768201","264768202","26476820
3","264768204","264768205","264768206","264768207","264768208","264768209","264768210","264768211","264768212","264768213","264768214","264768215","264768216","264768217","264768218","264768219","264768220","264768221","264768222","264768223","264768224","264768225","264768226","264768227","264768228","264768229","264768230","264768231","264768232","264768233","264768234","264768235","264768236","264768237","264768238","264768239","264768240","264768241","264768242","264768243","264768244","264768245","264768246","264768247","264768248","264768249","264768250","264768251","264768252","264768253","264768254","264768255","264768256","264768257","264768258","264768259","264768260","264768261","264768262","264768263","264768264","264768265","264768266","264768267","264768268","264768269","264768270","264768271","264768272","264768273","264768274","264768275","264768276","264768277","264768278","264768279","264768280","264768281","264768282","264768283","264768284","264768285","264768286","264768287","264768288","264768289","264768290","264768291","264768292","264768293","264768294","264768295","264768296","264768297","264768298","264768299","264768300","264768301","264768302","264768303","264768304","264768305","264768306","264768307","264768308","264768309","264768310","264768311","264768312","264768313","264768314","264768315","264768316","264768317","264768318","264768319","264768320","264768321","264768322","264768323","264768324","264768325","264768326","264768327","264768328","264768329","264768330","264768331","264768332","264768333","264768334","264768335","264768336","264768337","264768338","264768339","264768340","264768341","264768342","264768343","264768344","264768345","264768346","264768347","264768348","264768349","264768350","264768351","264768352","264768353","264768354","264768355","264768356","264768357","264768358","264768359","264768360","264768361","264768362","264768363","264768364","264768365","264768366","264768367","264768368","264768369","2647
68370","264768371","264768372","264768373","264768374","264768375","264768376","264768377","264768378","264768379","264768380","264768381","264768382","264768383","264768384","264768385","264768386","264768387","264768388","264768389","264768390","264768391","264768392","264768393","264768394","264768395","264768396","264768397","264768398","264768399","264768400","264768401","264768402","264768403","264768404","264768405","264768406","264768407","264768408","264768409","264768410","264768411","264768412","264768413","264768414","264768415","264768416","264768417","264768418","264768419","264768420","264768421","264768422","264768423","264768424","264768425","264768426","264768427","264768428","264768429","264768430","264768431","264768432","264768433","264768434","264768435","264768436","264768437","264768438","264768439","264768440","264768441","264768442","264768443","264768444","264768445","264768446","264768447","264768448","264768449","264768450","264768451","264768452","264768453","264768454","264768455","264768456","264768457","264768458","264768459","264768460","264768461","264768462","264768463","264768464","264768465","264768466","264768467","264768468","264768469","264768470","264768471","264768472","264768473","264768474","264768475","264768476","264768477","264768478","264768479","264768480","264768481","264768482","264768483","264768484","264768485","264768486","264768487","264768488","264768489","264768490","264768491","264768492","264768493","264768494","264768495","264768496","264768497","264768498","264768499","264768500","264768501","264768502","264768503","264768504","264768505","264768506","264768507","264768508","264768509","264768510","264768511","264768512","264768513","264768514","264768515","264768516","264768517","264768518","264768519","264768520","264768521","264768522","264768523","264768524","264768525","264768526","264768527","264768528","264768529","264768530","264768531","264768532","264768533","264768534","264768535","264768536","
264768537","264768538","264768539","264768540","264768541","264768542","264768543","264768544","264768545","264768546","264768547","264768548","264768549","264768550","264768551","264768552","264768553","264768554","264768555","264768556","264768557","264768558","264768559","264768560","264768561","264768562","264768563","264768564","264768565","264768566","264768567","264768568","264768569","264768570","264768571","264768572","264768573","264768574","264768575","264768576","264768577","264768578","264768579","264768580","264768581","264768582","264768583","264768584","264768585","264768586","264768587","264768588","264768589","264768590","264768591","264768592","264768593","264768594","264768595","264768596","264768597","264768598","264768599","264768600","264768601","264768602","264768603","264768604","264768605","264768606","264768607","264768608","264768609","264768610","264768611","264768612","264768613","264768614","264768615","264768616","264768617","264768618","264768619","264768620","264768621","264768622","264768623","264768624","264768625","264768626","264768627","264768628","264768629","264768630","264768631","264768632","264768633","264768634","264768635","264768636","264768637","264768638","264768639","264768640","264768641","264768642","264768643","264768644","264768645","264768646","264768647","264768648","264768649","264768650","264768651","264768652","264768653","264768654","264768655","264768656","264768657","264768658","264768659","264768660","264768661","264768662","264768663","264768664","264768665","264768666","264768667","264768668","264768669","264768670","264768671","264768672","264768673","264768674","264768675","264768676","264768677","264768678","264768679","264768680","264768681","264768682","264768683","264768684","264768685","264768686","264768687","264768688","264768689","264768690","264768691","264768692","264768693","264768694","264768695","264768696","264768697","264768698","264768699","264768700","264768701","264768702","26476870
3","264768704","264768705","264768706","264768707","264768708","264768709","264768710","264768711","264768712","264768713","264768714","264768715","264768716","264768717","264768718","264768719","264768720","264768721","264768722","264768723","264768724","264768725","264768726","264768727","264768728","264768729","264768730","264768731","264768732","264768733","264768734","264768735","264768736","264768737","264768738","264768739","264768740","264768741","264768742","264768743","264768744","264768745","264768746","264768747","264768748","264768749","264768750","264768751","264768752","264768753","264768754","264768755","264768756","264768757","264768758","264768759","264768760","264768761","264768762","264768763","264768764","264768765","264768766","264768767","264768768","264768769","264768770","264768771","264768772","264768773","264768774","264768775","264768776","264768777","264768778","264768779","264768780","264768781","264768782","264768783","264768784","264768785","264768786","264768787","264768788","264768789","264768790","264768791","264768792","264768793","264768794","264768795","264768796","264768797","264768798","264768799","264768800","264768801","264768802","264768803","264768804","264768805","264768806","264768807","264768808","264768809","264768810","264768811","264768812","264768813","264768814","264768815","264768816","264768817","264768818","264768819","264768820","264768821","264768822","264768823","264768824","264768825","264768826","264768827","264768828","264768829","264768830","264768831","264768832","264768833","264768834","264768835","264768836","264768837","264768838","264768839","264768840","264768841","264768842","264768843","264768844","264768845","264768846","264768847","264768848","264768849","264768850","264768851","264768852","264768853","264768854","264768855","264768856","264768857","264768858","264768859","264768860","264768861","264768862","264768863","264768864","264768865","264768866","264768867","264768868","264768869","2647
68870","264768871","264768872","264768873","264768874","264768875","264768876","264768877","264768878","264768879","264768880","264768881","264768882","264768883","264768884","264768885","264768886","264768887","264768888","264768889","264768890","264768891","264768892","264768893","264768894","264768895","264768896","264768897","264768898","264768899","264768900","264768901","264768902","264768903","264768904","264768905","264768906","264768907","264768908","264768909","264768910","264768911","264768912","264768913","264768914","264768915","264768916","264768917","264768918","264768919","264768920","264768921","264768922","264768923","264768924","264768925","264768926","264768927","264768928","264768929","264768930","264768931","264768932","264768933","264768934","264768935","264768936","264768937","264768938","264768939","264768940","264768941","264768942","264768943","264768944","264768945","264768946","264768947","264768948","264768949","264768950","264768951","264768952","264768953","264768954","264768955","264768956","264768957","264768958","264768959","264768960","264768961","264768962","264768963","264768964","264768965","264768966","264768967","264768968","264768969","264768970","264768971","264768972","264768973","264768974","264768975","264768976","264768977","264768978","264768979","264768980","264768981","264768982","264768983","264768984","264768985","264768986","264768987","264768988","264768989","264768990","264768991","264768992","264768993","264768994","264768995","264768996","264768997","264768998","264768999","264769000","264769001","264769002","264769003","264769004","264769005","264769006","264769007","264769008","264769009","264769010","264769011","264769012","264769013","264769014","264769015","264769016","264769017","264769018","264769019","264769020","264769021","264769022","264769023","264769024","264769025","264769026","264769027","264769028","264769029","264769030","264769031","264769032","264769033","264769034","264769035","264769036","
264769037","264769038","264769039","264769040","264769041","264769042","264769043","264769044","264769045","264769046","264769047","264769048","264769049","264769050","264769051","264769052","264769053","264769054","264769055","264769056","264769057","264769058","264769059","264769060","264769061","264769062","264769063","264769064","264769065","264769066","264769067","264769068","264769069","264769070","264769071","264769072","264769073","264769074","264769075","264769076","264769077","264769078","264769079","264769080","264769081","264769082","264769083","264769084","264769085","264769086","264769087","264769088","264769089","264769090","264769091","264769092","264769093","264769094","264769095","264769096","264769097","264769098","264769099","264769100","264769101","264769102","264769103","264769104","264769105","264769106","264769107","264769108","264769109","264769110","264769111","264769112","264769113","264769114","264769115","264769116","264769117","264769118","264769119","264769120","264769121","264769122","264769123","264769124","264769125","264769126","264769127","264769128","264769129","264769130","264769131","264769132","264769133","264769134","264769135","264769136","264769137","264769138","264769139","264769140","264769141","264769142","264769143","264769144","264769145","264769146","264769147","264769148","264769149","264769150","264769151","264769152","264769153","264769154","264769155","264769156","264769157","264769158","264769159","264769160","264769161","264769162","264769163","264769164","264769165","264769166","264769167","264769168","264769169","264769170","264769171","264769172","264769173","264769174","264769175","264769176","264769177","264769178","264769179","264769180","264769181","264769182","264769183","264769184","264769185","264769186","264769187","264769188","264769189","264769190","264769191","264769192","264769193","264769194","264769195","264769196","264769197","264769198","264769199","264769200","264769201","264769202","26476920
3","264769204","264769205","264769206","264769207","264769208","264769209","264769210","264769211","264769212","264769213","264769214","264769215","264769216","264769217","264769218","264769219","264769220","264769221","264769222","264769223","264769224","264769225","264769226","264769227","264769228","264769229","264769230","264769231","264769232","264769233","264769234","264769235","264769236","264769237","264769238","264769239","264769240","264769241","264769242","264769243","264769244","264769245","264769246","264769247","264769248","264769249","264769250","264769251","264769252","264769253","264769254","264769255","264769256","264769257","264769258","264769259","264769260","264769261","264769262","264769263","264769264","264769265","264769266","264769267","264769268","264769269","264769270","264769271","264769272","264769273","264769274","264769275","264769276","264769277","264769278","264769279","264769280","264769281","264769282","264769283","264769284","264769285","264769286","264769287","264769288","264769289","264769290","264769291","264769292","264769293","264769294","264769295","264769296","264769297","264769298","264769299","264769300","264769301","264769302","264769303","264769304","264769305","264769306","264769307","264769308","264769309","264769310","264769311","264769312","264769313","264769314","264769315","264769316","264769317","264769318","264769319","264769320","264769321","264769322","264769323","264769324","264769325","264769326","264769327","264769328","264769329","264769330","264769331","264769332","264769333","264769334","264769335","264769336","264769337","264769338","264769339","264769340","264769341","264769342","264769343","264769344","264769345","264769346","264769347","264769348","264769349","264769350","264769351","264769352","264769353","264769354","264769355","264769356","264769357","264769358","264769359","264769360","264769361","264769362","264769363","264769364","264769365","264769366","264769367","264769368","264769369","2647
69370","264769371","264769372","264769373","264769374","264769375","264769376","264769377","264769378","264769379","264769380","264769381","264769382","264769383","264769384","264769385","264769386","264769387","264769388","264769389","264769390","264769391","264769392","264769393","264769394","264769395","264769396","264769397","264769398","264769399","264769400","264769401","264769402","264769403","264769404","264769405","264769406","264769407","264769408","264769409","264769410","264769411","264769412","264769413","264769414","264769415","264769416","264769417","264769418","264769419","264769420","264769421","264769422","264769423","264769424","264769425","264769426","264769427","264769428","264769429","264769430","264769431","264769432","264769433","264769434","264769435","264769436","264769437","264769438","264769439","264769440","264769441","264769442","264769443","264769444","264769445","264769446","264769447","264769448","264769449","264769450","264769451","264769452","264769453","264769454","264769455","264769456","264769457","264769458","264769459","264769460","264769461","264769462","264769463","264769464","264769465","264769466","264769467","264769468","264769469","264769470","264769471","264769472","264769473","264769474","264769475","264769476","264769477","264769478","264769479","264769480","264769481","264769482","264769483","264769484","264769485","264769486","264769487","264769488","264769489","264769490","264769491","264769492","264769493","264769494","264769495","264769496","264769497","264769498","264769499","264769500","264769501","264769502","264769503","264769504","264769505","264769506","264769507","264769508","264769509","264769510","264769511","264769512","264769513","264769514","264769515","264769516","264769517","264769518","264769519","264769520","264769521","264769522","264769523","264769524","264769525","264769526","264769527","264769528","264769529","264769530","264769531","264769532","264769533","264769534","264769535","264769536","
264769537","264769538","264769539","264769540","264769541","264769542","264769543","264769544","264769545","264769546","264769547","264769548","264769549","264769550","264769551","264769552","264769553","264769554","264769555","264769556","264769557","264769558","264769559","264769560","264769561","264769562","264769563","264769564","264769565","264769566","264769567","264769568","264769569","264769570","264769571","264769572","264769573","264769574","264769575","264769576","264769577","264769578","264769579","264769580","264769581","264769582","264769583","264769584","264769585","264769586","264769587","264769588","264769589","264769590","264769591","264769592","264769593","264769594","264769595","264769596","264769597","264769598","264769599","264769600","264769601","264769602","264769603","264769604","264769605","264769606","264769607","264769608","264769609","264769610","264769611","264769612","264769613","264769614","264769615","264769616","264769617","264769618","264769619","264769620","264769621","264769622","264769623","264769624","264769625","264769626","264769627","264769628","264769629","264769630","264769631","264769632","264769633","264769634","264769635","264769636","264769637","264769638","264769639","264769640","264769641","264769642","264769643","264769644","264769645","264769646","264769647","264769648","264769649","264769650","264769651","264769652","264769653","264769654","264769655","264769656","264769657","264769658","264769659","264769660","264769661","264769662","264769663","264769664","264769665","264769666","264769667","264769668","264769669","264769670","264769671","264769672","264769673","264769674","264769675","264769676","264769677","264769678","264769679","264769680","264769681","264769682","264769683","264769684","264769685","264769686","264769687","264769688","264769689","264769690","264769691","264769692","264769693","264769694","264769695","264769696","264769697","264769698","264769699","264769700","264769701","264769702","26476970
3","264769704","264769705","264769706","264769707","264769708","264769709","264769710","264769711","264769712","264769713","264769714","264769715","264769716","264769717","264769718","264769719","264769720","264769721","264769722","264769723","264769724","264769725","264769726","264769727","264769728","264769729","264769730","264769731","264769732","264769733","264769734","264769735","264769736","264769737","264769738","264769739","264769740","264769741","264769742","264769743","264769744","264769745","264769746","264769747","264769748","264769749","264769750","264769751","264769752","264769753","264769754","264769755","264769756","264769757","264769758","264769759","264769760","264769761","264769762","264769763","264769764","264769765","264769766","264769767","264769768","264769769","264769770","264769771","264769772","264769773","264769774","264769775","264769776","264769777","264769778","264769779","264769780","264769781","264769782","264769783","264769784","264769785","264769786","264769787","264769788","264769789","264769790","264769791","264769792","264769793","264769794","264769795","264769796","264769797","264769798","264769799","264769800","264769801","264769802","264769803","264769804","264769805","264769806","264769807","264769808","264769809","264769810","264769811","264769812","264769813","264769814","264769815","264769816","264769817","264769818","264769819","264769820","264769821","264769822","264769823","264769824","264769825","264769826","264769827","264769828","264769829","264769830","264769831","264769832","264769833","264769834","264769835","264769836","264769837","264769838","264769839","264769840","264769841","264769842","264769843","264769844","264769845","264769846","264769847","264769848","264769849","264769850","264769851","264769852","264769853","264769854","264769855","264769856","264769857","264769858","264769859","264769860","264769861","264769862","264769863","264769864","264769865","264769866","264769867","264769868","264769869","2647
69870","264769871","264769872","264769873","264769874","264769875","264769876","264769877","264769878","264769879","264769880","264769881","264769882","264769883","264769884","264769885","264769886","264769887","264769888","264769889","264769890","264769891","264769892","264769893","264769894","264769895","264769896","264769897","264769898","264769899","264769900","264769901","264769902","264769903","264769904","264769905","264769906","264769907","264769908","264769909","264769910","264769911","264769912","264769913","264769914","264769915","264769916","264769917","264769918","264769919","264769920","264769921","264769922","264769923","264769924","264769925","264769926","264769927","264769928","264769929","264769930","264769931","264769932","264769933","264769934","264769935","264769936","264769937","264769938","264769939","264769940","264769941","264769942","264769943","264769944","264769945","264769946","264769947","264769948","264769949","264769950","264769951","264769952","264769953","264769954","264769955","264769956","264769957","264769958","264769959","264769960","264769961","264769962","264769963","264769964","264769965","264769966","264769967","264769968","264769969","264769970","264769971","264769972","264769973","264769974","264769975","264769976","264769977","264769978","264769979","264769980","264769981","264769982","264769983","264769984","264769985","264769986","264769987","264769988","264769989","264769990","264769991","264769992","264769993","264769994","264769995","264769996","264769997","264769998","264769999","264770000","264770001","264770002","264770003","264770004","264770005","264770006","264770007","264770008","264770009","264770010","264770011","264770012","264770013","264770014","264770015","264770016","264770017","264770018","264770019","264770020","264770021","264770022","264770023","264770024","264770025","264770026","264770027","264770028","264770029","264770030","264770031","264770032","264770033","264770034","264770035","264770036","
264770037","264770038","264770039","264770040","264770041","264770042","264770043","264770044","264770045","264770046","264770047","264770048","264770049","264770050","264770051","264770052","264770053","264770054","264770055","264770056","264770057","264770058","264770059","264770060","264770061","264770062","264770063","264770064","264770065","264770066","264770067","264770068","264770069","264770070","264770071","264770072","264770073","264770074","264770075","264770076","264770077","264770078","264770079","264770080","264770081","264770082","264770083","264770084","264770085","264770086","264770087","264770088","264770089","264770090","264770091","264770092","264770093","264770094","264770095","264770096","264770097","264770098","264770099","264770100","264770101","264770102","264770103","264770104","264770105","264770106","264770107","264770108","264770109","264770110","264770111","264770112","264770113","264770114","264770115","264770116","264770117","264770118","264770119","264770120","264770121","264770122","264770123","264770124","264770125","264770126","264770127","264770128","264770129","264770130","264770131","264770132","264770133","264770134","264770135","264770136","264770137","264770138","264770139","264770140","264770141","264770142","264770143","264770144","264770145","264770146","264770147","264770148","264770149","264770150","264770151","264770152","264770153","264770154","264770155","264770156","264770157","264770158","264770159","264770160","264770161","264770162","264770163","264770164","264770165","264770166","264770167","264770168","264770169","264770170","264770171","264770172","264770173","264770174","264770175","264770176","264770177","264770178","264770179","264770180","264770181","264770182","264770183","264770184","264770185","264770186","264770187","264770188","264770189","264770190","264770191","264770192","264770193","264770194","264770195","264770196","264770197","264770198","264770199","264770200","264770201","264770202","26477020
3","264770204","264770205","264770206","264770207","264770208","264770209","264770210","264770211","264770212","264770213","264770214","264770215","264770216","264770217","264770218","264770219","264770220","264770221","264770222","264770223","264770224","264770225","264770226","264770227","264770228","264770229","264770230","264770231","264770232","264770233","264770234","264770235","264770236","264770237","264770238","264770239","264770240","264770241","264770242","264770243","264770244","264770245","264770246","264770247","264770248","264770249","264770250","264770251","264770252","264770253","264770254","264770255","264770256","264770257","264770258","264770259","264770260","264770261","264770262","264770263","264770264","264770265","264770266","264770267","264770268","264770269","264770270","264770271","264770272","264770273","264770274","264770275","264770276","264770277","264770278","264770279","264770280","264770281","264770282","264770283","264770284","264770285","264770286","264770287","264770288","264770289","264770290","264770291","264770292","264770293","264770294","264770295","264770296","264770297","264770298","264770299","264770300","264770301","264770302","264770303","264770304","264770305","264770306","264770307","264770308","264770309","264770310","264770311","264770312","264770313","264770314","264770315","264770316","264770317","264770318","264770319","264770320","264770321","264770322","264770323","264770324","264770325","264770326","264770327","264770328","264770329","264770330","264770331","264770332","264770333","264770334","264770335","264770336","264770337","264770338","264770339","264770340","264770341","264770342","264770343","264770344","264770345","264770346","264770347","264770348","264770349","264770350","264770351","264770352","264770353","264770354","264770355","264770356","264770357","264770358","264770359","264770360","264770361","264770362","264770363","264770364","264770365","264770366","264770367","264770368","264770369","2647
70370","264770371","264770372","264770373","264770374","264770375","264770376","264770377","264770378","264770379","264770380","264770381","264770382","264770383","264770384","264770385","264770386","264770387","264770388","264770389","264770390","264770391","264770392","264770393","264770394","264770395","264770396","264770397","264770398","264770399","264770400","264770401","264770402","264770403","264770404","264770405","264770406","264770407","264770408","264770409","264770410","264770411","264770412","264770413","264770414","264770415","264770416","264770417","264770418","264770419","264770420","264770421","264770422","264770423","264770424","264770425","264770426","264770427","264770428","264770429","264770430","264770431","264770432","264770433","264770434","264770435","264770436","264770437","264770438","264770439","264770440","264770441","264770442","264770443","264770444","264770445","264770446","264770447","264770448","264770449","264770450","264770451","264770452","264770453","264770454","264770455","264770456","264770457","264770458","264770459","264770460","264770461","264770462","264770463","264770464","264770465","264770466","264770467","264770468","264770469","264770470","264770471","264770472","264770473","264770474","264770475","264770476","264770477","264770478","264770479","264770480","264770481","264770482","264770483","264770484","264770485","264770486","264770487","264770488","264770489","264770490","264770491","264770492","264770493","264770494","264770495","264770496","264770497","264770498","264770499","264770500","264770501","264770502","264770503","264770504","264770505","264770506","264770507","264770508","264770509","264770510","264770511","264770512","264770513","264770514","264770515","264770516","264770517","264770518","264770519","264770520","264770521","264770522","264770523","264770524","264770525","264770526","264770527","264770528","264770529","264770530","264770531","264770532","264770533","264770534","264770535","264770536","
264770537","264770538","264770539","264770540","264770541","264770542","264770543","264770544","264770545","264770546","264770547","264770548","264770549","264770550","264770551","264770552","264770553","264770554","264770555","264770556","264770557","264770558","264770559","264770560","264770561","264770562","264770563","264770564","264770565","264770566","264770567","264770568","264770569","264770570","264770571","264770572","264770573","264770574","264770575","264770576","264770577","264770578","264770579","264770580","264770581","264770582","264770583","264770584","264770585","264770586","264770587","264770588","264770589","264770590","264770591","264770592","264770593","264770594","264770595","264770596","264770597","264770598","264770599","264770600","264770601","264770602","264770603","264770604","264770605","264770606","264770607","264770608","264770609","264770610","264770611","264770612","264770613","264770614","264770615","264770616","264770617","264770618","264770619","264770620","264770621","264770622","264770623","264770624","264770625","264770626","264770627","264770628","264770629","264770630","264770631","264770632","264770633","264770634","264770635","264770636","264770637","264770638","264770639","264770640","264770641","264770642","264770643","264770644","264770645","264770646","264770647","264770648","264770649","264770650","264770651","264770652","264770653","264770654","264770655","264770656","264770657","264770658","264770659","264770660","264770661","264770662","264770663","264770664","264770665","264770666","264770667","264770668","264770669","264770670","264770671","264770672","264770673","264770674","264770675","264770676","264770677","264770678","264770679","264770680","264770681","264770682","264770683","264770684","264770685","264770686","264770687","264770688","264770689","264770690","264770691","264770692","264770693","264770694","264770695","264770696","264770697","264770698","264770699","264770700","264770701","264770702","26477070
3","264770704","264770705","264770706","264770707","264770708","264770709","264770710","264770711","264770712","264770713","264770714","264770715","264770716","264770717","264770718","264770719","264770720","264770721","264770722","264770723","264770724","264770725","264770726","264770727","264770728","264770729","264770730","264770731","264770732","264770733","264770734","264770735","264770736","264770737","264770738","264770739","264770740","264770741","264770742","264770743","264770744","264770745","264770746","264770747","264770748","264770749","264770750","264770751","264770752","264770753","264770754","264770755","264770756","264770757","264770758","264770759","264770760","264770761","264770762","264770763","264770764","264770765","264770766","264770767","264770768","264770769","264770770","264770771","264770772","264770773","264770774","264770775","264770776","264770777","264770778","264770779","264770780","264770781","264770782","264770783","264770784","264770785","264770786","264770787","264770788","264770789","264770790","264770791","264770792","264770793","264770794","264770795","264770796","264770797","264770798","264770799","264770800","264770801","264770802","264770803","264770804","264770805","264770806","264770807","264770808","264770809","264770810","264770811","264770812","264770813","264770814","264770815","264770816","264770817","264770818","264770819","264770820","264770821","264770822","264770823","264770824","264770825","264770826","264770827","264770828","264770829","264770830","264770831","264770832","264770833","264770834","264770835","264770836","264770837","264770838","264770839","264770840","264770841","264770842","264770843","264770844","264770845","264770846","264770847","264770848","264770849","264770850","264770851","264770852","264770853","264770854","264770855","264770856","264770857","264770858","264770859","264770860","264770861","264770862","264770863","264770864","264770865","264770866","264770867","264770868","264770869","2647
70870","264770871","264770872","264770873","264770874","264770875","264770876","264770877","264770878","264770879","264770880","264770881","264770882","264770883","264770884","264770885","264770886","264770887","264770888","264770889","264770890","264770891","264770892","264770893","264770894","264770895","264770896","264770897","264770898","264770899","264770900","264770901","264770902","264770903","264770904","264770905","264770906","264770907","264770908","264770909","264770910","264770911","264770912","264770913","264770914","264770915","264770916","264770917","264770918","264770919","264770920","264770921","264770922","264770923","264770924","264770925","264770926","264770927","264770928","264770929","264770930","264770931","264770932","264770933","264770934","264770935","264770936","264770937","264770938","264770939","264770940","264770941","264770942","264770943","264770944","264770945","264770946","264770947","264770948","264770949","264770950","264770951","264770952","264770953","264770954","264770955","264770956","264770957","264770958","264770959","264770960","264770961","264770962","264770963","264770964","264770965","264770966","264770967","264770968","264770969","264770970","264770971","264770972","264770973","264770974","264770975","264770976","264770977","264770978","264770979","264770980","264770981","264770982","264770983","264770984","264770985","264770986","264770987","264770988","264770989","264770990","264770991","264770992","264770993","264770994","264770995","264770996","264770997","264770998","264770999","264771000","264771001","264771002","264771003","264771004","264771005","264771006","264771007","264771008","264771009","264771010","264771011","264771012","264771013","264771014","264771015","264771016","264771017","264771018","264771019","264771020","264771021","264771022","264771023","264771024","264771025","264771026","264771027","264771028","264771029","264771030","264771031","264771032","264771033","264771034","264771035","264771036","
264771037","264771038","264771039","264771040","264771041","264771042","264771043","264771044","264771045","264771046","264771047","264771048","264771049","264771050","264771051","264771052","264771053","264771054","264771055","264771056","264771057","264771058","264771059","264771060","264771061","264771062","264771063","264771064","264771065","264771066","264771067","264771068","264771069","264771070","264771071","264771072","264771073","264771074","264771075","264771076","264771077","264771078","264771079","264771080","264771081","264771082","264771083","264771084","264771085","264771086","264771087","264771088","264771089","264771090","264771091","264771092","264771093","264771094","264771095","264771096","264771097","264771098","264771099","264771100","264771101","264771102","264771103","264771104","264771105","264771106","264771107","264771108","264771109","264771110","264771111","264771112","264771113","264771114","264771115","264771116","264771117","264771118","264771119","264771120","264771121","264771122","264771123","264771124","264771125","264771126","264771127","264771128","264771129","264771130","264771131","264771132","264771133","264771134","264771135","264771136","264771137","264771138","264771139","264771140","264771141","264771142","264771143","264771144","264771145","264771146","264771147","264771148","264771149","264771150","264771151","264771152","264771153","264771154","264771155","264771156","264771157","264771158","264771159","264771160","264771161","264771162","264771163","264771164","264771165","264771166","264771167","264771168","264771169","264771170","264771171","264771172","264771173","264771174","264771175","264771176","264771177","264771178","264771179","264771180","264771181","264771182","264771183","264771184","264771185","264771186","264771187","264771188","264771189","264771190","264771191","264771192","264771193","264771194","264771195","264771196","264771197","264771198","264771199","264771200","264771201","264771202","26477120
3","264771204","264771205","264771206","264771207","264771208","264771209","264771210","264771211","264771212","264771213","264771214","264771215","264771216","264771217","264771218","264771219","264771220","264771221","264771222","264771223","264771224","264771225","264771226","264771227","264771228","264771229","264771230","264771231","264771232","264771233","264771234","264771235","264771236","264771237","264771238","264771239","264771240","264771241","264771242","264771243","264771244","264771245","264771246","264771247","264771248","264771249","264771250","264771251","264771252","264771253","264771254","264771255","264771256","264771257","264771258","264771259","264771260","264771261","264771262","264771263","264771264","264771265","264771266","264771267","264771268","264771269","264771270","264771271","264771272","264771273","264771274","264771275","264771276","264771277","264771278","264771279","264771280","264771281","264771282","264771283","264771284","264771285","264771286","264771287","264771288","264771289","264771290","264771291","264771292","264771293","264771294","264771295","264771296","264771297","264771298","264771299","264771300","264771301","264771302","264771303","264771304","264771305","264771306","264771307","264771308","264771309","264771310","264771311","264771312","264771313","264771314","264771315","264771316","264771317","264771318","264771319","264771320","264771321","264771322","264771323","264771324","264771325","264771326","264771327","264771328","264771329","264771330","264771331","264771332","264771333","264771334","264771335","264771336","264771337","264771338","264771339","264771340","264771341","264771342","264771343","264771344","264771345","264771346","264771347","264771348","264771349","264771350","264771351","264771352","264771353","264771354","264771355","264771356","264771357","264771358","264771359","264771360","264771361","264771362","264771363","264771364","264771365","264771366","264771367","264771368","264771369","2647
71370","264771371","264771372","264771373","264771374","264771375","264771376","264771377","264771378","264771379","264771380","264771381","264771382","264771383","264771384","264771385","264771386","264771387","264771388","264771389","264771390","264771391","264771392","264771393","264771394","264771395","264771396","264771397","264771398","264771399","264771400","264771401","264771402","264771403","264771404","264771405","264771406","264771407","264771408","264771409","264771410","264771411","264771412","264771413","264771414","264771415","264771416","264771417","264771418","264771419","264771420","264771421","264771422","264771423","264771424","264771425","264771426","264771427","264771428","264771429","264771430","264771431","264771432","264771433","264771434","264771435","264771436","264771437","264771438","264771439","264771440","264771441","264771442","264771443","264771444","264771445","264771446","264771447","264771448","264771449","264771450","264771451","264771452","264771453","264771454","264771455","264771456","264771457","264771458","264771459","264771460","264771461","264771462","264771463","264771464","264771465","264771466","264771467","264771468","264771469","264771470","264771471","264771472","264771473","264771474","264771475","264771476","264771477","264771478","264771479","264771480","264771481","264771482","264771483","264771484","264771485","264771486","264771487","264771488","264771489","264771490","264771491","264771492","264771493","264771494","264771495","264771496","264771497","264771498","264771499","264771500","264771501","264771502","264771503","264771504","264771505","264771506","264771507","264771508","264771509","264771510","264771511","264771512","264771513","264771514","264771515","264771516","264771517","264771518","264771519","264771520","264771521","264771522","264771523","264771524","264771525","264771526","264771527","264771528","264771529","264771530","264771531","264771532","264771533","264771534","264771535","264771536","
264771537","264771538","264771539","264771540","264771541","264771542","264771543","264771544","264771545","264771546","264771547","264771548","264771549","264771550","264771551","264771552","264771553","264771554","264771555","264771556","264771557","264771558","264771559","264771560","264771561","264771562","264771563","264771564","264771565","264771566","264771567","264771568","264771569","264771570","264771571","264771572","264771573","264771574","264771575","264771576","264771577","264771578","264771579","264771580","264771581","264771582","264771583","264771584","264771585","264771586","264771587","264771588","264771589","264771590","264771591","264771592","264771593","264771594","264771595","264771596","264771597","264771598","264771599","264771600","264771601","264771602","264771603","264771604","264771605","264771606","264771607","264771608","264771609","264771610","264771611","264771612","264771613","264771614","264771615","264771616","264771617","264771618","264771619","264771620","264771621","264771622","264771623","264771624","264771625","264771626","264771627","264771628","264771629","264771630","264771631","264771632","264771633","264771634","264771635","264771636","264771637","264771638","264771639","264771640","264771641","264771642","264771643","264771644","264771645","264771646","264771647","264771648","264771649","264771650","264771651","264771652","264771653","264771654","264771655","264771656","264771657","264771658","264771659","264771660","264771661","264771662","264771663","264771664","264771665","264771666","264771667","264771668","264771669","264771670","264771671","264771672","264771673","264771674","264771675","264771676","264771677","264771678","264771679","264771680","264771681","264771682","264771683","264771684","264771685","264771686","264771687","264771688","264771689","264771690","264771691","264771692","264771693","264771694","264771695","264771696","264771697","264771698","264771699","264771700","264771701","264771702","26477170
3","264771704","264771705","264771706","264771707","264771708","264771709","264771710","264771711","264771712","264771713","264771714","264771715","264771716","264771717","264771718","264771719","264771720","264771721","264771722","264771723","264771724","264771725","264771726","264771727","264771728","264771729","264771730","264771731","264771732","264771733","264771734","264771735","264771736","264771737","264771738","264771739","264771740","264771741","264771742","264771743","264771744","264771745","264771746","264771747","264771748","264771749","264771750","264771751","264771752","264771753","264771754","264771755","264771756","264771757","264771758","264771759","264771760","264771761","264771762","264771763","264771764","264771765","264771766","264771767","264771768","264771769","264771770","264771771","264771772","264771773","264771774","264771775","264771776","264771777","264771778","264771779","264771780","264771781","264771782","264771783","264771784","264771785","264771786","264771787","264771788","264771789","264771790","264771791","264771792","264771793","264771794","264771795","264771796","264771797","264771798","264771799","264771800","264771801","264771802","264771803","264771804","264771805","264771806","264771807","264771808","264771809","264771810","264771811","264771812","264771813","264771814","264771815","264771816","264771817","264771818","264771819","264771820","264771821","264771822","264771823","264771824","264771825","264771826","264771827","264771828","264771829","264771830","264771831","264771832","264771833","264771834","264771835","264771836","264771837","264771838","264771839","264771840","264771841","264771842","264771843","264771844","264771845","264771846","264771847","264771848","264771849","264771850","264771851","264771852","264771853","264771854","264771855","264771856","264771857","264771858","264771859","264771860","264771861","264771862","264771863","264771864","264771865","264771866","264771867","264771868","264771869","2647
71870","264771871","264771872","264771873","264771874","264771875","264771876","264771877","264771878","264771879","264771880","264771881","264771882","264771883","264771884","264771885","264771886","264771887","264771888","264771889","264771890","264771891","264771892","264771893","264771894","264771895","264771896","264771897","264771898","264771899","264771900","264771901","264771902","264771903","264771904","264771905","264771906","264771907","264771908","264771909","264771910","264771911","264771912","264771913","264771914","264771915","264771916","264771917","264771918","264771919","264771920","264771921","264771922","264771923","264771924","264771925","264771926","264771927","264771928","264771929","264771930","264771931","264771932","264771933","264771934","264771935","264771936","264771937","264771938","264771939","264771940","264771941","264771942","264771943","264771944","264771945","264771946","264771947","264771948","264771949","264771950","264771951","264771952","264771953","264771954","264771955","264771956","264771957","264771958","264771959","264771960","264771961","264771962","264771963","264771964","264771965","264771966","264771967","264771968","264771969","264771970","264771971","264771972","264771973","264771974","264771975","264771976","264771977","264771978","264771979","264771980","264771981","264771982","264771983","264771984","264771985","264771986","264771987","264771988","264771989","264771990","264771991","264771992","264771993","264771994","264771995","264771996","264771997","264771998","264771999","264772000","264772001","264772002","264772003","264772004","264772005","264772006","264772007","264772008","264772009","264772010","264772011","264772012","264772013","264772014","264772015","264772016","264772017","264772018","264772019","264772020","264772021","264772022","264772023","264772024","264772025","264772026","264772027","264772028","264772029","264772030","264772031","264772032","264772033","264772034","264772035","264772036","
264772037","264772038","264772039","264772040","264772041","264772042","264772043","264772044","264772045","264772046","264772047","264772048","264772049","264772050","264772051","264772052","264772053","264772054","264772055","264772056","264772057","264772058","264772059","264772060","264772061","264772062","264772063","264772064","264772065","264772066","264772067","264772068","264772069","264772070","264772071","264772072","264772073","264772074","264772075","264772076","264772077","264772078","264772079","264772080","264772081","264772082","264772083","264772084","264772085","264772086","264772087","264772088","264772089","264772090","264772091","264772092","264772093","264772094","264772095","264772096","264772097","264772098","264772099","264772100","264772101","264772102","264772103","264772104","264772105","264772106","264772107","264772108","264772109","264772110","264772111","264772112","264772113","264772114","264772115","264772116","264772117","264772118","264772119","264772120","264772121","264772122","264772123","264772124","264772125","264772126","264772127","264772128","264772129","264772130","264772131","264772132","264772133","264772134","264772135","264772136","264772137","264772138","264772139","264772140","264772141","264772142","264772143","264772144","264772145","264772146","264772147","264772148","264772149","264772150","264772151","264772152","264772153","264772154","264772155","264772156","264772157","264772158","264772159","264772160","264772161","264772162","264772163","264772164","264772165","264772166","264772167","264772168","264772169","264772170","264772171","264772172","264772173","264772174","264772175","264772176","264772177","264772178","264772179","264772180","264772181","264772182","264772183","264772184","264772185","264772186","264772187","264772188","264772189","264772190","264772191","264772192","264772193","264772194","264772195","264772196","264772197","264772198","264772199","264772200","264772201","264772202","26477220
3","264772204","264772205","264772206","264772207","264772208","264772209","264772210","264772211","264772212","264772213","264772214","264772215","264772216","264772217","264772218","264772219","264772220","264772221","264772222","264772223","264772224","264772225","264772226","264772227","264772228","264772229","264772230","264772231","264772232","264772233","264772234","264772235","264772236","264772237","264772238","264772239","264772240","264772241","264772242","264772243","264772244","264772245","264772246","264772247","264772248","264772249","264772250","264772251","264772252","264772253","264772254","264772255","264772256","264772257","264772258","264772259","264772260","264772261","264772262","264772263","264772264","264772265","264772266","264772267","264772268","264772269","264772270","264772271","264772272","264772273","264772274","264772275","264772276","264772277","264772278","264772279","264772280","264772281","264772282","264772283","264772284","264772285","264772286","264772287","264772288","264772289","264772290","264772291","264772292","264772293","264772294","264772295","264772296","264772297","264772298","264772299","264772300","264772301","264772302","264772303","264772304","264772305","264772306","264772307","264772308","264772309","264772310","264772311","264772312","264772313","264772314","264772315","264772316","264772317","264772318","264772319","264772320","264772321","264772322","264772323","264772324","264772325","264772326","264772327","264772328","264772329","264772330","264772331","264772332","264772333","264772334","264772335","264772336","264772337","264772338","264772339","264772340","264772341","264772342","264772343","264772344","264772345","264772346","264772347","264772348","264772349","264772350","264772351","264772352","264772353","264772354","264772355","264772356","264772357","264772358","264772359","264772360","264772361","264772362","264772363","264772364","264772365","264772366","264772367","264772368","264772369","2647
72370","264772371","264772372","264772373","264772374","264772375","264772376","264772377","264772378","264772379","264772380","264772381","264772382","264772383","264772384","264772385","264772386","264772387","264772388","264772389","264772390","264772391","264772392","264772393","264772394","264772395","264772396","264772397","264772398","264772399","264772400","264772401","264772402","264772403","264772404","264772405","264772406","264772407","264772408","264772409","264772410","264772411","264772412","264772413","264772414","264772415","264772416","264772417","264772418","264772419","264772420","264772421","264772422","264772423","264772424","264772425","264772426","264772427","264772428","264772429","264772430","264772431","264772432","264772433","264772434","264772435","264772436","264772437","264772438","264772439","264772440","264772441","264772442","264772443","264772444","264772445","264772446","264772447","264772448","264772449","264772450","264772451","264772452","264772453","264772454","264772455","264772456","264772457","264772458","264772459","264772460","264772461","264772462","264772463","264772464","264772465","264772466","264772467","264772468","264772469","264772470","264772471","264772472","264772473","264772474","264772475","264772476","264772477","264772478","264772479","264772480","264772481","264772482","264772483","264772484","264772485","264772486","264772487","264772488","264772489","264772490","264772491","264772492","264772493","264772494","264772495","264772496","264772497","264772498","264772499","264772500","264772501","264772502","264772503","264772504","264772505","264772506","264772507","264772508","264772509","264772510","264772511","264772512","264772513","264772514","264772515","264772516","264772517","264772518","264772519","264772520","264772521","264772522","264772523","264772524","264772525","264772526","264772527","264772528","264772529","264772530","264772531","264772532","264772533","264772534","264772535","264772536","
264772537","264772538","264772539","264772540","264772541","264772542","264772543","264772544","264772545","264772546","264772547","264772548","264772549","264772550","264772551","264772552","264772553","264772554","264772555","264772556","264772557","264772558","264772559","264772560","264772561","264772562","264772563","264772564","264772565","264772566","264772567","264772568","264772569","264772570","264772571","264772572","264772573","264772574","264772575","264772576","264772577","264772578","264772579","264772580","264772581","264772582","264772583","264772584","264772585","264772586","264772587","264772588","264772589","264772590","264772591","264772592","264772593","264772594","264772595","264772596","264772597","264772598","264772599","264772600","264772601","264772602","264772603","264772604","264772605","264772606","264772607","264772608","264772609","264772610","264772611","264772612","264772613","264772614","264772615","264772616","264772617","264772618","264772619","264772620","264772621","264772622","264772623","264772624","264772625","264772626","264772627","264772628","264772629","264772630","264772631","264772632","264772633","264772634","264772635","264772636","264772637","264772638","264772639","264772640","264772641","264772642","264772643","264772644","264772645","264772646","264772647","264772648","264772649","264772650","264772651","264772652","264772653","264772654","264772655","264772656","264772657","264772658","264772659","264772660","264772661","264772662","264772663","264772664","264772665","264772666","264772667","264772668","264772669","264772670","264772671","264772672","264772673","264772674","264772675","264772676","264772677","264772678","264772679","264772680","264772681","264772682","264772683","264772684","264772685","264772686","264772687","264772688","264772689","264772690","264772691","264772692","264772693","264772694","264772695","264772696","264772697","264772698","264772699","264772700","264772701","264772702","26477270
3","264772704","264772705","264772706","264772707","264772708","264772709","264772710","264772711","264772712","264772713","264772714","264772715","264772716","264772717","264772718","264772719","264772720","264772721","264772722","264772723","264772724","264772725","264772726","264772727","264772728","264772729","264772730","264772731","264772732","264772733","264772734","264772735","264772736","264772737","264772738","264772739","264772740","264772741","264772742","264772743","264772744","264772745","264772746","264772747","264772748","264772749","264772750","264772751","264772752","264772753","264772754","264772755","264772756","264772757","264772758","264772759","264772760","264772761","264772762","264772763","264772764","264772765","264772766","264772767","264772768","264772769","264772770","264772771","264772772","264772773","264772774","264772775","264772776","264772777","264772778","264772779","264772780","264772781","264772782","264772783","264772784","264772785","264772786","264772787","264772788","264772789","264772790","264772791","264772792","264772793","264772794","264772795","264772796","264772797","264772798","264772799","264772800","264772801","264772802","264772803","264772804","264772805","264772806","264772807","264772808","264772809","264772810","264772811","264772812","264772813","264772814","264772815","264772816","264772817","264772818","264772819","264772820","264772821","264772822","264772823","264772824","264772825","264772826","264772827","264772828","264772829","264772830","264772831","264772832","264772833","264772834","264772835","264772836","264772837","264772838","264772839","264772840","264772841","264772842","264772843","264772844","264772845","264772846","264772847","264772848","264772849","264772850","264772851","264772852","264772853","264772854","264772855","264772856","264772857","264772858","264772859","264772860","264772861","264772862","264772863","264772864","264772865","264772866","264772867","264772868","264772869","2647
72870","264772871","264772872","264772873","264772874","264772875","264772876","264772877","264772878","264772879","264772880","264772881","264772882","264772883","264772884","264772885","264772886","264772887","264772888","264772889","264772890","264772891","264772892","264772893","264772894","264772895","264772896","264772897","264772898","264772899","264772900","264772901","264772902","264772903","264772904","264772905","264772906","264772907","264772908","264772909","264772910","264772911","264772912","264772913","264772914","264772915","264772916","264772917","264772918","264772919","264772920","264772921","264772922","264772923","264772924","264772925","264772926","264772927","264772928","264772929","264772930","264772931","264772932","264772933","264772934","264772935","264772936","264772937","264772938","264772939","264772940","264772941","264772942","264772943","264772944","264772945","264772946","264772947","264772948","264772949","264772950","264772951","264772952","264772953","264772954","264772955","264772956","264772957","264772958","264772959","264772960","264772961","264772962","264772963","264772964","264772965","264772966","264772967","264772968","264772969","264772970","264772971","264772972","264772973","264772974","264772975","264772976","264772977","264772978","264772979","264772980","264772981","264772982","264772983","264772984","264772985","264772986","264772987","264772988","264772989","264772990","264772991","264772992","264772993","264772994","264772995","264772996","264772997","264772998","264772999","264773000","264773001","264773002","264773003","264773004","264773005","264773006","264773007","264773008","264773009","264773010","264773011","264773012","264773013","264773014","264773015","264773016","264773017","264773018","264773019","264773020","264773021","264773022","264773023","264773024","264773025","264773026","264773027","264773028","264773029","264773030","264773031","264773032","264773033","264773034","264773035","264773036","
264773037","264773038","264773039","264773040","264773041","264773042","264773043","264773044","264773045","264773046","264773047","264773048","264773049","264773050","264773051","264773052","264773053","264773054","264773055","264773056","264773057","264773058","264773059","264773060","264773061","264773062","264773063","264773064","264773065","264773066","264773067","264773068","264773069","264773070","264773071","264773072","264773073","264773074","264773075","264773076","264773077","264773078","264773079","264773080","264773081","264773082","264773083","264773084","264773085","264773086","264773087","264773088","264773089","264773090","264773091","264773092","264773093","264773094","264773095","264773096","264773097","264773098","264773099","264773100","264773101","264773102","264773103","264773104","264773105","264773106","264773107","264773108","264773109","264773110","264773111","264773112","264773113","264773114","264773115","264773116","264773117","264773118","264773119","264773120","264773121","264773122","264773123","264773124","264773125","264773126","264773127","264773128","264773129","264773130","264773131","264773132","264773133","264773134","264773135","264773136","264773137","264773138","264773139","264773140","264773141","264773142","264773143","264773144","264773145","264773146","264773147","264773148","264773149","264773150","264773151","264773152","264773153","264773154","264773155","264773156","264773157","264773158","264773159","264773160","264773161","264773162","264773163","264773164","264773165","264773166","264773167","264773168","264773169","264773170","264773171","264773172","264773173","264773174","264773175","264773176","264773177","264773178","264773179","264773180","264773181","264773182","264773183","264773184","264773185","264773186","264773187","264773188","264773189","264773190","264773191","264773192","264773193","264773194","264773195","264773196","264773197","264773198","264773199","264773200","264773201","264773202","26477320
3","264773204","264773205","264773206","264773207","264773208","264773209","264773210","264773211","264773212","264773213","264773214","264773215","264773216","264773217","264773218","264773219","264773220","264773221","264773222","264773223","264773224","264773225","264773226","264773227","264773228","264773229","264773230","264773231","264773232","264773233","264773234","264773235","264773236","264773237","264773238","264773239","264773240","264773241","264773242","264773243","264773244","264773245","264773246","264773247","264773248","264773249","264773250","264773251","264773252","264773253","264773254","264773255","264773256","264773257","264773258","264773259","264773260","264773261","264773262","264773263","264773264","264773265","264773266","264773267","264773268","264773269","264773270","264773271","264773272","264773273","264773274","264773275","264773276","264773277","264773278","264773279","264773280","264773281","264773282","264773283","264773284","264773285","264773286","264773287","264773288","264773289","264773290","264773291","264773292","264773293","264773294","264773295","264773296","264773297","264773298","264773299","264773300","264773301","264773302","264773303","264773304","264773305","264773306","264773307","264773308","264773309","264773310","264773311","264773312","264773313","264773314","264773315","264773316","264773317","264773318","264773319","264773320","264773321","264773322","264773323","264773324","264773325","264773326","264773327","264773328","264773329","264773330","264773331","264773332","264773333","264773334","264773335","264773336","264773337","264773338","264773339","264773340","264773341","264773342","264773343","264773344","264773345","264773346","264773347","264773348","264773349","264773350","264773351","264773352","264773353","264773354","264773355","264773356","264773357","264773358","264773359","264773360","264773361","264773362","264773363","264773364","264773365","264773366","264773367","264773368","264773369","2647
73370","264773371","264773372","264773373","264773374","264773375","264773376","264773377","264773378","264773379","264773380","264773381","264773382","264773383","264773384","264773385","264773386","264773387","264773388","264773389","264773390","264773391","264773392","264773393","264773394","264773395","264773396","264773397","264773398","264773399","264773400","264773401","264773402","264773403","264773404","264773405","264773406","264773407","264773408","264773409","264773410","264773411","264773412","264773413","264773414","264773415","264773416","264773417","264773418","264773419","264773420","264773421","264773422","264773423","264773424","264773425","264773426","264773427","264773428","264773429","264773430","264773431","264773432","264773433","264773434","264773435","264773436","264773437","264773438","264773439","264773440","264773441","264773442","264773443","264773444","264773445","264773446","264773447","264773448","264773449","264773450","264773451","264773452","264773453","264773454","264773455","264773456","264773457","264773458","264773459","264773460","264773461","264773462","264773463","264773464","264773465","264773466","264773467","264773468","264773469","264773470","264773471","264773472","264773473","264773474","264773475","264773476","264773477","264773478","264773479","264773480","264773481","264773482","264773483","264773484","264773485","264773486","264773487","264773488","264773489","264773490","264773491","264773492","264773493","264773494","264773495","264773496","264773497","264773498","264773499","264773500","264773501","264773502","264773503","264773504","264773505","264773506","264773507","264773508","264773509","264773510","264773511","264773512","264773513","264773514","264773515","264773516","264773517","264773518","264773519","264773520","264773521","264773522","264773523","264773524","264773525","264773526","264773527","264773528","264773529","264773530","264773531","264773532","264773533","264773534","264773535","264773536","
264773537","264773538","264773539","264773540","264773541","264773542","264773543","264773544","264773545","264773546","264773547","264773548","264773549","264773550","264773551","264773552","264773553","264773554","264773555","264773556","264773557","264773558","264773559","264773560","264773561","264773562","264773563","264773564","264773565","264773566","264773567","264773568","264773569","264773570","264773571","264773572","264773573","264773574","264773575","264773576","264773577","264773578","264773579","264773580","264773581","264773582","264773583","264773584","264773585","264773586","264773587","264773588","264773589","264773590","264773591","264773592","264773593","264773594","264773595","264773596","264773597","264773598","264773599","264773600","264773601","264773602","264773603","264773604","264773605","264773606","264773607","264773608","264773609","264773610","264773611","264773612","264773613","264773614","264773615","264773616","264773617","264773618","264773619","264773620","264773621","264773622","264773623","264773624","264773625","264773626","264773627","264773628","264773629","264773630","264773631","264773632","264773633","264773634","264773635","264773636","264773637","264773638","264773639","264773640","264773641","264773642","264773643","264773644","264773645","264773646","264773647","264773648","264773649","264773650","264773651","264773652","264773653","264773654","264773655","264773656","264773657","264773658","264773659","264773660","264773661","264773662","264773663","264773664","264773665","264773666","264773667","264773668","264773669","264773670","264773671","264773672","264773673","264773674","264773675","264773676","264773677","264773678","264773679","264773680","264773681","264773682","264773683","264773684","264773685","264773686","264773687","264773688","264773689","264773690","264773691","264773692","264773693","264773694","264773695","264773696","264773697","264773698","264773699","264773700","264773701","264773702","26477370
3","264773704","264773705","264773706","264773707","264773708","264773709","264773710","264773711","264773712","264773713","264773714","264773715","264773716","264773717","264773718","264773719","264773720","264773721","264773722","264773723","264773724","264773725","264773726","264773727","264773728","264773729","264773730","264773731","264773732","264773733","264773734","264773735","264773736","264773737","264773738","264773739","264773740","264773741","264773742","264773743","264773744","264773745","264773746","264773747","264773748","264773749","264773750","264773751","264773752","264773753","264773754","264773755","264773756","264773757","264773758","264773759","264773760","264773761","264773762","264773763","264773764","264773765","264773766","264773767","264773768","264773769","264773770","264773771","264773772","264773773","264773774","264773775","264773776","264773777","264773778","264773779","264773780","264773781","264773782","264773783","264773784","264773785","264773786","264773787","264773788","264773789","264773790","264773791","264773792","264773793","264773794","264773795","264773796","264773797","264773798","264773799","264773800","264773801","264773802","264773803","264773804","264773805","264773806","264773807","264773808","264773809","264773810","264773811","264773812","264773813","264773814","264773815","264773816","264773817","264773818","264773819","264773820","264773821","264773822","264773823","264773824","264773825","264773826","264773827","264773828","264773829","264773830","264773831","264773832","264773833","264773834","264773835","264773836","264773837","264773838","264773839","264773840","264773841","264773842","264773843","264773844","264773845","264773846","264773847","264773848","264773849","264773850","264773851","264773852","264773853","264773854","264773855","264773856","264773857","264773858","264773859","264773860","264773861","264773862","264773863","264773864","264773865","264773866","264773867","264773868","264773869","2647
73870","264773871","264773872","264773873","264773874","264773875","264773876","264773877","264773878","264773879","264773880","264773881","264773882","264773883","264773884","264773885","264773886","264773887","264773888","264773889","264773890","264773891","264773892","264773893","264773894","264773895","264773896","264773897","264773898","264773899","264773900","264773901","264773902","264773903","264773904","264773905","264773906","264773907","264773908","264773909","264773910","264773911","264773912","264773913","264773914","264773915","264773916","264773917","264773918","264773919","264773920","264773921","264773922","264773923","264773924","264773925","264773926","264773927","264773928","264773929","264773930","264773931","264773932","264773933","264773934","264773935","264773936","264773937","264773938","264773939","264773940","264773941","264773942","264773943","264773944","264773945","264773946","264773947","264773948","264773949","264773950","264773951","264773952","264773953","264773954","264773955","264773956","264773957","264773958","264773959","264773960","264773961","264773962","264773963","264773964","264773965","264773966","264773967","264773968","264773969","264773970","264773971","264773972","264773973","264773974","264773975","264773976","264773977","264773978","264773979","264773980","264773981","264773982","264773983","264773984","264773985","264773986","264773987","264773988","264773989","264773990","264773991","264773992","264773993","264773994","264773995","264773996","264773997","264773998","264773999","264774000","264774001","264774002","264774003","264774004","264774005","264774006","264774007","264774008","264774009","264774010","264774011","264774012","264774013","264774014","264774015","264774016","264774017","264774018","264774019","264774020","264774021","264774022","264774023","264774024","264774025","264774026","264774027","264774028","264774029","264774030","264774031","264774032","264774033","264774034","264774035","264774036","
264774037","264774038","264774039","264774040","264774041","264774042","264774043","264774044","264774045","264774046","264774047","264774048","264774049","264774050","264774051","264774052","264774053","264774054","264774055","264774056","264774057","264774058","264774059","264774060","264774061","264774062","264774063","264774064","264774065","264774066","264774067","264774068","264774069","264774070","264774071","264774072","264774073","264774074","264774075","264774076","264774077","264774078","264774079","264774080","264774081","264774082","264774083","264774084","264774085","264774086","264774087","264774088","264774089","264774090","264774091","264774092","264774093","264774094","264774095","264774096","264774097","264774098","264774099","264774100","264774101","264774102","264774103","264774104","264774105","264774106","264774107","264774108","264774109","264774110","264774111","264774112","264774113","264774114","264774115","264774116","264774117","264774118","264774119","264774120","264774121","264774122","264774123","264774124","264774125","264774126","264774127","264774128","264774129","264774130","264774131","264774132","264774133","264774134","264774135","264774136","264774137","264774138","264774139","264774140","264774141","264774142","264774143","264774144","264774145","264774146","264774147","264774148","264774149","264774150","264774151","264774152","264774153","264774154","264774155","264774156","264774157","264774158","264774159","264774160","264774161","264774162","264774163","264774164","264774165","264774166","264774167","264774168","264774169","264774170","264774171","264774172","264774173","264774174","264774175","264774176","264774177","264774178","264774179","264774180","264774181","264774182","264774183","264774184","264774185","264774186","264774187","264774188","264774189","264774190","264774191","264774192","264774193","264774194","264774195","264774196","264774197","264774198","264774199","264774200","264774201","264774202","26477420
3","264774204","264774205","264774206","264774207","264774208","264774209","264774210","264774211","264774212","264774213","264774214","264774215","264774216","264774217","264774218","264774219","264774220","264774221","264774222","264774223","264774224","264774225","264774226","264774227","264774228","264774229","264774230","264774231","264774232","264774233","264774234","264774235","264774236","264774237","264774238","264774239","264774240","264774241","264774242","264774243","264774244","264774245","264774246","264774247","264774248","264774249","264774250","264774251","264774252","264774253","264774254","264774255","264774256","264774257","264774258","264774259","264774260","264774261","264774262","264774263","264774264","264774265","264774266","264774267","264774268","264774269","264774270","264774271","264774272","264774273","264774274","264774275","264774276","264774277","264774278","264774279","264774280","264774281","264774282","264774283","264774284","264774285","264774286","264774287","264774288","264774289","264774290","264774291","264774292","264774293","264774294","264774295","264774296","264774297","264774298","264774299","264774300","264774301","264774302","264774303","264774304","264774305","264774306","264774307","264774308","264774309","264774310","264774311","264774312","264774313","264774314","264774315","264774316","264774317","264774318","264774319","264774320","264774321","264774322","264774323","264774324","264774325","264774326","264774327","264774328","264774329","264774330","264774331","264774332","264774333","264774334","264774335","264774336","264774337","264774338","264774339","264774340","264774341","264774342","264774343","264774344","264774345","264774346","264774347","264774348","264774349","264774350","264774351","264774352","264774353","264774354","264774355","264774356","264774357","264774358","264774359","264774360","264774361","264774362","264774363","264774364","264774365","264774366","264774367","264774368","264774369","2647
74370","264774371","264774372","264774373","264774374","264774375","264774376","264774377","264774378","264774379","264774380","264774381","264774382","264774383","264774384","264774385","264774386","264774387","264774388","264774389","264774390","264774391","264774392","264774393","264774394","264774395","264774396","264774397","264774398","264774399","264774400","264774401","264774402","264774403","264774404","264774405","264774406","264774407","264774408","264774409","264774410","264774411","264774412","264774413","264774414","264774415","264774416","264774417","264774418","264774419","264774420","264774421","264774422","264774423","264774424","264774425","264774426","264774427","264774428","264774429","264774430","264774431","264774432","264774433","264774434","264774435","264774436","264774437","264774438","264774439","264774440","264774441","264774442","264774443","264774444","264774445","264774446","264774447","264774448","264774449","264774450","264774451","264774452","264774453","264774454","264774455","264774456","264774457","264774458","264774459","264774460","264774461","264774462","264774463","264774464","264774465","264774466","264774467","264774468","264774469","264774470","264774471","264774472","264774473","264774474","264774475","264774476","264774477","264774478","264774479","264774480","264774481","264774482","264774483","264774484","264774485","264774486","264774487","264774488","264774489","264774490","264774491","264774492","264774493","264774494","264774495","264774496","264774497","264774498","264774499","264774500","264774501","264774502","264774503","264774504","264774505","264774506","264774507","264774508","264774509","264774510","264774511","264774512","264774513","264774514","264774515","264774516","264774517","264774518","264774519","264774520","264774521","264774522","264774523","264774524","264774525","264774526","264774527","264774528","264774529","264774530","264774531","264774532","264774533","264774534","264774535","264774536","
264774537","264774538","264774539","264774540","264774541","264774542","264774543","264774544","264774545","264774546","264774547","264774548","264774549","264774550","264774551","264774552","264774553","264774554","264774555","264774556","264774557","264774558","264774559","264774560","264774561","264774562","264774563","264774564","264774565","264774566","264774567","264774568","264774569","264774570","264774571","264774572","264774573","264774574","264774575","264774576","264774577","264774578","264774579","264774580","264774581","264774582","264774583","264774584","264774585","264774586","264774587","264774588","264774589","264774590","264774591","264774592","264774593","264774594","264774595","264774596","264774597","264774598","264774599","264774600","264774601","264774602","264774603","264774604","264774605","264774606","264774607","264774608","264774609","264774610","264774611","264774612","264774613","264774614","264774615","264774616","264774617","264774618","264774619","264774620","264774621","264774622","264774623","264774624","264774625","264774626","264774627","264774628","264774629","264774630","264774631","264774632","264774633","264774634","264774635","264774636","264774637","264774638","264774639","264774640","264774641","264774642","264774643","264774644","264774645","264774646","264774647","264774648","264774649","264774650","264774651","264774652","264774653","264774654","264774655","264774656","264774657","264774658","264774659","264774660","264774661","264774662","264774663","264774664","264774665","264774666","264774667","264774668","264774669","264774670","264774671","264774672","264774673","264774674","264774675","264774676","264774677","264774678","264774679","264774680","264774681","264774682","264774683","264774684","264774685","264774686","264774687","264774688","264774689","264774690","264774691","264774692","264774693","264774694","264774695","264774696","264774697","264774698","264774699","264774700","264774701","264774702","26477470
3","264774704","264774705","264774706","264774707","264774708","264774709","264774710","264774711","264774712","264774713","264774714","264774715","264774716","264774717","264774718","264774719","264774720","264774721","264774722","264774723","264774724","264774725","264774726","264774727","264774728","264774729","264774730","264774731","264774732","264774733","264774734","264774735","264774736","264774737","264774738","264774739","264774740","264774741","264774742","264774743","264774744","264774745","264774746","264774747","264774748","264774749","264774750","264774751","264774752","264774753","264774754","264774755","264774756","264774757","264774758","264774759","264774760","264774761","264774762","264774763","264774764","264774765","264774766","264774767","264774768","264774769","264774770","264774771","264774772","264774773","264774774","264774775","264774776","264774777","374893581","374893582","374893583","374893584","374893585","374893586","374893587","374893588","374893589","374893590","374893591","374893592","374893593","374893594","374893595","374893596","374893597","374893598","374893599","374893600","374893601","374893602","374893603","374893604","374893605","374893606","374893607","374893608","374893609","374893610","374893611","374893612","374893613","374893614","374893615","374893616","374893617","374893618","374893619","374893620","374893621","374893622","374893623","374893624","374893625","374893626","374893627","374893628","374893629","374893630","374893631","374893632","374893633","374893634","374893635","374893636","374893637","374893638","374893639","374893640","374893641","374893642","374893643","374893644","374893645","374893646","374893647","374893648","374893649","374893650","374893651","374893652","374893653","374893654","374893655","374893656","374893657","374893658","374893659","374893660","374893661","374893662","374893663","374893664","374893665","374893666","374893667","374893668","374893669","374893670","374893671","374893672","3748
93673","374893674","374893675","374893676","374893677","374893678","374893679","374893680","374893681","374893682","374893683","374893684","374893685","374893686","374893687","374893688","374893689","374893690","374893691","374893692","374893693","374893694","374893695","374893696","374893697","374893698","374893699","374893700","374893701","374893702","374893703","374893704","374893705","374893706","374893707","374893708","374893709","374893710","374893711","374893712","374893713","374893714","374893715","374893716","374893717","374893718","374893719","374893720","374893721","374893722","374893723","374893724","374893725","374893726","374893727","374893728","374893729","374893730","374893731","374893732","374893733","374893734","374893735","374893736","374893737","374893738","374893739","374893740","374893741","374893742","374893743","374893744","374893745","374893746","374893747","374893748","374893749","374893750","374893751","374893752","374893753","374893754","374893755","374893756","374893757","374893758","374893759","374893760","374893761","374893762","374893763","374893764","374893765","374893766","374893767","374893768","374893769","374893770","374893771","374893772","374893773","374893774","374893775","374893776","374893777","374893778","374893779","374893780","374893781","374893782","374893783","374893784","374893785","374893786","374893787","374893788","374893789","374893790","374893791","374893792","374893793","374893794","374893795","374893796","374893797","374893798","374893799","374893800","374893801","374893802","374893803","374893804","374893805","374893806","374893807","374893808","374893809","374893810","374893811","374893812","374893813","374893814","374893815","374893816","374893817","374893818","374893819","374893820","374893821","374893822","374893823","374893824","374893825","374893826","374893827","374893828","374893829","374893830","374893831","374893832","374893833","374893834","374893835","374893836","374893837","374893838","374893839","
374893840","374893841","374893842","374893843","374893844","374893845","374893846","374893847","374893848","374893849","374893850","374893851","374893852","374893853","374893854","374893855","374893856","374893857","374893858","374893859","374893860","374893861","374893862","374893863","374893864","374893865","374893866","374893867","374893868","374893869","374893870","374893871","374893872","374893873","374893874","374893875","374893876","374893877","374893878","374893879","374893880","374893881","374893882","374893883","374893884","374893885","374893886","374893887","374893888","374893889","374893890","374893891","374893892","374893893","374893894","374893895","374893896","374893897","374893898","374893899","374893900","374893901","374893902","374893903","374893904","374893905","374893906","374893907","374893908","374893909","374893910","374893911","374893912","374893913","374893914","374893915","374893916","374893917","374893918","374893919","374893920","374893921","374893922","374893923","374893924","374893925","374893926","374893927","374893928","374893929","374893930","374893931","374893932","374893933","374893934","374893935","374893936","374893937","374893938","374893939","374893940","374893941","374893942","374893943","374893944","374893945","374893946","374893947","374893948","374893949","374893950","374893951","374893952","374893953","374893954","374893955","374893956","374893957","374893958","374893959","374893960","374893961","374893962","374893963","374893964","374893965","374893966","374893967","374893968","374893969","374893970","374893971","374893972","374893973","374893974","374893975","374893976","374893977","374893978","374893979","374893980","374893981","374893982","374893983","374893984","374893985","374893986","374893987","374893988","374893989","374893990","374893991","374893992","374893993","374893994","374893995","374893996","374893997","374893998","374893999","374894000","374894001","374894002","374894003","374894004","374894005","37489400
6","374894007","374894008","374894009","374894010","374894011","374894012","374894013","374894014","374894015","374894016","374894017","374894018","374894019","374894020","374894021","374894022","374894023","374894024","374894025","374894026","374894027","374894028","374894029","374894030","374894031","374894032","374894033","374894034","374894035","374894036","374894037","374894038","374894039","374894040","374894041","374894042","374894043","374894044","374894045","374894046","374894047","374894048","374894049","374894050","374894051","374894052","374894053","374894054","374894055","374894056","374894057","374894058","374894059","374894060","374894061","374894062","374894063","374894064","374894065","374894066","374894067","374894068","374894069","374894070","374894071","374894072","374894073","374894074","374894075","374894076","374894077","374894078","374894079","374894080","374894081","374894082","374894083","374894084","374894085","374894086","374894087","374894088","374894089","374894090","374894091","374894092","374894093","374894094","374894095","374894096","374894097","374894098","374894099","374894100","374894101","374894102","374894103","374894104","374894105","374894106","374894107","374894108","374894109","374894110","374894111","374894112","374894113","374894114","374894115","374894116","374894117","374894118","374894119","374894120","374894121","374894122","374894123","374894124","374894125","374894126","374894127","374894128","374894129","374894130","374894131","374894132","374894133","374894134","374894135","374894136","374894137","374894138","374894139","374894140","374894141","374894142","374894143","374894144","374894145","374894146","374894147","374894148","374894149","399778805","413103208","413103209","414196010","424604114","424604115","424604116","424604117","424604118","426723167","431849818","431849819","431849820","431849821","431849822","431849823","431849824","431849825","431849826","431849827","431849828","431849829","431849830","4318
49831","431849832","431849833","431849834","431849835","431849836","431849837","431849838","431849839","431849840","431849841","431849842","431849843","431849844","431849845","431849846","431849847","431849848","431849849","431849850","431849851","431849852","431849853","431849854","431849855","431849856","431849857","431849858","431849859","431849860","431849861","431849862","431849863","431849864","431849865","431849866","431849867","431849868","431849869","431849870","431849871","431849872","431849873","431849874","431849875","431849876","431849877","431849878","431849879","431849880","431849881","431849882","431849883","431849884","431849885","431849886","431849887","431849888","431849889","431849890","431849891","431849892","431849893","431849894","431849895","431849896","431849897","431849898","431849899","431849900","431849901","431849902","431849903","431849904","431849905","431849906","431849907","431849908","431849909","431849910","431849911","431849912","431849913","431849914","431849915","431849916","431849917","431849918","431849919","431849920","431849921","431849922","431849923","431849924","431849925","431849926","431849927","431849928","431849929","431849930","431849931","431849932","431849933","431849934","431849935","431849936","431849937","431849938","431849939","431849940","431849941","431849942","431849943","431849944","431849945","431849946","431849947","431849948","431849949","431849950","431849951","431849952","431849953","431849954","431849955","431849956","431849957","431849958","431849959","431849960","431849961","431849962","431849963","431849964","431849965","431849966","431849967","431849968","431849969","431849970","431849971","431849972","431849973","431849974","431849975","431849976","431849977","431849978","431849979","431849980","431849981","431849982","431849983","431849984","431849985","431849986","431849987","431849988","431849989","431849990","431849991","431849992","431849993","431849994","431849995","431849996","431849997","
431849998","431849999","431850000","431850001","431850002","431850003","431850004","431850005","431850006","431850007","431850008","431850009","431850010","431850011","431850012","431850013","431850014","431850015","431850016","431850017","431850018","431850019","431850020","431850021","431850022","431850023","431850024","431850025","431850026","431850027","431850028","431850029","431850030","431850031","431850032","431850033","431850034","431850035","431850036","431850037","431850038","431850039","431850040","431850041","431850042","431850043","431850044","431850045","431850046","431850047","431850048","431850049","431850050","431850051","431850052","431850053","431850054","431850055","431850056","431850057","431850058","431850059","431850060","431850061","431850062","431850063","431850064","431850065","431850066","431850067","431850068","431850069","431850070","431850071","431850072","431850073","431850074","431850075","431850076","431850077","431850078","431850079","431850080","431850081","431850082","431850083","431850084","431850085","431850086","431850087","431850088","431850089","431850090","431850091","431850092","431850093","431850094","431850095","431850096","431850097","431850098","431850099","431850100","431850101","431850102","431850103","431850104","431850105","431850106","431850107","431850108","431850109","431850110","431850111","431850112","431850113","431850114","431850115","431850116","431850117","431850118","431850119","431850120","431850121","431850122","431850123","431850124","431850125","431850126","431850127","431850128","431850129","431850130","431850131","431850132","431850133","431850134","431850135","431850136","431850137","431850138","431850139","431850140","431850141","431850142","431850143","431850144","431850145","431850146","431850147","431850148","431850149","431850150","431850151","431850152","431850153","431850154","431850155","431850156","431850157","431850158","431850159","431850160","431850161","431850162","431850163","43185016
4","446695174","446695175","446695176","446695177","446695178","446695179","446695180","446695181","446695182","446695183","446695184","446695185","446695186","446695187","446695188","446695189","446695190","446695191","446695192","446695193","446695194","446695195","446695196","446695197","446695198","446695199","446695200","446695201","446695202","446695203","446695204","446695205","446695206","446695207","446695208","446695209","446695210","446695211","446695212","446695213","446695214","446695215","446695216","446695217","446695218","446695219","446695220","446695221","446695222","446695223","446695224","446695225","446695226","446695227","446695228","446695229","446695230","446695231","446695232","446695233","446695234","446695235","446695236","446695237","446695238","446695239","446695240","446695241","446695242","446695243","446695244","446695245","446695246","446695247","446695248","446695249","446695250","446695251","446695252","446695253","446695254","446695255","446695256","446695257","446695258","446695259","446695260","446695261","446695262","446695263","446695264","446695265","446695266","446695267","446695268","446695269","446695270","446695271","446695272","446695273","446695274","446695275","446695276","446695277","446695278","446695279","446695280","446695281","446695282","446695283","446695284","446695285","446695286","446695287","446695288","446695289","446695290","446695291","446695292","446695293","446695294","446695295","446695296","446695297","446695298","446695299","446695300","446695301","446695302","446695303","446695304","446695305","446695306","446695307","446695308","446695309","446695310","446695311","446695312","446695313","446695314","446695315","446695316","446695317","446695318","446695319","446695320","446695321","446695322","446695323","446695324","446695325","446695326","446695327","446695328","446695329","446695330","446695331","446695332","446695333","446695334","446695335","446695336","446695337","446695338","446695339","4466
95340","446695341","446695342","446695343","446695344","446695345","446695346","446695347","446695348","446695349","446695350","446695351","446695352","446695353","446695354","446695355","446695356","446695357","446695358","446695359","446695360","446695361","446695362","446695363","446695364","446695365","446695366","446695367","446695368","446695369","446695370","446695371","446695372","446695373","446695374","446695375","446695376","446695377","446695378","446695379","446695380","446695381","446695382","446695383","446695384","446695385","446695386","446695387","446695388","446695389","446695390","446695391","446695392","446695393","446695394","446695395","446695396","446695397","446695398","446695399","446695400","446695401","446695402","446695403","446695404","446695405","446695406","446695407","446695408","446695409","446695410","446695411","446695412","446695413","446695414","446695415","446695416","446695417","446695418","446695419","446695420","446695421","446695422","446695423","446695424","446695425","446695426","446695427","446695428","446695429","446695430","446695431","446695432","446695433","446695434","446695435","446695436","446695437","446695438","446695439","446695440","446695441","446695442","446695443","446695444","446695445","446695446","446695447","446695448","446695449","446695450","446695451","446695452","446695453","446695454","446695455","446695456","446695457","446695458","446695459","446695460","446695461","446695462","446695463","446695464","446695465","446695466","446695467","446695468","446695469","446695470","446695471","446695472","446695473","446695474","446695475","446695476","446695477","446695478","446695479","446695480","446695481","446695482","446695483","446695484","446695485","446695486","446695487","446695488","446695489","446695490","446695491","446695492","446695493","446695494","446695495","446695496","446695497","446695498","446695499","446695500","446695501","446695502","446695503","446695504","446695505","446695506","
446695507","446695508","446695509","446695510","446695511","446695512","446695513","446695514","446695515","446695516","446695517","446695518","446695519","446695520","446695521","446695522","446695523","446695524","446695525","446695526","446695527","446695528","446695529","446695530","446695531","446695532","446695533","446695534","446695535","446695536","446695537","446695538","446695539","446695540","446695541","446695542","446695543","446695544","446695545","446695546","446695547","446695548","446695549","446695550","446695551","446695552","446695553","446695554","446695555","446695556","446695557","446695558","446695559","446695560","446695561","446695562","446950038","446950039","446950040","446950041","446950042","446950043","446950044","446950045","446950046","446950047","446950048","446950049","446950050","446950051","446950052","446950053","446950054","446950055","446950056","446950057","446950058","446950059","446950060","446950061","446950062","463334783","463334784","463334785","463334786","463334787","463334788","463334789","463334790","463334791","463334792","463334793","463334794","463334795","463334796","463334797","463334798","463334799","463334800","463334801","463334802","463334803","463334804","463334805","463334806","463334807","463334808","463334809","463334810","463334811","463334812","463334813","463334814","463334815","463334816","463334817","463334818","463334819","463334820","463334821","463334822","463334823","463334824","463334825","463334826","463334827","463334828","463334829","463334830","463334831","463334832","463334833","463334834","463334835","463334836","463334837","463334838","463334839","463334840","463334841","463334842","463334843","463334844","463334845","463334846","463334847","463334848","463334849","463334850","463334851","463334852","463334853","463334854","463334855","463334856","463334857","463334858","463334859","463334860","463334861","463334862","463334863","463334864","463334865","463334866","463334867","46333486
8","463334869","463334870","463334871","463334872","481768758","481768759","481768760","481768761","481768762","483915103","493846926","493846927","493846928","493846929","494039119","494674297","494674298","494674299","494674300","494674301","494674302","494674303","494674304","494674305","494674306","494674307","494674308","494674309","494674310","494674311","494674312","494674313","494674314","494674315","494674316","494674317","494674318","494674319","494674320","494674321","494674322","494674323","494674324","494674325","494674326","495797544","498360364","498360365","498360366","498360367","498360368","498360369","498360370","498360371","498360372","498360373","498360374","498360375","498360376","498360377","501048447","501048448","501048449","501048450","501048451","501048452","501048453","501048454","501048455","501048456","501048457","501048458","501048459","501048460","501048461","501048462","501048463","501048464","501048465","501048466","501048467","501048468","501048469","501048470","501048471","501048472","501048473","501048474","501048475","501048476","501048477","501048478","501048479","501048480","502777444","502777445","502777446","502777447","502777448","502777449","502777450","502777451","502777452","502777453","504576704","504576705","504576706","504576707","504576708","504576709","504576710","504576711","504576712","504576713","504576714","504576715","504576716","504576717","504576718","504576719","504576720","504576721","504576722","504576723","504576724","504576725","504576726","504576727","504576728","504576729","504576730","504576731","504576732","504576733","504576734","504648642","510671892","510671893","510671894","510671895","510671896","510671897","510671898","510671899","510671900","510671901","510671902","510671903","510671904","510671905","510671906","510671907","510671908","510671909","510671910","510671911","510671912","510671913","510671914","510671915","510671916","510671917","510671918","510671919","510671920","510671921","5106
71922","510671923","510671924","510671925","510671926","510671927","510671928","512987099","513265459","515011565","515011566","515011567","515011568","515011569","515011570","515011571","515011572","515011573","515011574","515011575","515011576","515011577","515011578","515011579","515011580","515011581","515011582","515011583","515011584","515011585","515011586","515011587","515011588","515011589","515011590","515011591","515011592","515011593","515011594","515011595","515011596","515011597","515011598","515011599","515011600","515011601","515011602","515011603","515011604","515011605","515011606","515011607","515011608","515011609","515011610","515011611","515011612","515011613","515011614","515011615","515011616","515011617","515011618","515011619","515011620","515011621","515011622","515011623","515011624","515011625","521992866","521992867","521992868","522009005","522527306","522527307","522527308","522527309","522527310","522527311","522527312","522527313","522527314","522527315","522527316","522527317","522527318","522527319","522527320","522527321","522527322","522527323","522527324","522527325","522527326","522527327","522527328","522527329","522527330","522527331","522527332","522527333","522527334","522527335","522527336","522527337","522527338","522527339","522527340","522527341","526088086","528006329","528006330","528006331","538238626","538238627","538238628","538238629","543117134","543117135","545355627","545421957","545421958","545421959","545421960","545421961","545421962","545421963","545421964","545421965","545421966","545421967","545421968","545421969","561288660","561568120","562307229","562319950","562320399"]
# Materialize both ID lists as sets once, up front: the inclusion-coefficient
# estimators below do many membership tests, and set lookup is O(1) vs. the
# O(n) scan a list would cost on every test.
set_data1, set_data2 = set(data1), set(data2)
from functions.true_inclusion_coeff import true_inclusion_coeff
from functions.hll_bml import hll_bml
from functions.minHash_bml import minHash_bml
from memory_profiler import profile
import timeit
@profile
def mem_true_inclusion_coeff(SX, SY):
    """Run the exact inclusion-coefficient computation under memory_profiler.

    The ``@profile`` decorator prints a line-by-line memory report for the
    wrapped call.

    Parameters
    ----------
    SX, SY : set
        The two ID collections being compared.

    Returns
    -------
    Whatever ``true_inclusion_coeff`` returns (previously discarded), so the
    caller can inspect the result as well as the memory report.  Returning it
    is backward-compatible: existing callers ignore the old ``None``.
    """
    return true_inclusion_coeff(SX, SY)
@profile
def mem_hll_bml(SX, SY):
    """Run the HyperLogLog-based BML estimator under memory_profiler.

    The ``@profile`` decorator prints a line-by-line memory report for the
    wrapped call.

    Parameters
    ----------
    SX, SY : iterable
        The two ID collections being compared (the caller passes the raw
        lists here, unlike the set-based estimators).

    Returns
    -------
    Whatever ``hll_bml`` returns (previously discarded), so the caller can
    inspect the estimate as well as the memory report.  Returning it is
    backward-compatible: existing callers ignore the old ``None``.
    """
    return hll_bml(SX, SY)
@profile
def mem_minHash_bml(SX, SY):
    """Run the MinHash-based BML estimator under memory_profiler.

    The ``@profile`` decorator prints a line-by-line memory report for the
    wrapped call.

    Parameters
    ----------
    SX, SY : set
        The two ID collections being compared.

    Returns
    -------
    Whatever ``minHash_bml`` returns (previously discarded), so the caller
    can inspect the estimate as well as the memory report.  Returning it is
    backward-compatible: existing callers ignore the old ``None``.
    """
    return minHash_bml(SX, SY)
# --- Memory pass -----------------------------------------------------------
# Each wrapper is decorated with memory_profiler's @profile and prints a
# line-by-line memory report as a side effect of the call.
mem_true_inclusion_coeff(SX=set_data1, SY=set_data2)
mem_hll_bml(SX=data1, SY=data2)
mem_minHash_bml(SX=set_data1, SY=set_data2)

# --- Timing pass -----------------------------------------------------------
import cProfile

print("Execution times")
# Profile the underlying functions directly, on a fresh run, so the memory
# profiler's instrumentation does not interfere with the timings.
for statement in (
    'true_inclusion_coeff(SX=set_data1, SY=set_data2)',
    'hll_bml(SX=data1,SY=data2)',
    'minHash_bml(SX=set_data1, SY=set_data2)',
):
    # cProfile.run executes the statement string in __main__'s namespace,
    # where data1/data2/set_data1/set_data2 and the functions live.
    cProfile.run(statement)
| [
"costadanilo@protonmail.com"
] | costadanilo@protonmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.