index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
3,302
|
supasate/FBPCS
|
refs/heads/main
|
/onedocker/env.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# This is the repository path that OneDocker downloads binaries from
# NOTE(review): the value mirrors the constant name — presumably this is an
# environment-variable key looked up by the OneDocker runner; confirm there.
ONEDOCKER_REPOSITORY_PATH = "ONEDOCKER_REPOSITORY_PATH"
# This is the local path that the binaries reside
# NOTE(review): same pattern — presumably an env-var key; confirm with callers.
ONEDOCKER_EXE_PATH = "ONEDOCKER_EXE_PATH"
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,303
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/entity/instance_base.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
import abc
class InstanceBase(abc.ABC):
    """Abstract base for identifiable instance entities.

    Concrete subclasses must provide a unique instance id and a
    human-readable string form.
    """

    @abc.abstractmethod
    def get_instance_id(self) -> str:
        """Return the unique identifier of this instance."""
        ...

    @abc.abstractmethod
    def __str__(self) -> str:
        """Return a string representation of this instance."""
        ...
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,304
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/mapper/aws.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
from functools import reduce
from typing import Any, Dict, List
from fbpcs.entity.cluster_instance import Cluster, ClusterStatus
from fbpcs.entity.container_instance import ContainerInstance, ContainerInstanceStatus
from fbpcs.entity.vpc_instance import Vpc, VpcState
def map_ecstask_to_containerinstance(task: Dict[str, Any]) -> ContainerInstance:
    """Translate a boto3 ECS task description into a ContainerInstance.

    Only the first container of the task is inspected; its last status and
    (when present) first private IPv4 address determine the result.
    """
    first_container = task["containers"][0]
    interfaces = first_container["networkInterfaces"]
    ip_v4 = interfaces[0]["privateIpv4Address"] if interfaces else None

    last_status = first_container["lastStatus"]
    if last_status == "RUNNING":
        instance_status = ContainerInstanceStatus.STARTED
    elif last_status == "STOPPED":
        # A zero exit code means the container finished its work successfully.
        instance_status = (
            ContainerInstanceStatus.COMPLETED
            if first_container["exitCode"] == 0
            else ContainerInstanceStatus.FAILED
        )
    else:
        instance_status = ContainerInstanceStatus.UNKNOWN

    return ContainerInstance(task["taskArn"], ip_v4, instance_status)
def map_esccluster_to_clusterinstance(cluster: Dict[str, Any]) -> Cluster:
    """Translate a boto3 ECS cluster description into a Cluster entity."""
    status_by_name = {
        "ACTIVE": ClusterStatus.ACTIVE,
        "INACTIVE": ClusterStatus.INACTIVE,
    }
    # Anything other than the two known states maps to UNKNOWN.
    cluster_status = status_by_name.get(cluster["status"], ClusterStatus.UNKNOWN)
    tags = _convert_aws_tags_to_dict(cluster["tags"], "key", "value")
    return Cluster(cluster["clusterArn"], cluster["clusterName"], cluster_status, tags)
def map_ec2vpc_to_vpcinstance(vpc: Dict[str, Any]) -> Vpc:
    """Translate a boto3 EC2 VPC description into a Vpc entity."""
    state_by_name = {
        "pending": VpcState.PENDING,
        "available": VpcState.AVAILABLE,
    }
    vpc_state = state_by_name.get(vpc["State"], VpcState.UNKNOWN)
    # Some VPC instances carry no tags at all, so the "Tags" key may be absent.
    if "Tags" in vpc:
        tags = _convert_aws_tags_to_dict(vpc["Tags"], "Key", "Value")
    else:
        tags = {}
    return Vpc(vpc["VpcId"], vpc_state, tags)
def _convert_aws_tags_to_dict(
tag_list: List[Dict[str, str]], tag_key: str, tag_value: str
) -> Dict[str, str]:
return reduce(lambda x, y: {**x, **{y[tag_key]: y[tag_value]}}, tag_list, {})
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,305
|
supasate/FBPCS
|
refs/heads/main
|
/tests/gateway/test_ecs.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from unittest.mock import MagicMock, patch
from fbpcs.entity.cluster_instance import ClusterStatus, Cluster
from fbpcs.entity.container_instance import ContainerInstanceStatus, ContainerInstance
from fbpcs.gateway.ecs import ECSGateway
class TestECSGateway(unittest.TestCase):
    """Unit tests for ECSGateway, exercising each wrapper method against a
    mocked boto3 ECS client (no network calls are made)."""

    # Fixture constants shared by all tests.
    TEST_TASK_ARN = "test-task-arn"
    TEST_TASK_DEFINITION = "test-task-definition"
    TEST_CONTAINER = "test-container"
    TEST_CLUSTER = "test-cluster"
    TEST_CMD = "test-cmd"
    TEST_SUBNET = "test-subnet"
    TEST_ACCESS_KEY_ID = "test-access-key-id"
    TEST_ACCESS_KEY_DATA = "test-access-key-data"
    TEST_IP_ADDRESS = "127.0.0.1"
    TEST_FILE = "test-file"
    TEST_CLUSTER_TAG_KEY = "test-tag-key"
    TEST_CLUSTER_TAG_VALUE = "test-tag-value"
    REGION = "us-west-2"

    @patch("boto3.client")
    def setUp(self, BotoClient):
        # Construct the gateway with boto3.client patched out, then swap in a
        # fresh MagicMock client so each test can stub individual API methods.
        self.gw = ECSGateway(
            self.REGION, self.TEST_ACCESS_KEY_ID, self.TEST_ACCESS_KEY_DATA
        )
        self.gw.client = BotoClient()

    def test_run_task(self):
        """run_task should map the first returned task to a STARTED instance."""
        client_return_response = {
            "tasks": [
                {
                    "containers": [
                        {
                            "name": "container_1",
                            # NOTE(review): real ECS responses use "exitCode"
                            # (camelCase); "exitcode" here is never read because
                            # lastStatus is RUNNING — confirm intent.
                            "exitcode": 123,
                            "lastStatus": "RUNNING",
                            "networkInterfaces": [
                                {
                                    "privateIpv4Address": self.TEST_IP_ADDRESS,
                                },
                            ],
                        }
                    ],
                    "taskArn": self.TEST_TASK_ARN,
                }
            ]
        }
        self.gw.client.run_task = MagicMock(return_value=client_return_response)
        task = self.gw.run_task(
            self.TEST_TASK_DEFINITION,
            self.TEST_CONTAINER,
            self.TEST_CMD,
            self.TEST_CLUSTER,
            self.TEST_SUBNET,
        )
        expected_task = ContainerInstance(
            self.TEST_TASK_ARN,
            self.TEST_IP_ADDRESS,
            ContainerInstanceStatus.STARTED,
        )
        self.assertEqual(task, expected_task)
        self.gw.client.run_task.assert_called()

    def test_describe_tasks(self):
        """describe_tasks should map each returned task to a ContainerInstance."""
        client_return_response = {
            "tasks": [
                {
                    "containers": [
                        {
                            "name": self.TEST_CONTAINER,
                            # NOTE(review): same "exitcode" casing as above.
                            "exitcode": 123,
                            "lastStatus": "RUNNING",
                            "networkInterfaces": [
                                {
                                    "privateIpv4Address": self.TEST_IP_ADDRESS,
                                },
                            ],
                        }
                    ],
                    "taskArn": self.TEST_TASK_ARN,
                }
            ]
        }
        self.gw.client.describe_tasks = MagicMock(return_value=client_return_response)
        tasks = [
            self.TEST_TASK_DEFINITION,
        ]
        tasks = self.gw.describe_tasks(self.TEST_CLUSTER, tasks)
        expected_tasks = [
            ContainerInstance(
                self.TEST_TASK_ARN,
                self.TEST_IP_ADDRESS,
                ContainerInstanceStatus.STARTED,
            ),
        ]
        self.assertEqual(tasks, expected_tasks)
        self.gw.client.describe_tasks.assert_called()

    def test_stop_task(self):
        """stop_task should forward to the client; no return value is checked."""
        client_return_response = {
            "task": {
                "containers": [
                    {
                        "name": self.TEST_CONTAINER,
                        "exitcode": 1,
                        "lastStatus": "STOPPED",
                        "networkInterfaces": [
                            {
                                "privateIpv4Address": self.TEST_IP_ADDRESS,
                            },
                        ],
                    }
                ],
                "taskArn": self.TEST_TASK_ARN,
            }
        }
        self.gw.client.stop_task = MagicMock(return_value=client_return_response)
        self.gw.stop_task(self.TEST_CLUSTER, self.TEST_TASK_ARN)
        self.gw.client.stop_task.assert_called()

    def test_list_tasks(self):
        """list_tasks should return the raw task ARNs from the response."""
        client_return_response = {"taskArns": [self.TEST_TASK_ARN]}
        self.gw.client.list_tasks = MagicMock(return_value=client_return_response)
        tasks = self.gw.list_tasks(self.TEST_CLUSTER)
        expected_tasks = [self.TEST_TASK_ARN]
        self.assertEqual(tasks, expected_tasks)
        self.gw.client.list_tasks.assert_called()

    # NOTE(review): method name has a typo ("clusers"); harmless since unittest
    # only requires the "test_" prefix, but worth fixing for discoverability.
    def test_describe_clusers(self):
        """describe_clusters should map each cluster, including its tags."""
        client_return_response = {
            "clusters": [
                {
                    "clusterArn": self.TEST_CLUSTER,
                    "clusterName": "cluster_1",
                    "tags": [
                        {
                            "key": self.TEST_CLUSTER_TAG_KEY,
                            "value": self.TEST_CLUSTER_TAG_VALUE,
                        },
                    ],
                    "status": "ACTIVE",
                }
            ]
        }
        self.gw.client.describe_clusters = MagicMock(
            return_value=client_return_response
        )
        clusters = self.gw.describe_clusters(
            [
                self.TEST_CLUSTER,
            ]
        )
        tags = {self.TEST_CLUSTER_TAG_KEY: self.TEST_CLUSTER_TAG_VALUE}
        expected_clusters = [
            Cluster(self.TEST_CLUSTER, "cluster_1", ClusterStatus.ACTIVE, tags)
        ]
        self.assertEqual(expected_clusters, clusters)
        self.gw.client.describe_clusters.assert_called()
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,306
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/gateway/cloudwatch.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
from typing import Any, Dict, Optional
import boto3
from fbpcs.decorator.error_handler import error_handler
class CloudWatchGateway:
    """Thin wrapper around the boto3 CloudWatch Logs ("logs") client."""

    def __init__(
        self,
        region: str = "us-west-1",
        access_key_id: Optional[str] = None,
        access_key_data: Optional[str] = None,
        config: Optional[Dict[str, Any]] = None,
    ) -> None:
        self.region = region
        config = config or {}
        # Fold the explicit credentials (when given) into the client config;
        # this intentionally updates the caller-supplied dict in place.
        credentials = {
            "aws_access_key_id": access_key_id,
            "aws_secret_access_key": access_key_data,
        }
        for cred_key, cred_value in credentials.items():
            if cred_value:
                config[cred_key] = cred_value
        # pyre-ignore
        self.client = boto3.client("logs", region_name=self.region, **config)

    @error_handler
    def get_log_events(self, log_group: str, log_stream: str) -> Dict[str, Any]:
        """Fetch log events for the given log group / log stream pair."""
        response = self.client.get_log_events(
            logGroupName=log_group, logStreamName=log_stream
        )
        return response
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,307
|
supasate/FBPCS
|
refs/heads/main
|
/tests/repository/test_instance_s3.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import pickle
import unittest
import uuid
from unittest.mock import MagicMock
from fbpcs.entity.mpc_instance import MPCInstance, MPCInstanceStatus, MPCRole
from fbpcs.repository.instance_s3 import S3InstanceRepository
from fbpcs.service.storage_s3 import S3StorageService
class TestS3InstanceRepository(unittest.TestCase):
    """Unit tests for S3InstanceRepository's CRUD surface, with the underlying
    S3 storage service and existence check mocked out."""

    # Fixture constants for building a representative MPCInstance.
    TEST_BASE_DIR = "./"
    TEST_INSTANCE_ID = str(uuid.uuid4())
    TEST_GAME_NAME = "lift"
    TEST_MPC_ROLE = MPCRole.SERVER
    TEST_NUM_WORKERS = 1
    TEST_SERVER_IPS = ["192.0.2.0", "192.0.2.1"]
    TEST_INPUT_ARGS = [{"input_filenames": "test_input_file"}]
    TEST_OUTPUT_ARGS = [{"output_filenames": "test_output_file"}]
    TEST_CONCURRENCY_ARGS = {"concurrency": 2}
    TEST_INPUT_DIRECTORY = "TEST_INPUT_DIRECTORY/"
    # NOTE(review): constant name has a typo ("DIRECTROY"); value is still used
    # consistently within this class so behavior is unaffected.
    TEST_OUTPUT_DIRECTROY = "TEST_OUTPUT_DIRECTORY/"
    ERROR_MSG_ALREADY_EXISTS = f"{TEST_INSTANCE_ID} already exists"
    ERROR_MSG_NOT_EXISTS = f"{TEST_INSTANCE_ID} does not exist"

    def setUp(self):
        # Real S3StorageService object, but every call it would make is mocked
        # per-test, so no AWS traffic happens.
        storage_svc = S3StorageService("us-west-1")
        self.s3_storage_repo = S3InstanceRepository(storage_svc, self.TEST_BASE_DIR)
        self.mpc_instance = MPCInstance(
            instance_id=self.TEST_INSTANCE_ID,
            game_name=self.TEST_GAME_NAME,
            mpc_role=self.TEST_MPC_ROLE,
            num_workers=self.TEST_NUM_WORKERS,
            server_ips=self.TEST_SERVER_IPS,
            status=MPCInstanceStatus.CREATED,
            input_args=self.TEST_INPUT_ARGS,
            output_args=self.TEST_OUTPUT_ARGS,
            concurrency_args=self.TEST_CONCURRENCY_ARGS,
            input_directory=self.TEST_INPUT_DIRECTORY,
            output_directory=self.TEST_OUTPUT_DIRECTROY,
        )

    def test_create_non_existing_instance(self):
        """create() should write to storage when the instance is absent."""
        self.s3_storage_repo._exist = MagicMock(return_value=False)
        self.s3_storage_repo.s3_storage_svc.write = MagicMock(return_value=None)
        self.s3_storage_repo.create(self.mpc_instance)
        self.s3_storage_repo.s3_storage_svc.write.assert_called()

    def test_create_existing_instance(self):
        """create() should surface the 'already exists' error."""
        self.s3_storage_repo._exist = MagicMock(
            side_effect=RuntimeError(self.ERROR_MSG_ALREADY_EXISTS)
        )
        with self.assertRaisesRegex(RuntimeError, self.ERROR_MSG_ALREADY_EXISTS):
            self.s3_storage_repo.create(self.mpc_instance)

    def test_read_non_existing_instance(self):
        """read() should surface the 'does not exist' error."""
        self.s3_storage_repo._exist = MagicMock(
            side_effect=RuntimeError(self.ERROR_MSG_NOT_EXISTS)
        )
        with self.assertRaisesRegex(RuntimeError, self.ERROR_MSG_NOT_EXISTS):
            self.s3_storage_repo.read(self.TEST_INSTANCE_ID)

    def test_read_existing_instance(self):
        """read() should unpickle the stored payload back into an instance."""
        self.s3_storage_repo._exist = MagicMock(return_value=True)
        # Protocol 0 produces ASCII-safe bytes, so .decode() round-trips.
        self.s3_storage_repo.s3_storage_svc.read = MagicMock(
            return_value=pickle.dumps(self.mpc_instance, 0).decode()
        )
        # NOTE(review): other tests pass TEST_INSTANCE_ID here, not the whole
        # instance — presumably read() only uses the argument as a key; confirm
        # against S3InstanceRepository.read's signature.
        instance = self.s3_storage_repo.read(self.mpc_instance)
        self.assertEqual(self.mpc_instance, instance)

    def test_update_non_existing_instance(self):
        """update() should surface the 'does not exist' error."""
        self.s3_storage_repo._exist = MagicMock(
            side_effect=RuntimeError(self.ERROR_MSG_NOT_EXISTS)
        )
        with self.assertRaisesRegex(RuntimeError, self.ERROR_MSG_NOT_EXISTS):
            self.s3_storage_repo.update(self.mpc_instance)

    def test_update_existing_instance(self):
        """update() should write to storage when the instance exists."""
        self.s3_storage_repo._exist = MagicMock(return_value=True)
        self.s3_storage_repo.s3_storage_svc.write = MagicMock(return_value=None)
        self.s3_storage_repo.update(self.mpc_instance)
        self.s3_storage_repo.s3_storage_svc.write.assert_called()

    def test_delete_non_existing_instance(self):
        """delete() should surface the 'does not exist' error."""
        self.s3_storage_repo._exist = MagicMock(
            side_effect=RuntimeError(self.ERROR_MSG_NOT_EXISTS)
        )
        with self.assertRaisesRegex(RuntimeError, self.ERROR_MSG_NOT_EXISTS):
            self.s3_storage_repo.delete(self.TEST_INSTANCE_ID)

    def test_delete_existing_instance(self):
        """delete() should delegate to the storage service's delete."""
        self.s3_storage_repo._exist = MagicMock(return_value=True)
        self.s3_storage_repo.s3_storage_svc.delete = MagicMock(return_value=None)
        self.s3_storage_repo.delete(self.TEST_INSTANCE_ID)
        self.s3_storage_repo.s3_storage_svc.delete.assert_called()

    def test_exists(self):
        """_exist() should delegate to the storage service's file_exists."""
        self.s3_storage_repo.s3_storage_svc.file_exists = MagicMock(return_value=True)
        self.assertTrue(self.s3_storage_repo._exist(self.TEST_INSTANCE_ID))
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,308
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/error/mapper/aws.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
from botocore.exceptions import ClientError
from fbpcs.error.pcs import PcsError
from fbpcs.error.throttling import ThrottlingError
# reference: https://boto3.amazonaws.com/v1/documentation/api/latest/guide/error-handling.html
def map_aws_error(error: ClientError) -> PcsError:
    """Translate a botocore ClientError into the PCS error hierarchy.

    Returns a ThrottlingError for AWS rate-limiting responses (so callers
    can retry with backoff) and a generic PcsError for everything else.
    """
    code = error.response["Error"]["Code"]
    message = error.response["Error"]["Message"]
    # AWS services report throttling under several different error codes,
    # not only "ThrottlingException"; treat all of the common ones the same.
    throttling_codes = {
        "Throttling",
        "ThrottlingException",
        "ThrottledException",
        "RequestThrottled",
        "RequestThrottledException",
        "TooManyRequestsException",
        "SlowDown",
    }
    if code in throttling_codes:
        return ThrottlingError(message)
    return PcsError(message)
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,309
|
supasate/FBPCS
|
refs/heads/main
|
/tests/mapper/test_aws.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from fbpcs.entity.cluster_instance import ClusterStatus, Cluster
from fbpcs.entity.container_instance import ContainerInstanceStatus, ContainerInstance
from fbpcs.mapper.aws import (
map_ecstask_to_containerinstance,
map_esccluster_to_clusterinstance,
)
class TestAWSMapper(unittest.TestCase):
    """Unit tests for the boto3-response -> entity mapping helpers."""
    # Shared fixture constants reused across both tests.
    TEST_IP_ADDRESS = "127.0.0.1"
    TEST_TASK_ARN = "test-task-arn"
    TEST_CLUSTER_ARN = "test-cluster-arn"
    TEST_CLUSTER_NAME = "test-cluster-name"
    def test_map_ecstask_to_containerinstance(self):
        """Each ECS task shape maps to the expected ContainerInstanceStatus."""
        # Fixture mimics the shape of a boto3 ECS describe_tasks response.
        ecs_task_response = {
            "tasks": [
                # RUNNING container -> STARTED, private IP is carried over.
                {
                    "containers": [
                        {
                            "exitCode": None,
                            "lastStatus": "RUNNING",
                            "networkInterfaces": [
                                {
                                    "privateIpv4Address": self.TEST_IP_ADDRESS,
                                },
                            ],
                        },
                    ],
                    "taskArn": self.TEST_TASK_ARN,
                },
                # STOPPED with exit code 0 -> COMPLETED.
                {
                    "containers": [
                        {
                            "exitCode": 0,
                            "lastStatus": "STOPPED",
                            "networkInterfaces": [],
                        },
                    ],
                    "taskArn": self.TEST_TASK_ARN,
                },
                # STOPPED with non-zero exit code -> FAILED.
                {
                    "containers": [
                        {
                            "exitCode": 1,
                            "lastStatus": "STOPPED",
                            "networkInterfaces": [],
                        },
                    ],
                    "taskArn": self.TEST_TASK_ARN,
                },
                # Unrecognized status -> UNKNOWN.
                {
                    "containers": [
                        {
                            "exitCode": -1,
                            "lastStatus": "UNKNOWN",
                            "networkInterfaces": [],
                        },
                    ],
                    "taskArn": self.TEST_TASK_ARN,
                },
            ]
        }
        expected_task_list = [
            ContainerInstance(
                self.TEST_TASK_ARN,
                self.TEST_IP_ADDRESS,
                ContainerInstanceStatus.STARTED,
            ),
            ContainerInstance(
                self.TEST_TASK_ARN,
                None,
                ContainerInstanceStatus.COMPLETED,
            ),
            ContainerInstance(
                self.TEST_TASK_ARN,
                None,
                ContainerInstanceStatus.FAILED,
            ),
            ContainerInstance(
                self.TEST_TASK_ARN,
                None,
                ContainerInstanceStatus.UNKNOWN,
            ),
        ]
        tasks_list = [
            map_ecstask_to_containerinstance(task)
            for task in ecs_task_response["tasks"]
        ]
        self.assertEqual(tasks_list, expected_task_list)
    def test_map_esccluster_to_clusterinstance(self):
        """Cluster responses map status and tag lists into Cluster entities."""
        tag_key_1 = "tag-key-1"
        tag_key_2 = "tag-key-2"
        tag_value_1 = "tag-value-1"
        tag_value_2 = "tag-value-2"
        # Fixture mimics the shape of a boto3 ECS describe_clusters response;
        # the three entries cover ACTIVE, INACTIVE and an unknown status.
        ecs_cluster_response = {
            "clusters": [
                {
                    "clusterName": self.TEST_CLUSTER_NAME,
                    "clusterArn": self.TEST_CLUSTER_ARN,
                    "status": "ACTIVE",
                    "tags": [
                        {
                            "key": tag_key_1,
                            "value": tag_value_1,
                        },
                        {
                            "key": tag_key_2,
                            "value": tag_value_2,
                        },
                    ],
                },
                {
                    "clusterName": self.TEST_CLUSTER_NAME,
                    "clusterArn": self.TEST_CLUSTER_ARN,
                    "status": "INACTIVE",
                    "tags": [
                        {
                            "key": tag_key_1,
                            "value": tag_value_1,
                        },
                    ],
                },
                {
                    "clusterName": self.TEST_CLUSTER_NAME,
                    "clusterArn": self.TEST_CLUSTER_ARN,
                    "status": "UNKNOWN",
                    "tags": [
                        {
                            "key": tag_key_1,
                            "value": tag_value_1,
                        },
                    ],
                },
            ]
        }
        # AWS's list-of-{key,value} tags are expected as plain dicts.
        multi_tag_value_pair = {
            tag_key_1: tag_value_1,
            tag_key_2: tag_value_2,
        }
        single_tag_value_pair = {tag_key_1: tag_value_1}
        expected_cluster_list = [
            Cluster(
                self.TEST_CLUSTER_ARN,
                self.TEST_CLUSTER_NAME,
                ClusterStatus.ACTIVE,
                multi_tag_value_pair,
            ),
            Cluster(
                self.TEST_CLUSTER_ARN,
                self.TEST_CLUSTER_NAME,
                ClusterStatus.INACTIVE,
                single_tag_value_pair,
            ),
            Cluster(
                self.TEST_CLUSTER_ARN,
                self.TEST_CLUSTER_NAME,
                ClusterStatus.UNKNOWN,
                single_tag_value_pair,
            ),
        ]
        cluster_list = [
            map_esccluster_to_clusterinstance(cluster)
            for cluster in ecs_cluster_response["clusters"]
        ]
        self.assertEqual(cluster_list, expected_cluster_list)
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,310
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/service/storage_s3.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
import os
from os import path
from os.path import join, normpath, relpath
from typing import Any, Dict, Optional
from fbpcs.gateway.s3 import S3Gateway
from fbpcs.service.storage import PathType, StorageService
from fbpcs.util.s3path import S3Path
class S3StorageService(StorageService):
    """StorageService implementation backed by AWS S3 via S3Gateway.

    Filenames are S3 URLs of the form
    "https://bucket-name.s3.Region.amazonaws.com/key-name".
    """

    def __init__(
        self,
        region: str = "us-west-1",
        access_key_id: Optional[str] = None,
        access_key_data: Optional[str] = None,
        config: Optional[Dict[str, Any]] = None,
    ) -> None:
        self.s3_gateway = S3Gateway(region, access_key_id, access_key_data, config)

    def read(self, filename: str) -> str:
        """Read a file's data.

        Keyword arguments:
        filename -- "https://bucket-name.s3.Region.amazonaws.com/key-name"
        """
        s3_path = S3Path(filename)
        return self.s3_gateway.get_object(s3_path.bucket, s3_path.key)

    def write(self, filename: str, data: str) -> None:
        """Write data into a file.

        Keyword arguments:
        filename -- "https://bucket-name.s3.Region.amazonaws.com/key-name"
        """
        s3_path = S3Path(filename)
        self.s3_gateway.put_object(s3_path.bucket, s3_path.key, data)

    def copy(self, source: str, destination: str, recursive: bool = False) -> None:
        """Copy a file or folder between local storage and S3, or S3 and S3.

        Keyword arguments:
        source -- source file or folder
        destination -- destination file or folder
        recursive -- whether to recursively copy a folder

        Raises ValueError when both paths are local, when source and
        destination are the same S3 path, or when a folder is copied
        without recursive=True.
        """
        if StorageService.path_type(source) == PathType.Local:
            # from local to S3 (local -> local is unsupported)
            if StorageService.path_type(destination) == PathType.Local:
                raise ValueError("Both source and destination are local files")
            s3_path = S3Path(destination)
            if path.isdir(source):
                if not recursive:
                    raise ValueError(f"Source {source} is a folder. Use --recursive")
                self.upload_dir(source, s3_path.bucket, s3_path.key)
            else:
                self.s3_gateway.upload_file(source, s3_path.bucket, s3_path.key)
        else:
            source_s3_path = S3Path(source)
            if StorageService.path_type(destination) == PathType.S3:
                # from S3 to S3
                dest_s3_path = S3Path(destination)
                if source_s3_path == dest_s3_path:
                    raise ValueError(
                        f"Source {source} and destination {destination} are the same"
                    )
                # A trailing "/" in the URL marks a folder copy.
                if source.endswith("/"):
                    if not recursive:
                        raise ValueError(
                            f"Source {source} is a folder. Use --recursive"
                        )
                    self.copy_dir(
                        source_s3_path.bucket,
                        source_s3_path.key + "/",
                        dest_s3_path.bucket,
                        dest_s3_path.key,
                    )
                else:
                    self.s3_gateway.copy(
                        source_s3_path.bucket,
                        source_s3_path.key,
                        dest_s3_path.bucket,
                        dest_s3_path.key,
                    )
            else:
                # from S3 to local
                if source.endswith("/"):
                    if not recursive:
                        raise ValueError(
                            f"Source {source} is a folder. Use --recursive"
                        )
                    self.download_dir(
                        source_s3_path.bucket,
                        source_s3_path.key + "/",
                        destination,
                    )
                else:
                    self.s3_gateway.download_file(
                        source_s3_path.bucket, source_s3_path.key, destination
                    )

    def upload_dir(self, source: str, s3_path_bucket: str, s3_path_key: str) -> None:
        """Recursively upload a local folder under the given bucket/key prefix."""
        for root, dirs, files in os.walk(source):
            for file in files:
                local_path = join(root, file)
                destination_path = s3_path_key + "/" + relpath(local_path, source)
                self.s3_gateway.upload_file(
                    local_path,
                    s3_path_bucket,
                    destination_path,
                )
            # Create explicit "folder" marker objects (key ending in "/")
            # so that empty directories survive the round trip.
            for subdir in dirs:
                local_path = join(root, subdir)
                destination_path = s3_path_key + "/" + relpath(local_path, source)
                self.s3_gateway.put_object(
                    s3_path_bucket,
                    destination_path + "/",
                    "",
                )

    def download_dir(
        self, s3_path_bucket: str, s3_path_key: str, destination: str
    ) -> None:
        """Recursively download a key prefix into a local destination folder.

        Raises ValueError if the prefix does not exist in the bucket.
        """
        if not self.s3_gateway.object_exists(s3_path_bucket, s3_path_key):
            raise ValueError(
                f"Key {s3_path_key} does not exist in bucket {s3_path_bucket}"
            )
        keys = self.s3_gateway.list_object2(s3_path_bucket, s3_path_key)
        for key in keys:
            # Strip the source prefix and re-root the key under destination.
            local_path = normpath(destination + "/" + key[len(s3_path_key) :])
            if key.endswith("/"):
                # Folder marker object -> create the local directory.
                if not path.exists(local_path):
                    os.makedirs(local_path)
            else:
                self.s3_gateway.download_file(s3_path_bucket, key, local_path)

    def copy_dir(
        self,
        source_bucket: str,
        source_key: str,
        destination_bucket: str,
        destination_key: str,
    ) -> None:
        """Recursively copy a key prefix between two buckets/prefixes.

        Raises ValueError if the source prefix does not exist.
        """
        if not self.s3_gateway.object_exists(source_bucket, source_key):
            raise ValueError(
                f"Key {source_key} does not exist in bucket {source_bucket}"
            )
        keys = self.s3_gateway.list_object2(source_bucket, source_key)
        for key in keys:
            destination_path = destination_key + "/" + key[len(source_key) :]
            if key.endswith("/"):
                # BUGFIX: folder marker objects must be created in the
                # destination bucket (was previously written to the source
                # bucket, corrupting it and leaving the destination without
                # its folder markers).
                self.s3_gateway.put_object(
                    destination_bucket,
                    destination_path,
                    "",
                )
            else:
                self.s3_gateway.copy(
                    source_bucket,
                    key,
                    destination_bucket,
                    destination_path,
                )

    def delete(self, filename: str) -> None:
        """Delete an s3 file.

        Keyword arguments:
        filename -- the s3 file to be deleted
        """
        if StorageService.path_type(filename) == PathType.S3:
            s3_path = S3Path(filename)
            self.s3_gateway.delete_object(s3_path.bucket, s3_path.key)
        else:
            raise ValueError("The file is not an s3 file")

    def file_exists(self, filename: str) -> bool:
        """Return True if the S3 object exists; raise for non-S3 paths."""
        if StorageService.path_type(filename) == PathType.S3:
            s3_path = S3Path(filename)
            return self.s3_gateway.object_exists(s3_path.bucket, s3_path.key)
        else:
            # BUGFIX: the message previously had no placeholder and always
            # printed "(unknown)"; include the offending path instead.
            raise ValueError(f"File {filename} is not an S3 filepath")

    def ls_file(self, filename: str) -> Dict[str, Any]:
        """Show file information (last modified time, type and size).

        Keyword arguments:
        filename -- the s3 file to be shown
        """
        s3_path = S3Path(filename)
        return self.s3_gateway.get_object_info(s3_path.bucket, s3_path.key)

    def get_file_size(self, filename: str) -> int:
        """Return the object size in bytes."""
        s3_path = S3Path(filename)
        return self.s3_gateway.get_object_size(s3_path.bucket, s3_path.key)
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,311
|
supasate/FBPCS
|
refs/heads/main
|
/fbpcs/service/container_aws.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# pyre-strict
import asyncio
from typing import Any, Dict, List, Optional, Tuple
from fbpcs.entity.container_instance import ContainerInstance, ContainerInstanceStatus
from fbpcs.gateway.ecs import ECSGateway
from fbpcs.service.container import ContainerService
from fbpcs.util.typing import checked_cast
class AWSContainerService(ContainerService):
    """ContainerService implementation on top of AWS ECS."""

    def __init__(
        self,
        region: str,
        cluster: str,
        subnet: str,
        access_key_id: Optional[str] = None,
        access_key_data: Optional[str] = None,
        config: Optional[Dict[str, Any]] = None,
    ) -> None:
        self.region = region
        self.cluster = cluster
        self.subnet = subnet
        self.ecs_gateway = ECSGateway(region, access_key_id, access_key_data, config)

    def create_instance(self, container_definition: str, cmd: str) -> ContainerInstance:
        """Start one container synchronously; blocks until it leaves UNKNOWN."""
        return asyncio.run(self._create_instance_async(container_definition, cmd))

    def create_instances(
        self, container_definition: str, cmds: List[str]
    ) -> List[ContainerInstance]:
        """Start one container per command synchronously."""
        return asyncio.run(self._create_instances_async(container_definition, cmds))

    async def create_instances_async(
        self, container_definition: str, cmds: List[str]
    ) -> List[ContainerInstance]:
        """Async variant of create_instances for callers with a running loop."""
        return await self._create_instances_async(container_definition, cmds)

    def get_instance(self, instance_id: str) -> ContainerInstance:
        """Describe a single ECS task in this service's cluster."""
        return self.ecs_gateway.describe_task(self.cluster, instance_id)

    def get_instances(self, instance_ids: List[str]) -> List[ContainerInstance]:
        """Describe several ECS tasks in this service's cluster."""
        return self.ecs_gateway.describe_tasks(self.cluster, instance_ids)

    def list_tasks(self) -> List[str]:
        """List task ids in this service's cluster."""
        return self.ecs_gateway.list_tasks(cluster=self.cluster)

    def stop_task(self, task_id: str) -> Dict[str, Any]:
        """Stop a running ECS task."""
        return self.ecs_gateway.stop_task(cluster=self.cluster, task_id=task_id)

    def _split_container_definition(self, container_definition: str) -> Tuple[str, str]:
        """Split "task_definition#container" into its two components."""
        pieces = container_definition.split("#")
        return pieces[0], pieces[1]

    async def _create_instance_async(
        self, container_definition: str, cmd: str
    ) -> ContainerInstance:
        """Launch one ECS task and poll until it has a concrete status."""
        task_definition, container = self._split_container_definition(
            container_definition
        )
        launched = self.ecs_gateway.run_task(
            task_definition, container, cmd, self.cluster, self.subnet
        )
        # Poll once per second while ECS still reports the task as UNKNOWN.
        while launched.status is ContainerInstanceStatus.UNKNOWN:
            await asyncio.sleep(1)
            launched = self.get_instance(launched.instance_id)
        return launched

    async def _create_instances_async(
        self, container_definition: str, cmds: List[str]
    ) -> List[ContainerInstance]:
        """Launch all commands concurrently and gather their instances."""
        pending = [
            asyncio.create_task(self._create_instance_async(container_definition, c))
            for c in cmds
        ]
        finished = await asyncio.gather(*pending)
        return [checked_cast(ContainerInstance, item) for item in finished]
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,312
|
supasate/FBPCS
|
refs/heads/main
|
/tests/service/test_log_cloudwatch.py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from unittest.mock import MagicMock, patch
from fbpcs.service.log_cloudwatch import CloudWatchLogService
# Fixture values shared by the test class below.
REGION = "us-west-1"
LOG_GROUP = "test-group-name"
LOG_PATH = "test-log-path"
class TestCloudWatchLogService(unittest.TestCase):
    """Tests CloudWatchLogService with a mocked CloudWatchGateway."""

    @patch("fbpcs.gateway.cloudwatch.CloudWatchGateway")
    def test_fetch(self, MockCloudWatchGateway):
        """fetch() should be delegated to the gateway and return its result."""
        service = CloudWatchLogService(LOG_GROUP, REGION)
        expected_log = {"test-events": [{"test-event-name": "test-event-data"}]}
        # Replace the real gateway with a mock whose fetch returns canned data.
        gateway = MockCloudWatchGateway()
        gateway.fetch = MagicMock(return_value=expected_log)
        service.cloudwatch_gateway = gateway
        fetched = service.cloudwatch_gateway.fetch(LOG_PATH)
        service.cloudwatch_gateway.fetch.assert_called()
        self.assertEqual(expected_log, fetched)
|
{"/tests/util/test_yaml.py": ["/fbpcs/util/yaml.py"], "/tests/error/mapper/test_aws.py": ["/fbpcs/error/mapper/aws.py"], "/fbpcs/gateway/ec2.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/vpc_instance.py", "/fbpcs/mapper/aws.py"], "/tests/service/test_container_aws.py": ["/fbpcs/service/container_aws.py"], "/fbpcs/service/log_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py", "/fbpcs/service/log.py"], "/tests/gateway/test_cloudwatch.py": ["/fbpcs/gateway/cloudwatch.py"], "/tests/gateway/test_ec2.py": ["/fbpcs/entity/vpc_instance.py", "/fbpcs/gateway/ec2.py"], "/tests/service/test_storage_s3.py": ["/fbpcs/service/storage_s3.py"], "/fbpcs/service/container.py": ["/fbpcs/entity/container_instance.py"], "/tests/util/test_typing.py": ["/fbpcs/util/typing.py"], "/fbpcs/repository/instance_s3.py": ["/fbpcs/entity/instance_base.py", "/fbpcs/service/storage_s3.py"], "/tests/util/test_reflect.py": ["/fbpcs/util/reflect.py", "/fbpcs/util/s3path.py"], "/fbpcs/decorator/error_handler.py": ["/fbpcs/error/mapper/aws.py"], "/tests/service/test_mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/mpc.py"], "/fbpcs/entity/mpc_instance.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/instance_base.py"], "/tests/service/test_onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/onedocker.py"], "/tests/util/test_s3path.py": ["/fbpcs/util/s3path.py"], "/fbpcs/repository/mpc_instance_local.py": ["/fbpcs/entity/mpc_instance.py"], "/tests/gateway/test_s3.py": ["/fbpcs/gateway/s3.py"], "/fbpcs/service/mpc.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/mpc_instance.py", "/fbpcs/service/container.py", "/fbpcs/service/onedocker.py", "/fbpcs/service/storage.py", "/fbpcs/util/typing.py"], "/tests/service/test_storage.py": ["/fbpcs/service/storage.py"], "/onedocker/onedocker_runner.py": ["/fbpcs/service/storage_s3.py", "/fbpcs/util/s3path.py"], "/fbpcs/gateway/s3.py": 
["/fbpcs/decorator/error_handler.py"], "/fbpcs/service/onedocker.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/service/container.py"], "/tests/decorator/test_error_handler.py": ["/fbpcs/decorator/error_handler.py"], "/fbpcs/gateway/ecs.py": ["/fbpcs/decorator/error_handler.py", "/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/mapper/aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/entity/vpc_instance.py"], "/tests/gateway/test_ecs.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py"], "/fbpcs/gateway/cloudwatch.py": ["/fbpcs/decorator/error_handler.py"], "/tests/repository/test_instance_s3.py": ["/fbpcs/entity/mpc_instance.py", "/fbpcs/repository/instance_s3.py", "/fbpcs/service/storage_s3.py"], "/tests/mapper/test_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/mapper/aws.py"], "/fbpcs/service/storage_s3.py": ["/fbpcs/gateway/s3.py", "/fbpcs/service/storage.py", "/fbpcs/util/s3path.py"], "/fbpcs/service/container_aws.py": ["/fbpcs/entity/container_instance.py", "/fbpcs/gateway/ecs.py", "/fbpcs/service/container.py", "/fbpcs/util/typing.py"], "/tests/service/test_log_cloudwatch.py": ["/fbpcs/service/log_cloudwatch.py"]}
|
3,421
|
Zed-chi/dvmn_frontend_ch3
|
refs/heads/master
|
/utils.py
|
import json
import os
import re
import urllib
from bs4 import BeautifulSoup
from pathvalidate import sanitize_filepath
import requests
# Root URL of the tululu.org online library that all helpers target.
BASE_URL = "https://tululu.org"
""" helper errors """
class EmptyBookError(ValueError):
    """Raised when a downloaded book text turns out to be empty."""
    pass
class EmptyDetailsError(ValueError):
    """Raised when a parsed book page yields no details."""
    pass
class EmptyImageError(ValueError):
    """Raised when a downloaded cover image is empty."""
    pass
class EmptyHTMLError(ValueError):
    """Raised when a fetched page body is empty."""
    pass
class URLParseError(ValueError):
    """Raised when a book id cannot be extracted from a URL."""
    pass
""" helper functions """
def check_status_code(response):
    """Return True for a successful response; raise HTTPError otherwise.

    Any status code >= 300 (including redirects, which callers disable by
    default) is treated as a failure.
    """
    status = response.status_code
    if status < 300:
        return True
    raise requests.HTTPError(f"Site answered with {status} code")
def get_content_from_url(url, allow_redirects=False):
    """GET url and return the raw response body as bytes.

    Raises requests.HTTPError for status codes >= 300.
    """
    # NOTE(review): verify=False disables TLS certificate checking.
    response = requests.get(
        url,
        allow_redirects=allow_redirects,
        verify=False,
    )
    check_status_code(response)
    return response.content
def get_text_from_url(url, urlparams=None, allow_redirects=False):
    """GET url and return the decoded response body.

    urlparams -- optional dict of query-string parameters
    Raises requests.HTTPError for status codes >= 300.
    """
    # NOTE(review): verify=False disables TLS certificate checking.
    response = requests.get(
        url,
        params=urlparams,
        allow_redirects=allow_redirects,
        verify=False,
    )
    check_status_code(response)
    return response.text
def get_id_from_book_url(url):
    """Extract the numeric book id from a tululu URL like ".../b123/".

    Raises URLParseError when no "b<digits>" segment is present.
    """
    match = re.search(r"b([0-9]+)", url)
    if match is None:
        raise URLParseError(f"Cant get book id from {url}")
    return match.group(1)
def get_book_details(html, base_url):
    """Parse a tululu book page into a details dict.

    Returns keys: title, author, img_url (absolute), comments, genres.
    Raises AttributeError if the expected page elements are missing.
    """
    soup = BeautifulSoup(html, "lxml")
    # The page header looks like "Title :: Author".
    raw_header = soup.select_one("#content > h1").text
    title, author = (part.strip() for part in raw_header.split("::"))
    image_tag = soup.select_one(".bookimage img")
    image_url = urllib.parse.urljoin(base_url, image_tag.get("src"))
    return {
        "title": title,
        "author": author,
        "img_url": image_url,
        "comments": [span.text for span in soup.select(".texts span")],
        "genres": [link.text for link in soup.select("#content > .d_book > a")],
    }
def save_book(filepath, content):
    """Write book text to filepath, creating parent directories as needed.

    Raises FileExistsError if the book was already saved.
    """
    os.makedirs(os.path.dirname(filepath), exist_ok=True)
    if os.path.exists(filepath):
        raise FileExistsError(f"Book {filepath} already saved")
    with open(filepath, "w", encoding="utf-8") as book_file:
        book_file.write(content)
def download_txt(from_="", to="", urlparams=None):
    """Download a book's text from from_ and save it at to.

    Best-effort: any failure (HTTP error, empty body, file already saved)
    is printed rather than raised, so batch downloads keep going.
    """
    try:
        target_path = sanitize_filepath(to, platform="auto")
        text = get_text_from_url(from_, urlparams)
        if not text:
            raise EmptyBookError(f"Got empty textfile from {from_}")
        save_book(target_path, text)
    except Exception as error:
        print(error)
def print_book_details(details):
    """Pretty-print a book-details dict; falsy fields are skipped."""
    print("\n==========")
    title = details["title"]
    if title:
        print(f"=== Заголовок: {title} ===")
    author = details["author"]
    if author:
        print(f"=== Автор: {author} ===")
    if details["comments"]:
        joined_comments = "\n ".join(details["comments"])
        print(f"=== Комментарии: \n{joined_comments} ===")
    genres = details["genres"]
    if genres:
        print(f"=== Жанры: {genres} ===")
    img_url = details["img_url"]
    if img_url:
        print(f"=== Ссылка: {img_url} ===")
    print("==========")
def save_image(filepath, content):
    """Write image bytes to filepath, creating parent directories as needed.

    Raises FileExistsError if the image was already saved.
    """
    os.makedirs(os.path.dirname(filepath), exist_ok=True)
    if os.path.exists(filepath):
        raise FileExistsError(f"Image {filepath} is already saved.")
    with open(filepath, "wb") as image_file:
        image_file.write(content)
def download_image(from_=None, to=None):
    """Download a cover image from from_ and save it at to.

    Best-effort: any failure (HTTP error, empty body, file already saved)
    is printed rather than raised, so batch downloads keep going.
    """
    try:
        target_path = sanitize_filepath(to, platform="auto")
        payload = get_content_from_url(from_)
        if not payload:
            raise EmptyImageError(f"Got empty image from {from_}")
        save_image(target_path, payload)
    except Exception as error:
        print(error)
def make_description(json_dict, filepath="./books.json"):
    """Serialize json_dict to filepath as pretty-printed UTF-8 JSON."""
    with open(filepath, "w", encoding="utf-8") as out:
        json.dump(json_dict, out, ensure_ascii=False, indent=4)
|
{"/app.py": ["/parse_tululu_category.py", "/utils.py"], "/parse_tululu_category.py": ["/utils.py"]}
|
3,422
|
Zed-chi/dvmn_frontend_ch3
|
refs/heads/master
|
/app.py
|
import argparse
import logging
import os
from xml.etree.ElementTree import ParseError
from parse_tululu_category import get_links_from_pages
from requests import HTTPError
from utils import (
EmptyBookError,
EmptyDetailsError,
EmptyHTMLError,
EmptyImageError,
URLParseError,
download_image,
download_txt,
get_book_details,
get_id_from_book_url,
get_text_from_url,
make_description,
)
logging.basicConfig(level=logging.INFO)
BASE_URL = "https://tululu.org"
BASE_BOOK_PAGE = "https://tululu.org/b"
BASE_TXT_URL = "https://tululu.org/txt.php"
def get_args():
    """Parse CLI options: page range, destination folder, skip flags, json path."""
    cli = argparse.ArgumentParser()
    # Page range of the category listing to scan (end_page is exclusive
    # by the caller's convention; None means "until pages run out").
    cli.add_argument("--start_page", default=1, type=int)
    cli.add_argument("--end_page", type=int)
    cli.add_argument("--dest_folder", default="./")
    # Opt-out switches for the two payload types.
    cli.add_argument("--skip_imgs", action="store_true", default=False)
    cli.add_argument("--skip_txt", action="store_true", default=False)
    cli.add_argument("--json_path")
    return cli.parse_args()
def get_name_from_url(url):
    """Return the final path segment of url (text after the last '/')."""
    _, _, tail = url.rpartition("/")
    return tail
def main():
    """Download every book (text + cover) found in the category pages.

    Collects per-book metadata into a JSON description file. Individual
    book failures are logged and skipped so the batch completes.
    """
    args = get_args()
    books_dir = os.path.join(args.dest_folder, "books")
    images_dir = os.path.join(args.dest_folder, "images")
    json_filepath = args.json_path or os.path.join(
        args.dest_folder,
        "books.json",
    )
    links = get_links_from_pages(args.start_page, args.end_page)
    description = []
    if not links:
        logging.warning("No files to download :(")
        return None
    # `else` after `return` was redundant; flattened.
    logging.info(f"Going to download {len(links)} files...")
    # Renamed loop variable from `id` — it shadowed the builtin.
    for book_index, link in enumerate(links):
        try:
            html = get_text_from_url(link, allow_redirects=True)
            if not html:
                raise EmptyHTMLError("Book Page html is empty")
            details = get_book_details(html, link)
            if not details:
                raise EmptyDetailsError("Details is empty")
            if not args.skip_imgs:
                image_filename = get_name_from_url(details["img_url"])
                path = os.path.normcase(
                    os.path.abspath(os.path.join(images_dir, image_filename))
                )
                details["img_src"] = path
                download_image(from_=details["img_url"], to=details["img_src"])
            if args.skip_txt:
                continue
            book_filename = f"{book_index}.{details['title']}.txt"
            path = os.path.normcase(
                os.path.abspath(os.path.join(books_dir, book_filename))
            )
            details["book_path"] = path
            txt_id = get_id_from_book_url(link)
            if not txt_id:
                # Page has no downloadable text id; skip silently.
                continue
            download_txt(
                from_=BASE_TXT_URL,
                to=details["book_path"],
                urlparams={"id": txt_id},
            )
            logging.info(f"File '{book_filename}' has been saved")
            description.append(details)
        except (
            HTTPError,
            ParseError,
            ConnectionError,
            FileExistsError,
            EmptyBookError,
            EmptyImageError,
            EmptyHTMLError,
            URLParseError,
        ) as e:
            logging.error(e)
    make_description({"books": description}, json_filepath)
    logging.info(f"Files are downloaded, description in {json_filepath}")
|
{"/app.py": ["/parse_tululu_category.py", "/utils.py"], "/parse_tululu_category.py": ["/utils.py"]}
|
3,423
|
Zed-chi/dvmn_frontend_ch3
|
refs/heads/master
|
/parse_tululu_category.py
|
import urllib
from bs4 import BeautifulSoup
from utils import get_text_from_url
SFICTION_URL = "https://tululu.org/l55/"
def get_all_book_links_on_page(html):
    """Extract absolute book-page links from one category-listing page."""
    soup = BeautifulSoup(html, "lxml")
    # Each book row's first cell holds the anchor to the book page.
    anchors = soup.select(
        "div#content table.d_book tr:first-child td:first-child a"
    )
    return [
        urllib.parse.urljoin(SFICTION_URL, anchor.get("href"))
        for anchor in anchors
    ]
def get_sfiction_list_books_page(page_num):
    """Download the HTML of page page_num of the sci-fi category listing."""
    page_url = "{}{}/".format(SFICTION_URL, page_num)
    return get_text_from_url(page_url)
def get_links_from_pages(startpage, endpage=None):
    """Collect book links from category pages startpage..endpage-1.

    Stops when endpage is reached, when a page comes back empty, or on
    the first error (partial results are still returned).
    """
    links = []
    page_num = startpage
    while True:
        if endpage and page_num == endpage:
            return links
        try:
            html = get_sfiction_list_books_page(page_num)
            if not html:
                # Empty page means we ran past the last listing page.
                return links
            links.extend(get_all_book_links_on_page(html))
            page_num += 1
        except Exception:
            # Was a bare `except:`, which also swallowed KeyboardInterrupt
            # and SystemExit; narrowed to Exception, same best-effort intent.
            print(page_num, "error")
            break
    return links
|
{"/app.py": ["/parse_tululu_category.py", "/utils.py"], "/parse_tululu_category.py": ["/utils.py"]}
|
3,428
|
youngmoon-kang/kiwoomApi_practice
|
refs/heads/master
|
/__init__.py
|
from ui.ui import *
from kiwoom.kiwoom import *
class Main():
    # Application entry point: constructing it builds the UI, which
    # drives everything else (Ui_class comes from ui.ui).
    def __init__(self):
        # Korean runtime message: "main class to run".
        print("실행할 메인 클래스")
        Ui_class()
if __name__ == "__main__":
    m = Main()
|
{"/__init__.py": ["/kiwoom/kiwoom.py"]}
|
3,429
|
youngmoon-kang/kiwoomApi_practice
|
refs/heads/master
|
/kiwoom/kiwoom.py
|
from PyQt5.QAxContainer import *
from PyQt5.QtCore import *
from config.errorCode import *
from PyQt5.QtTest import *
class Kiwoom(QAxWidget):
    """Wrapper around the Kiwoom OpenAPI OCX control.

    Logs in, queries account/deposit/holdings/unfilled orders, then runs
    a daily-candle scan over KOSDAQ codes. All API traffic goes through
    QAxWidget.dynamicCall; QEventLoop instances block until the matching
    TR callback fires.
    """
    def __init__(self):
        super().__init__()
        print("kiwoom클래스 입니다.")
        # --- event loops ---
        self.login_event_loop = None
        self.detail_account_info_event_loop = QEventLoop()
        self.calculator_event_loop = QEventLoop()
        # --- screen numbers ---
        self.screen_my_info = "2000"
        self.screen_calculation_stock = "4000"
        # --- state ---
        self.account_num = None
        self.account_stock_dict = dict()       # holdings, keyed by stock code
        self.not_account_stock_dict = dict()   # unfilled orders, keyed by order no
        # --- account-related ---
        self.use_money = 0
        self.use_money_percent = 0.5
        self.get_ocx_instance()
        self.event_slots()
        self.signal_login_commConnect()
        self.get_account_info()
        self.detail_account_info()
        self.detail_account_mystock()  # request account valuation balance
        self.not_concluded_account()
        self.calculator_fnc()  # stock analysis; temporary, run at startup
    def get_ocx_instance(self):
        # Bind this widget to the Kiwoom OpenAPI ActiveX control.
        self.setControl("KHOPENAPI.KHOpenAPICtrl.1")
    def event_slots(self):
        self.OnEventConnect.connect(self.login_slot)
    def signal_login_commConnect(self):
        # Fire the login dialog and block until login_slot exits the loop.
        self.dynamicCall("CommConnect()")
        self.login_event_loop = QEventLoop()
        self.login_event_loop.exec_()
    def login_slot(self, errCode):
        print(errors(errCode))
        self.login_event_loop.exit()
        # NOTE(review): TR-data slot is connected only after login succeeds;
        # presumably intentional — confirm no TR arrives before this point.
        self.OnReceiveTrData.connect(self.trdata_slot)
    def get_account_info(self):
        # First account number from the semicolon-separated list.
        account_list = self.dynamicCall("GetLoginInfo(String)", "ACCNO")
        self.account_num = account_list.split(';')[0]
        print("나의 보유 계좌번호: {0}".format(self.account_num))
    def detail_account_info(self):
        # Request deposit details (TR opw00001) and block for the reply.
        print("예수금을 요청하는 부분")
        self.dynamicCall("SetInputValue(String, String)", "계좌번호", self.account_num)
        self.dynamicCall("SetInputValue(String, String)", "비밀번호", "0000")
        self.dynamicCall("SetInputValue(String, String)", "비밀번호입력매체구분", "00")
        self.dynamicCall("SetInputValue(String, String)", "조회구분", "2")
        self.dynamicCall("CommRqData(String, String, String, String)", "예수금상세현황요청", "opw00001", "0", "2000")
        self.detail_account_info_event_loop = QEventLoop()
        self.detail_account_info_event_loop.exec_()
    def detail_account_mystock(self, sPrevNext = "0"):
        # Request account valuation balance (TR opw00018); sPrevNext="2"
        # continues a multi-page reply.
        self.dynamicCall("SetInputValue(String, String)", "계좌번호", self.account_num)
        self.dynamicCall("SetInputValue(String, String)", "비밀번호", "0000")
        self.dynamicCall("SetInputValue(String, String)", "비밀번호입력매체구분", "00")
        self.dynamicCall("SetInputValue(String, String)", "조회구분", "2")
        self.dynamicCall("CommRqData(String, String, String, String)", "계좌평가잔고내역요청", "opw00018", sPrevNext, self.screen_my_info)
        self.detail_account_info_event_loop.exec_()
    def not_concluded_account(self, sPrevNext = "0"):
        # Request real-time unfilled orders (TR opt10075).
        self.dynamicCall("SetInputValue(String, String)", "계좌번호", self.account_num)
        self.dynamicCall("SetInputValue(String, String)", "체결구분", "1")
        self.dynamicCall("SetInputValue(String, String)", "매매구분", "0")
        self.dynamicCall("CommRqData(String, String, String, String)", "실시간미체결요청", "opt10075", sPrevNext, self.screen_my_info)
        self.detail_account_info_event_loop.exec_()
    def trdata_slot(self, sScrNo, sRQName, sTrCode, sRecordName, sPrevNext):
        '''
        Slot receiving TR replies.
        :param sScrNo: screen number
        :param sRQName: the name given when the request was made
        :param sTrCode: request id / TR code
        :param sRecordName: unused
        :param sPrevNext: whether another page follows
        :return:
        '''
        if sRQName == "예수금상세현황요청":
            deposit = self.dynamicCall("GetCommData(String, String, int, String)", sTrCode, sRQName, 0, "예수금")
            print("예수금: ", int(deposit))
            # Budget: half the deposit, split across 4 positions.
            self.use_money = int(deposit) * self.use_money_percent
            self.use_money = self.use_money / 4
            ok_deposit = self.dynamicCall("GetCommData(String, String, int, String)", sTrCode, sRQName, 0, "출금가능금액")
            print("출금가능금액: ", int(ok_deposit))
            self.detail_account_info_event_loop.exit()
        if sRQName == "계좌평가잔고내역요청":
            total_buy_money = self.dynamicCall("GetCommData(String, String, int, String)", sTrCode, sRQName, 0, "총매입금액")
            print("총 매입금액: ", int(total_buy_money))
            total_profit_loss_rate = self.dynamicCall("GetCommData(String, String, int, String)", sTrCode, sRQName, 0, "총수익률(%)")
            print("총 수익률(%): ", float(total_profit_loss_rate))
            # NOTE(review): the "Qstring" casing below is as in the original
            # signature strings — dynamicCall appears to accept it; left as-is.
            rows = self.dynamicCall("GetRepeatCnt(QString, QString)", sTrCode, sRQName)
            cnt = 0
            for i in range(rows):
                code = self.dynamicCall("GetCommData(QString, Qstring, int, QString)", sTrCode, sRQName, i, "종목번호")
                code_nm = self.dynamicCall("GetCommData(QString, Qstring, int, QString)", sTrCode, sRQName, i, "종목명")
                # BUG FIX: was code.stripe() (AttributeError); drop the
                # leading market-prefix character after trimming.
                code = code.strip()[1:]
                stock_quantity = self.dynamicCall("GetCommData(QString, Qstring, int, QString)", sTrCode, sRQName, i, "보유수량")
                buy_price = self.dynamicCall("GetCommData(QString, Qstring, int, QString)", sTrCode, sRQName, i, "매입가")
                learn_rate = self.dynamicCall("GetCommData(QString, Qstring, int, QString)", sTrCode, sRQName, i, "수익률(%)")
                current_price = self.dynamicCall("GetCommData(QString, Qstring, int, QString)", sTrCode, sRQName, i, "현재가")
                total_chagual_price = self.dynamicCall("GetCommData(QString, Qstring, int, QString)", sTrCode, sRQName, i, "매입금액")
                possible_quantity = self.dynamicCall("GetCommData(QString, Qstring, int, QString)", sTrCode, sRQName, i, "매매가능수량")
                if code in self.account_stock_dict:
                    pass
                else:
                    self.account_stock_dict.update({code: {}})
                code_nm = code_nm.strip()
                stock_quantity = int(stock_quantity.strip())
                buy_price = int(buy_price.strip())
                learn_rate = float(learn_rate.strip())
                current_price = int(current_price.strip())
                total_chagual_price = int(total_chagual_price)
                possible_quantity = int(possible_quantity)
                self.account_stock_dict[code].update({"종목명": code_nm})
                self.account_stock_dict[code].update({"보유수량": stock_quantity})
                self.account_stock_dict[code].update({"매입가": buy_price})
                self.account_stock_dict[code].update({"수익률(%)": learn_rate})
                self.account_stock_dict[code].update({"현재가": current_price})
                self.account_stock_dict[code].update({"매입금액": total_chagual_price})
                self.account_stock_dict[code].update({"매매가능수량": possible_quantity})
                cnt += 1
            print("계좌에 가지고 있는 종목: ", self.account_stock_dict)
            if sPrevNext == "2":
                # More pages remain: request the continuation.
                self.detail_account_mystock("2")
            else:
                self.detail_account_info_event_loop.exit()
        elif sRQName =="실시간미체결요청":
            rows = self.dynamicCall("GetRepeatCnt(QString, QString)", sTrCode, sRQName)
            for i in range(rows):
                code = self.dynamicCall("GetCommData(QString, Qstring, int, QString)", sTrCode, sRQName, i, "종목번호")
                code_nm = self.dynamicCall("GetCommData(QString, Qstring, int, QString)", sTrCode, sRQName, i, "종목명")
                order_no = self.dynamicCall("GetCommData(QString, Qstring, int, QString)", sTrCode, sRQName, i, "주문번호")
                order_status = self.dynamicCall("GetCommData(QString, Qstring, int, QString)", sTrCode, sRQName, i, "주문상태")
                # NOTE(review): field name "수문주량" looks like a typo for
                # "주문수량" — it is a runtime API field name, left untouched;
                # verify against the Kiwoom TR spec.
                order_quantity = self.dynamicCall("GetCommData(QString, Qstring, int, QString)", sTrCode, sRQName, i, "수문주량")
                order_price = self.dynamicCall("GetCommData(QString, Qstring, int, QString)", sTrCode, sRQName, i, "주문가격")
                order_gubun = self.dynamicCall("GetCommData(QString, Qstring, int, QString)", sTrCode, sRQName, i, "주문구분")
                not_quantity = self.dynamicCall("GetCommData(QString, Qstring, int, QString)", sTrCode, sRQName, i, "미체결수량")
                ok_quantity = self.dynamicCall("GetCommData(QString, Qstring, int, QString)", sTrCode, sRQName, i, "체결량")
                # BUG FIX: was code.stripe() (AttributeError).
                code = code.strip()
                code_nm = code_nm.strip()
                order_no = int(order_no.strip())
                order_status = int(order_status.strip())
                order_quantity = int(order_quantity.strip())
                order_price = float(order_price.strip())
                order_gubun = order_gubun.lstrip('+').lstrip('-')
                not_quantity = int(not_quantity.strip())
                ok_quantity = int(ok_quantity.strip())
                if order_no in self.not_account_stock_dict:
                    pass
                else:
                    self.not_account_stock_dict[order_no] = {}
                self.not_account_stock_dict[order_no].update({"종목코드": code})
                self.not_account_stock_dict[order_no].update({"종목명": code_nm})
                self.not_account_stock_dict[order_no].update({"주문번호": order_no})
                self.not_account_stock_dict[order_no].update({"주문상태": order_status})
                self.not_account_stock_dict[order_no].update({"주문수량": order_quantity})
                self.not_account_stock_dict[order_no].update({"주문가격": order_price})
                self.not_account_stock_dict[order_no].update({"주문구분": order_gubun})
                self.not_account_stock_dict[order_no].update({"미체결수량": not_quantity})
                self.not_account_stock_dict[order_no].update({"체결량": ok_quantity})
                print("미체결 종목: ", self.not_account_stock_dict[order_no])
            self.detail_account_info_event_loop.exit()
        elif sRQName == "주식일봉차트조회":
            print("일봉 데이터 요청")
            code = self.dynamicCall("GetCommData(QString, Qstring, int, QString)", sTrCode, sRQName, 0, "종목코드")
            code = code.strip()
            print("{0} 일봉데이터 요청".format(code))
            rows = self.dynamicCall("GetRepeatCnt(QString, QString)", sTrCode, sRQName)
            print(rows)
            if sPrevNext =="2":
                self.day_kiwoom_db(code=code, sPrevNext=sPrevNext)
            else:
                self.calculator_event_loop.exit()
    def get_code_list_by_market(self, market_code):
        '''
        Return the stock codes for a market.
        :param market_code: Kiwoom market id ("10" = KOSDAQ)
        :return: list of code strings
        '''
        code_list = self.dynamicCall("GetCodeListByMarket(QString)", market_code)
        # Reply is semicolon-separated with a trailing empty element.
        code_list = code_list.split(";")[:-1]
        return code_list
    def calculator_fnc(self):
        '''
        Iterate all KOSDAQ codes and request daily candles for each.
        :return:
        '''
        code_list = self.get_code_list_by_market("10")
        print("코스닥 갯수: ", len(code_list))
        for idx, code, in enumerate(code_list):
            self.dynamicCall("DisconnectRealData(QString)", self.screen_calculation_stock)
            print("{0} / {1} : KOSDAQ Stock Code : {2} is updating...".format(idx + 1, len(code_list), code))
            self.day_kiwoom_db(code=code)
    def day_kiwoom_db(self, code=None, date=None, sPrevNext="0"):
        # Rate-limit: the API throttles requests, so wait 3.6 s between calls.
        QTest.qWait(3600)
        self.dynamicCall("SetInputValue(QString, QString)", "종목코드", code)
        self.dynamicCall("SetInputValue(QString, QString)", "수정주가구분", "1")
        if date != None:
            self.dynamicCall("SetInputValue(QString, QString)", "기준일자", date)
        self.dynamicCall("CommRqData(QString, QString, int, QString)", "주식일봉차트조회", "opt10081", sPrevNext, self.screen_calculation_stock )
        self.calculator_event_loop.exec_()
|
{"/__init__.py": ["/kiwoom/kiwoom.py"]}
|
3,450
|
diego1castroo/paralelismo-python
|
refs/heads/main
|
/funcionesquenoocuerrenenelinterpretedepython.py
|
from time import sleep, time
from threading import Thread
# Sequential baseline: ten 1-second sleeps run back to back (~10 s).
start = time()
for _ in range(10):
    sleep(1)
print('Tomó {} segundos.'.format(time() - start))
# Threaded version: the same ten sleeps overlap, so total wall time is
# ~1 s — sleep releases the GIL, demonstrating that I/O-style waits
# parallelize with threads.
threads = []
start = time()
for _ in range(10):
    t = Thread(target=sleep, args=(1,))
    t.start()
    threads.append(t)
for t in threads:
    t.join()
print('Tomó {} segundos.'.format(time() - start))
|
{"/testpi2.py": ["/ccalcpi.py"]}
|
3,451
|
diego1castroo/paralelismo-python
|
refs/heads/main
|
/ccalcpi.py
|
from cython.parallel import parallel, prange
import openmp
from libc.stdlib import malloc, free
import cython
def calcpi(int n):
    # Approximate pi with n terms of the Nilakantha series
    # pi = 3 + 4/(2*3*4) - 4/(4*5*6) + 4/(6*7*8) - ...
    # using an OpenMP parallel reduction over 6 threads.
    cdef double result = 0.0
    cdef int num_threads
    cdef int i, si
    with nogil,parallel(num_threads = 6):
        # prange makes `result +=` an OpenMP reduction; si alternates the sign.
        for i in prange (2, n * 2, 2):
            si = 1 if ((i/2) %2==1) else -1
            result += 4.0 * si /(i*(i+1.0)* (i + 2.0))
    return result + 3
|
{"/testpi2.py": ["/ccalcpi.py"]}
|
3,452
|
diego1castroo/paralelismo-python
|
refs/heads/main
|
/testpi2.py
|
from ccalcpi import calcpi
# Driver for the Cython/OpenMP pi approximation: more points -> more
# series terms -> better accuracy.
nPoints = 50000000
pi = calcpi(nPoints)
print('OpenMP pi = ', pi, ' for ', nPoints)
|
{"/testpi2.py": ["/ccalcpi.py"]}
|
3,453
|
diego1castroo/paralelismo-python
|
refs/heads/main
|
/setup2.py
|
from distutils.core import setup
from Cython.Build import cythonize
from distutils.extension import Extension
from Cython.Distutils import build_ext
# Build config for the OpenMP-parallel pi module (ccalcpi.pyx).
ext_modules=[
    Extension("ccalcpi",
              ["ccalcpi.pyx"],
              # BUG FIXES versus the original flags:
              #  "-03" (digit zero) -> "-O3" (letter O): gcc rejects "-03".
              #  "-march-nativ"     -> "-march=native": typo gcc rejects.
              #  "-fopenmp" added to compile args: cython.parallel/prange
              #  requires OpenMP at compile time, not just at link time.
              extra_compile_args = ["-O3", "-ffast-math", "-march=native", "-fopenmp"],
              extra_link_args = ['-fopenmp']
              )
]
setup(
    name = 'Calc Pi',
    cmdclass = {"build_ext": build_ext},
    ext_modules = ext_modules
)
|
{"/testpi2.py": ["/ccalcpi.py"]}
|
3,457
|
marbibu/myCad
|
refs/heads/master
|
/Point.py
|
from Sender import Sender
class Point(Sender):
    """A 2-D point with local (x, y) and global (X, Y) coordinates plus
    visibility / existence / selection flags. Every state change is
    broadcast to listeners via Sender.sendSignal()."""
    def __init__(s,x,y):
        # Data:
        Sender.__init__(s)
        s.__x,s.__y=x,y          # local coordinates
        s.__X,s.__Y=x,y          # global coordinates (start equal to local)
        s.__visible=1
        s.__exist=1
        s.__selected=0
        # Initial state: created, shown, selected (each sends a signal).
        s.create()
        s.show()
        s.select()
    def getXY(s):# Return global coordinates
        return s.__X,s.__Y
    def getX(s):# Return global X coordinate
        return s.__X
    def getY(s):# Return global Y coordinate
        return s.__Y
    def getxy(s):# Return local coordinates
        return s.__x,s.__y
    def getx(s):# Return local x coordinate
        return s.__x
    def gety(s):# Return local y coordinate
        # BUG FIX: previously returned s.__x.
        return s.__y
    def getVisible(s):# Return visibility flag
        return s.__visible
    def getExist(s):# Return existence flag
        return s.__exist
    def getSelected(s):# Return selection flag
        return s.__selected
    # Mutators below notify listeners after each change.
    def create(s):# Create the point
        s.__exist=1
        s.sendSignal()
    def destroy(s):# Destroy the point
        s.__exist=0
        s.sendSignal()
    def show(s):# Show the point
        s.__visible=1
        s.sendSignal()
    def hide(s):# Hide the point
        s.__visible=0
        s.sendSignal()
    def select(s):# Select the point
        s.__selected=1
        s.sendSignal()
    def deselect(s):# Deselect the point
        s.__selected=0
        s.sendSignal()
|
{"/Point.py": ["/Sender.py"], "/Main.py": ["/Window.py", "/Desk.py", "/Point.py", "/Path.py"], "/Desk.py": ["/PointGUI.py", "/PathGUI.py"], "/Path.py": ["/Sender.py"]}
|
3,458
|
marbibu/myCad
|
refs/heads/master
|
/Main.py
|
from Window import Window
from Desk import Desk
from Point import Point
from Path import Path
class Main:
    # Demo entry point: opens a window, puts a Desk (canvas) in it, adds
    # four points forming a square and a Path through them, then enters
    # the Tk main loop.
    def __init__(s):
        # Data:
        win=Window("Point",0,0,600,600)
        master=win.getMaster()
        desk=Desk(master)
        C=desk.getC()
        p1=Point(100,100)
        p2=Point(200,100)
        p3=Point(200,200)
        p4=Point(100,200)
        desk.addPoint(p1)
        desk.addPoint(p2)
        desk.addPoint(p3)
        desk.addPoint(p4)
        path=Path()
        path.addPoint(p1)
        path.addPoint(p2)
        path.addPoint(p3)
        path.addPoint(p4)
        desk.addPath(path)
        win.loop()
Main()
# Questions
# Where should GUI objects be created? maybe in Desk?
|
{"/Point.py": ["/Sender.py"], "/Main.py": ["/Window.py", "/Desk.py", "/Point.py", "/Path.py"], "/Desk.py": ["/PointGUI.py", "/PathGUI.py"], "/Path.py": ["/Sender.py"]}
|
3,459
|
marbibu/myCad
|
refs/heads/master
|
/PointGUI.py
|
class PointGUI:
    """Draws a Point as a small filled circle on a Tk canvas."""
    __r=6  # circle radius in pixels
    def __init__(s,C,point):
        # Data:
        s.__C=C
        s.__point=point
        # Draw immediately on construction.
        s.__draw()
    def __draw(s):# Render the point as an oval; keep the canvas item id.
        cx, cy = s.__point.getXY()
        radius = s.__r
        s.__tag = s.__C.create_oval(
            cx - radius, cy - radius, cx + radius, cy + radius,
            fill="gold", outline="orange")
|
{"/Point.py": ["/Sender.py"], "/Main.py": ["/Window.py", "/Desk.py", "/Point.py", "/Path.py"], "/Desk.py": ["/PointGUI.py", "/PathGUI.py"], "/Path.py": ["/Sender.py"]}
|
3,460
|
marbibu/myCad
|
refs/heads/master
|
/Desk.py
|
from PointGUI import PointGUI
from PathGUI import PathGUI
from Tkinter import Canvas
class Desk:
    """Drawing area: owns the Canvas and builds GUI objects for model objects."""
    def __init__(s,master):
        # Data:
        s.__master=master
        # Build the widget immediately.
        s.__draw()
    def __draw(s):# Create and pack the canvas
        canvas = Canvas(s.__master, highlightthickness=0, bg="gray80")
        canvas.pack(side="top", expand=1, fill="both")
        s.__C = canvas
    def getC(s):# Return the Canvas
        return s.__C
    def addPoint(s,point):# Create the GUI for a point
        PointGUI(s.__C, point)
    def addPath(s,path):# Create the GUI for a path
        PathGUI(s.__C, path)
|
{"/Point.py": ["/Sender.py"], "/Main.py": ["/Window.py", "/Desk.py", "/Point.py", "/Path.py"], "/Desk.py": ["/PointGUI.py", "/PathGUI.py"], "/Path.py": ["/Sender.py"]}
|
3,461
|
marbibu/myCad
|
refs/heads/master
|
/Listener.py
|
class Listener:
    # Base class that enables receiving signals from a Sender.
    # NOTE: `print "..."` below means this file is Python 2.
    def __init__(s):
        # Data:
        pass
    def receiveSignal(s,sender):
        # Default implementation: subclasses are expected to override.
        print "Nie zaimplementowano odbierania sygnalow"
    def listen2(s,sender):# Start listening to the given sender
        sender.addListener(s)
|
{"/Point.py": ["/Sender.py"], "/Main.py": ["/Window.py", "/Desk.py", "/Point.py", "/Path.py"], "/Desk.py": ["/PointGUI.py", "/PathGUI.py"], "/Path.py": ["/Sender.py"]}
|
3,462
|
marbibu/myCad
|
refs/heads/master
|
/Sender.py
|
class Sender:
    """Minimal observer-pattern subject: broadcasts itself to registered listeners."""
    def __init__(s):
        # Data: registered listeners, notified in registration order.
        s.__listeners=[]
    def addListener(s,listener):# Register a listener
        s.__listeners.append(listener)
    def delListener(s,listener):# Unregister a listener
        s.__listeners.remove(listener)
    def sendSignal(s):# Notify every listener, passing this sender
        for subscriber in s.__listeners:
            subscriber.receiveSignal(s)
    def getName(s):# Return the sender's class name
        return s.__class__.__name__
|
{"/Point.py": ["/Sender.py"], "/Main.py": ["/Window.py", "/Desk.py", "/Point.py", "/Path.py"], "/Desk.py": ["/PointGUI.py", "/PathGUI.py"], "/Path.py": ["/Sender.py"]}
|
3,463
|
marbibu/myCad
|
refs/heads/master
|
/PathGUI.py
|
class PathGUI:
def __init__(s,C,path):
#Dane:
s.__C=C
s.__path=path
#Definicje:
s.__draw()
def __draw(s):#Rysuje sciezke
coords=s.__path.getCoords()
s.__C.create_line(*coords)#zalezy od warstwy
|
{"/Point.py": ["/Sender.py"], "/Main.py": ["/Window.py", "/Desk.py", "/Point.py", "/Path.py"], "/Desk.py": ["/PointGUI.py", "/PathGUI.py"], "/Path.py": ["/Sender.py"]}
|
3,464
|
marbibu/myCad
|
refs/heads/master
|
/Path.py
|
from Sender import Sender
class Path(Sender):
    """An ordered list of Points with a notion of the current point.

    New points are inserted right after the current one; adding a point
    whose coordinates already exist reuses the existing Point.
    """
    def __init__(s):
        # Data:
        Sender.__init__(s)
        s.__points=[]
        s.__current=None
    def selectPoint(s,point):# Make `point` the current point
        # NOTE(review): select() on the old current and deselect() on the
        # new one looks inverted relative to the method's intent — confirm
        # against the drawing code before changing; behavior kept as-is.
        if s.__current==None:
            pass
        else:
            s.__current.select()
        s.__current=point
        s.__current.deselect()
    def getCurrentPointIndex(s):# Return the index of the current point
        return s.__points.index(s.__current)
    def hasPointWithXY(s,point):# Return an existing point with the same coords, else None
        X,Y=point.getXY()
        for i in s.__points:
            x,y=i.getXY()
            if x==X and y==Y:
                return i
        return None
    def __addPoint(s,point):# Insert the point after the current one (or append)
        if s.__current==None:
            s.__points.append(point)
        else:
            index=s.getCurrentPointIndex()
            if index==len(s.__points)-1:
                s.__points.append(point)
            else:
                s.__points.insert(index+1,point)
    def addPoint(s,point):# Add a point, reusing an existing one with equal coords
        existing=s.hasPointWithXY(point)
        if existing is not None:
            point=existing
        s.__addPoint(point)
        s.selectPoint(point)
    def getCoords(s):# Return a flat list [x0, y0, x1, y1, ...] of global coords
        result=[]
        for i in s.__points:
            result.extend(list(i.getXY()))
        return result
    def getPoints(s):# Return the list of points
        return s.__points
|
{"/Point.py": ["/Sender.py"], "/Main.py": ["/Window.py", "/Desk.py", "/Point.py", "/Path.py"], "/Desk.py": ["/PointGUI.py", "/PathGUI.py"], "/Path.py": ["/Sender.py"]}
|
3,465
|
marbibu/myCad
|
refs/heads/master
|
/Window.py
|
from Tkinter import Tk
class Window:
    """Top-level Tk window with a fixed title, position and size."""
    def __init__(s,title,x,y,w,h):
        # Data:
        s.__title=title
        s.__x,s.__y=x,y
        s.__w,s.__h=w,h
        # Build the window immediately.
        s.__draw()
    def __draw(s):# Create and configure the Tk root window
        root = Tk()
        # Tk geometry string: "WxH+X+Y".
        root.geometry("%sx%s+%s+%s" % (s.__w, s.__h, s.__x, s.__y))
        root.title(s.__title)
        s.__master = root
    def getMaster(s):# Return the Tk root
        return s.__master
    def loop(s):# Enter the Tk event loop
        s.__master.mainloop()
|
{"/Point.py": ["/Sender.py"], "/Main.py": ["/Window.py", "/Desk.py", "/Point.py", "/Path.py"], "/Desk.py": ["/PointGUI.py", "/PathGUI.py"], "/Path.py": ["/Sender.py"]}
|
3,508
|
Mi7ai/Django
|
refs/heads/master
|
/accounts/urls.py
|
from django.urls import path, include
from django.contrib.auth import views as auth_views
from .views import ProfileView, SignupView, SignupDoneView
from django.views.generic import base as generic_views
# app_name = 'accounts'
urlpatterns = [
    # Built-in auth routes (login, logout, password reset, ...).
    path('', include('django.contrib.auth.urls')),
    path('profile/', ProfileView.as_view(), name='profile'),
    path('signup/', SignupView.as_view(), name='signup'),
    # Landing page shown after a successful signup.
    path('signupdone/', SignupDoneView.as_view(), name='signupdone'),
]
|
{"/accounts/urls.py": ["/accounts/views.py"]}
|
3,509
|
Mi7ai/Django
|
refs/heads/master
|
/accounts/views.py
|
from django.shortcuts import render
from django.urls import reverse_lazy
from django.views.generic import TemplateView, CreateView
# from django.views.generic import CreateView
from .forms import UserSignUpForm
# Create your views here.
class ProfileView(TemplateView):
    """Static profile page; just renders the template."""
    template_name = 'accounts/profile.html'
class SignupView(CreateView):
    """User registration form; on success redirects to the signupdone page."""
    form_class = UserSignUpForm
    success_url = reverse_lazy("signupdone")
    template_name = 'accounts/signup.html'
class SignupDoneView(TemplateView):
    """Confirmation page shown after a successful signup."""
    template_name = 'accounts/signup_done.html'
|
{"/accounts/urls.py": ["/accounts/views.py"]}
|
3,510
|
datosh/PyEngine
|
refs/heads/master
|
/mace_runner.py
|
import math
import pygame
import game
from pygame.locals import *
class Player(pygame.sprite.Sprite):
    """A player that is going to run around in the maze."""
    def __init__(self, x, y, width, height):
        super(Player, self).__init__()
        # Set the visuals and the position
        self.width = width
        self.height = height
        self.color = pygame.Color('red')
        self.image = pygame.Surface([self.width, self.height])
        self.image.fill(self.color)
        self.rect = self.image.get_rect()
        self.rect.topleft = (x, y)
        # Movement related varables
        self.old_x = 0
        self.old_y = 0
        self.x_dir = 0 # Should either be 1, 0 or -1
        self.y_dir = 0 # Should either be 1, 0 or -1
        self.speed = .2  # pixels per millisecond (scaled by frame delta)
        # Collision detections — caller assigns the wall group to collide with.
        self.collider_list = []
    def update(self, delta):
        """Move by (x_dir, y_dir) * speed * delta, resolving wall collisions.

        Axis-by-axis resolution: X is moved and resolved first, then Y —
        this ordering is what lets the player slide along walls, so the
        two passes must not be merged.
        """
        # Make sure delta x and y are in range [-1, 1]
        if self.x_dir:
            self.x_dir = math.copysign(1, self.x_dir)
        if self.y_dir:
            self.y_dir = math.copysign(1, self.y_dir)
        # Move the player
        self.old_x = self.rect.x
        self.old_y = self.rect.y
        # Move and check for collision in x direction
        self.rect.x = self.rect.x + self.x_dir * int(self.speed * delta)
        coll = pygame.sprite.spritecollide(self, self.collider_list, False)
        if coll:
            # Only the first colliding wall is used for the push-back.
            coll = coll[0]
            if self.x_dir > 0:
                self.rect.right = coll.rect.left
            else:
                self.rect.left = coll.rect.right
        # Move and check for collision in y direction
        self.rect.y = self.rect.y + self.y_dir * int(self.speed * delta)
        coll = pygame.sprite.spritecollide(self, self.collider_list, False)
        if coll:
            coll = coll[0]
            if self.y_dir > 0:
                self.rect.bottom = coll.rect.top
            else:
                self.rect.top = coll.rect.bottom
class Wall(pygame.sprite.Sprite):
    """Axis-aligned rectangular obstacle used as a level boundary."""
    def __init__(self, x, y, width, height, color='blue'):
        super(Wall, self).__init__()
        # Visuals: a solid rectangle positioned by its top-left corner.
        self.width, self.height = width, height
        self.color = pygame.Color(color)
        surface = pygame.Surface([self.width, self.height])
        surface.fill(self.color)
        self.image = surface
        self.rect = surface.get_rect()
        self.rect.topleft = (x, y)
class Level(object):
    """Base class for all levels in this mace runner game.
    This class should not be use directly but only be extended"""
    def __init__(self):
        super(Level, self).__init__()
        # Sprite groups that subclasses populate.
        self.wall_list = pygame.sprite.Group()
        self.enemy_list = pygame.sprite.Group()
class Level_01(Level):
    """First level: outer frame with an exit gap on the right plus five
    vertical red columns."""
    def __init__(self, width, height):
        super(Level_01, self).__init__()
        # (x, y, w, h) rects: four screen edges — the right edge is split
        # to leave a gap (the exit) — then the inner columns.
        wall_rects = (
            (0, 0, width, 10),
            (0, 470, width, 10),
            (0, 0, 10, height),
            (630, 0, 10, height / 2 - 15),
            (630, height / 2 + 15, 10, height),
            (100, 40, 10, height - 80),
            (200, 40, 10, height - 80),
            (300, 40, 10, height - 80),
            (400, 40, 10, height - 80),
            (500, 40, 10, height - 80),
        )
        for x, y, w, h in wall_rects:
            self.wall_list.add(Wall(x, y, w, h, color='red'))
class Level_02(Level):
    """Second level: same outer frame, horizontal blue bars inside."""
    def __init__(self, width, height):
        super(Level_02, self).__init__()
        # (x, y, w, h) rects: outer edges (right edge split for the exit
        # gap), then four horizontal bars; default Wall color (blue).
        wall_rects = (
            (0, 0, width, 10),
            (0, 470, width, 10),
            (0, 0, 10, height),
            (630, 0, 10, height / 2 - 15),
            (630, height / 2 + 15, 10, height),
            (40, 100, width - 80, 10),
            (40, 200, width - 80, 10),
            (40, height / 2, width - 80, 10),
            (40, 400, width - 80, 10),
        )
        for x, y, w, h in wall_rects:
            self.wall_list.add(Wall(x, y, w, h))
class MaceRunner(game.Game):
    """A simple implementation of mace runner.
    Main takeaway should be changing levels/screens."""
    def __init__(self):
        super(MaceRunner, self).__init__()
        # List of all the levels in the game
        self.levels = []
        self.current_level = 0
        self.levels.append(Level_01(self.WINDOWWIDTH, self.WINDOWHEIGHT))
        self.levels.append(Level_02(self.WINDOWWIDTH, self.WINDOWHEIGHT))
        # List for all the sprites in the game
        self.all_sprites = pygame.sprite.Group()
        # Add the player to the list
        self.player = Player(40, 40, 15, 15)
        self.all_sprites.add(self.player)
        # Player collides against the current level's walls.
        self.player.collider_list = self.levels[self.current_level].wall_list
    def update(self, delta):
        self.player.update(delta)
        # MAKE LEVEL TRANSITION
        # Leaving the screen on the right switches to level 2 and resets
        # the player near the top-left. NOTE(review): hard-coded to level
        # index 1, so there is no transition beyond the second level.
        if self.player.rect.x > self.WINDOWWIDTH:
            self.current_level = 1
            self.player.collider_list = self.levels[1].wall_list
            self.player.rect.topleft = (30, 30)
    def evnt_hndlr(self, event):
        # WASD movement: directions are accumulated on key-down and
        # reverted on key-up, so opposite keys cancel out.
        if event.type == KEYDOWN:
            if event.key == K_d:
                self.player.x_dir += 1
            if event.key == K_a:
                self.player.x_dir += -1
            if event.key == K_s:
                self.player.y_dir += 1
            if event.key == K_w:
                self.player.y_dir += -1
        if event.type == KEYUP:
            if event.key == K_d:
                self.player.x_dir += -1
            if event.key == K_a:
                self.player.x_dir += 1
            if event.key == K_s:
                self.player.y_dir += -1
            if event.key == K_w:
                self.player.y_dir += 1
    def draw(self, surf):
        # Walls first, then the player and other sprites on top.
        self.levels[self.current_level].wall_list.draw(surf)
        self.all_sprites.draw(surf)
def main():
    # Build and run the game until the window is closed.
    mr = MaceRunner()
    mr.run()
if __name__ == '__main__':
    main()
|
{"/mace_runner.py": ["/game.py"], "/gestrandet.py": ["/game.py"], "/brick_buster.py": ["/game.py"]}
|
3,511
|
datosh/PyEngine
|
refs/heads/master
|
/game.py
|
import pygame
import sys
from pygame.locals import *
def terminate():
    # Shut down pygame cleanly, then exit the interpreter.
    pygame.quit()
    sys.exit()
class Game(object):
    """This represents the abstract base class for any new game. Every new
    game should extend this calss"""
    def __init__(self, width=640, height=480):
        """Initializes the Game with a standard window size, fps and
        background color."""
        # Window
        self.WINDOWWIDTH = width
        self.WINDOWHEIGHT = height
        self.WINDOWDIMENSIONS = (self.WINDOWWIDTH, self.WINDOWHEIGHT)
        self.FPS = 60
        self.background_color = pygame.Color('black')
        # Subclasses may set this False to keep ESC from quitting.
        self.quit_on_esc = True
        self.done = False
        pygame.init()
        self.surf = pygame.display.set_mode(self.WINDOWDIMENSIONS)
        self.clock = pygame.time.Clock()
    def __str__(self):
        return "game.Game: WIDTH = {}, HEIGHT = {}, FPS = {}".format(
            self.WINDOWWIDTH,
            self.WINDOWHEIGHT,
            self.FPS)
    def run(self):
        """The run functions implements the main loop of the game. The
        functions self.update and self.draw are called, and shall be
        overwritten by the super class do to something useful."""
        while not self.done:
            # wait for frame to pass
            # delta is the elapsed milliseconds since the previous frame;
            # it is forwarded to update() for frame-rate-independent movement.
            delta = self.clock.tick(self.FPS)
            for event in pygame.event.get():
                # Terminate on X button
                if event.type == QUIT:
                    terminate()
                # Terminate on ESC
                if self.quit_on_esc:
                    if event.type == KEYDOWN and event.key == K_ESCAPE:
                        terminate()
                # pass event to event handler
                self.evnt_hndlr(event)
            # ---- EVENT HANDLING DONE ----
            # ---- UPDATE GAME OBJECTS ----
            self.update(delta)
            # ---- DRAW GAME OBJECTS ----
            self.surf.fill(self.background_color)
            self.draw(self.surf)
            pygame.display.update()
    def evnt_hndlr(self, event):
        # Hook: per-event input handling; default does nothing.
        pass
    def update(self, delta):
        # Hook: per-frame game-state update; default does nothing.
        pass
    def draw(self, surf):
        # Hook: per-frame rendering onto surf; default does nothing.
        pass
|
{"/mace_runner.py": ["/game.py"], "/gestrandet.py": ["/game.py"], "/brick_buster.py": ["/game.py"]}
|
3,512
|
datosh/PyEngine
|
refs/heads/master
|
/gestrandet.py
|
import game
import pygame
from pygame.locals import *
class SpriteSheet(object):
    """Helper class to load single images from a sprite sheet"""
    # This points to the sprite sheet image
    sprite_sheet = None
    def __init__(self, file_name):
        super(SpriteSheet, self).__init__()
        self.file_name = file_name
        # Load the sprite sheet
        # NOTE: convert() requires pygame.display to be initialized first.
        self.sprite_sheet = pygame.image.load(file_name).convert()
    def get_image(self, x, y, width, height):
        """Grab a single image out of the larger spritesheet."""
        # Create a blank image
        image = pygame.Surface([width, height]).convert()
        # Copy the sprite from the large sheet onto the smaller one
        image.blit(self.sprite_sheet, (0, 0), (x, y, width, height))
        # Assuming black works as the transparent color
        image.set_colorkey(pygame.Color('black'))
        return image
class Player(pygame.sprite.Sprite):
    """The player that is going to run around in the world"""
    # Facing direction; an immutable class default is safe to share.
    direction = 'R'
    def __init__(self):
        super(Player, self).__init__()
        # BUG FIX: walking_frames_l/r were class-level mutable lists, so
        # every Player instance shared (and re-appended to) the same
        # frame lists; they are now per-instance.
        self.walking_frames_l = []
        self.walking_frames_r = []
        # Set the visuals and the position
        sprite_sheet = SpriteSheet('p1_walk.png')
        # (x, y, w, h) of each walk frame in the sheet, in animation order.
        frame_rects = (
            (0, 0, 66, 90),
            (66, 0, 66, 90),
            (132, 0, 67, 90),
            (0, 93, 66, 90),
            (66, 93, 66, 90),
            (132, 93, 72, 90),
            (0, 186, 70, 90),
        )
        # Right-facing frames come straight from the sheet; left-facing
        # ones are the same frames flipped horizontally.
        for x, y, w, h in frame_rects:
            image = sprite_sheet.get_image(x, y, w, h)
            self.walking_frames_r.append(image)
            self.walking_frames_l.append(
                pygame.transform.flip(image, True, False))
        # Load the first image
        self.image = self.walking_frames_r[0]
        # Set the collider to match the image
        self.rect = self.image.get_rect()
class Gestrandet(game.Game):
    """A copy of the game GESTRANDET."""

    def __init__(self):
        # 1280x786 window (NOTE(review): 786 looks like a typo for 768 -- kept as-is)
        super(Gestrandet, self).__init__(width=1280, height=786)
        # TODO: shall we add caves and stuff?
        # List of the levels in the game
        # self.levels = []
        # Group of all drawable sprites in the game
        self.all_sprites = pygame.sprite.Group()
        # Add the player to the group
        self.player = Player()
        self.all_sprites.add(self.player)
        # Group of all the colliders in the current level
        self.all_collider = pygame.sprite.Group()

    def update(self, delta):
        # Advance the player by the elapsed frame time
        self.player.update(delta)

    def evnt_hndlr(self, event):
        # No custom event handling yet
        pass

    def draw(self, surf):
        # Draw every sprite onto the game surface
        self.all_sprites.draw(surf)
def main():
    """Create the game instance and enter its main loop."""
    Gestrandet().run()


if __name__ == '__main__':
    main()
|
{"/mace_runner.py": ["/game.py"], "/gestrandet.py": ["/game.py"], "/brick_buster.py": ["/game.py"]}
|
3,513
|
datosh/PyEngine
|
refs/heads/master
|
/brick_buster.py
|
import math
import game
import random
import pygame
from pygame.locals import *
class Block(pygame.sprite.Sprite):
    """A basic destructible block, drawn as a green 32x16 rectangle."""

    def __init__(self, x=0, y=0):
        super(Block, self).__init__()
        # Solid green rectangle for the brick
        surface = pygame.Surface([32, 16])
        surface.fill(pygame.Color('green'))
        self.image = surface
        # Collider matches the image, positioned at (x, y)
        self.rect = surface.get_rect()
        self.rect.x = x
        self.rect.y = y
class Ball(pygame.sprite.Sprite):
    """Implements the ball that is going to bust the bricks."""

    def __init__(self, x, y, screen_width, screen_height):
        """Create the ball centered at (x, y).

        screen_width/screen_height are the window dimensions; the height
        is used for the bottom-of-screen "death" test and the paddle-zone
        check in collide_with.
        """
        super(Ball, self).__init__()
        # Small square surface with a yellow circle drawn on black
        self.width = 6
        self.height = self.width
        self.image = pygame.Surface([self.width, self.height])
        self.image.fill(pygame.Color('black'))
        pygame.draw.circle(self.image,
                           pygame.Color('yellow'),
                           (int(self.width / 2), int(self.height / 2)),
                           int(self.width / 2))
        self.rect = self.image.get_rect()
        self.rect.center = (x, y)
        # Heading in radians; -pi/2 points straight up. Speed stays 0
        # until the player launches the ball.
        self.angle = -(math.pi / 2)
        self.speed = 0
        self.alive = True
        # SPIN: radians of deflection per pixel of offset from the paddle
        # center when bouncing off the paddle.
        self.SPIN = 0.04
        self.DEFAULT_SPEED = 4
        self.screen_width = screen_width
        self.screen_height = screen_height

    def update(self):
        """Advance the ball one frame and bounce it off the window edges."""
        area = pygame.display.get_surface().get_rect()
        assert area, "Couldn't retrieve display surface"
        # Decompose the heading into a per-frame displacement
        dx = self.speed * math.cos(self.angle)
        dy = self.speed * math.sin(self.angle)
        self.rect.move_ip((dx, dy))
        # Collision with the window, i.e. keep ball in window
        if not area.contains(self.rect):
            # Which corners poked outside? Both top (or both bottom)
            # corners out means a horizontal wall; both left (or both
            # right) corners out means a vertical wall.
            tl = not area.collidepoint(self.rect.topleft)
            tr = not area.collidepoint(self.rect.topright)
            bl = not area.collidepoint(self.rect.bottomleft)
            br = not area.collidepoint(self.rect.bottomright)
            if (tr and tl) or (br and bl):
                # Top/bottom wall: mirror the vertical component
                self.angle = -self.angle
            if (tl and bl) or (tr and br):
                # Left/right wall: mirror the horizontal component
                self.angle = math.pi - self.angle
        # If ball is at bottom of screen, ball is dead
        if self.screen_height - self.rect.y < self.height:
            self.alive = False

    def collide_with(self, colls):
        """Bounce the ball off every sprite in colls.

        Near the bottom of the screen (within 100px) the collider is
        presumably the paddle: the rebound angle is steered by the hit
        offset from the paddle center (self.SPIN per pixel). Everywhere
        else a corner test mirrors the angle like the wall bounce above.
        """
        for coll in colls:
            if self.screen_height - self.rect.y < 100:
                # Paddle zone: deflect based on the hit offset
                dist = self.rect.centerx - coll.rect.centerx
                self.angle = (-math.pi / 2) + (dist * self.SPIN)
            elif not coll.rect.contains(self.rect):
                tl = not coll.rect.collidepoint(self.rect.topleft)
                tr = not coll.rect.collidepoint(self.rect.topright)
                bl = not coll.rect.collidepoint(self.rect.bottomleft)
                br = not coll.rect.collidepoint(self.rect.bottomright)
                if (tr and tl) or (br and bl):
                    self.angle = -self.angle
                if (tl and bl) or (tr and br):
                    self.angle = math.pi - self.angle
class Player(pygame.sprite.Sprite):
    """Implements the player (paddle) of the BrickBuster game."""

    def __init__(self, x, y, screen_width):
        super(Player, self).__init__()
        self.width = 64
        self.height = 8
        # Blue rectangle for the paddle
        self.image = pygame.Surface([self.width, self.height])
        self.image.fill(pygame.Color('blue'))
        self.rect = self.image.get_rect()
        self.rect.x = x
        self.rect.y = y
        self.screen_width = screen_width

    def update(self):
        """Follow the mouse on the x axis, clamped to the window.

        Only x changes -- the paddle always stays at the bottom of the
        screen.
        """
        half = self.width / 2
        mouse_x = pygame.mouse.get_pos()[0]
        lower = 0 + half
        upper = self.screen_width - self.width + half
        # Clamp the paddle center into [lower, upper]
        self.rect.centerx = min(max(mouse_x, lower), upper)
class BrickBuster(game.Game):
    """Implementing the old arcade game BrickBuster.

    A small state machine drives the game:
    'intro' -> 'playing' -> 'gameover' (then back to a fresh game by
    re-running __init__).
    """
    def __init__(self):
        super(BrickBuster, self).__init__()
        self.FPS = 120
        # Create a container for all sprites
        self.all_sprites = pygame.sprite.Group()
        # Create a container only for the sprites the ball can hit
        self.collide_sprites = pygame.sprite.Group()
        # Create some blocks and put them in their containers
        for x in range(130, 500, 50):
            for y in range(100, 300, 50):
                block = Block(x, y)
                self.all_sprites.add(block)
                self.collide_sprites.add(block)
        # Create the player paddle at the bottom center
        self.player = Player(self.WINDOWWIDTH / 2,
                             self.WINDOWHEIGHT - 30,
                             self.WINDOWWIDTH)
        self.all_sprites.add(self.player)
        self.collide_sprites.add(self.player)
        # Create the ball (launched later on mouse click)
        self.ball = Ball(300, 300, self.WINDOWWIDTH, self.WINDOWHEIGHT)
        self.all_sprites.add(self.ball)
        # Some statistics
        self.score = 0
        # Font and color used by drawText
        self.text_font = pygame.font.SysFont(None, 48)
        self.text_color = pygame.Color('white')
        # State identifiers
        self.intro = 'intro'
        self.playing = 'playing'
        self.gameover = 'gameover'
        self.state = self.intro
    def evnt_hndlr(self, event):
        """Handle one pygame event according to the current state."""
        # INTRO
        if self.state == self.intro:
            # Any key or click starts the game
            if event.type == KEYDOWN or event.type == MOUSEBUTTONDOWN:
                self.state = self.playing
        # PLAYING
        elif self.state == self.playing:
            if event.type == MOUSEBUTTONDOWN:
                # Launch the ball
                self.ball.speed = self.ball.DEFAULT_SPEED
        # GAMEOVER
        elif self.state == self.gameover:
            # Any key or click starts a new game
            if event.type == KEYDOWN or event.type == MOUSEBUTTONDOWN:
                self.__init__()
    def update(self, delta):
        """Per-frame game logic; delta is currently unused."""
        # ---- INTRO ----
        if self.state == self.intro:
            pass
        # ---- PLAYING ----
        elif self.state == self.playing:
            # Get all sprites that collide with the ball. If the player is
            # one of them remove it from the list; the remaining (block)
            # sprites are counted as score and deleted.
            colls = pygame.sprite.spritecollide(self.ball,
                                                self.collide_sprites,
                                                False)
            self.ball.collide_with(colls)
            if self.player in colls:
                colls.remove(self.player)
            self.score += len(colls)
            self.collide_sprites.remove(colls)
            self.all_sprites.remove(colls)
            # Update the player and the ball
            self.player.update()
            self.ball.update()
            # Test if the game is over
            if not self.ball.alive:
                self.state = self.gameover
        # ---- GAMEOVER ----
        elif self.state == self.gameover:
            pass
    def draw(self, surf):
        """Render the scene for the current state onto surf."""
        if self.state == self.intro:
            self.drawText(surf, 'Press a button to start', 140, 200)
        elif self.state == self.playing:
            self.all_sprites.draw(surf)
            self.drawText(surf, str(self.score), 10, 20)
        elif self.state == self.gameover:
            self.drawText(surf, 'Wanna play again?', 140, 200)
            self.drawText(surf,
                          'Your score was {}'.format(self.score),
                          140,
                          240)
    def drawText(self, surface, text, x, y):
        """Render text at (x, y) using the instance font and color."""
        textobj = self.text_font.render(text, 1, self.text_color)
        textrect = textobj.get_rect()
        textrect.topleft = (x, y)
        surface.blit(textobj, textrect)
def main():
    """Create the BrickBuster instance and enter its main loop."""
    BrickBuster().run()


if __name__ == '__main__':
    main()
|
{"/mace_runner.py": ["/game.py"], "/gestrandet.py": ["/game.py"], "/brick_buster.py": ["/game.py"]}
|
3,618
|
KatherineSeng/CECS450-Group3-Project1
|
refs/heads/master
|
/parse.py
|
import codecs
import re
import nltk
def stripExtra(name):
    """Remove a parenthesised suffix from a character name.

    "BOB (V.O.)" becomes "BOB". The character before the '(' is also
    dropped (normally the separating space). Names without a '(' -- or
    with '(' as the very first character -- are returned unchanged.

    Args:
        name (string): character's name

    Returns:
        string: character's name without the parenthesised part
    """
    paren_at = name.find('(')
    # -1 means "no paren"; 0 means the name starts with one -- both are
    # left untouched, matching the original behaviour.
    if paren_at <= 0:
        return name
    return name[:paren_at - 1]
def parseText(textFileName):
    """Parse a film transcript (https://www.imsdb.com/ copy-paste format).

    Args:
        textFileName (string): name/path of txt file of film transcript.

    Returns:
        (string, dictionary): the dialogue-only text, and a dictionary of
        each character's spoken words with counts, formatted as
        {character: {word: 2, anotherword: 4}}.
    """
    charWordDic = {}
    with codecs.open(textFileName, 'r', 'utf-8') as f:
        # read the whole file content (f is rebound from handle to text)
        f = f.read()
    # accumulates all the cleaned dialogue text
    spoken_text = ''
    test = ''  # NOTE(review): unused leftover variable, kept as-is
    # True while the lines being read belong to a character's dialogue
    currentlySpeaking = False
    # True once the character's name has been handled for this dialogue run
    writtenName = False
    # name of the character currently speaking
    currentSpeaker = ''
    # indentation of the first seen line, used to recognise name lines
    spacing = 0
    # split the file into lines
    for line in f.split('\n'):
        # split the line into its words
        words = line.split()
        # blank line: reset the speaking/name state
        if not words:
            currentlySpeaking = False
            writtenName = False
            spacing = 0
            continue
        # name candidate: the line's purely-alphabetic words
        nameStriped = [word for word in words if word.isalpha()]
        # indentation of this line, used to detect dialogue continuation
        newSpacing = (len(line) - len(line.lstrip()))
        if (spacing == 0):
            spacing = newSpacing
        # Person-identifier heuristic: 1-3 alphabetic words, first word
        # longer than 1 char, indentation below 45, all uppercase, and
        # the same indentation as the reference spacing.
        if len(nameStriped) > 0 and len(nameStriped) <= 3 and len(nameStriped[0]) > 1 and (len(line) - len(line.lstrip()) < 45) and all([i.isupper() for i in nameStriped]) and spacing == newSpacing:
            currentSpeaker = line.strip()
            writtenName = False
            currentlySpeaking = True
            continue
        # Indented line while a speaker is active => spoken dialogue.
        # Indent may be tabs or spaces; >= 2 satisfies both cases.
        if (len(line) - len(line.lstrip()) >= 2) and currentlySpeaking:
            # strip extras such as parentheses from the character's name
            currentSpeaker = stripExtra(currentSpeaker)
            if '(' in line or ')' in line:
                # parenthesised stage direction, not dialogue -- skip
                continue
            if not writtenName:
                # spoken_text+="\n"+currentSpeaker + ": "
                writtenName = True
            # Multi-word proper names (e.g. "Black Falcon") are glued
            # into one dictionary entry; same algorithm as commonWords.
            entireWord = ""
            for word in words:
                if (word[0].isupper() and not word.isupper()):
                    # Title-case word: part of a (possibly multi-word) name
                    find = re.compile("['.!?,]").search(word)
                    if find != None:
                        # punctuation ends the name -- record it
                        index = find.span()[0]
                        entireWord += word[:index]
                        charWordDic = includeInCharacterDic(currentSpeaker, entireWord.strip().lower(), charWordDic)
                        entireWord = ""
                        continue
                    else:
                        # name may continue into the next word
                        entireWord += word.strip() + " "
                        continue
                else:
                    # ordinary word: trim trailing punctuation, flush any
                    # pending name, then record the word itself
                    find = re.compile("['.!?,]").search(word)
                    if find != None:
                        index = find.span()[0]
                        word = word[:index]
                    if (entireWord.strip() != ""):
                        charWordDic = includeInCharacterDic(currentSpeaker, entireWord.strip().lower(), charWordDic)
                        charWordDic = includeInCharacterDic(currentSpeaker, word.strip().lower(), charWordDic)
                        entireWord = ""
                    else:
                        charWordDic = includeInCharacterDic(currentSpeaker, word.strip().lower(), charWordDic)
                        entireWord = ""
            # flush a name left pending at the end of the line
            if (entireWord.strip() != ""):
                charWordDic = includeInCharacterDic(currentSpeaker, entireWord.strip().lower(), charWordDic)
            # append the line to the dialogue text, dropping anything in
            # parentheses and all non-word characters except '.!?,
            spoken_text += re.sub(r"\(.*?\)|[^\w\s'.!?,]", '', line.lstrip())
    # dialogue text plus {character: {word: count}} dictionary
    return spoken_text, charWordDic
def includeInCharacterDic(currentSpeaker, word, charWordDic):
    """Record one occurrence of *word* for *currentSpeaker*.

    The word is stripped, cleared of punctuation (everything except word
    characters, whitespace and apostrophes) and lower-cased before being
    counted.

    Args:
        currentSpeaker (string): name of character in script.
        word (string): word being placed into the dictionary.
        charWordDic (dictionary): {character: {word: count}} mapping.

    Returns:
        charWordDic (dictionary): the same mapping with the count updated.
    """
    # normalise once: strip, drop punctuation such as -- . ? !, lower-case
    cleaned = re.sub(r"[^\w\s']", '', word.strip()).lower()
    # ensure the speaker has a counter dict, then bump the word count
    speaker_counts = charWordDic.setdefault(currentSpeaker, {})
    speaker_counts[cleaned] = speaker_counts.get(cleaned, 0) + 1
    return charWordDic
def commonWords(text, amount, stopwords):
    """Find the most common words of a dialogue-only script.

    Args:
        text (string): a corpus of only dialogue
        amount (int): number of common words to return.
        stopwords (list): caller-supplied stopwords (merged with nltk's).

    Returns:
        list: tuples of (word, frequency), e.g. [(word, 2), (anotherword, 4)].
    """
    # split the entire text into words
    words = text.split()
    # merge the caller's stopwords with nltk's list (no duplicates)
    stopwords2 = nltk.corpus.stopwords.words()
    stopwordList = stopwords + list(set(stopwords2) - set(stopwords))
    # Glue consecutive Title-case words into one token (e.g. a first and
    # last name such as "Black Falcon"); same algorithm as parseText.
    entireWord = ""
    newWordsList = []
    for word in words:
        if (word[0].isupper() and not word.isupper()):
            # Title-case word: part of a (possibly multi-word) name
            find = re.compile("['.!?,]").search(word)
            if find != None:
                # punctuation ends the name -- record it
                index = find.span()[0]
                entireWord += word[:index]
                newWordsList.append(entireWord.strip())
                entireWord = ""
                continue
            else:
                # name may continue into the next word
                entireWord += word + " "
                continue
        else:
            # ordinary word: trim trailing punctuation, flush any pending
            # name, then record the word itself
            find = re.compile("['.!?,]").search(word)
            if find != None:
                index = find.span()[0]
                word = word[:index]
            if (entireWord.strip() != ""):
                newWordsList.append(entireWord.strip().lower())
                newWordsList.append(word.strip().lower())
                entireWord = ""
            else:
                newWordsList.append(word.strip().lower())
                entireWord = ""
    # flush a name left pending at the very end
    if (entireWord.strip() != ""):
        newWordsList.append(entireWord.strip())
    # drop stopwords, pure digits and empty strings
    cleansed_words = []
    for word in newWordsList:
        if (word.lower() not in stopwordList and not word.isdigit() and word):
            cleansed_words.append(word.lower())
    # nltk frequency distribution gives the most common words directly
    fdist = nltk.FreqDist(cleansed_words)
    common = fdist.most_common(amount)
    # list of (word, count) tuples
    return common
def removeStopwordsDic(dic, stopwords):
    """Remove stopwords from a parseText-style character dictionary.

    Args:
        dic (dictionary): {character: {word: count}} mapping.
        stopwords (list): caller-supplied stopwords (merged with nltk's).

    Returns:
        dictionary: the same mapping with all stopwords, digits and blank
        words filtered out, formatted as {character: {word: 2, ...}}.
    """
    # merge the caller's stopwords with nltk's list (no duplicates)
    nltk_words = nltk.corpus.stopwords.words()
    all_stops = stopwords + list(set(nltk_words) - set(stopwords))
    # rebuild the dictionary keeping only "real" words
    filtered = {}
    for character, counts in dic.items():
        filtered[character] = {
            word: count
            for word, count in counts.items()
            if word.lower() not in all_stops
            and not word.isdigit()
            and word.strip()
            and word
        }
    return filtered
def keepInCommon(dic, common):
    """Keep only the entries of dic whose word appears in common.

    Entries are inserted in descending count order, so the returned dict
    iterates from most- to least-said word.

    Args:
        dic (dictionary): {word: count} for one character.
        common (list): (word, count) tuples of film-wide common words.

    Returns:
        dictionary: the filtered {word: count} mapping.
    """
    common_words = {entry[0] for entry in common}
    ranked = sorted(dic.items(), key=lambda item: item[1], reverse=True)
    # a character may have said more words, but only the film-wide common
    # ones are kept
    return {word: count for word, count in ranked if word in common_words}
def formatnSortByChar(dic, text, common):
    """Format each character's commonly-said words as a display string.

    Args:
        dic (dictionary): {character: {word: count}} mapping.
        text (string): unused; kept for interface compatibility.
        common (list): (word, count) tuples of film-wide common words.

    Returns:
        string: per-character sections, each listing that character's
        words that are common in the film, sorted by count descending:

            Character1
            word 4
            newword 3

            Character2
            word 2
    """
    out = ''
    common_words = [entry[0] for entry in common]
    for character in dic:
        header_written = False
        # most-said words first
        for word, count in sorted(dic[character].items(),
                                  key=lambda kv: kv[1], reverse=True):
            # only show words that are common throughout the whole text
            if word not in common_words:
                continue
            if not header_written:
                # write the character header lazily, so characters with
                # no common words are omitted entirely
                out += "\n\n" + character
                header_written = True
            out += "\n" + str(word) + " " + str(count)
    return out
# give each word said by each character a ratio based on amount said and
# how many characters said it
def computeWeightedRatio(dic):
    """Unimplemented stub: compute a weighted ratio per word.

    The intended weighting is based on how often a character said the
    word, how often it was said in the whole film, and how many
    characters said it.

    Args:
        dic (dictionary): {character: {word: count}} mapping.

    Returns:
        None -- not implemented yet; the final return shape is to be
        determined (probably the same dictionary extended with ratios).
    """
    return
def createNewStopwords(textFileName):
    """Load a custom stopword list from a text file, one word per line.

    Also downloads nltk's stopword corpus as a side effect (mirrors the
    original behaviour; callers rely on the corpus being present).

    Args:
        textFileName (string): name/path of the stopword txt file.

    Returns:
        list: of stopword strings.
    """
    # ensure nltk's stopword corpus is available
    nltk.download('stopwords')
    custom = []
    with codecs.open(textFileName, 'r') as handle:
        for raw_line in handle.readlines():
            custom.append(raw_line.strip())
    return custom
def main():
    """Demo driver: parse the Lego Movie script and compute common words.

    NOTE(review): the module-level call below is commented out, so running
    this file directly does nothing; wordcloud.py is the real entry point.
    """
    # download nltk stopwords list
    nltk.download('stopwords')
    # Transcript path; only the https://www.imsdb.com/ copy-paste format
    # is supported so far.
    textFileName = 'FilmScripts/LegoMovie.txt'
    # Our own stopword list (from https://www.ranks.nl/stopwords), since
    # nltk's list alone may not remove everything we need.
    stopwords = createNewStopwords('stopwords.txt')
    # dialogue-only text plus the per-character word counter dictionary
    spoken_text, charWordDic = parseText(textFileName)
    # remove stopwords from the dictionary
    charWordDic = removeStopwordsDic(charWordDic, stopwords)
    # 100 most common words from the dialogue text
    common = commonWords(spoken_text, 100, stopwords)
    # per-character listing of the words that are common film-wide
    formatedString = formatnSortByChar(charWordDic, spoken_text, common)
    # counts only, e.g. for eyeballing the distribution
    newcommon = [i[1] for i in common]
    # (debug prints removed; results are currently unused)
# main()
|
{"/wordcloud.py": ["/parse.py"]}
|
3,619
|
KatherineSeng/CECS450-Group3-Project1
|
refs/heads/master
|
/beemovie.py
|
import codecs
import re
def reduceLine(dialogue):
    """Wrap a long dialogue line into indented chunks of ~34+ characters.

    Each chunk is cut at the first space at or after column 34 and emitted
    as "\n " + chunk. Fixes two defects of the original: the final
    remainder (the text after the last cut) was silently discarded, and a
    chunk with no space after column 34 lost its last character.

    Args:
        dialogue (string): the line to wrap.

    Returns:
        string: the wrapped text, each chunk prefixed with "\n ".
    """
    spokenTxt = ""
    while len(dialogue) > 34:
        space = dialogue.find(" ", 34)
        if space == -1:
            # no break point left; emit the rest below
            break
        spokenTxt += "\n " + dialogue[:space].strip()
        dialogue = dialogue[space:]
    # emit whatever remains (the original dropped this tail)
    if dialogue.strip():
        spokenTxt += "\n " + dialogue.strip()
    return spokenTxt
# Reformatting script: read the raw Bee Movie transcript ("NAME: line"
# format), uppercase the speaker names, wrap long dialogue lines, and
# write the result to FilmScripts/NewBeeMovie.txt.
spokenTxt = ' BEE MOVIE\n'
with codecs.open("FilmScripts/originalBeeMovie.txt", 'r', 'utf-8') as f:
    # read the file content (f is rebound from handle to text)
    f = f.read()
    # split the file into a list of lines
    for line in f.split('\n'):
        name = ''
        dialogue = ''
        # "NAME: dialogue" -- the first colon separates speaker from speech
        index = line.find(":")
        nameFound = False
        if (line.strip() == ""):
            continue
        if (index != -1):
            nameFound = True
            name = line[0:index]
            dialogue = line[index+1:]
        else:
            # continuation line: dialogue only
            dialogue = line.strip()
        if (nameFound):
            # new speaker: blank line plus uppercased name
            spokenTxt += "\n\n " + name.upper()
        if (len(dialogue) > 34):
            # long line: wrap it into indented chunks
            spokenTxt += reduceLine(dialogue)
        else:
            spokenTxt += "\n " + dialogue.strip()
print(spokenTxt)
with codecs.open("FilmScripts/NewBeeMovie.txt", 'w', 'utf-8') as f:
    f.write(spokenTxt)
|
{"/wordcloud.py": ["/parse.py"]}
|
3,620
|
KatherineSeng/CECS450-Group3-Project1
|
refs/heads/master
|
/wordcloud.py
|
import tkinter as tk
import random
import parse
from tkinter import simpledialog
# Palette the word-cloud labels are randomly colored from.
colors = ["blue","red","orange","green","purple"]
# word -> [tk label, placement record]; lets existing labels be destroyed
# before a new cloud is drawn.
labelDic = {}
# occupied-region records [x, x2, xmid, y, y2, ymid] for collision checks
placements = []
class Text(tk.Label):
    """A word-cloud word plus its hover-over detail behaviour.

    Wraps a separate tk.Label (self.label) placed in mainFrame; hovering
    it writes a per-character breakdown into hoverLabel.
    NOTE(review): tk.Label.__init__ (super) is never called -- the class
    only uses the separately created self.label; kept as-is.
    """
    def __init__(self, mainFrame, secondFrame, word, count, charDic, hoverLabel, individualChar=False, char=None):
        self.label = tk.Label(mainFrame, text=word)
        self.count = count
        self.word = word
        # True when this cloud shows a single character's words
        self.individualChar = individualChar
        self.char = char
        self.hoverLabel = hoverLabel
        # {character: {word: count}} used for the breakdown text
        self.charDic = charDic
        self.label.bind("<Enter>", self.on_enter)
        self.label.bind("<Leave>", self.on_leave)
    def hoverText(self):
        """Build the detail string shown while the mouse is over the word."""
        hoverString = "Word: "+self.word + "\nTotal : "+ str(self.count)+"\n"
        if (self.individualChar):
            # single-character cloud: short header only
            hoverString = self.char+"\nWord: "+self.word + "\nTotal : "+ str(self.count)+"\n"
            return hoverString
        # whole-film cloud: list every character who said this word,
        # sorted by how often they said it
        tupleList = []
        for char in self.charDic:
            if (self.word in self.charDic[char]):
                tupleList.append((char, self.charDic[char][self.word]))
        sort_orders = sorted(tupleList, key=lambda x: x[1], reverse=True)
        for i in sort_orders:
            char = i[0]
            charCount = i[1]
            hoverString += "{0:20} : {1:2} \n".format(char, charCount)
        return hoverString
    def on_enter(self, event):
        # show the details while hovering
        hoverString = self.hoverText()
        self.hoverLabel.configure(text=hoverString)
    def on_leave(self, enter):
        # restore the default prompt when the mouse leaves
        if (self.individualChar):
            self.hoverLabel.configure(text=self.char+"\nHOVER OVER A WORD TO VIEW DETAILS")
        else:
            self.hoverLabel.configure(text="HOVER OVER A WORD TO VIEW DETAILS")
def place_label(root, label, word, fontSize):
    """Randomly place one word label so it does not overlap placed words.

    Args:
        root: tk root
        label: tk label for the word
        word: string of the word
        fontSize: initial font size for the word

    Side effects: appends the occupied region to the module-level
    `placements` list and records the label in `labelDic`.
    """
    redo = True
    tries = 0
    # retry random positions until the label fits without overlapping
    while redo:
        colorIndex = random.randint(0, len(colors))-1
        # shrink the font when placement keeps failing
        if (tries > 500):
            fontSize = 10
        elif (tries > 10000):
            # NOTE(review): unreachable -- tries>500 matches first;
            # presumably the branches were meant to be ordered the other
            # way around. Kept as-is.
            fontSize = 3
        tries += 1
        label.config(font=("Courier", fontSize), fg=colors[colorIndex])
        root.update()
        width = label.winfo_reqwidth()
        height = label.winfo_reqheight()
        try:
            # random top-left within the 812x750 cloud area
            x = random.randint(0, 812-width)
            y = random.randint(0, 750-height)
        except ValueError:
            # label wider/taller than the area: give up after many tries
            if (tries > 100000):
                redo = False
                return
            else:
                continue
        x2 = x+width
        y2 = y+height
        xmid = (x+x2)/2
        ymid = (y+y2)/2
        for placement in placements:
            # overlap test against every placed word: x-interval overlap
            # (either edge or midpoint inside the other's span, checked
            # both ways) ...
            if (x > placement[0] and x < placement[1]) or (x2 > placement[0] and x2 < placement[1]) or (xmid > placement[0] and xmid < placement[1]) or (placement[2] > x and placement[2] < x2):
                # ... combined with the same test on the y-interval
                if (y > placement[3] and y < placement[4]) or (y2 > placement[3] and y2 < placement[4]) or (ymid > placement[3] and ymid < placement[4]) or (placement[5] > y and placement[5] < y2):
                    redo = True
                    break
                else:
                    redo = False
            else:
                redo = False
        if (len(placements) == 0):
            # first word always fits
            redo = False
    # commit the placement and record the occupied region
    label.place(x=x, y=y)
    root.update()
    place = [x, label.winfo_width()+x, xmid, y, label.winfo_height()+y, ymid]
    placements.append(place)
    labelDic[word] = [label, place]
def createWordCloud(root, mainFrame, secondFrame, tuples, charDic, hoverLabel, tupleFontSizeList, individualChar=False, char=None):
    """Destroy the current cloud and place one label per (word, count).

    Args:
        root: tk root
        mainFrame: tk frame the cloud is placed in
        secondFrame: tk frame with the hover label
        tuples: list of (word, count) tuples to display
        charDic: {character: {word: count}} used for hover details
        hoverLabel: tk label for hover text
        tupleFontSizeList: list of (count, fontSize) tuples, parallel to
            `tuples` (only the font size at each index is used)
        individualChar: True when showing a single character's cloud
        char: that character's name, when individualChar is True
    """
    # clear the previous cloud before drawing the new one
    for word in labelDic:
        label = labelDic[word][0]
        label.destroy()
    labelDic.clear()
    placements.clear()
    index = 0
    for tup in tuples:
        word = tup[0]
        count = tup[1]
        # build the label with hover behaviour, then place it randomly
        text = Text(mainFrame, secondFrame, word, count, charDic, hoverLabel, individualChar, char)
        size = tupleFontSizeList[index][1]
        place_label(root, text.label, word, size)
        index += 1
def createWordCloudChar(char, root, mainFrame, secondFrame, charWordDic, hoverLabel, common, sizes):
    """Rebuild the cloud from a single character's words.

    Args:
        char: character name to display
        root: tk root
        mainFrame: tk frame the cloud is placed in
        secondFrame: tk frame with the hover label
        charWordDic: {character: {word: count}} mapping
        hoverLabel: tk label for hover text
        common: film-wide (word, count) tuples (currently unused here)
        sizes: default font sizes, largest first
    """
    # collect this character's (word, count) pairs, most-said first
    tupleList = []
    for word in charWordDic[char]:
        tupleList.append((word, charWordDic[char][word]))
    tupleList = sorted(tupleList, key=lambda x: x[1], reverse=True)
    # tighter bucketing (spread 1) for a single character's counts
    tupleFontSizeList = generateNewSizes(tupleList, sizes, True)
    hoverLabel.configure(text=char+"\nHOVER OVER A WORD TO VIEW DETAILS")
    createWordCloud(root, mainFrame, secondFrame, tupleList, charWordDic, hoverLabel, tupleFontSizeList, True, char)
def parseFunction(fileName, amountOfCommon):
    """Parse a transcript and return character word counts plus common words.

    Args:
        fileName: path of a transcript txt file (imsdb.com copy-paste format)
        amountOfCommon: how many film-wide common words to compute

    Returns:
        (dict, list): the stopword-filtered {character: {word: count}}
        mapping and the list of (word, count) common-word tuples.
    """
    textFileName = fileName
    # our own stopword list (https://www.ranks.nl/stopwords) supplements
    # nltk's, which may not remove everything we need
    stopwords = parse.createNewStopwords('stopwords.txt')
    # dialogue-only text plus the per-character word counter dictionary
    spoken_text, charWordDic = parse.parseText(textFileName)
    # remove stopwords from the dictionary
    charWordDic = parse.removeStopwordsDic(charWordDic, stopwords)
    # most common words of the whole dialogue
    common = parse.commonWords(spoken_text, amountOfCommon, stopwords)
    return charWordDic, common
def createRangeList(countList, spreadAmount):
    """Collapse a descending count list into bucket-boundary values.

    Starting from the maximum, any count within `spreadAmount` below the
    current boundary is absorbed into that bucket; the first count that
    falls outside opens a new bucket and becomes the new boundary.

    Args:
        countList: word counts (expected sorted descending).
        spreadAmount: bucket width.

    Returns:
        list: the distinct bucket boundaries, largest first.
    """
    thresholds = []
    ceiling = max(countList)
    for value in countList:
        # values just below the current boundary belong to its bucket
        if value in range(ceiling - spreadAmount, ceiling):
            continue
        if value not in thresholds:
            thresholds.append(value)
            ceiling = value
    return thresholds
def generateNewSizes(tupleList, sizes, individualChar=False):
    """Assign a font size to every word count via count bucketing.

    Args:
        tupleList: (word, count) tuples.
        sizes: font sizes, largest first; sizes[0] goes to the top bucket.
        individualChar: True uses a tighter bucket spread (1 vs 6).

    Returns:
        list: (count, fontSize) tuples sorted by count descending,
        parallel to the sorted tupleList.
    """
    tupleList = sorted(tupleList, key=lambda x: x[1], reverse=True)
    countList = [i[1] for i in tupleList]
    # bucket boundaries over the counts; a single character's counts are
    # clustered tighter, so use a smaller spread
    if (individualChar):
        rangeList = createRangeList(countList, 1)
    else:
        rangeList = createRangeList(countList, 6)
    # never more buckets than available sizes
    if (len(rangeList) > len(sizes)):
        rangeList = rangeList[0:len(sizes)-1]
    newSizeList = []
    newTupleList = []
    for count in countList:
        added = False
        # find the bucket [rangeList[index+1], rangeList[index]] this
        # count falls into and take that bucket's size
        for index in range(0, len(rangeList)-1):
            if (count in range(rangeList[index+1], rangeList[index]+1)):
                added = True
                newSizeList.append(sizes[index])
                break
        # hard-coded case: if all counts are equal there is only one
        # boundary -- everything gets the default size sizes[1]
        if (len(rangeList) == 1):
            newSizeList.append(sizes[1])
            continue
        # fallback: below the lowest boundary -> smallest size
        if (not added):
            newSizeList.append(sizes[len(sizes)-1])
    # pair each (sorted) count with its computed size
    for count in range(0, len(newSizeList)):
        newTupleList.append((tupleList[count][1], newSizeList[count]))
    return newTupleList
def movieSelection():
    """Ask the user for a movie title via a dialog and return the string.

    Returns None if the dialog is cancelled (simpledialog behaviour).
    """
    # hidden root window just to host the dialog
    userinput = tk.Tk()
    userinput.withdraw()
    USER_INP = simpledialog.askstring(title="Test", prompt="Enter Movie Title: ")
    return USER_INP
def main():
    """Build and run the word-cloud GUI for a user-chosen movie script."""
    #run parse function
    fileName = ("FilmScripts/"+movieSelection()+".txt")
    amountOfCommon = 100
    # parseFunction (defined elsewhere in this file) appears to return a
    # per-character word dictionary plus the most common (word, count)
    # pairs -- TODO confirm against its definition.
    charWordDic , common = parseFunction(fileName,amountOfCommon)
    #Default Sizes
    sizes=[60,35,20,15,10]
    #generate wordCloud text sizes using default sizes. returns list tuple of (count,FontSize)
    tupleFontSizeList = generateNewSizes(common,sizes)
    #Generate UI
    root = tk.Tk()
    root.geometry("1124x768")
    mainFrame = tk.Frame(root, width=824, height=750)
    secondFrame = tk.Frame(root, width=192, height=750)
    mainFrame.config(bd=4, relief=tk.SOLID)
    secondFrame.config(bd=4, relief=tk.SOLID)
    # Keep only characters that actually have associated words.
    newChar = {}
    for char in charWordDic:
        # charDic=parse.keepInCommon(charWordDic[char],common)
        # charDic=charWordDic[char]
        if(not charWordDic[char]):
            continue
        else:
            newChar[char]=charWordDic[char]
    #Hover Overable label
    hoverLabel = tk.Label(secondFrame, text="HOVER OVER A WORD TO VIEW DETAILS", width=192)
    hoverLabel.config(font=("Courier", 10))
    #Create Main Word Cloud
    #using common as a list of tuples that contain word and count.
    text = tk.Text(secondFrame, wrap="none")
    vsb = tk.Scrollbar(orient="vertical", command=text.yview)
    text.configure(yscrollcommand=vsb.set)
    text.insert("end", "Characters: \n")
    # "ALL" button re-renders the combined word cloud.
    button = tk.Button (secondFrame, text = "ALL",command= lambda: createWordCloud(root,mainFrame,secondFrame,common, newChar, hoverLabel, tupleFontSizeList))
    text.window_create("end", window=button)
    text.insert("end", "\n")
    #this will be the character buttons will probably create for loop and generate multiple buttons
    num = 0  # NOTE(review): unused; left in place since this is a doc-only pass
    for char in sorted(newChar):
        # Skip characters with fewer than 3 words -- too little data for a cloud.
        if(len(newChar[char])<3):
            continue
        name = char
        # `name=char` binds the current character as a default argument,
        # avoiding the late-binding closure pitfall in the loop.
        button = tk.Button (secondFrame, text = name,command= lambda name=char: createWordCloudChar(name,root,mainFrame,secondFrame,newChar,hoverLabel,common, sizes))
        text.window_create("end", window=button)
        text.insert("end", "\n")
    # Make the character list read-only once all buttons are in place.
    text.configure(state="disabled")
    createWordCloud(root,mainFrame,secondFrame,common, newChar, hoverLabel, tupleFontSizeList)
    #left frame and right frame
    mainFrame.pack(side="left", fill="both")
    secondFrame.pack(side="right", fill="both")
    hoverLabel.pack(side="top", fill="both")
    vsb.pack(side="right", fill="y")
    text.pack(fill="both", expand=True)
    root.mainloop()
if __name__ == "__main__":
    # Entry point: launch the word-cloud GUI only when run as a script.
    main()
|
{"/wordcloud.py": ["/parse.py"]}
|
3,640
|
ashaychangwani/AILearnsSnake
|
refs/heads/master
|
/PlayGameAI.py
|
import pygame
import pickle
from snake import Environment, snake
from nn import NeuralNet
import time
import copy
from params import *
# Replay saved snakes generation by generation: load the pickled list of
# generations (each a list of top snakes), then replay every snake with its
# saved network weights while rendering with pygame.  Press 'q' to skip the
# current snake; closing the window quits.

# `with` guarantees the model file is closed even if unpickling raises
# (the original left the handle open on error).
with open('saved/model.pickle', "rb") as model_file:
    snake_generations = pickle.load(model_file)

pygame.init()
pygame.font.init()
screen = pygame.display.set_mode((display_width, display_height))
environment = Environment(display_height, display_width, unit)

for i in range(len(snake_generations)):
    snakes = snake_generations[i]
    prev_score = -1
    for j in range(len(snakes)):
        saved_snake = snakes[j]
        pygame.display.set_caption('Generation : '+str(i+1)+'\t\tSnake Num: '+str(j+1)+'\t\tPrevious Score: '+str(prev_score))
        # Fresh snake that reuses the saved network's weights.
        t_snake = snake(display_width, display_height, NN_shape, unit, False)
        t_snake.neuralnet.theta = saved_snake.neuralnet.theta
        t_snake.neuralnet.bias = saved_snake.neuralnet.bias
        t_snake.neuralnet.setNextFood(
            environment.create_new_apple(t_snake.snake_position))
        screen = environment.create(screen, gray)
        screen = environment.draw_apple(screen, pink)
        screen = t_snake.draw_snake(screen, blue, cherry)
        pygame.display.update()
        checkloop = False
        start_time = time.time()
        while t_snake.isAlive():
            for event in pygame.event.get():
                # 'q' kills the current snake so the replay moves on.
                if event.type == pygame.KEYDOWN and event.key == pygame.K_q:
                    t_snake.collision_with_boundary = True
                    t_snake.collision_with_self = True
                if event.type == pygame.QUIT:
                    pygame.quit()
                    quit()
            if (t_snake.head_x, t_snake.head_y) == environment.apple_position:
                # Apple eaten: grow, spawn a new apple, reset the stall timers.
                t_snake.time_since_apple = 0
                result = t_snake.neuralnet.decision(t_snake.head_x, t_snake.head_y, t_snake.snake_position, t_snake.direction)
                t_snake.eatApple(result)
                t_snake.neuralnet.setNextFood(environment.create_new_apple(t_snake.snake_position))
                start_time = time.time()
                checkloop = False
            if t_snake.time_since_apple > 250:
                # Long time without an apple: track one cell; if the head
                # revisits it more than 4 times, treat it as stuck in a loop.
                # Setting BOTH collision flags is the "stuck" sentinel below.
                if not checkloop:
                    checkloop = True
                    any_point = (t_snake.head_x, t_snake.head_y)
                    times = 0
                elif (t_snake.head_x, t_snake.head_y) == any_point:
                    times += 1
                    if times > 4:
                        t_snake.collision_with_boundary = True
                        t_snake.collision_with_self = True
            if time.time() - start_time > 7:
                # Hard 7-second wall-clock cap per apple.
                t_snake.collision_with_boundary = True
                t_snake.collision_with_self = True
            result = t_snake.neuralnet.decision(
                t_snake.head_x, t_snake.head_y, t_snake.snake_position, t_snake.direction)
            if not t_snake.move(result):
                # Snake died this step: record score and report the cause.
                prev_score = len(t_snake.snake_position) - 1
                if t_snake.collision_with_boundary and t_snake.collision_with_self:
                    print('Generation: ' + str(i+1) + '\t\t' + \
                          'Snake Number: ' + str(j+1) + '\t\t' + \
                          'Score: ' + str(prev_score)+'\t\tReason: Stuck in Loop\t[Dead]')
                elif t_snake.collision_with_boundary:
                    print('Generation: ' + str(i+1) + '\t\t' + \
                          'Snake Number: ' + str(j+1) + '\t\t' + \
                          'Score: ' + str(prev_score)+'\t\tReason: Collision With Boundary\t[Dead]')
                else:
                    print('Generation: ' + str(i+1) + '\t\t' + \
                          'Snake Number: ' + str(j+1) + '\t\t' + \
                          'Score: ' + str(prev_score)+'\t\tReason: Collision With Self\t[Dead]')
            screen = environment.create(screen, gray)
            screen = environment.draw_apple(screen, pink)
            screen = t_snake.draw_snake(screen, blue, cherry)
            pygame.display.update()
        time.sleep(0.5)
|
{"/PlayGameAI.py": ["/snake.py", "/nn.py", "/params.py"], "/ga.py": ["/snake.py", "/params.py"], "/snake.py": ["/nn.py"], "/PlayGameHuman.py": ["/snake.py", "/nn.py", "/params.py"]}
|
3,641
|
ashaychangwani/AILearnsSnake
|
refs/heads/master
|
/params.py
|
# Shared configuration for the snake game, the genetic algorithm, and the
# pygame front-ends.

# Display geometry in pixels, and the size of one grid cell.
display_width = 540
display_height = 440
unit = 10

# Neural network: 24 sensory inputs -> 16 hidden units -> 3 outputs
# (the three relative moves the snake can make).
NN_shape = [24, 16, 3]
# Initialise networks with random weights on construction.
init_NN = True

# Genetic-algorithm hyperparameters.
population_size = 50
no_of_generations = 100
# Percentage of top / worst performers carried over via elitism.
percentage_best_performers = 20.0
percentage_worst_performers = 2.0
# Fraction of weight cells mutated per child, and perturbation magnitude.
mutation_percent = 7.0
mutation_intensity = 0.1

# RGB colours: cherry = snake head, blue = snake body, pink = apple/text,
# gray = boundary (as used by the draw calls in the game scripts).
cherry = (150, 0, 0)
blue = (106, 133, 164)
pink = (171, 54, 81)
gray = (55, 55, 55)
|
{"/PlayGameAI.py": ["/snake.py", "/nn.py", "/params.py"], "/ga.py": ["/snake.py", "/params.py"], "/snake.py": ["/nn.py"], "/PlayGameHuman.py": ["/snake.py", "/nn.py", "/params.py"]}
|
3,642
|
ashaychangwani/AILearnsSnake
|
refs/heads/master
|
/ga.py
|
import random
import pickle
import time
from snake import Environment, snake
import numpy as np
import matplotlib.pyplot as plt
from params import *
class GeneticAlgo:
    """Evolves snake-playing neural networks with a genetic algorithm:
    elitism keeps the best (and a few of the worst) snakes, crossover
    breeds the remainder, and mutation perturbs weights for variety."""

    def __init__(self, display_width, display_height, unit, NN_shape, init_NN, population_size, no_of_generations,
                 percentage_best_performers, percentage_worst_performers, mutation_percent, mutation_intensity):
        """
        Initializes an object of class GeneticAlgo with the parameters of the game.
        Args:
            display_width (int): The width of the frame in pixels
            display_height (int): The height of the frame in pixels
            unit (int): The size of each block of the frame in pixels
            NN_shape (list): The shape of the NeuralNetwork responsible for converting the input to outputs
            init_NN (bool): Boolean describing whether the neural network should be initialized with random weights
            population_size (int): Number of objects in each generation
            no_of_generations (int): Number of generations to run the neural net
            percentage_best_performers (float): Percentage of top performers of the previous generation to be used for elitism
            percentage_worst_performers (float): Percentage of worst performers of the previous generation to be used for elitism
            mutation_percent (float): Percentage chance of mutation of each member in weight matrix
            mutation_intensity (float): Intensity of mutation (magnitude of change in the weights)
        """
        self.display_width = display_width
        self.display_height = display_height
        self.unit = unit
        self.NN_shape = NN_shape
        self.init_NN = init_NN
        self.population_size = population_size
        self.no_of_generations = no_of_generations
        self.percentage_best_performers = percentage_best_performers
        self.percentage_worst_performers = percentage_worst_performers
        self.mutation_percent = mutation_percent
        self.mutation_intensity = mutation_intensity

    def run(self, snakes, environment):
        """Runs the snakes of a single generation (headless, no rendering).
        Args:
            snakes (list of type snake): List of all the snakes of the current generation to be run.
            environment (object): Object of type Environment
        Returns:
            average of all scores
            90th percentile of scores
        """
        i = 1
        scores = []
        # One seed per generation so every snake faces the same apple sequence.
        generation_seed = random.random()
        # Loop variable renamed from `snake` to avoid shadowing the imported class.
        for snk in snakes:
            start_time = time.time()
            checkloop = False
            self.progress(i/self.population_size, 30)
            random.seed(generation_seed)
            apple_position = environment.create_new_apple(snk.snake_position)
            snk.neuralnet.setNextFood(apple_position)
            while snk.isAlive():
                if (snk.head_x, snk.head_y) == environment.apple_position:
                    # Apple eaten: grow, spawn a new apple, reset stall timers.
                    snk.time_since_apple = 0
                    result = snk.neuralnet.decision(
                        snk.head_x, snk.head_y, snk.snake_position, snk.direction)
                    snk.eatApple(result)
                    start_time = time.time()
                    snk.neuralnet.setNextFood(
                        environment.create_new_apple(snk.snake_position))
                    checkloop = False
                if snk.time_since_apple > 250:  # could be tuned
                    # Loop detection: after a long appleless stretch, watch one
                    # cell; more than 2 revisits means the snake is cycling.
                    # Setting BOTH collision flags is the "stuck" sentinel.
                    if not checkloop:
                        checkloop = True
                        any_point = (snk.head_x, snk.head_y)
                        times = 0
                    elif (snk.head_x, snk.head_y) == any_point:
                        times += 1
                        if times > 2:
                            snk.collision_with_boundary = True
                            snk.collision_with_self = True
                if time.time() - start_time > 0.5:
                    # Wall-clock cap per apple keeps training fast.
                    snk.collision_with_boundary = True
                    snk.collision_with_self = True
                result = snk.neuralnet.decision(
                    snk.head_x, snk.head_y, snk.snake_position, snk.direction)
                if not snk.move(result):
                    break
            # Re-seed from system entropy so later randomness is independent.
            random.seed()
            scores.append(len(snk.snake_position) - 1)
            i += 1
        print("\nAverage: %.2f \n90th percentile: %.2f" %
              (np.average(scores), np.percentile(scores, 90)))
        return np.average(scores), np.percentile(scores, 90)

    def print_top(self, snakes):
        """Prints information (number, score, and reason for death) about the top snakes in each generation
        Args:
            snakes (list): List of objects (of type snake) of the top for current generation
        """
        i = 0
        for snk in snakes:
            i += 1
            print('snake ', i, ', score : ', len(snk.snake_position)
                  - 1, end='\t')
            # Both flags set together is the "stuck in loop" sentinel from run().
            if snk.collision_with_self and snk.collision_with_boundary:
                print('stuck in loop')
            elif snk.collision_with_boundary and not snk.collision_with_self:
                print('crashed wall')
            else:
                print('crashed body')

    def save(self, snakes, filename):
        """Saves the top snakes from every generation into a pickle file to be loaded in the gui.py file
        Args:
            snakes (list): List of top snakes of every generation
            filename (str): String representing filename of the output file
        """
        # `with` closes the file even if pickling raises (the original leaked
        # the handle on error).
        with open(filename, "wb") as f:
            pickle.dump(snakes, f)

    def cloneOfParents(self, parents):
        """Creates clones of parents selected for elitism to be added to the next generation
        Args:
            parents (list): List of parents selected for elitism
        Returns:
            [list]: List of the clones of the input snakes
        """
        snakes = []
        for parent in parents:
            babySnake = snake(self.display_width, self.display_height,
                              self.NN_shape, self.unit,
                              False)
            # Clones share the parent's weight arrays (no copy). Only
            # crossover children are mutated, so this aliasing is safe.
            babySnake.neuralnet.theta = parent.neuralnet.theta
            babySnake.neuralnet.bias = parent.neuralnet.bias
            snakes.append(babySnake)
        return snakes

    def elitism(self, snakes):
        """Selects top performing parents for elitism (along with a few bottom performers for variance)
        Args:
            snakes (list): List of all snakes in the generation sorted by their scores
        Returns:
            [list]: List of parents that have been selected for elitism and cloned for future generation
        """
        parents = []
        num_top = int(self.population_size *
                      self.percentage_best_performers / 100)
        num_bottom = int(self.population_size *
                         self.percentage_worst_performers / 100)
        parents.extend(self.cloneOfParents(snakes[:num_top]))
        parents.extend(self.cloneOfParents(snakes[-num_bottom:]))
        return parents, num_top, num_bottom

    def create_new_pop(self, snakes):
        """Function to create the new generation using the parents from the previous generation
        Args:
            snakes (list): List of all snakes from the previous generation
        Returns:
            [list]: List of snakes that represent the next generation
        """
        parents, num_top, num_bottom = self.elitism(snakes)
        children = self.offspringGeneration(
            parents, self.population_size - num_top - num_bottom)
        children = self.mutate(children)
        parents.extend(children)
        return parents

    def crossOver(self, parent1, parent2):
        """Performs crossover function of genetic algos
        Args:
            parent1 (snake): Input parent 1
            parent2 (snake): Input parent 2
        Returns:
            [snake]: Returns the child born from crossover of the two input parents
        """
        child = snake(self.display_width, self.display_height,
                      self.NN_shape, self.unit)
        # Uniform crossover: each weight/bias cell is taken from either parent
        # with equal probability.
        for i in range(len(parent1.neuralnet.theta)):
            for j in range(parent1.neuralnet.theta[i].shape[0]):
                for k in range(parent1.neuralnet.theta[i].shape[1]):
                    child.neuralnet.theta[i][j, k] = random.choice([
                        parent1.neuralnet.theta[i][j, k],
                        parent2.neuralnet.theta[i][j, k]])
            for j in range(parent1.neuralnet.bias[i].shape[1]):
                child.neuralnet.bias[i][0, j] = random.choice(
                    [parent1.neuralnet.bias[i][0, j],
                     parent2.neuralnet.bias[i][0, j]
                     ]
                )
        return child

    def offspringGeneration(self, parents, no_of_children):
        """Generates the rest of the population after elitism is done by performing crossover
        on the parents until the members of the next generation is equal to the specified
        population
        Args:
            parents (list): List of snakes that have been selected via elitism
            no_of_children (int): Number of snakes that are to be generated via crossover
        Returns:
            [list]: List of all the snakes of the next generation produced via crossover
        """
        all_children = []
        for _ in range(no_of_children):
            parent1 = random.choice(parents)
            parent2 = random.choice(parents)
            all_children.append(self.crossOver(parent1, parent2))
        return all_children

    def mutate(self, children):
        """Performs mutation task of Genetic Algos on the snakes in order to increase variety
        Args:
            children (list): List of all snakes in current generation (produced via elitism + crossover)
        Returns:
            [list]: List of all snakes in current generation after mutation is complete
        """
        for child in children:
            for W in child.neuralnet.theta:
                # Perturb mutation_percent% of the cells in each weight matrix.
                for _ in range(int(W.shape[0] * W.shape[1] * self.mutation_percent/100)):
                    row = random.randint(0, W.shape[0]-1)
                    col = random.randint(0, W.shape[1]-1)
                    W[row][col] += random.uniform(-self.mutation_intensity,
                                                  self.mutation_intensity)
        return children

    def runner(self):
        """
        Main function of the GeneticAlgo class: evaluates each generation,
        populates the next one, and checkpoints the top snakes after every
        generation.  Plots average and 90th-percentile scores at the end to
        identify an ideal early-stopping point.
        """
        snakes = [snake(self.display_width, self.display_height, self.NN_shape,
                        self.unit) for _ in range(self.population_size)]
        environment = Environment(self.display_height, self.display_width, self.unit)
        top_snakes = []
        averages = []
        percentile = []
        for i in range(self.no_of_generations):
            print('GENERATION: ', i+1, end='\n')
            avg, ptile = self.run(snakes, environment)
            averages.append(avg)
            percentile.append(ptile)
            snakes.sort(key=lambda x:
                        len(x.snake_position), reverse=True)
            self.print_top(snakes[0:5])
            top_snakes.append(snakes[:3])
            snakes = self.create_new_pop(snakes)
            # Checkpoint every generation so progress survives interruption.
            self.save(top_snakes, "saved/test.pickle")
        plt.plot(averages)
        plt.plot(percentile)
        plt.show()

    def progress(self, percent, length):
        """Creates a progress bar to check progress of current generation
        Args:
            percent (float): Fraction (0..1) that is complete
            length (int): Length of the progress bar
        """
        hashes = round(percent*length)
        # '\r' rewrites the same console line in place.
        print('\r', '*'*hashes + '_'*(length - hashes),
              '[{:.2%}]'.format(percent), end='')
if __name__ == '__main__':
    # Train with the hyperparameters imported from params.py.
    ga = GeneticAlgo(display_width, display_height, unit, NN_shape, init_NN, population_size, no_of_generations,
                     percentage_best_performers, percentage_worst_performers, mutation_percent, mutation_intensity)
    ga.runner()
|
{"/PlayGameAI.py": ["/snake.py", "/nn.py", "/params.py"], "/ga.py": ["/snake.py", "/params.py"], "/snake.py": ["/nn.py"], "/PlayGameHuman.py": ["/snake.py", "/nn.py", "/params.py"]}
|
3,643
|
ashaychangwani/AILearnsSnake
|
refs/heads/master
|
/snake.py
|
from pygame import draw, image, transform
from nn import NeuralNet
import random
class Environment:
    """Game board: owns the apple position and draws the board, boundary,
    and apple onto a pygame surface."""

    def __init__(self, display_height, display_width, unit_size):
        """Creates an object of type Environment.
        Args:
            display_height (int): Height of display in pixels.
            display_width (int): Width of display in pixels.
            unit_size (int): Size of each block in pixels.
        """
        self.display_height = display_height
        self.display_width = display_width
        self.unit = unit_size
        self.apple_position = (0, 0)
        # Cached apple sprite, loaded lazily on first draw_apple() call so the
        # image is read and scaled once instead of on every frame.
        self._apple_image = None

    def draw_apple(self, environment, color):
        """Draw the apple sprite at the current apple position.
        Args:
            environment (object): pygame surface to draw on
            color (tuple): RGB colour (unused; kept for interface compatibility)
        Returns:
            environment: the surface after the apple is drawn
        """
        if self._apple_image is None:
            # Scale to the grid cell size (was hard-coded to 10x10).
            self._apple_image = transform.scale(
                image.load('apple.png'), (self.unit, self.unit))
        environment.blit(
            self._apple_image,
            (self.apple_position[0], self.apple_position[1], self.unit, self.unit))
        return environment

    def draw_boundary(self, environment, color):
        """Draws the one-cell-thick boundary around the board.
        Args:
            environment (object): pygame surface to draw on
            color (tuple): RGB values of colour
        """
        unit = self.unit
        # Top and bottom rows.
        for w in range(0, self.display_width, self.unit):
            draw.rect(environment, color, (w, 0, unit, unit))
            draw.rect(environment, color,
                      (w, self.display_height - unit, unit, unit))
        # Left and right columns.
        for h in range(0, self.display_height, self.unit):
            draw.rect(environment, color, (0, h, unit, unit))
            draw.rect(environment, color,
                      (self.display_width - unit, h, unit, unit))

    def create(self, environment, color):
        """Clear the surface to the background colour and draw the boundary.
        Args:
            environment (object): pygame surface to draw on
            color (tuple): RGB values of the boundary colour
        Returns:
            environment: the surface after it has been redrawn
        """
        environment.fill((200, 200, 200))
        self.draw_boundary(environment, color)
        return environment

    def create_new_apple(self, snake_position):
        """Creates new apple, ensuring it does not appear on the snake's body.
        Args:
            snake_position (list): List of the snake body coordinates
        Returns:
            tuple: Coordinates of the new apple position
        """
        unit = self.unit
        # Integer division is required: random.randint rejects float bounds
        # on modern Python (display_width / unit is a float).
        max_col = self.display_width // unit - 2
        max_row = self.display_height // unit - 2
        apple_position = (unit * random.randint(2, max_col),
                          unit * random.randint(2, max_row))
        # Re-roll until the apple does not overlap any body cell.
        while apple_position in snake_position:
            apple_position = (unit * random.randint(2, max_col),
                              unit * random.randint(2, max_row))
        self.apple_position = apple_position
        return self.apple_position
class snake:
    """A snake with a neural-net brain: tracks its body cells, heading,
    collision flags, and exposes AI and human movement APIs."""

    def __init__(self, display_width, display_height, NN_shape, unit, init_NN=True, random_start=True):
        """Initializes an object of type snake
        Args:
            display_width (int): Width of display in pixels.
            display_height (int): Height of display in pixels.
            NN_shape (list): Shape of neural network architecture
            unit (int): Size of each block in pixels.
            init_NN (bool, optional): Initialize neural network with random weights. Defaults to True.
            random_start (bool, optional): Start the snake position randomly or at predefined location. Defaults to True.
        """
        self.snake_position = []       # (x, y) body cells, head first
        self.display_width = display_width
        self.display_height = display_height
        self.time_since_apple = 0      # moves since last apple (loop detection)
        self.collision_with_boundary = False
        self.collision_with_self = False
        self.unit = unit
        self.neuralnet = NeuralNet(
            NN_shape, self.display_width, self.display_height, self.unit, init_NN)
        self.snake_position.append(self.initSnake(random_start))

    def initSnake(self, random_start):
        """Set the start position and direction of snake
        Args:
            random_start (bool): Whether the snake should start at a random cell
        Returns:
            tuple: X and Y coordinates of snake head (starting position)
        """
        if random_start:
            self.direction = random.choice(['RIGHT', 'UP', 'DOWN', 'LEFT'])
            # Integer division is required: random.randint rejects float
            # bounds on modern Python (display_width / unit is a float).
            self.head_x = random.randint(
                3, self.display_width // self.unit - 3) * self.unit
            self.head_y = random.randint(
                3, self.display_height // self.unit - 3) * self.unit
        else:
            self.direction = 'RIGHT'
            self.head_x, self.head_y = 40, 40
        return (self.head_x, self.head_y)

    def isAlive(self):
        """Check if snake is alive
        Returns:
            bool: True if alive, False otherwise
        """
        if not self.collision_with_self and not self.collision_with_boundary:
            return True
        return False

    def eatApple(self, direction):
        """Add the head location to the body (grow by one) and then move.
        Args:
            direction (int): AI move code to apply after eating the apple
        """
        self.snake_position.insert(0, (self.head_x, self.head_y))
        self.move(direction)

    def eatAppleHuman(self, direction):
        """Eat-apple method for the human player instead of the AI.
        Args:
            direction (int): Key-derived move code to apply after eating
        """
        self.snake_position.insert(0, (self.head_x, self.head_y))
        self.moveHuman(direction)

    def moveInDirection(self, direction):
        """Move the snake head one cell in an absolute direction.
        Args:
            direction (str): One of 'UP', 'DOWN', 'LEFT', 'RIGHT'
        """
        if direction == 'UP':
            self.head_y = self.head_y - self.unit
        elif direction == 'DOWN':
            self.head_y = self.head_y + self.unit
        elif direction == 'LEFT':
            self.head_x = self.head_x - self.unit
        else:
            self.head_x = self.head_x + self.unit
        self.direction = direction
        # Advance the body: new head cell in front, tail cell dropped.
        self.snake_position.insert(0, (self.head_x, self.head_y))
        self.snake_position.pop()
        self.check_valid()

    def check_valid(self):
        """Set the collision flags if the head reached a lethal cell or the body.
        """
        # NOTE(review): kills the snake at one unit inside each edge, i.e. a
        # one-cell margin next to the drawn boundary -- confirm intended.
        if self.head_x == self.unit or self.head_x == self.display_width - self.unit or self.head_y == self.unit or self.head_y == self.display_height - self.unit:
            self.collision_with_boundary = True
        for (body_x, body_y) in self.snake_position[1:]:
            if body_x == self.head_x and body_y == self.head_y:
                self.collision_with_self = True

    def move(self, result):
        """Move the snake using the AI's relative move code.
        Args:
            result (int): 1 = keep straight, 2 = turn left, 3 = turn right
        Returns:
            bool: Whether the snake is still alive after the move
        """
        if self.direction == 'UP':
            if result == 1:
                self.moveInDirection('UP')
            elif result == 2:
                self.moveInDirection('LEFT')
            else:
                self.moveInDirection('RIGHT')
        elif self.direction == 'RIGHT':
            if result == 1:
                self.moveInDirection('RIGHT')
            elif result == 2:
                self.moveInDirection('UP')
            else:
                self.moveInDirection('DOWN')
        elif self.direction == 'DOWN':
            if result == 1:
                self.moveInDirection('DOWN')
            elif result == 2:
                self.moveInDirection('RIGHT')
            else:
                self.moveInDirection('LEFT')
        else:
            if result == 1:
                self.moveInDirection('LEFT')
            elif result == 2:
                self.moveInDirection('DOWN')
            else:
                self.moveInDirection('UP')
        self.time_since_apple += 1
        return self.isAlive()

    def moveHuman(self, result):
        """Move the snake using the human player's key code
        (1 = UP, 2 = LEFT, 3 = RIGHT, 4 = DOWN); reversing into the body
        is filtered out, in which case the snake does not move this tick.
        Args:
            result (int): Direction code chosen by the player
        Returns:
            bool: Whether the snake is still alive after the move
        """
        if self.direction == 'UP':
            if result == 1:
                self.moveInDirection('UP')
            elif result == 2:
                self.moveInDirection('LEFT')
            elif result == 3:
                self.moveInDirection('RIGHT')
        elif self.direction == 'RIGHT':
            if result == 1:
                self.moveInDirection('UP')
            elif result == 3:
                self.moveInDirection('RIGHT')
            elif result == 4:
                self.moveInDirection('DOWN')
        elif self.direction == 'DOWN':
            if result == 2:
                self.moveInDirection('LEFT')
            elif result == 3:
                self.moveInDirection('RIGHT')
            elif result == 4:
                self.moveInDirection('DOWN')
        elif self.direction == 'LEFT':
            if result == 1:
                self.moveInDirection('UP')
            elif result == 2:
                self.moveInDirection('LEFT')
            elif result == 4:
                self.moveInDirection('DOWN')
        elif result!=0:
            # NOTE(review): unreachable in practice -- self.direction is always
            # one of the four strings above; kept for behaviour parity.
            self.moveInDirection(self.direction)
        return self.isAlive()

    def convAIToDirections(self, result):
        """Convert the AI's relative move code into an absolute direction.
        Args:
            result (int): 1 = keep straight, 2 = turn left, 3 = turn right
        Returns:
            str: Absolute direction ('UP' / 'DOWN' / 'LEFT' / 'RIGHT')
        """
        if self.direction == 'UP':
            if result == 1:
                return 'UP'
            elif result == 2:
                return 'LEFT'
            else:
                return 'RIGHT'
        elif self.direction == 'RIGHT':
            if result == 1:
                return 'RIGHT'
            elif result == 2:
                return 'UP'
            else:
                return 'DOWN'
        elif self.direction == 'DOWN':
            if result == 1:
                return 'DOWN'
            elif result == 2:
                return 'RIGHT'
            else:
                return 'LEFT'
        else:
            if result == 1:
                return 'LEFT'
            elif result == 2:
                return 'DOWN'
            else:
                return 'UP'

    def draw_snake(self, environment, color, color_head):
        """Draws the snake on the environment
        Args:
            environment (object): pygame surface to draw on
            color (tuple): RGB values of the body colour
            color_head (tuple): RGB values of the head colour
        Returns:
            environment: the surface after the snake has been drawn
        """
        l = self.unit
        # Each cell is an outline rect plus a slightly inset filled rect.
        for (x, y) in self.snake_position[1:]:
            draw.rect(environment, color, (x, y, l, l), 1)
            draw.rect(environment, color, (x+2, y+2, l-4, l-4))
        draw.rect(environment, color_head, (self.head_x, self.head_y, l, l), 1)
        draw.rect(environment, color_head,
                  (self.head_x+2, self.head_y+2, l-4, l-4))
        return environment
|
{"/PlayGameAI.py": ["/snake.py", "/nn.py", "/params.py"], "/ga.py": ["/snake.py", "/params.py"], "/snake.py": ["/nn.py"], "/PlayGameHuman.py": ["/snake.py", "/nn.py", "/params.py"]}
|
3,644
|
ashaychangwani/AILearnsSnake
|
refs/heads/master
|
/nn.py
|
import numpy as np
import random
class NeuralNet:
    """Feed-forward network that turns 8-direction sensor readings into one
    of three relative moves (1 = straight, 2 = turn left, 3 = turn right)."""

    def __init__(self, NN_shape, display_width, display_height, unit, init_NN=True):
        """Initializes a class of type NeuralNet.
        Args:
            NN_shape (list): Shape of the neural network architecture
            display_width (int): Width of display in pixels
            display_height (int): Height of display in pixels
            unit (int): Size of each unit
            init_NN (bool, optional): Whether the neural network should be initialized with random weights. Defaults to True.
        """
        self.display_width = display_width
        self.display_height = display_height
        self.unit = unit
        self.apple_position = ()
        self.theta = []   # per-layer weight matrices
        self.bias = []    # per-layer (1, n) bias rows
        if init_NN:
            self.initialize_weights(NN_shape)

    def sigmoid(self, mat):
        """Performs sigmoid operation
        Args:
            mat (matrix): Input matrix
        Returns:
            [matrix]: result which is sigmoid(matrix)
        """
        return 1.0 / (1.0 + np.exp(-mat))

    def relu(self, mat):
        """Performs ReLU operation
        Args:
            mat (matrix): Input matrix
        Returns:
            [matrix]: result which is ReLU(matrix)
        """
        return mat * (mat > 0)

    def softmax(self, mat):
        """Performs Softmax operation
        Args:
            mat (matrix): Input matrix
        Returns:
            [matrix]: result which is softmax(matrix)
        """
        # Subtract the max for numerical stability before exponentiating.
        mat = mat - np.max(mat)
        return np.exp(mat) / np.sum(np.exp(mat), axis=1)

    def setNextFood(self, apple_position):
        """Sets the next location for the apple
        Args:
            apple_position (tuple): X and Y coordinates of the apple
        """
        self.apple_position = apple_position

    def appleSense(self, x, y, dX, dY, foodX, foodY):
        """Check if apple is present along current direction
        Args:
            x (int): X coordinate of snake head
            y (int): Y coordinate of snake head
            dX (int): Direction of movement of snake in x-direction
            dY (int): Direction of movement of snake in y-direction
            foodX (int): X coordinate of food
            foodY (int): Y coordinate of food
        Returns:
            int: 1 if food is present along the ray, else 0
        """
        # dX and dY are never both zero for the 8 sensed directions, so the
        # divisions below cannot divide by zero.
        if dX == 0:
            if foodX - x == 0 and (foodY - y)/dY > 0:
                return 1
        elif dY == 0:
            if foodY - y == 0 and (foodX - x)/dX > 0:
                return 1
        else:
            if (foodX - x)/dX == (foodY - y)/dY and (foodY - y)/dY > 0:
                return 1
        return 0

    def bodyCalculation(self, x, y, dX, dY, x2, y2):
        """Distance (in steps of (dX, dY)) from the head to a body cell, if the
        cell lies on that ray.
        Args:
            x (int): X coordinate of snake head
            y (int): Y coordinate of snake head
            dX (int): Direction of movement of snake in x-direction
            dY (int): Direction of movement of snake in y-direction
            x2 (int): X coordinate of the body cell being tested
            y2 (int): Y coordinate of the body cell being tested
        Returns:
            float: Step distance along the ray, or 10000 if not on the ray.
        """
        if dX == 0:
            if x2 - x == 0 and (y2 - y)/dY > 0:
                return (y2 - y)/dY
        elif dY == 0:
            if y2 - y == 0 and (x2 - x)/dX > 0:
                return (x2 - x)/dX
        else:
            if (x2 - x)/dX == (y2 - y)/dY and (y2 - y)/dY > 0:
                return (x2 - x)/dX
        # Sentinel meaning "no body cell on this ray".
        return 10000

    def bodySense(self, x, y, dX, dY, snake_position):
        """Check if any part of the body of the snake exists along chosen
        direction
        Args:
            x (int): X coordinate of snake head
            y (int): Y coordinate of snake head
            dX (int): Direction of movement of snake in x-direction
            dY (int): Direction of movement of snake in y-direction
            snake_position (list): List of the body cells of the snake
        Returns:
            float: Inverse distance to the closest body cell on the ray
            (0 when no body cell is on the ray)
        """
        minDist = 10000
        for (body) in snake_position[1:]:
            minDist = min(minDist, self.bodyCalculation(x, y, dX, dY, body[0], body[1]))
        if minDist == 10000:
            return 0
        return 1/minDist

    def sense_in_direction(self, x, y, dX, dY, foodX, foodY, snake_position):
        """Sense for apple and body parts in the selected direction
        Args:
            x (int): X coordinate of snake head
            y (int): Y coordinate of snake head
            dX (int): Direction of movement of snake in x-direction
            dY (int): Direction of movement of snake in y-direction
            foodX (int): X coordinate of apple
            foodY (int): Y coordinate of apple
            snake_position (list): List of the body cells of the snake
        Returns:
            list: [apple signal, body signal] for this direction
        """
        input = [0, 0]
        input[0] = self.appleSense(x, y, dX, dY, foodX, foodY)
        input[1] = self.bodySense(x, y, dX, dY, snake_position)
        return input

    def checkForZero(self, x):
        """Replace 0 with 1 to avoid division-by-zero in the wall-distance terms.
        Args:
            x (int): Input value
        Returns:
            int: x, or 1 if x was 0
        """
        if x == 0:
            return 1
        return x

    def make_input(self, x, y, foodX, foodY, snake_position, direction):
        """Sense in all 8 directions and build the 24-value network input
        (apple signal, body signal, inverse wall distance per direction).
        Args:
            x (int): x coordinate of snake head
            y (int): y coordinate of snake head
            foodX (int): x coordinate of food
            foodY (int): y coordinate of food
            snake_position (list): List of coordinates of snake's body
            direction (str): current heading of the snake
        Returns:
            list: 24 values -- 3 inputs for each of the 8 directions
        """
        input = []
        # Directions in clockwise order starting from UP (screen y grows down).
        input.extend(self.sense_in_direction(x, y, 0, -self.unit, foodX,foodY, snake_position))
        input.extend([self.unit/self.checkForZero((y-self.unit))])
        input.extend(self.sense_in_direction(x, y, self.unit, -self.unit, foodX, foodY, snake_position))
        input.extend([self.unit/self.checkForZero(min(y - self.unit, self.display_width - self.unit - x))])
        input.extend(self.sense_in_direction(x, y, self.unit, 0, foodX, foodY, snake_position))
        input.extend([self.unit/self.checkForZero((self.display_width - self.unit - x))])
        input.extend(self.sense_in_direction(x, y, self.unit,self.unit, foodX, foodY, snake_position))
        input.extend([self.unit/self.checkForZero(min(self.display_height - self.unit -y, self.display_width - self.unit - x))])
        input.extend(self.sense_in_direction(x, y, 0, self.unit, foodX, foodY, snake_position))
        input.extend([self.unit/self.checkForZero((self.display_height - self.unit -y))])
        input.extend(self.sense_in_direction(x, y, -self.unit,self.unit, foodX, foodY, snake_position))
        input.extend([self.unit/self.checkForZero(min(x - self.unit, self.display_height - self.unit -y))])
        input.extend(self.sense_in_direction(x, y, -self.unit, 0, foodX, foodY, snake_position))
        input.extend([self.unit/self.checkForZero((x - self.unit))])
        input.extend(self.sense_in_direction(x, y, -self.unit, -self.unit, foodX, foodY, snake_position))
        input.extend([self.unit/self.checkForZero(min((y-self.unit), (x - self.unit)))])
        # Rotate the readings so the triple at index 0 corresponds to the
        # snake's current heading (heading-relative input).
        if(direction == 'RIGHT'):
            input = input[6:] + input[:6]
        elif (direction == 'DOWN'):
            input = input[12:] + input[:12]
        elif (direction == 'LEFT'):
            input = input[18:] + input[:18]
        return input

    def initialize_weights(self, NN_shape):
        """Initialize weights of the neural network with uniform random values.
        Args:
            NN_shape (list): Shape of the neural network
        """
        for i in range(len(NN_shape)-1):
            theta = np.random.uniform(-0.5, 0.5,
                                      (NN_shape[i], NN_shape[i+1]))
            self.theta.append(theta)
            bias = np.random.uniform(-0.1, 0.1, (1, NN_shape[i+1]))
            self.bias.append(bias)

    def decision(self, x, y, snake_position, direction):
        """Run the sensed inputs through the network to choose the next move.
        Args:
            x (int): X coordinate of snake head
            y (int): Y Coordinate of snake head
            snake_position (list): List of the coordinates of snake's body
            direction (str): String representing the previous direction
        Returns:
            int: Chosen move (1 = straight, 2 = turn left, 3 = turn right)
        """
        foodX, foodY = self.apple_position
        input = self.make_input(x, y, foodX, foodY, snake_position, direction)
        input = np.array(input)
        output = input
        # Hidden layers use ReLU activations.
        for i in range(len(self.theta) - 1):
            output = self.relu(np.dot(output, self.theta[i]) + self.bias[i])
        # Output layer uses softmax.  Indexing with -1 (instead of the loop
        # variable i+1) fixes a NameError for single-layer networks, where the
        # loop body never runs and `i` would be undefined.
        output = self.softmax(np.dot(output, self.theta[-1]) + self.bias[-1])
        result = np.argmax(output) + 1
        return result
|
{"/PlayGameAI.py": ["/snake.py", "/nn.py", "/params.py"], "/ga.py": ["/snake.py", "/params.py"], "/snake.py": ["/nn.py"], "/PlayGameHuman.py": ["/snake.py", "/nn.py", "/params.py"]}
|
3,645
|
ashaychangwani/AILearnsSnake
|
refs/heads/master
|
/PlayGameHuman.py
|
# Human-playable snake: the player steers with the arrow keys while the best
# evolved neural-network snake suggests its move in the window caption.
import pygame
import pickle
from snake import Environment, snake
from nn import NeuralNet
import time
import copy
from params import *

# Load the pickled generations and take the top snake of the last generation
# as the AI advisor.
file = open('saved/test.pickle', "rb")
snake_generations = pickle.load(file)
file.close()
best_snake = snake_generations[len(snake_generations)-1][0]

clock = pygame.time.Clock()
pygame.init()
pygame.font.init()
myfont = pygame.font.SysFont('Bitstream Vera Serif', 20)
screen = pygame.display.set_mode((display_width, display_height))

# Fresh board and human-controlled snake; the network weights are cleared
# because the human does not use the network to move.
environment = Environment(display_height, display_width, unit)
player = snake(display_width, display_height, NN_shape, unit, False)
player.neuralnet.theta = []
player.neuralnet.bias = []
player.neuralnet.setNextFood(
    environment.create_new_apple(player.snake_position))

# Initial draw of board, apple and snake.
screen = environment.create(screen, gray)
screen = environment.draw_apple(screen, pink)
screen = player.draw_snake(screen, blue, cherry)
pygame.display.update()

score = 0
# Last direction code chosen by the player.
# NOTE(review): mapping appears to be 1=up, 2=left, 3=right, 4=down --
# confirm against snake.moveHuman/convAIToDirections.
decision = 0
while(player.isAlive()):
    # Translate arrow-key presses into a direction code.
    for event in pygame.event.get():
        if event.type == pygame.KEYDOWN:
            if event.key == pygame.K_LEFT:
                decision = 2
            elif event.key == pygame.K_RIGHT:
                decision = 3
            elif event.key == pygame.K_DOWN:
                decision = 4
            else:
                decision = 1
        if event.type == pygame.QUIT:
            pygame.quit()
            quit()
    # Eating the apple grows the snake and spawns a new one.
    if (player.head_x, player.head_y) == environment.apple_position:
        player.eatAppleHuman(decision)
        player.neuralnet.setNextFood(environment.create_new_apple(player.snake_position))
        score+=1
    player.moveHuman(decision)
    # Redraw this frame.
    screen = environment.create(screen, gray)
    screen = environment.draw_apple(screen, pink)
    screen = player.draw_snake(screen, blue, cherry)
    # Ask the best evolved network what it would do from the same state.
    prediction = player.convAIToDirections(best_snake.neuralnet.decision(
        player.head_x, player.head_y, player.snake_position, player.direction))
    pygame.display.set_caption('Score: '+str(score)+'\t\tAI recommends moving '+prediction)
    pygame.display.update()
    clock.tick(6)  # cap at 6 frames per second

# Game over: show the final score for two seconds, then exit.
pygame.display.update()
largeText=pygame.font.Font('freesansbold.ttf',30)
TextSurf=largeText.render(str("Your final score is "+str(score)),True,pink)
TextRect=TextSurf.get_rect()
TextRect.center=((display_width/2),(display_height/2))
screen.blit(TextSurf,TextRect)
pygame.display.update()
time.sleep(2)
pygame.quit()
|
{"/PlayGameAI.py": ["/snake.py", "/nn.py", "/params.py"], "/ga.py": ["/snake.py", "/params.py"], "/snake.py": ["/nn.py"], "/PlayGameHuman.py": ["/snake.py", "/nn.py", "/params.py"]}
|
3,647
|
hdodenhof/NuimoSonosController
|
refs/heads/master
|
/sonos.py
|
import logging
import threading
from Queue import Empty
import soco
from soco.events import event_listener
class SonosAPI:
    """Thin wrapper around the local Sonos group's coordinator speaker.

    Transport commands (play/pause/next/prev) go to the group coordinator;
    volume changes are fanned out to every discovered player.
    """

    STATE_PLAYING = 'PLAYING'
    STATE_PAUSED = 'PAUSED_PLAYBACK'
    STATE_TRANSITIONING = 'TRANSITIONING'

    def __init__(self):
        self.players = soco.discover()
        # Remember the group coordinator -- the player that accepts
        # transport commands for the whole group.
        for candidate in self.players:
            if candidate.is_coordinator:
                self.coordinator = candidate
        self.state = 'UNKNOWN'
        self.eventReceiver = EventReceiver(self.coordinator, self._on_state_change)
        self.eventReceiver.start()

    def _on_state_change(self, new_state):
        logging.debug("New transport state: {}".format(new_state))
        # Transitional states are noise; keep the last stable state.
        if new_state == self.STATE_TRANSITIONING:
            return
        self.state = new_state

    def disconnect(self):
        self.eventReceiver.stop()

    def is_playing(self):
        return self.state == self.STATE_PLAYING

    def get_volume(self):
        return self.coordinator.volume

    def play(self):
        self.coordinator.play()

    def pause(self):
        self.coordinator.pause()

    def next(self):
        self.coordinator.next()

    def prev(self):
        self.coordinator.previous()

    def vol_up(self, value):
        self._set_volume(self.coordinator.volume + value)

    def vol_down(self, value):
        self._set_volume(self.coordinator.volume - value)

    def _set_volume(self, value):
        # Apply to every player so grouped speakers stay in sync.
        for speaker in self.players:
            speaker.volume = value
class EventReceiver(threading.Thread):
    """Background thread forwarding AVTransport state events to a callback."""

    def __init__(self, coordinator, state_callback):
        super(EventReceiver, self).__init__()
        self.subscription = coordinator.avTransport.subscribe()
        self.state_callback = state_callback
        self.terminate = False

    def run(self):
        # Poll the subscription queue until stop() flips the terminate flag.
        while not self.terminate:
            try:
                event = self.subscription.events.get(timeout=0.5)
            except Empty:
                continue
            self.state_callback(event.transport_state)
        # Clean shutdown: drop the subscription and stop the global listener.
        self.subscription.unsubscribe()
        event_listener.stop()

    def stop(self):
        self.terminate = True
|
{"/controller.py": ["/nuimo.py", "/sonos.py"], "/nuimo.py": ["/gatt.py"]}
|
3,648
|
hdodenhof/NuimoSonosController
|
refs/heads/master
|
/controller.py
|
#!/usr/bin/python
from __future__ import division
import logging
import math
import signal
import sys
import time
from threading import Timer
import led_configs
from nuimo import Nuimo, NuimoDelegate
from sonos import SonosAPI
nuimo_sonos_controller = None
class NuimoSonosController(NuimoDelegate):
    """Maps Nuimo gestures to Sonos commands.

    Button press toggles play/pause, swipe and fly gestures skip tracks,
    and wheel rotation adjusts the volume while a 7-level indicator is
    shown on the Nuimo's LED matrix.
    """

    def __init__(self, bled_com, nuimo_mac):
        """bled_com: BLED112 serial port; nuimo_mac: Nuimo MAC address."""
        NuimoDelegate.__init__(self)
        self.nuimo = Nuimo(bled_com, nuimo_mac, self)
        self.sonos = SonosAPI()
        self.default_led_timeout = 3   # seconds an LED matrix stays lit
        self.max_volume = 42           # should be dividable by 7
        self.volume_bucket_size = int(self.max_volume / 7)
        self.last_vol_matrix = None    # last volume matrix shown (dedup)
        self.vol_reset_timer = None    # pending Timer clearing the dedup
        self.stop_pending = False

    def start(self):
        """Connect to the Nuimo and block until stop() is requested."""
        self.nuimo.connect()
        while not self.stop_pending:
            time.sleep(0.1)
        self.sonos.disconnect()
        self.nuimo.disconnect()
        self.nuimo.terminate()

    def stop(self):
        self.stop_pending = True

    def on_button(self):
        # Toggle playback and mirror the action on the LED matrix.
        if self.sonos.is_playing():
            self.sonos.pause()
            self.nuimo.display_led_matrix(led_configs.pause, self.default_led_timeout)
        else:
            self.sonos.play()
            self.nuimo.display_led_matrix(led_configs.play, self.default_led_timeout)

    def on_swipe_right(self):
        self.sonos.next()
        self.nuimo.display_led_matrix(led_configs.next, self.default_led_timeout)

    def on_swipe_left(self):
        self.sonos.prev()
        self.nuimo.display_led_matrix(led_configs.previous, self.default_led_timeout)

    def on_fly_right(self):
        self.on_swipe_right()

    def on_fly_left(self):
        self.on_swipe_left()

    def on_wheel_right(self, value):
        self.sonos.vol_up(self._calculate_volume_delta(value))
        self._show_volume()

    def on_wheel_left(self, value):
        self.sonos.vol_down(self._calculate_volume_delta(value))
        self._show_volume()

    def on_connect(self):
        self.nuimo.display_led_matrix(led_configs.default, self.default_led_timeout)

    def _calculate_volume_delta(self, value):
        # Map rotation magnitude to a volume step, capped at 5
        # (true division -- this module imports __future__ division).
        return min(value / 20 + 1, 5)

    def _show_volume(self):
        """Show the current volume as one of the 7 LED bucket matrices."""
        volume = self.sonos.get_volume()
        if volume is None: volume = 0
        bucket = min(int(math.ceil(volume / self.volume_bucket_size)), 7)
        matrix = getattr(led_configs, 'vol' + str(bucket))
        if matrix != self.last_vol_matrix:
            self.last_vol_matrix = matrix
            self.nuimo.display_led_matrix(matrix, self.default_led_timeout)
        if self.vol_reset_timer is not None:
            self.vol_reset_timer.cancel()
        # BUG FIX: Timer.start() returns None, so the original
        # `self.vol_reset_timer = Timer(...).start()` always stored None and
        # the cancel() above never fired, leaking overlapping timers.
        # Keep the Timer object so it can be cancelled on the next update.
        self.vol_reset_timer = Timer(self.default_led_timeout + 1, self._reset_vol)
        self.vol_reset_timer.start()

    def _reset_vol(self):
        # Forget the deduped matrix so the next wheel turn redraws it.
        self.last_vol_matrix = None
        self.vol_reset_timer = None
def signal_term_handler(signal, frame):
    """SIGTERM handler: log the signal and request a clean shutdown."""
    logging.info('Received SIGTERM signal!')
    nuimo_sonos_controller.stop()
def signal_int_handler(signal, frame):
    """SIGINT (Ctrl-C) handler: log the signal and request a clean shutdown."""
    logging.info('Received SIGINT signal. This makes Panda sad! :(')
    nuimo_sonos_controller.stop()
if __name__ == "__main__":
    # Bare-message logging to stdout; the handlers below translate
    # SIGTERM/SIGINT into a clean controller shutdown.
    logging.basicConfig(level=logging.INFO,
                        format='%(message)s')
    signal.signal(signal.SIGTERM, signal_term_handler)
    signal.signal(signal.SIGINT, signal_int_handler)
    # Usage: controller.py <bled112-serial-port> <nuimo-mac>
    if len(sys.argv) != 3:
        raise RuntimeError('Invalid number of arguments')
    com = sys.argv[1]
    mac = sys.argv[2]
    nuimo_sonos_controller = NuimoSonosController(com, mac)
    nuimo_sonos_controller.start()  # blocks until a signal requests stop()
|
{"/controller.py": ["/nuimo.py", "/sonos.py"], "/nuimo.py": ["/gatt.py"]}
|
3,649
|
hdodenhof/NuimoSonosController
|
refs/heads/master
|
/nuimo.py
|
from __future__ import division
import threading
from bled112 import Bled112Com
from gatt import BleManager, BleRemoteTimeout, BleLocalTimeout
import logging
import time
# GATT services of interest on the Nuimo.
SERVICE_UUIDS = [
    '180f',                                  # Battery
    'f29b1525-cb19-40f3-be5c-7241ecb82fd2',  # Sensors
    'f29b1523-cb19-40f3-be5c-7241ecb82fd1'   # LED Matrix
]
# Characteristic UUID -> symbolic name used for the handle lookup table
# built in Nuimo._discover_characteristics.
CHARACTERISTIC_UUIDS = {
    '2a19': 'BATTERY',
    'f29b1529-cb19-40f3-be5c-7241ecb82fd2': 'BUTTON',
    'f29b1528-cb19-40f3-be5c-7241ecb82fd2': 'ROTATION',
    'f29b1527-cb19-40f3-be5c-7241ecb82fd2': 'SWIPE',
    'f29b1526-cb19-40f3-be5c-7241ecb82fd2': 'FLY',
    'f29b1524-cb19-40f3-be5c-7241ecb82fd1': 'LED_MATRIX'
}
# Characteristics whose notifications get enabled in _setup_notifications().
NOTIFICATION_CHARACTERISTIC_UUIDS = [
    'BATTERY',
    'BUTTON',
    'ROTATION',
    'SWIPE',
    'FLY'
]
class Nuimo:
    """Driver for a Senic Nuimo controller behind a BLED112 BLE dongle.

    Connects, discovers the characteristic handles listed above, enables
    notifications, decodes incoming sensor events and forwards them to
    `delegate` (a NuimoDelegate) via the MessageHandler thread.

    NOTE(review): this module uses Python 2 constructs (dict.iteritems,
    builtin reduce) -- it targets Python 2; confirm before porting.
    """
    def __init__(self, com, address, delegate):
        self.com = com            # serial port of the BLED112 dongle
        self.address = address    # Nuimo MAC address string
        self.delegate = delegate  # NuimoDelegate receiving the callbacks
        self.bled112 = None
        self.ble = None
        self.characteristics_handles = {}  # symbolic name -> attribute handle
        self.message_handler = MessageHandler()
        self.message_handler.start()
    def connect(self):
        """Open the dongle and retry connecting until it succeeds."""
        self.bled112 = Bled112Com(self.com)
        self.bled112.start()
        self.ble = BleManager(self.bled112, self.address, self)
        while not self.ble.isConnected():
            try:
                self.ble.connect()
                self._discover_characteristics()
                self._setup_notifications()
                self.delegate.on_connect()
            except (BleRemoteTimeout, BleLocalTimeout):
                # Device out of range or asleep: back off and retry.
                time.sleep(5)
    def disconnect(self):
        # Reset the dongle (drops the link) and close the serial port.
        self.bled112.reset()
        self.bled112.close()
    def terminate(self):
        # Stop the callback-dispatch thread.
        self.message_handler.terminate()
    def _discover_characteristics(self):
        """Build the name -> handle table for the characteristics we use."""
        logging.debug("Reading service groups")
        groups = self.ble.readAll()
        handles = {}
        for group in groups.values():
            if group.uuid not in SERVICE_UUIDS:
                continue
            group_handles = self.ble.findInformation(group.start, group.end)
            for uuid, handle in group_handles.iteritems():
                if uuid not in CHARACTERISTIC_UUIDS:
                    continue
                logging.debug("Found handle {} for {}".format(handle, uuid))
                handles[uuid] = handle
        # NOTE(review): raises KeyError if any expected characteristic was
        # not discovered -- presumably treated as a fatal condition; confirm.
        self.characteristics_handles = dict((name, handles[uuid]) for uuid, name in CHARACTERISTIC_UUIDS.items())
    def _setup_notifications(self):
        # Writing to handle + 1 with notify=True; assumes the descriptor
        # directly follows the value handle -- TODO confirm for this device.
        for name in NOTIFICATION_CHARACTERISTIC_UUIDS:
            logging.debug("Setup notifications for {}".format(name))
            self.ble.configClientCharacteristic(self.characteristics_handles[name] + 1, notify=True)
    def display_led_matrix(self, matrix, timeout):
        """Show `matrix` (81-char string, one char per LED; chars other than
        ' '/'0' mean ON) on the Nuimo for `timeout` seconds.

        The 81 flags are packed LSB-first into bytes of 8 LEDs each; the
        payload ends with brightness (fixed 255) and timeout in tenths of
        a second.
        """
        try:
            matrix = '{:<81}'.format(matrix[:81])
            bytes = list(map(lambda leds: reduce(lambda acc, led: acc + (1 << led if leds[led] not in [' ', '0'] else 0), range(0, len(leds)), 0), [matrix[i:i+8] for i in range(0, len(matrix), 8)]))
            self.ble.writeAttributeByHandle(self.characteristics_handles['LED_MATRIX'], [bytes[0], bytes[1], bytes[2], bytes[3], bytes[4], bytes[5], bytes[6], bytes[7], bytes[8], bytes[9], bytes[10], max(0, min(255, int(255.0 * 1))), max(0, min(255, int(timeout * 10.0)))], False)
        except Exception as e:
            # Display errors are non-fatal; log and carry on.
            logging.exception(e)
    def on_message(self, message):
        """Decode a notification by handle and queue the delegate callback."""
        # NOTE(review): the BATTERY branch uses `if`, not `elif`, unlike the
        # rest -- presumably harmless since handles are distinct; confirm.
        if message.attHandle == self.characteristics_handles['BATTERY']:
            logging.debug('Battery state')
            # Raw 0-255 value scaled to percent.
            level = int(message.data[0] / 255 * 100)
            MessageHandler.queue((self.delegate.on_battery_state, level))
        if message.attHandle == self.characteristics_handles['BUTTON']:
            if (message.data[0] == 1):
                logging.debug('Button pressed')
                MessageHandler.queue(self.delegate.on_button)
            else:
                logging.debug('Button released')
        elif message.attHandle == self.characteristics_handles['SWIPE']:
            if (message.data[0] == 0):
                logging.debug('Swipe left')
                MessageHandler.queue(self.delegate.on_swipe_left)
            elif (message.data[0] == 1):
                logging.debug('Swipe right')
                MessageHandler.queue(self.delegate.on_swipe_right)
            elif (message.data[0] == 2):
                # Up/down swipes are logged but not forwarded.
                logging.debug('Swipe up')
            else:
                logging.debug('Swipe down')
        elif message.attHandle == self.characteristics_handles['ROTATION']:
            # data[1] == 0 indicates clockwise rotation; counter-clockwise
            # values arrive as 255 - magnitude.
            if (message.data[1] == 0):
                value = message.data[0]
                logging.debug('Wheel right, value: {}'.format(value))
                MessageHandler.queue((self.delegate.on_wheel_right, value))
            else:
                value = 255 - message.data[0]
                logging.debug('Wheel left, value: {}'.format(value))
                MessageHandler.queue((self.delegate.on_wheel_left, value))
        elif message.attHandle == self.characteristics_handles['FLY']:
            if (message.data[0] == 0):
                logging.debug('Fly left')
                MessageHandler.queue(self.delegate.on_fly_left)
            elif (message.data[0] == 1):
                logging.debug('Fly right')
                MessageHandler.queue(self.delegate.on_fly_right)
            elif (message.data[0] == 2):
                logging.debug('Fly towards')
                MessageHandler.queue(self.delegate.on_fly_towards)
            elif (message.data[0] == 3):
                logging.debug('Fly backwards')
                MessageHandler.queue(self.delegate.on_fly_backwards)
            else:
                # Vertical fly distance is logged but not forwarded.
                logging.debug('Fly up/down, value {}'.format(message.data[1]))
    def on_disconnect(self):
        # Link dropped: close the port, wait, then retry the whole connect
        # sequence (reopens the dongle).
        self.bled112.close()
        time.sleep(5)
        logging.debug('Reconnecting...')
        self.connect()
class NuimoDelegate:
    """No-op base class for Nuimo event callbacks.

    Subclass and override only the gestures of interest; every callback
    defaults to doing nothing. Called on the MessageHandler thread.
    """
    def __init__(self):
        pass
    def on_connect(self):
        pass
    def on_battery_state(self, value):
        pass
    def on_button(self):
        pass
    def on_swipe_right(self):
        pass
    def on_swipe_left(self):
        pass
    def on_swipe_up(self):
        pass
    def on_swipe_down(self):
        pass
    def on_wheel_right(self, value):
        pass
    def on_wheel_left(self, value):
        pass
    def on_fly_right(self):
        pass
    def on_fly_left(self):
        pass
    def on_fly_towards(self):
        pass
    def on_fly_backwards(self):
        pass
class MessageHandler(threading.Thread):
    """Runs queued delegate callbacks on a dedicated thread.

    Holds at most ONE pending message in the class attribute `next_msg`:
    queue() silently drops new messages while one is pending, so bursts of
    gestures coalesce instead of piling up.

    NOTE(review): next_msg is read and written from two threads without a
    lock -- presumably acceptable for this use case; confirm.
    """
    # Single-slot "queue": either None or the next callback to run.
    next_msg = None
    def __init__(self):
        super(MessageHandler, self).__init__()
        self.stop = False  # set by terminate() to end the run loop
    def run(self):
        while True:
            if self.stop:
                break
            if not MessageHandler.next_msg:
                # Nothing pending; poll again shortly.
                time.sleep(0.01)
                continue
            try:
                msg = MessageHandler.next_msg
                # A tuple means (callable, *args); anything else is a
                # zero-argument callable.
                if isinstance(msg, tuple):
                    func = msg[0]
                    args = msg[1:]
                    func(*args)
                else:
                    msg()
            except Exception as e:
                # Delegate errors must not kill the handler thread.
                logging.exception(e)
            MessageHandler.next_msg = None
    def terminate(self):
        # Ask the run loop to exit at its next iteration.
        self.stop = True
    @staticmethod
    def queue(msg):
        # Drop the new message if one is already pending (coalescing).
        if (MessageHandler.next_msg):
            return
        MessageHandler.next_msg = msg
|
{"/controller.py": ["/nuimo.py", "/sonos.py"], "/nuimo.py": ["/gatt.py"]}
|
3,650
|
hdodenhof/NuimoSonosController
|
refs/heads/master
|
/gatt.py
|
from bled112 import *
# Module-level verbosity flags.
# NOTE(review): neither flag is referenced in this file -- possibly consumed
# by importers or leftover; confirm before removing.
DEBUG = True
INFO = True
def macString(mac):
    """Format a little-endian 6-byte MAC address list as 'AA:BB:CC:DD:EE:FF'.

    The bytes arrive least-significant first (as the BLED112 reports them),
    so they are emitted in reverse index order.
    """
    return ':'.join('%02X' % mac[i] for i in (5, 4, 3, 2, 1, 0))
class Timeout:
    """Simplify timeout interval management."""

    def __init__(self, interval):
        # Record when the interval started so isExpired() can compare later.
        self.start = time.time()
        self.interval = interval

    def isExpired(self):
        """Return True once `interval` seconds have elapsed since creation."""
        elapsed = time.time() - self.start
        return elapsed >= self.interval
# Custom BLED112 exceptions
class BleException(Exception): pass           # base class for all BLE errors in this module
class BleProcedureFailure(BleException): pass  # a GATT procedure did not complete successfully
class BleLocalTimeout(BleException): pass      # the local dongle did not answer within localTimeout
class BleRemoteTimeout(BleException): pass     # the remote device did not answer within remoteTimeout
class BleValueError(BleException): pass        # NOTE(review): not raised in this file -- confirm usage
class BleConnection:
    """State of the (single) BLE link: connection handle plus peer MAC.

    `address` is the MAC as a little-endian byte list (see BleManager's
    reversed parsing); `id` is the connection handle assigned by the dongle,
    None while disconnected.
    """
    def __init__(self, mac=None):
        self.id = None
        self.address = mac
class AttributeGroup:
    """Encapsulate a group of GATT attribute/descriptor handles.

    Attributes:
        uuid -- UUID of the containing characteristic for the group
        start -- first handle in the group
        end -- last handle in the group
    """
    def __init__(self, uuid=None, start=None, end=None):
        self.uuid = uuid
        self.start = start
        self.end = end
class BleManager:
    """Synchronous GATT client on top of the BLED112 serial protocol.

    Commands are sent through `com`; the BLED112 reader thread feeds
    responses/events back via onMessage. waitLocal/waitRemote block the
    caller by polling until the expected message class arrives.
    """
    def __init__(self, com, address, delegate = None):
        # Dispatch table for unsolicited events.
        self.reactions = {
            ConnectionStatusEvent : self.onConnectionStatusEvent,
            ConnectionDisconnectedEvent : self.onConnectionDisconnectedEvent,
            AttClientGroupFoundEvent : self.onAttClientGroupFoundEvent,
            AttClientFindInformationFoundEvent: self.onAttClientFindInformationFoundEvent,
            AttClientAttributeValueEvent : self.onAttClientAttributeValueEvent
        }
        # 'AA:BB:...' string -> little-endian byte list, as the dongle expects.
        mac = [int(i, 16) for i in reversed(address.split(':'))]
        self.connection = BleConnection(mac)
        self.com = com
        self.delegate = delegate
        self.expectedMessage = None
        com.listener = self
        self.localTimeout = 5    # seconds to wait on the dongle itself
        self.remoteTimeout = 10  # seconds to wait on the remote device
    # Called by BLED112 thread
    def onMessage(self, message):
        # Hand the message to a blocked waiter if its type matches,
        # otherwise dispatch it through the reaction table.
        if self.expectedMessage and message.__class__ == self.expectedMessage.__class__:
            self.actualMessage = message
            self.expectedMessage = None
        else:
            reaction = self.reactions.get(message.__class__)
            if reaction: reaction(message)
    def onConnectionDisconnectedEvent(self, message):
        logging.info('Disconnected')
        self.connection.id = None
        if self.delegate is not None: self.delegate.on_disconnect()
    def onConnectionStatusEvent(self, message):
        self.connection.id = message.connection
    def waitForMessage(self, message, timeout):
        """Poll until a message of `message`'s class arrives or `timeout`
        seconds elapse; return the message or None."""
        t = Timeout(timeout)
        self.expectedMessage = message
        self.actualMessage = None
        while self.expectedMessage and not t.isExpired(): time.sleep(0.01)
        return self.actualMessage
    def waitLocal(self, message):
        # Wait for a dongle response; raise on timeout.
        msg = self.waitForMessage(message, self.localTimeout)
        if not msg: raise BleLocalTimeout()
        return msg
    def waitRemote(self, message, timeout=None):
        # Wait for a remote-device event; raise on timeout.
        msg = self.waitForMessage(message, timeout if timeout is not None else self.remoteTimeout)
        if not msg: raise BleRemoteTimeout()
        return msg
    def connect(self):
        """Initiate a direct connection and block until it is established.

        Raises BleLocalTimeout/BleRemoteTimeout on failure.
        """
        logging.info('Connecting to %s...' % macString(self.connection.address))
        self.com.send(ConnectDirectCommand(self.connection.address))
        self.waitLocal(ConnectDirectResponse())
        try:
            msg = self.waitRemote(ConnectionStatusEvent())
        except BleRemoteTimeout:
            logging.error('Failed connecting to %s' % macString(self.connection.address))
            raise
        logging.info('Connected to %s' % macString(self.connection.address))
        self.connection.id = msg.connection
    def writeAttribute(self, uuid, data):
        # NOTE(review): BleConnection defines no handleByUuid in this file --
        # this path would raise AttributeError; possibly dead code. Confirm.
        logging.debug('Write attribute %s = %s' % (uuid, str(data)))
        handle = self.connection.handleByUuid(uuid)
        self.writeAttributeByHandle(handle, data)
    def writeAttributeByHandle(self, handle, data, wait=True):
        """Write `data` (byte list) to `handle`; optionally wait for the
        dongle response and procedure completion."""
        self.com.send(AttClientAttributeWriteCommand(self.connection.id, handle, data))
        if wait:
            self.waitLocal(AttClientAttributeWriteResponse())
            self.completeProcedure()
    def completeProcedure(self):
        # Block until the remote signals the GATT procedure finished.
        msg = self.waitRemote(AttClientProcedureCompleted())
        logging.debug('Procedure completed')
        return msg.result == 0
    def configClientCharacteristic(self, handle, notify=False, indicate=False):
        """Write the client characteristic configuration flags at `handle`."""
        NOTIFY_ENABLE = 1
        INDICATE_ENABLE = 2
        flags = 0
        if notify: flags = flags | NOTIFY_ENABLE
        if indicate: flags = flags | INDICATE_ENABLE
        self.writeAttributeByHandle(handle, [flags])
    def isConnected(self): return self.connection.id is not None
    def waitValue(self, uuid):
        # NOTE(review): handleByUuid is unused here and undefined on
        # BleConnection (see writeAttribute above) -- confirm.
        handle = self.connection.handleByUuid(uuid)
        return self.waitRemote(AttClientAttributeValueEvent()).data
    def readAttribute(self, uuid):
        # NOTE(review): depends on the undefined handleByUuid as well.
        logging.info('Reading attribute %s' % uuid)
        handle = self.connection.handleByUuid(uuid)
        self.com.send(AttClientReadByHandleCommand(self.connection.id,
                                                   handle))
        self.waitLocal(AttClientReadByHandleResponse())
        return self.waitValue(uuid)
    def readAll(self):
        """Discover all primary services (GATT UUID 0x2800); return the
        uuid -> AttributeGroup map collected by the found events."""
        return self.readByGroupType(1, 0xFFFF, Uint16(int('2800',16)).serialize())
    def readByGroupType(self, start, end, uuid):
        self.groups = {}
        self.com.send(ReadByGroupTypeCommand(self.connection.id, start, end, uuid))
        self.waitLocal(ReadByGroupTypeResponse())
        self.completeProcedure()
        return self.groups
    def onAttClientGroupFoundEvent(self, message):
        # Collect each discovered service group during readByGroupType.
        self.groups[message.uuid] = AttributeGroup(message.uuid, message.start, message.end)
    def findInformation(self, start, end):
        """Enumerate attribute handles in [start, end]; return uuid -> handle."""
        self.handles = {}
        self.com.send(AttClientFindInformationCommand(self.connection.id, start, end))
        self.waitLocal(AttClientFindInformationResponse())
        self.completeProcedure()
        return self.handles
    def onAttClientFindInformationFoundEvent(self, message):
        self.handles[message.uuid] = message.chrHandle
    def onAttClientAttributeValueEvent(self, message):
        # Notifications/indications are forwarded straight to the delegate.
        if self.delegate is not None: self.delegate.on_message(message)
|
{"/controller.py": ["/nuimo.py", "/sonos.py"], "/nuimo.py": ["/gatt.py"]}
|
3,651
|
jcfellers/DSP539_pyExam
|
refs/heads/master
|
/Jfellers_kmers.py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Apr 21 13:34:17 2021
@author: Justin
"""
from collections import Counter
import pandas as pd
import sys
# function for possible kmers
def possible_kmers(string, k):
    '''
    Parameters
    ----------
    string :
        Type: string
        Description: Any string of characters composed of the letters A,C,G,T.
    k :
        Type: int
        Description: Defined length of the sub-string.

    Returns
    -------
    The number of possible kmer combinations: min( len(string)-k+1, 4**k ).
    '''
    assert type(k) == int
    # Either the count of windows that fit in the string, or the number of
    # distinct words over the 4-letter alphabet -- whichever is smaller.
    by_length = len(string) - k + 1
    by_alphabet = 4 ** k
    return min(by_length, by_alphabet)
# function for observed kmers
def observed_kmers(string, k):
    '''
    Parameters
    ----------
    string :
        Type: string
        Description: Any string of characters composed of the letters A,C,G,T.
    k :
        Type: int
        Description: Defined length of the sub-string.

    Returns
    -------
    The number of observed (e.g. unique) kmer combinations.
    '''
    assert type(k) == int
    # Slide a window of width k across the string and collect the distinct
    # substrings; for k > len(string) the range is empty and the count is 0.
    distinct = {string[i:i + k] for i in range(len(string) - k + 1)}
    return len(distinct)
# function for pandas df with all possible k and their observed & possible kmers
def k_df(string):
    '''
    Parameters
    ----------
    string : string
        Any string of characters composed of the letters A,C,G,T.

    Returns
    -------
    k_df : pandas dataframe
        One row per k in 1..len(string) with columns
        k, Observed_kmers, Possible_kmers.
    '''
    cols = ['k', 'Observed_kmers', 'Possible_kmers']
    # Collect one record per k, then build the frame in a single call.
    # (The original used DataFrame.append in a loop, which was deprecated in
    # pandas 1.4 and removed in pandas 2.0 -- and was O(n^2) besides.)
    rows = []
    for i in range(1, len(string) + 1):
        rows.append({'k': i,
                     'Observed_kmers': observed_kmers(string, i),
                     'Possible_kmers': possible_kmers(string, i)})
    return pd.DataFrame(rows, columns=cols)
# function for linguistic complexity
def ling_complex(string):
    '''
    Parameters
    ----------
    string : string
        Any string of characters composed of the letters A,C,G,T.

    Returns
    -------
    Computed liguistic complexity for provided string.
    '''
    # Ratio of total observed kmers to total possible kmers across all k.
    kmer_table = k_df(string)
    observed_total = sum(kmer_table['Observed_kmers'])
    possible_total = sum(kmer_table['Possible_kmers'])
    return observed_total / possible_total
def main(string):
    """Write the kmer dataframe and linguistic complexity of `string` to
    CSV files under Results/ (directory must already exist)."""
    # write the output files for each string in the read-in file
    # pandas dataframe to csv
    k_df(string).to_csv('Results/%s_kmersDataframe.csv' % string, index = False)
    # convert complexity from float to Series and write to csv
    complexity = pd.Series(ling_complex(string), name = 'Linguistic Complexity')
    complexity.to_csv('Results/%s_lingComplexity.csv' % string, index = False)
if __name__ == '__main__':
    # read in strings.csv
    #file = 'strings.txt'
    # First CLI argument names the input file: one sequence per line.
    file = sys.argv[1]
    open_file = open(file, 'r')
    # [:-1] strips the trailing newline from each line.
    line = open_file.readline()[:-1]
    # while there is a line to be read-in...
    while line:
        # test that a string is being read-in from the file
        assert type(line) == str
        # excecute main script on line
        main(line)
        # move to the next line
        line = open_file.readline()[:-1]
    # close the file
    open_file.close()
    print('Script Complete')
|
{"/test_Jfellers_kmers.py": ["/Jfellers_kmers.py"]}
|
3,652
|
jcfellers/DSP539_pyExam
|
refs/heads/master
|
/test_Jfellers_kmers.py
|
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 29 09:45:29 2021
@author: Justin
"""
from Jfellers_kmers import *
# Setup: decisions for testing environment
file = 'strings.txt'
open_file = open(file, 'r')
# Testing Parameters: use first string in file & k = 7
line = open_file.readline()[:-1]  # [:-1] strips the trailing newline
k_test = 7
# Expected Results (hand-computed for the first string of strings.txt)
expected_possible_kmers = 3
expected_observed_kmers = 3
expected_kmers_df_shape = (len(line),3)  # one row per k, three columns
expected_ling_complexity = 0.875
# close the file
open_file.close()
def test_possible_kmers():
    """possible_kmers on the fixture string must match the known value."""
    assert possible_kmers(line, k_test) == expected_possible_kmers
def test_observed_kmers():
    """observed_kmers on the fixture string must match the known value."""
    assert observed_kmers(line, k_test) == expected_observed_kmers
def test_k_df():
    """k_df must yield one row per k and exactly three columns."""
    assert k_df(line).shape == expected_kmers_df_shape
def test_ling_complex():
    """ling_complex must reproduce the hand-computed complexity."""
    assert ling_complex(line) == expected_ling_complexity
|
{"/test_Jfellers_kmers.py": ["/Jfellers_kmers.py"]}
|
3,653
|
aaronn/django-rest-framework-passwordless
|
refs/heads/master
|
/tests/test_verification.py
|
from rest_framework import status
from rest_framework.authtoken.models import Token
from django.utils.translation import gettext_lazy as _
from rest_framework.test import APITestCase
from django.contrib.auth import get_user_model
from django.urls import reverse
from drfpasswordless.settings import api_settings, DEFAULTS
from drfpasswordless.utils import CallbackToken
User = get_user_model()
class AliasEmailVerificationTests(APITestCase):
    """End-to-end check that the email_verified flag follows the alias:
    set on successful token sign-in, cleared when the email changes, and
    set again after explicit verification."""

    def setUp(self):
        api_settings.PASSWORDLESS_AUTH_TYPES = ['EMAIL']
        api_settings.PASSWORDLESS_EMAIL_NOREPLY_ADDRESS = 'noreply@example.com'
        api_settings.PASSWORDLESS_USER_MARK_EMAIL_VERIFIED = True

        self.url = reverse('drfpasswordless:auth_email')
        self.callback_url = reverse('drfpasswordless:auth_token')
        self.verify_url = reverse('drfpasswordless:verify_email')
        self.callback_verify = reverse('drfpasswordless:verify_token')
        self.email_field_name = api_settings.PASSWORDLESS_USER_EMAIL_FIELD_NAME
        self.email_verified_field_name = api_settings.PASSWORDLESS_USER_EMAIL_VERIFIED_FIELD_NAME

    def test_email_unverified_to_verified_and_back(self):
        email = 'aaron@example.com'
        email2 = 'aaron2@example.com'
        data = {'email': email}

        # create a new user -- starts unverified
        response = self.client.post(self.url, data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        user = User.objects.get(**{self.email_field_name: email})
        self.assertNotEqual(user, None)
        self.assertEqual(getattr(user, self.email_verified_field_name), False)

        # Verify a token exists for the user, sign in and check verified again
        callback = CallbackToken.objects.filter(user=user, type=CallbackToken.TOKEN_TYPE_AUTH, is_active=True).first()
        callback_data = {'email': email, 'token': callback}
        callback_response = self.client.post(self.callback_url, callback_data)
        self.assertEqual(callback_response.status_code, status.HTTP_200_OK)

        # Verify we got the token, then check and see that email_verified is now verified
        token = callback_response.data['token']
        self.assertEqual(token, Token.objects.get(user=user).key)

        # Refresh and see that the endpoint is now verified as True
        user.refresh_from_db()
        self.assertEqual(getattr(user, self.email_verified_field_name), True)

        # Change email, should result in flag changing to false
        setattr(user, self.email_field_name, email2)
        user.save()
        user.refresh_from_db()
        self.assertEqual(getattr(user, self.email_verified_field_name), False)

        # Request a verification token for the new address
        self.client.force_authenticate(user)
        verify_response = self.client.post(self.verify_url)
        self.assertEqual(verify_response.status_code, status.HTTP_200_OK)

        # Refresh User
        user = User.objects.get(**{self.email_field_name: email2})
        self.assertNotEqual(user, None)
        self.assertNotEqual(getattr(user, self.email_field_name), None)
        self.assertEqual(getattr(user, self.email_verified_field_name), False)

        # Post callback token back.
        verify_token = CallbackToken.objects.filter(user=user, type=CallbackToken.TOKEN_TYPE_VERIFY, is_active=True).first()
        self.assertNotEqual(verify_token, None)
        verify_callback_response = self.client.post(self.callback_verify, {'email': email2, 'token': verify_token.key})
        self.assertEqual(verify_callback_response.status_code, status.HTTP_200_OK)

        # Refresh User -- now verified again
        user = User.objects.get(**{self.email_field_name: email2})
        self.assertNotEqual(user, None)
        self.assertNotEqual(getattr(user, self.email_field_name), None)
        self.assertEqual(getattr(user, self.email_verified_field_name), True)

    def tearDown(self):
        # Restore every setting touched in setUp to the shipped default.
        api_settings.PASSWORDLESS_AUTH_TYPES = DEFAULTS['PASSWORDLESS_AUTH_TYPES']
        api_settings.PASSWORDLESS_EMAIL_NOREPLY_ADDRESS = DEFAULTS['PASSWORDLESS_EMAIL_NOREPLY_ADDRESS']
        # BUG FIX: this previously restored from the MOBILE default key
        # (DEFAULTS['PASSWORDLESS_USER_MARK_MOBILE_VERIFIED']).
        api_settings.PASSWORDLESS_USER_MARK_EMAIL_VERIFIED = DEFAULTS['PASSWORDLESS_USER_MARK_EMAIL_VERIFIED']
class AliasMobileVerificationTests(APITestCase):
    """End-to-end check that the mobile_verified flag follows the alias:
    set on successful token sign-in, cleared when the number changes, and
    set again after explicit verification."""

    def setUp(self):
        api_settings.PASSWORDLESS_TEST_SUPPRESSION = True
        api_settings.PASSWORDLESS_AUTH_TYPES = ['MOBILE']
        api_settings.PASSWORDLESS_MOBILE_NOREPLY_NUMBER = '+15550000000'
        api_settings.PASSWORDLESS_USER_MARK_MOBILE_VERIFIED = True

        self.url = reverse('drfpasswordless:auth_mobile')
        self.callback_url = reverse('drfpasswordless:auth_token')
        self.verify_url = reverse('drfpasswordless:verify_mobile')
        self.callback_verify = reverse('drfpasswordless:verify_token')
        self.mobile_field_name = api_settings.PASSWORDLESS_USER_MOBILE_FIELD_NAME
        self.mobile_verified_field_name = api_settings.PASSWORDLESS_USER_MOBILE_VERIFIED_FIELD_NAME

    def test_mobile_unverified_to_verified_and_back(self):
        mobile = '+15551234567'
        mobile2 = '+15557654321'
        data = {'mobile': mobile}

        # create a new user -- starts unverified
        response = self.client.post(self.url, data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        user = User.objects.get(**{self.mobile_field_name: mobile})
        self.assertNotEqual(user, None)
        self.assertEqual(getattr(user, self.mobile_verified_field_name), False)

        # Verify a token exists for the user, sign in and check verified again
        callback = CallbackToken.objects.filter(user=user, type=CallbackToken.TOKEN_TYPE_AUTH, is_active=True).first()
        callback_data = {'mobile': mobile, 'token': callback}
        callback_response = self.client.post(self.callback_url, callback_data)
        self.assertEqual(callback_response.status_code, status.HTTP_200_OK)

        # Verify we got the token, then check and see that mobile_verified is now verified
        token = callback_response.data['token']
        self.assertEqual(token, Token.objects.get(user=user).key)

        # Refresh and see that the endpoint is now verified as True
        user.refresh_from_db()
        self.assertEqual(getattr(user, self.mobile_verified_field_name), True)

        # Change mobile, should result in flag changing to false
        # (consistency fix: use mobile2 instead of repeating the literal)
        setattr(user, self.mobile_field_name, mobile2)
        user.save()
        user.refresh_from_db()
        self.assertEqual(getattr(user, self.mobile_verified_field_name), False)

        # Request a verification token for the new number
        self.client.force_authenticate(user)
        verify_response = self.client.post(self.verify_url)
        self.assertEqual(verify_response.status_code, status.HTTP_200_OK)

        # Refresh User
        user = User.objects.get(**{self.mobile_field_name: mobile2})
        self.assertNotEqual(user, None)
        self.assertNotEqual(getattr(user, self.mobile_field_name), None)
        self.assertEqual(getattr(user, self.mobile_verified_field_name), False)

        # Post callback token back.
        verify_token = CallbackToken.objects.filter(user=user, type=CallbackToken.TOKEN_TYPE_VERIFY, is_active=True).first()
        self.assertNotEqual(verify_token, None)
        verify_callback_response = self.client.post(self.callback_verify, {'mobile': mobile2, 'token': verify_token.key})
        self.assertEqual(verify_callback_response.status_code, status.HTTP_200_OK)

        # Refresh User -- now verified again
        user = User.objects.get(**{self.mobile_field_name: mobile2})
        self.assertNotEqual(user, None)
        self.assertNotEqual(getattr(user, self.mobile_field_name), None)
        self.assertEqual(getattr(user, self.mobile_verified_field_name), True)

    def tearDown(self):
        # Restore every setting touched in setUp to the shipped default.
        api_settings.PASSWORDLESS_TEST_SUPPRESSION = DEFAULTS['PASSWORDLESS_TEST_SUPPRESSION']
        api_settings.PASSWORDLESS_AUTH_TYPES = DEFAULTS['PASSWORDLESS_AUTH_TYPES']
        # BUG FIX: setUp sets PASSWORDLESS_MOBILE_NOREPLY_NUMBER, but the
        # original tearDown assigned to a misspelled attribute
        # (PASSWORDLESS_MOBILE_NOREPLY_ADDRESS), leaving the real setting
        # polluted for subsequent tests.
        api_settings.PASSWORDLESS_MOBILE_NOREPLY_NUMBER = DEFAULTS['PASSWORDLESS_MOBILE_NOREPLY_NUMBER']
        api_settings.PASSWORDLESS_USER_MARK_MOBILE_VERIFIED = DEFAULTS['PASSWORDLESS_USER_MARK_MOBILE_VERIFIED']
|
{"/tests/test_verification.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/serializers.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/test_authentication.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/services.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/signals.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/services.py"], "/drfpasswordless/apps.py": ["/drfpasswordless/signals.py"], "/drfpasswordless/migrations/0004_auto_20200125_0853.py": ["/drfpasswordless/models.py"], "/drfpasswordless/utils.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py"], "/drfpasswordless/admin.py": ["/drfpasswordless/models.py"], "/drfpasswordless/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/views.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/serializers.py", "/drfpasswordless/services.py"]}
|
3,654
|
aaronn/django-rest-framework-passwordless
|
refs/heads/master
|
/tests/models.py
|
from django.contrib.auth.models import AbstractBaseUser
from django.contrib.auth.models import BaseUserManager
from django.core.validators import RegexValidator
from django.db import models
# E.164-style mobile number validator shared by the test user model below.
phone_regex = RegexValidator(regex=r'^\+[1-9]\d{1,14}$',
                             message="Mobile number must be entered in the format:"
                                     " '+999999999'. Up to 15 digits allowed.")
class CustomUser(AbstractBaseUser):
    """Minimal user model for the test suite: passwordless auth by email or mobile alias."""
    # Alias fields are nullable/blank so a user may exist with only one of the two.
    email = models.EmailField(max_length=255, unique=True, blank=True, null=True)
    email_verified = models.BooleanField(default=False)
    mobile = models.CharField(validators=[phone_regex], max_length=17, unique=True, blank=True, null=True)
    mobile_verified = models.BooleanField(default=False)
    objects = BaseUserManager()
    USERNAME_FIELD = 'email'

    class Meta:
        app_label = 'tests'
|
{"/tests/test_verification.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/serializers.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/test_authentication.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/services.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/signals.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/services.py"], "/drfpasswordless/apps.py": ["/drfpasswordless/signals.py"], "/drfpasswordless/migrations/0004_auto_20200125_0853.py": ["/drfpasswordless/models.py"], "/drfpasswordless/utils.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py"], "/drfpasswordless/admin.py": ["/drfpasswordless/models.py"], "/drfpasswordless/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/views.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/serializers.py", "/drfpasswordless/services.py"]}
|
3,655
|
aaronn/django-rest-framework-passwordless
|
refs/heads/master
|
/drfpasswordless/serializers.py
|
import logging
from django.utils.translation import gettext_lazy as _
from django.contrib.auth import get_user_model
from django.core.exceptions import PermissionDenied
from django.core.validators import RegexValidator
from rest_framework import serializers
from rest_framework.exceptions import ValidationError
from drfpasswordless.models import CallbackToken
from drfpasswordless.settings import api_settings
from drfpasswordless.utils import verify_user_alias, validate_token_age
logger = logging.getLogger(__name__)
User = get_user_model()
class TokenField(serializers.CharField):
    """CharField for the 6-digit callback token with token-specific error messages."""
    # required/invalid/blank all collapse to the same generic message so the
    # response does not reveal which particular check failed.
    default_error_messages = {
        'required': _('Invalid Token'),
        'invalid': _('Invalid Token'),
        'blank': _('Invalid Token'),
        'max_length': _('Tokens are {max_length} digits long.'),
        'min_length': _('Tokens are {min_length} digits long.')
    }
class AbstractBaseAliasAuthenticationSerializer(serializers.Serializer):
    """
    Abstract serializer that validates a single alias (email or mobile) and
    resolves it to a user, optionally registering a new one.

    On success the validated user is placed in attrs['user']; every failure
    raises a ValidationError with a user-facing message.
    """
    @property
    def alias_type(self):
        # Concrete subclasses return 'email' or 'mobile'.
        raise NotImplementedError

    def validate(self, attrs):
        alias = attrs.get(self.alias_type)
        if not alias:
            raise serializers.ValidationError(_('Missing %s.') % self.alias_type)

        # Case-insensitive lookup of an existing account for this alias.
        try:
            account = User.objects.get(**{self.alias_type + '__iexact': alias})
        except User.DoesNotExist:
            account = None

        if account is None and api_settings.PASSWORDLESS_REGISTER_NEW_USERS is True:
            # Unknown aliases register a fresh user with no usable password.
            account = User.objects.create(**{self.alias_type: alias})
            account.set_unusable_password()
            account.save()

        if not account:
            raise serializers.ValidationError(_('No account is associated with this alias.'))
        if not account.is_active:
            raise serializers.ValidationError(_('User account is disabled.'))

        attrs['user'] = account
        return attrs
class EmailAuthSerializer(AbstractBaseAliasAuthenticationSerializer):
    """Requests a callback token for an email alias."""
    @property
    def alias_type(self):
        return 'email'

    email = serializers.EmailField()
class MobileAuthSerializer(AbstractBaseAliasAuthenticationSerializer):
    """Requests a callback token for a mobile (E.164 phone number) alias."""
    @property
    def alias_type(self):
        return 'mobile'

    phone_regex = RegexValidator(regex=r'^\+[1-9]\d{1,14}$',
                                 message="Mobile number must be entered in the format:"
                                         " '+999999999'. Up to 15 digits allowed.")
    mobile = serializers.CharField(validators=[phone_regex], max_length=17)
"""
Verification
"""
class AbstractBaseAliasVerificationSerializer(serializers.Serializer):
    """
    Abstract class that returns a callback token based on the field given
    Returns a token if valid, None or a message if not.
    """
    @property
    def alias_type(self):
        # The alias type, either email or mobile
        raise NotImplementedError

    def validate(self, attrs):
        # Default message used when the checks below fall through without
        # setting a more specific one.
        msg = _('There was a problem with your request.')

        if self.alias_type:
            # Get request.user
            # Get their specified valid endpoint
            # Validate
            request = self.context["request"]
            if request and hasattr(request, "user"):
                user = request.user
                if user:
                    if not user.is_active:
                        # If valid, return attrs so we can create a token in our logic controller
                        msg = _('User account is disabled.')
                    else:
                        if hasattr(user, self.alias_type):
                            # Has the appropriate alias type
                            attrs['user'] = user
                            return attrs
                        else:
                            msg = _('This user doesn\'t have an %s.' % self.alias_type)
            # Reached on any failed check above; msg reflects the last failure seen.
            raise serializers.ValidationError(msg)
        else:
            msg = _('Missing %s.') % self.alias_type
            raise serializers.ValidationError(msg)
class EmailVerificationSerializer(AbstractBaseAliasVerificationSerializer):
@property
def alias_type(self):
return 'email'
class MobileVerificationSerializer(AbstractBaseAliasVerificationSerializer):
@property
def alias_type(self):
return 'mobile'
"""
Callback Token
"""
def token_age_validator(value):
    """Field validator: reject a token that is outside its expiration window."""
    if not validate_token_age(value):
        raise serializers.ValidationError("The token you entered isn't valid.")
    return value
class AbstractBaseCallbackTokenSerializer(serializers.Serializer):
    """
    Abstract serializer for the token-exchange endpoints: accepts exactly one
    alias (email or mobile) plus the 6-digit callback token.
    """
    phone_regex = RegexValidator(regex=r'^\+[1-9]\d{1,14}$',
                                 message="Mobile number must be entered in the format:"
                                         " '+999999999'. Up to 15 digits allowed.")
    email = serializers.EmailField(required=False)  # Needs to be required=false to require both.
    mobile = serializers.CharField(required=False, validators=[phone_regex], max_length=17)
    token = TokenField(min_length=6, max_length=6, validators=[token_age_validator])

    def validate_alias(self, attrs):
        """Return ('email'|'mobile', value) for the single supplied alias, else raise."""
        supplied = [(name, attrs.get(name)) for name in ('email', 'mobile')]
        present = [(name, value) for name, value in supplied if value]
        # Exactly one alias must be provided — neither none nor both.
        if len(present) != 1:
            raise serializers.ValidationError()
        return present[0]
class CallbackTokenAuthSerializer(AbstractBaseCallbackTokenSerializer):
    """Validates an auth callback token against its alias and puts the user in attrs."""

    def validate(self, attrs):
        # Check Aliases
        # NOTE(review): ValidationErrors raised inside this try block are caught
        # by the trailing `except ValidationError` and re-raised with the generic
        # 'Invalid alias parameters provided.' message — confirm this is intended.
        try:
            alias_type, alias = self.validate_alias(attrs)
            callback_token = attrs.get('token', None)
            user = User.objects.get(**{alias_type+'__iexact': alias})
            # Only an active AUTH-type token for this user/key qualifies.
            token = CallbackToken.objects.get(**{'user': user,
                                                 'key': callback_token,
                                                 'type': CallbackToken.TOKEN_TYPE_AUTH,
                                                 'is_active': True})

            if token.user == user:
                # Check the token type for our uni-auth method.
                # authenticates and checks the expiry of the callback token.
                if not user.is_active:
                    msg = _('User account is disabled.')
                    raise serializers.ValidationError(msg)

                if api_settings.PASSWORDLESS_USER_MARK_EMAIL_VERIFIED \
                        or api_settings.PASSWORDLESS_USER_MARK_MOBILE_VERIFIED:
                    # Mark this alias as verified
                    user = User.objects.get(pk=token.user.pk)
                    success = verify_user_alias(user, token)
                    if success is False:
                        msg = _('Error validating user alias.')
                        raise serializers.ValidationError(msg)

                attrs['user'] = user
                return attrs
            else:
                msg = _('Invalid Token')
                raise serializers.ValidationError(msg)
        except CallbackToken.DoesNotExist:
            msg = _('Invalid alias parameters provided.')
            raise serializers.ValidationError(msg)
        except User.DoesNotExist:
            msg = _('Invalid user alias parameters provided.')
            raise serializers.ValidationError(msg)
        except ValidationError:
            msg = _('Invalid alias parameters provided.')
            raise serializers.ValidationError(msg)
class CallbackTokenVerificationSerializer(AbstractBaseCallbackTokenSerializer):
    """
    Takes a user and a token, verifies the token belongs to the user and
    validates the alias that the token was sent from.
    """

    def validate(self, attrs):
        try:
            alias_type, alias = self.validate_alias(attrs)
            user_id = self.context.get("user_id")
            # The alias must belong to the requesting user (id supplied by the view).
            user = User.objects.get(**{'id': user_id, alias_type+'__iexact': alias})
            callback_token = attrs.get('token', None)

            # Only an active VERIFY-type token for this user/key qualifies.
            token = CallbackToken.objects.get(**{'user': user,
                                                 'key': callback_token,
                                                 'type': CallbackToken.TOKEN_TYPE_VERIFY,
                                                 'is_active': True})

            if token.user == user:
                # Mark this alias as verified
                success = verify_user_alias(user, token)
                if success is False:
                    # Verification failure is only logged; the request still succeeds.
                    logger.debug("drfpasswordless: Error verifying alias.")

                attrs['user'] = user
                return attrs
            else:
                msg = _('This token is invalid. Try again later.')
                logger.debug("drfpasswordless: User token mismatch when verifying alias.")

        except CallbackToken.DoesNotExist:
            msg = _('We could not verify this alias.')
            logger.debug("drfpasswordless: Tried to validate alias with bad token.")
            pass
        except User.DoesNotExist:
            msg = _('We could not verify this alias.')
            logger.debug("drfpasswordless: Tried to validate alias with bad user.")
            pass
        except PermissionDenied:
            msg = _('Insufficient permissions.')
            logger.debug("drfpasswordless: Permission denied while validating alias.")
            pass

        # Every non-success path falls through here with the last msg that was set.
        raise serializers.ValidationError(msg)
"""
Responses
"""
class TokenResponseSerializer(serializers.Serializer):
    """
    Our default response serializer.
    """
    # Exposes the token object's 'key' attribute as 'token' in responses;
    # 'key' itself is write-only so it never appears in serialized output.
    token = serializers.CharField(source='key')
    key = serializers.CharField(write_only=True)
|
{"/tests/test_verification.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/serializers.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/test_authentication.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/services.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/signals.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/services.py"], "/drfpasswordless/apps.py": ["/drfpasswordless/signals.py"], "/drfpasswordless/migrations/0004_auto_20200125_0853.py": ["/drfpasswordless/models.py"], "/drfpasswordless/utils.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py"], "/drfpasswordless/admin.py": ["/drfpasswordless/models.py"], "/drfpasswordless/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/views.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/serializers.py", "/drfpasswordless/services.py"]}
|
3,656
|
aaronn/django-rest-framework-passwordless
|
refs/heads/master
|
/tests/test_authentication.py
|
from rest_framework import status
from rest_framework.authtoken.models import Token
from rest_framework.test import APITestCase
from django.contrib.auth import get_user_model
from django.urls import reverse
from drfpasswordless.settings import api_settings, DEFAULTS
from drfpasswordless.utils import CallbackToken
User = get_user_model()
class EmailSignUpCallbackTokenTests(APITestCase):
    """Sign-up via the email auth endpoint, driven by PASSWORDLESS_REGISTER_NEW_USERS."""

    def setUp(self):
        api_settings.PASSWORDLESS_EMAIL_NOREPLY_ADDRESS = 'noreply@example.com'

        self.email_field_name = api_settings.PASSWORDLESS_USER_EMAIL_FIELD_NAME
        self.url = reverse('drfpasswordless:auth_email')

    def test_email_signup_failed(self):
        # Malformed address must be rejected by the serializer.
        email = 'failedemail182+'
        data = {'email': email}

        response = self.client.post(self.url, data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_email_signup_success(self):
        email = 'aaron@example.com'
        data = {'email': email}

        # Verify user doesn't exist yet
        user = User.objects.filter(**{self.email_field_name: 'aaron@example.com'}).first()
        self.assertEqual(user, None)

        # verify a new user was created with serializer
        response = self.client.post(self.url, data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        user = User.objects.get(**{self.email_field_name: 'aaron@example.com'})
        self.assertNotEqual(user, None)

        # Verify a token exists for the user.
        # exists() returns a bool, so assert truthiness rather than comparing to 1.
        self.assertTrue(CallbackToken.objects.filter(user=user, is_active=True).exists())

    def test_email_signup_disabled(self):
        api_settings.PASSWORDLESS_REGISTER_NEW_USERS = False

        # Verify user doesn't exist yet
        user = User.objects.filter(**{self.email_field_name: 'aaron@example.com'}).first()
        self.assertEqual(user, None)

        email = 'aaron@example.com'
        data = {'email': email}

        # verify a new user was not created
        response = self.client.post(self.url, data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        user = User.objects.filter(**{self.email_field_name: 'aaron@example.com'}).first()
        self.assertEqual(user, None)

        # Verify no token was created for the user
        self.assertFalse(CallbackToken.objects.filter(user=user, is_active=True).exists())

    def tearDown(self):
        api_settings.PASSWORDLESS_EMAIL_NOREPLY_ADDRESS = DEFAULTS['PASSWORDLESS_EMAIL_NOREPLY_ADDRESS']
        api_settings.PASSWORDLESS_REGISTER_NEW_USERS = DEFAULTS['PASSWORDLESS_REGISTER_NEW_USERS']
class EmailLoginCallbackTokenTests(APITestCase):
    """End-to-end email login: request a callback token, then exchange it for an auth token."""

    def setUp(self):
        api_settings.PASSWORDLESS_AUTH_TYPES = ['EMAIL']
        api_settings.PASSWORDLESS_EMAIL_NOREPLY_ADDRESS = 'noreply@example.com'

        self.email = 'aaron@example.com'
        self.url = reverse('drfpasswordless:auth_email')
        self.challenge_url = reverse('drfpasswordless:auth_token')

        self.email_field_name = api_settings.PASSWORDLESS_USER_EMAIL_FIELD_NAME
        self.user = User.objects.create(**{self.email_field_name: self.email})

    def test_email_auth_failed(self):
        data = {'email': self.email}
        response = self.client.post(self.url, data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        # Token sent to alias
        challenge_data = {'email': self.email, 'token': '123456'}  # Send an arbitrary token instead

        # Try to auth with the callback token
        challenge_response = self.client.post(self.challenge_url, challenge_data)
        self.assertEqual(challenge_response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_email_auth_missing_alias(self):
        data = {'email': self.email}
        response = self.client.post(self.url, data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        # Token sent to alias
        # NOTE(review): the CallbackToken instance (not its key) is posted —
        # presumably its string form is the key itself; confirm in models.py.
        callback_token = CallbackToken.objects.filter(user=self.user, is_active=True).first()
        challenge_data = {'token': callback_token}  # Missing Alias

        # Try to auth with the callback token
        challenge_response = self.client.post(self.challenge_url, challenge_data)
        self.assertEqual(challenge_response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_email_auth_bad_alias(self):
        data = {'email': self.email}
        response = self.client.post(self.url, data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        # Token sent to alias
        callback_token = CallbackToken.objects.filter(user=self.user, is_active=True).first()
        challenge_data = {'email': 'abcde@example.com', 'token': callback_token}  # Bad Alias

        # Try to auth with the callback token
        challenge_response = self.client.post(self.challenge_url, challenge_data)
        self.assertEqual(challenge_response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_email_auth_expired(self):
        data = {'email': self.email}
        response = self.client.post(self.url, data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        # Token sent to alias
        callback_token = CallbackToken.objects.filter(user=self.user, is_active=True).first()
        challenge_data = {'email': self.email, 'token': callback_token}

        # Requesting a second token invalidates the first one.
        data = {'email': self.email}
        response = self.client.post(self.url, data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        # Second token sent to alias
        second_callback_token = CallbackToken.objects.filter(user=self.user, is_active=True).first()
        second_challenge_data = {'email': self.email, 'token': second_callback_token}

        # Try to auth with the old callback token
        challenge_response = self.client.post(self.challenge_url, challenge_data)
        self.assertEqual(challenge_response.status_code, status.HTTP_400_BAD_REQUEST)

        # Try to auth with the new callback token
        second_challenge_response = self.client.post(self.challenge_url, second_challenge_data)
        self.assertEqual(second_challenge_response.status_code, status.HTTP_200_OK)

        # Verify Auth Token
        auth_token = second_challenge_response.data['token']
        self.assertEqual(auth_token, Token.objects.filter(key=auth_token).first().key)

    def test_email_auth_success(self):
        data = {'email': self.email}
        response = self.client.post(self.url, data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        # Token sent to alias
        callback_token = CallbackToken.objects.filter(user=self.user, is_active=True).first()
        challenge_data = {'email': self.email, 'token': callback_token}

        # Try to auth with the callback token
        challenge_response = self.client.post(self.challenge_url, challenge_data)
        self.assertEqual(challenge_response.status_code, status.HTTP_200_OK)

        # Verify Auth Token
        auth_token = challenge_response.data['token']
        self.assertEqual(auth_token, Token.objects.filter(key=auth_token).first().key)

    def tearDown(self):
        api_settings.PASSWORDLESS_AUTH_TYPES = DEFAULTS['PASSWORDLESS_AUTH_TYPES']
        api_settings.PASSWORDLESS_EMAIL_NOREPLY_ADDRESS = DEFAULTS['PASSWORDLESS_EMAIL_NOREPLY_ADDRESS']
        self.user.delete()
"""
Mobile Tests
"""
class MobileSignUpCallbackTokenTests(APITestCase):
    """Sign-up via the mobile auth endpoint, driven by PASSWORDLESS_REGISTER_NEW_USERS."""

    def setUp(self):
        api_settings.PASSWORDLESS_TEST_SUPPRESSION = True
        api_settings.PASSWORDLESS_AUTH_TYPES = ['MOBILE']
        api_settings.PASSWORDLESS_MOBILE_NOREPLY_NUMBER = '+15550000000'
        self.url = reverse('drfpasswordless:auth_mobile')

        self.mobile_field_name = api_settings.PASSWORDLESS_USER_MOBILE_FIELD_NAME

    def test_mobile_signup_failed(self):
        # Not a valid E.164 number, so the serializer must reject it.
        mobile = 'sidfj98zfd'
        data = {'mobile': mobile}

        response = self.client.post(self.url, data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_mobile_signup_success(self):
        mobile = '+15551234567'
        data = {'mobile': mobile}

        # Verify user doesn't exist yet
        user = User.objects.filter(**{self.mobile_field_name: '+15551234567'}).first()
        self.assertEqual(user, None)

        # verify a new user was created with serializer
        response = self.client.post(self.url, data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        user = User.objects.get(**{self.mobile_field_name: '+15551234567'})
        self.assertNotEqual(user, None)

        # Verify a token exists for the user.
        # exists() returns a bool, so assert truthiness rather than comparing to 1.
        self.assertTrue(CallbackToken.objects.filter(user=user, is_active=True).exists())

    def test_mobile_signup_disabled(self):
        api_settings.PASSWORDLESS_REGISTER_NEW_USERS = False

        # Verify user doesn't exist yet
        user = User.objects.filter(**{self.mobile_field_name: '+15557654321'}).first()
        self.assertEqual(user, None)

        mobile = '+15557654321'
        data = {'mobile': mobile}

        # verify a new user was not created
        response = self.client.post(self.url, data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        user = User.objects.filter(**{self.mobile_field_name: '+15557654321'}).first()
        self.assertEqual(user, None)

        # Verify no token was created for the user
        self.assertFalse(CallbackToken.objects.filter(user=user, is_active=True).exists())

    def tearDown(self):
        api_settings.PASSWORDLESS_TEST_SUPPRESSION = DEFAULTS['PASSWORDLESS_TEST_SUPPRESSION']
        api_settings.PASSWORDLESS_AUTH_TYPES = DEFAULTS['PASSWORDLESS_AUTH_TYPES']
        api_settings.PASSWORDLESS_REGISTER_NEW_USERS = DEFAULTS['PASSWORDLESS_REGISTER_NEW_USERS']
        api_settings.PASSWORDLESS_MOBILE_NOREPLY_NUMBER = DEFAULTS['PASSWORDLESS_MOBILE_NOREPLY_NUMBER']
def dummy_token_creator(user):
    """Test stand-in for the auth-token creator: always issues the key 'dummy'."""
    return Token.objects.create(key="dummy", user=user), True
class OverrideTokenCreationTests(APITestCase):
    """Verifies PASSWORDLESS_AUTH_TOKEN_CREATOR swaps in a custom token factory."""

    def setUp(self):
        super().setUp()
        # Point token creation at the module-level dummy_token_creator above.
        api_settings.PASSWORDLESS_AUTH_TOKEN_CREATOR = 'tests.test_authentication.dummy_token_creator'
        api_settings.PASSWORDLESS_AUTH_TYPES = ['EMAIL']
        api_settings.PASSWORDLESS_EMAIL_NOREPLY_ADDRESS = 'noreply@example.com'

        self.email = 'aaron@example.com'
        self.url = reverse('drfpasswordless:auth_email')
        self.challenge_url = reverse('drfpasswordless:auth_token')

        self.email_field_name = api_settings.PASSWORDLESS_USER_EMAIL_FIELD_NAME
        self.user = User.objects.create(**{self.email_field_name: self.email})

    def test_token_creation_gets_overridden(self):
        """Ensure that if we change the token creation function, the overridden one gets called"""
        data = {'email': self.email}
        response = self.client.post(self.url, data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        # Token sent to alias
        callback_token = CallbackToken.objects.filter(user=self.user, is_active=True).first()
        challenge_data = {'email': self.email, 'token': callback_token}

        # Try to auth with the callback token
        challenge_response = self.client.post(self.challenge_url, challenge_data)
        self.assertEqual(challenge_response.status_code, status.HTTP_200_OK)

        # Verify Auth Token
        auth_token = challenge_response.data['token']
        self.assertEqual(auth_token, Token.objects.filter(key=auth_token).first().key)
        # The override always issues 'dummy', proving it was actually called.
        self.assertEqual('dummy', Token.objects.filter(key=auth_token).first().key)

    def tearDown(self):
        api_settings.PASSWORDLESS_AUTH_TOKEN_CREATOR = DEFAULTS['PASSWORDLESS_AUTH_TOKEN_CREATOR']
        api_settings.PASSWORDLESS_AUTH_TYPES = DEFAULTS['PASSWORDLESS_AUTH_TYPES']
        api_settings.PASSWORDLESS_EMAIL_NOREPLY_ADDRESS = DEFAULTS['PASSWORDLESS_EMAIL_NOREPLY_ADDRESS']
        self.user.delete()
        super().tearDown()
class MobileLoginCallbackTokenTests(APITestCase):
    """End-to-end mobile login: request a callback token, then exchange it for an auth token."""

    def setUp(self):
        api_settings.PASSWORDLESS_TEST_SUPPRESSION = True
        api_settings.PASSWORDLESS_AUTH_TYPES = ['MOBILE']
        api_settings.PASSWORDLESS_MOBILE_NOREPLY_NUMBER = '+15550000000'

        self.mobile = '+15551234567'
        self.url = reverse('drfpasswordless:auth_mobile')
        self.challenge_url = reverse('drfpasswordless:auth_token')

        self.mobile_field_name = api_settings.PASSWORDLESS_USER_MOBILE_FIELD_NAME
        self.user = User.objects.create(**{self.mobile_field_name: self.mobile})

    def test_mobile_auth_failed(self):
        data = {'mobile': self.mobile}
        response = self.client.post(self.url, data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        # Token sent to alias
        challenge_data = {'mobile': self.mobile, 'token': '123456'}  # Send an arbitrary token instead

        # Try to auth with the callback token
        challenge_response = self.client.post(self.challenge_url, challenge_data)
        self.assertEqual(challenge_response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_mobile_auth_expired(self):
        data = {'mobile': self.mobile}
        first_response = self.client.post(self.url, data)
        self.assertEqual(first_response.status_code, status.HTTP_200_OK)

        # Token sent to alias
        first_callback_token = CallbackToken.objects.filter(user=self.user, is_active=True).first()
        first_challenge_data = {'mobile': self.mobile, 'token': first_callback_token}

        # Requesting a second token invalidates the first one.
        data = {'mobile': self.mobile}
        second_response = self.client.post(self.url, data)
        self.assertEqual(second_response.status_code, status.HTTP_200_OK)

        # Second token sent to alias
        second_callback_token = CallbackToken.objects.filter(user=self.user, is_active=True).first()
        second_challenge_data = {'mobile': self.mobile, 'token': second_callback_token}

        # Try to auth with the old callback token
        challenge_response = self.client.post(self.challenge_url, first_challenge_data)
        self.assertEqual(challenge_response.status_code, status.HTTP_400_BAD_REQUEST)

        # Try to auth with the new callback token
        second_challenge_response = self.client.post(self.challenge_url, second_challenge_data)
        self.assertEqual(second_challenge_response.status_code, status.HTTP_200_OK)

        # Verify Auth Token
        auth_token = second_challenge_response.data['token']
        self.assertEqual(auth_token, Token.objects.filter(key=auth_token).first().key)

    def test_mobile_auth_success(self):
        data = {'mobile': self.mobile}
        response = self.client.post(self.url, data)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        # Token sent to alias
        callback_token = CallbackToken.objects.filter(user=self.user, is_active=True).first()
        challenge_data = {'mobile': self.mobile, 'token': callback_token}

        # Try to auth with the callback token
        challenge_response = self.client.post(self.challenge_url, challenge_data)
        self.assertEqual(challenge_response.status_code, status.HTTP_200_OK)

        # Verify Auth Token
        auth_token = challenge_response.data['token']
        self.assertEqual(auth_token, Token.objects.filter(key=auth_token).first().key)

    def tearDown(self):
        api_settings.PASSWORDLESS_TEST_SUPPRESSION = DEFAULTS['PASSWORDLESS_TEST_SUPPRESSION']
        api_settings.PASSWORDLESS_AUTH_TYPES = DEFAULTS['PASSWORDLESS_AUTH_TYPES']
        api_settings.PASSWORDLESS_MOBILE_NOREPLY_NUMBER = DEFAULTS['PASSWORDLESS_MOBILE_NOREPLY_NUMBER']
        self.user.delete()
|
{"/tests/test_verification.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/serializers.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/test_authentication.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/services.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/signals.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/services.py"], "/drfpasswordless/apps.py": ["/drfpasswordless/signals.py"], "/drfpasswordless/migrations/0004_auto_20200125_0853.py": ["/drfpasswordless/models.py"], "/drfpasswordless/utils.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py"], "/drfpasswordless/admin.py": ["/drfpasswordless/models.py"], "/drfpasswordless/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/views.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/serializers.py", "/drfpasswordless/services.py"]}
|
3,657
|
aaronn/django-rest-framework-passwordless
|
refs/heads/master
|
/tests/urls.py
|
from django.urls import path, include
from rest_framework.urlpatterns import format_suffix_patterns
from drfpasswordless.settings import api_settings
from drfpasswordless.views import (ObtainEmailCallbackToken,
ObtainMobileCallbackToken,
ObtainAuthTokenFromCallbackToken,
VerifyAliasFromCallbackToken,
ObtainEmailVerificationCallbackToken,
ObtainMobileVerificationCallbackToken, )
app_name = 'drfpasswordless'

urlpatterns = [
    path('', include('drfpasswordless.urls')),
]

# format_suffix_patterns returns a NEW pattern list; the original call discarded
# the result, making it a no-op. Rebind so suffixed routes actually register.
urlpatterns = format_suffix_patterns(urlpatterns)
|
{"/tests/test_verification.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/serializers.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/test_authentication.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/services.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/signals.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/services.py"], "/drfpasswordless/apps.py": ["/drfpasswordless/signals.py"], "/drfpasswordless/migrations/0004_auto_20200125_0853.py": ["/drfpasswordless/models.py"], "/drfpasswordless/utils.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py"], "/drfpasswordless/admin.py": ["/drfpasswordless/models.py"], "/drfpasswordless/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/views.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/serializers.py", "/drfpasswordless/services.py"]}
|
3,658
|
aaronn/django-rest-framework-passwordless
|
refs/heads/master
|
/drfpasswordless/__init__.py
|
# -*- coding: utf-8 -*-
# Package metadata for drfpasswordless.
__title__ = 'drfpasswordless'
__version__ = '1.5.8'
__author__ = 'Aaron Ng'
__license__ = 'MIT'
__copyright__ = 'Copyright 2022 Aaron Ng'

# Version synonym
VERSION = __version__

# NOTE(review): default_app_config is deprecated since Django 3.2 (app configs
# are auto-discovered); presumably kept for older Django versions — confirm.
default_app_config = 'drfpasswordless.apps.DrfpasswordlessConfig'
|
{"/tests/test_verification.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/serializers.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/test_authentication.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/services.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/signals.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/services.py"], "/drfpasswordless/apps.py": ["/drfpasswordless/signals.py"], "/drfpasswordless/migrations/0004_auto_20200125_0853.py": ["/drfpasswordless/models.py"], "/drfpasswordless/utils.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py"], "/drfpasswordless/admin.py": ["/drfpasswordless/models.py"], "/drfpasswordless/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/views.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/serializers.py", "/drfpasswordless/services.py"]}
|
3,659
|
aaronn/django-rest-framework-passwordless
|
refs/heads/master
|
/drfpasswordless/services.py
|
from django.utils.module_loading import import_string
from drfpasswordless.settings import api_settings
from drfpasswordless.utils import (
create_callback_token_for_user,
)
class TokenService(object):
    """Creates a callback token for a user and dispatches it over the alias channel."""

    @staticmethod
    def send_token(user, alias_type, token_type, **message_payload):
        """
        Create a callback token and send it via the configured email/SMS callback.

        Returns the callback's success value, or True for demo users (a token
        is still created for them, but nothing is dispatched).
        """
        token = create_callback_token_for_user(user, alias_type, token_type)

        send_action = None
        # Demo users short-circuit before any message is sent.
        if user.pk in api_settings.PASSWORDLESS_DEMO_USERS.keys():
            return True
        if alias_type == 'email':
            send_action = import_string(api_settings.PASSWORDLESS_EMAIL_CALLBACK)
        elif alias_type == 'mobile':
            send_action = import_string(api_settings.PASSWORDLESS_SMS_CALLBACK)
        # Send to alias
        # NOTE(review): an unrecognized alias_type leaves send_action as None and
        # this call raises TypeError — confirm callers only pass 'email'/'mobile'.
        success = send_action(user, token, **message_payload)
        return success
|
{"/tests/test_verification.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/serializers.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/test_authentication.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/services.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/signals.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/services.py"], "/drfpasswordless/apps.py": ["/drfpasswordless/signals.py"], "/drfpasswordless/migrations/0004_auto_20200125_0853.py": ["/drfpasswordless/models.py"], "/drfpasswordless/utils.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py"], "/drfpasswordless/admin.py": ["/drfpasswordless/models.py"], "/drfpasswordless/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/views.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/serializers.py", "/drfpasswordless/services.py"]}
|
3,660
|
aaronn/django-rest-framework-passwordless
|
refs/heads/master
|
/drfpasswordless/signals.py
|
import logging
from django.contrib.auth import get_user_model
from django.core.exceptions import ValidationError
from django.dispatch import receiver
from django.db.models import signals
from drfpasswordless.models import CallbackToken
from drfpasswordless.models import generate_numeric_token
from drfpasswordless.settings import api_settings
from drfpasswordless.services import TokenService
logger = logging.getLogger(__name__)
@receiver(signals.post_save, sender=CallbackToken)
def invalidate_previous_tokens(sender, instance, created, **kwargs):
    """
    Deactivate every other active token of the same type for this user
    whenever a CallbackToken is saved (created, used, or otherwise).

    Demo users keep their single fixed token, so they are skipped.
    """
    owner = instance.user
    if owner.pk in api_settings.PASSWORDLESS_DEMO_USERS:
        return

    if not isinstance(instance, CallbackToken):
        return

    stale_tokens = (CallbackToken.objects.active()
                    .filter(user=owner, type=instance.type)
                    .exclude(id=instance.id))
    stale_tokens.update(is_active=False)
@receiver(signals.pre_save, sender=CallbackToken)
def check_unique_tokens(sender, instance, **kwargs):
    """
    Ensures that mobile and email tokens are unique or tries once more to generate.
    Note that here we've decided keys are unique even across auth and validation.
    We could consider relaxing this in the future as well by filtering on the instance.type.
    """
    if not instance._state.adding:
        # Updating an existing token (e.g. invalidating it): the key does
        # not change, so skip the check and avoid an unnecessary db hit.
        return

    if not isinstance(instance, CallbackToken):
        return

    def key_taken():
        # True when another *active* token already holds the candidate key.
        return CallbackToken.objects.filter(key=instance.key, is_active=True).exists()

    if not key_taken():
        # The initially generated key is already unique — nothing to do.
        return

    # Collision: regenerate up to N (default=3) times before giving up.
    for _attempt in range(api_settings.PASSWORDLESS_TOKEN_GENERATION_ATTEMPTS):
        instance.key = generate_numeric_token()
        if not key_taken():
            return

    # A unique value wasn't found after retrying.
    raise ValidationError("Couldn't create a unique token even after retrying.")
User = get_user_model()


@receiver(signals.pre_save, sender=User)
def update_alias_verification(sender, instance, **kwargs):
    """
    Flags a user's email as unverified if they change it.
    Optionally sends a verification token to the new endpoint.
    """
    if isinstance(instance, User):
        if instance.id:
            if api_settings.PASSWORDLESS_USER_MARK_EMAIL_VERIFIED is True:
                """
                For marking email aliases as not verified when a user changes it.
                """
                email_field = api_settings.PASSWORDLESS_USER_EMAIL_FIELD_NAME
                email_verified_field = api_settings.PASSWORDLESS_USER_EMAIL_VERIFIED_FIELD_NAME

                # Verify that this is an existing instance and not a new one.
                try:
                    user_old = User.objects.get(id=instance.id)  # Pre-save object
                    instance_email = getattr(instance, email_field)  # Incoming Email
                    old_email = getattr(user_old, email_field)  # Pre-save object email

                    if instance_email != old_email and instance_email != "" and instance_email is not None:
                        # Email changed, verification should be flagged
                        setattr(instance, email_verified_field, False)
                        if api_settings.PASSWORDLESS_AUTO_SEND_VERIFICATION_TOKEN is True:
                            email_subject = api_settings.PASSWORDLESS_EMAIL_VERIFICATION_SUBJECT
                            email_plaintext = api_settings.PASSWORDLESS_EMAIL_VERIFICATION_PLAINTEXT_MESSAGE
                            email_html = api_settings.PASSWORDLESS_EMAIL_VERIFICATION_TOKEN_HTML_TEMPLATE_NAME
                            message_payload = {'email_subject': email_subject,
                                               'email_plaintext': email_plaintext,
                                               'email_html': email_html}
                            success = TokenService.send_token(instance, 'email', CallbackToken.TOKEN_TYPE_VERIFY, **message_payload)

                            if success:
                                logger.info('drfpasswordless: Successfully sent email on updated address: %s'
                                            % instance_email)
                            else:
                                logger.info('drfpasswordless: Failed to send email to updated address: %s'
                                            % instance_email)

                except User.DoesNotExist:
                    # User probably is just initially being created
                    # NOTE(review): this ``return`` also skips the mobile
                    # block below, unlike the mobile branch's ``pass`` —
                    # probably harmless (same lookup would fail), but
                    # inconsistent; confirm intent.
                    return

            if api_settings.PASSWORDLESS_USER_MARK_MOBILE_VERIFIED is True:
                """
                For marking mobile aliases as not verified when a user changes it.
                """
                mobile_field = api_settings.PASSWORDLESS_USER_MOBILE_FIELD_NAME
                mobile_verified_field = api_settings.PASSWORDLESS_USER_MOBILE_VERIFIED_FIELD_NAME

                # Verify that this is an existing instance and not a new one.
                try:
                    user_old = User.objects.get(id=instance.id)  # Pre-save object
                    instance_mobile = getattr(instance, mobile_field)  # Incoming mobile
                    old_mobile = getattr(user_old, mobile_field)  # Pre-save object mobile

                    if instance_mobile != old_mobile and instance_mobile != "" and instance_mobile is not None:
                        # Mobile changed, verification should be flagged
                        setattr(instance, mobile_verified_field, False)
                        if api_settings.PASSWORDLESS_AUTO_SEND_VERIFICATION_TOKEN is True:
                            mobile_message = api_settings.PASSWORDLESS_MOBILE_MESSAGE
                            message_payload = {'mobile_message': mobile_message}
                            success = TokenService.send_token(instance, 'mobile', CallbackToken.TOKEN_TYPE_VERIFY, **message_payload)

                            if success:
                                logger.info('drfpasswordless: Successfully sent SMS on updated mobile: %s'
                                            % instance_mobile)
                            else:
                                logger.info('drfpasswordless: Failed to send SMS to updated mobile: %s'
                                            % instance_mobile)

                except User.DoesNotExist:
                    # User probably is just initially being created
                    pass
|
{"/tests/test_verification.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/serializers.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/test_authentication.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/services.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/signals.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/services.py"], "/drfpasswordless/apps.py": ["/drfpasswordless/signals.py"], "/drfpasswordless/migrations/0004_auto_20200125_0853.py": ["/drfpasswordless/models.py"], "/drfpasswordless/utils.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py"], "/drfpasswordless/admin.py": ["/drfpasswordless/models.py"], "/drfpasswordless/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/views.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/serializers.py", "/drfpasswordless/services.py"]}
|
3,661
|
aaronn/django-rest-framework-passwordless
|
refs/heads/master
|
/drfpasswordless/apps.py
|
from django.apps import AppConfig
from django.utils.translation import gettext_lazy as _
class DrfpasswordlessConfig(AppConfig):
    """App configuration; registers signal handlers when Django starts."""
    name = 'drfpasswordless'
    # Bug fix: Django reads ``verbose_name`` (not ``verbose``) for the
    # human-readable app name, so the original attribute was silently ignored.
    verbose_name = _("DRF Passwordless")

    def ready(self):
        # Imported for its side effect: connects the @receiver handlers.
        import drfpasswordless.signals
|
{"/tests/test_verification.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/serializers.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/test_authentication.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/services.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/signals.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/services.py"], "/drfpasswordless/apps.py": ["/drfpasswordless/signals.py"], "/drfpasswordless/migrations/0004_auto_20200125_0853.py": ["/drfpasswordless/models.py"], "/drfpasswordless/utils.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py"], "/drfpasswordless/admin.py": ["/drfpasswordless/models.py"], "/drfpasswordless/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/views.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/serializers.py", "/drfpasswordless/services.py"]}
|
3,662
|
aaronn/django-rest-framework-passwordless
|
refs/heads/master
|
/drfpasswordless/migrations/0004_auto_20200125_0853.py
|
# Generated by Django 3.0.2 on 2020-01-25 08:53
from django.db import migrations, models
import drfpasswordless.models
class Migration(migrations.Migration):
    # Switches the token ``key`` default to the generate_numeric_token
    # callable (max 6 chars) and enforces that (is_active, key, type)
    # combinations are unique at the database level.

    dependencies = [
        ('drfpasswordless', '0003_callbacktoken_type'),
    ]

    operations = [
        migrations.AlterField(
            model_name='callbacktoken',
            name='key',
            field=models.CharField(default=drfpasswordless.models.generate_numeric_token, max_length=6),
        ),
        migrations.AlterUniqueTogether(
            name='callbacktoken',
            unique_together={('is_active', 'key', 'type')},
        ),
    ]
|
{"/tests/test_verification.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/serializers.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/test_authentication.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/services.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/signals.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/services.py"], "/drfpasswordless/apps.py": ["/drfpasswordless/signals.py"], "/drfpasswordless/migrations/0004_auto_20200125_0853.py": ["/drfpasswordless/models.py"], "/drfpasswordless/utils.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py"], "/drfpasswordless/admin.py": ["/drfpasswordless/models.py"], "/drfpasswordless/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/views.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/serializers.py", "/drfpasswordless/services.py"]}
|
3,663
|
aaronn/django-rest-framework-passwordless
|
refs/heads/master
|
/drfpasswordless/utils.py
|
import logging
import os
from django.contrib.auth import get_user_model
from django.core.exceptions import PermissionDenied
from django.core.mail import send_mail
from django.template import loader
from django.utils import timezone
from rest_framework.authtoken.models import Token
from drfpasswordless.models import CallbackToken
from drfpasswordless.settings import api_settings
logger = logging.getLogger(__name__)
User = get_user_model()
def authenticate_by_token(callback_token):
    """
    Exchange an active auth-type callback token for its user.

    Consumes the token (marks it inactive) and returns the user on
    success. Returns None when the token or user cannot be found, or
    when permission is denied somewhere in the lookup.
    """
    try:
        token = CallbackToken.objects.get(
            key=callback_token,
            is_active=True,
            type=CallbackToken.TOKEN_TYPE_AUTH,
        )
        # Re-fetching the user designates a successful authentication.
        token.user = User.objects.get(pk=token.user.pk)
        token.is_active = False  # single use: consume this token
        token.save()
        return token.user
    except CallbackToken.DoesNotExist:
        logger.debug("drfpasswordless: Challenged with a callback token that doesn't exist.")
    except User.DoesNotExist:
        logger.debug("drfpasswordless: Authenticated user somehow doesn't exist.")
    except PermissionDenied:
        logger.debug("drfpasswordless: Permission denied while authenticating.")
    return None
def create_callback_token_for_user(user, alias_type, token_type):
    """
    Create (or, for demo users, reuse) a CallbackToken for ``user``.

    ``alias_type`` is 'email' or 'mobile'; the matching user field name
    is resolved from settings. Demo users declared in
    PASSWORDLESS_DEMO_USERS reuse their existing token when present,
    otherwise get one created with their fixed demo key.
    """
    alias_type_u = alias_type.upper()
    # Which user attribute the token will be delivered to.
    to_alias_field = getattr(api_settings, f'PASSWORDLESS_USER_{alias_type_u}_FIELD_NAME')

    if user.pk in api_settings.PASSWORDLESS_DEMO_USERS:
        token = CallbackToken.objects.filter(user=user).first()
        if token:
            return token
        return CallbackToken.objects.create(
            user=user,
            key=api_settings.PASSWORDLESS_DEMO_USERS[user.pk],
            to_alias_type=alias_type_u,
            to_alias=getattr(user, to_alias_field),
            type=token_type,
        )

    # objects.create() never returns None, so the original
    # ``token = None`` / ``if token is not None`` guard was dead code.
    return CallbackToken.objects.create(
        user=user,
        to_alias_type=alias_type_u,
        to_alias=getattr(user, to_alias_field),
        type=token_type,
    )
def validate_token_age(callback_token):
    """
    Returns True if a given token is within the age expiration limit.

    Expired tokens are deactivated as a side effect; unknown keys simply
    return False. Demo-user tokens never expire.
    """
    try:
        token = CallbackToken.objects.get(key=callback_token, is_active=True)
    except CallbackToken.DoesNotExist:
        # No valid token.
        return False

    # Demo users keep a permanent token.
    if token.user.pk in api_settings.PASSWORDLESS_DEMO_USERS:
        return True

    age_seconds = (timezone.now() - token.created_at).total_seconds()
    if age_seconds <= api_settings.PASSWORDLESS_TOKEN_EXPIRE_TIME:
        return True

    # Too old — invalidate the token before reporting failure.
    token.is_active = False
    token.save()
    return False
def verify_user_alias(user, token):
    """
    Marks a user's contact point as verified depending on accepted token type.

    Returns False for unknown alias types without touching the user;
    otherwise saves the user and returns True (the verified flag is only
    set when the token's alias still matches the user's current value).
    """
    field_names = {
        'EMAIL': (api_settings.PASSWORDLESS_USER_EMAIL_FIELD_NAME,
                  api_settings.PASSWORDLESS_USER_EMAIL_VERIFIED_FIELD_NAME),
        'MOBILE': (api_settings.PASSWORDLESS_USER_MOBILE_FIELD_NAME,
                   api_settings.PASSWORDLESS_USER_MOBILE_VERIFIED_FIELD_NAME),
    }
    try:
        alias_field, verified_field = field_names[token.to_alias_type]
    except KeyError:
        # Unrecognized alias type: nothing verified, nothing saved.
        return False

    if token.to_alias == getattr(user, alias_field):
        setattr(user, verified_field, True)
    user.save()
    return True
def inject_template_context(context):
    """
    Run every configured context processor and merge its result into
    ``context`` (mutated in place and returned for convenience).
    """
    for make_extra_context in api_settings.PASSWORDLESS_CONTEXT_PROCESSORS:
        context.update(make_extra_context())
    return context
def send_email_with_callback_token(user, email_token, **kwargs):
    """
    Sends a Email to user.email.
    Passes silently without sending in test environment

    Subject/body/template can be overridden via kwargs; settings supply
    the defaults. Returns True on apparent success, False when no
    sender address is configured or when sending raises.
    """
    try:
        if not api_settings.PASSWORDLESS_EMAIL_NOREPLY_ADDRESS:
            # Refuse to send without a configured from-address.
            logger.debug("Failed to send token email. Missing PASSWORDLESS_EMAIL_NOREPLY_ADDRESS.")
            return False

        # Get email subject and message, falling back to settings defaults.
        subject = kwargs.get('email_subject',
                             api_settings.PASSWORDLESS_EMAIL_SUBJECT)
        plaintext = kwargs.get('email_plaintext',
                               api_settings.PASSWORDLESS_EMAIL_PLAINTEXT_MESSAGE)
        html_template = kwargs.get('email_html',
                                   api_settings.PASSWORDLESS_EMAIL_TOKEN_HTML_TEMPLATE_NAME)

        # Inject context if user specifies.
        context = inject_template_context({'callback_token': email_token.key, })
        html_message = loader.render_to_string(html_template, context,)

        send_mail(
            subject,
            plaintext % email_token.key,
            api_settings.PASSWORDLESS_EMAIL_NOREPLY_ADDRESS,
            [getattr(user, api_settings.PASSWORDLESS_USER_EMAIL_FIELD_NAME)],
            fail_silently=False,
            html_message=html_message,
        )
        return True
    except Exception as e:
        logger.debug("Failed to send token email to user: %d."
                     "Possibly no email on user object. Email entered was %s" %
                     (user.id, getattr(user, api_settings.PASSWORDLESS_USER_EMAIL_FIELD_NAME)))
        logger.debug(e)
        return False
def send_sms_with_callback_token(user, mobile_token, **kwargs):
    """
    Sends a SMS to user.mobile via Twilio.
    Passes silently without sending in test environment.

    Returns True on apparent success, False on any failure: missing
    sender number, missing twilio install/credentials, or a send error.
    """
    if api_settings.PASSWORDLESS_TEST_SUPPRESSION is True:
        # we assume success to prevent spamming SMS during testing.
        # even if you have suppression on– you must provide a number if you have mobile selected.
        if api_settings.PASSWORDLESS_MOBILE_NOREPLY_NUMBER is None:
            return False
        return True

    base_string = kwargs.get('mobile_message', api_settings.PASSWORDLESS_MOBILE_MESSAGE)

    try:
        if api_settings.PASSWORDLESS_MOBILE_NOREPLY_NUMBER:
            # We need a sending number to send properly
            from twilio.rest import Client
            twilio_client = Client(os.environ['TWILIO_ACCOUNT_SID'], os.environ['TWILIO_AUTH_TOKEN'])

            to_number = getattr(user, api_settings.PASSWORDLESS_USER_MOBILE_FIELD_NAME)
            if to_number.__class__.__name__ == 'PhoneNumber':
                # django-phonenumber-field objects must be stringified first.
                to_number = to_number.__str__()

            twilio_client.messages.create(
                body=base_string % mobile_token.key,
                to=to_number,
                from_=api_settings.PASSWORDLESS_MOBILE_NOREPLY_NUMBER
            )
            return True
        else:
            logger.debug("Failed to send token sms. Missing PASSWORDLESS_MOBILE_NOREPLY_NUMBER.")
            return False
    except ImportError:
        logger.debug("Couldn't import Twilio client. Is twilio installed?")
        return False
    except KeyError:
        logger.debug("Couldn't send SMS."
                     "Did you set your Twilio account tokens and specify a PASSWORDLESS_MOBILE_NOREPLY_NUMBER?")
        # Bug fix: this branch previously fell through and returned None;
        # return an explicit False like every other failure path.
        return False
    except Exception as e:
        logger.debug("Failed to send token SMS to user: {}. "
                     "Possibly no mobile number on user object or the twilio package isn't set up yet. "
                     "Number entered was {}".format(user.id, getattr(user, api_settings.PASSWORDLESS_USER_MOBILE_FIELD_NAME)))
        logger.debug(e)
        return False
def create_authentication_token(user):
    """Default token creator: return the (token, created) pair for *user*."""
    pair = Token.objects.get_or_create(user=user)
    return pair
|
{"/tests/test_verification.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/serializers.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/test_authentication.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/services.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/signals.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/services.py"], "/drfpasswordless/apps.py": ["/drfpasswordless/signals.py"], "/drfpasswordless/migrations/0004_auto_20200125_0853.py": ["/drfpasswordless/models.py"], "/drfpasswordless/utils.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py"], "/drfpasswordless/admin.py": ["/drfpasswordless/models.py"], "/drfpasswordless/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/views.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/serializers.py", "/drfpasswordless/services.py"]}
|
3,664
|
aaronn/django-rest-framework-passwordless
|
refs/heads/master
|
/drfpasswordless/admin.py
|
from django.contrib import admin
from django.urls import reverse
from drfpasswordless.models import CallbackToken
class UserLinkMixin(object):
    """
    A mixin to add a linkable list_display user field.
    """
    # Name of the generated list_display column.
    LINK_TO_USER_FIELD = 'link_to_user'

    def link_to_user(self, obj):
        # Builds an admin change-page link for obj.user.
        # NOTE(review): the 'admin:users_user_change' route assumes the user
        # model lives in an app labelled 'users' — confirm for this project.
        link = reverse('admin:users_user_change', args=[obj.user.id])
        return u'<a href={}>{}</a>'.format(link, obj.user.username)
    # NOTE(review): ``allow_tags`` was removed in Django 2.0; on modern
    # Django the HTML above renders escaped unless format_html is used.
    link_to_user.allow_tags = True
    link_to_user.short_description = 'User'
class AbstractCallbackTokenInline(admin.StackedInline):
    """Read-only inline listing a user's callback tokens."""
    # No addable rows — tokens are created by the auth flow, not by admins.
    max_num = 0
    extra = 0
    readonly_fields = ('created_at', 'key', 'type', 'is_active')
    fields = ('created_at', 'user', 'key', 'type', 'is_active')
class CallbackInline(AbstractCallbackTokenInline):
    """Concrete inline bound to the CallbackToken model."""
    model = CallbackToken
class AbstractCallbackTokenAdmin(UserLinkMixin, admin.ModelAdmin):
    """Base ModelAdmin for callback tokens with a linked-user column."""
    readonly_fields = ('created_at', 'user', 'key', 'type', 'to_alias_type')
    list_display = ('created_at', UserLinkMixin.LINK_TO_USER_FIELD, 'key', 'type', 'is_active', 'to_alias_type')
    fields = ('created_at', 'user', 'key', 'type', 'is_active', 'to_alias_type')
    extra = 0
|
{"/tests/test_verification.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/serializers.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/test_authentication.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/services.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/signals.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/services.py"], "/drfpasswordless/apps.py": ["/drfpasswordless/signals.py"], "/drfpasswordless/migrations/0004_auto_20200125_0853.py": ["/drfpasswordless/models.py"], "/drfpasswordless/utils.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py"], "/drfpasswordless/admin.py": ["/drfpasswordless/models.py"], "/drfpasswordless/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/views.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/serializers.py", "/drfpasswordless/services.py"]}
|
3,665
|
aaronn/django-rest-framework-passwordless
|
refs/heads/master
|
/drfpasswordless/urls.py
|
from drfpasswordless.settings import api_settings
from django.urls import path
from drfpasswordless.views import (
ObtainEmailCallbackToken,
ObtainMobileCallbackToken,
ObtainAuthTokenFromCallbackToken,
VerifyAliasFromCallbackToken,
ObtainEmailVerificationCallbackToken,
ObtainMobileVerificationCallbackToken,
)
app_name = 'drfpasswordless'

urlpatterns = [
    # Request a callback token (URL prefix comes from settings, default 'auth/').
    path(api_settings.PASSWORDLESS_AUTH_PREFIX + 'email/', ObtainEmailCallbackToken.as_view(), name='auth_email'),
    path(api_settings.PASSWORDLESS_AUTH_PREFIX + 'mobile/', ObtainMobileCallbackToken.as_view(), name='auth_mobile'),
    # Exchange a callback token for an auth token.
    path(api_settings.PASSWORDLESS_AUTH_PREFIX + 'token/', ObtainAuthTokenFromCallbackToken.as_view(), name='auth_token'),
    # Alias-verification endpoints (prefix default 'auth/verify/').
    path(api_settings.PASSWORDLESS_VERIFY_PREFIX + 'email/', ObtainEmailVerificationCallbackToken.as_view(), name='verify_email'),
    path(api_settings.PASSWORDLESS_VERIFY_PREFIX + 'mobile/', ObtainMobileVerificationCallbackToken.as_view(), name='verify_mobile'),
    path(api_settings.PASSWORDLESS_VERIFY_PREFIX, VerifyAliasFromCallbackToken.as_view(), name='verify_token'),
]
|
{"/tests/test_verification.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/serializers.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/test_authentication.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/services.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/signals.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/services.py"], "/drfpasswordless/apps.py": ["/drfpasswordless/signals.py"], "/drfpasswordless/migrations/0004_auto_20200125_0853.py": ["/drfpasswordless/models.py"], "/drfpasswordless/utils.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py"], "/drfpasswordless/admin.py": ["/drfpasswordless/models.py"], "/drfpasswordless/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/views.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/serializers.py", "/drfpasswordless/services.py"]}
|
3,666
|
aaronn/django-rest-framework-passwordless
|
refs/heads/master
|
/drfpasswordless/views.py
|
import logging
from django.utils.module_loading import import_string
from rest_framework import parsers, renderers, status
from rest_framework.response import Response
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.views import APIView
from drfpasswordless.models import CallbackToken
from drfpasswordless.settings import api_settings
from drfpasswordless.serializers import (
EmailAuthSerializer,
MobileAuthSerializer,
CallbackTokenAuthSerializer,
CallbackTokenVerificationSerializer,
EmailVerificationSerializer,
MobileVerificationSerializer,
)
from drfpasswordless.services import TokenService
logger = logging.getLogger(__name__)
class AbstractBaseObtainCallbackToken(APIView):
    """
    This returns a 6-digit callback token we can trade for a user's Auth Token.

    Subclasses supply ``serializer_class``, ``alias_type``, ``token_type``
    and an optional ``message_payload`` forwarded to the token sender.
    """
    success_response = "A login token has been sent to you."
    failure_response = "Unable to send you a login code. Try again later."

    message_payload = {}

    @property
    def serializer_class(self):
        # Our serializer depending on type
        raise NotImplementedError

    @property
    def alias_type(self):
        # Alias Type
        raise NotImplementedError

    @property
    def token_type(self):
        # Token Type
        raise NotImplementedError

    def post(self, request, *args, **kwargs):
        if self.alias_type.upper() not in api_settings.PASSWORDLESS_AUTH_TYPES:
            # Only allow auth types allowed in settings.
            return Response(status=status.HTTP_404_NOT_FOUND)

        serializer = self.serializer_class(data=request.data, context={'request': request})
        if serializer.is_valid(raise_exception=True):
            # Validate -
            user = serializer.validated_data['user']
            # Create and send callback token
            success = TokenService.send_token(user, self.alias_type, self.token_type, **self.message_payload)

            # Respond With Success Or Failure of Sent
            if success:
                status_code = status.HTTP_200_OK
                response_detail = self.success_response
            else:
                status_code = status.HTTP_400_BAD_REQUEST
                response_detail = self.failure_response
            return Response({'detail': response_detail}, status=status_code)
        else:
            # Unreachable in practice (raise_exception=True), but if hit we
            # must return the actual validation failures. Bug fix:
            # ``serializer.error_messages`` is a field's default message
            # catalogue, not the validation errors — use ``errors``.
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class ObtainEmailCallbackToken(AbstractBaseObtainCallbackToken):
    """Requests an auth callback token delivered by email."""
    permission_classes = (AllowAny,)
    serializer_class = EmailAuthSerializer
    success_response = "A login token has been sent to your email."
    failure_response = "Unable to email you a login code. Try again later."

    alias_type = 'email'
    token_type = CallbackToken.TOKEN_TYPE_AUTH

    # Email content defaults, resolved from settings at import time.
    email_subject = api_settings.PASSWORDLESS_EMAIL_SUBJECT
    email_plaintext = api_settings.PASSWORDLESS_EMAIL_PLAINTEXT_MESSAGE
    email_html = api_settings.PASSWORDLESS_EMAIL_TOKEN_HTML_TEMPLATE_NAME
    message_payload = {'email_subject': email_subject,
                       'email_plaintext': email_plaintext,
                       'email_html': email_html}
class ObtainMobileCallbackToken(AbstractBaseObtainCallbackToken):
    """Requests an auth callback token delivered by SMS."""
    permission_classes = (AllowAny,)
    serializer_class = MobileAuthSerializer
    success_response = "We texted you a login code."
    failure_response = "Unable to send you a login code. Try again later."

    alias_type = 'mobile'
    token_type = CallbackToken.TOKEN_TYPE_AUTH

    # SMS body default, resolved from settings at import time.
    mobile_message = api_settings.PASSWORDLESS_MOBILE_MESSAGE
    message_payload = {'mobile_message': mobile_message}
class ObtainEmailVerificationCallbackToken(AbstractBaseObtainCallbackToken):
    """Requests a verification callback token for the logged-in user's email."""
    permission_classes = (IsAuthenticated,)
    serializer_class = EmailVerificationSerializer
    success_response = "A verification token has been sent to your email."
    failure_response = "Unable to email you a verification code. Try again later."

    alias_type = 'email'
    token_type = CallbackToken.TOKEN_TYPE_VERIFY

    # Verification-email content defaults, resolved at import time.
    email_subject = api_settings.PASSWORDLESS_EMAIL_VERIFICATION_SUBJECT
    email_plaintext = api_settings.PASSWORDLESS_EMAIL_VERIFICATION_PLAINTEXT_MESSAGE
    email_html = api_settings.PASSWORDLESS_EMAIL_VERIFICATION_TOKEN_HTML_TEMPLATE_NAME
    message_payload = {
        'email_subject': email_subject,
        'email_plaintext': email_plaintext,
        'email_html': email_html
    }
class ObtainMobileVerificationCallbackToken(AbstractBaseObtainCallbackToken):
    """Requests a verification callback token for the logged-in user's mobile."""
    permission_classes = (IsAuthenticated,)
    serializer_class = MobileVerificationSerializer
    success_response = "We texted you a verification code."
    failure_response = "Unable to send you a verification code. Try again later."

    alias_type = 'mobile'
    token_type = CallbackToken.TOKEN_TYPE_VERIFY

    # SMS body default, resolved from settings at import time.
    mobile_message = api_settings.PASSWORDLESS_MOBILE_MESSAGE
    message_payload = {'mobile_message': mobile_message}
class AbstractBaseObtainAuthToken(APIView):
    """
    This is a duplicate of rest_framework's own ObtainAuthToken method.
    Instead, this returns an Auth Token based on our 6 digit callback token and source.
    """
    serializer_class = None

    def post(self, request, *args, **kwargs):
        serializer = self.serializer_class(data=request.data)
        if serializer.is_valid(raise_exception=True):
            user = serializer.validated_data['user']
            # Create (or fetch) an auth token via the configured creator.
            token_creator = import_string(api_settings.PASSWORDLESS_AUTH_TOKEN_CREATOR)
            (token, _) = token_creator(user)

            if token:
                TokenSerializer = import_string(api_settings.PASSWORDLESS_AUTH_TOKEN_SERIALIZER)
                token_serializer = TokenSerializer(data=token.__dict__, partial=True)
                if token_serializer.is_valid():
                    # Return our key for consumption.
                    return Response(token_serializer.data, status=status.HTTP_200_OK)
                else:
                    # Bug fix: log the failing serializer's actual errors;
                    # the original logged ``serializer.error_messages`` —
                    # the wrong serializer and the wrong attribute.
                    logger.error("Couldn't log in unknown user. Errors on serializer: {}".format(token_serializer.errors))
        return Response({'detail': 'Couldn\'t log you in. Try again later.'}, status=status.HTTP_400_BAD_REQUEST)
class ObtainAuthTokenFromCallbackToken(AbstractBaseObtainAuthToken):
    """
    This is a duplicate of rest_framework's own ObtainAuthToken method.
    Instead, this returns an Auth Token based on our callback token and source.
    """
    # Anyone may exchange a valid callback token for an auth token.
    permission_classes = (AllowAny,)

    serializer_class = CallbackTokenAuthSerializer
class VerifyAliasFromCallbackToken(APIView):
    """
    This verifies an alias on correct callback token entry using the same logic as auth.
    Should be refactored at some point.
    """
    # Security fix: the view verifies an alias for ``request.user``, yet had
    # no permission_classes — anonymous requests would pass user_id=None
    # into the serializer. Require an authenticated user.
    permission_classes = (IsAuthenticated,)
    serializer_class = CallbackTokenVerificationSerializer

    def post(self, request, *args, **kwargs):
        serializer = self.serializer_class(data=request.data, context={'user_id': self.request.user.id})
        if serializer.is_valid(raise_exception=True):
            return Response({'detail': 'Alias verified.'}, status=status.HTTP_200_OK)
        else:
            # Unreachable with raise_exception=True; if reached, log the real
            # validation errors (``error_messages`` was the wrong attribute).
            logger.error("Couldn't verify unknown user. Errors on serializer: {}".format(serializer.errors))
            return Response({'detail': 'We couldn\'t verify this alias. Try again later.'}, status.HTTP_400_BAD_REQUEST)
|
{"/tests/test_verification.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/serializers.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/test_authentication.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/services.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/signals.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/services.py"], "/drfpasswordless/apps.py": ["/drfpasswordless/signals.py"], "/drfpasswordless/migrations/0004_auto_20200125_0853.py": ["/drfpasswordless/models.py"], "/drfpasswordless/utils.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py"], "/drfpasswordless/admin.py": ["/drfpasswordless/models.py"], "/drfpasswordless/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/views.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/serializers.py", "/drfpasswordless/services.py"]}
|
3,667
|
aaronn/django-rest-framework-passwordless
|
refs/heads/master
|
/drfpasswordless/migrations/0003_callbacktoken_type.py
|
# Generated by Django 3.0.2 on 2020-01-22 08:34
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds the AUTH/VERIFY ``type`` discriminator column to CallbackToken.

    dependencies = [
        ('drfpasswordless', '0002_auto_20200122_0424'),
    ]

    operations = [
        migrations.AddField(
            model_name='callbacktoken',
            name='type',
            field=models.CharField(choices=[('AUTH', 'Auth'), ('VERIFY', 'Verify')], default='VERIFY', max_length=20),
            preserve_default=False,
        ),
    ]
|
{"/tests/test_verification.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/serializers.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/test_authentication.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/services.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/signals.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/services.py"], "/drfpasswordless/apps.py": ["/drfpasswordless/signals.py"], "/drfpasswordless/migrations/0004_auto_20200125_0853.py": ["/drfpasswordless/models.py"], "/drfpasswordless/utils.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py"], "/drfpasswordless/admin.py": ["/drfpasswordless/models.py"], "/drfpasswordless/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/views.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/serializers.py", "/drfpasswordless/services.py"]}
|
3,668
|
aaronn/django-rest-framework-passwordless
|
refs/heads/master
|
/drfpasswordless/settings.py
|
from django.conf import settings
from rest_framework.settings import APISettings
# Optional per-project overrides supplied via the PASSWORDLESS_AUTH dict in
# Django settings; None means every key falls back to DEFAULTS below.
USER_SETTINGS = getattr(settings, 'PASSWORDLESS_AUTH', None)
DEFAULTS = {
    # Allowed auth types, can be EMAIL, MOBILE, or both.
    'PASSWORDLESS_AUTH_TYPES': ['EMAIL'],
    # URL Prefix for Authentication Endpoints
    'PASSWORDLESS_AUTH_PREFIX': 'auth/',
    # URL Prefix for Verification Endpoints
    'PASSWORDLESS_VERIFY_PREFIX': 'auth/verify/',
    # Amount of time that tokens last, in seconds
    'PASSWORDLESS_TOKEN_EXPIRE_TIME': 15 * 60,
    # The user's email field name
    'PASSWORDLESS_USER_EMAIL_FIELD_NAME': 'email',
    # The user's mobile field name
    'PASSWORDLESS_USER_MOBILE_FIELD_NAME': 'mobile',
    # Marks itself as verified the first time a user completes auth via token.
    # Automatically unmarks itself if email is changed.
    'PASSWORDLESS_USER_MARK_EMAIL_VERIFIED': False,
    'PASSWORDLESS_USER_EMAIL_VERIFIED_FIELD_NAME': 'email_verified',
    # Marks itself as verified the first time a user completes auth via token.
    # Automatically unmarks itself if mobile number is changed.
    'PASSWORDLESS_USER_MARK_MOBILE_VERIFIED': False,
    'PASSWORDLESS_USER_MOBILE_VERIFIED_FIELD_NAME': 'mobile_verified',
    # The email the callback token is sent from
    'PASSWORDLESS_EMAIL_NOREPLY_ADDRESS': None,
    # The email subject
    'PASSWORDLESS_EMAIL_SUBJECT': "Your Login Token",
    # A plaintext email message overridden by the html message. Takes one string.
    'PASSWORDLESS_EMAIL_PLAINTEXT_MESSAGE': "Enter this token to sign in: %s",
    # The email template name.
    'PASSWORDLESS_EMAIL_TOKEN_HTML_TEMPLATE_NAME': "passwordless_default_token_email.html",
    # Your twilio number that sends the callback tokens.
    'PASSWORDLESS_MOBILE_NOREPLY_NUMBER': None,
    # The message sent to mobile users logging in. Takes one string.
    'PASSWORDLESS_MOBILE_MESSAGE': "Use this code to log in: %s",
    # Registers previously unseen aliases as new users.
    'PASSWORDLESS_REGISTER_NEW_USERS': True,
    # Suppresses actual SMS for testing
    'PASSWORDLESS_TEST_SUPPRESSION': False,
    # Context Processors for Email Template
    'PASSWORDLESS_CONTEXT_PROCESSORS': [],
    # The verification email subject
    'PASSWORDLESS_EMAIL_VERIFICATION_SUBJECT': "Your Verification Token",
    # A plaintext verification email message overridden by the html message. Takes one string.
    'PASSWORDLESS_EMAIL_VERIFICATION_PLAINTEXT_MESSAGE': "Enter this verification code: %s",
    # The verification email template name.
    'PASSWORDLESS_EMAIL_VERIFICATION_TOKEN_HTML_TEMPLATE_NAME': "passwordless_default_verification_token_email.html",
    # The message sent to mobile users logging in. Takes one string.
    'PASSWORDLESS_MOBILE_VERIFICATION_MESSAGE': "Enter this verification code: %s",
    # Automatically send verification email or sms when a user changes their alias.
    'PASSWORDLESS_AUTO_SEND_VERIFICATION_TOKEN': False,
    # What function is called to construct an authentication tokens when
    # exchanging a passwordless token for a real user auth token.
    'PASSWORDLESS_AUTH_TOKEN_CREATOR': 'drfpasswordless.utils.create_authentication_token',
    # What function is called to construct a serializer for drf tokens when
    # exchanging a passwordless token for a real user auth token.
    'PASSWORDLESS_AUTH_TOKEN_SERIALIZER': 'drfpasswordless.serializers.TokenResponseSerializer',
    # A dictionary of demo user's primary key mapped to their static pin
    'PASSWORDLESS_DEMO_USERS': {},
    'PASSWORDLESS_EMAIL_CALLBACK': 'drfpasswordless.utils.send_email_with_callback_token',
    'PASSWORDLESS_SMS_CALLBACK': 'drfpasswordless.utils.send_sms_with_callback_token',
    # Token Generation Retry Count
    'PASSWORDLESS_TOKEN_GENERATION_ATTEMPTS': 3
}
# List of settings that may be in string import notation.
# NOTE(review): 'PASSWORDLESS_EMAIL_TOKEN_HTML_TEMPLATE' is not a key in
# DEFAULTS (the closest, '..._TEMPLATE_NAME', holds a template *filename*,
# not an import path), so that entry appears to be dead -- confirm against
# upstream before changing; "fixing" it to the _NAME key would make
# APISettings try to import the filename and fail.
IMPORT_STRINGS = (
    'PASSWORDLESS_EMAIL_TOKEN_HTML_TEMPLATE',
    'PASSWORDLESS_CONTEXT_PROCESSORS',
)
# DRF-style settings object: user overrides take precedence over DEFAULTS,
# and values for keys in IMPORT_STRINGS are resolved via import on access.
api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRINGS)
|
{"/tests/test_verification.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/serializers.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/test_authentication.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/services.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/signals.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/services.py"], "/drfpasswordless/apps.py": ["/drfpasswordless/signals.py"], "/drfpasswordless/migrations/0004_auto_20200125_0853.py": ["/drfpasswordless/models.py"], "/drfpasswordless/utils.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py"], "/drfpasswordless/admin.py": ["/drfpasswordless/models.py"], "/drfpasswordless/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/views.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/serializers.py", "/drfpasswordless/services.py"]}
|
3,669
|
aaronn/django-rest-framework-passwordless
|
refs/heads/master
|
/drfpasswordless/models.py
|
import uuid
from django.db import models
from django.conf import settings
import string
from django.utils.crypto import get_random_string
def generate_hex_token():
    """Return a 32-character lowercase hex string from a UUID1.

    NOTE: uuid1 embeds host/time information; callers relying on
    unpredictability should confirm this is acceptable.
    """
    token = uuid.uuid1()
    return token.hex
def generate_numeric_token():
    """Return a random 6-character numeric string.

    A string (not an int) is used so leading zeros survive.
    """
    return get_random_string(6, allowed_chars=string.digits)
class CallbackTokenManger(models.Manager):
    """Manager exposing active/inactive token querysets.

    (The 'Manger' spelling is kept -- it is the public name other code
    references.)
    """
    def active(self):
        # Tokens that may still be exchanged.
        return self.get_queryset().filter(is_active=True)
    def inactive(self):
        # Tokens already consumed or invalidated.
        return self.get_queryset().filter(is_active=False)
class AbstractBaseCallbackToken(models.Model):
    """
    Callback Authentication Tokens
    These tokens present a client with their authorization token
    on successful exchange of a random token (email) or token (for mobile)
    When a new token is created, older ones of the same type are invalidated
    via the pre_save signal in signals.py.
    """
    # Random UUID primary key, fixed at creation time.
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False, unique=True)
    created_at = models.DateTimeField(auto_now_add=True)
    user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name=None, on_delete=models.CASCADE)
    # Cleared when the token is invalidated (per the class docstring,
    # via the pre_save signal in signals.py).
    is_active = models.BooleanField(default=True)
    # The alias (email address or mobile number) this token was sent to,
    # and which kind of alias it was.
    to_alias = models.CharField(blank=True, max_length=254)
    to_alias_type = models.CharField(blank=True, max_length=20)
    objects = CallbackTokenManger()
    class Meta:
        abstract = True
        get_latest_by = 'created_at'
        ordering = ['-id']
    def __str__(self):
        # `key` is defined by concrete subclasses (e.g. CallbackToken.key).
        return str(self.key)
class CallbackToken(AbstractBaseCallbackToken):
    """
    Generates a random six digit number to be returned.
    """
    # A token is issued either for login (AUTH) or alias verification (VERIFY).
    TOKEN_TYPE_AUTH = 'AUTH'
    TOKEN_TYPE_VERIFY = 'VERIFY'
    TOKEN_TYPES = ((TOKEN_TYPE_AUTH, 'Auth'), (TOKEN_TYPE_VERIFY, 'Verify'))
    # Six random digits as a string so leading zeros are kept
    # (see generate_numeric_token).
    key = models.CharField(default=generate_numeric_token, max_length=6)
    type = models.CharField(max_length=20, choices=TOKEN_TYPES)
    class Meta(AbstractBaseCallbackToken.Meta):
        verbose_name = 'Callback Token'
|
{"/tests/test_verification.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/serializers.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/test_authentication.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/tests/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/services.py": ["/drfpasswordless/settings.py", "/drfpasswordless/utils.py"], "/drfpasswordless/signals.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/services.py"], "/drfpasswordless/apps.py": ["/drfpasswordless/signals.py"], "/drfpasswordless/migrations/0004_auto_20200125_0853.py": ["/drfpasswordless/models.py"], "/drfpasswordless/utils.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py"], "/drfpasswordless/admin.py": ["/drfpasswordless/models.py"], "/drfpasswordless/urls.py": ["/drfpasswordless/settings.py", "/drfpasswordless/views.py"], "/drfpasswordless/views.py": ["/drfpasswordless/models.py", "/drfpasswordless/settings.py", "/drfpasswordless/serializers.py", "/drfpasswordless/services.py"]}
|
3,681
|
mrpal39/ev_code
|
refs/heads/master
|
/awssam/django-blog/src/django_blog/blogroll.py
|
# -*- coding: utf-8 -*-
"""
-------------------------------------------------
File Name: blogroll
Description :
Author : JHao
date: 2020/10/9
-------------------------------------------------
Change Activity:
2020/10/9:
-------------------------------------------------
"""
__author__ = 'JHao'
# Blogroll ("friend link") entries rendered by the blog. Each dict carries
# the site URL, display name, and a short description. The name/desc values
# are user-facing Chinese text and are intentionally left untranslated.
sites = [
    {"url": "https://www.zaoshu.io/", "name": "造数", "desc": "智能云爬虫"},
    {"url": "http://brucedone.com/", "name": "大鱼的鱼塘", "desc": "大鱼的鱼塘 - 一个总会有收获的地方"},
    {"url": "http://www.songluyi.com/", "name": "灯塔水母", "desc": "灯塔水母"},
    {"url": "http://blog.topspeedsnail.com/", "name": "斗大的熊猫", "desc": "本博客专注于技术,Linux,编程,Python,C,Ubuntu、开源软件、Github等"},
    {"url": "https://www.urlteam.org/", "name": "URL-team", "desc": "URL-team"},
]
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,682
|
mrpal39/ev_code
|
refs/heads/master
|
/tc_zufang/tc_zufang-slave/tc_zufang/spiders/testip.py
|
# -*- coding: utf-8 -*-
from scrapy_redis.spiders import RedisSpider
from scrapy.selector import Selector
class testSpider(RedisSpider):
    """Throwaway proxy-check spider (Python 2 syntax).

    Start URLs are fed through the 'testip' redis key; parse() prints the
    <code> element found inside the page's "well" box.
    """
    name = 'testip'
    redis_key = 'testip'
    def parse(self,response):
        response_selector = Selector(response)
        # XPath for the first <p>'s <code> text inside the "well" div.
        # No .extract() call, so this holds selector objects, not strings.
        code=response_selector.xpath(r'//div[contains(@class,"well")]/p[1]/code/text()')
        # Python 2 print statement.
        print code
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,683
|
mrpal39/ev_code
|
refs/heads/master
|
/awssam/ideablog/core/models.py
|
from django.db import models
from tinymce.models import HTMLField
from django.utils import timezone
from django.contrib.auth.models import User
from django.urls import reverse
class Post(models.Model):
    """Blog post written by a Django auth User."""
    title = models.CharField(max_length=100)
    content = models.TextField()
    # Rich-text (TinyMCE) body.
    description =HTMLField()
    date_posted = models.DateTimeField(default=timezone.now)
    author = models.ForeignKey(User, on_delete=models.CASCADE)
    def __str__(self):
        return self.title
    def get_absolute_url(self):
        # Canonical detail URL for this post.
        return reverse('post-detail', kwargs={'pk': self.pk})
class feeds(models.Model):
    """Feed entry with thumbnail and rich-text description.

    (Lowercase class name kept -- it is referenced by existing migrations
    and other modules.)
    """
    title = models.CharField(max_length=100)
    # NOTE(review): max_length on a TextField is enforced only in forms,
    # not at the database level.
    overview = models.TextField(max_length=20)
    timestamp = models.DateTimeField(auto_now_add=True)
    description =HTMLField()
    thumbnail = models.ImageField()
    featured = models.BooleanField()
    # content = HTMLField()
    def __str__(self):
        return self.title
class Products(models.Model):
    """Product record (no __str__ defined)."""
    title =models.CharField(max_length=100)
    description =models.TextField(blank=True)
    # NOTE(review): max_digits=1000 is suspiciously large for a price --
    # likely intended to be ~10; confirm before changing, since altering it
    # requires a schema migration.
    price =models.DecimalField(decimal_places=2,max_digits=1000)
    summary =models.TextField(blank=False, null=False)
    # featured =models.BooleanField()
# featured =models.BooleanField()
class MyModel(models.Model):
    """Minimal example model holding a single TinyMCE HTML field."""
    ...
    content = HTMLField()
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,684
|
mrpal39/ev_code
|
refs/heads/master
|
/Web-UI/scrapyproject/scrapy_packages/rabbitmq/scheduler.py
|
import connection
import queue
from scrapy.utils.misc import load_object
from scrapy.utils.job import job_dir
# Defaults used when the crawler settings do not override them.
SCHEDULER_PERSIST = False
# NOTE(review): QUEUE_CLASS is never read in this module -- from_crawler
# hardcodes queue.SpiderQueue instead.
QUEUE_CLASS = 'queue.SpiderQueue'
IDLE_BEFORE_CLOSE = 0
class Scheduler(object):
    """Scrapy scheduler backed by a RabbitMQ queue.

    For ordinary spiders, requests are pushed to / popped from a per-spider
    RabbitMQ queue named "<spider>:requests". For link-generator spiders
    (crawler.spider.islinkgenerator truthy), from_crawler instead returns
    Scrapy's stock scheduler, so this class is bypassed entirely.
    """
    def __init__(self, server, persist,
                 queue_key, queue_cls, idle_before_close,
                 stats, *args, **kwargs):
        # server: RabbitMQ connection object (from connection.from_settings).
        # persist: when False, close() drops the queue contents.
        self.server = server
        self.persist = persist
        self.queue_key = queue_key
        self.queue_cls = queue_cls
        self.idle_before_close = idle_before_close
        self.stats = stats
    def __len__(self):
        # Number of pending requests (valid only after open()).
        return len(self.queue)
    @classmethod
    def from_crawler(cls, crawler):
        if not crawler.spider.islinkgenerator:
            # Normal spider: build the RabbitMQ-backed scheduler.
            settings = crawler.settings
            persist = settings.get('SCHEDULER_PERSIST', SCHEDULER_PERSIST)
            queue_key = "%s:requests" % crawler.spider.name
            queue_cls = queue.SpiderQueue
            idle_before_close = settings.get('SCHEDULER_IDLE_BEFORE_CLOSE', IDLE_BEFORE_CLOSE)
            server = connection.from_settings(settings, crawler.spider.name)
            stats = crawler.stats
            return cls(server, persist, queue_key, queue_cls, idle_before_close, stats)
        else:
            # Link generator: delegate to Scrapy's built-in scheduler,
            # constructed from the standard settings keys.
            settings = crawler.settings
            dupefilter_cls = load_object(settings['DUPEFILTER_CLASS'])
            dupefilter = dupefilter_cls.from_settings(settings)
            pqclass = load_object(settings['SCHEDULER_PRIORITY_QUEUE'])
            dqclass = load_object(settings['SCHEDULER_DISK_QUEUE'])
            mqclass = load_object(settings['SCHEDULER_MEMORY_QUEUE'])
            logunser = settings.getbool('LOG_UNSERIALIZABLE_REQUESTS', settings.getbool('SCHEDULER_DEBUG'))
            core_scheduler = load_object('scrapy.core.scheduler.Scheduler')
            return core_scheduler(dupefilter, jobdir=job_dir(settings), logunser=logunser,
                      stats=crawler.stats, pqclass=pqclass, dqclass=dqclass, mqclass=mqclass)
    def open(self, spider):
        self.spider = spider
        self.queue = self.queue_cls(self.server, spider, self.queue_key)
        if len(self.queue):
            spider.log("Resuming crawl (%d requests scheduled)" % len(self.queue))
    def close(self, reason):
        # Drop any pending requests unless persistence was requested,
        # then release the RabbitMQ connection.
        if not self.persist:
            self.queue.clear()
        connection.close(self.server)
    def enqueue_request(self, request):
        if self.stats:
            self.stats.inc_value('scheduler/enqueued/rabbitmq', spider=self.spider)
        self.queue.push(request)
    def next_request(self):
        request = self.queue.pop()
        if request and self.stats:
            self.stats.inc_value('scheduler/dequeued/rabbitmq', spider=self.spider)
        return request
    def has_pending_requests(self):
        return len(self) > 0
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,685
|
mrpal39/ev_code
|
refs/heads/master
|
/scrap/tutorial/scrap/spiders/testing.py
|
import scrapy
class MySpider(scrapy.Spider):
    """Minimal demo spider that dumps the names of all active settings."""
    name = 'myspider'
    start_urls = ['http://example.com']
    def parse(self, response):
        # self.settings is attached by the crawler; print its attribute keys.
        print(f"Existing settings: {self.settings.attributes.keys()}")
class MyExtension:
    """Demo extension showing settings access via from_crawler."""

    def __init__(self, log_is_enabled=False):
        # Announce at construction time whether logging is on.
        if log_is_enabled:
            print("log is enabled!")

    @classmethod
    def from_crawler(cls, crawler):
        """Instantiate from the crawler's LOG_ENABLED boolean setting."""
        return cls(crawler.settings.getbool('LOG_ENABLED'))
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,686
|
mrpal39/ev_code
|
refs/heads/master
|
/tc_zufang/tc_zufang/tc_zufang/spiders/tczufang_detail_spider.py
|
# -*- coding: utf-8 -*-
from scrapy_redis.spiders import RedisSpider
from scrapy.selector import Selector
from tc_zufang.utils.result_parse import list_first_item
from scrapy.http import Request
from tc_zufang.utils.InsertRedis import inserintotc,inserintota
import re
# NOTE(review): assigned but never used in this module.
defaultencoding = 'utf-8'
'''
58同城的爬虫
'''
# Inheriting from RedisSpider lets start_urls be fed from redis;
# inheriting from BaseSpider would require listing start_urls inline.
class TczufangSpider(RedisSpider):
name='basic'
start_urls=(
'http://dg.58.com/chuzu/',
'http://sw.58.com/chuzu/',
'http://sz.58.com/chuzu/',
'http://gz.58.com/chuzu/',
# 'http://fs.58.com/chuzu/',
# 'http://zs.58.com/chuzu/',
# 'http://zh.58.com/chuzu/',
# 'http://huizhou.58.com/chuzu/',
# 'http://jm.58.com/chuzu/',
# 'http://st.58.com/chuzu/',
# 'http://zhanjiang.58.com/chuzu/',
# 'http://zq.58.com/chuzu/',
# 'http://mm.58.com/chuzu/',
# 'http://jy.58.com/chuzu/',
# 'http://mz.58.com/chuzu/',
# 'http://qingyuan.58.com/chuzu/',
# 'http://yj.58.com/chuzu/',
# 'http://sg.58.com/chuzu/',
# 'http://heyuan.58.com/chuzu/',
# 'http://yf.58.com/chuzu/',
# 'http://chaozhou.58.com/chuzu/',
# 'http://taishan.58.com/chuzu/',
# 'http://yangchun.58.com/chuzu/',
# 'http://sd.58.com/chuzu/',
# 'http://huidong.58.com/chuzu/',
# 'http:// boluo.58.com/chuzu/',
# )
# redis_key = 'tczufangCrawler:start_urls'
#解析从start_urls下载返回的页面
#页面页面有两个目的:
#第一个:解析获取下一页的地址,将下一页的地址传递给爬虫调度器,以便作为爬虫的下一次请求
#第二个:获取详情页地址,再对详情页进行下一步的解析
redis_key = 'start_urls'
def parse(self, response):
#获取所访问的地址
response_url=re.findall('^http\:\/\/\w+\.58\.com',response.url)
response_selector = Selector(response)
next_link=list_first_item(response_selector.xpath(u'//div[contains(@class,"pager")]/a[contains(@class,"next")]/@href').extract())
detail_link=response_selector.xpath(u'//div[contains(@class,"listBox")]/ul[contains(@class,"listUl")]/li/@logr').extract()
if next_link:
if detail_link:
# print next_link
# yield Request(next_link,callback=self.parse)
inserintotc(next_link, 1)
print '#########[success] the next link ' + next_link + ' is insert into the redis queue#########'
for detail_link in response_selector.xpath(u'//div[contains(@class,"listBox")]/ul[contains(@class,"listUl")]/li/@logr').extract():
#gz_2_39755299868183_28191154595392_sortid:1486483205000 @ ses:busitime ^ desc @ pubid:5453707因为58同城的详情页做了爬取限制,所以由自己构造详情页id
#构造详情页url
# detail_link='http://dg.58.com/zufang/'+detail_link.split('_')[3]+'x.shtml'
detail_link = response_url[0]+'/zufang/' + detail_link.split('_')[3] + 'x.shtml'
#对详情页进行解析cd
if detail_link:
inserintota(detail_link,2)
print '[success] the detail link ' + detail_link + ' is insert into the redis queue'
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,687
|
mrpal39/ev_code
|
refs/heads/master
|
/scrap/properties/properties/pipelines.py
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
class PropertiesPipeline(object):
    """No-op item pipeline: passes every item through unchanged."""
    def process_item(self, item, spider):
        return item
# NOTE(review): these look like Scrapy *settings*, but Scrapy reads
# ITEM_PIPELINES / IMAGES_* from the project's settings module, not from
# pipelines.py -- as written they appear to have no effect; confirm and
# move to settings.py if they are meant to be active.
ITEM_PIPELINES = {
    'scrapy.pipelines.images.ImagesPipeline': 1,
    'properties.pipelines.geo.GeoPipeline': 400,
}
IMAGES_STORE = 'images'
IMAGES_THUMBS = { 'small': (30, 30) }
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,688
|
mrpal39/ev_code
|
refs/heads/master
|
/Web-UI/scrapyproject/forms.py
|
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from django.contrib.auth.forms import PasswordChangeForm
class CreateProject(forms.Form):
    """Single-field form asking for a new (slug-safe) project name."""
    projectname = forms.SlugField(label="Enter project name", max_length=50, required=True)
    # crispy-forms layout: POST with Create/Cancel buttons.
    helper = FormHelper()
    helper.form_method = 'POST'
    helper.add_input(Submit('submit', 'Create Project'))
    helper.add_input(Submit('cancel', 'Cancel', css_class='btn-default'))
class DeleteProject(forms.Form):
    """Field-less confirmation form for deleting a project."""
    helper = FormHelper()
    helper.form_method = 'POST'
    helper.add_input(Submit('submit', 'Confirm'))
    helper.add_input(Submit('cancel', 'Cancel', css_class='btn-default'))
class CreatePipeline(forms.Form):
    """Form for defining a pipeline: name, execution order, and body text."""
    pipelinename = forms.SlugField(label="Pipeline name", max_length=50, required=True)
    pipelineorder = forms.IntegerField(label="Order", required=True, min_value=1, max_value=900)
    pipelinefunction = forms.CharField(label="Pipeline function:", required=False, widget=forms.Textarea)
    # form_tag=False: caller renders its own <form> wrapper.
    helper = FormHelper()
    helper.form_tag = False
class LinkGenerator(forms.Form):
    """Free-text form holding the user's link-generator function source."""
    function = forms.CharField(label="Write your link generator function here:", required=False, widget=forms.Textarea)
    helper = FormHelper()
    helper.form_tag = False
class Scraper(forms.Form):
    """Free-text form holding the user's scraper function source."""
    function = forms.CharField(label="Write your scraper function here:", required=False, widget=forms.Textarea)
    helper = FormHelper()
    helper.form_tag = False
class ItemName(forms.Form):
    """Single-field form asking for a (slug-safe) item name."""
    itemname = forms.SlugField(label="Enter item name", max_length=50, required=True)
    helper = FormHelper()
    helper.form_tag = False
class FieldName(forms.Form):
    """Dynamic form: one base field plus N extra fields added at runtime.

    The caller passes extra=N; N additional CharFields named field_2..field_N+1
    are generated, and the count is round-tripped via a hidden input so the
    POST handler can rebuild the same form.
    """
    fieldname = forms.SlugField(label="Field 1", max_length=50, required=False)
    extra_field_count = forms.CharField(widget=forms.HiddenInput())
    helper = FormHelper()
    helper.form_tag = False
    def __init__(self, *args, **kwargs):
        # Pop our custom kwarg before the base Form sees it.
        extra_fields = kwargs.pop('extra', 0)
        super(FieldName, self).__init__(*args, **kwargs)
        self.fields['extra_field_count'].initial = extra_fields
        for index in range(int(extra_fields)):
            # generate extra fields in the number specified via extra_fields
            self.fields['field_{index}'.format(index=index+2)] = forms.CharField(required=False)
class ChangePass(PasswordChangeForm):
    """Django's stock password-change form with a crispy submit button."""
    helper = FormHelper()
    helper.form_method = 'POST'
    helper.add_input(Submit('submit', 'Change'))
class Settings(forms.Form):
    """Free-text form for editing project settings as raw text."""
    settings = forms.CharField(required=False, widget=forms.Textarea)
    helper = FormHelper()
    helper.form_tag = False
class ShareDB(forms.Form):
    """Form asking which user account to share the database with."""
    username = forms.CharField(label="Enter the account name for the user with whom you want to share the database", max_length=150, required=True)
    helper = FormHelper()
    helper.form_method = 'POST'
    helper.add_input(Submit('submit', 'Share'))
    helper.add_input(Submit('cancel', 'Cancel', css_class='btn-default'))
class ShareProject(forms.Form):
    """Form asking which user account to share the project with."""
    username = forms.CharField(label="Enter the account name for the user with whom you want to share the project", max_length=150, required=True)
    helper = FormHelper()
    helper.form_method = 'POST'
    helper.add_input(Submit('submit', 'Share'))
    helper.add_input(Submit('cancel', 'Cancel', css_class='btn-default'))
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,689
|
mrpal39/ev_code
|
refs/heads/master
|
/awssam/ideablog/core/migrations/0004_auto_20201113_0633.py
|
# Generated by Django 3.1.3 on 2020-11-13 06:33
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add feeds.description (optional TextField) and change feeds.overview
    to a TextField with a 20-char form-level limit."""
    dependencies = [
        ('core', '0003_auto_20201113_0620'),
    ]
    operations = [
        migrations.AddField(
            model_name='feeds',
            name='description',
            field=models.TextField(blank=True),
        ),
        migrations.AlterField(
            model_name='feeds',
            name='overview',
            field=models.TextField(max_length=20),
        ),
    ]
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,690
|
mrpal39/ev_code
|
refs/heads/master
|
/awssam/iam/users/urls.py
|
from django.conf.urls import url, include
import oauth2_provider.views as oauth2_views
from django.conf import settings
from .views import ApiEndpoint
from django.urls import include, path
# OAuth2 provider endpoints
# Core grant endpoints are always exposed; application- and token-management
# views are appended only when DEBUG is on.
oauth2_endpoint_views = [
    path('authorize/', oauth2_views.AuthorizationView.as_view(), name="authorize"),
    path('token/', oauth2_views.TokenView.as_view(), name="token"),
    path('revoke-token/', oauth2_views.RevokeTokenView.as_view(), name="revoke-token"),
]
if settings.DEBUG:
    # OAuth2 Application Management endpoints
    oauth2_endpoint_views += [
        path('applications/', oauth2_views.ApplicationList.as_view(), name="list"),
        path('applications/register/', oauth2_views.ApplicationRegistration.as_view(), name="register"),
        path('applications/<pk>/', oauth2_views.ApplicationDetail.as_view(), name="detail"),
        path('applications/<pk>/delete/', oauth2_views.ApplicationDelete.as_view(), name="delete"),
        path('applications/<pk>/update/', oauth2_views.ApplicationUpdate.as_view(), name="update"),
    ]
    # OAuth2 Token Management endpoints
    oauth2_endpoint_views += [
        path('authorized-tokens/', oauth2_views.AuthorizedTokensListView.as_view(), name="authorized-token-list"),
        path('authorized-tokens/<pk>/delete/', oauth2_views.AuthorizedTokenDeleteView.as_view(),
             name="authorized-token-delete"),
    ]
urlpatterns = [
    # OAuth 2 endpoints:
    # All oauth2 views are namespaced under /o/ as 'oauth2_provider'.
    path('o/', include(oauth2_endpoint_views, namespace="oauth2_provider")),
    path('api/hello', ApiEndpoint.as_view()), # an example resource endpoint
]
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,691
|
mrpal39/ev_code
|
refs/heads/master
|
/Web-UI/mysite/views.py
|
from django.http import HttpResponse, Http404
from django.shortcuts import render
import datetime
from django.http import HttpResponseRedirect
from django.core.mail import send_mail
from django.contrib.auth.views import login as loginview
from registration.backends.simple import views
from django.contrib.auth import authenticate, get_user_model, login
from registration import signals
from scrapyproject.views import mongodb_user_creation, linux_user_creation
from scrapyproject.scrapy_packages import settings
# Python 2/3 compatibility: prefer the Python 3 ``urllib.parse`` module and
# fall back to the Python 2 locations when it is missing.
try:
    # Python 3
    from urllib.parse import urlparse
except ImportError:
    # Python 2
    from urlparse import urlparse

try:
    from urllib.parse import quote
except ImportError:
    # Fixed: was a bare ``except:``, which would also swallow
    # KeyboardInterrupt/SystemExit; only an ImportError is expected here,
    # mirroring the urlparse fallback above.
    from urllib import quote

# Resolve the active user model once (honours settings.AUTH_USER_MODEL).
User = get_user_model()
class MyRegistrationView(views.RegistrationView):
    """Registration view that also provisions auxiliary accounts.

    After the Django user is created, a matching MongoDB user is always
    created, and a local Linux account is created when
    ``settings.LINUX_USER_CREATION_ENABLED`` is set.
    """

    def register(self, form):
        """Create and authenticate the user, provision extra accounts,
        log the user in, and emit the ``user_registered`` signal.

        :param form: validated registration form
        :return: the newly authenticated user instance
        """
        new_user = form.save()
        new_user = authenticate(
            username=getattr(new_user, User.USERNAME_FIELD),
            password=form.cleaned_data['password1']
        )
        # Perform additional account creation here (MongoDB, local Unix accounts, etc.)
        mongodb_user_creation(getattr(new_user, User.USERNAME_FIELD), form.cleaned_data['password1'])
        if settings.LINUX_USER_CREATION_ENABLED:
            try:
                linux_user_creation(getattr(new_user, User.USERNAME_FIELD), form.cleaned_data['password1'])
            except Exception:
                # Best effort: a failed Linux account must not abort
                # registration.  Narrowed from a bare ``except:`` so that
                # KeyboardInterrupt/SystemExit still propagate.
                pass
        login(self.request, new_user)
        signals.user_registered.send(sender=self.__class__,
                                     user=new_user,
                                     request=self.request)
        return new_user

    def get_success_url(self, user):
        # Always land newly registered users on the project dashboard.
        return "/project"
def custom_login(request):
    """Show the login view, or redirect already-authenticated users to
    the project dashboard."""
    if not request.user.is_authenticated():
        return loginview(request)
    return HttpResponseRedirect('/project')
def custom_register(request):
    """Run the custom registration view, or redirect already-authenticated
    users to the project dashboard."""
    if not request.user.is_authenticated():
        view = MyRegistrationView.as_view()
        return view(request)
    return HttpResponseRedirect('/project')
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,692
|
mrpal39/ev_code
|
refs/heads/master
|
/Web-UI/scrapyproject/migrations/0005_auto_20170213_1053.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Replaces Project's single free-form ``settings`` field with two
    # dedicated settings blobs: one for the link generator, one for the
    # scraper.

    dependencies = [
        ('scrapyproject', '0004_pipeline_pipeline_function'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='project',
            name='settings',
        ),
        migrations.AddField(
            model_name='project',
            name='settings_link_generator',
            field=models.TextField(blank=True),
        ),
        migrations.AddField(
            model_name='project',
            name='settings_scraper',
            field=models.TextField(blank=True),
        ),
    ]
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,693
|
mrpal39/ev_code
|
refs/heads/master
|
/awssam/ideablog/core/migrations/0006_auto_20201114_0452.py
|
# Generated by Django 3.1.3 on 2020-11-14 04:52
from django.db import migrations, models
import tinymce.models
class Migration(migrations.Migration):
    # Introduces a standalone MyModel holding TinyMCE HTML content and drops
    # the now-unused ``content`` and ``description`` fields from Feeds.

    dependencies = [
        ('core', '0005_feeds_content'),
    ]

    operations = [
        migrations.CreateModel(
            name='MyModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('content', tinymce.models.HTMLField()),
            ],
        ),
        migrations.RemoveField(
            model_name='feeds',
            name='content',
        ),
        migrations.RemoveField(
            model_name='feeds',
            name='description',
        ),
    ]
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,694
|
mrpal39/ev_code
|
refs/heads/master
|
/Web-UI/examples/link_generator.py
|
# This script is written under the username admin, with project name Retrofm
# Change the class name AdminRetrofmSpider accordingly
import datetime

# Crawl window origin: first playlist date to fetch.
_start_date = datetime.date(2012, 12, 25)
# Copy of the origin date, kept unchanged while _start_date advances.
_initial_date = datetime.date(2012, 12, 25)
# Request priority counter; decremented once per generated day in parse().
_priority = 0
start_urls = ['http://retrofm.ru']
def parse(self, response):
    # Template method meant to be pasted into an ``AdminRetrofmSpider``
    # class (see the header comment): walks day by day from _start_date up
    # to today and, for each day, enqueues four playlist-page requests that
    # together cover the full 24 hours.
    # NOTE(review): relies on AdminRetrofmSpider, Request and
    # self.insert_link being provided by the hosting spider class — this
    # snippet does not run standalone.
    while AdminRetrofmSpider._start_date < self.datetime.date.today():
        AdminRetrofmSpider._priority -= 1
        AdminRetrofmSpider._start_date += self.datetime.timedelta(days=1)
        theurlstart = 'http://retrofm.ru/index.php?go=Playlist&date=%s' % (
            AdminRetrofmSpider._start_date.strftime("%d.%m.%Y"))
        theurls = []
        # Four URL-encoded HH:MM windows spanning the day (%3A == ':').
        theurls.append(theurlstart + '&time_start=17%3A00&time_stop=23%3A59')
        theurls.append(theurlstart + '&time_start=11%3A00&time_stop=17%3A01')
        theurls.append(theurlstart + '&time_start=05%3A00&time_stop=11%3A01')
        theurls.append(theurlstart + '&time_start=00%3A00&time_stop=05%3A01')
        for theurl in theurls:
            # dont_filter: the same URL may legitimately be revisited;
            # lower (more negative) priority => later days fetched later.
            request = Request(theurl, method="GET",
                              dont_filter=True, priority=(AdminRetrofmSpider._priority), callback=self.parse)
            self.insert_link(request)
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,695
|
mrpal39/ev_code
|
refs/heads/master
|
/myapi/devfile/core/forms.py
|
from django import forms
#Building a search view
class SearchForm(forms.Form):
    """Single-field form backing the search view."""

    query = forms.CharField()
class uploadForm(forms.ModelForm):
    # NOTE(review): declared as a ModelForm but no ``Meta.model`` is
    # attached — instantiating this form will fail until a model is set;
    # confirm the intended model or switch to ``forms.Form``.
    images = forms.ImageField()
# # from .forms import EmailPostForm, CommentForm , SearchForm
# User Repositories='https://libraries.io/api/github/:login/repositories?api_key=306cf1684a42e4be5ec0a1c60362c2ef'
# user=' https://libraries.io/api/github/andrew?api_key=306cf1684a42e4be5ec0a1c60362c2ef'
# Repository=' https://libraries.io/api/github/:owner/:name?api_key=306cf1684a42e4be5ec0a1c60362c2ef'
# =' https://libraries.io/api/github/gruntjs/grunt/projects?api_key=306cf1684a42e4be5ec0a1c60362c2ef '
# ProjectSearch=' https://libraries.io/api/search?q=grunt&api_key=306cf1684a42e4be5ec0a1c60362c2ef'
# Platforms= ' GET https://libraries.io/api/platforms?api_key=306cf1684a42e4be5ec0a1c60362c2ef '
# https://libraries.io/api/NPM/base62?api_key=306cf1684a42e4be5ec0a1c60362c2ef '
# ProjectDependen https://libraries.io/api/:platform/:name/:version/dependencies?api_key=306cf1684a42e4be5ec0a1c60362c2ef'
# ' https://libraries.io/api/NPM/base62/2.0.1/dependencies?api_key=306cf1684a42e4be5ec0a1c60362c2ef '
# DependentReposito= https://libraries.io/api/NPM/base62/dependent_repositories?api_key=306cf1684a42e4be5ec0a1c60362c2ef '
# ProjectContributo= https://libraries.io/api/NPM/base62/contributors?api_key=306cf1684a42e4be5ec0a1c60362c2ef '
# ProjectSourceRank='https://libraries.io/api/NPM/base62/sourcerank?api_key=306cf1684a42e4be5ec0a1c60362c2ef'
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,696
|
mrpal39/ev_code
|
refs/heads/master
|
/tc_zufang/tc_zufang-slave/tc_zufang/utils/result_parse.py
|
# -*- coding: utf-8 -*-
#如果没有下一页的地址则返回none
list_first_item = lambda x:x[0] if x else None
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,697
|
mrpal39/ev_code
|
refs/heads/master
|
/myapi/fullfeblog/blog/forms.py
|
from django import forms
from core.models import Comment
#Building a search view
class SearchForm(forms.Form):
    """Form for the blog search view."""

    query = forms.CharField()
class EmailPostForm(forms.Form):
    """Form used to share a post by e-mail."""

    name = forms.CharField(max_length=25)
    email = forms.EmailField()
    to = forms.EmailField()
    comments = forms.CharField(required=False, widget=forms.Textarea)
class CommentForm(forms.ModelForm):
    """Comment form: ``body`` comes from the Comment model, the remaining
    contact fields are declared explicitly."""

    url = forms.URLField(label='网址', required=False)
    email = forms.EmailField(label='电子邮箱', required=True)
    name = forms.CharField(
        label='姓名',
        widget=forms.TextInput(attrs={
            'value': "",
            'size': "30",
            'maxlength': "245",
            'aria-required': 'true',
        }))
    parent_comment_id = forms.IntegerField(widget=forms.HiddenInput, required=False)

    class Meta:
        model = Comment
        fields = ['body']
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,698
|
mrpal39/ev_code
|
refs/heads/master
|
/awssam/django-blog/src/blog/views.py
|
# -*- coding: utf-8 -*-
# Create your views here.
import json
from django.http import JsonResponse
from django_blog.util import PageInfo
from blog.models import Article, Comment
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render, get_object_or_404
def get_page(request):
    """Return the requested page number from the ``page`` query parameter,
    defaulting to 1 when it is missing or not a plain positive integer."""
    raw = request.GET.get("page")
    if raw and raw.isdigit():
        return int(raw)
    return 1
def index(request):
    """Home page: five most recent articles plus six most-viewed ones."""
    latest = Article.objects.all().order_by('-date_time')[0:5]
    most_viewed = Article.objects.all().order_by('-view')[0:6]
    return render(request, 'blog/index.html', {"blog_list": latest, "blog_hot": most_viewed})
def blog_list(request):
    """Paginated list of all articles.

    :param request: current request; page number is read from the query string
    :return: rendered list page
    """
    page = get_page(request)
    total = Article.objects.count()
    page_info = PageInfo(page, total)
    articles = Article.objects.all()[page_info.index_start: page_info.index_end]
    return render(request, 'blog/list.html', {"blog_list": articles, "page_info": page_info})
def category(request, name):
    """Paginated article list for one category.

    :param request: current request
    :param name: category name to filter on
    :return: rendered category page
    """
    page = get_page(request)
    matching = Article.objects.filter(category__name=name)
    page_info = PageInfo(page, matching.count())
    articles = matching[page_info.index_start: page_info.index_end]
    return render(request, 'blog/category.html', {"blog_list": articles, "page_info": page_info,
                                                  "category": name})
def tag(request, name):
    """Paginated article list for one tag.

    :param request: current request
    :param name: tag name to filter on
    :return: rendered tag page
    """
    page = get_page(request)
    matching = Article.objects.filter(tag__tag_name=name)
    page_info = PageInfo(page, matching.count())
    articles = matching[page_info.index_start: page_info.index_end]
    return render(request, 'blog/tag.html', {"blog_list": articles,
                                             "tag": name,
                                             "page_info": page_info})
def archive(request):
    """Article archive grouped by publication month, newest month first.

    :param request: current request
    :return: rendered archive page
    """
    blogs = Article.objects.values("id", "title", "date_time").order_by('-date_time')
    by_month = {}
    for blog in blogs:
        month_key = blog.get("date_time").strftime("%Y年%m月")
        by_month.setdefault(month_key, []).append(blog)
    data = sorted(
        [{"date": month, "blogs": items} for month, items in by_month.items()],
        key=lambda item: item["date"],
        reverse=True)
    return render(request, 'blog/archive.html', {"data": data})
def message(request):
    """Message-board page; comments are keyed by the fixed id 'message'."""
    return render(request, 'blog/message_board.html', {"source_id": "message"})
@csrf_exempt
def get_comment(request):
    """Receive the comment push-back from the Changyan comment service
    (POSTed JSON carried in the ``data`` form field).

    :param request: POST request from the external service
    :return: JSON ``{"status": "ok"}`` acknowledgement
    """
    arg = request.POST
    data = arg.get('data')
    data = json.loads(data)
    title = data.get('title')
    url = data.get('url')
    source_id = data.get('sourceid')
    # 'message' is the message-board page, which has no Article row behind it.
    if source_id not in ['message']:
        article = Article.objects.get(pk=source_id)
        # presumably bumps the article's comment counter — confirm in models
        article.commenced()
    # Only the first comment of the payload is stored.
    comments = data.get('comments')[0]
    content = comments.get('content')
    user = comments.get('user').get('nickname')
    Comment(title=title, source_id=source_id, user_name=user, url=url, comment=content).save()
    return JsonResponse({"status": "ok"})
def detail(request, pk):
    """Article detail page; bumps the article's view counter.

    :param request: current request
    :param pk: primary key of the article
    :return: rendered detail page (404 if the article does not exist)
    """
    blog = get_object_or_404(Article, pk=pk)
    blog.viewed()
    return render(request, 'blog/detail.html', {"blog": blog})
def search(request):
    """Case-insensitive title search, paginated.

    :param request: current request; the ``key`` query parameter is required
    :return: rendered search-results page
    """
    key = request.GET['key']
    page = get_page(request)
    matching = Article.objects.filter(title__icontains=key)
    page_info = PageInfo(page, matching.count())
    articles = matching[page_info.index_start: page_info.index_end]
    return render(request, 'blog/search.html', {"blog_list": articles, "pages": page_info, "key": key})
def page_not_found_error(request, exception):
    """Custom 404 handler."""
    return render(request, "404.html", status=404)
def page_error(request):
    """Custom 500 handler (reuses the 404 template)."""
    return render(request, "404.html", status=500)
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,699
|
mrpal39/ev_code
|
refs/heads/master
|
/myapi/fullfeblog/blog/models.py
|
from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User
from taggit.managers import TaggableManager
from django.urls import reverse
import logging
from abc import ABCMeta, abstractmethod, abstractproperty
from django.db import models
from django.urls import reverse
from django.conf import settings
from uuslug import slugify
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
from webdev.utils import get_current_site
from webdev.utils import cache_decorator, cache
from django.utils.timezone import now
from mdeditor . fields import MDTextField
#
logger = logging.getLogger(__name__)
class LinkShowType(models.TextChoices):
    """Where a friendly link may be displayed."""

    I = ('i', 'Homepage')
    L = ('l', 'list page')
    P = ('p', 'article page')
    A = ('a', 'full station')
    S = ('s', 'Friendly Link Page')
class BaseModel(models.Model):
    """Abstract base model: integer pk plus created/modified timestamps,
    with slug maintenance and a fast path for view-count-only updates."""

    id = models.AutoField(primary_key=True)
    created_time = models.DateTimeField('Creation Time', default=now)
    last_mod_time = models.DateTimeField('modification time', default=now)

    def save(self, *args, **kwargs):
        # Article view-count bumps skip the normal save path (and the slug
        # logic) and are written with a direct UPDATE.
        is_update_views = isinstance(
            self,
            Article) and 'update_fields' in kwargs and kwargs['update_fields'] == ['views']
        if is_update_views:
            Article.objects.filter(pk=self.pk).update(views=self.views)
        else:
            # Keep the slug in sync with the title (or name) when present.
            if 'slug' in self.__dict__:
                source = getattr(self, 'title') if 'title' in self.__dict__ else getattr(self, 'name')
                setattr(self, 'slug', slugify(source))
            super().save(*args, **kwargs)

    def get_full_url(self):
        """Absolute https URL of this object on the current site."""
        site = get_current_site().domain
        return "https://{site}{path}".format(site=site, path=self.get_absolute_url())

    class Meta:
        abstract = True

    @abstractmethod
    def get_absolute_url(self):
        pass
class Article(BaseModel):
    """Blog article (or standalone page)."""

    STATUS_CHOICES = (
        ('d', 'draft'),
        ('p', 'publish'),
    )
    COMMENT_STATUS = (
        ('o', 'open'),
        ('c', 'close'),
    )
    TYPE = (
        ('a', 'article'),
        ('p', 'page'),
    )

    title = models.CharField('title', max_length=200, unique=True)
    body = MDTextField('body')
    pub_time = models.DateTimeField(
        'Release time', blank=False, null=False, default=now)
    status = models.CharField(
        'Article status',
        max_length=1,
        choices=STATUS_CHOICES,
        default='p')
    comment_status = models.CharField(
        ' Comment Status',
        max_length=1,
        choices=COMMENT_STATUS,
        default='o')
    type = models.CharField('类型', max_length=1, choices=TYPE, default='a')
    views = models.PositiveIntegerField('Views', default=0)
    author = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        verbose_name='Author',
        blank=False,
        null=False,
        on_delete=models.CASCADE)
    article_order = models.IntegerField(
        'Sorting, the larger the number, the more advanced', blank=False, null=False, default=0)
    category = models.ForeignKey(
        'Category',
        verbose_name='Classification',
        on_delete=models.CASCADE,
        blank=False,
        null=False)
    tags = models.ManyToManyField('Tag', verbose_name='tag collection', blank=True)

    class Meta:
        ordering = ['-article_order', '-pub_time']
        verbose_name = "article"
        verbose_name_plural = verbose_name
        get_latest_by = 'id'

    def __str__(self):
        return self.title

    def body_to_string(self):
        """Return the raw markdown body."""
        return self.body

    def get_absolute_url(self):
        return reverse('blog:detailbyid', kwargs={
            'article_id': self.id,
            'year': self.created_time.year,
            'month': self.created_time.month,
            'day': self.created_time.day
        })

    @cache_decorator(60 * 60 * 10)
    def get_category_tree(self):
        """(name, url) pairs for this article's category and its ancestors."""
        tree = self.category.get_category_tree()
        return list(map(lambda c: (c.name, c.get_absolute_url()), tree))

    def save(self, *args, **kwargs):
        super().save(*args, **kwargs)

    def viewed(self):
        """Bump the view counter via the update_fields fast path."""
        self.views += 1
        self.save(update_fields=['views'])

    def comment_list(self):
        """Enabled comments for this article, cached for 100 minutes."""
        cache_key = 'article_comments_{id}'.format(id=self.id)
        value = cache.get(cache_key)
        if value:
            logger.info('get article comments:{id}'.format(id=self.id))
            return value
        comments = self.comment_set.filter(is_enable=True)
        cache.set(cache_key, comments, 60 * 100)
        logger.info('set article comments:{id}'.format(id=self.id))
        return comments

    def get_admin_url(self):
        """URL of this article's change page in the Django admin."""
        info = (self._meta.app_label, self._meta.model_name)
        return reverse('admin:%s_%s_change' % info, args=(self.pk,))

    @cache_decorator(expiration=60 * 100)
    def next_article(self):
        # Next published article, by ascending id.
        return Article.objects.filter(
            id__gt=self.id, status='p').order_by('id').first()

    @cache_decorator(expiration=60 * 100)
    def prev_article(self):
        # Previous published article (default Meta ordering applies).
        return Article.objects.filter(id__lt=self.id, status='p').first()
class Category(BaseModel):
    """Article category; categories form a tree via ``parent_category``."""

    name = models.CharField('Category name', max_length=30, unique=True)
    parent_category = models.ForeignKey(
        'self',
        verbose_name="Parent Category",
        blank=True,
        null=True,
        on_delete=models.CASCADE)
    slug = models.SlugField(default='no-slug', max_length=60, blank=True)

    class Meta:
        ordering = ['name']
        verbose_name = "Category"
        verbose_name_plural = verbose_name

    def get_absolute_url(self):
        return reverse(
            'blog:category_detail', kwargs={
                'category_name': self.slug})

    def __str__(self):
        return self.name

    @cache_decorator(60 * 60 * 10)
    def get_category_tree(self):
        """Return this category followed by its ancestors up to the root.

        :return: list of Category, self first
        """
        categorys = []

        def parse(category):
            categorys.append(category)
            if category.parent_category:
                parse(category.parent_category)

        parse(self)
        return categorys

    @cache_decorator(60 * 60 * 10)
    def get_sub_categorys(self):
        """Return this category and all of its descendants.

        :return: list of Category, self first
        """
        categorys = []
        all_categorys = Category.objects.all()

        def parse(category):
            if category not in categorys:
                categorys.append(category)
            childs = all_categorys.filter(parent_category=category)
            for child in childs:
                # BUG FIX: the original guard tested ``category`` here
                # (always False after the append above), so children were
                # never collected and the method returned only [self].
                if child not in categorys:
                    categorys.append(child)
                    parse(child)

        parse(self)
        return categorys
class Tag(BaseModel):
    """Article tag."""

    name = models.CharField('Labelname ', max_length=30, unique=True)
    slug = models.SlugField(default='no-slug', max_length=60, blank=True)

    class Meta:
        ordering = ['name']
        verbose_name = "label"
        verbose_name_plural = verbose_name

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse('blog:tag_detail', kwargs={'tag_name': self.slug})

    @cache_decorator(60 * 60 * 10)
    def get_article_count(self):
        """Number of distinct articles carrying this tag (cached 10 h)."""
        return Article.objects.filter(tags__name=self.name).distinct().count()
class Links(models.Model):
    """Friendly (blogroll) link shown on configurable page types."""

    name = models.CharField('Link name', max_length=30, unique=True)
    link = models.URLField('Link address')
    sequence = models.IntegerField('排序', unique=True)
    is_enable = models.BooleanField(
        'Whether to display', default=True, blank=False, null=False)
    show_type = models.CharField(
        'Display Type',
        max_length=1,
        choices=LinkShowType.choices,
        default=LinkShowType.I)
    created_time = models.DateTimeField('Creation Time', default=now)
    last_mod_time = models.DateTimeField('modification time', default=now)

    class Meta:
        ordering = ['sequence']
        verbose_name = 'Friendly link'
        verbose_name_plural = verbose_name

    def __str__(self):
        return self.name
class SideBar(models.Model):
    """Sidebar widget holding arbitrary HTML content."""

    name = models.CharField('title', max_length=100)
    content = models.TextField("content")
    sequence = models.IntegerField('排序', unique=True)
    is_enable = models.BooleanField('Whether to enable', default=True)
    created_time = models.DateTimeField('Creation Time', default=now)
    last_mod_time = models.DateTimeField('modification time', default=now)

    class Meta:
        ordering = ['sequence']
        verbose_name = 'Sidebar'
        verbose_name_plural = verbose_name

    def __str__(self):
        return self.name
class BlogSettings(models.Model):
    """Site-wide configuration; a singleton enforced in ``clean``."""

    sitename = models.CharField(
        "Site Name", max_length=200, null=False, blank=False, default='')
    site_description = models.TextField(
        "Site Description", max_length=1000, null=False, blank=False, default='')
    site_seo_description = models.TextField(
        "SEO description of the site", max_length=1000, null=False, blank=False, default='')
    site_keywords = models.TextField(
        "Website Keywords", max_length=1000, null=False, blank=False, default='')
    article_sub_length = models.IntegerField("Article summary length", default=300)
    sidebar_article_count = models.IntegerField("The number of sidebar articles", default=10)
    sidebar_comment_count = models.IntegerField("The number of sidebar comments", default=5)
    show_google_adsense = models.BooleanField('Whether to display Google ads', default=False)
    google_adsense_codes = models.TextField(
        'Ad content', max_length=2000, null=True, blank=True, default='')
    open_site_comment = models.BooleanField('Whether to open website comment function', default=True)
    beiancode = models.CharField(
        'Record number', max_length=2000, null=True, blank=True, default='')
    analyticscode = models.TextField(
        "Website Statistics Code", max_length=1000, null=False, blank=False, default='')
    show_gongan_code = models.BooleanField(
        'Whether to display the public security record number', default=False, null=False)
    gongan_beiancode = models.TextField(
        'Public Security Record Number', max_length=2000, null=True, blank=True, default='')
    resource_path = models.CharField(
        "Static file storage address", max_length=300, null=False, default='/var/www/resource/')

    class Meta:
        verbose_name = 'Websiteconfiguration'
        verbose_name_plural = verbose_name

    def __str__(self):
        return self.sitename

    def clean(self):
        # Allow at most one BlogSettings row.
        if BlogSettings.objects.exclude(id=self.id).count():
            raise ValidationError(_('There can only be one configuration'))

    def save(self, *args, **kwargs):
        super().save(*args, **kwargs)
        # Any settings change invalidates every cached page/fragment.
        from webdev.utils import cache
        cache.clear()
class PublishedManager(models.Manager):
    """Manager restricted to posts with status 'published'."""

    def get_queryset(self):
        base = super(PublishedManager, self).get_queryset()
        return base.filter(status='published')
class Post(models.Model):
    """Simple blog post with a custom ``published`` manager."""

    STATUS_CHOICES = (
        ('draft', 'Draft'),
        ('published', 'Published'),
    )

    tags = TaggableManager()
    objects = models.Manager()      # The default manager.
    published = PublishedManager()  # Our custom manager.
    title = models.CharField(max_length=250)
    slug = models.SlugField(max_length=250, unique_for_date='publish')
    author = models.ForeignKey(User, on_delete=models.CASCADE, related_name='blog_posts')
    body = models.TextField()
    publish = models.DateTimeField(default=timezone.now)
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    status = models.CharField(max_length=10, choices=STATUS_CHOICES, default='draft')

    class Meta:
        ordering = ('-publish',)

    def __str__(self):
        return self.title

    def get_absolute_url(self):
        return reverse('post-detail', kwargs={'pk': self.pk})
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,700
|
mrpal39/ev_code
|
refs/heads/master
|
/awssam/wikidj/wikidj/settings.py
|
import os
from django.urls import reverse_lazy

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# NOTE(review): the secret key is hard-coded; load it from an environment
# variable before deploying this settings module anywhere public.
SECRET_KEY = 'vsfygxju9)=k8qxmc9!__ng%dooyn-w7il_z+w)grvkz4ks!)u'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

# Empty while DEBUG is on; must list real hostnames in production.
ALLOWED_HOSTS = []

# Application definition
INSTALLED_APPS = [
    "django.contrib.humanize.apps.HumanizeConfig",
    "django.contrib.auth.apps.AuthConfig",
    "django.contrib.contenttypes.apps.ContentTypesConfig",
    "django.contrib.sessions.apps.SessionsConfig",
    "django.contrib.sites.apps.SitesConfig",
    "django.contrib.messages.apps.MessagesConfig",
    "django.contrib.staticfiles.apps.StaticFilesConfig",
    "django.contrib.admin.apps.AdminConfig",
    "django.contrib.admindocs.apps.AdminDocsConfig",
    # django-wiki and its dependencies/plugins.
    "sekizai",
    "sorl.thumbnail",
    "django_nyt.apps.DjangoNytConfig",
    "wiki.apps.WikiConfig",
    "wiki.plugins.macros.apps.MacrosConfig",
    "wiki.plugins.help.apps.HelpConfig",
    "wiki.plugins.links.apps.LinksConfig",
    "wiki.plugins.images.apps.ImagesConfig",
    "wiki.plugins.attachments.apps.AttachmentsConfig",
    "wiki.plugins.notifications.apps.NotificationsConfig",
    "wiki.plugins.editsection.apps.EditSectionConfig",
    "wiki.plugins.globalhistory.apps.GlobalHistoryConfig",
    "mptt",
]
MIDDLEWARE = [
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.middleware.common.CommonMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
    "django.middleware.clickjacking.XFrameOptionsMiddleware",
    "django.middleware.security.SecurityMiddleware",
]
# Required by django.contrib.sites; must match a Site row in the database.
SITE_ID=1
ROOT_URLCONF = 'wikidj.urls'
TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [
            os.path.join(BASE_DIR, "templates"),
        ],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.contrib.auth.context_processors.auth",
                "django.template.context_processors.debug",
                "django.template.context_processors.i18n",
                "django.template.context_processors.request",
                "django.template.context_processors.tz",
                "django.contrib.messages.context_processors.messages",
                # sekizai is required by django-wiki templates.
                "sekizai.context_processors.sekizai",
            ],
            "debug": DEBUG,
        },
    },
]
WSGI_APPLICATION = 'wikidj.wsgi.application'

# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = "/static/"
STATIC_ROOT = os.path.join(BASE_DIR, "static")
MEDIA_ROOT = os.path.join(BASE_DIR, "media")
MEDIA_URL = "/media/"

# Wiki permissions: anonymous users may edit existing articles but not create new ones.
WIKI_ANONYMOUS_WRITE = True
WIKI_ANONYMOUS_CREATE = False
# After login, send users to the wiki root article.
LOGIN_REDIRECT_URL = reverse_lazy('wiki:get', kwargs={'path': ''})
# urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,701
|
mrpal39/ev_code
|
refs/heads/master
|
/awssam/fullfeblog/core/models.py
|
from django.db import models
from blog.models import Post
# Creating a comment systems
class Comment(models.Model):
    """A reader comment attached to a blog Post."""
    # Deleting the post deletes its comments; reverse accessor is post.comments.
    post = models.ForeignKey(Post,
                             on_delete=models.CASCADE,
                             related_name='comments')
    name = models.CharField(max_length=200)
    email = models.EmailField()
    body = models.TextField()
    created = models.DateTimeField(auto_now_add=True)  # set once on insert
    # BUG FIX: the original used auto_now_add here, freezing `updated` at
    # creation time; auto_now refreshes it on every save().
    updated = models.DateTimeField(auto_now=True)
    # Allows hiding abusive comments without deleting them.
    active = models.BooleanField(default=True)

    class Meta:
        # Oldest comments first.
        ordering = ('created',)

    def __str__(self):
        # BUG FIX: the original f-string lacked spaces ("...{name}on{post}").
        return f'comment by {self.name} on {self.post}'
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,702
|
mrpal39/ev_code
|
refs/heads/master
|
/tc_zufang/django_web/datashow/models.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from mongoengine import *
from django.db import models
# Create your models here.
class ItemInfo(Document):
    """MongoEngine document describing one scraped rental-listing post."""
    # Post title
    title = StringField()
    # Rent price
    money = StringField()
    # Rental method (whole vs shared lease)
    method = StringField()
    # District the listing is in
    area = StringField()
    # Residential community / compound
    community = StringField()
    # URL of the post detail page
    targeturl = StringField()
    # Time the post was published
    pub_time = StringField()
    # City
    city = StringField()
    # Contact phone number
    phone = StringField()
    # Listing images
    img1 = StringField()
    img2 = StringField()
    # Map this document class to the 'zufang_detail' MongoDB collection.
    meta = {'collection': 'zufang_detail'}
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,703
|
mrpal39/ev_code
|
refs/heads/master
|
/myapi/devfile/gitapi/urls.py
|
from django.urls import path
from . import views

# URL routes for the gitapi app:
#   ''   -> libraries.io search view
#   't/' -> file-upload test view
urlpatterns = [
    path('', views.api, name='api'),
    path('t/', views.simple_upload, name='test'),
]
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,704
|
mrpal39/ev_code
|
refs/heads/master
|
/tc_zufang/tc_zufang-slave/tc_zufang/items.py
|
# -*- coding: utf-8 -*-
# Defines the fields that are scraped and stored in the database
from scrapy.item import Item,Field
class TcZufangItem(Item):
    """Scrapy item holding one rental-listing post."""
    # Post title
    title = Field()
    # Rent price
    money = Field()
    # Rental method (whole vs shared lease)
    method = Field()
    # District the listing is in
    area = Field()
    # Residential community / compound
    community = Field()
    # URL of the post detail page
    targeturl = Field()
    # Time the post was published
    pub_time = Field()
    # City
    city = Field()
    # Contact phone number
    phone = Field()
    # Image 1
    img1 = Field()
    # Image 2
    img2 = Field()
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,705
|
mrpal39/ev_code
|
refs/heads/master
|
/scrap/properties/properties/spiders/basictest.py
|
from scrapy.loader.processors import MapCompose, Join
from scrapy.loader import ItemLoader
from properties.items import PropertiesItem
import datetime
from urllib.parse import urlparse
import socket
import scrapy
class BasicSpider(scrapy.Spider):
    """Contract-test spider that scrapes a single Facebook developers blog post.

    Loads `title` via an absolute XPath and fills housekeeping fields
    (url, project, spider, server, date) on a PropertiesItem.
    """
    name = "basictest"
    allowed_domains = ["web"]
    start_urls = (
        'https://developers.facebook.com/blog/post/2021/01/26/introducing-instagram-content-publishing-api/?utm_source=email&utm_medium=fb4d-newsletter-february21&utm_campaign=organic&utm_offering=business-tools&utm_product=instagram&utm_content=body-button-instagram-graph-API&utm_location=2',
    )

    def parse(self, response):
        """ @url https://developers.facebook.com/blog/post/2021/01/26/introducing-instagram-content-publishing-api/?utm_source=email&utm_medium=fb4d-newsletter-february21&utm_campaign=organic&utm_offering=business-tools&utm_product=instagram&utm_content=body-button-instagram-graph-API&utm_location=2
        @return item 1
        @scrapes title price
        @scrapes url project"""
        l = ItemLoader(item=PropertiesItem(), response=response)
        # Load fields using XPath expressions.
        # BUG FIX: `unicode` does not exist on Python 3 (this file imports
        # urllib.parse, so it targets Python 3); use `str` instead.
        l.add_xpath('title', '/html/body/div[1]/div[5]/div[2]/div/div/div/div[2]/div[2]/div[2]/div[1]/div/div/div[2]/div/div/p[1]/text()',
                    MapCompose(str.strip, str.title))
        # Housekeeping fields
        l.add_value('url', response.url)
        l.add_value('project', self.settings.get('BOT_NAME'))
        l.add_value('spider', self.name)
        l.add_value('server', socket.gethostname())
        l.add_value('date', datetime.datetime.now())
        return l.load_item()
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,706
|
mrpal39/ev_code
|
refs/heads/master
|
/eswork/articles/articles/pipelines.py
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
import logging
# import MySQLdb
# import MySQLdb.cursors
import copy
import pymysql
from twisted.enterprise import adbapi
# class ArticlesPipeline(object):
# def process_item(self, item, spider):
# return item
class MysqlTwistedPipeline(object):
    """Scrapy pipeline that inserts items into MySQL asynchronously through
    Twisted's adbapi connection pool."""

    def __init__(self, dbpool):
        self.dbpool = dbpool

    @classmethod
    def from_settings(cls, settings):
        """Build the pipeline from Scrapy settings (fixed hook name, called by Scrapy).

        :param settings: Scrapy settings object with MYSQL_* keys
        :return: pipeline instance wrapping an adbapi ConnectionPool
        """
        adbparams = dict(
            host=settings['MYSQL_HOST'],
            db=settings['MYSQL_DBNAME'],
            user=settings['MYSQL_USER'],
            password=settings['MYSQL_PASSWORD'],
            cursorclass=pymysql.cursors.DictCursor  # rows come back as dicts
        )
        # Connection pool backed by pymysql.
        dbpool = adbapi.ConnectionPool('pymysql', **adbparams)
        return cls(dbpool)

    def process_item(self, item, spider):
        """Schedule an asynchronous MySQL insert for this item."""
        # Deep-copy so later item mutations don't race the pending insert.
        item = copy.deepcopy(item)
        query = self.dbpool.runInteraction(self.do_insert, item)
        # BUG FIX: failures inside do_insert travel down the *errback* chain;
        # the original used addCallback, so handle_error never saw them.
        query.addErrback(self.handle_error)
        # BUG FIX: pipelines must return the item so downstream pipelines run.
        return item

    def do_insert(self, cursor, item):
        # runInteraction commits automatically; no explicit commit needed.
        insert_sql = """
        insert into pm_article(title, create_date, url, content, view, tag, url_id) VALUES (%s, %s, %s, %s, %s, %s, %s)
        """
        cursor.execute(insert_sql, (item['title'], item['create_date'], item['url'],
                                    item['content'], item['view'], item['tag'], item['url_id']))

    def handle_error(self, failure):
        if failure:
            # Log the failure (best-effort; stdout only).
            print(failure)
class ElasticsearchPipeline(object):
    """Pipeline that persists each item to Elasticsearch by delegating to the
    item's own save_to_es() hook."""

    def process_item(self, item, spider):
        # The item knows how to serialize itself into ES; after saving,
        # hand it on unchanged so later pipelines still receive it.
        item.save_to_es()
        return item
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,707
|
mrpal39/ev_code
|
refs/heads/master
|
/myapi/devfile/request/api.py
|
"""Ad-hoc scripts exercising the libraries.io and ScrapingAnt HTTP APIs.

SECURITY NOTE: API keys are hard-coded below; move them to environment
variables before sharing or deploying this file.
"""
import requests

# --- libraries.io example ---
# BUG FIX: the original passed a '?api_key' HTTP *header*, which the API
# ignores; the key belongs in the query string (see the example URL below).
url = "https://libraries.io/api/"
params = {'api_key': '306cf1684a42e4be5ec0a1c60362c2ef'}
response = requests.request("GET", url, params=params)
print(response.text)

# Example: https://libraries.io/api/NPM/base62/dependent_repositories?api_key=306cf1684a42e4be5ec0a1c60362c2ef
# (BUG FIX: the line above was bare text in the original — a SyntaxError.)

# --- ScrapingAnt POST example ---
url = "https://scrapingant.p.rapidapi.com/post"
# BUG FIX: the original payload was malformed JSON — no comma between the
# "cookies" value and "return_text" — so the API would reject the request.
payload = "{\"cookies\": \"cookie_name_1=cookie_value_1;cookie_name_2=cookie_value_2\", \"return_text\": false, \"url\": \"https://example.com\"}"
headers = {
    'content-type': "application/json",
    'x-rapidapi-key': "b188eee73cmsha4c027c9ee4e2b7p1755ebjsn1e0e0b615bcf",
    'x-rapidapi-host': "scrapingant.p.rapidapi.com"
}
response = requests.request("POST", url, data=payload, headers=headers)
print(response.text)
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,708
|
mrpal39/ev_code
|
refs/heads/master
|
/awssam/devfile/core/urls.py
|
from django.urls import path
from . import views
from django.conf.urls import include, url
from django.views import generic
from material.frontend import urls as frontend_urls

# URL routes for the core app.
urlpatterns = [
    path('', views.home, name='home'),
    # NOTE(review): path() does not interpret regex, so '$/' matches the
    # literal URL "$/". This looks like a leftover from a url(r'^$', ...)
    # pattern — confirm the intended route before changing it.
    path('$/', generic.RedirectView.as_view(url='/workflow/', permanent=False)),
    path('/', include(frontend_urls)),
]
# Viewflow PRO Feature Set
# Celery integration
# django-guardian integration
# Flow graph visualization
# Flow BPMN export
# Material Frontend
# Process dashboard view
# Flow migration support
# Subprocess support
# REST API support
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,709
|
mrpal39/ev_code
|
refs/heads/master
|
/Web-UI/examples/scraper.py
|
# You need to create an Item name 'played' for running this script
# item['ack_signal'] = int(response.meta['ack_signal']) - this line is used for sending ack signal to RabbitMQ
def parse(self, response):
    """Yield one `played` item per song row on a playlist page.

    The playlist date is read from the 'date=' query parameter embedded in
    the response URL (the 10 characters after 'date=').
    """
    item = played()
    # Locate the 10-character date that follows 'date=' in the URL.
    start = response.url.find('date=') + 5
    playlist_date = response.url[start:start + 10]
    for row in response.xpath('//li[@class="player-in-playlist-holder"]'):
        item['timeplayed'] = row.xpath('.//span[@class="time"]/text()').extract()[0]
        item['artist'] = row.xpath('.//div[@class="jp-title"]/strong//span//text()').extract()[0]
        item['song'] = row.xpath('.//div[@class="jp-title"]/strong//em//text()').extract()[0]
        item['dateplayed'] = playlist_date
        # Ack signal consumed by the RabbitMQ middleware for this response.
        item['ack_signal'] = int(response.meta['ack_signal'])
        yield item
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,710
|
mrpal39/ev_code
|
refs/heads/master
|
/awssam/myscrapyproject/scrapyapi/srp/models.py
|
# from __future__ import unicode_literals
# from django.utils.encoding import python_2_unicode_compatible
# from django.db import models
# from django.db.models.signals import pre_delete
# from django.dispatch import receiver
# from scrapy_djangoitem import DjangoItem
# from dynamic_scraper.models import Scraper, SchedulerRuntime
# @python_2_unicode_compatible
# class NewsWebsite(models.Model):
# name = models.CharField(max_length=200)
# url = models.URLField()
# scraper = models.ForeignKey(Scraper, blank=True, null=True, on_delete=models.SET_NULL)
# scraper_runtime = models.ForeignKey(SchedulerRuntime, blank=True, null=True, on_delete=models.SET_NULL)
# def __str__(self):
# return self.name
# @python_2_unicode_compatible
# class Article(models.Model):
# title = models.CharField(max_length=200)
# news_website = models.ForeignKey(NewsWebsite)
# description = models.TextField(blank=True)
# url = models.URLField(blank=True)
# thumbnail = models.CharField(max_length=200, blank=True)
# checker_runtime = models.ForeignKey(SchedulerRuntime, blank=True, null=True, on_delete=models.SET_NULL)
# def __str__(self):
# return self.title
# class ArticleItem(DjangoItem):
# django_model = Article
# @receiver(pre_delete)
# def pre_delete_handler(sender, instance, using, **kwargs):
# if isinstance(instance, NewsWebsite):
# if instance.scraper_runtime:
# instance.scraper_runtime.delete()
# if isinstance(instance, Article):
# if instance.checker_runtime:
# instance.checker_runtime.delete()
# pre_delete.connect(pre_delete_handler)
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,711
|
mrpal39/ev_code
|
refs/heads/master
|
/myapi/devfile/core/views.py
|
from django.shortcuts import render
from .forms import SearchForm
import requests
def base(request):
    """Render the static base template."""
    # (A commented-out libraries.io example request previously lived here.)
    return render(request, 'base.html')
def home(request):
    """Fetch the libraries.io platform list and render it on the index page."""
    # NOTE(review): the API key is hard-coded in the URL; move it to settings.
    resp = requests.get('https://libraries.io/api/platforms?api_key=306cf1684a42e4be5ec0a1c60362c2ef')
    payload = resp.json()
    # The template expects the decoded JSON under the 'form' key.
    return render(request, 'index.html', {'form': payload})
def Search(request):
    """Query the libraries.io search endpoint and render the raw results."""
    # NOTE(review): hard-coded API key, and the 'q=' term is empty, so this
    # always fetches the default search results.
    resp = requests.get('https://libraries.io/api/search?q=&api_key=306cf1684a42e4be5ec0a1c60362c2ef')
    results = resp.json()
    return render(request, 'Search.html', {'search': results})
# def post_search(request):
# form= SearchForm()
# payload={'key1':'search?q=','key2':['form','&api_key=306cf1684a42e4be5ec0a1c60362c2ef']}
# url=requests.get=('https://libraries.io/api/get',params=payload)
# # results=[]
# # if 'query' in request.GET:
# # form=SearchForm(
# # if form.is_valid():
# # query=form.cleaned_data['query']
# # results=Post.published.annotate(
# # search =SearchVector('title','body'),
# # ).filter(search=query)
# return render(request,'search.html',{
# 'url':url,
# # 'query':query,
# # 'results':results
# })
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,712
|
mrpal39/ev_code
|
refs/heads/master
|
/myapi/devfile/gitapi/views.py
|
from django.http import response
from django.shortcuts import render
from .forms import DocumentForm
import requests
from django.shortcuts import render
from django.conf import settings
from django.core.files.storage import FileSystemStorage
def simple_upload(request):
    """Save an uploaded file to default storage and display its URL.

    Fixes relative to the original:
    - a DocumentForm instance was created and immediately overwritten by
      request.FILES['file'] (dead code) — removed;
    - the success template name 'imple_upload.html' was a typo for
      'simple_upload.html' (the template used by the GET branch).
    """
    if request.method == 'POST':
        myfile = request.FILES['file']
        fs = FileSystemStorage()
        filename = fs.save(myfile.name, myfile)
        uploaded_file_url = fs.url(filename)
        return render(request, 'simple_upload.html', {
            'uploaded_file_url': uploaded_file_url
        })
    return render(request, 'simple_upload.html')
def model_form_upload(request):
    """Handle a model-backed upload form; redirect to 'home' on success."""
    # BUG FIX: `redirect` was never imported at module level in the original,
    # so the success path raised NameError; import it locally here.
    from django.shortcuts import redirect
    if request.method == 'POST':
        form = DocumentForm(request.POST, request.FILES)
        if form.is_valid():
            form.save()
            return redirect('home')
        # invalid POST falls through and re-renders the bound form with errors
    else:
        form = DocumentForm()
    return render(request, 'core/model_form_upload.html', {
        'form': form
    })
def api(request):
    """Search libraries.io for 'npm' packages and render the JSON response."""
    # NOTE(review): hard-coded API key; move to settings/environment.
    api_key = '306cf1684a42e4be5ec0a1c60362c2ef'
    name = 'npm'
    api_url = "https://libraries.io/api/search?q={}&api_key={}".format(name, api_key)
    resp = requests.get(api_url)
    decoded = resp.json()
    return render(request, 'api.html', {'api': decoded, })
# return render(request,'search.html',{
# 'url':url,
# # 'query':query,
# # 'results':results
# })
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
3,713
|
mrpal39/ev_code
|
refs/heads/master
|
/Web-UI/scrapyproject/views.py
|
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.contrib.auth import update_session_auth_hash
from .forms import CreateProject, DeleteProject, ItemName, FieldName, CreatePipeline, LinkGenerator, Scraper, Settings, ShareDB, ChangePass, ShareProject
from django.http import HttpResponseRedirect
from django.http import HttpResponse, HttpResponseNotFound, JsonResponse
from .models import Project, Item, Pipeline, Field, LinkgenDeploy, ScrapersDeploy, Dataset
from django.forms.util import ErrorList
from itertools import groupby
from django.core.urlresolvers import reverse
import os
import shutil
from string import Template
from .scrapy_packages import settings
from pymongo import MongoClient
import glob
import subprocess
import requests
import json
import datetime
import dateutil.parser
import socket
from django.contrib.auth.models import User
from bson.json_util import dumps
import threading
import crypt
try:
# Python 3
from urllib.parse import urlparse
except ImportError:
# Python 2
from urlparse import urlparse
try:
from urllib.parse import quote
except:
from urllib import quote
def generate_default_settings():
    """Return the default Scrapy settings template for new projects.

    The returned text is stored verbatim as both the scraper and the
    link-generator settings when a project is created.
    """
    # Named default_settings (not "settings") to avoid shadowing the
    # module-level `from .scrapy_packages import settings` import.
    default_settings = """# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'unknown'
# Obey robots.txt rules
ROBOTSTXT_OBEY = True
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 32
# Configure a delay for requests for the same website (default: 0)
# See http://scrapy.readthedocs.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
#CONCURRENT_REQUESTS_PER_IP = 16
# Disable cookies (enabled by default)
#COOKIES_ENABLED = False
# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
#   'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
#   'Accept-Language': 'en',
#}
# Enable and configure the AutoThrottle extension (disabled by default)
# See http://doc.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False
# Enable and configure HTTP caching (disabled by default)
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'"""
    return default_settings
@login_required
def main_page(request):
    """Landing page: the current user's projects and accessible databases."""
    own_projects = Project.objects.filter(user=request.user)
    own_datasets = Dataset.objects.filter(user=request.user)
    # Template expects a list of {'name': ...} dicts for projects and a
    # flat list of database names.
    userprojects = [{'name': proj.project_name} for proj in own_projects]
    databases = [ds.database for ds in own_datasets]
    return render(request, template_name="mainpage.html",
                  context={'username': request.user.username, 'projects': userprojects, 'databases': databases})
@login_required
def create_new(request):
    """Create a new scraping project for the logged-in user.

    GET renders the creation form. POST either cancels back to the main
    page or, on 'submit', validates the name, creates the Project row with
    default settings/spider stubs, provisions a per-project MongoDB
    database owned by the user, and records it as a Dataset.
    """
    if request.method == 'GET':
        form = CreateProject()
        return render(request, 'createproject.html', {'username': request.user.username, 'form': form})
    if request.method == 'POST':
        if 'cancel' in request.POST:
            return HttpResponseRedirect(reverse("mainpage"))
        elif 'submit' in request.POST:
            form = CreateProject(request.POST)
            if form.is_valid():
                # Project names must be unique per user; collect existing
                # names and reject duplicates with a form error.
                allprojects = []
                userprojects = Project.objects.filter(user=request.user)
                for project in userprojects:
                    allprojects.append(project.project_name)
                if form.cleaned_data['projectname'] in allprojects:
                    errors = form._errors.setdefault("projectname", ErrorList())
                    errors.append('Project named %s already exists. Please choose another name' % form.cleaned_data['projectname'])
                    return render(request, 'createproject.html', {'username': request.user.username, 'form': form})
                else:
                    # Seed the new project with default Scrapy settings and
                    # empty spider-function stubs.
                    project = Project()
                    project.project_name = form.cleaned_data['projectname']
                    project.user = request.user
                    project.settings_scraper = generate_default_settings()
                    project.settings_link_generator = generate_default_settings()
                    project.scraper_function = '''def parse(self, response):\n    pass'''
                    project.link_generator = '''start_urls = [""]\ndef parse(self, response):\n    pass'''
                    project.save()
                    # project data will be saved in username_projectname database, so we need to
                    # give the current user ownership of that database
                    mongodbname = request.user.username + "_" + project.project_name
                    # Password is URL-quoted because it is embedded in the URI.
                    mongouri = "mongodb://" + settings.MONGODB_USER + ":" + quote(settings.MONGODB_PASSWORD) + "@" + settings.MONGODB_URI + "/admin"
                    connection = MongoClient(mongouri)
                    connection.admin.command('grantRolesToUser', request.user.username,
                                             roles=[{'role': 'dbOwner', 'db': mongodbname}])
                    connection.close()
                    # Track the new database so it shows on the main page.
                    dataset = Dataset()
                    dataset.user = request.user
                    dataset.database = mongodbname
                    dataset.save()
                    return HttpResponseRedirect(reverse("manageproject", args=(project.project_name,)))
            else:
                return render(request, 'createproject.html', {'username': request.user.username, 'form': form})
        else:
            return HttpResponseNotFound('Nothing is here.')
@login_required
def manage_project(request, projectname):
    """Project overview: settings text, items, pipelines, and flags saying
    whether the link-generator / scraper functions have been written."""
    try:
        project = Project.objects.get(user=request.user, project_name=projectname)
    except Project.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')
    projectdata = {
        'settings_scraper': project.settings_scraper,
        'settings_link_generator': project.settings_link_generator,
        # Empty string -> False, anything written -> True.
        'link_generator': bool(project.link_generator),
        'scraper_function': bool(project.scraper_function),
        'items': list(Item.objects.filter(project=project)),
        'pipelines': list(Pipeline.objects.filter(project=project)),
    }
    return render(request, 'manageproject.html',
                  {'username': request.user.username, 'project': project.project_name, 'projectdata': projectdata})
@login_required
def delete_project(request, projectname):
    """Confirm (GET) and perform (POST 'submit') deletion of a project."""
    try:
        project = Project.objects.get(user=request.user, project_name=projectname)
    except Project.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')
    if request.method == 'GET':
        return render(request, 'deleteproject.html',
                      {'username': request.user.username, 'form': DeleteProject(), 'projectname': projectname})
    if request.method == 'POST':
        if 'cancel' in request.POST:
            return HttpResponseRedirect(reverse("mainpage"))
        if 'submit' in request.POST:
            project.delete()
            return HttpResponseRedirect(reverse("mainpage"))
        return HttpResponseNotFound('Nothing is here.')
@login_required
def create_item(request, projectname):
    """Create a new scraped Item (and its Fields) inside *projectname*.

    GET renders the two-part form (item name plus dynamically-added field
    names). POST 'submit' validates both forms, rejects duplicate item
    names and duplicate field names, then persists the Item and a Field
    row per field. POST 'cancel' returns to the item list.
    """
    try:
        project = Project.objects.get(user=request.user, project_name=projectname)
    except Project.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')
    if request.method == 'GET':
        form1 = ItemName()
        form2 = FieldName()
        return render(request, 'additem.html',
                      {'username': request.user.username, 'form1': form1, 'form2': form2, 'project': project.project_name})
    if request.method == 'POST':
        if 'submit' in request.POST:
            form1 = ItemName(request.POST)
            # 'extra_field_count' tells FieldName how many extra field
            # inputs the client added dynamically.
            form2 = FieldName(request.POST, extra=request.POST.get('extra_field_count'))
            if form1.is_valid() and form2.is_valid():
                item = Item.objects.filter(project=project, item_name=form1.cleaned_data['itemname'])
                if len(item):
                    errors = form1._errors.setdefault("itemname", ErrorList())
                    errors.append(
                        'Item named %s already exists. Please choose another name' % form1.cleaned_data['itemname'])
                    return render(request, 'additem.html',
                                  {'username': request.user.username, 'form1': form1,
                                   'form2': form2, 'project': project.project_name})
                allfields = []
                valuetofield = {}  # field value -> form field name, for error reporting
                for field in form2.fields:
                    if form2.cleaned_data[field]:
                        if field != 'extra_field_count':
                            valuetofield[form2.cleaned_data[field]] = field
                            allfields.append(form2.cleaned_data[field])
                # BUGFIX: the previous itertools.groupby() check only caught
                # duplicates that were *adjacent* in submission order; a
                # seen-set catches a duplicate anywhere in the list.
                seen = set()
                for fieldvalue in allfields:
                    if fieldvalue in seen:
                        errors = form2._errors.setdefault(valuetofield[fieldvalue], ErrorList())
                        errors.append('Duplicate fields are not allowed.')
                        return render(request, 'additem.html',
                                      {'username': request.user.username, 'form1': form1,
                                       'form2': form2, 'project': project.project_name})
                    seen.add(fieldvalue)
                item = Item()
                item.item_name = form1.cleaned_data['itemname']
                item.project = project
                item.save()
                for field in allfields:
                    onefield = Field()
                    onefield.item = item
                    onefield.field_name = field
                    onefield.save()
                return HttpResponseRedirect(reverse("listitems", args=(project.project_name,)))
            else:
                return render(request, 'additem.html',
                              {'username': request.user.username, 'form1': form1,
                               'form2': form2, 'project': project.project_name})
        elif 'cancel' in request.POST:
            return HttpResponseRedirect(reverse("listitems", args=(project.project_name,)))
        else:
            form1 = ItemName(request.POST)
            form2 = FieldName(request.POST, extra=request.POST.get('extra_field_count'))
            return render(request, 'additem.html',
                          {'username': request.user.username, 'form1': form1,
                           'form2': form2, 'project': project.project_name})
@login_required
def itemslist(request, projectname):
    """List every Item of *projectname* together with its field names.

    Each entry is [item_name] or [item_name, [field, ...]] when the item
    has fields — the shape the template expects.
    """
    try:
        project = Project.objects.get(user=request.user, project_name=projectname)
    except Project.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')
    itemdata = []
    for item in Item.objects.filter(project=project):
        entry = [item.item_name]
        fieldnames = [f.field_name for f in Field.objects.filter(item=item)]
        if fieldnames:
            entry.append(fieldnames)
        itemdata.append(entry)
    return render(request, 'itemslist.html',
                  {'username': request.user.username, 'project': project.project_name, 'items': itemdata})
@login_required
def deleteitem(request, projectname, itemname):
    """Confirm (GET) and perform (POST 'submit') deletion of an item."""
    try:
        project = Project.objects.get(user=request.user, project_name=projectname)
    except Project.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')
    try:
        item = Item.objects.get(project=project, item_name=itemname)
    except Item.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')
    if request.method == 'GET':
        # using the form that was used for deleting the project
        form = DeleteProject()
        return render(request, 'deleteitem.html',
                      {'username': request.user.username, 'form': form, 'projectname': projectname, 'itemname': itemname})
    elif request.method == 'POST':
        if 'cancel' in request.POST:
            return HttpResponseRedirect(reverse("listitems", args=(projectname,)))
        elif 'submit' in request.POST:
            item.delete()
            return HttpResponseRedirect(reverse("listitems", args=(projectname,)))
        else:
            # BUGFIX: a POST with neither 'cancel' nor 'submit' previously
            # fell through and returned None (Django raises ValueError);
            # return 404 like delete_project does.
            return HttpResponseNotFound('Nothing is here.')
@login_required
def edititem(request, projectname, itemname):
    """Edit an existing Item: rename it and replace its Field definitions.

    GET pre-populates the forms from the stored fields. POST 'submit'
    validates, enforces unique item names and field names, then rewrites
    every Field row. POST 'cancel' returns to the item list.
    """
    try:
        project = Project.objects.get(user=request.user, project_name=projectname)
    except Project.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')
    try:
        item = Item.objects.get(project=project, item_name=itemname)
    except Item.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')
    if request.method == 'GET':
        fields = Field.objects.filter(item=item)
        fieldcounter = 0
        fieldlist = []
        fielddata = {}
        for field in fields:
            fieldlist.append(field.field_name)
            fieldcounter += 1
        # The first field maps to 'fieldname'; extras become 'field_2'...,
        # with 'extra_field_count' driving how many inputs FieldName renders.
        if fieldcounter == 1:
            fielddata['fieldname'] = fieldlist[0]
            fielddata['extra_field_count'] = 0
        elif fieldcounter > 1:
            fielddata['fieldname'] = fieldlist[0]
            fielddata['extra_field_count'] = fieldcounter - 1
            for i in range(1, fieldcounter):
                fielddata['field_%d' % (i+1)] = fieldlist[i]
        # NOTE(review): an item with zero fields leaves 'extra_field_count'
        # unset, so the lookup below would raise KeyError — confirm items
        # always have at least one field.
        form1 = ItemName({'itemname': itemname})
        form2 = FieldName(initial=fielddata, extra=fielddata['extra_field_count'])
        return render(request, 'edititem.html',
                      {'username': request.user.username, 'form1': form1, 'form2': form2, 'project': project.project_name})
    elif request.method == 'POST':
        if 'submit' in request.POST:
            form1 = ItemName(request.POST)
            form2 = FieldName(request.POST, extra=request.POST.get('extra_field_count'))
            if form1.is_valid() and form2.is_valid():
                # A name clash only matters if it belongs to a *different* item.
                newitemname = Item.objects.filter(project=project, item_name=form1.cleaned_data['itemname'])
                if len(newitemname):
                    for oneitem in newitemname:
                        if oneitem.item_name != item.item_name:
                            errors = form1._errors.setdefault('itemname', ErrorList())
                            errors.append('Item named %s already exists. Please choose another name' % form1.cleaned_data['itemname'])
                            return render(request, 'edititem.html',
                                          {'username': request.user.username, 'form1': form1,
                                           'form2': form2, 'project': project.project_name})
                allfields = []
                valuetofield = {}  # field value -> form field name, for error reporting
                for field in form2.fields:
                    if form2.cleaned_data[field]:
                        if field != 'extra_field_count':
                            valuetofield[form2.cleaned_data[field]] = field
                            allfields.append(form2.cleaned_data[field])
                # NOTE(review): groupby only groups *adjacent* equal values,
                # so non-adjacent duplicate field names slip through — confirm
                # whether form ordering guarantees adjacency.
                duplicates = [list(j) for i, j in groupby(allfields)]
                for duplicate in duplicates:
                    if len(duplicate) > 1:
                        errors = form2._errors.setdefault(valuetofield[duplicate[0]], ErrorList())
                        errors.append('Duplicate fields are not allowed.')
                        return render(request, 'edititem.html',
                                      {'username': request.user.username, 'form1': form1,
                                       'form2': form2, 'project': project.project_name})
                # Replace the stored fields wholesale: delete, then recreate.
                deletefield = Field.objects.filter(item=item)
                for field in deletefield:
                    field.delete()
                item.item_name = form1.cleaned_data['itemname']
                item.save()
                for field in allfields:
                    onefield = Field()
                    onefield.item = item
                    onefield.field_name = field
                    onefield.save()
                return HttpResponseRedirect(reverse("listitems", args=(project.project_name,)))
        elif 'cancel' in request.POST:
            return HttpResponseRedirect(reverse("listitems", args=(project.project_name,)))
        else:
            form1 = ItemName(request.POST)
            form2 = FieldName(request.POST, extra=request.POST.get('extra_field_count'))
            return render(request, 'edititem.html',
                          {'username': request.user.username, 'form1': form1,
                           'form2': form2, 'project': project.project_name})
@login_required
def addpipeline(request, projectname):
    """Add a pipeline function to *projectname*.

    The template is shown the project's items and their fields so the
    user can write the pipeline against them. POST 'submit' enforces a
    unique pipeline name and a unique execution order before saving.
    """
    try:
        project = Project.objects.get(user=request.user, project_name=projectname)
    except Project.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')
    # Map each item name to its list of field names for the template.
    defined_items = {}
    items = Item.objects.filter(project=project)
    for item in items:
        defined_items[item.item_name] = []
        fields = Field.objects.filter(item=item)
        for field in fields:
            defined_items[item.item_name].append(field.field_name)
    if request.method == 'GET':
        # Pre-fill the editor with a no-op process_item skeleton.
        initial_code = '''def process_item(self, item, spider):\n return item
'''
        form = CreatePipeline(initial={'pipelinefunction': initial_code})
        return render(request, "addpipeline.html",
                      {'username': request.user.username, 'form': form, 'project': project.project_name, 'items': defined_items})
    elif request.method == 'POST':
        if 'cancel' in request.POST:
            return HttpResponseRedirect(reverse("listpipelines", args=(project.project_name,)))
        if 'submit' in request.POST:
            form = CreatePipeline(request.POST)
            if form.is_valid():
                # Gather existing names and orders to enforce uniqueness.
                names = []
                orders = []
                pipelines = Pipeline.objects.filter(project=project)
                for pipeline in pipelines:
                    names.append(pipeline.pipeline_name)
                    orders.append(pipeline.pipeline_order)
                if form.cleaned_data['pipelinename'] in names:
                    errors = form._errors.setdefault('pipelinename', ErrorList())
                    errors.append(
                        'Pipeline named %s already exists. Please choose another name' % form.cleaned_data['pipelinename'])
                    return render(request, "addpipeline.html",
                                  {'username': request.user.username, 'form': form, 'project': project.project_name, 'items': defined_items})
                if int(form.cleaned_data['pipelineorder']) in orders:
                    errors = form._errors.setdefault('pipelineorder', ErrorList())
                    errors.append(
                        'Pipeline order %s already exists for another pipeline function. Enter a different order' % form.cleaned_data['pipelineorder'])
                    return render(request, "addpipeline.html",
                                  {'username': request.user.username, 'form': form, 'project': project.project_name, 'items': defined_items})
                pipeline = Pipeline()
                pipeline.pipeline_name = form.cleaned_data['pipelinename']
                pipeline.pipeline_order = form.cleaned_data['pipelineorder']
                pipeline.pipeline_function = form.cleaned_data['pipelinefunction']
                pipeline.project = project
                pipeline.save()
                return HttpResponseRedirect(reverse("listpipelines", args=(project.project_name,)))
            else:
                return render(request, "addpipeline.html",
                              {'username': request.user.username, 'form': form, 'project': project.project_name, 'items': defined_items})
@login_required
def pipelinelist(request, projectname):
    """List the project's pipelines as [name, order] pairs."""
    try:
        project = Project.objects.get(user=request.user, project_name=projectname)
    except Project.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')
    pipelinedata = [[pipe.pipeline_name, pipe.pipeline_order]
                    for pipe in Pipeline.objects.filter(project=project)]
    return render(request, 'pipelinelist.html', {'username': request.user.username, 'project': project.project_name, 'items': pipelinedata})
@login_required
def editpipeline(request, projectname, pipelinename):
    """Edit an existing pipeline's name, order, and function body.

    Uniqueness of the (possibly changed) name and order against the
    project's other pipelines is enforced before saving.
    """
    try:
        project = Project.objects.get(user=request.user, project_name=projectname)
    except Project.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')
    try:
        pipeline = Pipeline.objects.get(project=project, pipeline_name=pipelinename)
    except Pipeline.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')
    # Items/fields are passed to the template for reference while editing.
    defined_items = {}
    items = Item.objects.filter(project=project)
    for item in items:
        defined_items[item.item_name] = []
        fields = Field.objects.filter(item=item)
        for field in fields:
            defined_items[item.item_name].append(field.field_name)
    if request.method == 'GET':
        form = CreatePipeline(initial={'pipelinename': pipeline.pipeline_name,
                                       'pipelineorder': pipeline.pipeline_order,
                                       'pipelinefunction': pipeline.pipeline_function})
        return render(request, "editpipeline.html",
                      {'username': request.user.username, 'form': form, 'project': project.project_name, 'items': defined_items})
    elif request.method == 'POST':
        if 'cancel' in request.POST:
            return HttpResponseRedirect(reverse("listpipelines", args=(project.project_name,)))
        if 'submit' in request.POST:
            form = CreatePipeline(request.POST)
            if form.is_valid():
                # A name clash only matters if it belongs to a *different* pipeline.
                newpipelinename = Pipeline.objects.filter(project=project, pipeline_name=form.cleaned_data['pipelinename'])
                if len(newpipelinename):
                    for oneitem in newpipelinename:
                        if oneitem.pipeline_name != pipeline.pipeline_name:
                            errors = form._errors.setdefault('pipelinename', ErrorList())
                            errors.append(
                                'Pipeline named %s already exists. Please choose another name' % form.cleaned_data[
                                    'pipelinename'])
                            return render(request, 'editpipeline.html',
                                          {'username': request.user.username, 'form': form, 'project': project.project_name, 'items': defined_items})
                # Same rule for the execution order.
                newpipelineorder = Pipeline.objects.filter(project=project,
                                                           pipeline_order=form.cleaned_data['pipelineorder'])
                if len(newpipelineorder):
                    for oneitem in newpipelineorder:
                        if oneitem.pipeline_order != pipeline.pipeline_order:
                            errors = form._errors.setdefault('pipelineorder', ErrorList())
                            errors.append(
                                'Pipeline order %s already exists for another pipeline function. Enter a different order' % form.cleaned_data['pipelineorder'])
                            return render(request, 'editpipeline.html',
                                          {'username': request.user.username, 'form': form, 'project': project.project_name, 'items': defined_items})
                pipeline.pipeline_name = form.cleaned_data['pipelinename']
                pipeline.pipeline_order = form.cleaned_data['pipelineorder']
                pipeline.pipeline_function = form.cleaned_data['pipelinefunction']
                pipeline.save()
                return HttpResponseRedirect(reverse("listpipelines", args=(project.project_name,)))
            else:
                return render(request, "editpipeline.html",
                              {'username': request.user.username, 'form': form, 'project': project.project_name, 'items': defined_items})
@login_required
def deletepipeline(request, projectname, pipelinename):
    """Confirm (GET) and perform (POST 'submit') deletion of a pipeline."""
    try:
        project = Project.objects.get(user=request.user, project_name=projectname)
    except Project.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')
    try:
        pipeline = Pipeline.objects.get(project=project, pipeline_name=pipelinename)
    except Pipeline.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')
    if request.method == 'GET':
        form = DeleteProject()
        return render(request, 'deletepipeline.html',
                      {'username': request.user.username,
                       'form': form, 'projectname': project.project_name, 'pipelinename': pipeline.pipeline_name})
    elif request.method == 'POST':
        if 'cancel' in request.POST:
            return HttpResponseRedirect(reverse("listpipelines", args=(project.project_name,)))
        elif 'submit' in request.POST:
            pipeline.delete()
            return HttpResponseRedirect(reverse("listpipelines", args=(project.project_name,)))
        else:
            # BUGFIX: previously a POST without 'cancel'/'submit' returned
            # None, making Django raise ValueError; return 404 instead.
            return HttpResponseNotFound('Nothing is here.')
@login_required
def linkgenerator(request, projectname):
    """Edit the project's link-generator spider body."""
    try:
        project = Project.objects.get(user=request.user, project_name=projectname)
    except Project.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')
    # The form label shows the spider class header the code will live under.
    spiderclassnamelabel = "class " + request.user.username.title() + project.project_name.title() + "Spider:"
    if request.method == 'GET':
        form = LinkGenerator(initial={'function': project.link_generator})
        form.fields['function'].label = spiderclassnamelabel
        return render(request,
                      'addlinkgenerator.html', {'username': request.user.username,
                                                'form': form, 'project': project.project_name})
    if request.method == 'POST':
        if 'cancel' in request.POST:
            return HttpResponseRedirect(reverse("manageproject", args=(project.project_name,)))
        if 'submit' in request.POST:
            form = LinkGenerator(request.POST)
            form.fields['function'].label = spiderclassnamelabel
            if not form.is_valid():
                return render(request, 'addlinkgenerator.html',
                              {'username': request.user.username, 'form': form, 'project': project.project_name})
            project.link_generator = form.cleaned_data['function']
            project.save()
            return HttpResponseRedirect(reverse("manageproject", args=(project.project_name,)))
@login_required
def scraper(request, projectname):
    """Edit the project's scraper spider body."""
    try:
        project = Project.objects.get(user=request.user, project_name=projectname)
    except Project.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')
    # The form label shows the spider class header the code will live under.
    spiderclassnamelabel = "class " + request.user.username.title() + project.project_name.title() + "Spider:"
    if request.method == 'GET':
        form = Scraper(initial={'function': project.scraper_function})
        form.fields['function'].label = spiderclassnamelabel
        return render(request, 'addscraper.html', {'username': request.user.username, 'form': form, 'project': project.project_name})
    if request.method == 'POST':
        if 'cancel' in request.POST:
            return HttpResponseRedirect(reverse("manageproject", args=(projectname,)))
        if 'submit' in request.POST:
            form = Scraper(request.POST)
            form.fields['function'].label = spiderclassnamelabel
            if not form.is_valid():
                return render(request, 'addscraper.html',
                              {'username': request.user.username, 'form': form, 'project': project.project_name})
            project.scraper_function = form.cleaned_data['function']
            project.save()
            return HttpResponseRedirect(reverse("manageproject", args=(projectname,)))
def create_folder_tree(tree):
    """(Re)create directory *tree*, guaranteeing it exists and is empty.

    Any pre-existing directory at the path is removed along with its
    contents before recreation.
    """
    d = os.path.abspath(tree)
    # Wipe whatever is there, then create once — instead of duplicating
    # the makedirs call in both branches.
    if os.path.exists(d):
        shutil.rmtree(d)
    os.makedirs(d)
@login_required
def change_password(request):
    """Change the user's Django password and propagate it to MongoDB
    (and, when enabled in settings, to the user's Linux account).

    GET (or invalid POST) re-renders the form; a valid POST saves the new
    password and redirects to the main page.
    """
    if request.method == 'POST':
        form = ChangePass(request.user, request.POST)
        if form.is_valid():
            user = form.save()
            # Keep the current session authenticated after the change.
            update_session_auth_hash(request, user)
            # Sync the new password to the user's MongoDB account.
            mongodb_user_password_change(request.user.username, form.cleaned_data['new_password1'])
            if settings.LINUX_USER_CREATION_ENABLED:
                try:
                    linux_user_pass_change(request.user.username, form.cleaned_data['new_password1'])
                except:
                    # Best-effort: a failed Linux password sync is deliberately
                    # swallowed so the web password change still succeeds.
                    pass
            return HttpResponseRedirect(reverse("mainpage"))
        else:
            return render(request, 'changepassword.html', {
                'username': request.user.username,
                'form': form
            })
    else:
        form = ChangePass(request.user)
        return render(request, 'changepassword.html', {
            'username': request.user.username,
            'form': form
        })
@login_required
def deploy(request, projectname):
try:
project = Project.objects.get(user=request.user, project_name=projectname)
except Project.DoesNotExist:
return HttpResponseNotFound('Nothing is here.')
projectitems = Item.objects.filter(project=project)
projectlinkgenfunction = project.link_generator
projectscraperfunction = project.scraper_function
if not projectitems or not projectlinkgenfunction or not projectscraperfunction:
return HttpResponseNotFound('Not all required project parts are present for deployment. Please review your project and deploy again.')
basepath = os.path.dirname(os.path.abspath(__file__))
#we are giving a project and its folders a unique name on disk, so that no name conflicts occur when deploying the projects
projectnameonfile = request.user.username + '_' + projectname
#removing the project folder, if exists
create_folder_tree(basepath + "/projects/%s/%s" % (request.user.username, projectname))
#Create project folder structure
folder1 = basepath + "/projects/%s/%s/%s/%s/%s" % (request.user.username, projectname, 'scraper', projectnameonfile, 'spiders')
folder2 = basepath + "/projects/%s/%s/%s/%s/%s" % (request.user.username, projectname, 'linkgenerator', projectnameonfile, 'spiders')
#Link generator folders
linkgenouterfolder = basepath + "/projects/%s/%s/%s" % (request.user.username, projectname, 'linkgenerator')
linkgenprojectfolder = basepath + "/projects/%s/%s/%s/%s" % (request.user.username, projectname, 'linkgenerator', projectnameonfile)
linkgenspiderfolder = basepath + "/projects/%s/%s/%s/%s/%s" % (request.user.username, projectname, 'linkgenerator', projectnameonfile, 'spiders')
#Scraper folders
scraperouterfolder = basepath + "/projects/%s/%s/%s" % (request.user.username, projectname, 'scraper')
scraperprojectfolder = basepath + "/projects/%s/%s/%s/%s" % (request.user.username, projectname, 'scraper', projectnameonfile)
scraperspiderfolder = basepath + "/projects/%s/%s/%s/%s/%s" % (request.user.username, projectname, 'scraper', projectnameonfile, 'spiders')
#Link generator files
linkgencfgfile = linkgenouterfolder + "/scrapy.cfg"
linkgensettingsfile = linkgenprojectfolder + "/settings.py"
linkgenspiderfile = linkgenspiderfolder + "/%s_%s.py" % (request.user.username, projectname)
#Scraper files
scrapercfgfile = scraperouterfolder + "/scrapy.cfg"
scrapersettingsfile = scraperprojectfolder + "/settings.py"
scraperspiderfile = scraperspiderfolder + "/%s_%s.py" % (request.user.username, projectname)
scraperitemsfile = scraperprojectfolder + "/items.py"
scraperpipelinefile = scraperprojectfolder + "/pipelines.py"
#Create needed folders
create_folder_tree(folder1)
create_folder_tree(folder2)
#putting __init.py__ files in linkgenerator
shutil.copy(basepath + '/scrapy_packages/__init__.py', linkgenprojectfolder)
shutil.copy(basepath + '/scrapy_packages/__init__.py', linkgenspiderfolder)
#putting rabbitmq folder alongside project
shutil.copytree(basepath + '/scrapy_packages/rabbitmq', linkgenprojectfolder + '/rabbitmq')
#creating a cfg for link generator
scrapycfg = '''[settings]\n
default = %s.settings
[deploy:linkgenerator]
url = %s
project = %s
''' % (projectnameonfile, settings.LINK_GENERATOR, projectnameonfile)
with open(linkgencfgfile, 'w') as f:
f.write(scrapycfg)
#creating a settings.py file for link generator
with open(basepath + '/scrapy_templates/settings.py.tmpl', 'r') as f:
settingspy = Template(f.read()).substitute(project_name=projectnameonfile)
settingspy += '\n' + project.settings_link_generator
settingspy += '\nSCHEDULER = "%s"' % (projectnameonfile + settings.SCHEDULER)
settingspy += '\nSCHEDULER_PERSIST = %s' % settings.SCHEDULER_PERSIST
settingspy += '\nRABBITMQ_HOST = "%s"' % settings.RABBITMQ_HOST
settingspy += '\nRABBITMQ_PORT = %s' % settings.RABBITMQ_PORT
settingspy += '\nRABBITMQ_USERNAME = "%s"' % settings.RABBITMQ_USERNAME
settingspy += '\nRABBITMQ_PASSWORD = "%s"' % settings.RABBITMQ_PASSWORD
with open(linkgensettingsfile, 'w') as f:
f.write(settingspy)
#creating a spider file for link generator
with open(basepath + '/scrapy_templates/linkgenspider.py.tmpl', 'r') as f:
spider = Template(f.read()).substitute(spider_name=request.user.username + "_" + projectname, SpiderClassName=request.user.username.title() + projectname.title() + "Spider")
spider += '\n'
linkgenlines = project.link_generator.splitlines()
for lines in linkgenlines:
spider += ' ' + lines + '\n'
with open(linkgenspiderfile, 'w') as f:
f.write(spider)
# putting __init.py__ files in scraper
shutil.copy(basepath + '/scrapy_packages/__init__.py', scraperprojectfolder)
shutil.copy(basepath + '/scrapy_packages/__init__.py', scraperspiderfolder)
# putting rabbitmq folder alongside project
shutil.copytree(basepath + '/scrapy_packages/rabbitmq', scraperprojectfolder + '/rabbitmq')
# putting mongodb folder alongside project
shutil.copytree(basepath + '/scrapy_packages/mongodb', scraperprojectfolder + '/mongodb')
# creating a cfg for scraper
scrapycfg = '''[settings]\n
default = %s.settings\n\n''' % (projectnameonfile)
workercount = 1
for worker in settings.SCRAPERS:
scrapycfg += '[deploy:worker%d]\nurl = %s\n' % (workercount, worker)
workercount += 1
scrapycfg += '\nproject = %s' % (projectnameonfile)
with open(scrapercfgfile, 'w') as f:
f.write(scrapycfg)
# creating a spider file for scraper
with open(basepath + '/scrapy_templates/scraperspider.py.tmpl', 'r') as f:
spider = Template(f.read()).substitute(spider_name=request.user.username + "_" + projectname,
SpiderClassName=request.user.username.title() + projectname.title() + "Spider",
project_name=projectnameonfile)
spider += '\n'
scraperlines = project.scraper_function.splitlines()
for lines in scraperlines:
spider += ' ' + lines + '\n'
with open(scraperspiderfile, 'w') as f:
f.write(spider)
#creating items file for scraper
items = Item.objects.filter(project=project)
itemsfile = 'import scrapy\n'
fieldtemplate = ' %s = scrapy.Field()\n'
for item in items:
itemsfile += 'class %s(scrapy.Item):\n' % item.item_name
fields = Field.objects.filter(item=item)
for field in fields:
itemsfile += fieldtemplate % field.field_name
itemsfile += fieldtemplate % 'ack_signal'
itemsfile += '\n'
with open(scraperitemsfile, 'w') as f:
f.write(itemsfile)
#creating pipelines file for scraper
pipelinesfile = ''
pipelinedict = {}
pipelines = Pipeline.objects.filter(project=project)
for pipeline in pipelines:
pipelinedict[pipeline.pipeline_name] = pipeline.pipeline_order
pipelinesfile += 'class %s(object):\n' % pipeline.pipeline_name
pipfunctionlines = pipeline.pipeline_function.splitlines()
for lines in pipfunctionlines:
pipelinesfile += ' ' + lines + '\n'
with open(scraperpipelinefile, 'w') as f:
f.write(pipelinesfile)
# creating a settings.py file for scraper
with open(basepath + '/scrapy_templates/settings.py.tmpl', 'r') as f:
settingspy = Template(f.read()).substitute(project_name=projectnameonfile)
settingspy += '\n' + project.settings_scraper
settingspy += '\nSCHEDULER = "%s"' % (projectnameonfile + settings.SCHEDULER)
settingspy += '\nSCHEDULER_PERSIST = %s' % settings.SCHEDULER_PERSIST
settingspy += '\nRABBITMQ_HOST = "%s"' % settings.RABBITMQ_HOST
settingspy += '\nRABBITMQ_PORT = %s' % settings.RABBITMQ_PORT
settingspy += '\nRABBITMQ_USERNAME = "%s"' % settings.RABBITMQ_USERNAME
settingspy += '\nRABBITMQ_PASSWORD = "%s"' % settings.RABBITMQ_PASSWORD
settingspy += '\nMONGODB_URI = "%s"' % settings.MONGODB_URI
settingspy += '\nMONGODB_SHARDED = %s' % settings.MONGODB_SHARDED
settingspy += '\nMONGODB_BUFFER_DATA = %s' % settings.MONGODB_BUFFER_DATA
settingspy += '\nMONGODB_USER = "%s"' % settings.MONGODB_USER
settingspy += '\nMONGODB_PASSWORD = "%s"' % settings.MONGODB_PASSWORD
settingspy += '\nITEM_PIPELINES = { "%s.mongodb.scrapy_mongodb.MongoDBPipeline": 999, \n' % projectnameonfile
for key in pipelinedict:
settingspy += '"%s.pipelines.%s": %s, \n' % (projectnameonfile, key, pipelinedict[key])
settingspy += '}'
with open(scrapersettingsfile, 'w') as f:
f.write(settingspy)
#putting setup.py files in appropriate folders
with open(basepath + '/scrapy_templates/setup.py', 'r') as f:
setuppy = Template(f.read()).substitute(projectname=projectnameonfile)
with open(linkgenouterfolder + '/setup.py', 'w') as f:
f.write(setuppy)
with open(scraperouterfolder + '/setup.py', 'w') as f:
f.write(setuppy)
class cd:
"""Context manager for changing the current working directory"""
def __init__(self, newPath):
self.newPath = os.path.expanduser(newPath)
def __enter__(self):
self.savedPath = os.getcwd()
os.chdir(self.newPath)
def __exit__(self, etype, value, traceback):
os.chdir(self.savedPath)
with cd(linkgenouterfolder):
os.system("python setup.py bdist_egg")
with cd(scraperouterfolder):
os.system("python setup.py bdist_egg")
linkgeneggfile = glob.glob(linkgenouterfolder + "/dist/*.egg")
scrapereggfile = glob.glob(scraperouterfolder + "/dist/*.egg")
linkgenlastdeploy = LinkgenDeploy.objects.filter(project=project).order_by('-version')[:1]
if linkgenlastdeploy:
linkgenlastdeploy = linkgenlastdeploy[0].version
else:
linkgenlastdeploy = 0
scraperslastdeploy = ScrapersDeploy.objects.filter(project=project).order_by('-version')[:1]
if scraperslastdeploy:
scraperslastdeploy = scraperslastdeploy[0].version
else:
scraperslastdeploy = 0
try:
with open(linkgeneggfile[0], 'rb') as f:
files = {'egg': f}
payload = {'project': '%s' % (projectnameonfile), 'version': (linkgenlastdeploy + 1)}
r = requests.post('%s/addversion.json' % settings.LINK_GENERATOR, data=payload, files=files, timeout=(3, None))
result = r.json()
deploylinkgen = LinkgenDeploy()
deploylinkgen.project = project
deploylinkgen.version = linkgenlastdeploy + 1
if result["status"] != "ok":
deploylinkgen.success = False
else:
deploylinkgen.success = True
deploylinkgen.save()
except:
deploylinkgen = LinkgenDeploy()
deploylinkgen.project = project
deploylinkgen.version = linkgenlastdeploy + 1
deploylinkgen.success = False
deploylinkgen.save()
with open(scrapereggfile[0], 'rb') as f:
eggfile = f.read()
files = {'egg' : eggfile}
payload = {'project': '%s' % (projectnameonfile), 'version': (scraperslastdeploy + 1)}
deployscraper = ScrapersDeploy()
deployscraper.project = project
deployscraper.version = scraperslastdeploy + 1
deployedscraperslist = []
scrapercounter = 1
for onescraper in settings.SCRAPERS:
try:
r = requests.post('%s/addversion.json' % onescraper, data=payload, files=files, timeout=(3, None))
result = r.json()
if result['status'] == 'ok':
deployedscraperslist.append("worker%s" %scrapercounter)
except:
pass
scrapercounter += 1
deployscraper.success = json.dumps(deployedscraperslist)
deployscraper.save()
return HttpResponseRedirect(reverse('deploystatus', args=(projectname,)))
@login_required
def deployment_status(request, projectname):
    """Render the deployment-status page.

    Every node (the link generator plus one entry per scraper) is listed
    with placeholder 'Loading...' status/version; the page's JS polls the
    status endpoint to fill in real values.
    """
    try:
        Project.objects.get(user=request.user, project_name=projectname)
    except Project.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')
    # Worker names are 1-based ('worker1', 'worker2', ...), matching the
    # numbering used by the status/start/stop endpoints.
    workers = [{'name': 'linkgenerator', 'status': 'Loading...', 'version': 'Loading...'}]
    workers.extend(
        {'name': 'worker%s' % idx, 'status': 'Loading...', 'version': 'Loading...'}
        for idx, _ in enumerate(settings.SCRAPERS, start=1)
    )
    return render(request, "deployment_status.html", {'project': projectname, 'username': request.user.username, 'workers': workers})
@login_required
def get_project_status_from_all_workers(request, projectname):
    """AJAX endpoint: poll the link generator and every scraper node for the
    deploy/run state of this project; return a JSON list of per-worker dicts
    {'name', 'status', 'version'}.

    Emitted status strings: 'ready', 'finished', 'pending', 'running',
    'not delpoyed' (sic -- this misspelling is an emitted value the frontend
    may compare against, so it is deliberately preserved), 'unreachable',
    'unknown'.
    """
    # Scrapyd project names are namespaced per owning user.
    uniqueprojectname = request.user.username + '_' + projectname
    try:
        project = Project.objects.get(user=request.user, project_name=projectname)
    except Project.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')
    counter = 1  # 1-based worker numbering, matches deployment_status()
    if request.method == 'POST':
        allworkers = []
        # --- link generator node --------------------------------------
        workerstatus = {}
        workerstatus['name'] = 'linkgenerator'
        try:
            r = requests.get('%s/listprojects.json' % settings.LINK_GENERATOR,timeout=(3, None))
            result = r.json()
            if uniqueprojectname in result['projects']:
                workerstatus['status'] = 'ready'
                # Latest deployed version = last entry of scrapyd's list.
                try:
                    q = requests.get('%s/listversions.json' % settings.LINK_GENERATOR, params={'project': uniqueprojectname},timeout=(3, None))
                    qresult = q.json()
                    version = qresult['versions'][-1]
                    workerstatus['version'] = version
                except:
                    workerstatus['version'] = 'unknown'
                # Job state: later checks overwrite earlier ones, so
                # 'running' outranks 'pending' which outranks 'finished'.
                try:
                    s = requests.get('%s/listjobs.json' % settings.LINK_GENERATOR, params={'project': uniqueprojectname}, timeout=(3, None))
                    sresult = s.json()
                    if sresult['finished']:
                        workerstatus['status'] = 'finished'
                    if sresult['pending']:
                        workerstatus['status'] = 'pending'
                    if sresult['running']:
                        workerstatus['status'] = 'running'
                except:
                    workerstatus['status'] = 'unknown'
            else:
                workerstatus['status'] = 'not delpoyed'
                workerstatus['version'] = 'unknown'
        except:
            # Node did not answer listprojects.json at all.
            workerstatus['status'] = 'unreachable'
            workerstatus['version'] = 'unknown'
        allworkers.append(workerstatus)
        # --- scraper nodes: same probing logic, one dict per node -----
        for worker in settings.SCRAPERS:
            workerstatus = {}
            workerstatus['name'] = 'worker%s' % counter
            try:
                r = requests.get('%s/listprojects.json' % worker, timeout=(3, None))
                result = r.json()
                if uniqueprojectname in result['projects']:
                    workerstatus['status'] = 'ready'
                    try:
                        q = requests.get('%s/listversions.json' % worker,
                                         params={'project': uniqueprojectname}, timeout=(3, None))
                        qresult = q.json()
                        version = qresult['versions'][-1]
                        workerstatus['version'] = version
                    except:
                        workerstatus['version'] = 'unknown'
                    try:
                        s = requests.get('%s/listjobs.json' % worker,
                                         params={'project': uniqueprojectname}, timeout=(3, None))
                        sresult = s.json()
                        if sresult['finished']:
                            workerstatus['status'] = 'finished'
                        if sresult['pending']:
                            workerstatus['status'] = 'pending'
                        if sresult['running']:
                            workerstatus['status'] = 'running'
                    except:
                        workerstatus['status'] = 'unknown'
                else:
                    workerstatus['status'] = 'not delpoyed'
                    workerstatus['version'] = 'unknown'
            except:
                workerstatus['status'] = 'unreachable'
                workerstatus['version'] = 'unknown'
            allworkers.append(workerstatus)
            counter += 1
        return JsonResponse(allworkers, safe=False)
    # NOTE(review): non-POST requests fall through and return None, which
    # Django rejects with a ValueError; the frontend only POSTs here.
@login_required
def start_project(request, projectname, worker):
    """Ask a single node (linkgenerator or workerN) to schedule a crawl of
    this project via scrapyd's schedule.json; always answers
    'sent start signal' regardless of outcome (fire-and-forget)."""
    uniqueprojectname = request.user.username + '_' + projectname
    try:
        Project.objects.get(user=request.user, project_name=projectname)
    except Project.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')
    if request.method == 'POST':
        # Resolve the target scrapyd address from the worker identifier.
        address = None
        if 'linkgenerator' in worker:
            address = settings.LINK_GENERATOR
        elif 'worker' in worker:
            digits = ''.join(ch for ch in worker if ch.isdigit())
            address = settings.SCRAPERS[int(digits) - 1]
        if address is not None:
            try:
                requests.post('%s/schedule.json' % address,
                              data={'project': uniqueprojectname, 'spider': uniqueprojectname},
                              timeout=(3, None))
            except:
                pass  # best-effort: unreachable nodes are silently skipped
    return HttpResponse('sent start signal')
@login_required
def stop_project(request, projectname, worker):
    """Cancel the first currently-running scrapyd job for this project on a
    single node; always answers 'sent stop signal' (fire-and-forget)."""
    uniqueprojectname = request.user.username + '_' + projectname
    try:
        Project.objects.get(user=request.user, project_name=projectname)
    except Project.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')
    if request.method == 'POST':
        # Resolve the target scrapyd address from the worker identifier.
        address = None
        if 'linkgenerator' in worker:
            address = settings.LINK_GENERATOR
        elif 'worker' in worker:
            digits = ''.join(ch for ch in worker if ch.isdigit())
            address = settings.SCRAPERS[int(digits) - 1]
        if address is not None:
            try:
                jobs = requests.get('%s/listjobs.json' % address,
                                    params={'project': uniqueprojectname},
                                    timeout=(3, None)).json()
                jobid = jobs['running'][0]['id']
                requests.post('%s/cancel.json' % address,
                              params={'project': uniqueprojectname, 'job': jobid},
                              timeout=(3, None))
            except:
                pass  # no running job / node unreachable: nothing to cancel
    return HttpResponse('sent stop signal')
@login_required
def see_log_file(request, projectname, worker):
    """Fetch and return (as text/plain) the log of the most recently finished
    scrapyd job for this project on the given node.

    Fix: the original referenced ``log`` after the if/elif chain, so a worker
    name matching neither branch -- or a non-GET request -- raised
    UnboundLocalError instead of returning a response.  Both paths now 404.
    """
    uniqueprojectname = request.user.username + '_' + projectname
    try:
        project = Project.objects.get(user=request.user, project_name=projectname)
    except Project.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')
    if request.method == 'GET':
        # Resolve the target scrapyd address from the worker identifier.
        if 'linkgenerator' in worker:
            workeraddress = settings.LINK_GENERATOR
        elif 'worker' in worker:
            workernumber = int(''.join(x for x in worker if x.isdigit()))
            workeraddress = settings.SCRAPERS[workernumber - 1]
        else:
            return HttpResponseNotFound('Nothing is here.')
        try:
            r = requests.get('%s/listjobs.json' % workeraddress,
                             params={'project': uniqueprojectname}, timeout=(3, None))
            result = r.json()
            # Most recently finished job is the last entry scrapyd reports.
            jobid = result['finished'][-1]['id']
            log = requests.get('%s/logs/%s/%s/%s.log' % (workeraddress, uniqueprojectname, uniqueprojectname, jobid))
        except:
            return HttpResponse('could not retrieve the log file')
        return HttpResponse(log.text, content_type='text/plain')
    return HttpResponseNotFound('Nothing is here.')
@login_required
def gather_status_for_all_projects(request):
    """AJAX endpoint: aggregate stats.log data from every node for each of
    the user's projects and return {project_name: summary} as JSON.

    Each summary counts finished/running nodes, total errors, per-item
    scrape counts ('item-<name>' keys) and the earliest/latest start/finish
    timestamps seen across all nodes.
    """
    projectsdict = {}
    workers = []
    for worker in settings.SCRAPERS:
        workers.append(worker)
    workers.append(settings.LINK_GENERATOR)
    # Map each of the user's projects to its declared item names.
    projects = Project.objects.filter(user=request.user)
    for project in projects:
        projectsdict[project.project_name] = []
        project_items = Item.objects.filter(project=project)
        for item in project_items:
            projectsdict[project.project_name].append(item.item_name)
    if request.method == 'POST':
        if projectsdict:
            allprojectdata = {}
            for key in projectsdict:
                workerstatus = {}
                earliest_start_time = None
                earliest_finish_time = None
                latest_start_time = None
                latest_finish_time = None
                uniqueprojectname = request.user.username + '_' + key
                for worker in workers:
                    try:
                        log = requests.get('%s/logs/%s/%s/stats.log' % (worker, uniqueprojectname, uniqueprojectname), timeout=(3, None))
                        if log.status_code == 200:
                            # stats.log is a Python-repr dict; swapping quote
                            # characters lets json parse it.  NOTE(review):
                            # this breaks if any value contains a quote.
                            result = json.loads(log.text.replace("'", '"'))
                            if result.get('project_stopped', 0):
                                workerstatus['finished'] = workerstatus.get('finished', 0) + 1
                            else:
                                workerstatus['running'] = workerstatus.get('running', 0) + 1
                            if result.get('log_count/ERROR', 0):
                                workerstatus['errors'] = workerstatus.get('errors', 0) + result.get('log_count/ERROR', 0)
                            # Per-item scrape counters, summed across nodes.
                            for item in projectsdict[key]:
                                if result.get(item, 0):
                                    workerstatus['item-%s' % item] = workerstatus.get('item-%s' % item, 0) + result.get(item, 0)
                            # Track extreme start/finish times over all nodes.
                            if result.get('start_time', False):
                                start_time = dateutil.parser.parse(result['start_time'])
                                if earliest_start_time is None:
                                    earliest_start_time = start_time
                                else:
                                    if start_time < earliest_start_time:
                                        earliest_start_time = start_time
                                if latest_start_time is None:
                                    latest_start_time = start_time
                                else:
                                    if start_time > latest_start_time:
                                        latest_start_time = start_time
                            if result.get('finish_time', False):
                                finish_time = dateutil.parser.parse(result['finish_time'])
                                if earliest_finish_time is None:
                                    earliest_finish_time = finish_time
                                else:
                                    if finish_time < earliest_finish_time:
                                        earliest_finish_time = finish_time
                                if latest_finish_time is None:
                                    latest_finish_time = finish_time
                                else:
                                    if finish_time > latest_finish_time:
                                        latest_finish_time = finish_time
                        elif log.status_code == 404:
                            # No stats.log: project never launched there.
                            workerstatus['hasntlaunched'] = workerstatus.get('hasntlaunched', 0) + 1
                        else:
                            workerstatus['unknown'] = workerstatus.get('unknown', 0) + 1
                    except:
                        workerstatus['unknown'] = workerstatus.get('unknown', 0) + 1
                if earliest_start_time is not None:
                    workerstatus['earliest_start_time'] = earliest_start_time.strftime("%B %d, %Y %H:%M:%S")
                if earliest_finish_time is not None:
                    workerstatus['earliest_finish_time'] = earliest_finish_time.strftime("%B %d, %Y %H:%M:%S")
                if latest_start_time is not None:
                    workerstatus['latest_start_time'] = latest_start_time.strftime("%B %d, %Y %H:%M:%S")
                if latest_finish_time is not None:
                    workerstatus['latest_finish_time'] = latest_finish_time.strftime("%B %d, %Y %H:%M:%S")
                allprojectdata[key] = workerstatus
            return JsonResponse(allprojectdata, safe=True)
    return HttpResponse('{}')
@login_required
def editsettings(request, settingtype, projectname):
    """Edit the extra Scrapy settings stored on a project.

    ``settingtype`` selects which field is edited: 'linkgenerator' maps to
    ``settings_link_generator``, 'scraper' to ``settings_scraper``.

    Fix: the original had no return for a GET with an unrecognised
    settingtype, nor for a POST with neither 'cancel' nor 'submit', so the
    view returned None and Django raised a ValueError.  Those paths now 404.
    """
    try:
        project = Project.objects.get(user=request.user, project_name=projectname)
    except Project.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')
    if request.method == 'GET':
        if settingtype == 'linkgenerator':
            settingtext = project.settings_link_generator
            form = Settings(initial={'settings': settingtext})
            return render(request, "editsettings.html", {'username': request.user.username, 'project': projectname, 'form': form, 'settingtype': settingtype})
        if settingtype == 'scraper':
            settingtext = project.settings_scraper
            form = Settings(initial={'settings': settingtext})
            return render(request, "editsettings.html", {'username': request.user.username, 'project': projectname, 'form': form, 'settingtype': settingtype})
    if request.method == 'POST':
        if 'cancel' in request.POST:
            return HttpResponseRedirect(reverse("manageproject", args=(projectname,)))
        if 'submit' in request.POST:
            form = Settings(request.POST)
            if form.is_valid():
                if settingtype == "linkgenerator":
                    project.settings_link_generator = form.cleaned_data['settings']
                    project.save()
                if settingtype == "scraper":
                    project.settings_scraper = form.cleaned_data['settings']
                    project.save()
                return HttpResponseRedirect(reverse("manageproject", args=(projectname,)))
            else:
                # Invalid form: redisplay with the field errors attached.
                return render(request, "editsettings.html",
                              {'username': request.user.username, 'project': projectname, 'form': form,
                               'settingtype': settingtype})
    # Unknown settingtype / method / button combination.
    return HttpResponseNotFound('Nothing is here.')
@login_required
def start_project_on_all(request, projectname):
    """Schedule a crawl of this project on the link generator and on every
    scraper node; always answers 'sent start signal' (fire-and-forget)."""
    uniqueprojectname = request.user.username + '_' + projectname
    try:
        Project.objects.get(user=request.user, project_name=projectname)
    except Project.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')
    # Link generator first, then every scraper node.
    targets = [settings.LINK_GENERATOR] + list(settings.SCRAPERS)
    if request.method == 'POST':
        payload = {'project': uniqueprojectname, 'spider': uniqueprojectname}
        for address in targets:
            try:
                requests.post('%s/schedule.json' % address, data=payload, timeout=(3, None))
            except:
                pass  # best-effort: unreachable nodes are silently skipped
    return HttpResponse('sent start signal')
@login_required
def stop_project_on_all(request, projectname):
    """Cancel the first running scrapyd job for this project on the link
    generator and on every scraper node; always answers 'sent stop signal'."""
    uniqueprojectname = request.user.username + '_' + projectname
    try:
        Project.objects.get(user=request.user, project_name=projectname)
    except Project.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')
    # Link generator first, then every scraper node.
    targets = [settings.LINK_GENERATOR] + list(settings.SCRAPERS)
    if request.method == 'POST':
        for address in targets:
            try:
                jobs = requests.get('%s/listjobs.json' % address,
                                    params={'project': uniqueprojectname},
                                    timeout=(3, None)).json()
                jobid = jobs['running'][0]['id']
                requests.post('%s/cancel.json' % address,
                              params={'project': uniqueprojectname, 'job': jobid},
                              timeout=(3, None))
            except:
                pass  # no running job / node unreachable: nothing to cancel
    return HttpResponse('sent stop signal')
@login_required
def get_global_system_status(request):
    """AJAX endpoint: TCP-probe every system component (scraper nodes, link
    generator, RabbitMQ, MongoDB) and return reachability info as JSON.

    'scrapers' is the count of reachable scraper nodes; the other keys are
    booleans; 'databaseaddress' echoes the public MongoDB address.
    """
    status = {}
    workers = []
    for worker in settings.SCRAPERS:
        workers.append(worker)
    worker_count = 0
    # Count scraper nodes whose scrapyd port accepts a TCP connection.
    for worker in workers:
        try:
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.settimeout(3)
            host = urlparse(worker).hostname
            port = int(urlparse(worker).port)
            # connect_ex returns 0 on success instead of raising.
            result = sock.connect_ex((host, port))
            if result == 0:
                worker_count += 1
        except:
            pass
        finally:
            # NOTE(review): if socket.socket() itself raised on the first
            # iteration, `sock` would be unbound here -- unlikely but real.
            sock.close()
    status['scrapers'] = worker_count
    # Link generator reachability.
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(3)
        host = urlparse(settings.LINK_GENERATOR).hostname
        port = int(urlparse(settings.LINK_GENERATOR).port)
        result = sock.connect_ex((host, port))
        if result == 0:
            status['linkgenerator'] = True
        else:
            status['linkgenerator'] = False
    except:
        status['linkgenerator'] = False
    finally:
        sock.close()
    # RabbitMQ reachability (host/port come straight from settings).
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(3)
        result = sock.connect_ex((settings.RABBITMQ_HOST, settings.RABBITMQ_PORT))
        if result == 0:
            status['queue'] = True
        else:
            status['queue'] = False
    except:
        status['queue'] = False
    finally:
        sock.close()
    # MongoDB reachability; MONGODB_URI is bare host:port, so a scheme is
    # prefixed purely to let urlparse split it.
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(3)
        host = urlparse("http://" + settings.MONGODB_URI).hostname
        port = int(urlparse("http://" + settings.MONGODB_URI).port)
        result = sock.connect_ex((host, port))
        if result == 0:
            status['database'] = True
        else:
            status['database'] = False
    except:
        status['database'] = False
    finally:
        sock.close()
    status['databaseaddress'] = settings.MONGODB_PUBLIC_ADDRESS
    return JsonResponse(status, safe=False)
@login_required
def share_db(request, projectname):
    """Form view: share this project's scraped MongoDB database with another
    user.  On a valid submit the copy runs in a background thread
    (sharing_db) and a "started" page is rendered immediately.
    """
    uniqueprojectname = request.user.username + '_' + projectname
    try:
        project = Project.objects.get(user=request.user, project_name=projectname)
    except Project.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')
    if request.method == 'GET':
        form = ShareDB()
        return render(request, 'sharedb.html', {'username': request.user.username, 'form': form, 'projectname': projectname})
    if request.method == 'POST':
        if 'cancel' in request.POST:
            return HttpResponseRedirect(reverse("mainpage"))
        elif 'submit' in request.POST:
            form = ShareDB(request.POST)
            if form.is_valid():
                uname = form.cleaned_data['username']
                # Sharing with yourself is rejected via an inline form error.
                if uname == request.user.username:
                    errors = form._errors.setdefault("username", ErrorList())
                    errors.append('User name %s is your own account name.' % uname)
                    return render(request, 'sharedb.html',
                                  {'username': request.user.username, 'form': form, 'projectname': projectname})
                try:
                    username = User.objects.get(username=uname)
                except User.DoesNotExist:
                    errors = form._errors.setdefault("username", ErrorList())
                    errors.append('User %s does not exist in the system.' % uname)
                    return render(request, 'sharedb.html', {'username': request.user.username, 'form': form, 'projectname': projectname})
                #start thread here
                # The copy can be slow, so it runs outside the request cycle.
                thr = threading.Thread(target=sharing_db, args=(uniqueprojectname, username.username, projectname, request.user.username), kwargs={})
                thr.start()
                return render(request, 'sharedb_started.html',
                              {'username': request.user.username})
            else:
                return render(request, 'sharedb.html', {'username': request.user.username, 'form': form, 'projectname': projectname})
@login_required
def share_project(request, projectname):
    """Form view: clone this project into another user's account.

    On a valid submit the cloning runs in a background thread
    (sharing_project) and the user is redirected to the main page.
    """
    try:
        Project.objects.get(user=request.user, project_name=projectname)
    except Project.DoesNotExist:
        return HttpResponseNotFound('Nothing is here.')

    def page(form):
        # Common render of the share form with the current context.
        return render(request, 'shareproject.html',
                      {'username': request.user.username, 'form': form, 'projectname': projectname})

    if request.method == 'GET':
        return page(ShareProject())
    if request.method == 'POST':
        if 'cancel' in request.POST:
            return HttpResponseRedirect(reverse("mainpage"))
        elif 'submit' in request.POST:
            form = ShareProject(request.POST)
            if not form.is_valid():
                return page(form)
            uname = form.cleaned_data['username']
            # Sharing with yourself is rejected via an inline form error.
            if uname == request.user.username:
                form._errors.setdefault("username", ErrorList()).append(
                    'User name %s is your own account name.' % uname)
                return page(form)
            try:
                target = User.objects.get(username=uname)
            except User.DoesNotExist:
                form._errors.setdefault("username", ErrorList()).append(
                    'User %s does not exist in the system.' % uname)
                return page(form)
            #start thread here
            # Cloning can be slow, so it runs outside the request cycle.
            threading.Thread(target=sharing_project,
                             args=(target.username, projectname, request.user.username),
                             kwargs={}).start()
            return HttpResponseRedirect(reverse("mainpage"))
def sharing_db(dbname, target_user, projectname, username):
    """Copy MongoDB database *dbname* into a new database owned by
    *target_user*; runs in a background thread (see share_db).

    The copy is named '<project>_sharedby_<owner>'.  If that name already
    exists a numeric suffix is added.  NOTE(review): the suffix is appended
    cumulatively ('name' -> 'name1' -> 'name12'), not replaced -- confirm
    whether that is intended.
    """
    target_db_name = '%s_sharedby_%s' % (projectname, username)
    targetuser = User.objects.get(username=target_user)
    # Admin connection; the password is URL-quoted for the URI.
    mongouri = "mongodb://" + settings.MONGODB_USER + ":" + quote(
        settings.MONGODB_PASSWORD) + "@" + settings.MONGODB_URI + "/admin"
    connection = MongoClient(mongouri)
    existing_dbs = connection.database_names()
    # Keep extending the name until it collides with no existing database.
    checked_all_database_names = 0
    db_version = 1
    while not checked_all_database_names:
        checked_all_database_names = 1
        for onedbname in existing_dbs:
            if str(onedbname) == target_db_name:
                target_db_name += str(db_version)
                db_version += 1
                checked_all_database_names = 0
        existing_dbs = connection.database_names()
    database = connection[dbname]
    if settings.MONGODB_SHARDED:
        try:
            connection.admin.command('enableSharding', target_db_name)
        except:
            pass  # best-effort: sharding may already be enabled
    collections = database.collection_names()
    for i, collection_name in enumerate(collections):
        if collection_name != u'system.indexes':
            if settings.MONGODB_SHARDED:
                try:
                    connection.admin.command('shardCollection', '%s.%s' % (target_db_name, collection_name),
                                             key={'_id': "hashed"})
                except:
                    pass
            col = connection[dbname][collection_name]
            insertcol = connection[target_db_name][collection_name]
            # Page through the source 100 docs at a time; _id is dropped so
            # the target collection gets fresh ids.
            skip = 0
            collection = col.find(filter={}, projection={'_id': False}, limit=100, skip=skip*100)
            items = []
            for item in collection:
                items.append(item)
            while len(items) > 0:
                skip += 1
                insertcol.insert_many(items)
                collection = col.find(filter={}, projection={'_id': False}, limit=100, skip=skip * 100)
                items = []
                for item in collection:
                    items.append(item)
    # Grant ownership of the copy and register it as the target's Dataset.
    connection.admin.command('grantRolesToUser', target_user,
                             roles=[{'role': 'dbOwner', 'db': target_db_name}])
    dataset = Dataset()
    dataset.user = targetuser
    dataset.database = target_db_name
    dataset.save()
    connection.close()
def sharing_project(target_user, projectname, username):
    """Clone *projectname* (owned by *username*) into *target_user*'s account
    as '<project>_sharedby_<owner>'; runs in a background thread.

    Copies the Project row plus its Items, Fields and Pipelines, then grants
    the target user dbOwner on the database the clone will write to and
    records that database as a Dataset.
    """
    target_project_name = '%s_sharedby_%s' % (projectname, username)
    targetuser = User.objects.get(username=target_user)
    project = Project.objects.get(user=User.objects.get(username=username), project_name=projectname)
    newproject = Project(user=targetuser, project_name=target_project_name, link_generator=project.link_generator,
                         scraper_function=project.scraper_function, settings_scraper=project.settings_scraper,
                         settings_link_generator=project.settings_link_generator)
    newproject.save()
    # Deep-copy the item/field schema onto the new project.
    items = Item.objects.filter(project=project)
    for item in items:
        newitem = Item(item_name=item.item_name, project=newproject)
        newitem.save()
        fields = Field.objects.filter(item=item)
        for field in fields:
            newfield = Field(item=newitem, field_name=field.field_name)
            newfield.save()
    pipelines = Pipeline.objects.filter(project=project)
    for pipeline in pipelines:
        newpipeline = Pipeline(project=newproject, pipeline_function=pipeline.pipeline_function,
                               pipeline_name=pipeline.pipeline_name, pipeline_order=pipeline.pipeline_order)
        newpipeline.save()
    # Admin connection; the password is URL-quoted for the URI.
    mongouri = "mongodb://" + settings.MONGODB_USER + ":" + quote(
        settings.MONGODB_PASSWORD) + "@" + settings.MONGODB_URI + "/admin"
    connection = MongoClient(mongouri)
    # Databases appear to be named '<user>_<project>' elsewhere in this app,
    # hence the db name granted here.
    connection.admin.command('grantRolesToUser', target_user,
                             roles=[{'role': 'dbOwner', 'db': target_user + '_' + target_project_name}])
    dataset = Dataset()
    dataset.user = targetuser
    dataset.database = target_user + '_' + target_project_name
    dataset.save()
    connection.close()
def mongodb_user_creation(username, password):
    """Create a MongoDB account for *username* with no roles attached."""
    # Connect as the admin user; the password is URL-quoted for the URI.
    uri = "mongodb://%s:%s@%s/admin" % (
        settings.MONGODB_USER, quote(settings.MONGODB_PASSWORD), settings.MONGODB_URI)
    client = MongoClient(uri)
    client.admin.command('createUser', username, pwd=password, roles=[])
    client.close()
def mongodb_user_password_change(username, password):
    """Set a new password on an existing MongoDB account."""
    # Connect as the admin user; the password is URL-quoted for the URI.
    uri = "mongodb://%s:%s@%s/admin" % (
        settings.MONGODB_USER, quote(settings.MONGODB_PASSWORD), settings.MONGODB_URI)
    client = MongoClient(uri)
    client.admin.command('updateUser', username, pwd=password)
    client.close()
def linux_user_creation(username, password):
    """Create a Linux account for *username* with an encrypted password and
    a home directory owned by that user.

    Security fix: the original interpolated *username* into os.system()
    shell strings, allowing shell injection; argv lists avoid the shell.
    Exit statuses are still ignored, matching the original behaviour.
    """
    import subprocess  # local import: keeps the fix self-contained
    # Static salt reproduces the original hashing behaviour.
    # NOTE(review): crypt is deprecated (removed in Python 3.13) and a fixed
    # salt weakens the hash -- consider crypt.mksalt()/passlib. TODO confirm.
    encpass = crypt.crypt(password, "2424")
    subprocess.run(["useradd", "-p", encpass, username])
    subprocess.run(["mkdir", "/home/%s" % username])
    subprocess.run(["chown", "%s:%s" % (username, username), "/home/%s" % username])
def linux_user_pass_change(username, password):
    """Change an existing Linux account's password.

    Security fix: the original interpolated *username* into an os.system()
    shell string, allowing shell injection; the argv list avoids the shell.
    """
    import subprocess  # local import: keeps the fix self-contained
    # Static salt reproduces the original hashing behaviour (see
    # linux_user_creation for the deprecation/salt caveats).
    encpass = crypt.crypt(password, "2424")
    subprocess.run(["usermod", "-p", encpass, username])
@login_required
def database_preview(request, db):
    """Render the first 10 documents of every collection in database *db*.

    Access is restricted to databases registered as Dataset rows for the
    current user; anything else 404s.
    """
    datasets = Dataset.objects.filter(user=request.user)
    databases = []
    for dataset in datasets:
        databases.append(dataset.database)
    if db not in databases:
        return HttpResponseNotFound('Nothing is here.')
    # Admin connection; the password is URL-quoted for the URI.
    mongouri = "mongodb://" + settings.MONGODB_USER + ":" + quote(
        settings.MONGODB_PASSWORD) + "@" + settings.MONGODB_URI + "/admin"
    connection = MongoClient(mongouri)
    database = connection[db]
    preview_data = {}
    collections = database.collection_names()
    for i, collection_name in enumerate(collections):
        if collection_name != u'system.indexes':
            col = database[collection_name]
            # First 10 documents; _id is excluded so the docs JSON-serialize.
            collection = col.find(filter={}, projection={'_id': False}, limit=10, skip=0)
            items = []
            for item in collection:
                items.append(item)
            preview_data[collection_name] = json.dumps(items, ensure_ascii=False)
    # NOTE(review): the MongoClient is never closed here, unlike the other
    # helpers in this file -- possible connection leak.
    return render(request, template_name="databasepreview.html",
                  context={'username': request.user.username, 'databases': databases, 'preview_data': preview_data})
|
{"/awssam/iam/users/urls.py": ["/awssam/iam/users/views.py"], "/myapi/devfile/core/views.py": ["/myapi/devfile/core/forms.py"], "/Web-UI/scrapyproject/views.py": ["/Web-UI/scrapyproject/forms.py", "/Web-UI/scrapyproject/models.py"], "/awssam/ideablog/core/admin.py": ["/awssam/ideablog/core/models.py"], "/awssam/wikidj/wikidj/codehilite.py": ["/awssam/wikidj/wikidj/settings.py", "/awssam/wikidj/wikidj/dev.py"], "/awssam/ideablog/core/views.py": ["/awssam/ideablog/core/models.py"], "/myapi/fullfeblog/blog/search_indexes.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/templatetags/blog_tags.py": ["/myapi/fullfeblog/blog/models.py"], "/myapi/fullfeblog/blog/views.py": ["/myapi/fullfeblog/blog/models.py", "/myapi/fullfeblog/blog/forms.py"], "/awssam/wikidj/wikidj/dev.py": ["/awssam/wikidj/wikidj/settings.py"], "/tc_zufang/django_web/datashow/views.py": ["/tc_zufang/django_web/datashow/models.py"], "/awssam/ideablog/core/forms.py": ["/awssam/ideablog/core/models.py"], "/Web-UI/scrapyproject/admin.py": ["/Web-UI/scrapyproject/models.py"], "/myapi/fullfeblog/blog/urls.py": ["/myapi/fullfeblog/blog/views.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.