index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
29,989
|
seung-lab/DynamicAnnotationDB
|
refs/heads/master
|
/dynamicannotationdb/annotation.py
|
import datetime
import logging
from typing import List
from marshmallow import INCLUDE
from sqlalchemy import DDL, event
from .database import DynamicAnnotationDB
from .errors import (
AnnotationInsertLimitExceeded,
NoAnnotationsFoundWithID,
UpdateAnnotationError,
TableNameNotFound,
)
from .models import AnnoMetadata
from .schema import DynamicSchemaClient
class DynamicAnnotationClient:
    """Client for creating annotation tables and CRUD-ing their rows."""

    def __init__(self, sql_url: str) -> None:
        # Database access layer (sessions, cached models) and schema helper
        # used by every method below.
        self.db = DynamicAnnotationDB(sql_url)
        self.schema = DynamicSchemaClient()

    @property
    def table(self):
        # Most recently loaded table (set by load_table); raises
        # AttributeError if load_table has not been called yet.
        return self._table
def load_table(self, table_name: str):
"""Load a table
Parameters
----------
table_name : str
name of table
Returns
-------
DeclarativeMeta
the sqlalchemy table of that name
"""
self._table = self.db.cached_table(table_name)
return self._table
    def create_table(
        self,
        table_name: str,
        schema_type: str,
        description: str,
        user_id: str,
        voxel_resolution_x: float,
        voxel_resolution_y: float,
        voxel_resolution_z: float,
        table_metadata: dict = None,
        flat_segmentation_source: str = None,
        with_crud_columns: bool = True,
        read_permission: str = "PUBLIC",
        write_permission: str = "PRIVATE",
        notice_text: str = None,
    ):
        r"""Create new annotation table unless already exists

        Parameters
        ----------
        table_name : str
            name of table
        schema_type : str
            Type of schema to use, must be a valid type from EMAnnotationSchemas
        description: str
            a string with a human-readable explanation of
            what is in the table. Including whom made it
            and any information that helps interpret the fields
            of the annotations.
        user_id: str
            user id for this table
        voxel_resolution_x: float
            voxel_resolution of this annotation table's point in x (typically nm)
        voxel_resolution_y: float
            voxel_resolution of this annotation table's point in y (typically nm)
        voxel_resolution_z: float
            voxel_resolution of this annotation table's point in z (typically nm)
        table_metadata: dict
            optional reference-table options; parsed for 'reference_table'
            and target-id update tracking
        flat_segmentation_source: str
            a path to a segmentation source associated with this table
            i.e. 'precomputed:\\gs:\\my_synapse_seg\example1'
        with_crud_columns: bool
            add additional columns to track CRUD operations on rows
        read_permission: str
            read permission stored in the metadata row (default "PUBLIC")
        write_permission: str
            write permission stored in the metadata row (default "PRIVATE")
        notice_text: str
            optional notice stored in the metadata row

        Returns
        -------
        str
            the created table's name
        """
        # Raises if a table with this name already exists.
        existing_tables = self.db._check_table_is_unique(table_name)

        if table_metadata:
            reference_table, track_updates = self.schema._parse_schema_metadata_params(
                schema_type, table_name, table_metadata, existing_tables
            )
        else:
            reference_table = None
            track_updates = None

        AnnotationModel = self.schema.create_annotation_model(
            table_name,
            schema_type,
            table_metadata=table_metadata,
            with_crud_columns=with_crud_columns,
        )
        # Reference schemas expose a 'target_id' foreign key; optionally wire
        # up a DB trigger so target_id follows superseded rows in the
        # referenced table.
        if hasattr(AnnotationModel, "target_id") and reference_table:
            reference_table_name = self.db.get_table_sql_metadata(reference_table)
            logging.info(
                f"{table_name} is targeting reference table: {reference_table_name}"
            )
            if track_updates:
                self.create_reference_update_trigger(
                    table_name, reference_table, AnnotationModel
                )
                description += (
                    f" [Note: This table '{AnnotationModel.__name__}' will update the 'target_id' "
                    f"foreign_key when updates are made to the '{reference_table}' table] "
                )
        # Emit the CREATE TABLE (also fires any registered after_create DDL).
        self.db.base.metadata.tables[AnnotationModel.__name__].create(
            bind=self.db.engine
        )

        creation_time = datetime.datetime.utcnow()
        metadata_dict = {
            "description": description,
            "user_id": user_id,
            "reference_table": reference_table,
            "schema_type": schema_type,
            "table_name": table_name,
            "valid": True,
            "created": creation_time,
            "flat_segmentation_source": flat_segmentation_source,
            "voxel_resolution_x": voxel_resolution_x,
            "voxel_resolution_y": voxel_resolution_y,
            "voxel_resolution_z": voxel_resolution_z,
            "read_permission": read_permission,
            "write_permission": write_permission,
            "last_modified": creation_time,
            "notice_text": notice_text,
        }
        logging.info(f"Metadata for table: {table_name} is {metadata_dict}")
        anno_metadata = AnnoMetadata(**metadata_dict)
        self.db.cached_session.add(anno_metadata)
        self.db.commit_session()
        logging.info(
            f"Table: {table_name} created using {AnnotationModel} model at {creation_time}"
        )
        return table_name
def update_table_metadata(
self,
table_name: str,
description: str = None,
user_id: str = None,
flat_segmentation_source: str = None,
read_permission: str = None,
write_permission: str = None,
notice_text: str = None,
):
r"""Update metadata for an annotation table.
Parameters
----------
table_name : str
Name of the annotation table
description: str, optional
a string with a human-readable explanation of
what is in the table. Including whom made it
and any information that helps interpret the fields
of the annotations.
user_id : str, optional
user id for this table
flat_segmentation_source : str, optional
a path to a segmentation source associated with this table
i.e. 'precomputed:\\gs:\\my_synapse_seg\example1', by default None
read_permission : str, optional
set read permissions, by default None
write_permission : str, optional
set write permissions, by default None
notice_text : str, optional
set notice_text, by default None, if empty string will delete
Returns
-------
dict
The updated metadata for the target table
Raises
------
TableNameNotFound
If no table with 'table_name' found in the metadata table
"""
metadata = (
self.db.cached_session.query(AnnoMetadata)
.filter(AnnoMetadata.table_name == table_name)
.first()
)
if metadata is None:
raise TableNameNotFound(
f"no table named {table_name} in database {self.sql_url} "
)
update_dict = {
"description": description,
"user_id": user_id,
"flat_segmentation_source": flat_segmentation_source,
"read_permission": read_permission,
"write_permission": write_permission,
}
update_dict = {k: v for k, v in update_dict.items() if v is not None}
if notice_text is not None:
if len(notice_text) == 0:
update_dict["notice_text"] = None
else:
update_dict["notice_text"] = notice_text
for column, value in update_dict.items():
if hasattr(metadata, str(column)):
setattr(metadata, column, value)
self.db.commit_session()
logging.info(f"Table: {table_name} metadata updated ")
return self.db.get_table_metadata(table_name)
    def create_reference_update_trigger(self, table_name, reference_table, model):
        """Register PostgreSQL DDL that keeps this table's target_id in sync.

        When a row in `reference_table` is updated and carries a
        'superceded_id', the trigger re-points any referencing row's
        target_id to the superseding row. The DDL is registered as
        SQLAlchemy 'after_create' events (postgresql dialect only), so it is
        executed when the annotation table itself is created — nothing runs
        here directly.

        Parameters
        ----------
        table_name : str
            name of the referencing annotation table
        reference_table : str
            name of the table whose updates should propagate
        model : DeclarativeMeta
            SQLAlchemy model of the referencing table

        Returns
        -------
        bool
            always True (registration only)
        """
        func_name = f"{table_name}_update_reference_id"
        # NOTE(review): table names are interpolated directly into DDL —
        # they must come from trusted/validated sources.
        func = DDL(
            f"""
            CREATE or REPLACE function {func_name}()
            returns TRIGGER
            as $func$
            begin
                if EXISTS
                    (SELECT 1
                     FROM information_schema.columns
                     WHERE table_name='{reference_table}'
                     AND column_name='superceded_id') THEN
                    update {table_name} ref
                    set target_id = new.superceded_id
                    where ref.target_id = old.id;
                    return new;
                else
                    return NULL;
                END if;
            end;
            $func$ language plpgsql;
            """
        )
        trigger = DDL(
            f"""CREATE TRIGGER update_{table_name}_target_id AFTER UPDATE ON {reference_table}
            FOR EACH ROW EXECUTE PROCEDURE {func_name}();"""
        )
        event.listen(
            model.__table__,
            "after_create",
            func.execute_if(dialect="postgresql"),
        )
        event.listen(
            model.__table__,
            "after_create",
            trigger.execute_if(dialect="postgresql"),
        )
        return True
def delete_table(self, table_name: str) -> bool:
"""Marks a table for deletion, which will
remove it from user visible calls
and stop materialization from happening on this table
only updates metadata to reflect deleted timestamp.
Parameters
----------
table_name : str
name of table to mark for deletion
Returns
-------
bool
whether table was successfully deleted
"""
metadata = (
self.db.cached_session.query(AnnoMetadata)
.filter(AnnoMetadata.table_name == table_name)
.first()
)
if metadata is None:
raise TableNameNotFound(
f"no table named {table_name} in database {self.sql_url} "
)
metadata.deleted = datetime.datetime.utcnow()
self.db.commit_session()
return True
def insert_annotations(self, table_name: str, annotations: List[dict]):
"""Insert some annotations.
Parameters
----------
table_name : str
name of target table to insert annotations
annotations : list of dict
a list of dicts with the annotations
that meet the schema
Returns
-------
bool
True is successfully inserted annotations
Raises
------
AnnotationInsertLimitExceeded
Exception raised when amount of annotations exceeds defined limit.
"""
insertion_limit = 10_000
if len(annotations) > insertion_limit:
raise AnnotationInsertLimitExceeded(insertion_limit, len(annotations))
schema_type, AnnotationModel = self._load_model(table_name)
formatted_anno_data = []
for annotation in annotations:
annotation_data, __ = self.schema.split_flattened_schema_data(
schema_type, annotation
)
if annotation.get("id"):
annotation_data["id"] = annotation["id"]
if hasattr(AnnotationModel, "created"):
annotation_data["created"] = datetime.datetime.utcnow()
annotation_data["valid"] = True
formatted_anno_data.append(annotation_data)
annos = [
AnnotationModel(**annotation_data)
for annotation_data in formatted_anno_data
]
self.db.cached_session.add_all(annos)
self.db.cached_session.flush()
anno_ids = [anno.id for anno in annos]
(
self.db.cached_session.query(AnnoMetadata)
.filter(AnnoMetadata.table_name == table_name)
.update({AnnoMetadata.last_modified: datetime.datetime.utcnow()})
)
self.db.commit_session()
return anno_ids
    def get_annotations(self, table_name: str, annotation_ids: List[int]) -> List[dict]:
        """Get a set of annotations by ID

        Parameters
        ----------
        table_name : str
            name of table
        annotation_ids : List[int]
            list of annotation ids to get

        Returns
        -------
        List[dict]
            list of returned annotations

        Raises
        ------
        NoAnnotationsFoundWithID
            if deserializing the fetched rows fails (ids with no matching
            row are silently omitted from the result rather than raising)
        """
        schema_type, AnnotationModel = self._load_model(table_name)
        annotations = (
            self.db.cached_session.query(AnnotationModel)
            .filter(AnnotationModel.id.in_(list(annotation_ids)))
            .all()
        )
        # unknown=INCLUDE keeps columns (id, valid, superceded_id, ...) that
        # are not declared on the marshmallow schema itself.
        anno_schema, __ = self.schema.split_flattened_schema(schema_type)
        schema = anno_schema(unknown=INCLUDE)
        try:
            data = []
            for anno in annotations:
                anno_data = anno.__dict__
                # Stringify timestamps for serialization; absent values
                # become the string "None".
                anno_data["created"] = str(anno_data.get("created"))
                anno_data["deleted"] = str(anno_data.get("deleted"))
                # Drop SQLAlchemy's internal instance-state bookkeeping key.
                anno_data = {
                    k: v for (k, v) in anno_data.items() if k != "_sa_instance_state"
                }
                data.append(anno_data)
            return schema.load(data, many=True)
        except Exception as e:
            # NOTE(review): any deserialization error is reported as a
            # missing-id error, which can be misleading — confirm intent.
            logging.exception(e)
            raise NoAnnotationsFoundWithID(annotation_ids) from e
def update_annotation(self, table_name: str, annotation: dict) -> str:
"""Update an annotation
Parameters
----------
table_name : str
name of targeted table to update annotations
annotation : dict
new data for that annotation
Returns
-------
dict:
dict mapping of old id : new id values
Raises
------
NoAnnotationsFoundWithID:
Raises if no Ids to be updated are found in the table.
"""
anno_id = annotation.get("id")
if not anno_id:
return "Annotation requires an 'id' to update targeted row"
schema_type, AnnotationModel = self._load_model(table_name)
new_annotation, __ = self.schema.split_flattened_schema_data(
schema_type, annotation
)
if hasattr(AnnotationModel, "created"):
new_annotation["created"] = datetime.datetime.utcnow()
if hasattr(AnnotationModel, "valid"):
new_annotation["valid"] = True
new_data = AnnotationModel(**new_annotation)
try:
old_anno = (
self.db.cached_session.query(AnnotationModel)
.filter(AnnotationModel.id == anno_id)
.one()
)
except NoAnnotationsFoundWithID as e:
raise f"No result found for {anno_id}. Error: {e}" from e
if hasattr(AnnotationModel, "target_id"):
new_data_map = self.db.get_automap_items(new_data)
for column_name, value in new_data_map.items():
setattr(old_anno, column_name, value)
old_anno.valid = True
update_map = {anno_id: old_anno.id}
else:
if old_anno.superceded_id:
raise UpdateAnnotationError(anno_id, old_anno.superceded_id)
self.db.cached_session.add(new_data)
self.db.cached_session.flush()
deleted_time = datetime.datetime.utcnow()
old_anno.deleted = deleted_time
old_anno.superceded_id = new_data.id
old_anno.valid = False
update_map = {anno_id: new_data.id}
(
self.db.cached_session.query(AnnoMetadata)
.filter(AnnoMetadata.table_name == table_name)
.update({AnnoMetadata.last_modified: datetime.datetime.utcnow()})
)
self.db.commit_session()
return update_map
    def delete_annotation(
        self, table_name: str, annotation_ids: List[int]
    ) -> List[int]:
        """Delete annotations by ids

        Tables with CRUD columns are soft-deleted (deleted timestamp set,
        valid=False); legacy tables without them are hard-deleted.

        Parameters
        ----------
        table_name : str
            name of table to delete from
        annotation_ids : List[int]
            list of ids to delete

        Returns
        -------
        List[int]
            ids marked deleted / removed, or None if no ids matched
        """
        schema_type, AnnotationModel = self._load_model(table_name)
        annotations = (
            self.db.cached_session.query(AnnotationModel)
            .filter(AnnotationModel.id.in_(annotation_ids))
            .all()
        )
        deleted_ids = []
        if annotations:
            deleted_time = datetime.datetime.utcnow()
            for annotation in annotations:
                # TODO: This should be deprecated, as all tables should have
                # CRUD columns now, but leaving this for backward safety.
                if not hasattr(AnnotationModel, "deleted"):
                    self.db.cached_session.delete(annotation)
                else:
                    annotation.deleted = deleted_time
                    annotation.valid = False
                deleted_ids.append(annotation.id)
            # Bump the table's last_modified stamp alongside the deletion.
            (
                self.db.cached_session.query(AnnoMetadata)
                .filter(AnnoMetadata.table_name == table_name)
                .update({AnnoMetadata.last_modified: datetime.datetime.utcnow()})
            )
            self.db.commit_session()
        else:
            return None
        return deleted_ids
def _load_model(self, table_name):
metadata = self.db.get_table_metadata(table_name)
schema_type = metadata["schema_type"]
# load reference table into metadata if not already present
ref_table = metadata.get("reference_table")
if ref_table:
reference_table_name = self.db.cached_table(ref_table)
AnnotationModel = self.db.cached_table(table_name)
return schema_type, AnnotationModel
|
{"/dynamicannotationdb/database.py": ["/dynamicannotationdb/errors.py", "/dynamicannotationdb/models.py", "/dynamicannotationdb/schema.py"], "/dynamicannotationdb/__init__.py": ["/dynamicannotationdb/interface.py"], "/dynamicannotationdb/migration/__init__.py": ["/dynamicannotationdb/migration/migrate.py"], "/dynamicannotationdb/interface.py": ["/dynamicannotationdb/annotation.py", "/dynamicannotationdb/database.py", "/dynamicannotationdb/models.py", "/dynamicannotationdb/schema.py", "/dynamicannotationdb/segmentation.py"], "/dynamicannotationdb/segmentation.py": ["/dynamicannotationdb/database.py", "/dynamicannotationdb/errors.py", "/dynamicannotationdb/key_utils.py", "/dynamicannotationdb/models.py", "/dynamicannotationdb/schema.py"], "/tests/test_errors.py": ["/dynamicannotationdb/errors.py"], "/dynamicannotationdb/schema.py": ["/dynamicannotationdb/errors.py"], "/dynamicannotationdb/migration/migrate.py": ["/dynamicannotationdb/database.py", "/dynamicannotationdb/models.py", "/dynamicannotationdb/schema.py"], "/tests/conftest.py": ["/dynamicannotationdb/__init__.py"], "/dynamicannotationdb/annotation.py": ["/dynamicannotationdb/database.py", "/dynamicannotationdb/errors.py", "/dynamicannotationdb/models.py", "/dynamicannotationdb/schema.py"]}
|
29,990
|
seung-lab/DynamicAnnotationDB
|
refs/heads/master
|
/tests/test_annotation.py
|
import logging
import pytest
from emannotationschemas import type_mapping
from emannotationschemas.schemas.base import ReferenceAnnotation
def test_create_table(dadb_interface, annotation_metadata):
    # Creating a basic (non-reference) table returns the table's name.
    table_name = annotation_metadata["table_name"]
    schema_type = annotation_metadata["schema_type"]
    vx = annotation_metadata["voxel_resolution_x"]
    vy = annotation_metadata["voxel_resolution_y"]
    vz = annotation_metadata["voxel_resolution_z"]

    table = dadb_interface.annotation.create_table(
        table_name,
        schema_type,
        description="some description",
        user_id="foo@bar.com",
        voxel_resolution_x=vx,
        voxel_resolution_y=vy,
        voxel_resolution_z=vz,
        table_metadata=None,
        flat_segmentation_source=None,
    )
    assert table_name == table


def test_create_all_schema_types(dadb_interface, annotation_metadata):
    # Every schema in emannotationschemas' type_mapping should be creatable;
    # reference schemas additionally require reference_table metadata.
    vx = annotation_metadata["voxel_resolution_x"]
    vy = annotation_metadata["voxel_resolution_y"]
    vz = annotation_metadata["voxel_resolution_z"]

    ref_metadata = {
        "reference_table": "anno_test",
        "track_target_id_updates": True,
    }
    for schema_name, schema_type in type_mapping.items():
        table_metadata = (
            ref_metadata if issubclass(schema_type, ReferenceAnnotation) else None
        )
        table = dadb_interface.annotation.create_table(
            f"test_{schema_name}",
            schema_name,
            description="some description",
            user_id="foo@bar.com",
            voxel_resolution_x=vx,
            voxel_resolution_y=vy,
            voxel_resolution_z=vz,
            table_metadata=table_metadata,
            flat_segmentation_source=None,
        )
        assert f"test_{schema_name}" == table
def test_create_reference_table(dadb_interface, annotation_metadata):
    # A reference table records its target in the 'reference_table' metadata.
    table_name = "presynaptic_bouton_types"
    schema_type = "presynaptic_bouton_type"

    vx = annotation_metadata["voxel_resolution_x"]
    vy = annotation_metadata["voxel_resolution_y"]
    vz = annotation_metadata["voxel_resolution_z"]

    table_metadata = {
        "reference_table": "anno_test",
        "track_target_id_updates": True,
    }
    table = dadb_interface.annotation.create_table(
        table_name,
        schema_type,
        description="some description",
        user_id="foo@bar.com",
        voxel_resolution_x=vx,
        voxel_resolution_y=vy,
        voxel_resolution_z=vz,
        table_metadata=table_metadata,
        flat_segmentation_source=None,
        with_crud_columns=False,
    )
    assert table_name == table

    table_info = dadb_interface.database.get_table_metadata(table)
    assert table_info["reference_table"] == "anno_test"


def test_bad_schema_reference_table(dadb_interface, annotation_metadata):
    # A non-reference schema ('synapse') with reference metadata must raise.
    table_name = "bad_reference_table"
    schema_type = "synapse"

    vx = annotation_metadata["voxel_resolution_x"]
    vy = annotation_metadata["voxel_resolution_y"]
    vz = annotation_metadata["voxel_resolution_z"]

    table_metadata = {
        "reference_table": "anno_test",
        "track_target_id_updates": True,
    }
    with pytest.raises(Exception) as e:
        table = dadb_interface.annotation.create_table(
            table_name,
            schema_type,
            description="some description",
            user_id="foo@bar.com",
            voxel_resolution_x=vx,
            voxel_resolution_y=vy,
            voxel_resolution_z=vz,
            table_metadata=table_metadata,
            flat_segmentation_source=None,
        )
    assert str(e.value) == "Reference table must be a ReferenceAnnotation schema type"
def test_insert_annotation(dadb_interface, annotation_metadata):
    # First insert into the fixture table gets autoincrement id 1.
    table_name = annotation_metadata["table_name"]
    test_data = [
        {
            "pre_pt": {"position": [121, 123, 1232]},
            "ctr_pt": {"position": [121, 123, 1232]},
            "post_pt": {"position": [333, 555, 5555]},
            "size": 1,
        }
    ]
    inserted_id = dadb_interface.annotation.insert_annotations(table_name, test_data)
    assert inserted_id == [1]


def test_insert_reference_annotation(dadb_interface, annotation_metadata):
    # Reference annotations carry a target_id pointing at the target table.
    table_name = "presynaptic_bouton_types"
    test_data = [
        {
            "bouton_type": "pancake",
            "target_id": 1,
        }
    ]
    inserted_id = dadb_interface.annotation.insert_annotations(table_name, test_data)
    assert inserted_id == [1]


def test_insert_another_annotation(dadb_interface, annotation_metadata):
    # Second insert into the same table continues the id sequence.
    table_name = annotation_metadata["table_name"]
    test_data = [
        {
            "pre_pt": {"position": [111, 222, 333]},
            "ctr_pt": {"position": [444, 555, 666]},
            "post_pt": {"position": [777, 888, 999]},
            "size": 1,
        }
    ]
    inserted_id = dadb_interface.annotation.insert_annotations(table_name, test_data)
    assert inserted_id == [2]


def test_get_annotation(dadb_interface, annotation_metadata):
    table_name = annotation_metadata["table_name"]
    test_data = dadb_interface.annotation.get_annotations(table_name, [1])
    logging.info(test_data)
    assert test_data[0]["id"] == 1


def test_update_annotation(dadb_interface, annotation_metadata):
    # Non-reference updates supersede: old row 1 points at new row 3
    # (rows 1 and 2 already exist from the insert tests above).
    table_name = annotation_metadata["table_name"]
    updated_test_data = {
        "id": 1,
        "pre_pt": {"position": [222, 123, 1232]},
        "ctr_pt": {"position": [121, 123, 1232]},
        "post_pt": {"position": [555, 555, 5555]},
    }
    update_map = dadb_interface.annotation.update_annotation(
        table_name, updated_test_data
    )
    assert update_map == {1: 3}
    test_data = dadb_interface.annotation.get_annotations(table_name, [1])
    assert test_data[0]["superceded_id"] == 3
def test_get_reference_annotation(dadb_interface, annotation_metadata):
    table_name = "presynaptic_bouton_types"
    test_data = dadb_interface.annotation.get_annotations(table_name, [1])
    logging.info(test_data)
    assert test_data[0]["id"] == 1
    # The update trigger re-pointed target_id after row 1 was superseded by 3.
    assert test_data[0]["target_id"] == 3


def test_update_reference_annotation(dadb_interface, annotation_metadata):
    # Reference annotations are updated in place, so old id == new id.
    table_name = "presynaptic_bouton_types"
    test_data = {
        "id": 1,
        "bouton_type": "basmati",
        "target_id": 3,
    }
    update_map = dadb_interface.annotation.update_annotation(table_name, test_data)
    assert update_map == {1: 1}
    test_data = dadb_interface.annotation.get_annotations(table_name, [1])
    assert test_data[0]["bouton_type"] == "basmati"


def test_delete_reference_annotation(dadb_interface, annotation_metadata):
    table_name = "presynaptic_bouton_types"
    ids_to_delete = [1]
    is_deleted = dadb_interface.annotation.delete_annotation(table_name, ids_to_delete)
    assert is_deleted == ids_to_delete


def test_delete_annotation(dadb_interface, annotation_metadata):
    table_name = annotation_metadata["table_name"]
    ids_to_delete = [1]
    is_deleted = dadb_interface.annotation.delete_annotation(table_name, ids_to_delete)
    assert is_deleted == ids_to_delete


def test_update_table_metadata(dadb_interface, annotation_metadata):
    # Only the supplied field (description) should change.
    table_name = annotation_metadata["table_name"]
    updated_metadata = dadb_interface.annotation.update_table_metadata(
        table_name, description="New description"
    )
    assert updated_metadata["description"] == "New description"
|
{"/dynamicannotationdb/database.py": ["/dynamicannotationdb/errors.py", "/dynamicannotationdb/models.py", "/dynamicannotationdb/schema.py"], "/dynamicannotationdb/__init__.py": ["/dynamicannotationdb/interface.py"], "/dynamicannotationdb/migration/__init__.py": ["/dynamicannotationdb/migration/migrate.py"], "/dynamicannotationdb/interface.py": ["/dynamicannotationdb/annotation.py", "/dynamicannotationdb/database.py", "/dynamicannotationdb/models.py", "/dynamicannotationdb/schema.py", "/dynamicannotationdb/segmentation.py"], "/dynamicannotationdb/segmentation.py": ["/dynamicannotationdb/database.py", "/dynamicannotationdb/errors.py", "/dynamicannotationdb/key_utils.py", "/dynamicannotationdb/models.py", "/dynamicannotationdb/schema.py"], "/tests/test_errors.py": ["/dynamicannotationdb/errors.py"], "/dynamicannotationdb/schema.py": ["/dynamicannotationdb/errors.py"], "/dynamicannotationdb/migration/migrate.py": ["/dynamicannotationdb/database.py", "/dynamicannotationdb/models.py", "/dynamicannotationdb/schema.py"], "/tests/conftest.py": ["/dynamicannotationdb/__init__.py"], "/dynamicannotationdb/annotation.py": ["/dynamicannotationdb/database.py", "/dynamicannotationdb/errors.py", "/dynamicannotationdb/models.py", "/dynamicannotationdb/schema.py"]}
|
30,017
|
subhamkumarmal/images
|
refs/heads/master
|
/uploadapp/urls.py
|
from django.urls import path,include
from . import views
# URL routes for the uploadapp views.
urlpatterns=[
    # Registration form (home page).
    path('',views.StudentDetailsView,name="studentsdetails"),
    # Email/password lookup of a student's details.
    path('getinfo',views.GetInfoView,name='getinfo'),
    # JSON dump of all students' basic fields.
    path('getallvalue',views.getAll,name='getall'),
    # Built-in Django auth views (login/logout/password reset).
    path('accounts/',include("django.contrib.auth.urls"))
]
|
{"/uploadapp/views.py": ["/uploadapp/forms.py", "/uploadapp/models.py"], "/uploadapp/forms.py": ["/uploadapp/models.py"]}
|
30,018
|
subhamkumarmal/images
|
refs/heads/master
|
/uploadapp/views.py
|
from django.shortcuts import render,redirect
from .forms import StudentsDetailsForm,Login
from django.http import HttpResponse,JsonResponse
from .models import StudentsDeails
from django.contrib.auth.decorators import login_required
# Create your views here.
def StudentDetailsView(request):
    """Register a student: render the form on GET, save it on a valid POST.

    Invalid POSTs fall through and re-render the bound form so field
    errors are shown.
    """
    if request.method != "POST":
        blank_form = StudentsDetailsForm()
        return render(request, 'uploadapp/register.html', {'forms': blank_form})

    bound_form = StudentsDetailsForm(request.POST, request.FILES)
    if bound_form.is_valid():
        bound_form.save()
        return redirect('/')
    return render(request, 'uploadapp/register.html', {'forms': bound_form})
def GetInfoView(request):
    """Look up a student's details by email and password.

    Renders the matching student on success, otherwise re-renders the login
    form.

    NOTE(review): passwords are stored and compared in plain text and this
    bypasses django.contrib.auth entirely — flagged for migration, not
    changed here.
    """
    if request.method == 'POST':
        forms = Login(request.POST)
        if forms.is_valid():
            email = forms.cleaned_data['email']
            password = forms.cleaned_data['password']
            getinfo = StudentsDeails.objects.filter(email__exact=email)
            # Bug fix: the original indexed getinfo[0] directly (IndexError
            # for an unknown email) and returned None — an HTTP 500 — when
            # the password did not match.
            student = getinfo.first()
            if student is not None and student.password == password:
                return render(request, 'uploadapp/getinfo.html', {'getinfo': getinfo})
    else:
        forms = Login()
    # Invalid form, unknown email, or wrong password: show the login form.
    return render(request, 'uploadapp/getinfo.html', {'forms': forms})
def getAll(request):
    """Return every student's basic fields as JSON: {"details": [...]}."""
    records = StudentsDeails.objects.all().values('name', 'age', 'phone', 'email')
    return JsonResponse({"details": list(records)})
|
{"/uploadapp/views.py": ["/uploadapp/forms.py", "/uploadapp/models.py"], "/uploadapp/forms.py": ["/uploadapp/models.py"]}
|
30,019
|
subhamkumarmal/images
|
refs/heads/master
|
/uploadapp/forms.py
|
from django import forms
from .models import StudentsDeails
class StudentsDetailsForm(forms.ModelForm):
    """ModelForm exposing every StudentsDeails field, including the image."""
    class Meta:
        model=StudentsDeails
        fields="__all__"


class Login(forms.Form):
    """Simple email/password form used by GetInfoView (not Django auth)."""
    email=forms.EmailField(widget=forms.EmailInput)
    # NOTE(review): plain CharField — the password is handled in clear text.
    password=forms.CharField(widget=forms.PasswordInput)
|
{"/uploadapp/views.py": ["/uploadapp/forms.py", "/uploadapp/models.py"], "/uploadapp/forms.py": ["/uploadapp/models.py"]}
|
30,020
|
subhamkumarmal/images
|
refs/heads/master
|
/uploadapp/models.py
|
from django.db import models
# Create your models here.
class StudentsDeails(models.Model):
    """Student record with contact details and an uploaded image.

    NOTE(review): the class name ("Deails") and db_table ("studentdetial")
    spellings are typos kept as-is for database-schema compatibility.
    """
    name=models.CharField(max_length=40)
    age=models.IntegerField()
    phone=models.CharField(max_length=11)
    email=models.EmailField()
    # NOTE(review): password stored in plain text — should be hashed.
    password=models.CharField(max_length=20)
    img_filed=models.ImageField(upload_to='upload/images')

    def __str__(self):
        return self.name

    class Meta:
        db_table='studentdetial'
|
{"/uploadapp/views.py": ["/uploadapp/forms.py", "/uploadapp/models.py"], "/uploadapp/forms.py": ["/uploadapp/models.py"]}
|
30,029
|
Diareich/My-realization-algorithm-Floyd-Warshall-with-GUI
|
refs/heads/master
|
/logic.py
|
# Floyd-Warshall all-pairs shortest paths.

# Default vertex count, kept for backward compatibility with importers;
# the algorithm now derives the size from its input instead.
nV = 4
# Sentinel weight meaning "no edge".
INF = 999


def floyd_warshall(G):
    """Return the all-pairs shortest-distance matrix for adjacency matrix G.

    G is an n x n list of lists where G[i][j] is the edge weight from i to j
    and INF (999) marks a missing edge. G is not modified. Entries still
    equal to INF in the result are rendered as the string "INF".

    Bug fix: the original iterated over the global nV (hard-coded to 4), so
    any matrix of a different size — e.g. one entered through the GUI in
    main.py — was truncated or raised IndexError. The size is now len(G).
    """
    n = len(G)
    # Copy each row so the caller's matrix is left untouched.
    distance = [row[:] for row in G]
    for k in range(n):
        for i in range(n):
            for j in range(n):
                distance[i][j] = min(distance[i][j], distance[i][k] + distance[k][j])
    return print_solution(distance)


def print_solution(distance):
    """Format the distance matrix, replacing the INF sentinel with "INF"."""
    return [["INF" if d == INF else d for d in row] for row in distance]


if __name__ == '__main__':
    G = [[0, 3, INF, 5],
         [2, 0, INF, 4],
         [INF, 1, 0, INF],
         [INF, INF, 2, 0]]
    print(floyd_warshall(G))
|
{"/main.py": ["/logic.py"]}
|
30,030
|
Diareich/My-realization-algorithm-Floyd-Warshall-with-GUI
|
refs/heads/master
|
/main.py
|
from tkinter import *
from logic import floyd_warshall
# функция расчета алгоритма флойда-уоршела
def solve():
    """Read the GUI inputs, run Floyd-Warshall, and display the results.

    Parses the matrix text of the form "[[a, b], [c, d]]" (with "INF" for
    missing edges) from the entry widgets and pushes the result lines into
    the two listboxes. Relies on the module-level tkinter widgets.
    """
    nV = dimention_entry.get()  # NOTE(review): read but never used — confirm
    graph = graph_entry.get()
    path = path_entry.get()
    INF = 999  # NOTE(review): local sentinel, also unused here
    # Strip the bracket syntax, split into rows, map "INF" -> 999, and
    # convert everything to ints.
    new_graph = [x.replace("[", "").replace("],", "").replace("]]", "") for x in graph.split (" [")]
    new_graph = [x.split(", ") for x in new_graph]
    new_graph = [ [y.replace("INF", "999") for y in x] for x in new_graph]
    new_graph = [ [int(y) for y in x ] for x in new_graph]
    print (new_graph)
    solution = floyd_warshall(new_graph)
    # Listbox.insert(0, ...) prepends, so lines are pushed in reverse order.
    matrix_listbox.insert(0, f"Примечание: число 999 - обозначает бесконечность !!!")
    matrix_listbox.insert(0, f"Матрица расстояний: {solution}")
    matrix_listbox.insert(0, f"Исходный граф: {graph}")
    matrix_listbox.insert(0, f"-------------------------------------Решение-------------------------------------")
    # Vertices are entered 1-based; convert to 0-based matrix indices.
    path = path.split(" ")
    path = [int(x) for x in path]
    print (path[0] + path[1])
    print (path)
    fastest_path = solution [path[0] - 1] [path[1] - 1]
    print (fastest_path)
    path_listbox.insert(0, f"Примечание: число 999 - обозначает бесконечность !!!")
    path_listbox.insert(0, f"Самый короткий путь составит: {fastest_path}")
    path_listbox.insert(0, f"Мы хотим попасть из вершины {path[0]} в вершину {path[1]}")
    path_listbox.insert(0, f"-------------------------------------Решение-------------------------------------")
# Build the tkinter window: input fields, the "solve" button, and the two
# output listboxes used by solve() above.
root = Tk ()
root.title("Алгоритм Флойд-Уоршелла")
root.geometry("435x660")

# Entry fields and the button that triggers the computation.
dimention_text = "Введите размерность графа:"
dimention_label = Label(text=dimention_text, fg="#eee", bg="#333")
dimention_label.grid(column=0, row=0, padx=6, pady=6)
dimention_entry = Entry(width=32)
dimention_entry.grid(column=0, row=1, padx=6, pady=6)
graph_text = "Введите матрицу расстояний до вершин в графе:"
graph_label = Label(text=graph_text, fg="#eee", bg="#333")
graph_label.grid (column=0, row = 2, padx=6, pady=6)
# NOTE: Button(...).grid(...) returns None, so solve_button is None — the
# widget itself is still created and placed.
solve_button = Button (text="Рассчитать",
                       background="#555",
                       foreground="#ccc",
                       padx="20",
                       pady="8",
                       font="16",
                       command=solve).grid(column=1, row=2, padx=6, pady=6)
graph_entry = Entry(width=32)
graph_entry.grid(column=0, row=3, padx=6, pady=6)
path_text = "Введите из какой в какую вершину\nвы хотите попасть через пробел:"
path_label = Label(text=path_text, fg="#eee", bg="#333")
path_label.grid (column=0, row = 4, padx=6, pady=6)
path_entry = Entry(width=32)
path_entry.grid(column=0, row=5, padx=6, pady=6)

# Listboxes that display the computed matrix and shortest path.
matrix_text = "Вывод матрицы расстояний до вершин:"
matrix_label = Label(text=matrix_text, fg="#eee", bg="#333")
matrix_label.grid (column=0, row = 6, padx=6, pady=6)
matrix_listbox = Listbox()
matrix_listbox.grid(row=7, column=0, columnspan=2, sticky=W+E, padx=5, pady=5)
path_matrix_text = "Вывод кратчайшего пути из\nодной вершины в другую:"
path_matrix_label = Label(text=path_matrix_text, fg="#eee", bg="#333")
path_matrix_label.grid (column=0, row = 8, padx=6, pady=6)
path_listbox = Listbox()
path_listbox.grid(row=9, column=0, columnspan=2, sticky=W+E, padx=5, pady=5)

root.mainloop()
|
{"/main.py": ["/logic.py"]}
|
30,036
|
MrLawes/awspycli
|
refs/heads/master
|
/awspycli/model/emr_cli.py
|
# -*- coding:utf-8 -*-
import json
import os
class EMR(object):
    """Thin wrapper that builds and runs `aws emr ...` CLI subcommands."""

    def __init__(self):
        # Stateless: every method shells out independently via exec_command.
        pass
    def add_steps(self, **kwargs):
        """ Add a list of steps to a cluster.

        :param kwargs:
            cluster_id: string
            steps: [
                {},.....
            ]
        :return: parsed output of `aws emr add-steps` (see exec_command).
        """
        return self.exec_command('add-steps', **kwargs)
    def create_cluster(self, **kwargs):
        """ Creates an Amazon EMR cluster with the specified configurations.

        :param kwargs:
            applications:
                Args=string,string,string ...
                default: Hadoop,Spark,Ganglia
            ec2_attributes: (required — a missing key raises KeyError below)
                Args=json
                InstanceProfile:
                    default: EMR_EC2_DefaultRole
            service_role:
                Args=string
                default: EMR_DefaultRole
            name:
                Args=string
                default: awspycli
            region
                Args=string
                default: us-east-1
        :return:
            {u'ClusterId': u'j-1OAMNPOAHUIFP'}
        """
        # "Hadoop,Spark,Ganglia" -> "Name=Hadoop Name=Spark Name=Ganglia",
        # the form expected by `aws emr create-cluster --applications`.
        kwargs.setdefault('applications', 'Hadoop,Spark,Ganglia')
        kwargs['applications'] = ' Name='.join([''] + kwargs.pop('applications').split(',')).lstrip()
        # NOTE(review): ec2_attributes is mutated in place (caller's dict).
        ec2_attributes = kwargs['ec2_attributes']
        ec2_attributes.setdefault('InstanceProfile', 'EMR_EC2_DefaultRole')
        kwargs.setdefault('service_role', 'EMR_DefaultRole')
        kwargs.setdefault('name', 'awspycli')
        kwargs.setdefault('region', 'us-east-1')
        kwargs.setdefault('instance_groups', {})
        instance_groups = kwargs['instance_groups']
        # Give each instance group a readable default name,
        # e.g. "MASTER m3.xlarge x 1".
        for instance_group in instance_groups:
            instance_group.setdefault(
                'Name', instance_group['InstanceGroupType'] + ' ' + instance_group['InstanceType'] + ' x '
                + str(instance_group['InstanceCount'])
            )
        # Harmless no-op step so a cluster without steps still launches.
        kwargs.setdefault('steps', [{
            'Name': 'awspycli default step',
            'Args': ['sleep', '10'],
            'Jar': 'command-runner.jar',
            'ActionOnFailure': 'TERMINATE_CLUSTER',
            'Type': 'CUSTOM_JAR',
            'Properties': ''
        }])
        return self.exec_command('create-cluster', **kwargs)
def exec_command(self, emr_command, **kwargs):
""" change kwargs to aws command, and run it
:param command:
:param kwargs:
:return:
"""
aws_command = 'aws emr {command} '.format(command=emr_command)
l = []
kwargs_keys = kwargs.keys()
kwargs_keys.sort()
for k in kwargs_keys:
v = kwargs[k]
if isinstance(v, dict) or isinstance(v, list):
v = "' " + json.dumps(v) + " '"
elif isinstance(v, bool):
v = ""
elif isinstance(v, int):
v = str(v)
k = k.replace('_', '-')
l.append('--' + k + ' ' + v)
aws_command += ' '.join(l)
popen_result = os.popen(aws_command).readlines()
try:
popen_result = json.loads(''.join(popen_result))
except:
print(popen_result)
return popen_result
def list_steps(self, **kwargs):
""" Provides a list of steps for the cluster in reverse order unless you specify stepIds with the request.
:param kwargs:
:return:
"""
return self.exec_command('list-steps', **kwargs)
def modify_cluster_attributes(self, **kwargs):
""" Modifies the cluster attributes 'visible-to-all-users' and 'termination-protected'.
:param kwargs:
:return:
"""
return self.exec_command('modify-cluster-attributes', **kwargs)
def schedule_hbase_backup(self, **kwargs):
    """Add a step that schedules automated HBase backups.

    Only available when using Amazon EMR versions earlier than 4.0.

    :param kwargs: forwarded as ``--key value`` options
    :return: parsed command output
    """
    # Fix: removed the stray trailing space from the subcommand, which
    # produced a doubled space in the generated shell command.
    return self.exec_command('schedule-hbase-backup', **kwargs)
def socks(self, cluster_id, key_pair_file):
    """Open a SOCKS tunnel on port 8157 from this machine to the master node.

    :param cluster_id: cluster to tunnel to
    :param key_pair_file: private key file used for the SSH connection
    :return: parsed command output
    """
    options = {'cluster_id': cluster_id, 'key_pair_file': key_pair_file}
    return self.exec_command('socks', **options)
def ssh(self, cluster_id, key_pair_file, command):
    """Run *command* on the master node of the cluster via SSH.

    :param cluster_id: cluster whose master node to connect to
    :param key_pair_file: private key file to use for login
    :param command: shell command to execute on the master node
    :return: list of output lines with the final line removed
    """
    quoted_command = '"' + command + '"'
    result = self.exec_command(
        'ssh',
        cluster_id=cluster_id,
        key_pair_file=key_pair_file,
        command=quoted_command,
    )
    # Drop the trailing output line (presumably CLI status noise --
    # original behavior preserved; confirm against actual aws output).
    result.pop()
    return result
def terminate_clusters(self, cluster_ids):
    """Shut down one or more clusters, each specified by cluster ID.

    :param cluster_ids: cluster ID(s) to terminate
    :return: parsed command output
    """
    return self.exec_command('terminate-clusters', cluster_ids=cluster_ids)
def wait(self, status, **kwargs):
    """Block until a particular condition is satisfied.

    :param status: one of ``cluster-running``, ``cluster-terminated``
        or ``step-complete``
    :param kwargs: forwarded options such as ``cluster_id`` / ``step_id``
    :return: parsed command output
    """
    return self.exec_command('wait {}'.format(status), **kwargs)
def wait_all_step_complete(self, cluster_id):
    """Block until the newest step of the cluster completes.

    Only reliable when no new steps are added to the cluster meanwhile.

    :param cluster_id: cluster to watch
    """
    steps = self.list_steps(cluster_id=cluster_id, max_items=1)
    newest_step_id = steps['Steps'][0]['Id']
    self.wait(status='step-complete', cluster_id=cluster_id, step_id=newest_step_id)
emr = EMR()
|
{"/awspycli/__init__.py": ["/awspycli/model/emr_cli.py", "/awspycli/model/s3_cli.py"]}
|
30,037
|
MrLawes/awspycli
|
refs/heads/master
|
/awspycli/model/s3_cli.py
|
# -*- coding:utf-8 -*-
import json
import os
class S3(object):
    """Thin wrapper around the ``aws s3`` command line interface."""

    def __init__(self):
        pass

    def exec_command(self, emr_command, **kwargs):
        """Build an ``aws s3`` shell command from kwargs and run it.

        :param emr_command: the s3 subcommand (plus any positional arguments)
        :param kwargs: translated to ``--key value`` options; underscores in
            keys become dashes, dicts/lists are JSON-encoded, booleans become
            bare flags and ints are stringified
        :return: parsed JSON output when possible, otherwise the raw lines
        """
        aws_command = 'aws s3 {command} '.format(command=emr_command)
        options = []
        for key in sorted(kwargs):
            value = kwargs[key]
            if isinstance(value, (dict, list)):
                value = "' " + json.dumps(value) + " '"
            elif isinstance(value, bool):
                # Booleans become bare flags (the value is dropped).
                value = ""
            elif isinstance(value, int):
                value = str(value)
            options.append('--' + key.replace('_', '-') + ' ' + value)
        aws_command += ' '.join(options)
        popen_result = os.popen(aws_command).readlines()
        try:
            popen_result = json.loads(''.join(popen_result))
        except ValueError:
            # Fix: was a bare except. Output was not JSON; show the raw
            # lines and return them unchanged.
            print(popen_result)
        return popen_result

    def cp(self, copy_from, copy_to, **kwargs):
        """Copy a local file or S3 object to another location locally or in S3.

        :param copy_from: source path or S3 URI
        :param copy_to: destination path or S3 URI
        :param kwargs: https://docs.aws.amazon.com/cli/latest/reference/s3/cp.html
        :return: parsed command output
        """
        return self.exec_command('cp %s %s' % (copy_from, copy_to), **kwargs)

    def ls(self, s3uri, **kwargs):
        """List S3 objects and common prefixes under a prefix or all S3 buckets.

        Note that the --output and --no-paginate arguments are ignored for
        this command.

        :param s3uri: path (string), startswith s3://
        :param kwargs: https://docs.aws.amazon.com/cli/latest/reference/s3/ls.html
        :return: parsed command output
        """
        return self.exec_command('ls %s' % (s3uri,), **kwargs)

    def mb(self, s3uri, **kwargs):
        """Create an S3 bucket.

        :param s3uri: path (string), startswith s3://
        :param kwargs: forwarded options
        :return: parsed command output
        """
        return self.exec_command('mb %s' % (s3uri,), **kwargs)

    def mv(self, mv_from, mv_to, **kwargs):
        """Move a local file or S3 object to another location locally or in S3.

        :param mv_from: source path or S3 URI
        :param mv_to: destination path or S3 URI
        :param kwargs: https://docs.aws.amazon.com/cli/latest/reference/s3/mv.html
        :return: parsed command output
        """
        return self.exec_command('mv %s %s' % (mv_from, mv_to), **kwargs)

    def presign(self, s3uri, **kwargs):
        """Generate a pre-signed URL for an Amazon S3 object.

        This allows anyone who receives the pre-signed URL to retrieve the S3
        object with an HTTP GET request. For sigv4 requests the region needs
        to be configured explicitly.

        :param s3uri: path (string), startswith s3://
        :param kwargs: https://docs.aws.amazon.com/cli/latest/reference/s3/presign.html
        :return: the pre-signed URL as a stripped string
        """
        return self.exec_command('presign %s' % (s3uri), **kwargs)[0].strip()

    def rb(self, s3uri, **kwargs):
        """Delete an empty S3 bucket.

        A bucket must be completely empty of objects and versioned objects
        before it can be deleted. However, the --force parameter can be used
        to delete the non-versioned objects in the bucket before the bucket
        is deleted.

        :param s3uri: path (string), startswith s3://
        :param kwargs: https://docs.aws.amazon.com/cli/latest/reference/s3/rb.html
        :return: parsed command output
        """
        return self.exec_command('rb %s' % (s3uri), **kwargs)

    def rm(self, s3uri, **kwargs):
        """Delete an S3 object.

        :param s3uri: path (string), startswith s3://
        :param kwargs: https://docs.aws.amazon.com/cli/latest/reference/s3/rm.html
        :return: parsed command output
        """
        return self.exec_command('rm %s' % (s3uri), **kwargs)

    def sync(self, sync_from, sync_to, **kwargs):
        """Sync directories and S3 prefixes.

        Recursively copies new and updated files from the source directory to
        the destination. Only creates folders in the destination if they
        contain one or more files.

        :param sync_from: source path or S3 URI
        :param sync_to: destination path or S3 URI (fix: was documented twice
            as ``sync_from``)
        :param kwargs: https://docs.aws.amazon.com/cli/latest/reference/s3/sync.html
        :return: parsed command output
        """
        return self.exec_command('sync %s %s' % (sync_from, sync_to), **kwargs)

    def website(self, s3uri, index_document='index.html', error_document='error.html'):
        """Set the website configuration for a bucket.

        :param s3uri: path (string), startswith s3://
        :param index_document: document served for directory requests
        :param error_document: document served on 4xx errors
        :return: parsed command output
        """
        return self.exec_command(
            'website %s' % (s3uri,), **{'index_document': index_document, 'error_document': error_document}
        )
s3 = S3()
|
{"/awspycli/__init__.py": ["/awspycli/model/emr_cli.py", "/awspycli/model/s3_cli.py"]}
|
30,038
|
MrLawes/awspycli
|
refs/heads/master
|
/awspycli/__init__.py
|
from awspycli.model.emr_cli import emr
from awspycli.model.s3_cli import s3
VERSION = '1.0.2.1'
|
{"/awspycli/__init__.py": ["/awspycli/model/emr_cli.py", "/awspycli/model/s3_cli.py"]}
|
30,053
|
dagron/pyspec
|
refs/heads/master
|
/spec/impl/util/strings.py
|
def a_or_an(s: str) -> str:
    """Prefix *s* with the indefinite article "a" or "an".

    The article is chosen purely by whether the first character is a vowel;
    *s* must be non-empty.
    """
    vowels = set('AEIOUaeiou')
    article = "an" if s[0] in vowels else "a"
    return "{} {}".format(article, s)
|
{"/spec/impl/iterables.py": ["/spec/impl/core.py"], "/tests/spec/test_core.py": ["/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/spec/impl/specs.py": ["/spec/impl/core.py", "/spec/impl/util/strings.py"], "/spec/impl/records/core.py": ["/spec/core.py", "/spec/impl/dicts.py", "/spec/impl/records/annotations.py", "/spec/impl/records/forwardrefs.py", "/spec/impl/records/typevars.py"], "/spec/coercions.py": ["/spec/core.py"], "/spec/core.py": ["/spec/impl/core.py", "/spec/impl/dicts.py", "/spec/impl/iterables.py", "/spec/impl/specs.py", "/spec/impl/util/strings.py"], "/spec/impl/records/annotations.py": ["/spec/impl/records/typevars.py"], "/spec/impl/records/typevars.py": ["/spec/impl/core.py"], "/spec/impl/records/forwardrefs.py": ["/spec/impl/core.py", "/spec/impl/records/annotations.py"], "/tests/spec/test_coercions.py": ["/spec/coercions.py"], "/spec/impl/core.py": ["/spec/impl/util/callables.py"], "/tests/spec/test_records.py": ["/spec/core.py", "/spec/impl/core.py", "/spec/impl/records/core.py"], "/tests/spec/test_dict.py": ["/spec/coercions.py", "/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/tests/spec/support.py": ["/spec/core.py", "/spec/impl/core.py"], "/spec/impl/dicts.py": ["/spec/impl/core.py", "/spec/impl/specs.py"]}
|
30,054
|
dagron/pyspec
|
refs/heads/master
|
/spec/impl/iterables.py
|
from typing import Iterable, List
from spec.impl.core import Spec, SpecResult, Problem, Path, isinvalid, INVALID
class CollOf(Spec):
    """Spec matching any iterable whose items all conform to an item spec.

    Tuples conform to tuples; every other iterable conforms to a list.
    """

    def __init__(self, itemspec: Spec):
        super().__init__()
        self._itemspec = itemspec

    def conform(self, xs: Iterable) -> SpecResult:
        if not hasattr(xs, '__iter__'):
            return INVALID
        conformed = []
        for item in xs:
            value = self._itemspec.conform(item)
            if isinvalid(value):
                return INVALID
            conformed.append(value)
        return tuple(conformed) if isinstance(xs, tuple) else conformed

    def describe(self) -> str:
        return "a collection where items are {}".format(self._itemspec.describe())

    def explain(self, p: Path, xs: Iterable) -> List[Problem]:
        if not hasattr(xs, '__iter__'):
            return [Problem(p, xs, self, "not iterable")]
        problems = []
        for index, item in enumerate(xs):
            # Index each problem path by the item's position.
            problems.extend(self._itemspec.explain(p + (index,), item))
        return problems
|
{"/spec/impl/iterables.py": ["/spec/impl/core.py"], "/tests/spec/test_core.py": ["/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/spec/impl/specs.py": ["/spec/impl/core.py", "/spec/impl/util/strings.py"], "/spec/impl/records/core.py": ["/spec/core.py", "/spec/impl/dicts.py", "/spec/impl/records/annotations.py", "/spec/impl/records/forwardrefs.py", "/spec/impl/records/typevars.py"], "/spec/coercions.py": ["/spec/core.py"], "/spec/core.py": ["/spec/impl/core.py", "/spec/impl/dicts.py", "/spec/impl/iterables.py", "/spec/impl/specs.py", "/spec/impl/util/strings.py"], "/spec/impl/records/annotations.py": ["/spec/impl/records/typevars.py"], "/spec/impl/records/typevars.py": ["/spec/impl/core.py"], "/spec/impl/records/forwardrefs.py": ["/spec/impl/core.py", "/spec/impl/records/annotations.py"], "/tests/spec/test_coercions.py": ["/spec/coercions.py"], "/spec/impl/core.py": ["/spec/impl/util/callables.py"], "/tests/spec/test_records.py": ["/spec/core.py", "/spec/impl/core.py", "/spec/impl/records/core.py"], "/tests/spec/test_dict.py": ["/spec/coercions.py", "/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/tests/spec/support.py": ["/spec/core.py", "/spec/impl/core.py"], "/spec/impl/dicts.py": ["/spec/impl/core.py", "/spec/impl/specs.py"]}
|
30,055
|
dagron/pyspec
|
refs/heads/master
|
/spec/impl/util/callables.py
|
import inspect
from typing import Callable
def can_be_called_with_one_argument(c: Callable) -> bool:
    """Return True if *c* can be invoked with exactly one positional argument.

    Accepts callables with one required argument, zero required arguments
    plus ``*args``, or zero required arguments plus at least one default.

    :param c: any callable inspectable by ``inspect.getfullargspec``
    :return: a real bool (fix: previously could return the varargs *name*,
        a truthy string, instead of True)
    """
    argspec = inspect.getfullargspec(c)
    default_arg_count = len(argspec.defaults) if argspec.defaults else 0
    non_default_arg_count = len(argspec.args) - default_arg_count
    if not (inspect.isbuiltin(c) or inspect.isfunction(c)):
        # Callable object: its __call__ takes 'self', which doesn't count.
        non_default_arg_count -= 1
    # Fix: the defaults clause previously fired regardless of how many
    # required arguments remained, wrongly accepting e.g. lambda x, y, z=1.
    return (non_default_arg_count == 1
            or (non_default_arg_count == 0 and argspec.varargs is not None)
            or (non_default_arg_count == 0 and default_arg_count >= 1))
|
{"/spec/impl/iterables.py": ["/spec/impl/core.py"], "/tests/spec/test_core.py": ["/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/spec/impl/specs.py": ["/spec/impl/core.py", "/spec/impl/util/strings.py"], "/spec/impl/records/core.py": ["/spec/core.py", "/spec/impl/dicts.py", "/spec/impl/records/annotations.py", "/spec/impl/records/forwardrefs.py", "/spec/impl/records/typevars.py"], "/spec/coercions.py": ["/spec/core.py"], "/spec/core.py": ["/spec/impl/core.py", "/spec/impl/dicts.py", "/spec/impl/iterables.py", "/spec/impl/specs.py", "/spec/impl/util/strings.py"], "/spec/impl/records/annotations.py": ["/spec/impl/records/typevars.py"], "/spec/impl/records/typevars.py": ["/spec/impl/core.py"], "/spec/impl/records/forwardrefs.py": ["/spec/impl/core.py", "/spec/impl/records/annotations.py"], "/tests/spec/test_coercions.py": ["/spec/coercions.py"], "/spec/impl/core.py": ["/spec/impl/util/callables.py"], "/tests/spec/test_records.py": ["/spec/core.py", "/spec/impl/core.py", "/spec/impl/records/core.py"], "/tests/spec/test_dict.py": ["/spec/coercions.py", "/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/tests/spec/support.py": ["/spec/core.py", "/spec/impl/core.py"], "/spec/impl/dicts.py": ["/spec/impl/core.py", "/spec/impl/specs.py"]}
|
30,056
|
dagron/pyspec
|
refs/heads/master
|
/tests/spec/test_core.py
|
from typing import Callable
from spec.core import conform, explain_data, equal_to, any_, is_instance, even, odd, is_none, specize, coerce, \
in_range, gt, lt, lte, gte, describe, is_in, assert_spec, isinvalid, isvalid, coll_of
from spec.impl.core import path, Problem, Explanation, SpecError
from tests.spec.support import check_spec
def test_any():
    """any_() accepts every value."""
    spec = any_()
    for value in (1, None, ""):
        check_spec(spec, value)
def test_equal_to():
    """equal_to(1) matches 1 and explains mismatches with both types."""
    spec = equal_to(1)
    check_spec(spec, 1)
    expected = [Problem(path(), 2, spec, "expected 1 (int) but got 2 (int)")]
    check_spec(spec, 2, expected)
def test_is_instance():
    """is_instance(int) matches ints and compares equal by wrapped class."""
    spec = is_instance(int)
    check_spec(spec, 1)
    check_spec(spec, "",
               [Problem(path(), "", spec, "expected an int but got a str")])
    assert is_instance(int) == is_instance(int)
def test_types_as_specs():
    """A bare type is promoted to an is_instance spec."""
    spec = int
    check_spec(spec, 1)
    check_spec(spec, "",
               [Problem(path(), "", is_instance(spec), "expected an int but got a str")])
def test_even():
    """even() accepts even ints; odd ints and non-ints fail the same way."""
    spec = even()
    check_spec(spec, 2)
    for bad in (3, ""):
        check_spec(spec, bad, [Problem(path(), bad, spec, "not an even number")])
def test_odd():
    """odd() accepts odd ints; even ints and non-ints fail the same way."""
    spec = odd()
    check_spec(spec, 3)
    for bad in (4, ""):
        check_spec(spec, bad, [Problem(path(), bad, spec, "not an odd number")])
def test_is_none():
    """is_none() accepts only None -- falsy values like '' and [] fail."""
    spec = is_none()
    check_spec(spec, None)
    for bad in ("", []):
        check_spec(spec, bad, [Problem(path(), bad, spec, "not None")])
def test_in_range():
    """in_range(2, 4) includes the lower bound and excludes the upper."""
    spec = in_range(2, 4)
    for ok in (2, 3):
        check_spec(spec, ok)
    for bad in (1, 4):
        check_spec(spec, bad, [Problem(path(), bad, spec, "not between 2 and 4")])
def test_greater_than():
    """gt(2) accepts strictly greater values; the bound itself fails."""
    spec = gt(2)
    check_spec(spec, 3)
    for bad in (2, 1):
        check_spec(spec, bad, [Problem(path(), bad, spec, "not greater than 2")])
def test_less_than():
    """lt(2) accepts strictly smaller values; the bound itself fails."""
    spec = lt(2)
    check_spec(spec, 1)
    for bad in (2, 3):
        check_spec(spec, bad, [Problem(path(), bad, spec, "not less than 2")])
def test_less_than_or_equal_to():
    """lte(2) accepts values up to and including the bound."""
    spec = lte(2)
    for ok in (1, 2):
        check_spec(spec, ok)
    for bad in (3, 4):
        check_spec(spec, bad, [Problem(path(), bad, spec, "not less than or equal to 2")])
def test_greater_than_or_equal_to():
    """gte(2) accepts values down to and including the bound."""
    spec = gte(2)
    for ok in (3, 2):
        check_spec(spec, ok)
    for bad in (1, 0):
        check_spec(spec, bad, [Problem(path(), bad, spec, "not greater than or equal to 2")])
def test_specizing_builtin():
    """Builtins like callable() act as specs named after the builtin."""
    spec = callable
    check_spec(spec, lambda x: x)
    assert isinvalid(conform(spec, "not callable"))
    explanation = explain_data(spec, "clearly-not-callable")
    assert explanation is not None
    problem = explanation.problems[0]
    assert problem.reason == "not callable"
    assert problem.value == "clearly-not-callable"
    assert problem.path == path()
def test_specizing_lambda():
    """Lambdas act as predicate specs; the reason leaks the lambda's name."""
    spec = (lambda x: bool(x))
    check_spec(spec, True)
    assert isinvalid(conform(spec, False))
    explanation = explain_data(spec, False)
    assert explanation is not None
    problem = explanation.problems[0]
    # "not <lambda>" is obviously not an ideal message.
    assert problem.reason == "not <lambda>"
    assert problem.value is False
    assert problem.path == path()
def test_specizing_arity_1_lambdas():
    """Every callable shape invocable with one argument is accepted."""
    candidates = (
        lambda x: True,                       # plain arity 1
        lambda x, *ys: True,                  # extra varargs
        lambda *xs: True,                     # varargs only
        lambda x, y=1: True,                  # defaults don't count
        lambda x=1: True,                     # defaults-only works
        lambda x=1, *xs, **kws: True,
        lambda x, y=1, z=2, *xs, **kws: True, # trying to break it
        lambda x, *xs, **kws: True,
    )
    for candidate in candidates:
        check_spec(candidate, True)
def expect_arity_error(c: Callable):
    """Assert that conform/explain/describe all reject *c* with a TypeError."""
    expected_error_message = 'Expected arity 1 callable as check but got {}'.format(c)
    operations = [
        (lambda: conform(c, True), "conform"),
        (lambda: explain_data(c, True), "explain"),
        (lambda: describe(c), "describe"),
    ]
    for run, label in operations:
        try:
            # noinspection PyTypeChecker
            run()
            assert False, "expected exception during " + label
        except TypeError as e:
            assert expected_error_message in str(e), "checking " + label
def test_specizing_non_arity_1_lambdas():
    """Callables that cannot take exactly one argument are rejected."""
    for bad in (lambda x, y: True,
                lambda x, y, *varargs: True,
                lambda x, y, **kwargs: True,
                lambda **kwargs: True,
                lambda x, y, *varargs, **kwargs: True):
        expect_arity_error(bad)
def test_specizing_callable_objects():
    """Objects whose __call__ is effectively arity 1 act as predicate specs."""
    class ArityOneCallableObject:
        def __call__(self, x: object) -> bool:
            return bool(x)

    checker = ArityOneCallableObject()
    assert isvalid(conform(checker, True))
    assert isinvalid(conform(checker, False))

    class DefaultArgsCallableObject:
        def __call__(self, x: object = True, y=False) -> bool:
            return bool(x)

    defaulted = DefaultArgsCallableObject()
    assert isvalid(conform(defaulted, True))
    assert isinvalid(conform(defaulted, False))
def test_sets():
    """A set literal acts as a membership spec."""
    spec = {"a", "b"}
    for member in ("a", "b"):
        check_spec(spec, member)
    check_spec(spec, "c",
               [Problem(path(), "c", specize(spec), "not in ['a', 'b']")])
def test_is_in_over_sets():
    """is_in over a set checks membership."""
    spec = is_in({"a", "b"})
    for member in ("a", "b"):
        check_spec(spec, member)
    check_spec(spec, "c",
               [Problem(path(), "c", specize(spec), "not in ['a', 'b']")])
def test_is_in_over_dicts():
    """is_in over a dict checks membership of the keys."""
    spec = is_in({"a": 1, "b": 2})
    for member in ("a", "b"):
        check_spec(spec, member)
    check_spec(spec, "c",
               [Problem(path(), "c", specize(spec), "not in ['a', 'b']")])
def test_is_in_over_lists():
    """is_in over a list checks membership."""
    spec = is_in(["a", "b"])
    for member in ("a", "b"):
        check_spec(spec, member)
    check_spec(spec, "c",
               [Problem(path(), "c", specize(spec), "not in ['a', 'b']")])
class CoercingClass:
    """Arity-1 callable object used as a coercer in test_coerce below."""
    def __call__(self, x):
        return int(x)
def test_coerce():
    """coerce() converts a value before checking it and reports conversion
    failures for functions, callable objects and lambdas as coercers."""
    underlying_spec = in_range(1, 2)
    s = coerce(int, underlying_spec)
    check_spec(s, 1)
    # Strings that convert cleanly conform to the converted value.
    check_spec(s, "1", expected_conform=1)
    # TODO: problem contains underlying_spec. Not sure yet if this is the right behaviour
    check_spec(s, 2, [Problem(path(), 2, underlying_spec, "not between 1 and 2")])
    check_spec(s, "2", [Problem(path(), 2, underlying_spec, "not between 1 and 2")])
    check_spec(s, "one",
               [Problem(path(), "one", s,
                        "could not coerce 'one' (str) using coercer: int because:\n"
                        "invalid literal for int() with base 10: 'one'")])
    # The coercer's name appears in the failure message for class coercers...
    spec_using_a_class_as_a_coercer = coerce(CoercingClass(), underlying_spec)
    check_spec(spec_using_a_class_as_a_coercer, "one",
               [Problem(path(),
                        "one",
                        spec_using_a_class_as_a_coercer,
                        "could not coerce 'one' (str) using coercer: CoercingClass because:\n"
                        "invalid literal for int() with base 10: 'one'")])
    # ...and for lambda coercers (which show up as "<lambda>").
    spec_using_a_lambda_as_a_coercer = coerce(lambda x: int(x), underlying_spec)
    check_spec(spec_using_a_lambda_as_a_coercer, "one",
               [Problem(path(), "one", spec_using_a_lambda_as_a_coercer,
                        "could not coerce 'one' (str) using coercer: <lambda> because:\n"
                        "invalid literal for int() with base 10: 'one'")])
def test_assert():
    """assert_spec raises SpecError carrying the explanation on failure."""
    spec = specize(int)
    assert_spec(spec, 1)
    try:
        assert_spec(spec, "one")
        assert False, "Expected exception"
    except SpecError as e:
        caught = e
    assert caught.explanation == Explanation.with_problems(
        Problem(path(), "one", spec, "expected an int but got a str"))
def test_coll_of():
    """coll_of validates every item and reports problems indexed by position;
    non-iterables fail with a single 'not iterable' problem."""
    item_spec = specize(int)
    s = coll_of(item_spec)
    check_spec(s, [1])
    check_spec(s, [1, 2])
    check_spec(s, (1, 2))
    try:
        assert_spec(s, ["one", 2, "three"])
        assert False, "Expected exception"
    except SpecError as e:
        error = e
    # One problem per failing item, with the item's index in the path.
    assert error.explanation == Explanation.with_problems(
        Problem(path(0), "one", item_spec, "expected an int but got a str"),
        Problem(path(2), "three", item_spec, "expected an int but got a str"))
    try:
        assert_spec(s, 1)
        assert False, "Expected exception"
    except SpecError as e:
        error = e
    assert error.explanation == Explanation.with_problems(Problem(path(), 1, s, "not iterable"))
|
{"/spec/impl/iterables.py": ["/spec/impl/core.py"], "/tests/spec/test_core.py": ["/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/spec/impl/specs.py": ["/spec/impl/core.py", "/spec/impl/util/strings.py"], "/spec/impl/records/core.py": ["/spec/core.py", "/spec/impl/dicts.py", "/spec/impl/records/annotations.py", "/spec/impl/records/forwardrefs.py", "/spec/impl/records/typevars.py"], "/spec/coercions.py": ["/spec/core.py"], "/spec/core.py": ["/spec/impl/core.py", "/spec/impl/dicts.py", "/spec/impl/iterables.py", "/spec/impl/specs.py", "/spec/impl/util/strings.py"], "/spec/impl/records/annotations.py": ["/spec/impl/records/typevars.py"], "/spec/impl/records/typevars.py": ["/spec/impl/core.py"], "/spec/impl/records/forwardrefs.py": ["/spec/impl/core.py", "/spec/impl/records/annotations.py"], "/tests/spec/test_coercions.py": ["/spec/coercions.py"], "/spec/impl/core.py": ["/spec/impl/util/callables.py"], "/tests/spec/test_records.py": ["/spec/core.py", "/spec/impl/core.py", "/spec/impl/records/core.py"], "/tests/spec/test_dict.py": ["/spec/coercions.py", "/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/tests/spec/support.py": ["/spec/core.py", "/spec/impl/core.py"], "/spec/impl/dicts.py": ["/spec/impl/core.py", "/spec/impl/specs.py"]}
|
30,057
|
dagron/pyspec
|
refs/heads/master
|
/spec/impl/specs.py
|
from typing import Callable, List, Iterable
from spec.impl.core import Spec, SpecResult, SimpleSpec, DelegatingSpec, Problem, Path, INVALID, isvalid, \
isinvalid
from spec.impl.util.strings import a_or_an
class Any(Spec):
    """Spec that accepts every value unchanged."""

    def conform(self, x) -> SpecResult:
        return x

    def explain(self, p: Path, x: object) -> List[Problem]:
        return []

    def describe(self) -> str:
        return "anything"
class Never(Spec):
    """Spec that rejects every value."""

    def conform(self, x) -> SpecResult:
        return INVALID

    def explain(self, p: Path, x: object) -> List[Problem]:
        return [Problem(p, x, self, "this spec will always fail")]

    def describe(self) -> str:
        return "this spec will always fail"
class EqualTo(SimpleSpec):
    """Spec that accepts only values equal to a fixed expected value.

    Instances compare (and hash) by the wrapped value.
    """

    def __init__(self, value):
        def mismatch(x):
            return "expected {} ({}) but got {} ({})".format(
                value, type(value).__name__, x, type(x).__name__)

        super().__init__(str(value), lambda x: x == value, mismatch)
        self._value = value

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self._value == other._value

    def __ne__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return not self.__eq__(other)

    def __hash__(self):
        return hash(self._value)
class IsInstance(SimpleSpec):
    """Spec that accepts instances of a given class.

    Instances compare (and hash) by the wrapped class.
    """

    def __init__(self, cls):
        description = a_or_an(cls.__name__)

        def wrong_type(x):
            return "expected {} but got {}".format(description, a_or_an(type(x).__name__))

        super().__init__(description, lambda x: isinstance(x, cls), wrong_type)
        self._cls = cls

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self._cls == other._cls

    def __ne__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return not self.__eq__(other)

    def __hash__(self):
        return hash(self._cls)
def Even() -> Spec:
    """Spec accepting even integers only."""
    return SimpleSpec("an even number",
                      lambda x: isinstance(x, int) and x % 2 == 0)
def Odd() -> Spec:
    """Spec accepting odd integers only."""
    return SimpleSpec("an odd number",
                      lambda x: isinstance(x, int) and x % 2 == 1)
def IsNone() -> Spec:
    """Spec accepting only None (identity check, not truthiness)."""
    return SimpleSpec("None", lambda x: x is None)
def InRange(start, end_exclusive=None) -> Spec:
    """Spec for values in ``[start, end_exclusive)``, or ``>= start`` when
    *end_exclusive* is None.

    :param start: inclusive lower bound
    :param end_exclusive: exclusive upper bound, or None for unbounded
    """
    if end_exclusive is None:
        # Bug fix: the open-ended description used to render as
        # "between <start> None" (and used truthiness, so a 0 upper bound
        # was described as unbounded while still being enforced).
        description = "greater than or equal to {}".format(start)
    else:
        description = "between {} and {}".format(start, end_exclusive)
    return SimpleSpec(description,
                      lambda x: x >= start and (end_exclusive is None or x < end_exclusive))
def Gt(value) -> Spec:
    """Spec accepting values strictly greater than *value*."""
    description = "greater than {}".format(value)
    return SimpleSpec(description, lambda x: x > value)
def Lt(value) -> Spec:
    """Spec accepting values strictly less than *value*."""
    description = "less than {}".format(value)
    return SimpleSpec(description, lambda x: x < value)
def Gte(value) -> Spec:
    """Spec accepting values greater than or equal to *value*."""
    description = "greater than or equal to {}".format(value)
    return SimpleSpec(description, lambda x: x >= value)
def Lte(value) -> Spec:
    """Spec accepting values less than or equal to *value*."""
    description = "less than or equal to {}".format(value)
    return SimpleSpec(description, lambda x: x <= value)
class IsIn(Spec):
    """Spec that accepts values contained in a fixed collection.

    Membership is checked against a frozenset; a sorted copy is kept for
    stable descriptions and error messages.
    """

    def __init__(self, coll: Iterable):
        members = frozenset(coll)
        self._coll = members
        self._coll_for_explain = sorted(members)

    def describe(self) -> str:
        return "in {}".format(self._coll_for_explain)

    def explain(self, p: Path, x: object) -> List[Problem]:
        if x not in self._coll:
            return [Problem(p, x, self, "not {}".format(self.describe()))]
        return []

    def conform(self, x: object) -> SpecResult:
        if x in self._coll:
            return x
        return INVALID

    def __eq__(self, other):
        """Two IsIn specs are equal when they wrap the same member set."""
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self._coll == other._coll

    def __ne__(self, other):
        """Non-equality mirrors __eq__."""
        if not isinstance(other, self.__class__):
            return NotImplemented
        return not self.__eq__(other)

    def __hash__(self):
        """Hash by the (frozen) member set."""
        return hash(self._coll)
Coercer = Callable[[object], object]
def name_of(x):
    """Best-effort human-readable name for a callable or value.

    Prefers the code object's name (functions/lambdas), then ``__name__``
    (classes, builtins), then the type's name.
    """
    code = getattr(x, '__code__', None)
    if code is not None:
        return code.co_name
    if hasattr(x, '__name__'):
        return x.__name__
    return type(x).__name__
def _default_coercion_explainer(coercer: Coercer):
    """Build the default failure-message function for a Coerce spec."""
    def explainer(x, e):
        return "could not coerce '{}' ({}) using coercer: {} because:\n{}".format(
            x, type(x).__name__, name_of(coercer), e)
    return explainer
class Coerce(DelegatingSpec):
    """Spec that runs a coercer over the value before delegating to *spec*.

    If the coercer raises, the value is invalid; explain() reports the
    failure via *explain_coercion_failure*.
    """

    def __init__(self,
                 coercer: Coercer,
                 spec: Spec,
                 explain_coercion_failure: Callable[[object], str] = None):
        super().__init__(spec)
        self._coercer = coercer
        self._explain_coercion_failure = explain_coercion_failure or _default_coercion_explainer(coercer)

    def conform(self, x) -> SpecResult:
        # noinspection PyBroadException
        try:
            c = self._coercer(x)
        except Exception:
            # Consistency fix: explain() below catches Exception; the bare
            # except here also swallowed KeyboardInterrupt/SystemExit.
            return INVALID
        else:
            return super().conform(c)

    def explain(self, p: Path, x: object) -> List[Problem]:
        # noinspection PyBroadException
        try:
            c = self._coercer(x)
        except Exception as e:
            return [Problem(p, x, self, self._explain_coercion_failure(x, e))]
        else:
            return super().explain(p, c)
class OneOf(Spec):
    """Spec that accepts a value conforming to at least one of the given specs."""

    def __init__(self, specs: Iterable[Spec]):
        self._specs = specs

    def conform(self, x) -> SpecResult:
        for s in self._specs:
            r = s.conform(x)
            if isvalid(r):
                return r
        return INVALID

    def describe(self) -> str:
        return "one of {}".format([s.describe() for s in self._specs])

    def explain(self, p: Path, x: object) -> List[Problem]:
        # Bug fix: a value accepted by any alternative is valid, so return no
        # problems in that case. Previously problems from the non-matching
        # alternatives leaked out even when conform() succeeded.
        problems = []
        for s in self._specs:
            ps = s.explain(p, x)
            if not ps:
                return []
            problems.extend(ps)
        return problems
class AllOf(Spec):
    """Spec that threads a value through every given spec in order.

    Each spec's conformed value becomes the input to the next spec.
    """

    def __init__(self, specs: Iterable[Spec]):
        self._specs = specs

    def conform(self, x) -> SpecResult:
        for s in self._specs:
            x = s.conform(x)
            if isinvalid(x):
                return x
        return x

    def describe(self) -> str:
        return "all of {}".format([s.describe() for s in self._specs])

    def explain(self, p: Path, x: object) -> List[Problem]:
        # Bug fix: previously the INVALID sentinel returned by the failing
        # spec's conform() was passed to its explain(); explain the value
        # that actually failed instead.
        for s in self._specs:
            conformed = s.conform(x)
            if isinvalid(conformed):
                return s.explain(p, x)
            x = conformed
        return []
|
{"/spec/impl/iterables.py": ["/spec/impl/core.py"], "/tests/spec/test_core.py": ["/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/spec/impl/specs.py": ["/spec/impl/core.py", "/spec/impl/util/strings.py"], "/spec/impl/records/core.py": ["/spec/core.py", "/spec/impl/dicts.py", "/spec/impl/records/annotations.py", "/spec/impl/records/forwardrefs.py", "/spec/impl/records/typevars.py"], "/spec/coercions.py": ["/spec/core.py"], "/spec/core.py": ["/spec/impl/core.py", "/spec/impl/dicts.py", "/spec/impl/iterables.py", "/spec/impl/specs.py", "/spec/impl/util/strings.py"], "/spec/impl/records/annotations.py": ["/spec/impl/records/typevars.py"], "/spec/impl/records/typevars.py": ["/spec/impl/core.py"], "/spec/impl/records/forwardrefs.py": ["/spec/impl/core.py", "/spec/impl/records/annotations.py"], "/tests/spec/test_coercions.py": ["/spec/coercions.py"], "/spec/impl/core.py": ["/spec/impl/util/callables.py"], "/tests/spec/test_records.py": ["/spec/core.py", "/spec/impl/core.py", "/spec/impl/records/core.py"], "/tests/spec/test_dict.py": ["/spec/coercions.py", "/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/tests/spec/support.py": ["/spec/core.py", "/spec/impl/core.py"], "/spec/impl/dicts.py": ["/spec/impl/core.py", "/spec/impl/specs.py"]}
|
30,058
|
dagron/pyspec
|
refs/heads/master
|
/spec/impl/records/core.py
|
from typing import TypeVar, Union, List, _ForwardRef, Any
from spec.core import is_instance, all_of, one_of, coll_of, any_
from spec.impl.dicts import DictSpec
from spec.impl.records.annotations import AnnotationContext, extract_annotations
from spec.impl.records.forwardrefs import resolve_forward_ref, DeferredSpecFromForwardReference
from spec.impl.records.typevars import UnboundTypeVar, UnboundTypeVarSpec, UnboundTypeVarDictSpec, _typevar_key
def resolve_typevar(a: AnnotationContext) -> Union[AnnotationContext, UnboundTypeVar]:
    """Resolve a TypeVar annotation against the bindings captured from the class.

    Returns an UnboundTypeVar when the variable has no binding (or is bound
    to itself), otherwise a new AnnotationContext for the bound annotation.
    """
    name = a.annotation.__name__
    bindings = a.typevars_from_class
    if name not in bindings:
        return UnboundTypeVar(a.annotation)
    bound_to = bindings[name]
    if isinstance(bound_to, TypeVar) and _typevar_key(bound_to) == _typevar_key(a.annotation):
        # Bound to an equivalent TypeVar -- still effectively unbound.
        return UnboundTypeVar(bound_to)
    return AnnotationContext(bound_to, a.class_annotation_was_on, a.typevars_from_class)
def spec_from(x: Union[AnnotationContext, type]):
    """Recursively build a Spec from a type, annotation context or typevar.

    Handles plain types (including Record subclasses), UnboundTypeVar
    markers, and AnnotationContext wrappers around typing constructs
    (Union, Any, forward references, TypeVars, List[...]).

    :raises NotImplementedError: when no rule matches *x*
    """
    if x is None:
        # A None annotation means the value must be None.
        return is_instance(type(None))
    if isinstance(x, type):
        if issubclass(x, Record):
            # Build a dict spec keyed by the record's annotated attributes.
            annotations = extract_annotations(x)
            specs = {}
            for attr, annotation in annotations.items():
                specs[attr] = spec_from(annotation)
            unbound_typevars = {k: v.typevar for k, v in specs.items() if isinstance(v, UnboundTypeVarSpec)}
            if unbound_typevars:
                # Also check that unbound typevars are used consistently.
                return all_of(DictSpec(specs), UnboundTypeVarDictSpec(unbound_typevars, spec_from))
            else:
                return DictSpec(specs)
        else:
            return is_instance(x)
    if isinstance(x, UnboundTypeVar):
        return UnboundTypeVarSpec(x.typevar)
    if isinstance(x, AnnotationContext):
        if type(x.annotation) == type(Union):
            # Union[...] -> value must satisfy at least one member spec.
            return one_of(*[spec_from(x.for_hint(a))
                            for a in x.annotation.__args__])
        elif type(x.annotation) == type(Any):
            return any_()
        elif isinstance(x.annotation, _ForwardRef) or isinstance(x.annotation, str):
            # Defer resolution of forward/string references until first use.
            return DeferredSpecFromForwardReference(spec_from, lambda: resolve_forward_ref(x))
        elif isinstance(x.annotation, TypeVar):
            return spec_from(resolve_typevar(x))
        elif issubclass(x.annotation, List):
            # List[T] -> collection-of spec over the element hint.
            return coll_of(spec_from(x.for_hint(x.annotation.__args__[0])))
        else:
            return spec_from(x.annotation)
    raise NotImplementedError("Can't produce a spec from {}".format(x))
class Record:
    """Marker base class: spec_from() builds a dict spec for subclasses
    from their class-level annotations."""
    pass
|
{"/spec/impl/iterables.py": ["/spec/impl/core.py"], "/tests/spec/test_core.py": ["/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/spec/impl/specs.py": ["/spec/impl/core.py", "/spec/impl/util/strings.py"], "/spec/impl/records/core.py": ["/spec/core.py", "/spec/impl/dicts.py", "/spec/impl/records/annotations.py", "/spec/impl/records/forwardrefs.py", "/spec/impl/records/typevars.py"], "/spec/coercions.py": ["/spec/core.py"], "/spec/core.py": ["/spec/impl/core.py", "/spec/impl/dicts.py", "/spec/impl/iterables.py", "/spec/impl/specs.py", "/spec/impl/util/strings.py"], "/spec/impl/records/annotations.py": ["/spec/impl/records/typevars.py"], "/spec/impl/records/typevars.py": ["/spec/impl/core.py"], "/spec/impl/records/forwardrefs.py": ["/spec/impl/core.py", "/spec/impl/records/annotations.py"], "/tests/spec/test_coercions.py": ["/spec/coercions.py"], "/spec/impl/core.py": ["/spec/impl/util/callables.py"], "/tests/spec/test_records.py": ["/spec/core.py", "/spec/impl/core.py", "/spec/impl/records/core.py"], "/tests/spec/test_dict.py": ["/spec/coercions.py", "/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/tests/spec/support.py": ["/spec/core.py", "/spec/impl/core.py"], "/spec/impl/dicts.py": ["/spec/impl/core.py", "/spec/impl/specs.py"]}
|
30,059
|
dagron/pyspec
|
refs/heads/master
|
/spec/coercions.py
|
from urllib.parse import urlparse, ParseResult
from uuid import UUID
from spec.core import coerce
def coerce_uuid(x):
    """Return *x* as a UUID, parsing it when it isn't one already."""
    if isinstance(x, UUID):
        return x
    return UUID(x)
Uuid = coerce(coerce_uuid, UUID)
def coerce_int(x):
    """Return *x* as an int, converting it when it isn't one already."""
    if isinstance(x, int):
        return x
    return int(x)
Int = coerce(coerce_int, int)
def parse_url(x):
    """Parse x into a urllib ParseResult."""
    result = urlparse(x)
    return result
Url = coerce(parse_url, ParseResult)
|
{"/spec/impl/iterables.py": ["/spec/impl/core.py"], "/tests/spec/test_core.py": ["/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/spec/impl/specs.py": ["/spec/impl/core.py", "/spec/impl/util/strings.py"], "/spec/impl/records/core.py": ["/spec/core.py", "/spec/impl/dicts.py", "/spec/impl/records/annotations.py", "/spec/impl/records/forwardrefs.py", "/spec/impl/records/typevars.py"], "/spec/coercions.py": ["/spec/core.py"], "/spec/core.py": ["/spec/impl/core.py", "/spec/impl/dicts.py", "/spec/impl/iterables.py", "/spec/impl/specs.py", "/spec/impl/util/strings.py"], "/spec/impl/records/annotations.py": ["/spec/impl/records/typevars.py"], "/spec/impl/records/typevars.py": ["/spec/impl/core.py"], "/spec/impl/records/forwardrefs.py": ["/spec/impl/core.py", "/spec/impl/records/annotations.py"], "/tests/spec/test_coercions.py": ["/spec/coercions.py"], "/spec/impl/core.py": ["/spec/impl/util/callables.py"], "/tests/spec/test_records.py": ["/spec/core.py", "/spec/impl/core.py", "/spec/impl/records/core.py"], "/tests/spec/test_dict.py": ["/spec/coercions.py", "/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/tests/spec/support.py": ["/spec/core.py", "/spec/impl/core.py"], "/spec/impl/dicts.py": ["/spec/impl/core.py", "/spec/impl/specs.py"]}
|
30,060
|
dagron/pyspec
|
refs/heads/master
|
/spec/core.py
|
from typing import Callable, Optional, Set, Iterable, Dict
import spec.impl.core as impl
from spec.impl.core import Spec, SpecResult, SimpleSpec, Explanation, path
from spec.impl.dicts import DictSpec
from spec.impl.iterables import CollOf
from spec.impl.specs import Any, EqualTo, IsInstance, Even, Odd, IsNone, Coerce, InRange, Gt, Lt, Gte, Lte, IsIn, Never, \
OneOf, AllOf
from spec.impl.util.strings import a_or_an
Speccable = impl.Speccable
INVALID = impl.INVALID
# noinspection PyProtectedMember
def isvalid(x) -> bool:
    """
    Tell whether x, a value returned by spec.core.conform(), represents a
    successful conform.
    """
    return impl.isvalid(x)
# noinspection PyProtectedMember
def isinvalid(x) -> bool:
    """
    Tell whether x, a value returned by spec.core.conform(), represents a
    failed conform.
    """
    return impl.isinvalid(x)
def any_():
    """
    Spec that accepts every value, conforming it to itself.
    """
    return Any()
def never():
    """
    Spec that accepts no value; conforming always fails.
    """
    return Never()
def equal_to(x: object) -> EqualTo:
    """Spec accepting only values equal to x."""
    return EqualTo(x)
def is_instance(t: type) -> IsInstance:
    """Spec accepting instances of t."""
    return IsInstance(t)
def even() -> Even:
    """Spec for even numbers."""
    return Even()
def odd() -> Odd:
    """Spec for odd numbers."""
    return Odd()
def is_none() -> IsNone:
    """Spec accepting only None."""
    return IsNone()
def in_range(start, end_exclusive=None) -> InRange:
    """Spec delegating to InRange(start, end_exclusive)."""
    return InRange(start, end_exclusive)
def gt(value) -> InRange:
    """Spec: strictly greater than value."""
    return Gt(value)
def lt(value) -> InRange:
    """Spec: strictly less than value."""
    return Lt(value)
def gte(value) -> InRange:
    """Spec: greater than or equal to value."""
    return Gte(value)
def lte(value) -> InRange:
    """Spec: less than or equal to value."""
    return Lte(value)
def is_in(coll: Iterable) -> IsIn:
    """Spec: membership in coll (snapshotted into a frozenset)."""
    return IsIn(frozenset(coll))
def coerce(coercer: Callable[[object], object],
           s: Speccable,
           explain_coercion_failure: Callable[[object], str] = None) \
        -> Coerce:
    """
    Build a spec that applies coercer to the value before delegating
    conformance and explanation to s.
    The default coercion-failure message can be overridden by passing
    explain_coercion_failure.
    """
    return Coerce(coercer, specize(s), explain_coercion_failure=explain_coercion_failure)
def decorated(x: Speccable, description: str = None):
    """Wrap x in a DecoratedSpec, optionally overriding its description."""
    return impl.DecoratedSpec(specize(x), description=description)
def specize(x: Speccable) -> Spec:
    """
    Turn a Speccable (a Spec, a type, a set, or an arity-1 predicate) into a
    Spec. Public and in spec.core, but you'll probably never need it directly.
    """
    if isinstance(x, Spec):
        return x
    if isinstance(x, type):
        return is_instance(x)
    if isinstance(x, Set):
        return is_in(x)
    if not callable(x):
        raise ValueError("I don't know how to turn a {} into a spec: {}".format(type(x), x))
    # Pick the most informative description available for the predicate.
    if hasattr(x, '__name__'):
        description = x.__name__
    elif hasattr(x, '__code__'):
        description = x.__code__.co_name
    else:
        description = a_or_an(type(x).__name__)
    return SimpleSpec(description, x)
def conform(s: Speccable, x: object) -> SpecResult:
    """
    Conform x against s: returns spec.core::INVALID when x does not match,
    otherwise the (possibly destructured) value.
    """
    return specize(s).conform(x)
def explain_data(s: Speccable, x: object) -> Optional[Explanation]:
    """
    Returns None when x conforms to s, otherwise an Explanation holding the
    Problems found.
    """
    problems = specize(s).explain(path(), x)
    if problems:
        return Explanation.with_problems(*problems)
    return None
def describe(s: Speccable) -> str:
    """Human-readable description of the spec."""
    return specize(s).describe()
def isspec(x: object):
    """True when x already is a Spec instance."""
    return isinstance(x, Spec)
def assert_spec(s: Speccable, x: object) -> object:
    """Return the conformed value, raising on failure (see spec.impl.core.assert_spec)."""
    return impl.assert_spec(specize(s), x)
def coll_of(s: Speccable):
    """Spec over a collection whose every element must conform to s."""
    return CollOf(specize(s))
def one_of(*ss: Speccable):
    """Combine ss with OneOf semantics (see spec.impl.specs.OneOf)."""
    return OneOf([specize(s) for s in ss])
def all_of(*ss: Speccable):
    """Combine ss with AllOf semantics (see spec.impl.specs.AllOf)."""
    return AllOf([specize(s) for s in ss])
def dict_spec(d: Dict[object, Speccable]):
    """Spec over a dict: each value is specized; nested dicts become nested dict_specs."""
    def to_spec(v):
        return dict_spec(v) if isinstance(v, dict) else specize(v)
    return DictSpec({key: to_spec(value) for key, value in d.items()})
def dict_example(d: Dict[object, Speccable]):
    """
    Like dict_spec, but values that are neither specs nor dicts are treated as
    literal examples (wrapped in equal_to); nested dicts recurse.
    """
    def example_spec(v):
        try:
            v = specize(v)
        except Exception:
            # Fix: was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt. A value that cannot be specized falls through
            # to the literal-example handling below.
            pass
        if isspec(v):
            return v
        elif isinstance(v, dict):
            return dict_example(v)
        else:
            return equal_to(v)
    # (redundant double parentheses around the dict comprehension removed)
    return dict_spec({k: example_spec(v) for k, v in d.items()})
|
{"/spec/impl/iterables.py": ["/spec/impl/core.py"], "/tests/spec/test_core.py": ["/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/spec/impl/specs.py": ["/spec/impl/core.py", "/spec/impl/util/strings.py"], "/spec/impl/records/core.py": ["/spec/core.py", "/spec/impl/dicts.py", "/spec/impl/records/annotations.py", "/spec/impl/records/forwardrefs.py", "/spec/impl/records/typevars.py"], "/spec/coercions.py": ["/spec/core.py"], "/spec/core.py": ["/spec/impl/core.py", "/spec/impl/dicts.py", "/spec/impl/iterables.py", "/spec/impl/specs.py", "/spec/impl/util/strings.py"], "/spec/impl/records/annotations.py": ["/spec/impl/records/typevars.py"], "/spec/impl/records/typevars.py": ["/spec/impl/core.py"], "/spec/impl/records/forwardrefs.py": ["/spec/impl/core.py", "/spec/impl/records/annotations.py"], "/tests/spec/test_coercions.py": ["/spec/coercions.py"], "/spec/impl/core.py": ["/spec/impl/util/callables.py"], "/tests/spec/test_records.py": ["/spec/core.py", "/spec/impl/core.py", "/spec/impl/records/core.py"], "/tests/spec/test_dict.py": ["/spec/coercions.py", "/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/tests/spec/support.py": ["/spec/core.py", "/spec/impl/core.py"], "/spec/impl/dicts.py": ["/spec/impl/core.py", "/spec/impl/specs.py"]}
|
30,061
|
dagron/pyspec
|
refs/heads/master
|
/spec/impl/records/annotations.py
|
from typing import Any, Dict, _ForwardRef
from spec.impl.records.typevars import generic_class_typevars
Hint = Any
class AnnotationContext:
    """
    One annotation plus where it came from: the class it was declared on and
    that class's TypeVar bindings.
    """
    annotation: Hint
    class_annotation_was_on: type
    typevars_from_class: Dict[str, Hint]
    def __init__(self,
                 annotation: Hint,
                 klass: type,
                 typevars: Dict[str, Hint]):
        self.typevars_from_class = typevars
        self.class_annotation_was_on = klass
        self.annotation = annotation
    def for_hint(self, hint: Hint) -> 'AnnotationContext':
        """Clone this context with a different annotation, keeping class and typevars."""
        return AnnotationContext(hint,
                                 self.class_annotation_was_on,
                                 self.typevars_from_class)
def extract_annotations(cls: type) -> Dict[str, AnnotationContext]:
    """
    Collect attribute annotations from cls and every class in its MRO.

    Walks the MRO most-derived first: when two classes annotate the same
    attribute with *different* hints, the most-derived annotation is kept
    (the conflicting later one is skipped via `continue`). When the hints are
    equal, the entry is re-assigned, so class_annotation_was_on ends up
    pointing at the last (most-base) class in the MRO declaring that
    annotation.
    """
    real_annotations = {}  # type: Dict[str,AnnotationContext]
    # TypeVar bindings are computed once for cls and shared by every context.
    typevars = generic_class_typevars(cls)
    for klass in cls.mro():
        for attr, annotation in getattr(klass, "__annotations__", {}).items():
            if attr in real_annotations:
                if real_annotations[attr].annotation != annotation:
                    # Conflicting hint further up the MRO: derived wins.
                    continue
            real_annotations[attr] = AnnotationContext(annotation, klass, typevars)
    return real_annotations
|
{"/spec/impl/iterables.py": ["/spec/impl/core.py"], "/tests/spec/test_core.py": ["/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/spec/impl/specs.py": ["/spec/impl/core.py", "/spec/impl/util/strings.py"], "/spec/impl/records/core.py": ["/spec/core.py", "/spec/impl/dicts.py", "/spec/impl/records/annotations.py", "/spec/impl/records/forwardrefs.py", "/spec/impl/records/typevars.py"], "/spec/coercions.py": ["/spec/core.py"], "/spec/core.py": ["/spec/impl/core.py", "/spec/impl/dicts.py", "/spec/impl/iterables.py", "/spec/impl/specs.py", "/spec/impl/util/strings.py"], "/spec/impl/records/annotations.py": ["/spec/impl/records/typevars.py"], "/spec/impl/records/typevars.py": ["/spec/impl/core.py"], "/spec/impl/records/forwardrefs.py": ["/spec/impl/core.py", "/spec/impl/records/annotations.py"], "/tests/spec/test_coercions.py": ["/spec/coercions.py"], "/spec/impl/core.py": ["/spec/impl/util/callables.py"], "/tests/spec/test_records.py": ["/spec/core.py", "/spec/impl/core.py", "/spec/impl/records/core.py"], "/tests/spec/test_dict.py": ["/spec/coercions.py", "/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/tests/spec/support.py": ["/spec/core.py", "/spec/impl/core.py"], "/spec/impl/dicts.py": ["/spec/impl/core.py", "/spec/impl/specs.py"]}
|
30,062
|
dagron/pyspec
|
refs/heads/master
|
/spec/impl/records/typevars.py
|
from pprint import pformat
from typing import TypeVar, List, Mapping
from spec.impl import specs as sis
from spec.impl.core import Spec, Path, Problem, SpecResult, INVALID, isinvalid
def generic_class_typevars(cls: type):
    """
    Map TypeVar names to the concrete arguments bound to them anywhere in
    cls's MRO, e.g. for Child(Base[int]) with Base(Generic[T]) this yields
    {'T': int}. Classes without __orig_bases__ contribute nothing.
    """
    typevars = {}
    for klass in cls.mro():
        for orig_base in getattr(klass, '__orig_bases__', []):
            for parameter, arg in extract_generic_parameters(orig_base):
                typevars[parameter.__name__] = arg
    return typevars
def extract_generic_parameters(cls: type):
    """
    Pair each TypeVar parameter of a parameterized generic base (e.g. Base[int])
    with the argument bound to it; yields nothing for non-generic bases.
    """
    # Fix: the original used map(lambda p, a: (p, a), ...), which is just
    # zip() spelled by hand — zip produces the identical pairing.
    return zip(getattr(getattr(cls, '__origin__', None), '__parameters__', []),
               getattr(cls, '__args__', []))
class UnboundTypeVar:
    """Wrapper marking a TypeVar that no concrete type has been bound to."""
    def __init__(self, t: TypeVar):
        super().__init__()
        self.typevar = t
class UnboundTypeVarSpec(sis.Any):
    """An accept-anything spec that remembers which TypeVar it stands in for."""
    def __init__(self, typevar: TypeVar):
        super().__init__()
        self.typevar = typevar
def _typevar_key(t: TypeVar):
    """
    Build a hashable, value-based key for a TypeVar from its slot values.

    If I define a class like this:
      T=TypeVar('T')
      class Foo:
        a: T
        b: T
    ...then the two annotations on a and b may be two different TypeVar
    instances. TypeVars hash and compare by object identity, not by name, so
    identity alone cannot detect that the two annotations refer to the same
    logical TypeVar; this key (name, bound, constraints, variance) can.

    NOTE(review): relies on TypeVar.__slots__, an implementation detail of
    the typing module that changes between Python versions — confirm on
    interpreter upgrades.
    """
    return tuple(getattr(t, s) for s in t.__slots__)
class UnboundTypeVarDictSpec(Spec):
    """
    Spec enforcing that all mapping entries whose attribute names share the
    same unbound TypeVar hold values of one consistent type.

    The per-group spec comes from spec_generator, which is called with the
    type of the first group member present in the checked mapping (or None
    when no group member is present).
    """
    # Sentinel; not referenced anywhere in this class as written.
    _NOT_FOUND = object()
    def __init__(self, unbound_typevar_keys, spec_generator):
        super().__init__()
        # Group attribute names by logical TypeVar identity (see _typevar_key).
        typevar_to_attr_names = {}
        for attr_name, typevar in unbound_typevar_keys.items():
            tvk = _typevar_key(typevar)
            if not tvk in typevar_to_attr_names:
                typevar_to_attr_names[tvk] = []
            typevar_to_attr_names[tvk].append(attr_name)
        self._typevar_to_attr_names = typevar_to_attr_names
        self._spec_generator = spec_generator
    def describe(self) -> str:
        return "all typevars should be the same: {}".format(pformat(self._typevar_to_attr_names))
    def explain(self, p: Path, x: object) -> List[Problem]:
        if not isinstance(x, Mapping):
            return [Problem(p, x, self, "not a Mapping")]
        problems = []
        for typevar, names in self._typevar_to_attr_names.items():
            # The first group member present in x decides the implied type.
            first_name_found = next((name for name in names if name in x), None)
            implied_type = type(x[first_name_found]) if first_name_found else None
            s = self._spec_generator(implied_type)
            for name in names:
                # NOTE(review): raises KeyError when a grouped name is absent
                # from x — presumably presence is validated upstream; confirm.
                value = x[name]
                ps = s.explain(p, value)
                problems.extend(ps)
        return problems
    def conform(self, x: object) -> SpecResult:
        if not isinstance(x, Mapping):
            return INVALID
        # Shallow copy so conforming never mutates the caller's mapping.
        result = dict(x)
        for typevar, names in self._typevar_to_attr_names.items():
            first_name_found = next((name for name in names if name in x), None)
            implied_type = type(x[first_name_found]) if first_name_found else None
            s = self._spec_generator(implied_type)
            for name in names:
                # NOTE(review): same latent KeyError as explain() for absent names.
                value = s.conform(x[name])
                if isinvalid(value):
                    return INVALID
                result[name] = value
        return result
|
{"/spec/impl/iterables.py": ["/spec/impl/core.py"], "/tests/spec/test_core.py": ["/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/spec/impl/specs.py": ["/spec/impl/core.py", "/spec/impl/util/strings.py"], "/spec/impl/records/core.py": ["/spec/core.py", "/spec/impl/dicts.py", "/spec/impl/records/annotations.py", "/spec/impl/records/forwardrefs.py", "/spec/impl/records/typevars.py"], "/spec/coercions.py": ["/spec/core.py"], "/spec/core.py": ["/spec/impl/core.py", "/spec/impl/dicts.py", "/spec/impl/iterables.py", "/spec/impl/specs.py", "/spec/impl/util/strings.py"], "/spec/impl/records/annotations.py": ["/spec/impl/records/typevars.py"], "/spec/impl/records/typevars.py": ["/spec/impl/core.py"], "/spec/impl/records/forwardrefs.py": ["/spec/impl/core.py", "/spec/impl/records/annotations.py"], "/tests/spec/test_coercions.py": ["/spec/coercions.py"], "/spec/impl/core.py": ["/spec/impl/util/callables.py"], "/tests/spec/test_records.py": ["/spec/core.py", "/spec/impl/core.py", "/spec/impl/records/core.py"], "/tests/spec/test_dict.py": ["/spec/coercions.py", "/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/tests/spec/support.py": ["/spec/core.py", "/spec/impl/core.py"], "/spec/impl/dicts.py": ["/spec/impl/core.py", "/spec/impl/specs.py"]}
|
30,063
|
dagron/pyspec
|
refs/heads/master
|
/spec/impl/records/forwardrefs.py
|
import sys
from typing import _ForwardRef, Callable, List, Union
from spec.impl.core import Spec, Path, Problem, SpecResult
from spec.impl.records.annotations import AnnotationContext
def resolve_forward_ref(ac: AnnotationContext):
    """
    Resolve a (possibly string or _ForwardRef) annotation to the real type.

    Lookup order: the module the annotating class was defined in, then the
    builtins; raises NameError if neither defines the name. Non-string
    annotations are returned unchanged.
    """
    import builtins
    typeref = ac.annotation
    if isinstance(typeref, _ForwardRef):
        typeref = ac.annotation.__forward_arg__
    module = sys.modules[ac.class_annotation_was_on.__module__]
    if isinstance(typeref, str):
        # noinspection PyUnresolvedReferences
        if hasattr(module, typeref):
            return getattr(module, typeref)
        # Fix: the original tested `typeref in __builtins__` and indexed it.
        # __builtins__ is a CPython implementation detail that is a dict in
        # imported modules but the builtins *module* in __main__, where the
        # `in`/[] operations fail. The builtins module is the reliable spelling.
        elif hasattr(builtins, typeref):
            return getattr(builtins, typeref)
        else:
            raise NameError("name '{}' is not defined in '{}'".format(
                typeref, module.__name__
            ))
    else:
        return typeref
class DeferredSpecFromForwardReference(Spec):
    """
    Spec that lazily resolves a forward-referenced type on first use and
    caches the spec built from it; all Spec operations delegate to the
    resolved spec.
    """
    def __init__(self, spec_factory: Callable[[type], Spec], forward_reference_resolver: Callable[[], type]):
        super().__init__()
        self._spec_factory = spec_factory
        self._forward_reference_resolver = forward_reference_resolver
        self._resolved_spec = None
    def _resolve_spec(self) -> Spec:
        # Fix: identity check against None. The original truthiness test
        # (`if not self._resolved_spec`) would re-run the resolver and factory
        # on every call if the cached spec happened to be falsy.
        if self._resolved_spec is None:
            resolved_hint = self._forward_reference_resolver()
            self._resolved_spec = self._spec_factory(resolved_hint)
        return self._resolved_spec
    def describe(self) -> str:
        return self._resolve_spec().describe()
    def explain(self, p: Path, x: object) -> List[Problem]:
        return self._resolve_spec().explain(p, x)
    def conform(self, x: object) -> SpecResult:
        return self._resolve_spec().conform(x)
|
{"/spec/impl/iterables.py": ["/spec/impl/core.py"], "/tests/spec/test_core.py": ["/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/spec/impl/specs.py": ["/spec/impl/core.py", "/spec/impl/util/strings.py"], "/spec/impl/records/core.py": ["/spec/core.py", "/spec/impl/dicts.py", "/spec/impl/records/annotations.py", "/spec/impl/records/forwardrefs.py", "/spec/impl/records/typevars.py"], "/spec/coercions.py": ["/spec/core.py"], "/spec/core.py": ["/spec/impl/core.py", "/spec/impl/dicts.py", "/spec/impl/iterables.py", "/spec/impl/specs.py", "/spec/impl/util/strings.py"], "/spec/impl/records/annotations.py": ["/spec/impl/records/typevars.py"], "/spec/impl/records/typevars.py": ["/spec/impl/core.py"], "/spec/impl/records/forwardrefs.py": ["/spec/impl/core.py", "/spec/impl/records/annotations.py"], "/tests/spec/test_coercions.py": ["/spec/coercions.py"], "/spec/impl/core.py": ["/spec/impl/util/callables.py"], "/tests/spec/test_records.py": ["/spec/core.py", "/spec/impl/core.py", "/spec/impl/records/core.py"], "/tests/spec/test_dict.py": ["/spec/coercions.py", "/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/tests/spec/support.py": ["/spec/core.py", "/spec/impl/core.py"], "/spec/impl/dicts.py": ["/spec/impl/core.py", "/spec/impl/specs.py"]}
|
30,064
|
dagron/pyspec
|
refs/heads/master
|
/tests/spec/test_coercions.py
|
from urllib.parse import ParseResult
from spec.coercions import Url
def test_url():
    result = Url.conform("http://google.com")  # type:ParseResult
    assert "http" == result.scheme
|
{"/spec/impl/iterables.py": ["/spec/impl/core.py"], "/tests/spec/test_core.py": ["/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/spec/impl/specs.py": ["/spec/impl/core.py", "/spec/impl/util/strings.py"], "/spec/impl/records/core.py": ["/spec/core.py", "/spec/impl/dicts.py", "/spec/impl/records/annotations.py", "/spec/impl/records/forwardrefs.py", "/spec/impl/records/typevars.py"], "/spec/coercions.py": ["/spec/core.py"], "/spec/core.py": ["/spec/impl/core.py", "/spec/impl/dicts.py", "/spec/impl/iterables.py", "/spec/impl/specs.py", "/spec/impl/util/strings.py"], "/spec/impl/records/annotations.py": ["/spec/impl/records/typevars.py"], "/spec/impl/records/typevars.py": ["/spec/impl/core.py"], "/spec/impl/records/forwardrefs.py": ["/spec/impl/core.py", "/spec/impl/records/annotations.py"], "/tests/spec/test_coercions.py": ["/spec/coercions.py"], "/spec/impl/core.py": ["/spec/impl/util/callables.py"], "/tests/spec/test_records.py": ["/spec/core.py", "/spec/impl/core.py", "/spec/impl/records/core.py"], "/tests/spec/test_dict.py": ["/spec/coercions.py", "/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/tests/spec/support.py": ["/spec/core.py", "/spec/impl/core.py"], "/spec/impl/dicts.py": ["/spec/impl/core.py", "/spec/impl/specs.py"]}
|
30,065
|
dagron/pyspec
|
refs/heads/master
|
/spec/impl/core.py
|
from abc import ABCMeta, abstractmethod
from pprint import pformat
from typing import Callable, Union, List, Iterable, Set, NamedTuple, Dict
from typing import Tuple
from spec.impl.util.callables import can_be_called_with_one_argument
class Invalid:
    """Marker type for a failed conform; every instance compares equal to every other."""
    def __repr__(self):
        return "<INVALID>"
    def __str__(self, *args, **kwargs):
        return repr(self)
    def __eq__(self, other):
        return isinstance(other, Invalid)
    def __ne__(self, other):
        return not self.__eq__(other)
    def __hash__(self):
        # All Invalid instances are interchangeable, so they share one hash.
        return 42
INVALID = Invalid()
def isvalid(x) -> bool:
    """True unless x is the INVALID marker."""
    return INVALID != x
def isinvalid(x) -> bool:
    """True when x is the INVALID marker."""
    return not isvalid(x)
SpecResult = Union[Invalid, object]
PathElement = Union[str, int, object]
Path = Tuple[PathElement, ...]
def path(*elements: Iterable[PathElement]) -> Path:
    """Build a Path tuple from its elements."""
    return tuple(elements)
class Problem(NamedTuple):
    """One reason a value failed a spec: where (path), what (value), which spec rejected it, and a human-readable reason."""
    path: Path
    value: object
    spec: 'Spec'
    reason: str
class Explanation:
    """Immutable bundle of the Problems produced by a failed explain()."""
    def __init__(self, problems: Iterable[Problem]):
        self._problems = tuple(problems)
    @classmethod
    def with_problems(cls, *problems: Iterable[Problem]) -> 'Explanation':
        """Alternate constructor taking the problems as varargs."""
        return Explanation(problems)
    @property
    def problems(self) -> Tuple[Problem, ...]:
        return self._problems
    def __eq__(self, other):
        if type(self) is not type(other):
            return NotImplemented
        return self.problems == other.problems
    def __ne__(self, other):
        return not self == other
    def __hash__(self, *args, **kwargs):
        return hash(self.problems)
    def __str__(self, *args, **kwargs):
        return pformat(self._problems)
class SpecError(RuntimeError):
    """Raised when a value fails to conform (see assert_spec)."""
    def __init__(self, value: object, explanation: Explanation):
        RuntimeError.__init__(self, "\nValue:\n{}\n\nProblems:\n{}".format(value, explanation))
        self._explanation = explanation
        self._value = value
    @property
    def value(self) -> object:
        """The offending value."""
        return self._value
    @property
    def explanation(self) -> Explanation:
        """The collected Problems explaining the failure."""
        return self._explanation
class Spec(metaclass=ABCMeta):
    """
    Abstract base for all specs: conform a value, explain failures, and
    describe yourself.
    """
    @abstractmethod
    def conform(self, x: object) -> SpecResult:
        """Return the (possibly destructured) value, or INVALID when x does not match."""
        raise NotImplementedError()
    @abstractmethod
    def explain(self, p: Path, x: object) -> List[Problem]:
        """Return Problems (rooted at path p) describing why x fails; empty when it conforms."""
        raise NotImplementedError()
    @abstractmethod
    def describe(self) -> str:
        """Human-readable description of this spec."""
        raise NotImplementedError()
    def __str__(self, *args, **kwargs):
        return self.describe()
    def __repr__(self) -> str:
        return self.describe()
class DelegatingSpec(Spec):
    """Base class for specs that forward every operation to a wrapped Spec."""
    def __init__(self, delegate: Spec):
        self._delegate = delegate
    def describe(self) -> str:
        return self._delegate.describe()
    def conform(self, x) -> SpecResult:
        return self._delegate.conform(x)
    def explain(self, p: Path, x: object) -> List[Problem]:
        return self._delegate.explain(p, x)
class DecoratedSpec(DelegatingSpec):
    """Delegating spec whose description can be overridden."""
    def __init__(self, delegate: Spec, description: str = None):
        super().__init__(delegate)
        self._description = description
    def describe(self) -> str:
        if self._description:
            return self._description
        return super().describe()
class SimpleSpec(Spec):
    """
    Spec built from an arity-1 predicate and a static description.

    Unless a custom explain callable is supplied, a failing check yields a
    single problem with reason "not {description}".
    """
    def __init__(self,
                 description: str,
                 check: Callable[[object], bool],
                 explain: Callable[[object], str] = None):
        super().__init__()
        if not explain:
            explain = lambda x: "not {}".format(description)
        # noinspection PyTypeChecker
        if not can_be_called_with_one_argument(check):
            raise TypeError("Expected arity 1 callable as check but got {}".format(check))
        # noinspection PyTypeChecker
        if not can_be_called_with_one_argument(explain):
            raise TypeError("Expected arity 1 callable as explain but got {}".format(explain))
        self._description = description  # type:str
        self._check = check  # type:Callable[[object], bool]
        self._explain = explain
    def describe(self) -> str:
        return self._description
    def conform(self, x) -> SpecResult:
        return x if self._check(x) else INVALID
    def explain(self, p: Path, x: object) -> List[Problem]:
        if not self._check(x):
            return [Problem(p, x, self, self._explain(x))]
        return []
PredFn = Callable[[object], bool]
Speccable = Union[Spec, PredFn, Set, Dict]
def assert_spec(s: Spec, x: object):
    """Return the conformed value, raising SpecError when x fails to conform to s."""
    result = s.conform(x)
    if isinvalid(result):
        raise SpecError(x, Explanation.with_problems(*s.explain(path(), x)))
    return result
|
{"/spec/impl/iterables.py": ["/spec/impl/core.py"], "/tests/spec/test_core.py": ["/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/spec/impl/specs.py": ["/spec/impl/core.py", "/spec/impl/util/strings.py"], "/spec/impl/records/core.py": ["/spec/core.py", "/spec/impl/dicts.py", "/spec/impl/records/annotations.py", "/spec/impl/records/forwardrefs.py", "/spec/impl/records/typevars.py"], "/spec/coercions.py": ["/spec/core.py"], "/spec/core.py": ["/spec/impl/core.py", "/spec/impl/dicts.py", "/spec/impl/iterables.py", "/spec/impl/specs.py", "/spec/impl/util/strings.py"], "/spec/impl/records/annotations.py": ["/spec/impl/records/typevars.py"], "/spec/impl/records/typevars.py": ["/spec/impl/core.py"], "/spec/impl/records/forwardrefs.py": ["/spec/impl/core.py", "/spec/impl/records/annotations.py"], "/tests/spec/test_coercions.py": ["/spec/coercions.py"], "/spec/impl/core.py": ["/spec/impl/util/callables.py"], "/tests/spec/test_records.py": ["/spec/core.py", "/spec/impl/core.py", "/spec/impl/records/core.py"], "/tests/spec/test_dict.py": ["/spec/coercions.py", "/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/tests/spec/support.py": ["/spec/core.py", "/spec/impl/core.py"], "/spec/impl/dicts.py": ["/spec/impl/core.py", "/spec/impl/specs.py"]}
|
30,066
|
dagron/pyspec
|
refs/heads/master
|
/tests/spec/test_records.py
|
import pytest
from typing import List, Optional, TypeVar, Generic, Any, ClassVar
from spec.core import assert_spec
from spec.impl.core import SpecError
from spec.impl.records.core import spec_from, Record
def check_spec_error(s, value, expected_error_text):
    """Assert that assert_spec(s, value) raises a SpecError mentioning expected_error_text."""
    try:
        assert_spec(s, value)
    except SpecError as e:
        assert expected_error_text in str(e)
    else:
        assert False, "Expected exception"
class JustPrimitive(Record):
    k: int
def test_primitives():
    spec = spec_from(JustPrimitive)
    conformed = assert_spec(spec, {'k': 123})
    assert {'k': 123} == conformed
    check_spec_error(spec, {'k': "not an int"}, "not an int")
class HasList(Record):
    k: List[int]
def test_list():
    spec = spec_from(HasList)
    conformed = assert_spec(spec, {'k': [123, 456]})
    assert {'k': [123, 456]} == conformed
    check_spec_error(spec, {'k': ["not an int"]}, "not an int")
class HasOptional(Record):
    k: Optional[int]
def test_optional():
    spec = spec_from(HasOptional)
    assert {'k': 123} == assert_spec(spec, {'k': 123})
    assert {'k': None} == assert_spec(spec, {'k': None})
    check_spec_error(spec, {'k': "not an int"}, "not an int")
class HasForwardReference(Record):
    k: Optional['HasForwardReference']
def test_forward_references():
    spec = spec_from(HasForwardReference)
    conformed = assert_spec(spec, {'k': {'k': None}})
    assert {'k': {'k': None}} == conformed
    check_spec_error(spec, {'k': "not a NeedsForwardReference"}, "not a NeedsForwardReference")
class HasListsOfForwardReference(Record):
    k: List['HasListsOfForwardReference']
def test_lists_of_forward_references():
    spec = spec_from(HasListsOfForwardReference)
    conformed = assert_spec(spec, {'k': [{'k': []}]})
    assert {'k': [{'k': []}]} == conformed
    check_spec_error(spec, {'k': ["not a NeedsForwardReference"]}, "not a NeedsForwardReference")
T = TypeVar('T')
V = TypeVar('V')
class IsGenericSuperclass(Generic[T, V], Record):
    t: T
    v: V
class BoundGeneric(IsGenericSuperclass[int, str]):
    pass
def test_generic_typevars_unconstrained_bound():
    spec = spec_from(BoundGeneric)
    conformed = assert_spec(spec, {'t': 123, 'v': 'string'})
    assert {'t': 123, 'v': 'string'} == conformed
    check_spec_error(spec, {'t': "not an int", 'v': 'string'}, "not an int")
class UnboundGeneric(IsGenericSuperclass[int, V]):
    another_v: V
def test_generic_typevars_unconstrained_unbound():
    spec = spec_from(UnboundGeneric)
    conformed = assert_spec(spec, {'t': 123, 'v': "V type", 'another_v': "V type"})
    assert {'t': 123, 'v': "V type", 'another_v': "V type"} == conformed
    # Should not conform if all annotations marked with unbound generic V
    # are not of the same type
    int_valued = 123
    str_valued = "mooooo"
    check_spec_error(spec, {'t': 123, 'v': int_valued, 'another_v': str_valued}, str_valued)
class NonGenericWithTypevars(Record):
    a: T
    b: T
def test_non_generic_class_with_typevar_annotations():
    spec = spec_from(NonGenericWithTypevars)
    conformed = assert_spec(spec, {'a': 123, 'b': 456})
    assert {'a': 123, 'b': 456} == conformed
    # Need to ensure all annotations marked with unbound generic V
    # are of the same type
    int_valued = 123
    str_valued = "mooooo"
    check_spec_error(spec, {'a': int_valued, 'b': str_valued}, str_valued)
class HasAny(Record):
    a: Any
def test_any():
    spec = spec_from(HasAny)
    conformed = assert_spec(spec, {'a': "Whatever"})
    assert {'a': "Whatever"} == conformed
class HasClassVar(Record):
    a: ClassVar[int]
@pytest.mark.skip(reason="wip")
def test_classvar_should_never_appear():
    spec = spec_from(HasClassVar)
    conformed = assert_spec(spec, {})
    assert {} == conformed
    check_spec_error(spec, {'a': 123}, "ClassVar")
    check_spec_error(spec, {'a': "wrong type doesn't matter"}, "ClassVar")
# # Super-special typing primitives.
# 'Callable',
# 'ClassVar',
# 'Generic',
# 'Optional',
# 'Tuple',
# 'Type',
# 'TypeVar',
# 'Union',
#
# # ABCs (from collections.abc).
# 'AbstractSet', # collections.abc.Set.
# 'ByteString',
# 'Container',
# 'Hashable',
# 'ItemsView',
# 'Iterable',
# 'Iterator',
# 'KeysView',
# 'Mapping',
# 'MappingView',
# 'MutableMapping',
# 'MutableSequence',
# 'MutableSet',
# 'Sequence',
# 'Sized',
# 'ValuesView',
# # The following are added depending on presence
# # of their non-generic counterparts in stdlib:
# # Awaitable,
# # AsyncIterator,
# # AsyncIterable,
# # Coroutine,
# # Collection,
# # ContextManager
#
# # Structural checks, a.k.a. protocols.
# 'Reversible',
# 'SupportsAbs',
# 'SupportsFloat',
# 'SupportsInt',
# 'SupportsRound',
#
# # Concrete collection types.
# 'Dict',
# 'DefaultDict',
# 'List',
# 'Set',
# 'FrozenSet',
# 'NamedTuple', # Not really a type.
# 'Generator',
#
# # One-off things.
# 'AnyStr',
# 'cast',
# 'get_type_hints',
# 'NewType',
# 'no_type_check',
# 'no_type_check_decorator',
# 'overload',
# 'Text',
# 'TYPE_CHECKING',
|
{"/spec/impl/iterables.py": ["/spec/impl/core.py"], "/tests/spec/test_core.py": ["/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/spec/impl/specs.py": ["/spec/impl/core.py", "/spec/impl/util/strings.py"], "/spec/impl/records/core.py": ["/spec/core.py", "/spec/impl/dicts.py", "/spec/impl/records/annotations.py", "/spec/impl/records/forwardrefs.py", "/spec/impl/records/typevars.py"], "/spec/coercions.py": ["/spec/core.py"], "/spec/core.py": ["/spec/impl/core.py", "/spec/impl/dicts.py", "/spec/impl/iterables.py", "/spec/impl/specs.py", "/spec/impl/util/strings.py"], "/spec/impl/records/annotations.py": ["/spec/impl/records/typevars.py"], "/spec/impl/records/typevars.py": ["/spec/impl/core.py"], "/spec/impl/records/forwardrefs.py": ["/spec/impl/core.py", "/spec/impl/records/annotations.py"], "/tests/spec/test_coercions.py": ["/spec/coercions.py"], "/spec/impl/core.py": ["/spec/impl/util/callables.py"], "/tests/spec/test_records.py": ["/spec/core.py", "/spec/impl/core.py", "/spec/impl/records/core.py"], "/tests/spec/test_dict.py": ["/spec/coercions.py", "/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/tests/spec/support.py": ["/spec/core.py", "/spec/impl/core.py"], "/spec/impl/dicts.py": ["/spec/impl/core.py", "/spec/impl/specs.py"]}
|
30,067
|
dagron/pyspec
|
refs/heads/master
|
/tests/spec/test_dict.py
|
from uuid import UUID
import spec.coercions as sc
from spec.core import equal_to, in_range, dict_spec, dict_example
from spec.impl.core import Problem, path
from tests.spec.support import check_spec
def test_dict_example_treats_values_as_equal_to_spec():
    good_uuid = UUID('80b71e04-9862-462b-ac0c-0c34dc272c7b')
    s = dict_example({'k': good_uuid})
    check_spec(s, {'k': good_uuid})
    bad_uuid = UUID('a5bef1a0-d139-49d3-91ff-79a69aa39759')
    expected_problem = Problem(
        path('k'),
        bad_uuid,
        equal_to(good_uuid),
        "expected 80b71e04-9862-462b-ac0c-0c34dc272c7b (UUID) but got a5bef1a0-d139-49d3-91ff-79a69aa39759 (UUID)")
    check_spec(s, {'k': bad_uuid}, [expected_problem])
def test_dict_example_treats_dict_values_as_more_dict_examples():
    good_uuid = UUID('80b71e04-9862-462b-ac0c-0c34dc272c7b')
    s = dict_example({'j': {'k': good_uuid}})
    check_spec(s, {'j': {'k': good_uuid}})
    bad_uuid = UUID('a5bef1a0-d139-49d3-91ff-79a69aa39759')
    expected_problem = Problem(
        path('j', 'k'),
        bad_uuid,
        equal_to(good_uuid),
        "expected 80b71e04-9862-462b-ac0c-0c34dc272c7b (UUID) but got a5bef1a0-d139-49d3-91ff-79a69aa39759 (UUID)")
    check_spec(s, {'j': {'k': bad_uuid}}, [expected_problem])
def test_dict_spec_returns_conformed_values():
    uuid_spec = dict_spec({'k': sc.Uuid})
    conformed_uuid = UUID('80b71e04-9862-462b-ac0c-0c34dc272c7b')
    raw_string = str(conformed_uuid)
    check_spec(uuid_spec, {'k': raw_string}, expected_conform={'k': conformed_uuid})
|
{"/spec/impl/iterables.py": ["/spec/impl/core.py"], "/tests/spec/test_core.py": ["/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/spec/impl/specs.py": ["/spec/impl/core.py", "/spec/impl/util/strings.py"], "/spec/impl/records/core.py": ["/spec/core.py", "/spec/impl/dicts.py", "/spec/impl/records/annotations.py", "/spec/impl/records/forwardrefs.py", "/spec/impl/records/typevars.py"], "/spec/coercions.py": ["/spec/core.py"], "/spec/core.py": ["/spec/impl/core.py", "/spec/impl/dicts.py", "/spec/impl/iterables.py", "/spec/impl/specs.py", "/spec/impl/util/strings.py"], "/spec/impl/records/annotations.py": ["/spec/impl/records/typevars.py"], "/spec/impl/records/typevars.py": ["/spec/impl/core.py"], "/spec/impl/records/forwardrefs.py": ["/spec/impl/core.py", "/spec/impl/records/annotations.py"], "/tests/spec/test_coercions.py": ["/spec/coercions.py"], "/spec/impl/core.py": ["/spec/impl/util/callables.py"], "/tests/spec/test_records.py": ["/spec/core.py", "/spec/impl/core.py", "/spec/impl/records/core.py"], "/tests/spec/test_dict.py": ["/spec/coercions.py", "/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/tests/spec/support.py": ["/spec/core.py", "/spec/impl/core.py"], "/spec/impl/dicts.py": ["/spec/impl/core.py", "/spec/impl/specs.py"]}
|
30,068
|
dagron/pyspec
|
refs/heads/master
|
/tests/spec/support.py
|
from typing import Optional, Iterable
from spec.core import conform, explain_data, INVALID, specize, Speccable
from spec.impl.core import Problem, path, Explanation
UNDEFINED = object()
def check_spec(s: Speccable,
               value: object,
               expected_problems: Optional[Iterable[Problem]] = None,
               expected_conform: object = UNDEFINED):
    """
    Assert that spec `s` explains and conforms `value` as expected.

    If `expected_problems` is given, the explanation must consist of exactly
    those problems and conform() must yield INVALID, so supplying
    `expected_conform` alongside problems is a caller error. Without expected
    problems, conform() must return `expected_conform` (defaulting to `value`).

    Always adds path("added_by_check_spec") to the explain() call, to ensure
    specs extend paths correctly when reporting problems.
    """
    if expected_problems:
        expected_explanation = Explanation.with_problems(*expected_problems)
        # Identity check against the sentinel: `!=` would consult the
        # value's __eq__/__ne__, which may be arbitrarily permissive.
        if expected_conform is not UNDEFINED:
            raise ValueError("Conform should always be INVALID if explain() is invalid")
        expected_conform = INVALID
    else:
        expected_problems = []
        expected_explanation = None
        if expected_conform is UNDEFINED:
            expected_conform = value
    assert explain_data(s, value) == expected_explanation, "\nexpected:\n{}\n\nbut was:\n{}".format(
        str(expected_explanation), str(explain_data(s, value)))
    assert conform(s, value) == expected_conform, "\nexpected:\n{}\n\nbut was:\n{}".format(str(expected_conform),
                                                                                          str(conform(s, value)))
    path_element = "added_by_check_spec"
    problems_which_should_include_path = specize(s).explain(path(path_element), value)
    for p in problems_which_should_include_path:
        assert len(p.path) >= 1 and p.path[0] == path_element, \
            "spec {} might not be extending paths correctly in explain".format(type(s))
|
{"/spec/impl/iterables.py": ["/spec/impl/core.py"], "/tests/spec/test_core.py": ["/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/spec/impl/specs.py": ["/spec/impl/core.py", "/spec/impl/util/strings.py"], "/spec/impl/records/core.py": ["/spec/core.py", "/spec/impl/dicts.py", "/spec/impl/records/annotations.py", "/spec/impl/records/forwardrefs.py", "/spec/impl/records/typevars.py"], "/spec/coercions.py": ["/spec/core.py"], "/spec/core.py": ["/spec/impl/core.py", "/spec/impl/dicts.py", "/spec/impl/iterables.py", "/spec/impl/specs.py", "/spec/impl/util/strings.py"], "/spec/impl/records/annotations.py": ["/spec/impl/records/typevars.py"], "/spec/impl/records/typevars.py": ["/spec/impl/core.py"], "/spec/impl/records/forwardrefs.py": ["/spec/impl/core.py", "/spec/impl/records/annotations.py"], "/tests/spec/test_coercions.py": ["/spec/coercions.py"], "/spec/impl/core.py": ["/spec/impl/util/callables.py"], "/tests/spec/test_records.py": ["/spec/core.py", "/spec/impl/core.py", "/spec/impl/records/core.py"], "/tests/spec/test_dict.py": ["/spec/coercions.py", "/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/tests/spec/support.py": ["/spec/core.py", "/spec/impl/core.py"], "/spec/impl/dicts.py": ["/spec/impl/core.py", "/spec/impl/specs.py"]}
|
30,069
|
dagron/pyspec
|
refs/heads/master
|
/spec/impl/dicts.py
|
import pprint
from typing import Dict, List
from spec.impl.core import Spec, SpecResult, Path, Problem, path, INVALID, isinvalid
from spec.impl.specs import EqualTo
def isspec(x: object):
    """Return True when *x* is already a Spec instance."""
    return isinstance(x, Spec)
def _value_spec(possibly_a_spec):
    """Coerce *possibly_a_spec* into a Spec.

    Specs pass through unchanged, dicts become nested DictSpecs, and any
    other value becomes an EqualTo spec on that value.
    """
    if isspec(possibly_a_spec):
        return possibly_a_spec
    if isinstance(possibly_a_spec, dict):
        return DictSpec(possibly_a_spec)
    return EqualTo(possibly_a_spec)
def _acceptably_dict_like(x):
return isinstance(x, dict) or not [a for a in {'__getitem__', '__iter__', '__contains__'} if not hasattr(x, a)]
class DictSpec(Spec):
    """Spec for dictionaries: every key in `key_to_spec` must be present in
    the checked value, and each value must satisfy its corresponding sub-spec."""

    def __init__(self, key_to_spec: Dict[object, Spec]):
        self._key_to_spec = key_to_spec

    def describe(self) -> str:
        return "Dict:\n{}".format(pprint.pformat(self._key_to_spec))

    def conform(self, x: Dict) -> SpecResult:
        """Return a dict with every value conformed by its sub-spec, or
        INVALID when `x` is not dict-like, a key is absent, or any sub-spec
        rejects its value."""
        if not _acceptably_dict_like(x):
            return INVALID
        result = {}
        for k, s in self._key_to_spec.items():
            if k not in x:
                return INVALID
            conformed = s.conform(x[k])
            if isinvalid(conformed):
                return INVALID
            result[k] = conformed
        return result

    def explain(self, p: Path, x: object) -> List[Problem]:
        """Return a (possibly empty) list of Problems describing why `x`
        fails this spec; nested problem paths are extended with the key."""
        if not _acceptably_dict_like(x):
            return [Problem(p, x, self, "not a dictionary {}".format(type(x)))]
        problems = []
        for k, s in self._key_to_spec.items():
            if k not in x:
                # BUG FIX: a bare string used to be appended here, violating
                # the List[Problem] contract (callers read problem.path).
                problems.append(Problem(p + path(k), x, self, "Missing {}".format(k)))
                continue
            subspec_problems = s.explain(p + path(k), x[k])
            if subspec_problems:
                problems.extend(subspec_problems)
        return problems
|
{"/spec/impl/iterables.py": ["/spec/impl/core.py"], "/tests/spec/test_core.py": ["/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/spec/impl/specs.py": ["/spec/impl/core.py", "/spec/impl/util/strings.py"], "/spec/impl/records/core.py": ["/spec/core.py", "/spec/impl/dicts.py", "/spec/impl/records/annotations.py", "/spec/impl/records/forwardrefs.py", "/spec/impl/records/typevars.py"], "/spec/coercions.py": ["/spec/core.py"], "/spec/core.py": ["/spec/impl/core.py", "/spec/impl/dicts.py", "/spec/impl/iterables.py", "/spec/impl/specs.py", "/spec/impl/util/strings.py"], "/spec/impl/records/annotations.py": ["/spec/impl/records/typevars.py"], "/spec/impl/records/typevars.py": ["/spec/impl/core.py"], "/spec/impl/records/forwardrefs.py": ["/spec/impl/core.py", "/spec/impl/records/annotations.py"], "/tests/spec/test_coercions.py": ["/spec/coercions.py"], "/spec/impl/core.py": ["/spec/impl/util/callables.py"], "/tests/spec/test_records.py": ["/spec/core.py", "/spec/impl/core.py", "/spec/impl/records/core.py"], "/tests/spec/test_dict.py": ["/spec/coercions.py", "/spec/core.py", "/spec/impl/core.py", "/tests/spec/support.py"], "/tests/spec/support.py": ["/spec/core.py", "/spec/impl/core.py"], "/spec/impl/dicts.py": ["/spec/impl/core.py", "/spec/impl/specs.py"]}
|
30,102
|
jingl3s/domoticz_hydroquebec
|
refs/heads/master
|
/common/configuration_loader.py
|
#-*-coding:utf8;-*-
# qpy:3
'''
@author: 2017 jingl3s at yopmail dot com
'''
# license
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE (see the file
# LICENSE included with the distribution).
import json
import os
import shutil
class ConfigurationLoader(object):
    """Loads a JSON configuration file, copying a bundled default template
    into place (for the user to edit and rename) when none exists yet."""

    # File name of the bundled default configuration template.
    NOM_FICHIER_DEFAUT = "config_defaut.json"

    def __init__(self, dossier_configuration):
        """
        :param dossier_configuration: directory holding the configuration file
        """
        self._dossier_configuration = dossier_configuration
        self._nom_fichier_config = "config.json"
        # By default the template is looked up next to this module.
        self._chemin_fichier_config_defaut = os.path.realpath(
            os.path.dirname(__file__))

    def set_configuration_file_name(self, nom_fichier_config):
        self._nom_fichier_config = nom_fichier_config

    def set_chemin_configuration_default(self, chemin_fichier_config_defaut):
        self._chemin_fichier_config_defaut = chemin_fichier_config_defaut

    def obtenir_configuration(self):
        """
        :return: the parsed JSON configuration object
        :raises RuntimeError: when no configuration file exists yet; a default
            template is put in place first so the user can edit and rename it.
        """
        chemin_config = os.path.join(
            self._dossier_configuration, self._nom_fichier_config)
        if not os.path.exists(chemin_config):
            self._creation_configuration_defaut()
            raise RuntimeError("Veuillez configurer le fichier : {}".format(
                os.path.join(self._dossier_configuration, self._nom_fichier_config)))
        return self._charge_configuration()

    def _charge_configuration(self):
        """Read and parse the JSON configuration file."""
        with open(os.path.join(self._dossier_configuration, self._nom_fichier_config), 'r') as f:
            config = json.load(f)
        return config

    def _creation_configuration_defaut(self):
        """Copy the default template into the configuration directory without
        renaming it, creating the directory tree when needed, then raise so
        the user is prompted to edit and rename the file.

        (Renamed from the typo'd `_creatiom_configuration_defaut`; the debug
        prints of the two paths were removed.)
        """
        fichier_config_defaut = os.path.join(
            self._chemin_fichier_config_defaut, self.NOM_FICHIER_DEFAUT)
        fichier_destination = os.path.join(
            self._dossier_configuration, self.NOM_FICHIER_DEFAUT)
        # exist_ok avoids the check-then-create race of exists()+makedirs().
        os.makedirs(os.path.dirname(fichier_destination), exist_ok=True)
        if os.path.exists(fichier_config_defaut):
            shutil.copy2(fichier_config_defaut, fichier_destination)
            raise RuntimeError("\nLe fichier de configuration par defaut a ete cree : \n{}\nRenommer le : {}".format(
                fichier_destination, self._nom_fichier_config))
|
{"/hydroquebec.py": ["/common/configuration_loader.py", "/common/logger_config.py"]}
|
30,103
|
jingl3s/domoticz_hydroquebec
|
refs/heads/master
|
/common/logger_config.py
|
#-*-coding:utf8;-*-
# qpy:3
'''
@author: 2017 jingl3s at yopmail dot com
'''
# license
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE (see the file
# LICENSE included with the distribution).
import os
# import time
import logging
# import logging.handlers
class LoggerConfig(object):
    """Configures the root logger with a DEBUG-level console handler.

    ``output_dir`` and ``file_basename`` are kept in the interface for a
    file-handler configuration that is currently disabled; today only the
    output directory is created.
    """

    def __init__(self, output_dir, file_basename):
        """
        :param output_dir: directory for log files (created when missing)
        :param file_basename: base name for log files (reserved for future
            file handlers)
        """
        self.__logger = None
        if not os.path.exists(output_dir):
            os.mkdir(output_dir)
        self.__initialize_logger(output_dir, file_basename)

    def __initialize_logger(self, output_dir, file_basename):
        """Attach a formatted stream handler to the root logger at DEBUG.

        (A redundant ``setLevel(logging.WARNING)`` that was immediately
        overridden by ``setLevel(logging.DEBUG)`` was removed, along with
        large blocks of commented-out file-handler code.)
        """
        self.__logger = logging.getLogger()
        self.__logger.setLevel(logging.DEBUG)
        formatter = logging.Formatter(
            "%(asctime)s-%(levelname)7s-%(funcName)s-%(message)s")
        # Console handler: everything at DEBUG and above.
        handler = logging.StreamHandler()
        handler.setFormatter(formatter)
        self.__logger.addHandler(handler)

    def get_logger(self):
        """Return the configured root logger."""
        return self.__logger
|
{"/hydroquebec.py": ["/common/configuration_loader.py", "/common/logger_config.py"]}
|
30,104
|
jingl3s/domoticz_hydroquebec
|
refs/heads/master
|
/hydroquebec.py
|
#!/usr/bin/python3
# -*- coding: latin-1 -*-
'''
@author: 2017 jingl3s at yopmail dot com
'''
# license
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE (see the file
# LICENSE included with the distribution).
from datetime import datetime
import json
import logging
import os
import subprocess
from common.configuration_loader import ConfigurationLoader
from common.logger_config import LoggerConfig
from domoticz.domoticz import Domoticz
import sys
def _get_hydroquebec_valeur_veille(json_configuration_hydroquebec):
    '''
    Talks to pyhydroquebec to extract yesterday's consumption value.

    :param json_configuration_hydroquebec: JSON configuration (dict) with keys
        PYHYDRO (pyhydroquebec module to run), U (user) and P (password)
    :return: the value read as a float, otherwise 0.0
    '''
    CHAMP_HYDRO_JSON = "yesterday_total_consumption"
    _logger = logging.getLogger(__name__)
    conf = json_configuration_hydroquebec
    cmd_hydro = [sys.executable, "-m", conf["PYHYDRO"],
                 "-u", conf["U"], "-p", conf["P"], "-j"]
    _logger.debug(' '.join(cmd_hydro))
    output = subprocess.check_output(cmd_hydro)
    _logger.debug(output)
    json_hydro = json.loads(output.decode(encoding='utf_8', errors='strict'))
    # Only the first contract is used when several are returned.
    if len(json_hydro) > 1:
        _logger.warning("Trop de contrats trouv seul le premier sera utilis")
    dict_valeurs = next(iter(json_hydro.values()))
    if CHAMP_HYDRO_JSON in dict_valeurs:
        consommation_veille = dict_valeurs[CHAMP_HYDRO_JSON]
    else:
        consommation_veille = 0.0
        _logger.error("Aucune information de consommation trouve")
    _logger.debug(consommation_veille)
    return consommation_veille
def _get_domoticz(json_configuration_domoticz):
    '''
    Build a Domoticz interface from the DOMOTICZ configuration section.

    :param json_configuration_domoticz: dict with ADRESSE and URL_LIT keys
    :return: the configured Domoticz instance, or None on failure
    '''
    _logger = logging.getLogger(__name__)
    domoticz_interface = None
    try:
        _logger.debug(json_configuration_domoticz)
        domoticz_interface = Domoticz()
        domoticz_interface.set_adresse(json_configuration_domoticz['ADRESSE'])
        domoticz_interface.set_url_lecture(json_configuration_domoticz['URL_LIT'])
    except Exception as e:
        if _logger is None:
            print (e)
        else:
            _logger.exception("Erreur d'execution")
    return domoticz_interface
def _is_need_update_domoticz(domoticz_interface, json_configuration_domoticz):
    '''
    Check whether the Domoticz sensor still needs an update today.

    :param domoticz_interface: Domoticz interface wrapper
    :param json_configuration_domoticz: DOMOTICZ configuration section
    :return: True when the sensor's last-update date differs from today's date
    '''
    _logger = logging.getLogger(__name__)
    besoin_mise_a_jour = False
    try:
        _logger.debug(json_configuration_domoticz)
        idx_capteur = json_configuration_domoticz['HYDRO']['IDX']
        domoticz_interface.lit_information_capteur(idx_capteur)
        derniere_mise_a_jour = domoticz_interface.lit_valeur(
            idx_capteur, "LastUpdate")
        _logger.debug(derniere_mise_a_jour)
        # LastUpdate looks like "YYYY-MM-DD HH:MM:SS"; keep the date part.
        derniere_mise_a_jour_date = derniere_mise_a_jour.split(" ")[0]
        _logger.debug(derniere_mise_a_jour_date)
        str_date_now = datetime.now().strftime('%Y-%m-%d')
        _logger.debug(str_date_now)
        besoin_mise_a_jour = str(derniere_mise_a_jour_date) not in str_date_now
    except Exception as e:
        if _logger is None:
            print (e)
        else:
            _logger.exception("Erreur d'execution")
    return besoin_mise_a_jour
def _mise_a_jour_domoticz(domoticz_interface, nouvelle_valeur_consommation_kwh, json_configuration_domoticz):
    '''
    Add yesterday's consumption (kWh) to the Domoticz counter value and push
    the new total (in Wh) back to the sensor.

    Example endpoints:
    # http://192.168.254.194:8080/json.htm?type=command&param=udevice&idx=53&nvalue=0&svalue=10;5000
    # http://192.168.254.194:8080/json.htm?type=devices&rid=53

    :param domoticz_interface: Domoticz interface wrapper
    :param nouvelle_valeur_consommation_kwh: consumption to add, in kWh
    :param json_configuration_domoticz: DOMOTICZ configuration section
    '''
    _logger = logging.getLogger(__name__)
    try:
        _logger.debug(json_configuration_domoticz)
        hydro_conf = json_configuration_domoticz['HYDRO']
        idx_capteur = hydro_conf['IDX']
        domoticz_interface.lit_information_capteur(idx_capteur)
        valeur = domoticz_interface.lit_valeur(
            idx_capteur, hydro_conf['JSON_VAL_LIT'])
        _logger.debug("valleur actuelle {}".format(valeur))
        # The current value carries its unit (SubType); strip it off.
        unite = domoticz_interface.lit_valeur(idx_capteur, "SubType")
        _logger.debug(unite)
        valeur_sans_unit = valeur.replace(unite, "").strip()
        _logger.debug(valeur_sans_unit)
        # New total in kWh, converted to Wh for Domoticz.
        valeur_nouvelle = (float(valeur_sans_unit)
                           + nouvelle_valeur_consommation_kwh) * 1000
        _logger.debug("nouvelle valleur {}".format(valeur_nouvelle))
        domoticz_interface.modifier_interrupteur(
            hydro_conf['IDX'],
            "0;{}".format(int(valeur_nouvelle)),
            hydro_conf['JSON_VAL_ECRIT'],
            hydro_conf['URL_ECRIT'])
    except Exception as e:
        if _logger is None:
            print (e)
        else:
            _logger.exception("Erreur d'execution")
    return
def main():
    '''
    Main entry point: loads the configuration, fetches yesterday's
    consumption from Hydro-Quebec and pushes it to Domoticz when the
    sensor has not been updated yet today.

    Optional command-line argument: "force" pushes the value even if the
    sensor was already updated; "display" fetches but never pushes.
    '''
    _logger = None
    try:
        path = os.path.abspath(os.path.dirname(__file__))
        filename_python = os.path.basename(__file__)
        # Prefer a tmpfs mount for logs when one is available.
        if os.path.exists("/mnt/tmpfs/"):
            path_log = "/mnt/tmpfs/"
        else:
            path_log = path
        # Configure the module's components (logger, then configuration).
        logger_obj = LoggerConfig(
            path_log, os.path.splitext(os.path.split(filename_python)[1])[0])
        _logger = logger_obj.get_logger()
        config = ConfigurationLoader(os.path.join(path, "configs"))
        json_configuration = config.obtenir_configuration()
        _logger.debug(json_configuration)
        domoticz_interface = _get_domoticz(json_configuration['DOMOTICZ'])
        # Check whether an update was already performed today.
        need_update = _is_need_update_domoticz(
            domoticz_interface, json_configuration['DOMOTICZ'])
        # Parameter to force an update when something goes wrong
        if len(sys.argv) > 1:
            if sys.argv[1] == "force":
                need_update = True
            if sys.argv[1] == "display":
                need_update = False
        # Fetch yesterday's consumption value.
        consommation_veille = _get_hydroquebec_valeur_veille(
            json_configuration['HYDROQUEBEC'])
        if need_update:
            # Push the new value to Domoticz.
            _mise_a_jour_domoticz(
                domoticz_interface, consommation_veille, json_configuration['DOMOTICZ'])
    except Exception as e:
        if _logger is not None:
            _logger.exception("Erreur d'execution")
        else:
            print (e)
    return
# Script entry point.
if __name__ == '__main__':
    main()
|
{"/hydroquebec.py": ["/common/configuration_loader.py", "/common/logger_config.py"]}
|
30,105
|
ckrapu/rwfmm
|
refs/heads/master
|
/models.py
|
import pymc3 as pm
import numpy as np
import scipy as sp
import theano.tensor as tt
import patsy as p
import utilities
def cross_validate_rwfmm(rwfmm_args,rwfmm_kwargs,param_for_tuning,tuning_set,criterion='LOO'):
    """Fit one rwfmm model per value in *tuning_set*, overriding
    *param_for_tuning* in *rwfmm_kwargs* each time, and rank the fits.

    :param rwfmm_args: positional arguments forwarded to rwfmm()
    :param rwfmm_kwargs: keyword arguments forwarded to rwfmm() (not mutated)
    :param param_for_tuning: name of the keyword argument being tuned
    :param tuning_set: iterable of candidate values for that argument
    :param criterion: information criterion passed to pm.stats.compare
    :return: (comparison table, {model: trace} dict)
    """
    # The unused `trace_list` accumulator from the original was removed;
    # traces are already retained as values of `model_dict`.
    model_dict = {}
    for param_val in tuning_set:
        modified_kwargs = rwfmm_kwargs.copy()
        modified_kwargs[param_for_tuning] = param_val
        trace, model = rwfmm(*rwfmm_args, **modified_kwargs)
        model_dict[model] = trace
    rankings = pm.stats.compare(model_dict, ic=criterion)
    return rankings, model_dict
def rwfmm(functional_data,static_data,Y,
        func_coef_sd = 'prior', method='nuts',
        robust=False, func_coef_sd_hypersd = 0.1,
        coefficient_prior='flat', include_random_effect = True,
        variable_func_scale = True, time_rescale_func = False,
        sampler_kwargs = None,
        return_model_only = False, n_spline_knots = 20,
        func_coef_type = 'random_walk', spline_degree = 3,spline_coef_sd = 'prior',
        spline_coef_hyper_sd = 2.,
        spline_coef_prior = 'random_walk',spline_rw_sd = 1.,average_every_n = 1,
        spline_rw_hyper_sd = 1.,poly_order=4):
    '''
    Fits a functional mixed model with a random-walk model of
    the functional coefficient. A range of different priors is available for
    the model coefficients.
    Parameters
    ----------
    functional_data : 4D Numpy array
        Data inputs for functional covariates with expected shape (S,V,T,F)
        where S denotes the number of subjects, V denotes the number of
        visits or repeated observations for each subject, T denotes the
        dimension of the functional data (i.e. number of timesteps)
        and F denotes the number of functional coefficients.
    static_data: 3D Numpy array
        Data inputs for static (i.e. non-functional) covariates which are
        constant for each subject/visits combination.
        This array is expected to have the shape (S,V,C) where
        C denotes the number of static covariates.
    Y: 3D Numpy array
        Responses for the functional regression. This array is expected to
        have the same dimensions as static_data.
    func_coef_sd: float or string
        The standard deviation of the Gaussian random walk for all
        functional coefficients. If set to "prior", then this quantity
        will also be treated as a parameter that needs to be estimated.
    method: string
        Designates the method to be used to fit the model.
        This must be one of "nuts", "mh" or one of the approximate inference
        methods at https://docs.pymc.io/api/inference.html#variational.
    robust: bool
        Determines whether a normal error model or a robust Student-T error
        model is assumed for the residuals.
    func_coef_sd_hypersd: float
        If func_coef_sd is set to "prior", then this parameter sets the
        standard deviation of the half-normal distribution over the
        functional coefficient standard deviation (func_coef_sd). Note that
        in this case, each functional coefficient gets its own standard
        deviation drawn from the same prior defined by this parameter.
    coefficient_prior: string
        Determines the prior placed on the static covariate coefficients as
        well as the mean (a.k.a. the level) of the functional coefficient.
        The options are "flat","normal","horseshoe","finnish_horseshoe".
    include_random_effect: bool
        Determines whether or not a per-subject random intercept is included.
    variable_func_scale : bool
        Determines whether or not to allow the functional coefficients be
        multiplied by a positive number. This can lead to identifiability issues
        if a weak prior is specified on the functional coefficient evolution
        variance.
    time_rescale_func : bool
        If true, divides the functional coefficient by T. This can help make
        the coefficient more interpretable.
    sampler_kwargs: dict or None
        Any additional arguments to be passed to pm.sample(). MCMC tuning
        steps, posterior draws and chain count are controlled through the
        'tune', 'draws' and 'chains' keys of this dict. When None (the
        default), {'init':'adapt_diag','chains':1,'tune':500,'draws':500}
        is used.
    return_model_only: bool
        If true, returns only the model object without sampling. This can be
        helpful for debugging.
    func_coef_type : string
        One of 'constant','random_walk', 'bspline_recursive', 'natural_spline',
        'linear','bspline_design' or 'polynomial'.
        This determines how the functional coefficient will be parameterized. If it
        is 'random_walk', then the coefficient will be computed as the cumulative
        sum of many small normally-distributed jumps whose standard deviation
        is controlled by 'func_coef_sd'. Alternatively, if one of the bspline
        options is used, then the functional coefficient will be a bspline. The option
        'bspline_recursive' builds the coefficient using the de Boor algorithm
        while the options 'bspline_design' and 'natural_spline' build a design
        matrix using patsy's functionality and then estimates the coefficients
        linking that matrix to the functional coefficients. Using 'polynomial'
        specifies the functional coefficient as a polynomial of order 'poly_order'.
        'linear' makes the functional coefficient a linear function of the function
        domain.
    poly_order : int
        The degree of the polynomial used if the functional coefficient type is
        set to 'polynomial'.
    n_spline_knots : int
        In the event that the functional coefficient is one of the bspline choices,
        then this controls how many knots or breakpoints the spline has. In general,
        higher numbers for this value are required for higher spline orders.
    spline_degree : int
        The order of the spline if the functional coefficient is parameterized as a
        bspline. This is also the order of the polynomial for each spline section
        plus 1. Set this equal to 4 for cubic polynomial approximations in the spline.
    spline_coef_sd : float
        The standard deviation of the normal prior on the spline coefficients.
    spline_coef_prior : string
        One of 'normal', 'flat', or 'random_walk'. This controls how the
        bspline coefficients are smoothed.
    spline_rw_sd : string or float
        Either 'prior' or a float. This controls how much the spline coefficients
        are allowed to jump when using a random walk for the spline coefficient
        prior.
    spline_rw_hyper_sd : float
        If 'spline_rw_sd' is set to 'prior', this is the standard deviation
        of the half-Normal prior on the spline random walk jump standard
        deviation.
    average_every_n : int
        This is used to average every n measurements of the functional data
        together. For example, if the functional data corresponds to 96 hourly
        timesteps' worth of data, setting this to 4 would take the 24 hour average
        and reduce the size of T from 96 to 24. The default setting of 1 leaves
        the data unchanged.
    Returns
    -------
    trace: pymc3 Trace
        Samples produced either via MCMC or approximate inference during
        fitting.
    model: pymc3 Model
        The model object describing the RWFMM.
    '''
    # A mutable dict default argument is shared across calls; the default is
    # therefore None and materialized per call here.
    if sampler_kwargs is None:
        sampler_kwargs = {'init':'adapt_diag','chains':1,'tune':500,'draws':500}
    with pm.Model() as model:
        S,V,T,F = functional_data.shape
        _,_,C = static_data.shape
        #functional_data = np.mean(functional_data.reshape(-1, average_every_n), axis=1)
        # We want to make sure the two data arrays agree in the number of
        # subjects (S) and visits (V).
        assert static_data.shape[0:2] == functional_data.shape[0:2]
        # Total number of functional and static coefficients.
        # This does not include the random-walk jumps.
        n_covariates = F + C
        if include_random_effect:
            random_effect_mean = pm.Flat('random_effect_mean')
            random_effect_sd = pm.HalfCauchy('random_effect_sd',beta = 1.)
            random_effect_unscaled = pm.Normal('random_effect_unscaled',shape = [S,1])
            random_effect = pm.Deterministic('random_effect',random_effect_unscaled * random_effect_sd + random_effect_mean)
        else:
            random_effect = 0.
        if coefficient_prior == 'flat':
            coef = pm.Flat('coef',shape = n_covariates)
        elif coefficient_prior == 'normal':
            coef_sd = pm.HalfCauchy('coef_sd',beta = 1.)
            coef = pm.Normal('coef',sd = coef_sd,shape = [n_covariates] )
        elif coefficient_prior == 'cauchy':
            coef_sd = pm.HalfCauchy('coef_sd',beta = 1.0)
            coef = pm.Cauchy('coef',alpha = 0., beta = coef_sd,shape = [n_covariates] )
        elif coefficient_prior == 'horseshoe':
            loc_shrink = pm.HalfCauchy('loc_shrink',beta = 1,shape = [n_covariates])
            glob_shrink= pm.HalfCauchy('glob_shrink',beta = 1)
            coef = pm.Normal('coef',sd = (loc_shrink * glob_shrink),shape = [n_covariates])
        # Implemented per Piironnen and Vehtari '18
        elif coefficient_prior == 'finnish_horseshoe':
            loc_shrink = pm.HalfCauchy('loc_shrink',beta = 1,shape = [n_covariates])
            glob_shrink = pm.HalfCauchy('glob_shrink',beta = 1)
            # In order to get some of the values within the prior calculations,
            # we need to know the variance of the predictors.
            static_var = np.var(static_data,axis = (0,1))
            func_var = np.var(functional_data,axis = (0,1,2))
            variances = np.concatenate([static_var,func_var])
            nu_c = pm.Gamma('nu_c',alpha = 2.0, beta = 0.1)
            c = pm.InverseGamma('c',alpha = nu_c/2, beta = nu_c * variances / 2,shape = [n_covariates])
            regularized_loc_shrink = c * loc_shrink**2 / (c + glob_shrink**2 * loc_shrink**2)
            coef = pm.Normal('coef',sd = (regularized_loc_shrink * glob_shrink**2)**0.5,shape = [n_covariates])
        if func_coef_type == 'constant':
            func_coef = pm.Deterministic('func_coef',tt.zeros([T,F]) + coef[C:])
        elif func_coef_type == 'random_walk':
            if func_coef_sd == 'prior':
                func_coef_sd = pm.HalfNormal('func_coef_sd',sd = func_coef_sd_hypersd,shape=F)
            # The 'jumps' are the small deviations about the mean of the functional
            # coefficient.
            if variable_func_scale:
                log_scale = pm.Normal('log_scale',shape = F)
            else:
                log_scale = 0.0
            jumps = pm.Normal('jumps',sd = func_coef_sd,shape=(T,F))
            random_walks = tt.cumsum(jumps,axis=0) * tt.exp(log_scale) + coef[C:]
            func_coef = pm.Deterministic('func_coef',random_walks)
        elif (func_coef_type == 'natural_spline' or func_coef_type == 'bspline_design'):
            x = np.arange(T)
            # The -1 in the design matrix creation is to make sure that there
            # is no constant term which would be made superfluous by 'coef'
            # which is added to the functional coefficient later.
            if func_coef_type == 'natural_spline':
                spline_basis = p.dmatrix("cr(x, df = {0}) - 1".format(n_spline_knots),{"x":x},return_type = 'dataframe').values
            elif func_coef_type == 'bspline_design':
                spline_basis = p.dmatrix("bs(x, df = {0}) - 1".format(n_spline_knots),{"x":x},return_type = 'dataframe').values
            # If this produces a curve which is too spiky or rapidly-varying,
            # then a smoothing prior such as a Gaussian random walk could
            # instead be used here.
            if spline_coef_prior == 'normal':
                spline_coef = pm.Normal('spline_coef',sd = spline_coef_sd,shape = [n_spline_knots,F])
            elif spline_coef_prior == 'flat':
                spline_coef = pm.Flat('spline_coef',shape = [n_spline_knots,F])
            elif spline_coef_prior == 'random_walk':
                if spline_rw_sd == 'prior':
                    spline_rw_sd = pm.HalfNormal('spline_rw_sd',sd = spline_rw_hyper_sd,shape = F)
                spline_jumps = pm.Normal('spline_jumps', shape = [n_spline_knots,F])
                spline_coef = pm.Deterministic('spline_coef',tt.cumsum(spline_jumps * spline_rw_sd ,axis = 0))
            # This inner product sums over the spline coefficients
            func_coef = pm.Deterministic('func_coef', (tt.tensordot(spline_basis,spline_coef,axes=[[1],[0]]) + coef[C:]))
        # This is deprecated - it is missing some priors.
        elif func_coef_type == 'bspline_recursive':
            n_spline_coefficients = spline_degree + n_spline_knots + 1
            spline_coef = pm.Normal('spline_coef',sd = spline_coef_sd,shape = [n_spline_coefficients,F])
            x = np.linspace(-4,4,T)
            func_coefs = []
            for f in range(F):
                func_coefs.append(utilities.bspline(spline_coef[:,f],spline_degree,n_spline_knots,x))
            func_coef = pm.Deterministic('func_coef',(tt.stack(func_coefs,axis=1) ))
        elif func_coef_type == 'polynomial':
            poly_basis = np.zeros([T,poly_order])
            for i in range(1,poly_order+1):
                poly_basis[:,i-1] = np.arange(T)**i
            poly_coef = pm.Flat('poly_coef',shape = [poly_order,F])
            func_coef = pm.Deterministic('func_coef',tt.tensordot(poly_basis,poly_coef,axes=[[1],[0]]) + coef[C:])
        elif func_coef_type == 'linear':
            linear_basis = np.zeros([T,F])
            for i in range(F):
                linear_basis[:,i] = np.arange(T)
            linear_coef = pm.Flat('linear_coef',[F])
            func_coef = pm.Deterministic('func_coef',linear_basis * linear_coef + coef[C:])
        else:
            # Stray doubled quotes in the original message were removed.
            raise ValueError('Functional coefficient type not recognized.')
        # This is the additive term in y_hat that comes from the functional
        # part of the model.
        func_contrib = tt.tensordot(functional_data,func_coef,axes=[[2,3],[0,1]])
        if time_rescale_func:
            func_contrib = func_contrib / T
        # The part of y_hat that comes from the static covariates
        static_contrib = tt.tensordot(static_data,coef[0:C],axes = [2,0])
        noise_sd = pm.HalfCauchy('noise_sd',beta = 1.0)
        # y_hat is the predictive mean.
        y_hat = pm.Deterministic('y_hat', static_contrib + func_contrib + random_effect)
        # If the robust error option is used, then a gamma-Student-T distribution
        # is placed on the residuals.
        if robust:
            DOF = pm.Gamma('DOF',alpha = 2, beta = 0.1)
            response = pm.StudentT('response',mu = y_hat,sd = noise_sd,nu = DOF,observed = Y)
        else:
            response = pm.Normal('response',mu = y_hat,sd = noise_sd,observed = Y)
    if return_model_only:
        return model
    # NUTS is the default PyMC3 sampler and is what we recommend for fitting.
    if method == 'nuts':
        trace = pm.sample(**sampler_kwargs)
    # Metropolis-Hastings does poorly with lots of correlated parameters,
    # so this fitting method should only be used if T is small or you are
    # fitting a scalarized model.
    elif method == 'mh':
        trace = pm.sample(step = pm.Metropolis(),**sampler_kwargs)
    # There are a number of approximate inference methods available, but
    # none of them gave results that were close to what we got with MCMC.
    else:
        approx = pm.fit(method=method,**sampler_kwargs)
        # BUG FIX: `draws` was an undefined name here (NameError on every
        # approximate-inference run); take the draw count from sampler_kwargs.
        trace = approx.sample(sampler_kwargs.get('draws', 500))
    return trace,model
|
{"/models.py": ["/utilities.py"]}
|
30,106
|
ckrapu/rwfmm
|
refs/heads/master
|
/setup.py
|
from setuptools import setup
setup(
name='rwfmm',
version='0.1',
description='Code for random-walk functional mixed model',
author='Christopher Krapu',
author_email='ckrapu@gmail.com',
py_modules=["models",'utilities'],
install_requires=['theano','numpy','pymc3','matplotlib','pandas']
)
|
{"/models.py": ["/utilities.py"]}
|
30,107
|
ckrapu/rwfmm
|
refs/heads/master
|
/utilities.py
|
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import numpy as np
import pandas as pd
import theano
import theano.tensor as tt
theano.config.compute_test_value = 'ignore'
def ar1(beta,sd,length):
    '''Generate one realization of an AR(1) process.

    *beta* is the autoregression coefficient, *sd* the jump standard
    deviation, and *length* the number of elements produced.'''
    series = np.zeros(length)
    series[0] = np.random.randn() * sd
    for t in range(1, length):
        series[t] = beta * series[t - 1] + np.random.randn() * sd
    return series
def simulate_dataset(S,V,C,F,T,function_type,error_sd,error_type,autocorr=0.5,
                     functional_covariate_type='normal',spike_variance = 0.25):
    '''Generate simulated data for a scalar-on-function regression with
    longitudinal measurements and scalar covariates.

    Parameters
    ----------
    S : integer
        Number of individuals / subjects
    V : integer
        Number of longitudinal measurements per individual
    C : integer
        Number of scalar (i.e. non-functional) covariates
    F : integer
        Number of functional covariates
    T : integer
        Number of elements for each functional covariate measurement
    function_type : string
        One of 'logistic', 'sinusoid', or 'spike'. Defines the shape of the
        generated functional coefficient.
    error_sd : float
        The standard deviation of the residual error distribution.
    error_type : string
        One of 'normal', 'cauchy', or 'autocorrelated'. Determines the residual
        distribution. If 'autocorrelated', the per-subject residuals are
        autocorrelated across longitudinal measurements.
    autocorr : float
        AR coefficient used when error_type == 'autocorrelated'; otherwise ignored.
    functional_covariate_type : string
        'normal' for iid standard-normal functional covariates, or
        'autocorrelated' for AR(1) covariates (beta=0.5, jump sd=0.5).
    spike_variance : float
        Width of the spike function; larger values give a flatter function.

    Returns
    -------
    functional_covariates : 4D Numpy array, shape [S,V,T,F]
    longitudinal_covariates : 3D Numpy array, shape [S,V,C]
    response : 2D Numpy array, shape [S,V]
    functional_coefficients : 2D Numpy array, shape [T,F]
    longitudinal_coefficient : 1D Numpy array, shape [C]
    random_effect : 2D Numpy array, shape [S,V]
        Per-subject intercepts, repeated across the V visits.
    '''
    # Bug fix: `product` was used below without ever being imported, so the
    # 'autocorrelated' covariate branch raised NameError.
    from itertools import product

    longitudinal_covariates = np.random.randn(S,V,C)
    if functional_covariate_type == 'normal':
        functional_covariates = np.random.randn(S,V,T,F)
    elif functional_covariate_type == 'autocorrelated':
        functional_covariates = np.zeros([S,V,T,F])
        for s,v,f in product(range(S),range(V),range(F)):
            functional_covariates[s,v,:,f] = ar1(0.5,0.5,T)
    else:
        raise ValueError('Covariate type not recognized.')
    # Per-subject intercept, repeated across visits so it broadcasts with [S,V].
    random_effect = np.random.randn(S)[:,np.newaxis].repeat(V,axis = 1)
    longitudinal_coefficient = np.random.randn(C)
    timesteps = np.arange(T)[:,np.newaxis].repeat(F,axis=1)
    if function_type == 'logistic':
        timesteps = np.linspace(-6,6,T)[:,np.newaxis].repeat(F,axis=1)
        functional_coefficients = 1. / (1. + np.exp(-timesteps))
    elif function_type == 'sinusoid':
        timesteps = np.linspace(-3.14*3,3.14*3,T)[:,np.newaxis].repeat(F,axis=1)
        functional_coefficients = np.sin(timesteps)
    elif function_type == 'spike':
        timesteps = np.linspace(-2,2,T)[:,np.newaxis].repeat(F,axis=1)
        functional_coefficients = np.exp(-(timesteps/spike_variance)**2)
    else:
        raise ValueError('Function type not recognized.')
    # Contract over the covariate axes to get the [S,V] mean surfaces.
    longitudinal_mean = np.einsum('ijk,k',longitudinal_covariates,longitudinal_coefficient)
    functional_mean = np.einsum('ijkl,kl',functional_covariates,functional_coefficients)
    mean = random_effect + longitudinal_mean + functional_mean
    if error_type == 'normal':
        error = np.random.randn(S,V)
    elif error_type == 'cauchy':
        # NOTE(review): standard_t with 10 degrees of freedom is heavy-tailed
        # but not Cauchy (that would be df=1). Kept as-is to preserve behavior;
        # confirm whether the label or the distribution is the intended one.
        error = np.random.standard_t(10,size=[S,V])
    elif error_type == 'autocorrelated':
        error = np.zeros([S,V])
        for s in range(S):
            # Bug fix: this previously called the undefined name
            # 'autoregression'; ar1 (defined above) is the AR(1) generator.
            error[s,:] = ar1(autocorr,0.5,V)
    else:
        raise ValueError('Error type not recognized.')
    response = mean + error * error_sd
    return functional_covariates,longitudinal_covariates,response,functional_coefficients,longitudinal_coefficient,random_effect
def coef_plot(samples, upper=97.5, lower=2.5):
    """Plot the pointwise median and [lower, upper] percentile envelope of
    posterior samples of a functional coefficient (samples: [draws, T])."""
    n_steps = samples.shape[1]
    xs = np.arange(n_steps)
    plt.figure(figsize=(5, 3))
    plt.plot(xs, np.median(samples, axis=0), color='k', label='Median')
    plt.plot(xs, np.percentile(samples, upper, axis=0), linestyle='--', color='k')
    plt.plot(xs, np.percentile(samples, lower, axis=0), linestyle='--',
             color='k', label='{0}% CI'.format(upper-lower))
    plt.legend()
    plt.xlabel('Timestep')
    plt.ylabel('B(t)')
    plt.grid('on')
def multiple_coef_plot(samples_array,num_horizontal,num_vertical,titles,upper_q = 97.5,lower_q = 2.5,
                       fig_kwargs = None,
                       xlabel='Timestep',ylabel='B(t)',true_coef = None,
                       colors = None,trace_labels = None,true_color='k',
                       shade_sig = False,sig_excludes = None,
                       sig_alphas = None,sig_intervals = None,average_function = np.mean,
                       xticks = None):
    """Plot posterior summaries for F functional coefficients on a grid of axes.

    samples_array is one array (or a list of arrays) of shape [draws, T, F];
    each entry gets its own color/label. Optionally overlays the true
    coefficients and shades regions whose credible interval excludes
    `sig_excludes`. Returns (figure, axes).
    """
    # Bug fix: the original used mutable default arguments (lists and a dict),
    # which are shared across calls in Python. Replaced with None sentinels
    # that expand to the same effective per-call values.
    if fig_kwargs is None:
        fig_kwargs = {'figsize': (8, 6), 'sharex': True}
    if colors is None:
        colors = ['k']
    if trace_labels is None:
        trace_labels = ['']
    if sig_excludes is None:
        sig_excludes = [0.]
    if sig_alphas is None:
        sig_alphas = [0.25]
    if sig_intervals is None:
        sig_intervals = [(97.5, 2.5)]
    if xticks is None:
        xticks = [0, 50, 100, 150, 200, 250, 300]
    if type(samples_array) != list:
        samples_array = [samples_array]
    _, T, F = samples_array[0].shape
    figure, axes = plt.subplots(num_vertical, num_horizontal, **fig_kwargs)
    axes = axes.ravel()
    timesteps = np.arange(T)
    zeros = np.zeros_like(timesteps)
    proxy_artists = []
    for j, samples in enumerate(samples_array):
        for i in range(F):
            # Dashed zero line for reference.
            axes[i].plot(timesteps, np.zeros(len(timesteps)), color=colors[j], linestyle='--')
            average = average_function(samples[:, :, i], axis=0)
            axes[i].plot(timesteps, average, color=colors[j], linewidth=2)
            upper = np.percentile(samples[:, :, i], upper_q, axis=0)
            lower = np.percentile(samples[:, :, i], lower_q, axis=0)
            # NOTE(review): plt.plot with three array args draws `upper` vs
            # timesteps and then `lower` against its own index — this was
            # likely meant to be two separate plot calls; confirm before
            # changing, since altering it changes the rendered figure.
            axes[i].plot(timesteps, upper, lower, color=colors[j], linewidth=1)
            axes[i].xaxis.set_ticks(xticks)
            if shade_sig:
                for k, alpha in enumerate(sig_alphas):
                    bounds = np.percentile(samples[:, :, i], [sig_intervals[k][0], sig_intervals[k][1]], axis=0)
                    is_sig = np.logical_or((bounds[0] < sig_excludes), (bounds[1] > sig_excludes))
                    axes[i].fill_between(timesteps, upper, lower, where=is_sig, color=colors[j],
                                         alpha=alpha, zorder=k + 1)
            if true_coef is not None:
                axes[i].plot(timesteps, true_coef[i], linewidth=3, alpha=0.75, color=true_color)
            axes[i].set_title(titles[i])
        proxy_artists.append(mpatches.Patch(color=colors[j]))
    if len(samples_array) > 1:
        plt.figlegend(proxy_artists, trace_labels, loc='upper center', ncol=len(samples_array), bbox_to_anchor=(.5, 1.04))
    plt.tight_layout()
    return figure, axes
def get_data(response_col,functional_covariates,static_covariates,log_transform_response = False,T=336,standardize_inputs = False,
             filename = '/home/ckrapu/Dropbox/wfmm/intermediate/no_wavelet_dataframe_5_6.p'):
    '''Load and reshape data from a specific data file for use in the
    functional linear mixed model.

    Builds a functional design array D_func [P,V,T,F], a static design array
    D_static [P,V,C], and a masked response Y [P,V], where P is the number of
    unique subject ids and V the number of unique visits. Observations with
    any missing response, static, or functional value are masked in Y and
    zeroed in the design arrays.

    NOTE(review): the default filename has a .p (pickle) extension but is read
    with pd.read_csv — confirm the file really is CSV-formatted.
    '''
    df = pd.read_csv(filename)
    P = df.id.unique().shape[0]
    V = df.visit.unique().shape[0]
    F = len(functional_covariates)
    C = len(static_covariates)
    D_func = np.zeros([P,V,T,F])
    D_static = np.zeros([P,V,C])
    Y = np.zeros([P,V])
    # This loop will fill in the design matrix / tensor (for functional data).
    # Iterate over each unique subject
    for p,unique_id in enumerate(df.id.unique()):
        # For each subject, iterate over all visits
        for v,unique_visit in enumerate(df.visit.unique()):
            # Fill in the response variable first.
            # We will overwrite these entries with NaNs if the observation is invalid.
            scalar_response = df[(df.id == unique_id) & (df.visit == unique_visit)][response_col].values
            if len(scalar_response) == 1:
                Y[p,v] = scalar_response
            # fill in the static covariates
            static_row = df[(df.id == unique_id) & (df.visit == unique_visit)][static_covariates]
            if len(static_row) > 0:
                D_static[p,v,:] = static_row
                # Any missing static covariate invalidates the whole observation.
                if np.any(np.isnan(static_row)):
                    Y[p,v] = np.nan
            # Fill in the functional covariates
            for f,column_name in enumerate(functional_covariates):
                # Columns belonging to one functional covariate are identified
                # by substring match on the covariate name.
                per_func_cov_cols = [col for col in df.columns if column_name in col]
                # This picks out a T-long vector of values and puts it into the func. design array.
                func_row = df[(df.id == unique_id) & (df.visit == unique_visit)][per_func_cov_cols]
                if len(func_row) > 0:
                    D_func[p,v,:,f] = func_row
                # Again, if the covariate is missing then we want to remove this observation.
                else:
                    Y[p,v] = np.nan
                if np.any(np.isnan(func_row)):
                    Y[p,v] = np.nan
    if log_transform_response:
        Y = np.log(Y)
    # An observation is bad if any of its pieces contain NaN.
    is_bad = np.isnan(Y) + np.any(np.isnan(D_static),axis =2) + np.any(np.isnan(D_func),axis =(2,3))
    is_valid = ~is_bad
    Y = np.ma.masked_array(data = Y, mask = is_bad)
    # We will also zero out the entries in the design arrays corresponding
    # to patient/visit pairs which are not valid.
    for p in range(Y.shape[0]):
        for v in range(Y.shape[1]):
            if is_bad[p,v]:
                D_static[p,v,:] = 0.0
                D_func[p,v,:,:] = 0.0
    assert np.all(np.isfinite(D_static))
    assert np.all(np.isfinite(D_func))
    if standardize_inputs:
        # Z-score over all subjects/visits (and timesteps, for D_func).
        D_func = (D_func - np.mean(D_func,axis = (0,1,2))) / np.std(D_func,axis = (0,1,2))
        D_static = (D_static - np.mean(D_static,axis = (0,1))) / np.std(D_static,axis = (0,1))
    return D_func,D_static,Y
def build_B_spline_deg_zero_degree_basis_fns(breaks, x):
    r"""Build the degree-0 B-spline basis expressions with knots at `breaks`.

    N_{i,0}(x) = 1 if u_i <= x < u_{i+1}, else 0; the first and last basis
    functions absorb everything below breaks[1] / at-or-above breaks[-2].
    """
    basis = [tt.switch(x < breaks[1], 1, 0)]
    for lower_knot, upper_knot in zip(breaks[1:-2], breaks[2:-1]):
        basis.append(tt.switch((x >= lower_knot) & (x < upper_knot), 1, 0))
    basis.append(tt.switch(x >= breaks[-2], 1, 0))
    return basis
def build_B_spline_higher_degree_basis_fns(
        breaks, prev_degree_coefs, degree, x):
    r"""Raise B-spline basis expressions one degree via the Cox-de Boor recursion.

    N_{i,p}(x) = ((x-u_i)/(u_{i+p}-u_i)) * N_{i,p-1}(x)
               + ((u_{i+p+1}-x)/(u_{i+p+1}-u_{i+1})) * N_{i+1,p-1}(x)

    A tiny epsilon keeps the denominators nonzero at repeated knots.
    """
    assert degree > 0
    raised = []
    pairs = zip(prev_degree_coefs[:-1], prev_degree_coefs[1:])
    for i, (left_coef, right_coef) in enumerate(pairs):
        w_left = (x - breaks[i]) / (breaks[i + degree] - breaks[i] + 1e-12)
        w_right = (breaks[i + degree + 1] - x) / (breaks[i + degree + 1] - breaks[i + 1] + 1e-12)
        raised.append(w_left * left_coef + w_right * right_coef)
    return raised
def build_B_spline_basis_fns(breaks, max_degree, x):
    """Build B-spline basis expressions of degree `max_degree` by starting at
    degree 0 and applying the recursion once per degree."""
    coefs = build_B_spline_deg_zero_degree_basis_fns(breaks, x)
    degree = 1
    while degree <= max_degree:
        coefs = build_B_spline_higher_degree_basis_fns(breaks, coefs, degree, x)
        degree += 1
    return coefs
def spline_fn_expr(breaks, intercepts, degree, x):
    """Return the spline expression: the intercept-weighted sum of the
    degree-`degree` B-spline basis functions evaluated at x."""
    basis_fns = build_B_spline_basis_fns(breaks, degree, x)
    return sum(intercepts[k] * fn for k, fn in enumerate(basis_fns))
def bspline(intercepts, degree, n_bins, domain):
    """Evaluate a B-spline over `domain` with knots taken from the interior
    histogram bin edges, padded with degree+1 repeated boundary knots."""
    interior = np.histogram(domain, n_bins)[1][1:-1]
    lo_pad = np.repeat(domain.min() - 1e-6, degree + 1)
    hi_pad = np.repeat(domain.max() + 1e-6, degree + 1)
    knots = np.concatenate([lo_pad, interior, hi_pad])
    return spline_fn_expr(knots, intercepts, degree, domain)
def compile_spline(data, n_bins, degree, intercepts):
    """Compile a Theano function f(intercepts, xs) evaluating the spline whose
    knots come from `data`'s histogram edges (padded at the boundaries)."""
    interior = np.histogram(data, n_bins)[1][1:-1]
    lo_pad = np.repeat(data.min() - 1e-6, degree + 1)
    hi_pad = np.repeat(data.max() + 1e-6, degree + 1)
    knots = np.concatenate([lo_pad, interior, hi_pad])
    xs = tt.vector(dtype=theano.config.floatX)
    return theano.function([intercepts, xs], spline_fn_expr(knots, intercepts, degree, xs))
|
{"/models.py": ["/utilities.py"]}
|
30,112
|
Valrvn/Butler-Bot
|
refs/heads/main
|
/fun_zone_bot.py
|
#fun_zone_bot.py
import os
import random
import xlrd #Excel File read library for Rules of Acquisition
from google_images_search import GoogleImagesSearch #Google Images API
import discord
# Apig routine: respond to '?apig' with the "Pig Prayer" meme text.
async def apig_message(message):
    """Handle the '?apig' command: post the Pig Prayer, then either ping @here
    (if the Pig Priestess asked) or ask someone to wake the Priestess."""
    if '?apig' == message.content.lower():
        # send Pig Prayer
        await message.channel.send('Our HogFather,\nwho farts in Fedspace,\nhallowed be thy node.\nThy Klingon come.\nThy Rom be done on Earth\nas it is on Vulcan.\nGive us this kill,\nour daily goal,\nAnd forgive us our baseraids,\n\nAs we forgive none\nwho trash talk about us,\nAnd lead us not into empties,\nBut deliver us fat miners.\nFor thine is the pigsty,\nAnd the powergain,\nand the glorykill.\nForever and oinkoink.\n\nApig 🐷')
        # notify @here if the Pig Priestess herself is requesting the prayer
        # NOTE(review): the username#discriminator string and the member ID
        # below are hard-coded to one specific user — breaks if they rename.
        if "Valravn#5717" == str(message.author):
            await message.channel.send("Time for GC Apig @here")
        else:
            pigPriestess = 'the Pig Priestess'
            # id of OG Pig Priestess; mention her if she is in this guild
            priestess_inguild = message.guild.get_member(436182601904947211)
            if priestess_inguild is not None:
                pigPriestess = str(priestess_inguild.mention)
            await message.channel.send("Someone raise "+ pigPriestess +" from slumber for a GC Pig Prayer!")
        return
    return
# Table of image commands: each entry maps a set of command aliases to the
# image folder (under Images\) to pick a random file from. Emoji have no
# case, so comparing against the lowercased content is equivalent to the
# original per-alias checks.
_IMAGE_COMMANDS = [
    ({'?unipig', '?pigcorn', '?upig'}, 'Unipigs'),
    ({'?unicorn', '?🦄'}, 'Unicorns'),
    ({'?pig', '?hog', '?🐗', '?🐷', '?🐖', '?🐽'}, 'Pigs'),
    ({'?aubergine', '?eggplant', '?🍆'}, 'Eggplants'),
    ({'?sausage'}, 'Sausages'),
    ({'?bacon'}, 'Bacon'),
    ({'?cake', '?🍰', '?🎂', '?🧁', '?🥧'}, 'Cakes'),
    ({'?poulpe', '?8', '?🐙', '?kraken', '?octopus', '?korosensei', '?octo'}, 'Poulpes'),
    ({'?pineapple', '?🍍'}, 'Pineapples'),
]

# Handler for the special commands for images
async def send_photo_on_command(message):
    """If the message is one of the image commands, send a random image from
    the matching folder; otherwise do nothing.

    Refactored from nine copy-pasted if-blocks into a single lookup table
    (_IMAGE_COMMANDS) — behavior is unchanged. Paths use Windows separators,
    matching the rest of this file.
    """
    command = message.content.lower()
    for aliases, folder in _IMAGE_COMMANDS:
        if command in aliases:
            directory = "Images\\" + folder
            await message.channel.send(file=discord.File(directory + "\\" + random.choice(os.listdir(directory))))
            return
    return
# Rules of Acquisition setup.
# Path to the RoA spreadsheet (Windows-style separators, like the rest of the file):
roa_file_path = "Excels\\RoA.xls"
# The workbook is opened once at import time; the first sheet is expected to
# hold one rule per row in column 0.
roa_work_book = xlrd.open_workbook(roa_file_path)
roa_sheet = roa_work_book.sheet_by_index(0)
# Rules of Acquisition message:
async def roa_message(message):
    """Handle '?roa' / '?rulesofacquisition': post one random rule from the
    spreadsheet loaded at module import."""
    if message.content.lower() in ('?roa', '?rulesofacquisition'):
        # Bug fix: the row count was hard-coded to 154; use the sheet's actual
        # row count so adding/removing rules in the spreadsheet just works.
        # (Assumes every row is a rule — the original also started at row 0.)
        index = random.randrange(roa_sheet.nrows)
        # send cell value with rule
        await message.channel.send(str(roa_sheet.cell_value(index, 0)))
        return
    return
# Google Images Search client (API key, custom search engine ID).
# SECURITY(review): these credentials are hard-coded and committed to the
# repository — they should be revoked and loaded from environment variables /
# a .env file instead. Left in place here because removing them breaks the call.
gis = GoogleImagesSearch('AIzaSyAC1N7KAdA7sHYUWmFXZbrLulY-LMBkaRc', '8dbb2bb91041d61c5')
# Google Image Search command handler
async def google_an_image(message):
    """Handle '?gi <query>': download a few Google Images results for the
    query, post one at random, then clear the download directory."""
    if not message.content.lower().startswith('?gi '):
        return
    # Everything after '?gi' (leading space included) is the search query.
    _search_params = {
        'q': message.content[3:],
        'num': 5,
        'safe': 'high',
        'fileType': 'JPG',
        'imgSize': 'MEDIUM',
    }
    download_dir = "Images\\GoogleImagesSearch"
    # Download the results, send one at random, then delete all leftovers so
    # the directory is empty for the next search.
    gis.search(search_params=_search_params, path_to_dir=download_dir)
    await message.channel.send(file=discord.File(download_dir + "\\" + random.choice(os.listdir(download_dir))))
    for leftover in os.listdir(download_dir):
        os.remove(download_dir + "\\" + leftover)
    return
|
{"/bot.py": ["/fun_zone_bot.py", "/moderation_zone_bot.py", "/embed_routine.py"]}
|
30,113
|
Valrvn/Butler-Bot
|
refs/heads/main
|
/bot.py
|
# bot.py — entry point: wires the feature modules to a discord.Client.
import fun_zone_bot as fzb  # Fun Zone implementations for the bot
import moderation_zone_bot as mzb  # Moderation Zone implementations for the bot
import embed_routine as ezb  # Embed-based communication
import os
import discord
from dotenv import load_dotenv  # loads the Discord token from a local .env file
load_dotenv()
TOKEN = os.getenv('DISCORD_TOKEN')
# The code needs a .env file next to this script with the following content:
#   DISCORD_TOKEN=<copy and paste your token from the Discord Developer Portal here>
# check sources.txt for more information on this step
# The bot client; all gateway intents are enabled so that message and
# reaction events (including member data) are delivered.
client = discord.Client(intents=discord.Intents.all())
# Debug hook: confirms on the console that the bot connected successfully.
@client.event
async def on_ready():
    bot_user = client.user
    print(f'{bot_user} has connected to Discord!')
# Central message dispatcher.
@client.event
async def on_message(message):
    """Run every incoming message through each feature handler in turn.

    Each handler checks message.content itself and returns silently when the
    message is not its command, so calling all of them per message is safe.
    """
    # Ignore the bot's own messages to avoid feedback loops.
    if message.author == client.user:
        return
    # Messages from other users (can also be bots)
    await send_butler_menu(message)  # Butler menu
    await fzb.apig_message(message)  # Apig routine
    await fzb.roa_message(message)  # Method for Rules of Acquisition
    await fzb.google_an_image(message)  # Google Image Search
    await fzb.send_photo_on_command(message)  # Special commands for images
    await mzb.copy_all_to_channel(message)  # Copy all messages of channel to another channel
    await mzb.copy_all_from_channel(message)  # Copy all messages of another channel to channel
    await mzb.move_all_to_channel(message)  # Move all messages of channel to another channel
    await mzb.move_all_from_channel(message)  # Move all messages of channel to another channel
    await mzb.copy_new_message_to_channel(message)  # copy all new messages of channel to another channel
    await mzb.copy_one_msg_to_channel(message)  # copy one specific message of channel to another channel
    await ezb.start(message)  # start a personal session with Butler, based on embeds
    return
@client.event
async def on_raw_reaction_add(payload):
    """Forward reactions on the bot's own single-embed messages to the
    embed-session editor."""
    # Ignore reactions made by the bot itself.
    if payload.user_id == client.user.id:
        return
    channel = client.get_channel(payload.channel_id)
    message = await channel.fetch_message(payload.message_id)
    # Only single-embed messages written by this bot are editable sessions.
    if message.author == client.user and len(message.embeds) == 1:
        await ezb.edit(message, payload)
    return
# Butler Menu (Help)
async def send_butler_menu(message):
    """Handle '?butler' / '?help': post the command overview as plain text."""
    if '?butler' == message.content.lower() or '?help' == message.content.lower():
        await message.channel.send(
            "\t- ?help or ?butler - post the bot menu\n"
            "Fun zone:\n"
            "\t- ?apig - post the pig prayer. If PigPriestess is the user that requested Apig, also tag here. If not post message, saying that Val needs to wake up...\n"
            "\t- ?upig, ?unipig, ?pigcorn = send random unicorn pig image\n"
            "\t- ?unicorn, ?🦄 = send random unicorn image\n"
            "\t- ?🐽, ?pig, ?🐖, ?🐷, ?🐗, ?hog = send random pig image\n"
            "\t- ?aubergine, ?eggplant, ?🍆 = send random aubergine image\n"
            "\t- ?sausage = send random sausage image\n"
            "\t- ?bacon = send random bacon image\n"
            "\t- ?cake, ?🍰, ?🎂, ?🧁, ?🥧 = send random cake image\n"
            "\t- ?poulpe, ?8, ?octo, ?korosensei, ?octopus, ?kraken, ?🐙 = send random octopus image\n"
            "\t- ?rulesOfAcquisition, ?roa (Capitalization doesn't matter) = send random rule of Acquisition\n"
            "\t- ?gi <parameter> = Google Image search. Put search parameters after the ?gi command. Please keep it clean, or Butler will get upset and go offline...\n"
            "\n"
            "For any complaints or bot requests, message @Valravn aka the one learning Python with this silly little bot.\n"
            #"The bot is restricted to #funny-pics and #🦄🐽-upig-of-the-day only when in the UPIG server.\n"
            "\t- !start = start a personal interactive session with Butler"
        )
        return
    return
# Blocking call: connects with the token loaded from .env and runs the event loop.
client.run(TOKEN)
|
{"/bot.py": ["/fun_zone_bot.py", "/moderation_zone_bot.py", "/embed_routine.py"]}
|
30,114
|
Valrvn/Butler-Bot
|
refs/heads/main
|
/embed_routine.py
|
import discord
#API for this functionality
#To be called for a Message that is the !start command
async def start(message):
    """Handle '!start' / '!start dm': open an embed session in the current
    channel and seed it with the session's reactions."""
    content = message.content.lower()
    if content == '!start':
        session_kind = "start"
    elif content == "!start dm":
        session_kind = "dm_start"
    else:
        return
    session_message = await message.channel.send(embed=await embed_selector(session_kind))
    for reaction in await embed_reaction_selector(session_kind):
        await session_message.add_reaction(reaction)
    return
async def edit(message, payload):
    """Public entry point: forward a reaction event to the embed editor."""
    return await edit_embed(message, payload)
#----------END of API-----------------------
#----------EMBED related code---------------
#EMBED GENERATORS
#The local embed types are strings for now.
#idea for a TODO - remake them as enums
#embedTypes = ["start","end","ERROR","dm_start","dm_colour_pick"]
# Caller for all embed generators by type.
async def embed_selector(type):
    """Build and return the embed for the given session type, or None for an
    unknown type."""
    factories = {
        "start": start_embed,
        "end": end_embed,
        "dm_start": start_dm_embed,
        "dm_colour_pick": colour_pick_dm_embed,
    }
    factory = factories.get(type)
    if factory is None:
        return None
    return await factory()
# Gives the proper list of reactions for each embed type.
async def embed_reaction_selector(type):
    """Return the ordered reaction emoji to seed on an embed of the given
    type; a fresh list each call, or None for an unknown type."""
    reaction_sets = {
        "start": ["🖌", "🤖", "❌", "🤫"],
        "end": [],
        "dm_start": ["🎨"],
        "dm_colour_pick": ["🐽", "❤", "🧡", "💛", "💚", "💙", "💜"],
    }
    return reaction_sets.get(type)
# Maps the footer text (used as a unique ID) of an embed back to its type.
async def embed_footer_to_type_selector(footer):
    """Return the session type for a footer string; "ERROR" for anything that
    is not one of our session embeds."""
    footer_to_type = {
        "Start of personal session": "start",
        "End of personal session": "end",
        "Start of private session": "dm_start",
        "Colour settings of a private session": "dm_colour_pick",
    }
    return footer_to_type.get(footer, "ERROR")
# Makes the proper change to a session embed when a reaction arrives.
async def edit_embed(message, payload):
    """Advance an embed session according to the reaction in `payload`.

    Decides the next embed type and color from the current embed's footer and
    the emoji used, then either edits the message in place (guild sessions)
    or sends a fresh message (DM sessions, where reactions can't be cleared).
    """
    # get the old embed type from its footer text
    old_embed_type = await embed_footer_to_type_selector(message.embeds[0].footer.text)
    # do not touch these ones, they aren't ours!
    if old_embed_type == "ERROR":
        return
    # embed session was ended, pretend not to see it anymore
    if old_embed_type == "end":
        return
    # defaults: keep the current embed type and color
    new_embed_type = old_embed_type
    new_embed_color = message.embeds[0].color
    # check the emoji that was used - only unicode reactions can make a change in type
    ems = str(payload.emoji)
    # Bug fix: is_unicode_emoji is a method; without the parentheses the bare
    # attribute was always truthy, so custom emoji also entered this branch.
    if payload.emoji.is_unicode_emoji():
        if ems == "❌":
            new_embed_type = "end"
        elif old_embed_type == "start":
            if ems == "🤫":
                # move the session into the member's private messages
                new_embed_type = "end"
                dmSessionMessage = await payload.member.send(embed = await embed_selector("dm_start"))
                for r in await embed_reaction_selector("dm_start"):
                    await dmSessionMessage.add_reaction(r)
        elif old_embed_type == "dm_start":
            if ems == "🎨":
                new_embed_type = "dm_colour_pick"
        elif old_embed_type == "dm_colour_pick":
            # any colour choice returns to the DM start screen with the new color
            new_embed_type = "dm_start"
            if ems == "🐽":
                new_embed_color = 0xf5b7cd
            elif ems == "❤":
                new_embed_color = 0xeb1010
            elif ems == "🧡":
                new_embed_color = 0xfc850d
            elif ems == "💛":
                new_embed_color = 0xf5ed0f
            elif ems == "💚":
                new_embed_color = 0x0ff51e
            elif ems == "💙":
                new_embed_color = 0x0fcef5
            elif ems == "💜":
                new_embed_color = 0x8605f0
    # TODO - expand and change here as the embed types collection grows
    # Apply the transition: guild sessions edit in place; DM sessions send a
    # new message because the bot cannot clear reactions in DMs.
    if "dm" not in old_embed_type:
        await message.clear_reactions()
        newEmbed = await embed_selector(new_embed_type)
        newEmbed.color = new_embed_color
        await message.edit(embed = newEmbed)
        for r in await embed_reaction_selector(new_embed_type):
            await message.add_reaction(r)
    else:
        newEmbed = await embed_selector(new_embed_type)
        newEmbed.color = new_embed_color
        newMessage = await message.channel.send(embed = newEmbed)
        for r in await embed_reaction_selector(new_embed_type):
            await newMessage.add_reaction(r)
    return
#----------------EMBED Creators----------------
# Start-of-session embed.
async def start_embed():
    """Build the embed shown when a personal (guild) session starts."""
    embed = discord.Embed(
        title="Butler personal session",
        description="Welcome to your personal session with Butler",
        color=0xf5b7cd,
    )
    embed.add_field(
        # Typo fix in the user-facing text: "bellow" -> "below".
        name= "Click on the reactions below to help me find you what you're looking for",
        value= "🖌 to change your colour in Discord\n"
        "🤖 to revert to the standard colour for your Discord rank\n"
        "🤫 to move this session into your private messages\n"
        "❌ to close this session\n",
        inline=False
    )
    # The footer doubles as the session-type ID (see embed_footer_to_type_selector).
    embed.set_footer(text=("Start of personal session"))
    return embed
# End-of-session embed.
async def end_embed():
    """Build the farewell embed shown when a personal session is closed."""
    farewell = discord.Embed(
        title="Butler personal session",
        description="Thank you for paying me a visit, my lovely friend!",
        color=0xf5b7cd,
    )
    # The footer doubles as the session-type ID.
    farewell.set_footer(text="End of personal session")
    return farewell
async def start_dm_embed():
    """Build the embed shown when a private (DM) session starts."""
    embed = discord.Embed(
        title="Butler private session",
        description="Welcome to your private session with Butler",
        color=0xf5b7cd,
    )
    embed.add_field(
        # Typo fix in the user-facing text: "bellow" -> "below".
        name= "Click on the reactions below to help me find you what you're looking for",
        value= "🎨 to pick a new colour for this session\n",
        inline=False
    )
    # The footer doubles as the session-type ID.
    embed.set_footer(text=("Start of private session"))
    return embed
async def colour_pick_dm_embed():
    """Build the colour-picker embed for a private (DM) session."""
    embed = discord.Embed(
        title="Butler private session",
        description="Welcome to your private session with Butler",
        color=0xf5b7cd,
    )
    embed.add_field(
        # Typo fixes in the user-facing text: "bellow" -> "below",
        # "organge" -> "orange".
        name= "Click on the reactions below to help me find you what you're looking for",
        value= "🐽 for piggy pink\n"
        "❤ for red\n"
        "🧡 for orange\n"
        "💛 for yellow\n"
        "💚 for green\n"
        "💙 for blue\n"
        "💜 for purple\n",
        inline=False
    )
    # The footer doubles as the session-type ID.
    embed.set_footer(text=("Colour settings of a private session"))
    return embed
|
{"/bot.py": ["/fun_zone_bot.py", "/moderation_zone_bot.py", "/embed_routine.py"]}
|
30,115
|
Valrvn/Butler-Bot
|
refs/heads/main
|
/moderation_zone_bot.py
|
#moderation_zone_bot.py
# Fine-tuned handler: mirror every new message from one hard-coded channel to
# another. In the UPIG server: #rules (team zone) -> #rules (member zone).
async def copy_new_message_to_channel(message):
    """Mirror a message into the members' rules channel when it was posted in
    the team rules channel; otherwise do nothing."""
    source_channel_id = 698422697100574770  # rules (team zone)
    target_channel_id = 825704830957125642  # rules (new members zone)
    if message.channel.id != source_channel_id:
        return
    destination = message.guild.get_channel(target_channel_id)
    if destination is not None:
        await copy_one_message_to(message, destination, message.channel)
    return
# Copy one specific message from the channel where you post the command to
# another channel. Usable by the server owner or members with an admin role.
async def copy_one_msg_to_channel(message):
    """Handle '?cpyMsgTo <message_id> #channel': copy the identified message
    from the current channel into the mentioned channel."""
    if not message.content.startswith('?cpyMsgTo '):
        return
    user_roles_names = [r.name.lower() for r in message.author.roles]
    is_privileged = (message.author == message.guild.owner
                     or 'admin' in user_roles_names
                     or 'server admin' in user_roles_names)
    if is_privileged and len(message.channel_mentions) == 1:
        # Bug fix: the ID was sliced as content[10:28] (a string, and assuming
        # an 18-digit ID). Parse the first token after the command as an int
        # instead — fetch_message expects an integer ID, and newer Discord
        # snowflakes can be 19 digits.
        try:
            target_msg_id = int(message.content[10:].split()[0])
        except (IndexError, ValueError):
            return
        m = await message.channel.fetch_message(target_msg_id)
        to_channel = message.channel_mentions[0]
        if len(m.embeds) == 0:
            await to_channel.send(m.content)
        else:
            for e in m.embeds:
                await to_channel.send(embed = e)
    return
# '?cpyTo #channel' — copy this channel's full history into the mentioned
# channel. Usable by the server owner or members with an "admin" role.
async def copy_all_to_channel(message):
    """Handle '?cpyTo #channel': copy all messages from the current channel
    into the mentioned one (if different and the author is authorized)."""
    if not message.content.startswith('?cpyTo '):
        return
    role_names = {role.name.lower() for role in message.author.roles}
    authorized = message.author == message.guild.owner or 'admin' in role_names
    if authorized and len(message.channel_mentions) == 1:
        target = message.channel_mentions[0]
        if message.channel.id != target.id:
            await copy_messages_from_to(message.channel, target)
    return
# '?cpyFrom #channel' — copy the mentioned channel's full history into this
# channel. Usable by the server owner or members with an "admin" role.
async def copy_all_from_channel(message):
    """Handle '?cpyFrom #channel': copy all messages from the mentioned
    channel into the current one (if different and the author is authorized)."""
    if not message.content.startswith('?cpyFrom '):
        return
    role_names = {role.name.lower() for role in message.author.roles}
    authorized = message.author == message.guild.owner or 'admin' in role_names
    if authorized and len(message.channel_mentions) == 1:
        source = message.channel_mentions[0]
        if message.channel.id != source.id:
            await copy_messages_from_to(source, message.channel)
    return
# '?moveTo #channel' — move this channel's full history into the mentioned
# channel. Usable by the server owner or members with an "admin" role.
async def move_all_to_channel(message):
    """Handle '?moveTo #channel': move all messages from the current channel
    into the mentioned one (if different and the author is authorized)."""
    if not message.content.startswith('?moveTo '):
        return
    role_names = {role.name.lower() for role in message.author.roles}
    authorized = message.author == message.guild.owner or 'admin' in role_names
    if authorized and len(message.channel_mentions) == 1:
        target = message.channel_mentions[0]
        if message.channel.id != target.id:
            await move_messages_from_to(message.channel, target)
    return
# '?moveFrom #channel' — move the mentioned channel's full history into this
# channel. Usable by the server owner or members with an "admin" role.
async def move_all_from_channel(message):
    """Handle '?moveFrom #channel': move all messages from the mentioned
    channel into the current one (if different and the author is authorized)."""
    if not message.content.startswith('?moveFrom '):
        return
    role_names = {role.name.lower() for role in message.author.roles}
    authorized = message.author == message.guild.owner or 'admin' in role_names
    if authorized and len(message.channel_mentions) == 1:
        source = message.channel_mentions[0]
        if message.channel.id != source.id:
            await move_messages_from_to(source, message.channel)
    return
# Helper: copy the full history of one channel into another, oldest first.
async def copy_messages_from_to(from_channel, to_channel):
    """Copy every message of from_channel into to_channel in original order."""
    # Possible ToDo: history(limit = ????)
    async for old_message in from_channel.history(limit=None, oldest_first=True):
        await copy_one_message_to(old_message, to_channel, from_channel)
    return
# Helper: move the full history of one channel into another.
async def move_messages_from_to(from_channel, to_channel):
    """Copy every message of from_channel into to_channel (oldest first),
    then delete all messages from from_channel."""
    # Copy all messages to the recipient channel first.
    async for m in from_channel.history(limit=None, oldest_first=True):
        await copy_one_message_to(m, to_channel, from_channel)
    # Bug fix: the delete loop used history() with its default limit of 100,
    # so only the most recent 100 messages were removed from the source
    # channel. limit=None clears the whole history, matching the copy above.
    async for m in from_channel.history(limit=None):
        await m.delete()
    return
# Helper: copy one message (with extra handling for embeds) into a channel.
async def copy_one_message_to(message, to_channel, from_channel):
    """Repost `message` in `to_channel`, attributing it to its author and
    original channel. Embeds are forwarded as-is."""
    if message.author in to_channel.members:
        # Mention the author when they can see the target channel.
        author_label = message.author.mention
    else:
        # Bug fix: the original concatenated the Member object itself into the
        # string, which raises TypeError whenever the author is not a member
        # of the target channel. Fall back to the author's string form.
        author_label = str(message.author)
    if len(message.embeds) == 0:
        await to_channel.send(author_label + ' wrote in ' + from_channel.mention + ': ' + message.content)
    else:
        for e in message.embeds:
            await to_channel.send(embed = e)
    return
|
{"/bot.py": ["/fun_zone_bot.py", "/moderation_zone_bot.py", "/embed_routine.py"]}
|
30,124
|
yanqiangmiffy/bank-marketing
|
refs/heads/master
|
/ensemble_mean.py
|
# !/usr/bin/env python
# -*- coding:utf-8 _*-
"""
@Author:yanqiang
@File: ensemble_mean.py
@Time: 2018/11/9 16:59
@Software: PyCharm
@Description:
"""
import pandas as pd
import numpy as np
from sklearn.naive_bayes import GaussianNB
from sklearn.svm import SVC, LinearSVC
from sklearn.neighbors import KNeighborsClassifier
from sklearn.linear_model.logistic import LogisticRegression
from sklearn.neural_network import MLPClassifier
from sklearn.ensemble import AdaBoostClassifier
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.tree import DecisionTreeClassifier
from xgboost import XGBClassifier
from lightgbm import LGBMClassifier
from sklearn.metrics import roc_auc_score
from sklearn.model_selection import cross_val_score
from sklearn.preprocessing import LabelEncoder
from sklearn.preprocessing import StandardScaler, MinMaxScaler
import gc # 垃圾回收
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import KFold
from sklearn.metrics import roc_auc_score
from sklearn.svm import LinearSVC
from xgboost import XGBClassifier
from lightgbm import LGBMClassifier
from sklearn.preprocessing import MinMaxScaler
from sklearn.preprocessing import PolynomialFeatures
from sklearn.model_selection import GridSearchCV
from main import create_feature
# Load the raw competition data.
df_train = pd.read_csv('input/train.csv')
df_test = pd.read_csv('input/test.csv')
train_len = len(df_train)
# Stack train and test so feature engineering sees both at once.
df = pd.concat([df_train, df_test], axis=0, ignore_index=True)
train, test = create_feature(df)
# Every engineered column except the row id and the target.
cols = [col for col in train.columns if col not in ['id', 'y']]
X_train, y_train, X_test = train[cols], train['y'], test[cols]
# Model 1: random forest.
print("rfc..")
rfc_1 = RandomForestClassifier(random_state=0, n_estimators=2000)
rfc_1.fit(X_train, y_train)
pred1 = rfc_1.predict_proba(X_test)[:, 1]
# Model 2: XGBoost.
# NOTE(review): num_leaves / min_split_gain are LightGBM parameters and
# `verbose` is not an XGBClassifier constructor argument; older xgboost
# silently ignores unknown kwargs — confirm against the pinned version.
print("xgb..")
xgb = XGBClassifier(n_estimators=4000,
                    learning_rate=0.03,
                    num_leaves=30,
                    colsample_bytree=.8,
                    subsample=.9,
                    max_depth=7,
                    reg_alpha=.1,
                    reg_lambda=.1,
                    min_split_gain=.01,
                    min_child_weight=2,
                    verbose=True)
xgb.fit(X_train, y_train)
pred2 = xgb.predict_proba(X_test)[:, 1]
# Model 3: LightGBM.
print("gbm..")
gbm = LGBMClassifier(n_estimators=4000,
                     learning_rate=0.03,
                     num_leaves=30,
                     colsample_bytree=.8,
                     subsample=.9,
                     max_depth=7,
                     reg_alpha=.1,
                     reg_lambda=.1,
                     min_split_gain=.01,
                     min_child_weight=2,
                     silent=-1,
                     verbose=-1, )
gbm.fit(X_train, y_train)
pred3 = gbm.predict_proba(X_test)[:, 1]
# Weighted average of the three probability vectors (xgboost dominates).
y_test = np.average(np.array([pred1, pred2, pred3]), axis=0, weights=[0.1, 0.7, 0.2])
test['y'] = y_test
# Submission file: id,y rows with no header.
test[['id', 'y']].to_csv('result/ensemble_mean.csv', columns=None, header=False, index=False)
|
{"/ensemble_mean.py": ["/main.py"]}
|
30,125
|
yanqiangmiffy/bank-marketing
|
refs/heads/master
|
/main.py
|
# !/usr/bin/env python
# -*- coding:utf-8 _*-
"""
@Author:yanqiang
@File: main.py
@Time: 2018/11/5 17:11
@Software: PyCharm
@Description:
"""
import gc # 垃圾回收
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import KFold
from sklearn.metrics import roc_auc_score
from sklearn.svm import LinearSVC
from xgboost import XGBClassifier
from lightgbm import LGBMClassifier
from sklearn.preprocessing import MinMaxScaler
from sklearn.preprocessing import PolynomialFeatures
from sklearn.model_selection import GridSearchCV
import lightgbm as lgb
# Show up to 100 columns when printing frames, for easier inspection.
pd.set_option('display.max_columns',100)
gc.enable()
# Raw competition data.
df_train=pd.read_csv('input/train.csv')
df_test=pd.read_csv('input/test.csv')
train_len = len(df_train)
# Concatenate train and test so feature engineering treats both uniformly;
# `train_len` is used later to split them back apart.
df = pd.concat([df_train, df_test], axis=0, ignore_index=True)
def add_poly_features(data, column_names):
    """Expand `column_names` into degree-2 polynomial/interaction features.

    The selected columns are replaced by their polynomial expansion; all
    other columns of `data` are kept untouched.
    """
    selected = data[column_names]
    remainder = data.drop(column_names, axis=1)
    transformer = PolynomialFeatures(degree=2, interaction_only=False, include_bias=False)
    expanded = pd.DataFrame(
        transformer.fit_transform(selected),
        columns=transformer.get_feature_names(column_names),
    )
    # Insert each generated column at position 1, matching the original
    # behaviour (later-generated columns end up before earlier ones).
    for generated in expanded.columns:
        remainder.insert(1, generated, expanded[generated])
    return remainder
def create_feature(df):
    """Engineer features for the bank-marketing frame.

    Applies log transforms to numeric columns, maps month names to
    numbers, derives a day-of-year `date`, and one-hot encodes the
    categorical columns. Side effect: writes the final column list to
    input/0.94224.txt.

    Args:
        df: concatenated train+test frame (train rows first).

    Returns:
        (new_train, new_test): the engineered frame split back into train
        and test parts using the module-level `train_len`.
    """
    # ----------- numeric preprocessing --------
    # num_cols = ['age', 'balance', 'duration', 'campaign', 'pdays', 'previous']
    def standardize_nan(x):
        # Standardize while ignoring NaNs.
        x_mean = np.nanmean(x)  # mean excluding NaN entries
        x_std = np.nanstd(x)
        return (x - x_mean) / x_std
    df['log_age'] = np.log(df['age'])
    df['log_std_age'] = standardize_nan(df['log_age'])
    # Shift by the column minimum so the log argument stays positive.
    df["log_balance"] = np.log(df['balance'] - df['balance'] .min() + 1)
    df["log_duration"] = np.log(df['duration']+ 1)
    df["log_campaign"] = np.log(df['campaign'] + 1)
    df["log_pdays"] = np.log(df['pdays']- df['pdays'].min() + 1)
    df['log_previous'] = np.log(df['previous']+1)
    # Standardized variants were tried and disabled:
    # df['log_std_balance'] = standardize_nan(df['log_balance'])
    # df['log_std_duration'] = standardize_nan(df['log_duration'])
    # df['log_std_campaign'] = standardize_nan(df['log_campaign'])
    # df['log_std_pdays'] = standardize_nan(df['log_pdays'])
    # df['log_std_previous'] = standardize_nan(df['log_previous'])
    df = df.drop(["age","balance", "duration", "campaign", "pdays"], axis=1)
    # month: map month-name strings to their number
    df['month'] = df['month'].map({'jan': 1,
                                   'feb': 2,
                                   'mar': 3,
                                   'apr': 4,
                                   'may': 5,
                                   'jun': 6,
                                   'jul': 7,
                                   'aug': 8,
                                   'sep': 9,
                                   'oct': 10,
                                   'nov': 11,
                                   'dec': 12
                                   }).astype(int)
    # Cumulative days before each month (non-leap): Jan:0, Feb:31, Mar:31+28, ...
    day_sum = pd.Series(np.cumsum([0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30]), index=np.arange(1, 13))
    # Day-of-year as an integer feature.
    df['date'] = (df['month'].map(day_sum) + df['day']).astype(int)
    # ---------- categorical preprocessing ------------
    # cate_cols = ['job', 'marital', 'education', 'default', 'housing', 'loan', 'contact', 'day', 'month', 'poutcome']
    cate_cols = ['job', 'marital', 'education', 'default', 'housing', 'loan', 'contact', 'month', 'poutcome']
    df = pd.get_dummies(df, columns=cate_cols)
    # Record the engineered column list (kept for provenance of the
    # 0.94224-scoring submission).
    with open('input/0.94224.txt','w',encoding='utf-8') as f:
        f.write(str(list(df.columns)))
    # df.to_csv('input/0.94224.csv',index=None)
    # Split back into train/test using the module-level train_len.
    new_train,new_test=df[:train_len],df[train_len:]
    # print(list(new_train.columns))
    print(new_train.shape)
    return new_train,new_test
# Hyper-parameter tuning helper
def tune_params(model, params, X, y):
    """Grid-search `params` over `model` scored by ROC-AUC.

    Prints the CV results, best parameters and best score, then returns
    the fitted GridSearchCV object.
    """
    search = GridSearchCV(estimator=model, param_grid=params, scoring='roc_auc')
    search.fit(X, y)
    print(search.cv_results_, search.best_params_, search.best_score_)
    return search
# Feature-importance plotting helper
def plot_fea_importance(classifier, X_train, name="xgb"):
    """Plot the 40 most important features of a fitted tree model.

    Args:
        classifier: fitted estimator exposing `feature_importances_`.
        X_train (pd.DataFrame): training frame whose columns name the features.
        name (str, optional): title prefix. Defaults to "xgb" — previously
            this was a hard-coded local, now a backward-compatible parameter.
    """
    plt.figure(figsize=(10, 12))
    # Indices of the 40 most important features, descending; the slice here
    # makes the extra [:40] slices of the original redundant.
    indices = np.argsort(classifier.feature_importances_)[::-1][:40]
    g = sns.barplot(y=X_train.columns[indices],
                    x=classifier.feature_importances_[indices], orient='h')
    g.set_xlabel("Relative importance", fontsize=12)
    g.set_ylabel("Features", fontsize=12)
    g.tick_params(labelsize=9)
    g.set_title(name + " feature importance")
    plt.show()
def evaluate_cv5_lgb(train_df, test_df, cols, test=False):
    """5-fold CV training; prints OOF AUC and returns averaged test preds.

    NOTE(review): despite the `_lgb` name this trains XGBClassifier, and
    several constructor kwargs (num_leaves, min_split_gain, verbose) are
    LightGBM-style — older xgboost ignores unknown kwargs; confirm against
    the pinned library version.

    Args:
        train_df: training frame containing `cols` and a `y` target column.
        test_df: test frame containing `cols`.
        cols: feature column names to train on.
        test (bool): when True, also accumulate test-set predictions.

    Returns:
        Positive-class probabilities for test_df averaged over the 5 folds,
        or 0.0 when `test` is False.
    """
    kf = KFold(n_splits=5, shuffle=True, random_state=42)
    # Earlier grid-search experiment kept for reference:
    # xgb = XGBClassifier()
    # params = {"learning_rate": [0.08, 0.1, 0.12],
    #           "max_depth": [6, 7],
    #           "subsample": [0.95, 0.98],
    #           "colsample_bytree": [0.6, 0.7],
    #           "min_child_weight": [3, 3.5, 3.8]
    #           }
    # xgb = tune_params(xgb,params,train_df[cols],train_df.y.values)
    y_test = 0
    # Out-of-fold predictions covering the whole training set.
    oof_train = np.zeros((train_df.shape[0],))
    for i, (train_index, val_index) in enumerate(kf.split(train_df[cols])):
        X_train, y_train = train_df.loc[train_index, cols], train_df.y.values[train_index]
        X_val, y_val = train_df.loc[val_index, cols], train_df.y.values[val_index]
        xgb = XGBClassifier(n_estimators=4000,
                            learning_rate=0.03,
                            num_leaves=30,
                            colsample_bytree=.8,
                            subsample=.9,
                            max_depth=7,
                            reg_alpha=.1,
                            reg_lambda=.1,
                            min_split_gain=.01,
                            min_child_weight=2,
                            verbose=True)
        # Early stopping monitors AUC on the held-out fold.
        xgb.fit(X_train, y_train,
                eval_set=[(X_train, y_train), (X_val, y_val)],
                early_stopping_rounds=100, eval_metric=['auc'], verbose=False)
        y_pred = xgb.predict_proba(X_val)[:,1]
        if test:
            y_test += xgb.predict_proba(test_df.loc[:, cols])[:,1]
        oof_train[val_index] = y_pred
        # Plot feature importances for the first fold only.
        if i==0:
            plot_fea_importance(xgb,X_train)
        gc.collect()
    # AUC over the stitched-together out-of-fold predictions.
    auc = roc_auc_score(train_df.y.values, oof_train)
    # Average the accumulated test predictions over the 5 folds.
    y_test /= 5
    print('5 Fold auc:', auc)
    return y_test
# train,test=create_feature(df)
# cols = [col for col in train.columns if col not in ['id','y']]
# y_test=evaluate_cv5_lgb(train,test,cols,True)
#
# test['y']=y_test
# test[['id','y']].to_csv('result/01_lgb_cv5.csv',columns=None, header=False, index=False)
|
{"/ensemble_mean.py": ["/main.py"]}
|
30,168
|
igornfaustino/APS_Grafos
|
refs/heads/master
|
/get_data_api.py
|
import anapioficeandfire
import json
import re
api = anapioficeandfire.API()
books = api.get_books()
data = []
# Walk every book exposed by the API.
for book in books:
    # Fetch the data of every character referenced by the book.
    for character in book.characters:
        # Character URLs end in a numeric id.
        # BUG FIX: guard the regex match — calling .group(1) on a
        # non-matching URL used to raise AttributeError and kill the crawl.
        character_match = re.search(r'(\d+)$', character)
        if character_match:
            characterId = character_match.group(1)
            characterData = api.get_character(id=characterId)
            allegiances = []
            # Resolve each allegiance URL into a house name.
            for allegiance in characterData.allegiances:
                allegiance_match = re.search(r'(\d+)$', allegiance)
                if allegiance_match:
                    house = api.get_house(id=allegiance_match.group(1))
                    allegiances.append(house.name)
            # Flatten the character record for JSON serialisation.
            newCharacter = {
                "url": characterData.url,
                "name": characterData.name,
                "gender": characterData.gender,
                "culture": characterData.culture,
                "born": characterData.born,
                "died": characterData.died,
                "titles": characterData.titles,
                "aliases": characterData.aliases,
                "father": characterData.father,
                "mother": characterData.mother,
                "spouse": characterData.spouse,
                "allegiances": allegiances,
                "books": characterData.books,
                "povBooks": characterData.povBooks,
                "tvSeries": characterData.tvSeries,
                "playedBy": characterData.playedBy
            }
            # don't put equal characters in the data twice
            if newCharacter not in data:
                data.append(newCharacter)
with open('data.json', 'w') as outfile:
    json.dump(data, outfile)
|
{"/main.py": ["/get_data.py", "/graph.py"]}
|
30,169
|
igornfaustino/APS_Grafos
|
refs/heads/master
|
/get_data.py
|
import json
def get_data():
    """Load and return the character records stored in data.json."""
    with open('data.json') as source:
        return json.load(source)
|
{"/main.py": ["/get_data.py", "/graph.py"]}
|
30,170
|
igornfaustino/APS_Grafos
|
refs/heads/master
|
/main.py
|
import get_data
import graph
import copy
gp = graph.Graph(directed=True)
characters = get_data.get_data()
characters_valid = []
house_allegiances = {}
for character in characters:
if (character['name']):
# if("Season 6" in character['tvSeries']):
characters_valid.append(character)
for house in character['allegiances']:
if house in house_allegiances:
house_allegiances[house].append(character)
else:
house_allegiances[house] = [character]
gp.add_vertex(character['name'])
for character in characters_valid:
for house in character['allegiances']:
for house_char in house_allegiances[house]:
source_vertex = gp.get_vertex(character['name'])
destination_vertex = gp.get_vertex(house_char['name'])
if(source_vertex != destination_vertex):
gp.add_edge(source_vertex, destination_vertex)
# distance = gp.breadth_first_search(gp.get_vertex('Jon Snow'))
# for character in characters_valid:
# if len(character['povBooks']) > 0:
# print(character['name'], ':', distance[gp.get_vertex(character['name'])])
# components = gp.getSCCs()
# for component in components:
# print(component)
# print('------------------------')
maior = 0
maior_char = ''
for character in characters_valid:
new_gp = copy.deepcopy(gp)
new_gp.remove_vertex(new_gp.get_vertex(character['name']))
components = new_gp.getSCCs()
if len(components) > maior:
maior = len(components)
maior_char = character['name']
print("Maior numero de componentes: ", maior)
print("Personagem ponte: ", maior_char)
# print(len(components))
|
{"/main.py": ["/get_data.py", "/graph.py"]}
|
30,171
|
igornfaustino/APS_Grafos
|
refs/heads/master
|
/graph.py
|
'''
Graph Class
'''
import queue
import vertex
import edge
import graph_utils
class Graph(object):
    """Graph stored as an edge dictionary plus an adjacency list.

    `__edges` maps (source vertex, destination vertex) tuples to Edge
    objects; `__adjacent_list` maps each vertex to the list of vertices
    it connects to. Supports both directed and undirected graphs.
    """
    def __init__(self, directed=False):
        """Create a graph object.

        Args:
            directed (bool, optional): Defaults to False.
                tells if the graph is directed or not
        """
        self.__edges = {}  # (source vertex, destination vertex) -> Edge
        self.__adjacent_list = {}
        self.__directed = directed
        self.__distance = {}  # vertex -> distance (BFS / Dijkstra)
        self.__predecessors = {}  # vertex -> predecessor (BFS / DFS)
        self.__firstSee = {}  # vertex -> DFS discovery time
        self.__close = {}  # vertex -> DFS finish time
        self.__time = 0  # global DFS clock
    # edge functions
    # start here
    def add_edge(self, source, destination, label=None, value=1):
        """Add a new connection to the graph.

        Connects two vertices. In a directed graph only the
        source->destination direction is added; otherwise the reverse
        connection is added as well.

        Args:
            source (Vertex): a source vertex
            destination (Vertex): a destination vertex
            label (str, optional): Defaults to None. A label to this connection
            value (float, optional): Defaults to 1.
                A value to this connection
        """
        new_edge = edge.Edge(source, destination, label=label, value=value)
        # BUG FIX: register both endpoints in the adjacency list BEFORE the
        # membership test below; the old order raised KeyError whenever
        # `source` had never been seen by the graph.
        if new_edge.get_source() not in self.__adjacent_list:
            self.__adjacent_list[new_edge.get_source()] = []
        if new_edge.get_destination() not in self.__adjacent_list:
            self.__adjacent_list[new_edge.get_destination()] = []
        # only add when the connection does not already exist
        if destination not in self.__adjacent_list[source]:
            self.__edges[(new_edge.get_source(),
                          new_edge.get_destination())] = new_edge
            self.__adjacent_list[new_edge.get_source()].append(
                new_edge.get_destination())
            # if not directed, mirror the connection on the other node
            if not self.__directed:
                if source not in self.__adjacent_list[destination]:
                    self.__edges[(new_edge.get_destination(),
                                  new_edge.get_source())] = new_edge
                    self.__adjacent_list[new_edge.get_destination()].append(
                        new_edge.get_source())
    def remove_edge(self, edge_to_remove):
        """Remove a connection from the graph.

        Args:
            edge_to_remove (Edge): the edge (connection) you want to remove
        """
        self.__adjacent_list[edge_to_remove.get_source()].remove(
            edge_to_remove.get_destination())
        self.__edges.pop(
            (edge_to_remove.get_source(), edge_to_remove.get_destination())
        )
        if not self.__directed:
            # undirected graphs store the mirrored edge as well
            self.__adjacent_list[edge_to_remove.get_destination()].remove(
                edge_to_remove.get_source())
            self.__edges.pop(
                (edge_to_remove.get_destination(), edge_to_remove.get_source())
            )
    def get_all_edges(self):
        """Return all edges of the graph, each Edge object listed once.

        Returns:
            list: a list with all the edges
        """
        edges = []
        for key in self.__edges:
            if (self.__edges[key] not in edges):
                edges.append(self.__edges[key])
        return edges
    def get_edge_from_souce_destination(self, source, destination):
        """Return the edge matching (source, destination), or None.

        (The historical 'souce' typo is kept for caller compatibility.)

        Args:
            source (Vertex): a source vertex from the connection
            destination (Vertex): a destination vertex from the connection

        Returns:
            Edge: the edge matching source and destination, or None
        """
        # BUG FIX: dict.get returns None for a missing pair; the old direct
        # indexing raised KeyError instead of returning None as documented.
        return self.__edges.get((source, destination))
    # end here
    # Vertex functions
    # start here
    def add_vertex(self, name, value=None):
        """Add a new vertex unless one with the same name already exists.

        Args:
            name (str): a name for the new vertex
            value (float, optional): Defaults to None.
                a value for the new vertex
        """
        for key in self.__adjacent_list:
            if key.get_name() == name:
                return  # duplicate names are silently ignored
        # BUG FIX: forward `value`; it was hard-coded to None before.
        new_vertex = vertex.Vertex(name, value=value)
        self.__adjacent_list[new_vertex] = []
    def get_vertex(self, name):
        """Return the vertex with the given name, or None.

        Args:
            name (str): name of the vertex that you want

        Returns:
            Vertex: the vertex matching the name, or None
        """
        for key in self.__adjacent_list:
            if key.get_name() == name:
                return key
        return None
    def get_all_vertex(self):
        """Return a list with all vertices of the graph."""
        # local renamed from `vertex` so it no longer shadows the module
        vertices = []
        for key in self.__adjacent_list:
            vertices.append(key)
        return vertices
    def adjacents_vertex(self, vertex):
        """Return the list of adjacents of a vertex.

        Args:
            vertex (Vertex): vertex you want to know the adjacents of
        """
        return self.__adjacent_list[vertex]
    def remove_vertex(self, vertex_to_remove):
        """Remove a vertex and every connection that involves it.

        Args:
            vertex_to_remove (Vertex): vertex you want to remove
        """
        for key in self.__adjacent_list:
            if vertex_to_remove in self.__adjacent_list[key]:
                self.__adjacent_list[key].remove(vertex_to_remove)
        self.__adjacent_list.pop(vertex_to_remove, None)
        # drop every stored edge that touches the vertex
        for key in list(self.__edges):
            if vertex_to_remove in key:
                self.__edges.pop(key, None)
    # end here
    def print_adjacent_list(self):
        """Print the adjacency list, i.e. the whole graph."""
        print(self.__adjacent_list)
    def get_order(self):
        """Return the order (number of vertices) of the graph."""
        return len(self.__adjacent_list)
    def breadth_first_search(self, initial_vertex):
        """Compute BFS distances from `initial_vertex` to every vertex.

        Vertices unreachable from the start keep distance float("inf").

        Args:
            initial_vertex (Vertex): BFS start vertex

        Returns:
            dict: vertex -> distance from the initial vertex
        """
        # colors: 0 white (unvisited), 1 grey (queued), 2 black (done)
        for key in self.__adjacent_list:
            if key != initial_vertex:
                key.set_color(0)
                self.__distance[key] = float("inf")
                self.__predecessors[key] = None
        # invalid start vertex: every distance stays at infinity
        if not initial_vertex:
            return self.__distance
        initial_vertex.set_color(1)
        self.__distance[initial_vertex] = 0
        q = queue.Queue()
        q.put(initial_vertex)
        while not q.empty():
            current = q.get()
            for v in self.__adjacent_list[current]:
                if v.get_color() == 0:  # white: not seen yet
                    v.set_color(1)
                    self.__distance[v] = self.__distance[current] + 1
                    self.__predecessors[v] = current
                    q.put(v)
            current.set_color(2)
        return self.__distance
    def __dfs_visit(self, vertex):
        # Recursive DFS visit: records discovery/finish times and
        # predecessors using the shared class clock.
        self.__time = self.__time + 1
        self.__firstSee[vertex] = self.__time
        vertex.set_color(1)
        for adjacent in self.__adjacent_list[vertex]:
            if adjacent.get_color() == 0:
                self.__predecessors[adjacent] = vertex
                self.__dfs_visit(adjacent)
        vertex.set_color(2)
        self.__time += 1
        self.__close[vertex] = self.__time
    def deep_first_search(self):
        """Run DFS over the whole graph.

        Returns:
            tuple: (discovery times, finish times, predecessors) dicts
        """
        # colors: 0 white (unvisited), 1 grey (in progress), 2 black (done)
        for key in self.__adjacent_list:
            key.set_color(0)
            self.__predecessors[key] = None
        self.__time = 0
        for key in self.__adjacent_list:
            if key.get_color() == 0:
                self.__dfs_visit(key)
        return self.__firstSee, self.__close, self.__predecessors
    def dijkistra(self, initial_vertex):
        """Single-source shortest paths (name keeps the historical typo).

        NOTE(review): termination relies on graph_utils.get_min removing
        the chosen node from `nodes` — confirm against graph_utils.
        """
        for key in self.__adjacent_list:
            if key != initial_vertex:
                self.__distance[key] = float("inf")
                self.__predecessors[key] = None
        self.__distance[initial_vertex] = 0
        nodes = list(self.__adjacent_list.keys())
        while len(nodes) != 0:
            node = graph_utils.get_min(nodes, self.__distance)
            for adjacent in self.__adjacent_list[node]:
                # relax the edge node -> adjacent
                value = (self.__distance[node] +
                         self.get_edge_from_souce_destination(node, adjacent)
                         .get_value())
                if (self.__distance[adjacent] > value):
                    self.__distance[adjacent] = value
                    self.__predecessors[adjacent] = node
        return self.__distance
    def in_degree_vertex(self, vertex):
        """Return the in-degree of a vertex.

        Args:
            vertex (Vertex): vertex you want to know the degree of

        Returns:
            integer: in-degree of the vertex
        """
        if self.__directed:
            inVertex = 0
            for key in self.__adjacent_list:
                if vertex in self.__adjacent_list[key]:
                    inVertex = inVertex + 1
            return inVertex
        else:
            # undirected: in-degree equals the plain degree
            return len(self.__adjacent_list[vertex])
    def degree_vertex(self, vertex):
        """Return the degree (in + out for directed graphs) of a vertex.

        Args:
            vertex (Vertex): vertex you want to know the degree of

        Returns:
            integer: degree of the vertex
        """
        if self.__directed:
            inVertex = 0
            outVertex = len(self.__adjacent_list[vertex])
            for key in self.__adjacent_list:
                if vertex in self.__adjacent_list[key]:
                    inVertex = inVertex + 1
            return outVertex + inVertex
        else:
            return len(self.__adjacent_list[vertex])
    def is_completed(self):
        """Return True when every pair of distinct vertices is connected.

        Returns:
            Bool: whether the graph is complete
        """
        for node in self.__adjacent_list:
            for key in self.__adjacent_list:
                if node != key:
                    if node not in self.__adjacent_list[key]:
                        return False
        return True
    # Strongly Connected Components (Kosaraju's algorithm)
    # Start Here
    def __DFSUtil(self, v, visited, component):
        # Collect every vertex reachable from v into `component`.
        visited[v] = True
        component.append(v)
        for i in self.__adjacent_list[v]:
            if not visited[i]:
                self.__DFSUtil(i, visited, component)
    def fillOrder(self, v, visited, stack):
        """Push vertices onto `stack` in order of DFS finish time."""
        visited[v] = True
        for i in self.__adjacent_list[v]:
            if not visited[i]:
                self.fillOrder(i, visited, stack)
        stack = stack.append(v)
    def getTranspose(self):
        """Return the reverse (transpose) of this graph."""
        g = Graph(directed=True)
        for v in self.get_all_vertex():
            g.__adjacent_list[v] = []
        # add every edge with source and destination swapped
        for i in self.get_all_vertex():
            for j in self.adjacents_vertex(i):
                g.add_edge(j, i)
        return g
    def getSCCs(self):
        """Return the strongly connected components as lists of vertices."""
        stack = []
        # first DFS pass: record finish order
        visited = {}
        for v in self.get_all_vertex():
            visited[v] = False
        for v in self.get_all_vertex():
            if not visited[v]:
                self.fillOrder(v, visited, stack)
        # second pass runs on the transposed graph
        gr = self.getTranspose()
        visited = {}
        for v in self.get_all_vertex():
            visited[v] = False
        components = []
        i = 0
        # pop vertices in reverse finish order; each unvisited pop seeds
        # exactly one strongly connected component
        while stack:
            v = stack.pop()
            if not visited[v]:
                components.append([])
                gr.__DFSUtil(v, visited, components[i])
                i += 1
        return components
    # End Here
if __name__ == '__main__':
    # Earlier ad-hoc manual test kept for reference:
    # graph = Graph()
    # graph.add_vertex('teste')
    # graph.add_vertex('teste')
    # graph.add_vertex('teste2')
    # graph.add_edge(graph.get_vertex('teste'), graph.get_vertex('teste2'))
    # print(graph.adjacents_vertex(graph.get_vertex('teste')))
    # print(graph.get_order())
    # print(graph.get_all_edges())
    # myEdge = graph.get_edge_from_souce_destination(
    #     graph.get_vertex('teste'), graph.get_vertex('teste2'))
    # graph.remove_vertex(graph.get_vertex('teste'))
    # print(graph.get_all_edges())
    # graph.print_adjacent_list()
    # Create a graph given in the above diagram
    g = Graph(directed=True)
    g.add_vertex(0)
    g.add_vertex(1)
    g.add_vertex(2)
    g.add_vertex(3)
    g.add_vertex(4)
    g.add_edge(g.get_vertex(1), g.get_vertex(0))
    g.add_edge(g.get_vertex(0), g.get_vertex(2))
    g.add_edge(g.get_vertex(2), g.get_vertex(1))
    g.add_edge(g.get_vertex(0), g.get_vertex(3))
    g.add_edge(g.get_vertex(3), g.get_vertex(4))
    # For this wiring the SCCs are {0,1,2}, {3} and {4}.
    print ("Following are strongly connected components " +
           "in given graph")
    print(g.getSCCs())
    print(len(g.getSCCs()))
    # This code is contributed by Neelam Yadav
|
{"/main.py": ["/get_data.py", "/graph.py"]}
|
30,172
|
mamerisawesome/gyakujinton
|
refs/heads/master
|
/gyakujinton/Shape/Shape.py
|
class Shape():
    """Factory for OpenCV polygon point arrays.

    Instantiating Shape(points) does not produce a Shape instance:
    __new__ wraps the point list in an extra axis and returns an
    np.int32 ndarray of shape (1, n_points, 2) — the layout that
    cv2.polylines expects for its `pts` argument.
    """
    def __new__(cls, points):
        import numpy
        wrapped = [points]
        return numpy.array(wrapped, numpy.int32)
|
{"/gyakujinton/Shape/__init__.py": ["/gyakujinton/Shape/Shape.py"], "/gyakujinton/__init__.py": ["/gyakujinton/functions/draw.py", "/gyakujinton/functions/skew.py"], "/gyakujinton/cli.py": ["/gyakujinton/functions/draw.py", "/gyakujinton/functions/skew.py"], "/gyakujinton/functions/draw.py": ["/gyakujinton/Shape/__init__.py"]}
|
30,173
|
mamerisawesome/gyakujinton
|
refs/heads/master
|
/gyakujinton/functions/rotate.py
|
import cv2
from gyakujinton.Window import Window
def rotate_image(
    image_path,
    output_path=None,
    angle=40,
    scale=1.0,
    patch=None
):
    """Rotate an image around the centre of `patch`, then save or show it.

    Args:
        image_path: path of the source image.
        output_path: when given, the rotated image is written there;
            otherwise it is displayed in a window.
        angle: rotation angle in degrees passed to cv2.
        scale: isotropic scale factor applied during rotation.
        patch: optional four-corner region; the midpoint of its diagonal
            (corners 0 and 2) becomes the rotation centre. Defaults to
            the full image.
    """
    image = Window(image_path=image_path)
    (height, width, _) = image.window.shape
    if patch is None:
        # NOTE(review): [height, width] / [width, 0] mixes axes — the third
        # corner of a full-image patch would normally be [width, height];
        # confirm the intended corner order.
        patch = [
            [0, 0],
            [0, height],
            [height, width],
            [width, 0],
        ]
    # Rotation centre: midpoint of the patch diagonal (corner 0 to corner 2).
    center = (
        (patch[0][0] + patch[2][0]) / 2,
        (patch[0][1] + patch[2][1]) / 2,
    )
    matrix = cv2.getRotationMatrix2D(center, angle, scale)
    # shape[1::-1] is (width, height), the dsize order cv2 expects.
    image.window = cv2.warpAffine(
        image.window,
        matrix,
        image.window.shape[1::-1],
        flags=cv2.INTER_LINEAR
    )
    if output_path:
        image.save(output_path)
        return
    return image.show()
|
{"/gyakujinton/Shape/__init__.py": ["/gyakujinton/Shape/Shape.py"], "/gyakujinton/__init__.py": ["/gyakujinton/functions/draw.py", "/gyakujinton/functions/skew.py"], "/gyakujinton/cli.py": ["/gyakujinton/functions/draw.py", "/gyakujinton/functions/skew.py"], "/gyakujinton/functions/draw.py": ["/gyakujinton/Shape/__init__.py"]}
|
30,174
|
mamerisawesome/gyakujinton
|
refs/heads/master
|
/gyakujinton/Shape/__init__.py
|
from .Shape import Shape
|
{"/gyakujinton/Shape/__init__.py": ["/gyakujinton/Shape/Shape.py"], "/gyakujinton/__init__.py": ["/gyakujinton/functions/draw.py", "/gyakujinton/functions/skew.py"], "/gyakujinton/cli.py": ["/gyakujinton/functions/draw.py", "/gyakujinton/functions/skew.py"], "/gyakujinton/functions/draw.py": ["/gyakujinton/Shape/__init__.py"]}
|
30,175
|
mamerisawesome/gyakujinton
|
refs/heads/master
|
/gyakujinton/__init__.py
|
from .functions.draw import generate_superimposition
from .functions.draw import draw_on_image
from .functions.skew import skew_image
|
{"/gyakujinton/Shape/__init__.py": ["/gyakujinton/Shape/Shape.py"], "/gyakujinton/__init__.py": ["/gyakujinton/functions/draw.py", "/gyakujinton/functions/skew.py"], "/gyakujinton/cli.py": ["/gyakujinton/functions/draw.py", "/gyakujinton/functions/skew.py"], "/gyakujinton/functions/draw.py": ["/gyakujinton/Shape/__init__.py"]}
|
30,176
|
mamerisawesome/gyakujinton
|
refs/heads/master
|
/gyakujinton/Window/Window.py
|
import cv2
import numpy as np
class Window():
    """A named OpenCV image buffer that can be drawn on, shown and saved.

    `self.window` holds the raw image — loaded from `image_path`, or a
    black height x width x 3 canvas when no path is given. `self.canvas`
    only exists once register() has drawn something.
    """
    def __init__(
        self,
        name="Gyaku Jinton",
        width=512,
        height=512,
        image_path=None
    ):
        self.name = name
        if not image_path:
            # blank black canvas with 3 (BGR) channels
            dims = (height, width, 3)
            self.window = np.zeros(dims, dtype="uint8")
            self.window.fill(0)
        else:
            self.window = cv2.imread(image_path)
    def register(self, points, rgb=(0, 0, 0), thickness=3):
        """Draw a closed polyline over the image.

        Args:
            points: polygon point array in cv2.polylines layout.
            rgb (tuple, optional): line color. Defaults to black.
            thickness (int, optional): line thickness. Defaults to 3.
        """
        self.canvas = cv2.polylines(
            img=self.window,
            pts=points,
            isClosed=True,
            color=rgb,  # in rgb
            # BUG FIX: honour the caller's thickness; it was hard-coded to 3
            # so the parameter was silently ignored.
            thickness=thickness
        )
    def show(self, window_size=None):
        """Display the drawn canvas (or the raw image if nothing drawn)."""
        try:
            cv2.imshow(self.name, self.canvas)
        except AttributeError:
            # register() was never called; show the untouched image
            cv2.imshow(self.name, self.window)
        if window_size:
            cv2.namedWindow(self.name, cv2.WINDOW_NORMAL)
            cv2.resizeWindow(self.name, *window_size)
        cv2.waitKey(0)
        cv2.destroyAllWindows()
    def save(self, output_path):
        """Write the drawn canvas (or the raw image) to output_path."""
        try:
            cv2.imwrite(output_path, self.canvas)
        except AttributeError:
            cv2.imwrite(output_path, self.window)
|
{"/gyakujinton/Shape/__init__.py": ["/gyakujinton/Shape/Shape.py"], "/gyakujinton/__init__.py": ["/gyakujinton/functions/draw.py", "/gyakujinton/functions/skew.py"], "/gyakujinton/cli.py": ["/gyakujinton/functions/draw.py", "/gyakujinton/functions/skew.py"], "/gyakujinton/functions/draw.py": ["/gyakujinton/Shape/__init__.py"]}
|
30,177
|
mamerisawesome/gyakujinton
|
refs/heads/master
|
/gyakujinton/functions/skew.py
|
import numpy as np
import cv2
from gyakujinton.Window import Window
def skew_image(image_path, output_path=None, patch=None):
    """Apply a random perspective warp ("skew") to an image.

    Args:
        image_path: path of the source image.
        output_path: when given, the warped image is saved there instead
            of being displayed.
        patch: optional four-corner region to crop and warp; defaults to
            the full image.

    Returns:
        dict with "original" and "warped" entries, each holding the
        corner coordinates and the image array.
    """
    import random
    image = Window(image_path=image_path)
    # Crop to the requested patch before warping.
    # NOTE(review): this indexing assumes a specific corner ordering of
    # `patch` — confirm the expected layout with the CLI caller.
    if patch is not None:
        image.window = image.window[
            patch[0][1]: patch[1][1],
            patch[0][0]: patch[3][0],
        ]
    (height, width, _) = image.window.shape
    original_image = image.window[:]
    if patch is None:
        # Default patch: full-image corners.
        # NOTE(review): the x/y mixing here ((height, 0) vs (0, width))
        # looks inconsistent — verify against findHomography expectations.
        patch = [
            (0, 0),
            (height, 0),
            (width, height),
            (0, width),
        ]
    all_x = [point[0] for point in patch]
    all_y = [point[1] for point in patch]
    skew_coords = []
    # Nudge each corner by a random 10%-40% of half the image size,
    # direction depending on which extreme corner it is.
    for point in patch:
        perc_rand = random.uniform(0.1, 0.4)
        new_x = 0
        new_y = 0
        if point[0] == min(all_x) and point[1] == min(all_y):
            new_x = round(point[0] + ((width / 2) * perc_rand))
            new_y = round(point[1] + ((height / 2) * perc_rand))
        elif point[0] > min(all_x) and point[1] > min(all_y):
            new_x = round(point[0] - ((width / 2) * perc_rand))
            new_y = round(point[1] - ((height / 2) * perc_rand))
        elif point[0] > min(all_x) and point[1] < max(all_y):
            new_x = round(point[0] - ((width / 2) * perc_rand))
            new_y = round(point[1] + ((height / 2) * perc_rand))
        elif point[0] < max(all_x) and point[1] > min(all_y):
            new_x = round(point[0] + ((width / 2) * perc_rand))
            new_y = round(point[1] - ((height / 2) * perc_rand))
        skew_coords += [(new_x, new_y)]
    # convert to valid input for cv2 homography
    patch = np.array(patch)
    skew_coords = np.array(skew_coords)
    h, status = cv2.findHomography(patch, skew_coords)
    image.window = cv2.warpPerspective(
        src=image.window,
        M=h,
        dsize=(width, height)
    )
    padding = 0
    # Paste the warped image onto a fresh (optionally padded) canvas.
    screen = Window(width=width + padding, height=height + padding)
    screen.window[
        padding:image.window.shape[0] + padding,
        padding:image.window.shape[1] + padding,
        :
    ] = image.window
    # set alpha channel
    b_channel, g_channel, r_channel = cv2.split(screen.window)
    alpha_channel = np.ones(b_channel.shape, dtype=b_channel.dtype) * 255
    # make pure-black background pixels fully transparent
    for d, dimension in enumerate(screen.window):
        for p, pixel in enumerate(dimension):
            if list(pixel) == [0, 0, 0]:
                alpha_channel[d][p] = 0
    screen.window = cv2.merge((b_channel, g_channel, r_channel, alpha_channel))
    output = {
        "original": {
            "corners": patch.tolist(),
            "image": original_image,
        },
        "warped": {
            "corners": skew_coords.tolist(),
            "image": image.window,
        },
    }
    if output_path:
        screen.save(output_path)
        return output
    screen.show()
    return output
def batch_skew(
    batch_dir,
    output_dir,
    compressed_path=None
):
    """Distort every image under batch_dir and collect the results.

    Args:
        batch_dir: directory holding the source images.
        output_dir: directory the distorted .png files are written to.
        compressed_path: when given, the originals, warps and corner
            offsets are additionally saved as an .npz archive.

    Returns:
        dict with "original" (images), "warped" (images) and "offsets"
        (flattened warped-corner coordinate lists).

    Raises:
        FileNotFoundError: when batch_dir is empty or not a directory.
    """
    import os
    # BUG FIX: the old check only caught a falsy path, so a non-existent
    # directory slipped through and crashed later inside os.listdir.
    if not batch_dir or not os.path.isdir(batch_dir):
        raise FileNotFoundError(
            "The directory `{}` does not exist".format(batch_dir)
        )
    images = os.listdir(batch_dir)
    original_images = []
    warped_images = []
    warped_dataset = []
    for img in images:
        out = skew_image(
            image_path="{}/{}".format(batch_dir, img),
            output_path="{}/{}.png".format(output_dir, img.split(".")[0])
        )
        warped = out["warped"]["corners"]
        # Flatten [(x, y), ...] corner pairs into [x0, y0, x1, y1, ...].
        warp_coords = []
        for point in range(0, len(warped)):
            warp_coords += [warped[point][0]] + [warped[point][1]]
        warped_dataset += [warp_coords]
        original_images += [out["original"]["image"]]
        warped_images += [out["warped"]["image"]]
    if compressed_path:
        np.savez(
            compressed_path,
            original=original_images,
            warped=warped_images,
            offsets=warped_dataset
        )
    return {
        "original": original_images,
        "warped": warped_images,
        "offsets": warped_dataset,
    }
|
{"/gyakujinton/Shape/__init__.py": ["/gyakujinton/Shape/Shape.py"], "/gyakujinton/__init__.py": ["/gyakujinton/functions/draw.py", "/gyakujinton/functions/skew.py"], "/gyakujinton/cli.py": ["/gyakujinton/functions/draw.py", "/gyakujinton/functions/skew.py"], "/gyakujinton/functions/draw.py": ["/gyakujinton/Shape/__init__.py"]}
|
30,178
|
mamerisawesome/gyakujinton
|
refs/heads/master
|
/gyakujinton/cli.py
|
import argparse
import sys
def cli():
    """Entry point: dispatch the CLI token found in sys.argv to its handler."""
    commands = {
        "draw_on_image": shape_on_image,
        "distort": skew_image,
        "batch_distort": batch_skew,
    }
    options = {
        "prog": "gyakujinton",
        "usage": '%(prog)s [options]',
        "description": "OpenCV wrapper to handle shapes and images."
    }
    # Hand off to the first recognised sub-command, skipping argv[0:2].
    for command_name, handler in commands.items():
        if command_name in sys.argv:
            sub_parser = argparse.ArgumentParser(
                add_help=False,
                **options
            )
            handler(sub_parser, sys.argv[2:])
            return
    # No sub-command given: fall back to the plain parser.
    parser = argparse.ArgumentParser(**options)
    args = parser.parse_args()
    print(args)
def shape_on_image(parent_parser, arguments):
    """Parse `draw_on_image` arguments and draw a polygon onto an image."""
    from .functions.draw import draw_on_image
    parser = argparse.ArgumentParser(
        prog="draw_on_image",
        parents=[parent_parser]
    )
    parser.add_argument(
        "image_path",
        help="file path of image to be drawn on"
    )
    parser.add_argument(
        "-o",
        "--output_path",
        help="output path of image with the modifications"
    )
    parser.add_argument(
        "-p",
        "--points",
        nargs="+",
        action="append",
        required=True,
        help="x,y points on a 2D plane; e.g. 1,2 3,4, 5,6"
    )
    args = parser.parse_args(arguments)
    # Only the last --points group is honoured (argparse append semantics);
    # each "x,y" token becomes an [x, y] integer pair.
    parsed_points = []
    for token in args.points[-1]:
        parsed_points.append([int(coord) for coord in token.split(",")])
    draw_on_image(
        image_path=args.image_path,
        output_path=args.output_path,
        points=parsed_points
    )
    return 0
def skew_image(parent_parser, arguments):
    """Parse `distort` arguments and apply a random perspective warp."""
    from .functions.skew import skew_image
    parser = argparse.ArgumentParser(
        prog="distort",
        parents=[parent_parser]
    )
    parser.add_argument(
        "image_path",
        help="file path of image to be drawn on"
    )
    parser.add_argument(
        "-o",
        "--output_path",
        help="output path of image with the modifications"
    )
    parser.add_argument(
        "-p",
        "--patch",
        nargs="+",
        action="append",
        help=(
            "area to focus on the image; x,y points\n"
            "should be a four sided polygon\n"
            "example: --patch 10,10 10,400 400,400 400,10"
        )
    )
    args = parser.parse_args(arguments)
    # Only the last --patch group is honoured (argparse append semantics).
    patch = None
    if args.patch:
        patch = []
        for corner in args.patch[-1]:
            patch.append([int(coord) for coord in corner.split(",")])
    skew_image(
        image_path=args.image_path,
        output_path=args.output_path,
        patch=patch
    )
    return 0
def batch_skew(parent_parser, arguments):
    """Parse `batch_distort` arguments and distort a directory of images."""
    from .functions.skew import batch_skew
    parser = argparse.ArgumentParser(
        prog="batch_distort",
        parents=[parent_parser]
    )
    parser.add_argument(
        "batch_dir",
        help="file path of images for batch procssesing"
    )
    parser.add_argument(
        "-o",
        "--output_dir",
        required=True,
        help="output directory of distorted images"
    )
    parser.add_argument(
        "-c",
        "--compressed_path",
        help="path of the compressed file to be generated"
    )
    parsed = parser.parse_args(arguments)
    batch_skew(
        batch_dir=parsed.batch_dir,
        output_dir=parsed.output_dir,
        compressed_path=parsed.compressed_path
    )
    return 0
|
{"/gyakujinton/Shape/__init__.py": ["/gyakujinton/Shape/Shape.py"], "/gyakujinton/__init__.py": ["/gyakujinton/functions/draw.py", "/gyakujinton/functions/skew.py"], "/gyakujinton/cli.py": ["/gyakujinton/functions/draw.py", "/gyakujinton/functions/skew.py"], "/gyakujinton/functions/draw.py": ["/gyakujinton/Shape/__init__.py"]}
|
30,179
|
mamerisawesome/gyakujinton
|
refs/heads/master
|
/gyakujinton/functions/draw.py
|
from gyakujinton.Window import Window
from gyakujinton.Shape import Shape
def draw_on_image(image_path, points, output_path=None, color=(20, 100, 20)):
    """Draw the polygon *points* onto the image at *image_path*.

    Saves to *output_path* when given, otherwise shows the window.
    Raises FileNotFoundError when *image_path* does not exist.
    """
    from pathlib import Path
    if not Path(image_path).is_file():
        raise FileNotFoundError(
            "The path `{}` is not valid".format(image_path)
        )
    canvas = Window(image_path=image_path)
    polygon = Shape(points)
    canvas.register(polygon, rgb=color)
    if output_path:
        canvas.save(output_path)
        return
    return canvas.show()
def generate_superimposition():
    """Render two overlapping 100x100 squares on a blank 400x400 window."""
    canvas = Window(width=400, height=400)
    base = Shape([[0, 0], [0, 100], [100, 100], [100, 0]])
    canvas.register(base, rgb=(20, 100, 20))
    offset = Shape([[50, 50], [50, 150], [150, 150], [150, 50]])
    canvas.register(offset, rgb=(255, 100, 100))
    return canvas.show()
|
{"/gyakujinton/Shape/__init__.py": ["/gyakujinton/Shape/Shape.py"], "/gyakujinton/__init__.py": ["/gyakujinton/functions/draw.py", "/gyakujinton/functions/skew.py"], "/gyakujinton/cli.py": ["/gyakujinton/functions/draw.py", "/gyakujinton/functions/skew.py"], "/gyakujinton/functions/draw.py": ["/gyakujinton/Shape/__init__.py"]}
|
30,184
|
Khayel/qPost
|
refs/heads/main
|
/qpost/views.py
|
from flask import render_template, url_for, request, session, redirect, jsonify
from flask.blueprints import Blueprint
from .db_funcs import *
from .decorators import login_required, teacher_required
from http import HTTPStatus
# Blueprint collecting all HTML endpoints; registered in create_app().
views = Blueprint('views', __name__)
@views.route('/')
@login_required
def index():
    """Default page showing the logged-in user's own questions.

    Teacher accounts are redirected to their dedicated landing page.
    """
    if session['is_teacher']:
        return redirect(url_for('views.teacher'))
    return render_template(
        'index.html',
        username=session['username'],
        user_id=session['user_id'],
        is_mine=True,
        my_questions=get_questions(session['user_id']),
    )
@views.route('/login', methods=['POST', 'GET'])
def login():
    """Login page.

    GET renders the form. POST either signs in an existing user (when
    the 'sign_in' field is present) or creates a new account; either
    way the identity is stored in the session on success.
    """
    if request.method == 'GET':
        return render_template('login.html')
    username = request.form.get('username')
    password = request.form.get('password')
    # The same form serves both sign-in and account creation.
    if request.form.get('sign_in'):
        login_status = verify_login(username, password)
        if not login_status:
            return render_template('login.html', status="no_user")
        if login_status[0] == "valid":
            session['username'] = username
            session['user_id'] = login_status[1]
            session['is_teacher'] = bool(login_status[2])
            return redirect(url_for('views.index'))
        if login_status[0] == "invalid":
            return render_template('login.html', status="invalid")
    else:
        new_user = create_user(username, password)
        session['username'] = username
        session['user_id'] = new_user[1]
        return redirect(url_for('views.index'))
@views.route('/teacher')
@login_required
@teacher_required
def teacher():
    """Teacher landing page: shows every user's questions."""
    return render_template(
        'index.html',
        username=session['username'],
        user_id=session['user_id'],
        is_mine=True,
        is_teacher=True,
        my_questions=get_questions(),
    )
@views.route('/logout')
def logout():
    """Clear the session identity and return to the login page."""
    for key in ('username', 'user_id'):
        session.pop(key, None)
    return redirect(url_for('views.login'))
@views.route('/question', methods=["GET"])
def question():
    """Display every question together with its answers."""
    return render_template(
        'index.html',
        username=session['username'],
        user_id=session['user_id'],
        is_mine=False,
        my_questions=get_questions(),
    )
@views.route('/answer/<action>', methods=["POST"])
@login_required
def answer(action):
    """POST operations on an answer.

    Supported actions: add, delete, selected, unselected. Unknown
    actions yield HTTPStatus.BAD_REQUEST; otherwise the client is
    redirected back to the referring page.
    """
    if action == "add":
        answer_question(request.form.get('q_id'),
                        request.form.get('answer_input'),
                        session['user_id'])
    elif action == "delete":
        delete_answer(request.form.get('a_id'))
    elif action == "selected":
        mark_answer(request.form.get('a_id'), 1)
    elif action == "unselected":
        mark_answer(request.form.get('a_id'), 0)
    else:
        return HTTPStatus.BAD_REQUEST
    return redirect(request.referrer)
@views.route('/question/<action>', methods=['POST'])
@login_required
def question_action(action):
    """POST operations on a question: create ("new") or delete.

    Unknown actions yield HTTPStatus.BAD_REQUEST; otherwise redirect
    back to the referring page.
    """
    if action == "delete":
        delete_question(request.form.get('q_id'))
    elif action == "new":
        new_question(request.form.get('question'), session['user_id'])
    else:
        return HTTPStatus.BAD_REQUEST
    return redirect(request.referrer)
|
{"/qpost/views.py": ["/qpost/db_funcs.py", "/qpost/decorators.py"], "/qpost/db_funcs.py": ["/qpost/questions.py"], "/qpost/app.py": ["/qpost/login.py"], "/qpost/__init__.py": ["/qpost/views.py"], "/qpost/login.py": ["/qpost/questions.py"], "/qpost/api.py": ["/qpost/login.py"]}
|
30,185
|
Khayel/qPost
|
refs/heads/main
|
/qpost/questions.py
|
class Question(dict):
    """Extended dict representing one question and its answers.

    Keys:
        id       -- question unique id (q_id)
        question -- question text
        answers  -- list of (answer, a_id, is_answer) tuples, where
                    is_answer is truthy when selected as the answer
        user_id  -- id of the asking user
    """

    def __init__(self, q, answers=None, q_id=-1, user_id=-1):
        # `answers=None` replaces the old `answers=[]` default: a list
        # literal default is shared by every Question created without
        # an explicit answers argument, so appends would leak between
        # instances.
        self['id'] = q_id
        self['question'] = q
        self['answers'] = [] if answers is None else answers
        self['user_id'] = user_id
|
{"/qpost/views.py": ["/qpost/db_funcs.py", "/qpost/decorators.py"], "/qpost/db_funcs.py": ["/qpost/questions.py"], "/qpost/app.py": ["/qpost/login.py"], "/qpost/__init__.py": ["/qpost/views.py"], "/qpost/login.py": ["/qpost/questions.py"], "/qpost/api.py": ["/qpost/login.py"]}
|
30,186
|
Khayel/qPost
|
refs/heads/main
|
/qpost/db_funcs.py
|
from config import CONNECTION_CONFIG
import mysql.connector
from mysql.connector import errorcode
from .questions import Question
import hashlib
import copy
import os
def select_query(query_string, *q_vars):
    """Run a SELECT statement and return every fetched row.

    *q_vars* are substituted for the placeholders in *query_string*.
    On connection failure a diagnostic is printed and None is returned
    implicitly.
    """
    try:
        cnx = mysql.connector.connect(**CONNECTION_CONFIG)
    except mysql.connector.Error as err:
        # Map the well-known error codes to friendlier messages.
        known = {
            errorcode.ER_ACCESS_DENIED_ERROR:
                "Something is wrong with your user name or password",
            errorcode.ER_BAD_DB_ERROR: "Database does not exist",
        }
        print(known.get(err.errno, err))
    else:
        cursor = cnx.cursor()
        cursor.execute(query_string, q_vars)
        rows = cursor.fetchall()
        cnx.close()
        return rows
def modify_query(query_string, *q_vars):
    """Run an INSERT, UPDATE or DELETE statement and commit it.

    Returns True on success; on connection failure a diagnostic is
    printed and None is returned implicitly.
    """
    try:
        cnx = mysql.connector.connect(**CONNECTION_CONFIG)
    except mysql.connector.Error as err:
        # Map the well-known error codes to friendlier messages.
        known = {
            errorcode.ER_ACCESS_DENIED_ERROR:
                "Something is wrong with connection username and password",
            errorcode.ER_BAD_DB_ERROR: "Database does not exist",
        }
        print(known.get(err.errno, err))
    else:
        cursor = cnx.cursor()
        cursor.execute(query_string, q_vars)
        cnx.commit()
        cnx.close()
        return True
def create_hash(password, salt=None):
    """Derive a PBKDF2-HMAC-SHA256 hash of *password*.

    When *salt* is falsy a fresh 8-byte salt is generated (user
    creation); otherwise *salt* is the hex salt string loaded from the
    database (verification).

    Returns {'hash': <hex digest>, 'salt': <hex salt>}.
    """
    salt_bytes = bytes.fromhex(salt) if salt else os.urandom(8)
    digest = hashlib.pbkdf2_hmac(
        'sha256',                    # HMAC digest algorithm
        password.encode('utf-8'),    # password as bytes
        salt_bytes,
        100000,                      # >=100k iterations recommended
        dklen=128,                   # 128-byte derived key
    )
    return {'hash': digest.hex(), 'salt': salt_bytes.hex()}
def verify_login(user, password):
    """Verify a user's credentials.

    Looks up the stored hash/salt for *user*, re-hashes *password*
    with the same salt and compares.

    Returns:
        None when the username does not exist (or the query failed),
        ['invalid', None] for a wrong password,
        ['valid', user_id, is_teacher] on success.
    """
    q = "SELECT password_hash, salt, UserID ,is_teacher FROM Users WHERE Users.username = (%s) "
    query_result = select_query(q, (user))
    # Fix: an unknown user used to fall through to query_result[0] and
    # raise IndexError; callers (views.login) expect a falsy result.
    # The debug print of the row (hash + salt) was also removed.
    if not query_result:
        return None
    password_hash, salt, UserID, is_teacher = query_result[0]
    new_hash = create_hash(password, salt)
    if new_hash['hash'] == password_hash:
        return ['valid', UserID, is_teacher]
    return ['invalid', None]
def create_user(username, password):
    """Insert a new user with a salted password hash, then log them in."""
    hashed = create_hash(password)
    modify_query(
        'INSERT INTO Users (username,password_hash,salt) VALUES ((%s), (%s), (%s))',
        username, hashed['hash'], hashed['salt'],
    )
    return verify_login(username, password)
def get_questions(user_id=None):
    """Return all Question objects, optionally filtered to one user.

    Builds a {q_id: Question} map from the question table, then joins
    the answer table on q_id and appends each (answer, a_id, is_answer)
    tuple to its question's 'answers' list. Returns the dict's values.
    """
    if user_id:
        questions = select_query(
            "SELECT q_id,question,user_id FROM question WHERE question.user_id = (%s) ORDER BY create_time DESC ", user_id)
        answers = select_query(
            "SELECT answer.q_id, answer.answer, answer.a_id,answer.is_answer FROM answer Left JOIN question on answer.q_id=question.q_id WHERE question.user_id =(%s)", user_id)
    else:
        questions = select_query(
            "SELECT q_id,question, user_id FROM question")
        answers = select_query(
            "SELECT answer.q_id, answer.answer, answer.a_id, answer.is_answer FROM answer Left JOIN question on answer.q_id=question.q_id")
    # deepcopy guards against Question's shared mutable default
    # 'answers' list: every Question must own a distinct list before
    # the appends below. Note the comprehension rebinds `questions`
    # from a row list to a {q_id: Question} dict.
    questions = {q_id: copy.deepcopy(
        Question(question, q_id=q_id, user_id=user_id)) for q_id, question, user_id in questions}
    for q_id, answer, a_id, is_answer in answers:
        questions[q_id]['answers'].append((answer, a_id, is_answer))
    return questions.values()
def new_question(question, userID):
    """Insert a new question row owned by *userID*."""
    sql = "INSERT INTO Question(question,user_id) VALUES (%s,%s)"
    modify_query(sql, question, userID)
    return True
def answer_question(q_id, answer, u_id):
    """Insert an answer by *u_id* for question *q_id*."""
    sql = "INSERT INTO Answer(q_id, answer,user_id) VALUES (%s, %s, %s)"
    modify_query(sql, q_id, answer, u_id)
    return True
def delete_question(q_id):
    """Delete the question with the given id."""
    modify_query("DELETE FROM question WHERE q_id = (%s)", q_id)
def delete_answer(a_id):
    """Delete the answer with the given id.

    Fix: removed the stray debug print of *a_id* that ran on every
    deletion.
    """
    query_string = "DELETE FROM answer WHERE a_id = (%s)"
    modify_query(query_string, a_id)
def mark_answer(a_id, val):
    """Set an answer's is_answer flag to *val* (1 selected, 0 not)."""
    modify_query("UPDATE answer SET is_answer = (%s)WHERE a_id = (%s)", val, a_id)
|
{"/qpost/views.py": ["/qpost/db_funcs.py", "/qpost/decorators.py"], "/qpost/db_funcs.py": ["/qpost/questions.py"], "/qpost/app.py": ["/qpost/login.py"], "/qpost/__init__.py": ["/qpost/views.py"], "/qpost/login.py": ["/qpost/questions.py"], "/qpost/api.py": ["/qpost/login.py"]}
|
30,187
|
Khayel/qPost
|
refs/heads/main
|
/qpost/app.py
|
from flask import Flask, render_template, url_for, request, session
from flask_restful import Resource, Api
from .login import verify_login, create_user, select_query
class loginAction(Resource):
    """REST resource handling POST /api/login."""

    def post(self):
        # Verify the submitted credentials.
        status = verify_login(request.form.get('username'),
                              request.form.get('password'))
        # NOTE(review): these comparisons expect verify_login to return
        # the plain strings 'valid'/'invalid'; the db_funcs variant of
        # verify_login returns lists like ['valid', user_id, ...], in
        # which case neither branch below ever matches — confirm which
        # contract the .login module follows.
        if status == 'valid':
            print("Login successful")
            return {
                "status": "success",
            }
        # redirect with flask login
        elif status == 'invalid':
            print("wrong password")
            # prompt wrong password
        else:
            print("USER doesnt exist")
            # prompt to register
class userAction(Resource):
    """REST resource for user lookup (GET) and creation (POST)."""

    def get(self):
        # WARNING(security): the username from the query string is
        # interpolated directly into the SQL text — this is SQL
        # injectable. It should be passed as a bound parameter to
        # select_query instead.
        return select_query(f"SELECT * FROM User WHERE User.username='{request.args.get('username')}'")

    def post(self):
        status = create_user(request.form.get('username'),
                             request.form.get('password'))
        # NOTE(review): 'succes' looks like a typo for 'success', and
        # create_user may not return a string at all — verify against
        # the .login module before relying on this branch.
        if status == 'succes':
            return {
                "status": "success"
            }
        else:
            return {"status": "error"}
# Application and API wiring: expose the REST resources under /api/*.
app = Flask(__name__)
api = Api(app)
api.add_resource(loginAction, '/api/login')
api.add_resource(userAction, '/api/user')
@app.route('/')
def index():
    """Render the landing page.

    Fix: session.get avoids the KeyError that session['username']
    raised for visitors with no session yet; both branches render the
    same template, as before.
    """
    if session.get('username'):
        return render_template('index.html',)
    return render_template('index.html')
@app.route('/login', methods=['POST', 'GET'])
def login():
    """Login endpoint: GET renders the form, POST checks credentials.

    NOTE(review): this looks like an unfinished prototype — it prints
    diagnostics instead of setting a session, and every POST returns
    the placeholder string at the bottom.
    """
    if request.method == 'GET':
        return render_template('login.html')
    else:
        print(request.form)
        login_status = verify_login(request.form.get('username'),
                                    request.form.get('password'))
        if login_status:
            print("Login successful")
            # redirect with flask login
        else:
            print("PLEASE TRY AGAIN")
            print("wrong password")
            # prompt wrong password
        # Placeholder response — should become a redirect/template.
        return"DSAD"
|
{"/qpost/views.py": ["/qpost/db_funcs.py", "/qpost/decorators.py"], "/qpost/db_funcs.py": ["/qpost/questions.py"], "/qpost/app.py": ["/qpost/login.py"], "/qpost/__init__.py": ["/qpost/views.py"], "/qpost/login.py": ["/qpost/questions.py"], "/qpost/api.py": ["/qpost/login.py"]}
|
30,188
|
Khayel/qPost
|
refs/heads/main
|
/qpost/__init__.py
|
from flask import Flask
from .views import views
def create_app():
    """Application factory: build the Flask app and register blueprints.

    .views contains the HTML endpoints.
    """
    application = Flask(__name__)
    application.config.from_object('config')
    application.register_blueprint(views)
    return application
|
{"/qpost/views.py": ["/qpost/db_funcs.py", "/qpost/decorators.py"], "/qpost/db_funcs.py": ["/qpost/questions.py"], "/qpost/app.py": ["/qpost/login.py"], "/qpost/__init__.py": ["/qpost/views.py"], "/qpost/login.py": ["/qpost/questions.py"], "/qpost/api.py": ["/qpost/login.py"]}
|
30,189
|
Khayel/qPost
|
refs/heads/main
|
/qpost/decorators.py
|
from functools import wraps
from flask import redirect, session, url_for
def login_required(f):
    """Decorator redirecting anonymous users to the login page.

    A request counts as authenticated only when both 'username' and
    'user_id' are present in the session.

    Fix: removed the debug prints that dumped the session contents to
    stdout on every unauthenticated request.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if 'username' not in session or 'user_id' not in session:
            return redirect(url_for('views.login'))
        return f(*args, **kwargs)
    return decorated_function
def teacher_required(f):
    """Decorator restricting an endpoint to teacher accounts.

    Fix: the old check (`'is_teacher' in session and
    session['is_teacher'] == False`) allowed access whenever the key
    was simply missing from the session. Now anything except an
    explicit truthy 'is_teacher' redirects to the index (fail closed).
    Debug prints of the session were also removed.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if not session.get('is_teacher'):
            return redirect(url_for('views.index'))
        return f(*args, **kwargs)
    return decorated_function
|
{"/qpost/views.py": ["/qpost/db_funcs.py", "/qpost/decorators.py"], "/qpost/db_funcs.py": ["/qpost/questions.py"], "/qpost/app.py": ["/qpost/login.py"], "/qpost/__init__.py": ["/qpost/views.py"], "/qpost/login.py": ["/qpost/questions.py"], "/qpost/api.py": ["/qpost/login.py"]}
|
30,196
|
ollyjc99/Cards
|
refs/heads/master
|
/bus.py
|
import pygame
import random
def setup(win, bus_len):
w, h = win.get_size()
card_width = round(w*.098)
card_height = round(h*.167)
x_spacing = round(w-(card_width * bus_len))
x_bord = 242
x_btwn = 10
y = round((h // 2) - card_height / 2)
grid = [[x, y] for x in range(x_bord, w-x_bord, card_width+x_btwn)]
return card_width, card_height, grid
def flip(card):
    """Toggle *card* between face-up and face-down and refresh its image."""
    card.flipped = not card.flipped
    card.image = card.flip()
def bus(win_width, win_height, deck, bus_len, clock):
    """Run "The Bus" mini-game loop.

    Deals one card per grid slot from *deck* into a centred row;
    clicking a card flips it. Redraws at 60 FPS until the process
    exits.
    """
    win = pygame.display.set_mode((win_width, win_height))
    pygame.display.set_caption('The Bus')
    background_colour = (75, 125, 75)  # felt green (same RGB as fill below)
    card_width, card_height, grid = setup(win, bus_len)
    hand = pygame.sprite.Group()
    p_deck = iter(deck.cards)
    # Deal: place one card at each grid slot.
    for row in grid:
        print(row)
        card = next(p_deck)
        card.rect.topleft = row
        hand.add(card)
    running = True
    while running:
        for event in pygame.event.get():
            # NOTE(review): pygame.quit() is called but `running` stays
            # True, so the loop keeps driving a torn-down pygame —
            # confirm whether this should also break out of the loop.
            if event.type == pygame.QUIT:
                pygame.quit()
            if event.type == pygame.MOUSEBUTTONDOWN:
                # Flip whichever card was clicked.
                for card in deck.cards:
                    if card.rect.collidepoint(event.pos):
                        flip(card)
        win.fill((75, 125, 75))
        hand.draw(win)
        pygame.display.update()
        clock.tick(60)
|
{"/main.py": ["/pairs.py", "/bus.py", "/sandbox.py", "/pattern_gen.py", "/cards.py"], "/sandbox.py": ["/cards.py", "/misc.py"], "/pairs.py": ["/misc.py"]}
|
30,197
|
ollyjc99/Cards
|
refs/heads/master
|
/main.py
|
from pairs import *
from bus import bus
from sandbox import sandbox
import pattern_gen
from cards import *
def setup(win):
    """Build a Deck of all 52 cards in random order for *win*."""
    suits = ['spades', 'clubs', 'diamonds', 'hearts']
    faces = ['A', '2', '3', '4', '5', '6', '7', '8', '9', '10', 'J', 'Q', 'K']
    combos = [[face, suit] for face in faces for suit in suits]
    shuffled = random.sample(combos, 52)
    return Deck(win, [Card(suit, face) for face, suit in shuffled])
def main():
    """Set up the window, icon and deck, then launch the Pairs game."""
    # Generates Cards
    # pattern_gen.main()
    clock = pygame.time.Clock()
    win_width = 1024
    win_height = 768
    win = pygame.display.set_mode((win_width, win_height))
    # Window icon: the diamond image with white made transparent.
    icon = pygame.image.load('static/img/template/diamonds.png')
    icon.set_colorkey((255, 255, 255))
    pygame.display.set_icon(icon)
    deck = setup(win)
    # pairs() re-creates its own 1280x720 display.
    pairs(1280, 720, deck, clock)
    # bus(1024, 768, deck, 5, clock)
    # sandbox(1024, 768, deck, clock)
# Initialise pygame before any display/font calls inside main().
if __name__ == '__main__':
    pygame.init()
    main()
|
{"/main.py": ["/pairs.py", "/bus.py", "/sandbox.py", "/pattern_gen.py", "/cards.py"], "/sandbox.py": ["/cards.py", "/misc.py"], "/pairs.py": ["/misc.py"]}
|
30,198
|
ollyjc99/Cards
|
refs/heads/master
|
/sandbox.py
|
import pygame
import random
import time
from cards import *
from misc import *
def setup(win, deck):
w, h = win.get_size()
card_width = round(w*.098)
card_height = round(h*.167)
for card in deck.cards:
card.width = card_width
card.height = card_height
def sandbox(w, h, deck, clock):
    """Free-form card sandbox.

    Left click picks up a card (from the hand or off the top of the
    deck) and drags it with the mouse; right click flips a card;
    scroll-up returns the whole hand to the deck; releasing a dragged
    card over the deck returns it there.

    Fixes: the scroll-up branch called hand.remove(0), which crashes
    because 0 is not a Sprite — the hand is now emptied after its
    cards are appended back to the deck. Both except handlers printed
    the exception *class* instead of the caught instance.
    """
    win = pygame.display.set_mode((w, h))
    pygame.display.set_caption('The @')
    win.fill((75, 125, 75))
    setup(win, deck)
    hand = pygame.sprite.Group()
    table = pygame.sprite.Group()  # reserved; currently unused
    running = True
    selected_card = None
    try:
        while running:
            clock.tick(60)
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    pygame.quit()
                if not selected_card:
                    if event.type == pygame.MOUSEBUTTONDOWN:
                        if event.button == 1:
                            # Pick up a clicked card from the hand...
                            if hand:
                                for card in hand:
                                    if card.rect.collidepoint(event.pos):
                                        selected_card = card
                                        break
                            # ...or draw the top card off the deck.
                            if deck.cards:
                                if deck.rect.collidepoint(event.pos):
                                    selected_card = deck.cards[0]
                                    deck.cards.pop(0)
                                    hand.add(selected_card)
                                    selected_card.rect.center = pygame.mouse.get_pos()
                        elif event.button == 2:
                            pass
                        elif event.button == 3:
                            # Flip the clicked card face up/down.
                            if hand:
                                for card in hand:
                                    if card.rect.collidepoint(event.pos):
                                        flip(card)
                                        break
                        elif event.button == 4:
                            # Scroll up: return every held card to the deck.
                            if hand:
                                for card in hand:
                                    deck.cards.append(card)
                                hand.empty()
                else:
                    # A card is being dragged: follow the mouse.
                    selected_card.rect.center = pygame.mouse.get_pos()
                    if event.type == pygame.MOUSEBUTTONUP:
                        # Dropping on the deck returns the card to it.
                        if selected_card.rect.colliderect(deck):
                            deck.cards.append(selected_card)
                            selected_card.kill()
                        selected_card = None
            win.fill((75, 125, 75))
            hand.update()
            deck.draw((round(w * .88), round(h * .80)))
            hand.draw(win)
            pygame.display.update()
    except KeyboardInterrupt as exc:
        print(exc)
    except pygame.error as exc:
        print(exc)
|
{"/main.py": ["/pairs.py", "/bus.py", "/sandbox.py", "/pattern_gen.py", "/cards.py"], "/sandbox.py": ["/cards.py", "/misc.py"], "/pairs.py": ["/misc.py"]}
|
30,199
|
ollyjc99/Cards
|
refs/heads/master
|
/pattern_gen.py
|
import os
import pygame
import random
from PIL import Image, ImageDraw, ImageFont, ImageEnhance
def main():
    """Generate all 52 card face images from the blank template.

    For each suit/face pair: stamp the corner index and a small suit
    icon, mirror the finished top half onto the bottom, draw the
    centre pattern via gen_pattern, and save the result to
    static/img/<suit>/<face>.png.
    """
    faces = ['A','2','3','4','5','6','7','8','9','10','J','Q','K']
    suits = ['hearts','diamonds','spades','clubs']
    for suit in suits:
        for face in faces:
            template = Image.open('static/img/template/card_base.png')
            w, h = template.size
            png_info = template.info  # preserve PNG metadata on save
            icon = Image.open(f'static/img/template/{suit}.png')
            small_icon = icon.resize((9, 9))
            template.paste(small_icon, (8,30))
            fnt = ImageFont.truetype("static/font/Arial.ttf", 20)
            d = ImageDraw.Draw(template)
            # Hearts/diamonds are red; spades/clubs are black.
            if suit == 'hearts' or suit == 'diamonds':
                colour = (255,0,0)
            else:
                colour = (0,0,0)
            # Per-glyph x offsets keep the corner index optically centred.
            if face == 'A':
                d.text((6,8), face, font=fnt, fill=colour)
            elif face == '10':
                d.text((2,8), '1', font=fnt, fill=colour)
                d.text((11, 8), '0', font=fnt, fill=colour)
            elif face == 'Q':
                d.text((5,8), face, font=fnt, fill=colour)
            elif face == 'K':
                d.text((6,8), face, font=fnt, fill=colour)
            else:
                d.text((7,8), face, font=fnt, fill=colour)
            # Mirror the top half, rotated 180°, onto the bottom half.
            region = template.crop((0,0, w, h/2))
            region = region.transpose(Image.ROTATE_180)
            template.paste(region, (0,64))
            gen_pattern(template, icon, face, d, colour)
            template.save(f'static/img/{suit}/{face}.png', **png_info)
def gen_pattern(card, icon, face, d, colour):
    """Draw the centre of the card.

    A big letter for J/Q/K, a single large suit icon for A, or the
    pip layout from get_points for numeric faces.
    """
    x = int(card.width/2)
    y = int(card.height/2)
    fnt = ImageFont.truetype("static/font/Backslash-RpJol.otf", 42)
    if face == 'A':
        enhancer = ImageEnhance.Sharpness(icon)
        # NOTE(review): enhance() returns a new image; this result is
        # discarded, so the sharpening has no effect — confirm intent.
        enhancer.enhance(2.0)
        mid_point = (int(x-icon.width/2), int(y-icon.height/2))
        card.paste(icon, mid_point)
    elif face == 'J':
        d.text((x-5,y-20), 'J', font=fnt, fill=colour)
    elif face == 'Q':
        d.text((x-15, y - 20), 'Q', font=fnt, fill=colour)
    elif face == 'K':
        d.text((x-13, y - 20), 'K', font=fnt, fill=colour)
    else:
        # Numeric face: paste a small pip at each layout point.
        icon = icon.resize((15, 15))
        x = int(card.width / 2 - icon.width / 2)
        y = int(card.height / 2 - icon.width / 2)
        points = get_points(face, (x, y))
        if points:
            for point in points:
                card.paste(icon, point)
def get_points(face, mid_point):
    """Return pip paste positions for a numeric card face.

    *mid_point* is the (x, y) centre offset. Faces without a pip
    layout ('A', 'J', 'Q', 'K', or anything unknown) yield [].
    """
    x, y = mid_point
    left, right = int(x * .5), int(x * 1.5)   # the two pip columns
    top, bot = int(y / 2), int(y * 1.5)       # standard top/bottom rows
    layouts = {
        '2': [(x, top), (x, bot)],
        '3': [(x, top), (x, y), (x, bot)],
        '4': [(left, top), (right, top),
              (left, bot), (right, bot)],
        '5': [(left, top), (right, top), (x, y),
              (left, bot), (right, bot)],
        '6': [(left, top), (right, top),
              (left, y), (right, y),
              (left, bot), (right, bot)],
        '7': [(left, top), (right, top),
              (x, int(y * .75)),
              (left, y), (right, y),
              (left, bot), (right, bot)],
        '8': [(left, top), (right, top),
              (x, int(y * .75)),
              (left, y), (right, y),
              (x, int(y * 1.25)),
              (left, bot), (right, bot)],
        '9': [(left, int(y / 2.3)), (right, int(y / 2.3)),
              (x, int(y / 1.5)),
              (left, int(y / 1.15)), (right, int(y / 1.15)),
              (left, int(y * 1.3)), (right, int(y * 1.3)),
              (left, int(y * 1.7)), (right, int(y * 1.7))],
        '10': [(left, int(y / 2.3)), (right, int(y / 2.3)),
               (x, int(y / 1.5)), (x, int(y * 1.5)),
               (left, int(y / 1.15)), (right, int(y / 1.15)),
               (left, int(y * 1.3)), (right, int(y * 1.3)),
               (left, int(y * 1.7)), (right, int(y * 1.7))],
    }
    return layouts.get(face, [])
# Allow regenerating the card art by running this module directly.
if __name__ == '__main__':
    main()
|
{"/main.py": ["/pairs.py", "/bus.py", "/sandbox.py", "/pattern_gen.py", "/cards.py"], "/sandbox.py": ["/cards.py", "/misc.py"], "/pairs.py": ["/misc.py"]}
|
30,200
|
ollyjc99/Cards
|
refs/heads/master
|
/pairs.py
|
import pygame
import random
from misc import *
def setup(win, deck):
width, height = win.get_size()
card_width = round(width*.07)
card_height = round(height*.15)
x_spacing = width-(card_width*13)
x_bord = round((x_spacing*.33) / 2)
x_btwn = round((x_spacing*.66) / 13)
y_spacing = height-(card_height*4)
y_bord = round((y_spacing*.90) / 2)
y_btwn = round((y_spacing*.10) / 2)
return [[[x, y] for x in range(x_bord, width-x_bord, card_width+x_btwn)] for y in range(round(y_bord*1.75), height-round(y_bord*.25), card_height+y_btwn)]
def print_score(win, win_width, win_height, font, count):
    """Blit the current pair count near the top-right corner."""
    rendered = font.render(str(count), True, (255, 255, 255), (75, 125, 75))
    rect = rendered.get_rect()
    rect.center = (round(win_width * .96), round(win_height * .05))
    win.blit(rendered, rect)
def pairs(win_width, win_height, deck, clock):
    """Run the Pairs (memory) game loop.

    Lays all 52 cards face-down on the grid from setup(); the player
    flips two cards at a time and scores a point for each matching
    pair. Redraws at 60 FPS until the process exits.
    """
    win = pygame.display.set_mode((win_width, win_height))
    pygame.display.set_caption('Pairs')
    background_colour = (75, 125, 75)
    win.fill(background_colour)
    font = pygame.font.Font('static/font/Arial.ttf', 32)
    count = 0  # matched pairs found so far
    title = font.render('Pairs', True, (255, 255, 255))
    score = font.render('Score', True, (255, 255, 255))
    title_rect = title.get_rect()
    title_rect.center = (round(win_width // 2), round(win_height * .20))
    score_rect = score.get_rect()
    score_rect.center = (round(win_width * .90), round(win_height * .05))
    win.blit(title, title_rect)
    win.blit(score, score_rect)
    print_score(win, win_width, win_height, font, count)
    grid = setup(win, deck)
    p_deck = iter(deck.cards)
    cards = pygame.sprite.Group()
    cards.add(deck.cards)
    # first/second hold the one or two currently face-up cards.
    first, second = None, None
    for row in grid:
        for col in row:
            card = next(p_deck)
            card.rect.x, card.rect.y = col
            # NOTE(review): re-adding deck.cards each iteration is
            # redundant — Groups ignore duplicates.
            cards.add(deck.cards)
    running = True
    while running:
        if second:
            # Two cards face up: score a match, or pause and flip back.
            if first.face == second.face:
                count += 1
                print_score(win, win_width, win_height, font, count)
                first, second = None, None
            else:
                pygame.time.wait(1000)
                flip(first)
                flip(second)
                first, second = None, None
        for event in pygame.event.get():
            # NOTE(review): pygame.quit() does not set running=False,
            # so the loop continues against a torn-down pygame.
            if event.type == pygame.QUIT:
                pygame.quit()
            if event.type == pygame.MOUSEBUTTONDOWN:
                for card in deck.cards:
                    if card.rect.collidepoint(event.pos):
                        # Only face-down cards react to clicks.
                        if not card.flipped:
                            if not first:
                                first = card
                                flip(card)
                            elif first and not second:
                                flip(card)
                                second = card
                        else:
                            pass
        cards.update()
        cards.draw(win)
        pygame.display.update()
        clock.tick(60)
|
{"/main.py": ["/pairs.py", "/bus.py", "/sandbox.py", "/pattern_gen.py", "/cards.py"], "/sandbox.py": ["/cards.py", "/misc.py"], "/pairs.py": ["/misc.py"]}
|
30,201
|
ollyjc99/Cards
|
refs/heads/master
|
/cards.py
|
import pygame
import time
from threading import Thread
class Deck(object):
    """A drawable pile of Card sprites sharing one back image."""

    def __init__(self, win, cards):
        self.win = win
        self.x = 0
        self.y = 0
        self.cards = cards
        self.image = pygame.image.load('static/img/template/deck.png')
        self.rect = self.image.get_rect(x=self.x, y=self.y)

    def __str__(self):
        return f'Deck of {len(self.cards)} cards'

    def draw(self, pos=None):
        """Blit the deck at *pos* (remembering it) or at the last position."""
        if pos is not None:
            self.x, self.y = pos
            self.rect = self.image.get_rect(x=self.x, y=self.y)
        self.win.blit(self.image, self.rect)
class Card(pygame.sprite.Sprite):
    """A single playing card sprite that can be flipped face up/down."""

    def __init__(self, suit, face):
        pygame.sprite.Sprite.__init__(self)
        self.flipped = False  # cards start face-down
        self.suit = suit
        self.face = face
        self.image = self.flip()
        self.rect = self.image.get_rect()

    def __str__(self):
        return f'{self.face} of {self.suit}'

    def update(self):
        # Sprite API hook; cards have no per-frame behaviour.
        pass

    def flip(self):
        """Return the surface matching the current flipped state."""
        if self.flipped:
            return pygame.image.load(f'static/img/{self.suit}/{self.face}.png')
        return pygame.image.load('static/img/template/card_back.png')
class DragCard(Thread):
    """Background thread that glues a card's centre to the mouse.

    NOTE(review): the run loop busy-waits at full CPU and `running` is
    never set False anywhere visible — the thread only dies because it
    is a daemon. It also assigns card.center rather than
    card.rect.center; confirm the intended attribute.
    """

    def __init__(self, card):
        Thread.__init__(self)
        self.daemon = True  # do not block interpreter exit
        self.card = card
        self.running = True
        self.start()  # starts tracking immediately on construction

    def run(self):
        while self.running:
            self.card.center = pygame.mouse.get_pos()
|
{"/main.py": ["/pairs.py", "/bus.py", "/sandbox.py", "/pattern_gen.py", "/cards.py"], "/sandbox.py": ["/cards.py", "/misc.py"], "/pairs.py": ["/misc.py"]}
|
30,202
|
ollyjc99/Cards
|
refs/heads/master
|
/test.py
|
from threading import Thread
class Test(Thread):
    """Daemon thread that increments and prints its own counter forever.

    Demonstrates that the int passed to __init__ is copied by value:
    mutating self.no never affects the caller's variable.
    """

    def __init__(self, no):
        Thread.__init__(self)
        self.daemon = True  # die with the main thread
        self.no = no
        self.start()  # starts counting immediately on construction

    def run(self):
        while True:
            self.no += 1
            print(self.no)
def main():
    """Spawn the counter thread and print the (unchanged) local value.

    Both loops run forever; this is a scratch experiment showing the
    thread's increments do not feed back into `number`.
    """
    number = 10
    test = Test(number)
    while True:
        print(number)


if __name__ == '__main__':
    main()
|
{"/main.py": ["/pairs.py", "/bus.py", "/sandbox.py", "/pattern_gen.py", "/cards.py"], "/sandbox.py": ["/cards.py", "/misc.py"], "/pairs.py": ["/misc.py"]}
|
30,203
|
ollyjc99/Cards
|
refs/heads/master
|
/misc.py
|
def card_check(card, pos):
    """Return True when *pos* lies inside *card*'s on-screen rectangle.

    Rebuilds the image rect at the card's current x/y before testing.
    (The redundant if/else around the boolean was collapsed.)
    """
    return bool(card.image.get_rect(x=card.rect.x, y=card.rect.y).collidepoint(pos))
def flip(card):
    """Toggle *card* between face-up and face-down and refresh its image."""
    card.flipped = not card.flipped
    card.image = card.flip()
|
{"/main.py": ["/pairs.py", "/bus.py", "/sandbox.py", "/pattern_gen.py", "/cards.py"], "/sandbox.py": ["/cards.py", "/misc.py"], "/pairs.py": ["/misc.py"]}
|
30,206
|
zhaohuilong0808/apiTestIHRM
|
refs/heads/master
|
/weidaima.py
|
# Employee-management module: standalone login smoke test.
import requests
# Send the login request with mobile number and password as JSON.
response = requests.post(url="http://ihrm-test.itheima.net/api/sys/login",
                         json={"mobile": "13800000002", "password": "123456"},
                         headers={"Content-Type": "application/json"})
# Print the login response body.
print("登录结果为: ", response.json())
|
{"/scrpt/test_ihrm_employee.py": ["/app.py", "/utils.py", "/api/employee_api.py"], "/api/__init__.py": ["/utils.py"], "/scrpt/test_ihrm_login.py": ["/utils.py"], "/run_suite.py": ["/app.py"], "/utils.py": ["/app.py"]}
|
30,207
|
zhaohuilong0808/apiTestIHRM
|
refs/heads/master
|
/scrpt/test_ihrm_employee.py
|
# 导包
import unittest, logging, app
from utils import assert_common
from api.ihrm_login_api import LoginApi
from api.employee_api import EmployeeApi
# 创建测试类
class TestIHRMEmployee(unittest.TestCase):
    """Employee CRUD flow against the IHRM API.

    NOTE(review): the cases depend on running in name order
    (login -> add -> query -> amend -> delete) and share state through
    the module-level app.HEADERS / app.EMP_ID globals.
    """

    def setUp(self) -> None:
        # Fresh API client instances for every test.
        self.login_api = LoginApi()
        self.emp_api = EmployeeApi()

    def tearDown(self) -> None:
        pass

    def test01_login_success(self):
        """Log in and cache the auth headers for the later tests."""
        jsonData = {"mobile": "13800000002", "password": "123456"}
        response = self.login_api.login(jsonData,
                                        {"Content-Type": "application/json"})
        logging.info("登录接口返回的结果为:{}".format(response.json()))
        # Build the Bearer token from the response payload.
        token = 'Bearer ' + response.json().get('data')
        # Stash the full headers in the shared global for later tests.
        app.HEADERS = {"Content-Type": "application/json", "Authorization": token}
        logging.info("保存到全局变量中的请求头为:{}".format(app.HEADERS))

    def test02_add_emp(self):
        """Add an employee and remember the returned id."""
        response = self.emp_api.add_emp("貂蝉", "13999915111", app.HEADERS)
        logging.info("添加员工的结果为: {}".format(response.json()))
        # Save the new employee id for the query/amend/delete tests.
        app.EMP_ID = response.json().get("data").get("id")
        logging.info("保存到全局变量的员工ID为: {}".format(app.EMP_ID))
        assert_common(self, 200, True, 10000, '操作成功', response)

    def test03_query_emp(self):
        """Query the employee created in test02."""
        response = self.emp_api.query_emp(app.EMP_ID, app.HEADERS)
        logging.info("查询员工的结果为: {}".format(response.json()))
        assert_common(self, 200, True, 10000, '操作成功', response)

    def test04_amend_emp(self):
        """Rename the employee created in test02."""
        response = self.emp_api.amend_emp(app.EMP_ID, {"username": "孙尚香"}, app.HEADERS)
        logging.info("修改员工的结果为: {}".format(response.json()))
        assert_common(self, 200, True, 10000, '操作成功', response)

    def test05_delete_emp(self):
        """Delete the employee created in test02."""
        response = self.emp_api.delete_emp(app.EMP_ID, app.HEADERS)
        logging.info("删除员工的结果为: {}".format(response.json()))
        assert_common(self, 200, True, 10000, '操作成功', response)
|
{"/scrpt/test_ihrm_employee.py": ["/app.py", "/utils.py", "/api/employee_api.py"], "/api/__init__.py": ["/utils.py"], "/scrpt/test_ihrm_login.py": ["/utils.py"], "/run_suite.py": ["/app.py"], "/utils.py": ["/app.py"]}
|
30,208
|
zhaohuilong0808/apiTestIHRM
|
refs/heads/master
|
/api/__init__.py
|
# Package initialisation: configure logging once for the api package.
import logging
import utils

# NOTE(review): "init_loging" looks like a typo for "init_logging" in
# utils — confirm the helper's actual name before renaming anything.
utils.init_loging()
logging.info("打印日志")
|
{"/scrpt/test_ihrm_employee.py": ["/app.py", "/utils.py", "/api/employee_api.py"], "/api/__init__.py": ["/utils.py"], "/scrpt/test_ihrm_login.py": ["/utils.py"], "/run_suite.py": ["/app.py"], "/utils.py": ["/app.py"]}
|
30,209
|
zhaohuilong0808/apiTestIHRM
|
refs/heads/master
|
/scrpt/test_ihrm_login.py
|
# 导包
import unittest, logging
from api.ihrm_login_api import LoginApi
from utils import assert_common
# 创建类
class TestLHRMLogin(unittest.TestCase):
    """Login API tests: one success case plus a range of bad-input cases.

    Every case expects HTTP 200; pass/fail is expressed in the body's
    success flag, business code, and message (checked via assert_common).
    """
    # Fresh API wrapper for every test
    def setUp(self) -> None:
        self.login_api = LoginApi()
    def tearDown(self) -> None:
        pass
    # Test methods
    # 1. Successful login
    def test01_login_success(self):
        response = self.login_api.login({"mobile": "13800000002", "password": "123456"},
                                        {"Content-Type": "application/json"})
        # Log the response body
        logging.info("登录成功的结果为: {}".format(response.json()))
        # Assert
        assert_common(self, 200, True, 10000, '操作成功', response)
    # 2. Empty mobile number
    def test02_mobile_is_empty(self):
        response = self.login_api.login({"mobile": "", "password": "error"},
                                        {"Content-Type": "application/json"})
        # Log the response body
        logging.info("手机号码为空的结果为: {}".format(response.json()))
        # Assert
        assert_common(self, 200, False, 20001, '用户名或密码错误', response)
    # 3. Mobile number not registered
    #    (method name says "is_not_empty" — presumably meant "does not exist")
    def test03_mobile_is_not_empty(self):
        response = self.login_api.login({"mobile": "13888882222", "password": "123456"},
                                        {"Content-Type": "application/json"})
        # Log the response body
        logging.info("手机号码不存在的结果为: {}".format(response.json()))
        # Assert
        assert_common(self, 200, False, 20001, '用户名或密码错误', response)
    # 4. Wrong password (method name says "is_empty" — presumably meant "is wrong")
    def test04_password_is_empty(self):
        response = self.login_api.login({"mobile": "13800000002", "password": "error"},
                                        {"Content-Type": "application/json"})
        # Log the response body
        logging.info("密码错误的结果为: {}".format(response.json()))
        # Assert
        assert_common(self, 200, False, 20001, '用户名或密码错误', response)
    # 5. No parameters at all
    def test05_No_arguments(self):
        response = self.login_api.login({}, {"Content-Type": "application/json"})
        # Log the response body
        logging.info("无参的结果为: {}".format(response.json()))
        # Assert
        assert_common(self, 200, False, 20001, '用户名或密码错误', response)
    # 6. Null request body
    def test06_introduction_null(self):
        response = self.login_api.login(None, {"Content-Type": "application/json"})
        # Log the response body
        logging.info("传入null的结果为: {}".format(response.json()))
        # Assert
        assert_common(self, 200, False, 99999, '抱歉,系统繁忙,请稍后重试!', response)
    # 7. Extra parameter (server is expected to ignore it)
    def test07_more_params(self):
        response = self.login_api.login({"mobile": "13800000002", "password": "123456", "extras_params": "1"},
                                        {"Content-Type": "application/json"})
        # Log the response body
        logging.info("多参的结果为: {}".format(response.json()))
        # Assert
        assert_common(self, 200, True, 10000, '操作成功', response)
    # 8. Missing parameter: no mobile
    def test08_less_params_mobile(self):
        response = self.login_api.login({"password": "123456"},
                                        {"Content-Type": "application/json"})
        # Log the response body
        logging.info("少参-缺少mobile的结果为: {}".format(response.json()))
        # Assert
        assert_common(self, 200, False, 20001, '用户名或密码错误', response)
    # 9. Missing parameter: no password
    def test09_less_params_password(self):
        response = self.login_api.login({"mobile": "13800000002"},
                                        {"Content-Type": "application/json"})
        # Log the response body
        logging.info("少参-缺少password的结果为: {}".format(response.json()))
        # Assert
        assert_common(self, 200, False, 20001, '用户名或密码错误', response)
    # 10. Empty password
    def test10_password_is_null(self):
        response = self.login_api.login({"mobile": "13888882222", "password": ""},
                                        {"Content-Type": "application/json"})
        # Log the response body
        logging.info("密码为空的结果为: {}".format(response.json()))
        # Assert
        assert_common(self, 200, False, 20001, '用户名或密码错误', response)
    # 11. Misspelled parameter key ("mboile" instead of "mobile")
    def test11_errpr_password(self):
        response = self.login_api.login({"mboile": "13800000002", "password": "123456"},
                                        {"Content-Type": "application/json"})
        # Log the response body
        logging.info("错误参数的结果为: {}".format(response.json()))
        # Assert
        assert_common(self, 200, False, 20001, '用户名或密码错误', response)
|
{"/scrpt/test_ihrm_employee.py": ["/app.py", "/utils.py", "/api/employee_api.py"], "/api/__init__.py": ["/utils.py"], "/scrpt/test_ihrm_login.py": ["/utils.py"], "/run_suite.py": ["/app.py"], "/utils.py": ["/app.py"]}
|
30,210
|
zhaohuilong0808/apiTestIHRM
|
refs/heads/master
|
/app.py
|
# Import the os module
import os
# Project root: absolute path of the directory containing this file.
# Other modules build data/log paths from it.
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
# Request headers shared across test modules (set after a successful login)
HEADERS = None
# Employee ID shared across test modules (set after adding an employee)
EMP_ID = None
|
{"/scrpt/test_ihrm_employee.py": ["/app.py", "/utils.py", "/api/employee_api.py"], "/api/__init__.py": ["/utils.py"], "/scrpt/test_ihrm_login.py": ["/utils.py"], "/run_suite.py": ["/app.py"], "/utils.py": ["/app.py"]}
|
30,211
|
zhaohuilong0808/apiTestIHRM
|
refs/heads/master
|
/weidaima_emp.py
|
# Imports
import requests
# Throw-away script walking the employee CRUD flow: login -> add -> query ->
# amend -> query -> delete.  (The packaged version lives in api/employee_api.py.)
# Send the login request
response = requests.post(url="http://ihrm-test.itheima.net/api/sys/login",
                         json={"mobile": "13800000002", "password": "123456"},
                         headers={"Content-Type": "application/json"})
# Print the login result
print("登录结果为: ", response.json())
# Extract the auth token returned by login
token = "Bearer " + response.json().get("data")
print("打印令牌为: ", token)
# (In the packaged project the token is kept in app.py globals.)
# Add an employee
headers = {"Content-Type": "application/json", "Authorization": token}
response = requests.post(url="http://ihrm-test.itheima.net/api/sys/user",
                         json={
                             "username": "娜可露露555",
                             "mobile": "15922227777",
                             "timeOfEntry": "2020-05-05",
                             "formOfEmployment": 1,
                             "departmentName": "测试部",
                             "departmentId": "1063678149528784896",
                             "correctionTime": "2020-05-30T16:00:00.000Z"},
                         headers=headers)
# Print the add-employee response
print("添加员工接口返回的数据为: ", response.json())
# Extract the new employee's id
emp_id = response.json().get("data").get("id")
print("提取添加员工接口返回的员id为: ", emp_id)
# Build the query-employee url
query_url = "http://ihrm-test.itheima.net/api/sys/user/" + emp_id
print("拼接查询员工的url为: ", query_url)
# Query the employee
response = requests.get(url=query_url, headers=headers)
print("查询员工的结果为: ", response.json())
# Build the amend-employee url
amend_url = "http://ihrm-test.itheima.net/api/sys/user/" + emp_id
# FIX: the PUT previously targeted query_url, leaving amend_url unused;
# send it to the url that was built for it.
response = requests.put(url=amend_url, headers=headers, json={"username": "孙尚香111", "mobile": "15911115555"})
# Print the amend result
print("修改工的结果为: ", response.json())
# Query again to observe the change
query_url = "http://ihrm-test.itheima.net/api/sys/user/" + emp_id
print("拼接查询员工的url为: ", query_url)
response = requests.get(url=query_url, headers=headers)
print("查询员工的结果为: ", response.json())
# Build the delete-employee url
delete_url = "http://ihrm-test.itheima.net/api/sys/user/" + emp_id
# FIX: the DELETE previously used query_url and carried a pointless JSON body;
# use delete_url and send no body.
response = requests.delete(url=delete_url, headers=headers)
# Print the delete result
print("删除员工的结果为: ", response.json())
|
{"/scrpt/test_ihrm_employee.py": ["/app.py", "/utils.py", "/api/employee_api.py"], "/api/__init__.py": ["/utils.py"], "/scrpt/test_ihrm_login.py": ["/utils.py"], "/run_suite.py": ["/app.py"], "/utils.py": ["/app.py"]}
|
30,212
|
zhaohuilong0808/apiTestIHRM
|
refs/heads/master
|
/run_suite.py
|
# 1. Imports
import time
import unittest
from app import BASE_DIR
from BeautifulReport import BeautifulReport
# 2. Build the test suite.
# NOTE(review): the pattern "*csh.py" only matches files whose names end in
# "csh.py"; if the intent is to discover all tests, "test*.py" is the usual
# pattern — confirm before changing it.
suite = unittest.TestLoader().discover(BASE_DIR + "/scrpt", "*csh.py")
# 3. Report file name, timestamped so consecutive runs do not overwrite each
#    other.  (The original '"IHRM".format()' was a no-op and left `time` unused.)
report_file = "IHRM-{}".format(time.strftime("%Y%m%d-%H%M%S"))
# 4. Run the suite with BeautifulReport and write the HTML report
BeautifulReport(suite).report(filename=report_file, description="ihrm测试报告", log_path="./report")
print("-" * 100)
print("增加一行输出语句,检查轮巡构建是否生效")
|
{"/scrpt/test_ihrm_employee.py": ["/app.py", "/utils.py", "/api/employee_api.py"], "/api/__init__.py": ["/utils.py"], "/scrpt/test_ihrm_login.py": ["/utils.py"], "/run_suite.py": ["/app.py"], "/utils.py": ["/app.py"]}
|
30,213
|
zhaohuilong0808/apiTestIHRM
|
refs/heads/master
|
/api/employee_api.py
|
# 导包
import requests
# 创建封装员工类
class EmployeeApi:
    """Thin wrapper around the IHRM employee endpoints: add, query, amend, delete.

    Every method returns the raw `requests.Response` so callers can assert on
    status code and JSON body themselves.
    """
    def __init__(self):
        # Collection endpoint; item endpoints are formed by appending the employee id.
        self.emp_url = "http://ihrm-test.itheima.net/api/sys/user/"

    def add_emp(self, username, mobile, headers):
        """POST a new employee; only name and mobile vary, the rest is fixed test data."""
        payload = {
            "username": username,
            "mobile": mobile,
            "timeOfEntry": "2020-05-05",
            "formOfEmployment": 1,
            "departmentName": "测试部",
            "departmentId": "1063678149528784896",
            "correctionTime": "2020-05-30T16:00:00.000Z"}
        return requests.post(url=self.emp_url, json=payload, headers=headers)

    def query_emp(self, emp_id, headers):
        """GET a single employee by id."""
        return requests.get(url=self.emp_url + emp_id, headers=headers)

    def amend_emp(self, emp_id, jsonData, headers):
        """PUT updated fields (jsonData) for the employee with the given id."""
        return requests.put(url=self.emp_url + emp_id, json=jsonData, headers=headers)

    def delete_emp(self, emp_id, headers):
        """DELETE the employee with the given id."""
        return requests.delete(url=self.emp_url + emp_id, headers=headers)
|
{"/scrpt/test_ihrm_employee.py": ["/app.py", "/utils.py", "/api/employee_api.py"], "/api/__init__.py": ["/utils.py"], "/scrpt/test_ihrm_login.py": ["/utils.py"], "/run_suite.py": ["/app.py"], "/utils.py": ["/app.py"]}
|
30,214
|
zhaohuilong0808/apiTestIHRM
|
refs/heads/master
|
/utils.py
|
# 导包
import json
import app
import logging
from logging import handlers
# 编写初始化日志代码
# 定义一个初始化日志函数
def init_loging():
    """Configure the root logger: INFO level, console + timed-rotating file output.

    The file handler rotates every minute and keeps two backups; the log
    directory <BASE_DIR>/log must already exist.
    """
    root = logging.getLogger()
    root.setLevel(logging.INFO)
    # One shared formatter for both handlers
    formatter = logging.Formatter(
        fmt='%(asctime)s %(levelname)s [%(name)s] [%(filename)s(%(funcName)s:%(lineno)d)] - %(message)s')
    # Console handler
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(formatter)
    root.addHandler(console_handler)
    # Rotating file handler
    log_path = app.BASE_DIR + "/log/ihrm.log"
    file_handler = logging.handlers.TimedRotatingFileHandler(log_path,
                                                             when="M",
                                                             interval=1,
                                                             backupCount=2,
                                                             encoding="utf-8")
    file_handler.setFormatter(formatter)
    root.addHandler(file_handler)
# 封装通用断言函数
def assert_common(self, http_code, success, code, masssge, response):
    """Shared response assertion: status code, success flag, business code, message.

    `self` is the calling TestCase (provides assertEqual / assertIn).
    `masssge` keeps its original (misspelled) name so existing keyword
    callers stay compatible; it is matched as a substring of `message`.
    """
    body = response.json()
    self.assertEqual(http_code, response.status_code)
    self.assertEqual(success, body.get("success"))
    self.assertEqual(code, body.get("code"))
    self.assertIn(masssge, body.get("message"))
# 编写读取登录数据的函数
def rend_login_data(filepath):
# 打开数据文件
with open(filepath, mode="r", encoding="utf-8") as f:
# 使用json加载数据文件为json格式
jsonData = json.load(f)
# 遍历json数据文件,并把数据处理列表元组形式([()])
result_list = list()
for login_data in jsonData: # type:dict
result_list.append(tuple(login_data.values()))
print("读取登录数据为: ", result_list)
return result_list
# 编写读取员工模块的数据函数
def read_emp_data(filepath, interface_name):
# 打开数据文件
with open(filepath, mode="r", encoding="utf-8") as f:
# 把文件加载为json格式
jaonData = json.load(f)
# 读取加载的json数据数据当中对应接口数据集
emp_data = jaonData.get(interface_name) # type:dict
# 把数据处理成列表元组对象,然后添加到空列表当中
result_list = list()
result_list.append(tuple(emp_data.values()))
# 返回数据
print("读取的{}员工数据为:{}".format(interface_name, result_list))
return result_list
# Self-check: runs only when this module is executed directly, not on import.
# FIX: the original had two consecutive identical `if __name__ == '__main__':`
# guards; they are merged into one (behavior is unchanged).
if __name__ == '__main__':
    # Login data file path (the file must exist)
    filrpath = app.BASE_DIR + "/data/login_data.json"
    # Read and echo the login data
    result = rend_login_data(filrpath)
    print("返回的result_list的结果为", result)
    # Employee data file path
    filepath2 = app.BASE_DIR + "/data/emp_data.json"
    # Exercise every employee interface's data set
    read_emp_data(filepath2, 'add_emp')
    read_emp_data(filepath2, 'query_emp')
    read_emp_data(filepath2, 'modify_emp')
    read_emp_data(filepath2, 'delete_emp')
|
{"/scrpt/test_ihrm_employee.py": ["/app.py", "/utils.py", "/api/employee_api.py"], "/api/__init__.py": ["/utils.py"], "/scrpt/test_ihrm_login.py": ["/utils.py"], "/run_suite.py": ["/app.py"], "/utils.py": ["/app.py"]}
|
30,215
|
ysuurme/game_snakebattle
|
refs/heads/master
|
/snakebattle/snack.py
|
from .config import COLORS, SNACK
class Snack:
    """A snack item on the game grid, drawn with the shared SNACK image."""
    def __init__(self, x, y, color=COLORS['WHITE']):
        self.x = x  # x position in the game 'grid'
        self.y = y  # y position in the game 'grid'
        # NOTE(review): game.py blits self.image, not this color — `color`
        # looks unused in drawing; kept for parity with Cube. Verify before removing.
        self.color = color
        self.image = SNACK
|
{"/snakebattle/snack.py": ["/snakebattle/config.py"], "/snakebattle/game.py": ["/snakebattle/config.py", "/snakebattle/snake.py", "/snakebattle/snack.py"], "/main.py": ["/snakebattle/config.py", "/snakebattle/game.py"], "/snakebattle/snake.py": ["/snakebattle/config.py"]}
|
30,216
|
ysuurme/game_snakebattle
|
refs/heads/master
|
/snakebattle/game.py
|
import pygame
import random
from .config import QUIT, BACKGROUND, COLS, ROWS, WIDTH, HEIGHT, SQ_SIZE, COLORS, FONT_SCORE, FONT_WINNER,\
SOUND_MUNCH, SOUND_HIT
from .snake import Player1, Player2
from .snack import Snack
class Game:
    """Two-player snake battle: owns both players, the snack, and all rendering."""
    def __init__(self, win):
        # win: the pygame display surface everything is drawn onto
        self.win = win
        self.game_over = False
        self.player1 = None
        self.player2 = None
        self.snack = None
        self.init_players()
        self.init_snack()
    def update(self):
        """Advance one frame: redraw everything, move both snakes, handle snack eating."""
        self.win.blit(BACKGROUND, (0, 0))
        self.draw_game()
        self.draw_snack()
        self.draw_snake(self.player1)
        self.draw_snake(self.player2)
        self.move_snake()
        self.handle_snack()
        pygame.display.update()
    def init_players(self):
        """Create fresh snakes for both players."""
        self.player1 = Player1()
        self.player2 = Player2()
    def draw_game(self):
        """Draw the grid lines and both players' score labels."""
        x = 0
        y = 0
        for i in range(COLS):
            x = x + SQ_SIZE
            pygame.draw.line(self.win, COLORS['WHITE'], (x, 0), (x, HEIGHT))
        for i in range(ROWS):
            y = y + SQ_SIZE
            pygame.draw.line(self.win, COLORS['WHITE'], (0, y), (WIDTH, y))
        # A player's score is its snake length
        p1_score = FONT_SCORE.render(f"P1 Score: {self.player1.length}", 1, self.player1.color)
        p2_score = FONT_SCORE.render(f"P2 Score: {self.player2.length}", 1, self.player2.color)
        self.win.blit(p1_score, (10, 10))
        self.win.blit(p2_score, (WIDTH - p2_score.get_width() - 10, 10))
    def draw_snake(self, snake):
        """Draw a snake's body cubes; the head additionally gets eyes and a tongue.

        Eye/tongue placement is derived from the current direction: the
        "horizontal" eye pair is used for vertical movement and vice versa.
        """
        for i, cube in enumerate(snake.body):
            pygame.draw.rect(self.win, snake.color,
                             (cube.x * SQ_SIZE + 1, cube.y * SQ_SIZE + 1, SQ_SIZE - 2, SQ_SIZE - 2))
            if i == 0:  # first cube in body is snake head
                tongue_size = SQ_SIZE / 3
                tongue_pos = SQ_SIZE / 2 - tongue_size / 2
                eye_size = 4
                # Pixel coordinates of the head cell
                x = snake.head.x * SQ_SIZE
                y = snake.head.y * SQ_SIZE
                eyeh1 = (x + SQ_SIZE * (1 / 3) - eye_size / 3, y + SQ_SIZE / 2)  # horizontal eye
                eyeh2 = (x + SQ_SIZE * (2 / 3) + eye_size / 3, y + SQ_SIZE / 2)  # horizontal eye
                eyev1 = (x + SQ_SIZE / 2, y + SQ_SIZE * (1 / 3) - eye_size / 3)  # vertical eye
                eyev2 = (x + SQ_SIZE / 2, y + SQ_SIZE * (2 / 3) + eye_size / 3)  # vertical eye
                eye1, eye2 = eyeh1, eyeh2
                # Shift x/y to the tongue position for the current direction
                if snake.dir == (0, 0):  # start
                    x += tongue_pos
                    y -= tongue_size - 2
                elif snake.dir == (0, -1):  # up
                    x += tongue_pos
                    y -= tongue_size - 2
                    eye1, eye2 = eyeh1, eyeh2
                elif snake.dir == (0, 1):  # down
                    x += tongue_pos
                    y += SQ_SIZE - 1
                    eye1, eye2 = eyeh1, eyeh2
                elif snake.dir == (-1, 0):  # left
                    x -= tongue_size - 2
                    y += tongue_pos
                    eye1, eye2 = eyev1, eyev2
                elif snake.dir == (1, 0):  # right
                    x += SQ_SIZE - 1
                    y += tongue_pos
                    eye1, eye2 = eyev1, eyev2
                pygame.draw.circle(self.win, COLORS['BLACK'], eye1, eye_size)
                pygame.draw.circle(self.win, COLORS['BLACK'], eye2, eye_size)
                tongue = pygame.Rect(x, y, tongue_size, tongue_size)
                pygame.draw.rect(self.win, COLORS['RED'], tongue)
    def draw_snack(self):
        """Blit the snack image at its grid cell."""
        self.win.blit(self.snack.image, (self.snack.x * SQ_SIZE, self.snack.y * SQ_SIZE, SQ_SIZE - 2, SQ_SIZE - 2))
    def move_snake(self):
        """Read the keyboard (arrows = P1, WASD = P2), move both snakes, detect wins.

        A 180-degree reversal is blocked unless the snake is a single cube.
        If a snake cannot move (it hit itself), the other player wins.
        """
        keys_pressed = pygame.key.get_pressed()
        if keys_pressed[pygame.K_ESCAPE]:  # Quit game
            pygame.event.post(pygame.event.Event(QUIT))
        if keys_pressed[pygame.K_LEFT]:  # P1 left
            if self.player1.dir != (1, 0) or len(self.player1.body) == 1:
                self.player1.dir = (-1, 0)
        elif keys_pressed[pygame.K_UP]:  # P1 up
            if self.player1.dir != (0, 1) or len(self.player1.body) == 1:
                self.player1.dir = (0, -1)
        elif keys_pressed[pygame.K_RIGHT]:  # P1 right
            if self.player1.dir != (-1, 0) or len(self.player1.body) == 1:
                self.player1.dir = (1, 0)
        elif keys_pressed[pygame.K_DOWN]:  # P1 down
            if self.player1.dir != (0, -1) or len(self.player1.body) == 1:
                self.player1.dir = (0, 1)
        if keys_pressed[pygame.K_a]:  # P2 left
            if self.player2.dir != (1, 0) or len(self.player2.body) == 1:
                self.player2.dir = (-1, 0)
        elif keys_pressed[pygame.K_w]:  # P2 up
            if self.player2.dir != (0, 1) or len(self.player2.body) == 1:
                self.player2.dir = (0, -1)
        elif keys_pressed[pygame.K_d]:  # P2 right
            if self.player2.dir != (-1, 0) or len(self.player2.body) == 1:
                self.player2.dir = (1, 0)
        elif keys_pressed[pygame.K_s]:  # P2 down
            if self.player2.dir != (0, -1) or len(self.player2.body) == 1:
                self.player2.dir = (0, 1)
        if not self.player1.move_snake_body():  # P1 move snake, if can't move P1 hit itself, P2 wins!
            self.winner(self.player2)
        elif not self.player2.move_snake_body():  # P2 move snake, if can't move P2 hit itself, P1 wins!
            self.winner(self.player1)
        else:
            self.winner()  # Check if a player hits another player
    def init_snack(self):
        """Place a new snack at a random grid position.

        NOTE(review): only player1's body is checked, and x/y are re-randomized
        once per body cube rather than re-validated against the whole body —
        the snack can still land on player2 or on an already-checked cube.
        Confirm whether that is acceptable before changing.
        """
        x, y = 0, 0
        for cube in self.player1.body:
            while True:
                x = random.randrange(COLS)
                y = random.randrange(ROWS)
                if cube.x == x and cube.y == y:  # validates if snack is not in snake body
                    continue  # define new x, y for snack
                else:
                    break
        self.snack = Snack(x, y)
    def handle_snack(self):
        """Grow whichever snake's head sits on the snack, play a sound, respawn it."""
        munch = False
        if self.player1.head.x == self.snack.x and self.player1.head.y == self.snack.y:
            self.player1.length += 1
            munch = True
        elif self.player2.head.x == self.snack.x and self.player2.head.y == self.snack.y:
            self.player2.length += 1
            munch = True
        if munch:
            SOUND_MUNCH.play()
            self.init_snack()
    def winner(self, winner=None):
        """Set game_over and draw the winner banner when a win condition holds.

        Called with a player when the *other* player hit itself, or with no
        argument every frame to check head-vs-opponent-body collisions.
        """
        winner_text = FONT_WINNER.render("Game completed!", 1, COLORS['WHITE'])
        if winner == self.player1:
            winner_text = FONT_WINNER.render("Player 1 has won the game!", 1, self.player1.color)
            self.game_over = True
        if winner == self.player2:
            winner_text = FONT_WINNER.render("Player 2 has won the game!", 1, self.player2.color)
            self.game_over = True
        for part in self.player2.body:  # validate if snake head P1 is not in body P2
            if self.player1.head.x == part.x and self.player1.head.y == part.y:
                winner_text = FONT_WINNER.render("Player 2 has won the game!", 1, self.player2.color)
                self.game_over = True
                break
        for part in self.player1.body:  # validate if snake head P2 is not in body P1
            if self.player2.head.x == part.x and self.player2.head.y == part.y:
                winner_text = FONT_WINNER.render("Player 1 has won the game!", 1, self.player1.color)
                self.game_over = True
                break
        if self.game_over:
            SOUND_HIT.play()
            self.win.blit(winner_text,
                          (WIDTH / 2 - winner_text.get_width() / 2, HEIGHT / 2 - winner_text.get_height() / 2))
|
{"/snakebattle/snack.py": ["/snakebattle/config.py"], "/snakebattle/game.py": ["/snakebattle/config.py", "/snakebattle/snake.py", "/snakebattle/snack.py"], "/main.py": ["/snakebattle/config.py", "/snakebattle/game.py"], "/snakebattle/snake.py": ["/snakebattle/config.py"]}
|
30,217
|
ysuurme/game_snakebattle
|
refs/heads/master
|
/main.py
|
import pygame
import sys
from snakebattle.config import QUIT, WIDTH, HEIGHT, DELAY, FPS
from snakebattle.game import Game
def init_game():
    """Create the pygame window and return a fresh Game bound to it."""
    win = pygame.display.set_mode((WIDTH, HEIGHT))
    pygame.display.set_caption('Snake Battle!')  # todo implement AI
    game = Game(win)
    return game
def run(game):
    """Main loop: throttle the frame rate, pump events, update the game.

    On game over, waits 5 s and starts a fresh game in place.  FIX: the
    original restarted by calling main() from inside run(), which recursed
    one stack frame deeper on every restart (unbounded mutual recursion);
    looping here behaves the same with a flat stack.
    """
    clock = pygame.time.Clock()
    while True:
        pygame.time.delay(DELAY)
        clock.tick(FPS)
        for event in pygame.event.get():
            # Window close or the custom QUIT event (posted on ESC)
            if event.type == pygame.QUIT or event.type == QUIT:
                pygame.quit()
                sys.exit()
        game.update()
        if game.game_over:
            pygame.time.delay(5000)
            game = init_game()  # restart without growing the call stack
def main():  # todo implement sounds for game start, game won
    """Entry point: build the game and hand it to the main loop."""
    snake_battle = init_game()
    run(snake_battle)
# Press the green button in the gutter to run the script.
if __name__ == '__main__':
    main()
|
{"/snakebattle/snack.py": ["/snakebattle/config.py"], "/snakebattle/game.py": ["/snakebattle/config.py", "/snakebattle/snake.py", "/snakebattle/snack.py"], "/main.py": ["/snakebattle/config.py", "/snakebattle/game.py"], "/snakebattle/snake.py": ["/snakebattle/config.py"]}
|
30,218
|
ysuurme/game_snakebattle
|
refs/heads/master
|
/snakebattle/config.py
|
# Configuration file holding constant values used throughout the project.
import pygame
pygame.font.init()
pygame.mixer.init()
# pygame window:
SQ_SIZE = 25  # pixel size of one grid cell
ROWS, COLS = 25, 25
WIDTH, HEIGHT = COLS*SQ_SIZE, ROWS*SQ_SIZE
DELAY = 50 # game delay in ms, hence a higher value is a slower gameplay
FPS = 10 # game frames per second, hence a higher value is a faster gameplay
BORDER_WIDTH = 10
# NOTE(review): BORDER is not referenced by game.py / main.py in this snapshot — verify before removing.
BORDER = pygame.Rect((WIDTH - BORDER_WIDTH) / 2, 0, BORDER_WIDTH, HEIGHT)
# pygame fonts:
FONT_SCORE = pygame.font.SysFont('comicsans', 40)
FONT_WINNER = pygame.font.SysFont('comicsans', 60)
# Game colors (RGB):
COLORS = {
    "BLACK": (0, 0, 0),
    "GREY": (128, 128, 128),
    "RED": (255, 0, 0),
    "GREEN": (0, 255, 0),
    "BLUE": (0, 0, 255),
    "YELLOW": (255, 255, 0),
    "WHITE": (255, 255, 255)
}
# Game images (paths are relative to the process working directory):
BACKGROUND = pygame.transform.scale(pygame.image.load('assets/snakeBackground.png'), (WIDTH, HEIGHT))
SNACK = pygame.transform.scale(pygame.image.load('assets/snack.png'), (SQ_SIZE, SQ_SIZE))
# Game sounds:
SOUND_MUNCH = pygame.mixer.Sound('assets/snakeMunch.mp3')
SOUND_HIT = pygame.mixer.Sound('assets/snakeHit.mp3')
# Game events: custom event posted when ESC is pressed
QUIT = pygame.USEREVENT + 1
# Snake colors:
COLOR_P1 = COLORS["BLUE"]
COLOR_P2 = COLORS["YELLOW"]
|
{"/snakebattle/snack.py": ["/snakebattle/config.py"], "/snakebattle/game.py": ["/snakebattle/config.py", "/snakebattle/snake.py", "/snakebattle/snack.py"], "/main.py": ["/snakebattle/config.py", "/snakebattle/game.py"], "/snakebattle/snake.py": ["/snakebattle/config.py"]}
|
30,219
|
ysuurme/game_snakebattle
|
refs/heads/master
|
/snakebattle/snake.py
|
from snakebattle.config import COLORS, COLOR_P1, COLOR_P2, ROWS, COLS
class Snake:
    """A snake on the game grid: a body of Cubes, a direction, and a length."""
    def __init__(self):
        self.color = None
        self.head = None
        self.length = 1
        self.body = []
        self.dir = (0, 0)  # (dx, dy) per step; (0, 0) means not yet moving
    def move_snake_body(self):
        """Advance the head one step (wrapping at the board edges) and trim the tail.

        Returns False when the new head position collides with the snake's own
        body (the snake hit itself), True otherwise.
        """
        x = self.head.x
        y = self.head.y
        if self.head.x == 0 and self.dir[0] == -1:  # snake moves from column 0 to left, enter game right
            x = COLS - 1
        elif self.head.x == COLS - 1 and self.dir[0] == 1:  # snake moves from max column to right, enter game left
            x = 0
        elif self.head.y == 0 and self.dir[1] == -1:  # snake moves from top row to above, enter game last row
            y = ROWS - 1
        elif self.head.y == ROWS - 1 and self.dir[1] == 1:  # snake moves from last row down, enter game top row
            y = 0
        else:
            x += self.dir[0]
            y += self.dir[1]
        # Self-collision check against the current body
        for part in self.body:
            if self.length > 1 and part.x == x and part.y == y:
                return False
        self.head = Cube(x, y, self.color)
        self.body.insert(0, self.head)
        # Drop tail cubes beyond the current length
        while len(self.body) > self.length:
            self.body.pop()
        return True
    def eat_snack(self):
        """Duplicate the head cube at the front of the body.

        NOTE(review): does not increase self.length, and game.py grows snakes
        via `length += 1` instead — this looks like dead code; confirm.
        """
        self.head = Cube(self.head.x, self.head.y, self.color)
        self.body.insert(0, self.head)
class Player1(Snake):
    """Arrow-key controlled snake; starts at grid (10, 10)."""
    def __init__(self):
        super().__init__()
        self.color = COLOR_P1
        self.head = Cube(10, 10)
        self.body.append(self.head)
class Player2(Snake):
    """WASD-controlled snake; starts at grid (20, 20)."""
    def __init__(self):
        super().__init__()
        self.color = COLOR_P2
        self.head = Cube(20, 20)
        self.body.append(self.head)
class Cube:
    """One grid cell of a snake body (grid coordinates, not pixels)."""
    def __init__(self, x, y, color=COLORS['WHITE']):
        self.x = x  # x position in the game 'grid'
        self.y = y  # y position in the game 'grid'
        self.color = color
|
{"/snakebattle/snack.py": ["/snakebattle/config.py"], "/snakebattle/game.py": ["/snakebattle/config.py", "/snakebattle/snake.py", "/snakebattle/snack.py"], "/main.py": ["/snakebattle/config.py", "/snakebattle/game.py"], "/snakebattle/snake.py": ["/snakebattle/config.py"]}
|
30,220
|
kirankh7/clean_flask_app
|
refs/heads/master
|
/app/diag/diag.py
|
# import boto3
from flask import render_template
import boto.utils
import boto.ec2
from . import diag
import requests
@diag.route('/diag')
def diagnose_metrics():
    """Diagnostics page: EC2 instances in this instance's region + nginx version.

    Reads the region from the EC2 instance-identity metadata, so this route
    only works when the app runs on an AWS EC2 instance.
    """
    data = boto.utils.get_instance_identity()
    region_name = data['document']['region']
    conn = boto.ec2.connect_to_region(region_name)
    # Collect every instance in the region (count + list for the template)
    count = 0
    server_list = []
    for instance in conn.get_only_instances():
        count += 1
        server_list.append(instance)
    # nginx version
    return render_template('diag.html',
                           region = region_name,
                           instance_count=count,
                           instance_ids=server_list,
                           nginx_version=get_nginx_version()
                           )
def get_nginx_version():
    """Fetch the app's public URL and return its 'Server' response header.

    NOTE(review): performs a live HTTP request on every call, adding external
    latency (and a failure mode) to each /diag render — consider caching.
    """
    r = requests.get('http://flaskapp.thebetterengineers.com')
    val = r.headers
    return val.get('Server')
|
{"/app/diag/diag.py": ["/app/diag/__init__.py"], "/app/hello/hello.py": ["/app/hello/__init__.py", "/app/models.py"], "/app/health/health.py": ["/app/models.py", "/app/health/__init__.py"], "/app/__init__.py": ["/app/config.py", "/app/models.py", "/app/hello/hello.py", "/app/health/health.py", "/app/diag/diag.py"], "/app/routes.py": ["/app/__init__.py"], "/run.py": ["/app/__init__.py"]}
|
30,221
|
kirankh7/clean_flask_app
|
refs/heads/master
|
/app/hello/__init__.py
|
from flask import Blueprint
# Blueprint for the hello/root routes; registered by create_app()
hello = Blueprint('hello', __name__)
|
{"/app/diag/diag.py": ["/app/diag/__init__.py"], "/app/hello/hello.py": ["/app/hello/__init__.py", "/app/models.py"], "/app/health/health.py": ["/app/models.py", "/app/health/__init__.py"], "/app/__init__.py": ["/app/config.py", "/app/models.py", "/app/hello/hello.py", "/app/health/health.py", "/app/diag/diag.py"], "/app/routes.py": ["/app/__init__.py"], "/run.py": ["/app/__init__.py"]}
|
30,222
|
kirankh7/clean_flask_app
|
refs/heads/master
|
/app/health/__init__.py
|
from flask import Blueprint
# Blueprint for the /health route; registered by create_app()
health = Blueprint('health', __name__)
|
{"/app/diag/diag.py": ["/app/diag/__init__.py"], "/app/hello/hello.py": ["/app/hello/__init__.py", "/app/models.py"], "/app/health/health.py": ["/app/models.py", "/app/health/__init__.py"], "/app/__init__.py": ["/app/config.py", "/app/models.py", "/app/hello/hello.py", "/app/health/health.py", "/app/diag/diag.py"], "/app/routes.py": ["/app/__init__.py"], "/run.py": ["/app/__init__.py"]}
|
30,223
|
kirankh7/clean_flask_app
|
refs/heads/master
|
/app/config.py
|
import json
import os
# Absolute directory of this file (used only by the commented-out sqlite fallback)
basedir = os.path.abspath(os.path.dirname(__file__))
# NOTE(review): this path is relative to the process working directory while
# basedir above goes unused by it — run the app from the project root, or
# make the path absolute; confirm the intended run directory.
with open('./config.json') as config_file:
    config = json.load(config_file)
class Config:
    """Flask / SQLAlchemy settings assembled from config.json."""
    # DB Connection settings
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    SECRET_KEY = "someshabasednumberlkira"
    # SQLALCHEMY_DATABASE_URI = config.get('DATABASE_URL') or \
    # 'sqlite:///' + os.path.join(basedir, 'example.db')
    # MySQL URI built from config.json; raises TypeError at import if any key is missing
    SQLALCHEMY_DATABASE_URI = "mysql://" + config.get('DB_USERNAME') + ':' + config.get('DB_PASSWORD') + '@' + config.get('DB_ENDPOINT') + '/' + config.get('DB_NAME')
    # DB_ENDPOINT = config.get('DB_ENDPOINT')
    # DB_NAME = config.get('DB_NAME')
    # DB_USERNAME = config.get('DB_USERNAME')
    # DB_PASSWORD = config.get('DB_PASSWORD')
|
{"/app/diag/diag.py": ["/app/diag/__init__.py"], "/app/hello/hello.py": ["/app/hello/__init__.py", "/app/models.py"], "/app/health/health.py": ["/app/models.py", "/app/health/__init__.py"], "/app/__init__.py": ["/app/config.py", "/app/models.py", "/app/hello/hello.py", "/app/health/health.py", "/app/diag/diag.py"], "/app/routes.py": ["/app/__init__.py"], "/run.py": ["/app/__init__.py"]}
|
30,224
|
kirankh7/clean_flask_app
|
refs/heads/master
|
/app/hello/hello.py
|
from flask import render_template,request, current_app
from datetime import datetime
import pytz
from . import hello
from ..models import DatabaseTables,db
from random import randint
# First / root of the flask app
@hello.route('/')
def hello_world():
    """Root route: insert a demo row, read it back, render hello.html.

    NOTE(review): inserts a new 'Kiran' row with a random id on every request —
    presumably demo code; confirm this is intended before production use.
    """
    message = "Hello World! {}".format(get_pst_time())
    image_src = "https://s3.amazonaws.com/kiran-test-2/cruiser80.jpg"
    random_id = randint(0, 10000)
    # Database demo: insert a row, then query it back.
    # FIX: removed the dead statement `db_string = DatabaseTables.query.all` —
    # it never called .all() (missing parentheses) and the result was unused.
    me = DatabaseTables(id=random_id, name='Kiran')
    db.session.add(me)
    db.session.commit()
    row = db.session.query(DatabaseTables).filter(
        DatabaseTables.name == 'Kiran').first()
    print('original:', row.name, row.id)
    # query= db.session.query(DatabaseTables.name).order_by(DatabaseTables.id)
    # print('original:', query.key, query.val)
    return render_template("hello.html",
                           src_hello=message,
                           image_name=image_src,
                           db_message=row
                           )
def get_pst_time():
    """Return the current US/Pacific time as a string suffixed with ' PST'."""
    utc_now = datetime.now(tz=pytz.utc)
    pacific_now = utc_now.astimezone(pytz.timezone('US/Pacific'))
    return str(pacific_now) + " PST"
def string_from_db():
    """Placeholder — not implemented yet."""
    pass
|
{"/app/diag/diag.py": ["/app/diag/__init__.py"], "/app/hello/hello.py": ["/app/hello/__init__.py", "/app/models.py"], "/app/health/health.py": ["/app/models.py", "/app/health/__init__.py"], "/app/__init__.py": ["/app/config.py", "/app/models.py", "/app/hello/hello.py", "/app/health/health.py", "/app/diag/diag.py"], "/app/routes.py": ["/app/__init__.py"], "/run.py": ["/app/__init__.py"]}
|
30,225
|
kirankh7/clean_flask_app
|
refs/heads/master
|
/app/health/health.py
|
from flask import render_template,request, current_app
from ..models import DatabaseTables,db
from . import health
@health.route("/health")
def check_rds_conn():
    """Health endpoint: verify the DB connection by running a trivial query.

    Renders health.html with 'OK' when SELECT 1 succeeds, 'ERROR' otherwise.
    """
    validation = ""
    try:
        db.session.query("1").from_statement("SELECT 1").all()
        validation = 'OK'
    except Exception:
        # FIX: narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; any DB error still reports ERROR.
        validation = 'ERROR'
    return render_template("health.html", validation_msg=validation)
|
{"/app/diag/diag.py": ["/app/diag/__init__.py"], "/app/hello/hello.py": ["/app/hello/__init__.py", "/app/models.py"], "/app/health/health.py": ["/app/models.py", "/app/health/__init__.py"], "/app/__init__.py": ["/app/config.py", "/app/models.py", "/app/hello/hello.py", "/app/health/health.py", "/app/diag/diag.py"], "/app/routes.py": ["/app/__init__.py"], "/run.py": ["/app/__init__.py"]}
|
30,226
|
kirankh7/clean_flask_app
|
refs/heads/master
|
/app/__init__.py
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from app.config import Config
from app.models import db
from flask_migrate import Migrate
# # Creating an instance of the Flask(kind of module)
# app = Flask(__name__)
# # Passing configs from Json file
# app.config.from_object(Config)
# pass db instance
# db = SQLAlchemy()
def create_app(confi_class=Config):
    """Application factory: build the Flask app, wire DB, migrations, blueprints.

    :param confi_class: configuration class to load (defaults to Config).
    :return: the configured Flask application.
    """
    app = Flask(__name__)
    # FIX: honor the passed-in config class; the original loaded Config
    # unconditionally, silently ignoring this parameter.
    app.config.from_object(confi_class)
    # DB Settings
    from app.models import db
    db.init_app(app)
    migrate = Migrate(app, db)
    # Register the feature blueprints
    from app.hello.hello import hello
    from app.health.health import health
    from app.diag.diag import diag
    app.register_blueprint(hello)
    app.register_blueprint(health)
    app.register_blueprint(diag)
    return app
|
{"/app/diag/diag.py": ["/app/diag/__init__.py"], "/app/hello/hello.py": ["/app/hello/__init__.py", "/app/models.py"], "/app/health/health.py": ["/app/models.py", "/app/health/__init__.py"], "/app/__init__.py": ["/app/config.py", "/app/models.py", "/app/hello/hello.py", "/app/health/health.py", "/app/diag/diag.py"], "/app/routes.py": ["/app/__init__.py"], "/run.py": ["/app/__init__.py"]}
|
30,227
|
kirankh7/clean_flask_app
|
refs/heads/master
|
/app/routes.py
|
from flask import Flask,render_template,request, current_app
from app import db
from datetime import datetime
import pytz
# First / root of the flask app
# NOTE(review): this module decorates with @app.route but never defines or
# imports `app`, and references get_pst_time which is also undefined here —
# importing it raises NameError. It appears to be legacy code superseded by
# the blueprint modules; confirm before relying on it.
@app.route('/hello.html') # same hello_world
@app.route('/')
def hello_world():
    """Render hello.html with a greeting (Pacific time) and a fixed S3 image."""
    message = "Hello World! {}".format(get_pst_time())
    image_src = "https://s3.amazonaws.com/kiran-test-2/cruiser80.jpg"
    return render_template("hello.html",
                           src_hello=message,
                           image_name=image_src
                           )
    # return message
# pass some values
@app.route('/surnames/')
def get_surname(surname="Enter Name=Some Name"):
    """Return everything after the first word of the ?Name= query value."""
    query_val = request.args.get("Name", surname)
    get_surname = query_val.split()  # local name shadows the function — legal but confusing
    return '<p>Name Is : {}<p/>'.format("".join(get_surname[1:]))
# This route must print database connection
# Check connection auth in an infinite while loop & sleep for 5 minutes
# Configure db config using chef
# SQLAlchemy
@app.route('/hello')
def health_rds():
    """Stub health endpoint; always returns "OK" (no real DB check yet)."""
    return "OK"
# '/diag' must print
# 1. Number of instances in the region
# 2. Version from the Nginx (configure something into settings using chef)
# 3. Health of each instance name: try to get instance IP, GET /hello
@app.route('/diag')
def status_cheker():
    """Stub diagnostics endpoint; always returns "OK"."""
    return "OK"
class Publication(db.Model):
    """ORM model for the `publication` table (id + name).

    NOTE(review): near-duplicate of models.DatabaseTables, and its __repr__
    repeats "is is" — presumably superseded legacy code; confirm before removing.
    """
    __tablename__ = 'publication'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(80), nullable=False)
    def __init__(self, id, name):
        self.id = id
        self.name = name
    def __repr__(self):
        return 'The id is {}, Name is is {}'.format(self.id, self.name)
|
{"/app/diag/diag.py": ["/app/diag/__init__.py"], "/app/hello/hello.py": ["/app/hello/__init__.py", "/app/models.py"], "/app/health/health.py": ["/app/models.py", "/app/health/__init__.py"], "/app/__init__.py": ["/app/config.py", "/app/models.py", "/app/hello/hello.py", "/app/health/health.py", "/app/diag/diag.py"], "/app/routes.py": ["/app/__init__.py"], "/run.py": ["/app/__init__.py"]}
|
30,228
|
kirankh7/clean_flask_app
|
refs/heads/master
|
/app/models.py
|
from flask_sqlalchemy import SQLAlchemy
from flask import current_app
db = SQLAlchemy()
class DatabaseTables(db.Model):
    """ORM model backing the `flask_app` table (id + name demo rows)."""
    __tablename__ = 'flask_app'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(80), nullable=False)

    def __init__(self, id, name):
        # `id` intentionally mirrors the column name (shadows the builtin)
        self.id = id
        self.name = name

    def __repr__(self):
        # FIX: the original string read "Name is is" (duplicated word)
        return 'The id is {}, Name is {}'.format(self.id, self.name)
|
{"/app/diag/diag.py": ["/app/diag/__init__.py"], "/app/hello/hello.py": ["/app/hello/__init__.py", "/app/models.py"], "/app/health/health.py": ["/app/models.py", "/app/health/__init__.py"], "/app/__init__.py": ["/app/config.py", "/app/models.py", "/app/hello/hello.py", "/app/health/health.py", "/app/diag/diag.py"], "/app/routes.py": ["/app/__init__.py"], "/run.py": ["/app/__init__.py"]}
|
30,229
|
kirankh7/clean_flask_app
|
refs/heads/master
|
/app/diag/__init__.py
|
from flask import Blueprint
diag = Blueprint("diag", __name__)
# Import the routes module so its @diag.route handlers are registered.
# NOTE(review): `from . import diag` rebinds this package attribute from the
# Blueprint to the submodule after import; callers reach the Blueprint via
# `from app.diag.diag import diag` — confirm before renaming anything.
from . import diag
|
{"/app/diag/diag.py": ["/app/diag/__init__.py"], "/app/hello/hello.py": ["/app/hello/__init__.py", "/app/models.py"], "/app/health/health.py": ["/app/models.py", "/app/health/__init__.py"], "/app/__init__.py": ["/app/config.py", "/app/models.py", "/app/hello/hello.py", "/app/health/health.py", "/app/diag/diag.py"], "/app/routes.py": ["/app/__init__.py"], "/run.py": ["/app/__init__.py"]}
|
30,230
|
kirankh7/clean_flask_app
|
refs/heads/master
|
/run.py
|
from app import create_app
# Application entry point: build the app via the factory and serve it.
app = create_app()

# Run this file directly (python run.py); do not import it elsewhere.
if __name__ == '__main__':
    # NOTE(review): debug=True and binding to 0.0.0.0 are fine for local
    # development but should be disabled in production.
    app.run(host='0.0.0.0', port=8000, debug=True)
|
{"/app/diag/diag.py": ["/app/diag/__init__.py"], "/app/hello/hello.py": ["/app/hello/__init__.py", "/app/models.py"], "/app/health/health.py": ["/app/models.py", "/app/health/__init__.py"], "/app/__init__.py": ["/app/config.py", "/app/models.py", "/app/hello/hello.py", "/app/health/health.py", "/app/diag/diag.py"], "/app/routes.py": ["/app/__init__.py"], "/run.py": ["/app/__init__.py"]}
|
30,236
|
vaziozio/sentiment-analysis-app
|
refs/heads/master
|
/hourcounter.py
|
import datetime
# Per-hour sentiment tallies: hour of day (0-23) -> counts keyed by the
# Portuguese sentiment labels produced by the classifier.
SENTIMENT_LABELS = ('Negativo', 'Neutro', 'Positivo')
tweets_count = {hour: {label: 0 for label in SENTIMENT_LABELS}
                for hour in range(24)}


def count_tweets(sentiment):
    """Increment the tally for *sentiment* in the current hour's bucket.

    Args:
        sentiment: one of 'Negativo', 'Neutro', 'Positivo'.

    Returns:
        The shared ``tweets_count`` dict (mutated in place).

    Raises:
        KeyError: if *sentiment* is not a known label.
    """
    hour = datetime.datetime.now().hour
    tweets_count[hour][sentiment] += 1
    return tweets_count
|
{"/app.py": ["/hourcounter.py", "/streaming.py"]}
|
30,237
|
vaziozio/sentiment-analysis-app
|
refs/heads/master
|
/app.py
|
#importing libraries
from flask import Flask, render_template, url_for, jsonify, request
from sklearn.naive_bayes import MultinomialNB
from sklearn.feature_extraction.text import CountVectorizer
import pickle
import os
from hourcounter import tweets_count, count_tweets
from streaming import StreamListener
# Load the persisted classifier and vectorizer once at startup.
# NOTE(review): pickle.load assumes these .sav files are trusted local
# artifacts -- never unpickle untrusted data. Context managers close the
# files promptly (the original leaked both handles).
with open('sentiment_analysis.sav', 'rb') as model_file:
    clf = pickle.load(model_file)
with open('vectorizer.sav', 'rb') as vectorizer_file:
    vect = pickle.load(vectorizer_file)

# Flask application instance.
app = Flask(__name__)

# Port from the Cloud Foundry environment; fall back to 8000 for local runs
# (int(os.getenv(...)) raised TypeError when VCAP_APP_PORT was unset).
port = int(os.getenv('VCAP_APP_PORT', '8000'))

# Most recently classified tweet, served by /api.
tweet_json = {}


@app.route('/add_tweet', methods=['POST'])
def add_tweet():
    """Classify a posted tweet, publish it, and record its sentiment tally."""
    request_data = request.get_json()
    values_to_predict = vect.transform([request_data['tweet']])
    prediction = clf.predict(values_to_predict)
    tweet, sentiment = request_data['tweet'], prediction[0]
    tweet_json['tweet'], tweet_json['sentiment'] = tweet, sentiment
    count_tweets(sentiment)
    return 'Success'


@app.route('/')
def index():
    """Serve the dashboard template."""
    return render_template('data.html')


@app.route('/api', methods=['GET'])
def api():
    """Return the most recently classified tweet as JSON."""
    return jsonify(tweet_json)


@app.route('/count', methods=['GET'])
def count():
    """Return the per-hour sentiment tallies as JSON."""
    return jsonify(tweets_count)


if __name__ == '__main__':
    app.run(host='0.0.0.0', debug=True, port=port)
|
{"/app.py": ["/hourcounter.py", "/streaming.py"]}
|
30,238
|
vaziozio/sentiment-analysis-app
|
refs/heads/master
|
/streaming.py
|
from tweepy import Stream
from tweepy import OAuthHandler
from tweepy.streaming import StreamListener
import requests
import pickle
import json
import time
#loading credentials
ckey = 'Client key'
csecret = 'Client secret'
atoken ='Access token'
asecret = 'Access secret'
# Streaming listener: forwards sampled tweets to the web app for scoring.
class listener(StreamListener):
    """Tweepy stream listener that relays tweet text to /add_tweet."""

    def on_data(self, data):
        """Post each incoming tweet's text to the analyzer endpoint.

        Always returns True so the stream keeps running after errors.
        """
        try:
            json_content = json.loads(data)
            json_text = json_content['text']
            requests.post('http://doug-sentiment-analyzer.mybluemix.net/add_tweet', json={'tweet':json_text})
        except Exception as e:
            print(e)
            # Post the error message as text; the original passed the
            # Exception object itself, which is not JSON-serializable.
            requests.post('http://doug-sentiment-analyzer.mybluemix.net/add_tweet', json={'tweet':str(e)})
            # Back off briefly after a failure before resuming the stream.
            time.sleep(3)
        return True


# Authenticate and start sampling the Portuguese-language tweet stream
# asynchronously.
auth = OAuthHandler(ckey, csecret)
auth.set_access_token(atoken, asecret)
twitterStream = Stream(auth, listener())
twitterStream = twitterStream.sample(languages=['pt'],is_async=True)
|
{"/app.py": ["/hourcounter.py", "/streaming.py"]}
|
30,262
|
fraperr/python
|
refs/heads/master
|
/monPremierPackage/nombre.py
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import monPremierPackage.nombre
def verifEntier(entier):
    """Return True if *entier* parses as a strictly positive integer.

    Prints a French error message and returns False when the value cannot
    be converted to int or is not strictly positive. The original used
    ``assert`` for the positivity check, which is silently stripped under
    ``python -O``; an explicit test keeps the validation unconditional.
    """
    try:
        valeur = int(entier)
    except ValueError:
        print("Vous n'avez pas saisi un nombre.")
        return False
    if valeur <= 0:
        print("Vous avez misé une somme négative.")
        return False
    return True
def saisirEntier(message):
    """Prompt with *message* until a strictly positive integer is entered.

    Returns the validated value as an int. Calls the sibling validator
    directly; the original went through the module's fully-qualified name,
    which only worked because the module imported itself.
    """
    while True:
        saisie = input(message)
        if verifEntier(saisie):
            return int(saisie)
|
{"/ZCasino.py": ["/monPremierPackage/nombre.py"]}
|
30,263
|
fraperr/python
|
refs/heads/master
|
/ZCasino.py
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import random
import math
import monPremierPackage.nombre
# Console roulette game: the player bets until their bankroll runs out.
sommeDeDepart = monPremierPackage.nombre.saisirEntier("Quelle somme de départ: ")
mise = 5
while sommeDeDepart > 0:
    mise = monPremierPackage.nombre.saisirEntier("Combien voulez-vous miser: ")
    # Ask for a number between 0 and 49 and re-prompt until it is in range.
    # The original accepted 50, which the roulette -- randrange(50),
    # i.e. 0..49 -- can never produce.
    entreeCorrecte = False
    while not entreeCorrecte:
        nombreJoueur = monPremierPackage.nombre.saisirEntier(
            "Choisissez un nombre entre 0 et 49: ")
        if 0 <= nombreJoueur <= 49:
            entreeCorrecte = True
    # Spin the roulette: a number in 0..49.
    nombreRoulette = random.randrange(50)
    # Show the player's choice and the winning number.
    print("Votre choix: ", nombreJoueur, "\n", "Numéro sortant: ", nombreRoulette)
    # Exact match: the player wins 3 times the stake.
    if nombreJoueur == nombreRoulette:
        sommeDeDepart += 3 * mise
        print("Vous avez gagné: ", 3 * mise, "\n", "Votre fortune est de",
              sommeDeDepart)
    # Same parity (both even or both odd): the bank returns half the stake,
    # rounded up.
    elif nombreJoueur % 2 == nombreRoulette % 2:
        sommeDeDepart += math.ceil(mise / 2)
        print("La banque vous remet: ", math.ceil(mise / 2), "\n",
              "Votre fortune est de ", sommeDeDepart)
    # Otherwise the player loses the stake.
    else:
        sommeDeDepart -= mise
        print("Désolé, vous avez perdu votre mise: ", mise, "\n",
              "Il vous reste: ", sommeDeDepart )
print("Désolé, vous n'avez plus assez d'argent en banque", sommeDeDepart)
|
{"/ZCasino.py": ["/monPremierPackage/nombre.py"]}
|
30,265
|
Kronholt/harding
|
refs/heads/master
|
/events/models.py
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Volunteer(models.Model):
    """Profile extending Django's built-in User with gamification fields."""
    # Optional one-to-one link to the auth User; removed when the User is.
    user = models.OneToOneField(User, null=True, blank=True, on_delete=models.CASCADE)
    user_name = models.CharField(max_length=200, null=True, blank=True)
    first_name = models.CharField(max_length=200, null=True, blank=True)
    last_name = models.CharField(max_length=200, null=True, blank=True)
    user_rank = models.IntegerField(default=0)
    user_points = models.IntegerField(default = 0)
    user_coins = models.IntegerField(default = 0)
    user_icon = models.ImageField(default='profile1.png', null=True, blank=True)
    user_datecreated = models.DateTimeField(auto_now_add=True)
    def __str__(self):
        """Display user_name, or the literal "default" when it is unset."""
        if self.user_name is not None:
            return self.user_name
        else:
            return "default"
class Tag(models.Model):
    """Label used to categorize posts (many-to-many from Post)."""
    name = models.CharField(max_length=200, null=True)
    def __str__(self):
        return self.name
class Post(models.Model):
    """An event or story published on the site."""
    # Choices for post_type: each pair is (stored value, display label).
    TYPE = ('Event', 'Event'), ('Story', 'Story')
    content_name = models.CharField(max_length=200, null=True)
    # Post survives deletion of its author (author becomes NULL).
    content_author = models.ForeignKey(Volunteer, null=True, on_delete=models.SET_NULL)
    content_date = models.DateTimeField(auto_now_add=True)
    content_date_start = models.DateTimeField(null=True, blank=True)
    content_date_end = models.DateTimeField(null=True, blank=True)
    content_social_description = models.CharField(max_length=1000, null=True)
    content_image = models.ImageField(default='profile1.png', null=True, blank=True)
    tags = models.ManyToManyField(Tag)
    full_story = models.CharField(max_length=10000, null=True, blank=True)
    post_type = models.CharField(max_length=200, null=True, choices=TYPE)
    # Auth users who marked themselves as attending this event.
    attending = models.ManyToManyField(User)
    def __str__(self):
        return self.content_name
class Comment(models.Model):
    """A short message left by a volunteer on one or more posts."""
    message = models.CharField(max_length=300, null=True)
    # Comment is deleted along with its author Volunteer.
    author = models.ForeignKey(Volunteer, null=True, on_delete=models.CASCADE)
    # NOTE(review): many-to-many to Post is unusual for comments -- a
    # ForeignKey is the typical shape; confirm this is intentional.
    post = models.ManyToManyField(Post)
|
{"/events/filters.py": ["/events/models.py"], "/events/forms.py": ["/events/models.py"], "/events/views.py": ["/events/filters.py", "/events/forms.py", "/events/models.py"]}
|
30,266
|
Kronholt/harding
|
refs/heads/master
|
/events/urls.py
|
from django.urls import path
from . import views
# URL routes for the events app. Each 'name' is the reverse-lookup key used
# by {% url %} in templates and by redirect()/reverse() in views.
urlpatterns = [
    path('', views.index, name="home"),
    path('volunteering/', views.volunteering, name="volunteering"),
    path('event/<str:pk>/', views.event,name='event'),
    path('stories/', views.stories, name="stories"),
    path('story/<str:pk>/', views.story, name="story"),
    path('register/', views.register, name="register"),
    path('login/', views.loginPage, name='login'),
    path('attend/<str:pk>/', views.attend, name="attend"),
    path('logout/', views.logoutUser, name="logout"),
]
|
{"/events/filters.py": ["/events/models.py"], "/events/forms.py": ["/events/models.py"], "/events/views.py": ["/events/filters.py", "/events/forms.py", "/events/models.py"]}
|
30,267
|
Kronholt/harding
|
refs/heads/master
|
/events/migrations/0006_post_attending.py
|
# Generated by Django 3.1.2 on 2021-01-07 17:16
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: adds the Post.attending many-to-many field to the
    project's user model. Do not edit by hand."""
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('events', '0005_post_full_story'),
    ]
    operations = [
        migrations.AddField(
            model_name='post',
            name='attending',
            field=models.ManyToManyField(to=settings.AUTH_USER_MODEL),
        ),
    ]
|
{"/events/filters.py": ["/events/models.py"], "/events/forms.py": ["/events/models.py"], "/events/views.py": ["/events/filters.py", "/events/forms.py", "/events/models.py"]}
|
30,268
|
Kronholt/harding
|
refs/heads/master
|
/events/filters.py
|
import django_filters
from .models import Post
from django_filters import DateFilter, CharFilter
class PostFilter(django_filters.FilterSet):
    """Filter posts by a start/end date range (plus Meta-declared fields).

    The original declared the two date filters with trailing commas, which
    made each attribute a 1-tuple rather than a filter, and pointed
    ``field_name`` at nonexistent model fields ("start_date"/"end_date"
    instead of the actual content_date_* columns).
    """
    start_date = DateFilter(field_name="content_date_start", lookup_expr='gte')
    end_date = DateFilter(field_name="content_date_end", lookup_expr='lte')
    # tag = CharFilter(field_name="tags", lookup_expr='icontains')
    class Meta:
        model = Post
        fields = ['content_date_start','content_date_end', 'tags']
|
{"/events/filters.py": ["/events/models.py"], "/events/forms.py": ["/events/models.py"], "/events/views.py": ["/events/filters.py", "/events/forms.py", "/events/models.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.