index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
13,927
|
mao-liu/bricklayer
|
refs/heads/master
|
/bricklayer/catalog/crawler.py
|
"""
delta_tables crawlers
two functions supported
- restore delta tables from delta_log location
- update existing delta table from delta_log location
```
"""
import typing
import logging
from pathlib import Path
from pyspark.sql import SparkSession
from . import dbricks_catalog
class Crawler():
    """Restore or relocate Databricks delta tables from their `_delta_log` directories.

    Two operations are supported:
    - restore_delta_tables: (re)create catalog tables from delta data found under a DBFS path
    - relocate_delta_tables: point existing catalog tables at a new DBFS location
    """

    def __init__(self):
        # Reuse the cluster's active session when one exists.
        self.spark = SparkSession.builder.getOrCreate()

    def restore_delta_tables(
            self,
            dbfs_path: str,
            table_names: typing.Optional[typing.Iterable[str]] = None,
            prefixes: typing.Optional[typing.Iterable[str]] = None
    ) -> None:
        """Recreate a delta table for every `_delta_log/` path found under `dbfs_path`.

        Args:
            dbfs_path (str): relative path to dbfs/ in which the delta table data is saved
            table_names (typing.Iterable[str], optional): tables (table_sql_name) to be
                restored; defaults to every table discovered under `dbfs_path`
            prefixes (typing.Iterable[str], optional): prefixes of tables to be restored.
                If `table_names` and `prefixes` are used at the same time, only the
                `table_names` starting with one of the `prefixes` are restored.
        """
        table_names, prefixes = self._normalize_args(table_names, prefixes)
        logging.info(f'Input `dbfs_path`: {dbfs_path}')
        dbfs_path = dbfs_path.strip('/')
        abs_path = Path(f'/dbfs/{dbfs_path}')
        logging.info(f'Absolute full path of the directory: {str(abs_path)}')
        if not table_names:
            table_names = self._get_all_tables_from_dbfs_path(abs_path)
        table_names = self._apply_prefix_filter(table_names, prefixes)
        if not table_names:
            # logging.warn is a deprecated alias; use warning()
            logging.warning('Cannot find any qualified table to restore')
            return
        success_paths = []
        failure_paths = []
        for t in table_names:
            # table names are encoded as '<table>_version_<n>'
            table_name, version = t.split('_version_')
            table_location_path = f'/{dbfs_path}/{table_name}/version={version}'
            if self._create_delta_table(t, table_location_path):
                success_paths.append(table_location_path)
            else:
                failure_paths.append(table_location_path)
        logging.info(f"Restoring successful: {success_paths}")
        logging.info(f"Restoring failed: {failure_paths}")

    def relocate_delta_tables(
            self,
            dbfs_path: str,
            table_names: typing.Optional[typing.Iterable[str]] = None,
            prefixes: typing.Optional[typing.Iterable[str]] = None
    ) -> None:
        """Update the storage location of catalog tables to live under `dbfs_path`.

        Args:
            dbfs_path (str): working directory in which the delta table data is saved
            table_names (typing.Iterable[str], optional): tables to be relocated;
                defaults to every non-view table in the Databricks catalog
            prefixes (typing.Iterable[str], optional): prefixes of tables to be relocated.
                If `table_names` and `prefixes` are used at the same time, only the
                `table_names` starting with one of the `prefixes` are relocated.
        """
        table_names, prefixes = self._normalize_args(table_names, prefixes)
        logging.info(f'Input `dbfs_path`: {dbfs_path}')
        dbfs_path = dbfs_path.strip('/')
        if not table_names:
            table_names = self._get_all_tables_from_dbs_catalog()
        table_names = self._apply_prefix_filter(table_names, prefixes)
        if not table_names:
            logging.warning('Cannot find any qualified table to relocate')
            return
        success_tables = []
        failure_tables = []
        for t in table_names:
            table_name, version = t.split('_version_')
            table_new_location_path = f'/{dbfs_path}/{table_name}/version={version}'
            if self._update_delta_table_location(t, table_new_location_path):
                success_tables.append(t)
            else:
                failure_tables.append(t)
        logging.info(f"Relocating successful: {success_tables}")
        logging.info(f"Relocating failed: {failure_tables}")

    def _normalize_args(self, table_names, prefixes):
        """Allow a bare string wherever an iterable of strings is accepted."""
        if isinstance(table_names, str):
            table_names = [table_names]
        if isinstance(prefixes, str):
            prefixes = [prefixes]
        return table_names, prefixes

    def _apply_prefix_filter(self, table_names, prefixes):
        """Filter `table_names` by `prefixes` (no-op when no prefixes given)."""
        if prefixes:
            logging.debug(f'table_names before filtering: {table_names}')
            table_names = self._filter_tables_by_prefixes(table_names, prefixes)
            logging.debug(f'table_names after filtering: {table_names}')
        return table_names

    def _create_delta_table(self, table_sql_name: str, table_location_path: str) -> bool:
        """Run CREATE TABLE ... USING DELTA when a `_delta_log` dir exists at the location.

        Returns:
            bool: True when the table was created, False when no `_delta_log`
            directory exists at the target location.
        """
        sql = f"""
        CREATE TABLE {table_sql_name}
        USING DELTA
        LOCATION '{table_location_path}'
        """
        if Path(f'/dbfs{table_location_path}/_delta_log').exists():
            self.spark.sql(sql)
            logging.info(f'Restoring delta table for {table_sql_name} at {table_location_path} SUCCESS')
            return True
        logging.debug(f'`/dbfs{table_location_path}/_delta_log` doesn\'t exist')
        logging.debug(f'Restoring delta table for {table_sql_name} at {table_location_path} FAILED')
        return False

    def _update_delta_table_location(self, table_sql_name: str, table_new_location_path: str) -> bool:
        """Run ALTER TABLE ... SET LOCATION when a `_delta_log` dir exists at the new location.

        Returns:
            bool: True when the location was updated, False when no `_delta_log`
            directory exists at the new location.
        """
        sql = f"""
        ALTER TABLE {table_sql_name}
        SET LOCATION '{table_new_location_path}'
        """
        if Path(f'/dbfs{table_new_location_path}/_delta_log').exists():
            self.spark.sql(sql)
            logging.info(f'Relocating delta table for {table_sql_name} to {table_new_location_path} SUCCESS')
            return True
        logging.debug(f'`/dbfs{table_new_location_path}/_delta_log` doesn\'t exist')
        logging.debug(f'Relocating delta table for {table_sql_name} to {table_new_location_path} FAILED')
        return False

    def _get_all_tables_from_dbs_catalog(self):
        """List sql_names of every non-view table in the Databricks catalog."""
        return [
            table.sql_name
            for db in dbricks_catalog.DbricksCatalog().get_databases()
            for table in db.get_tables()
            if not table.is_view
        ]

    def _get_all_tables_from_dbfs_path(self, abs_path: Path) -> typing.List[str]:
        """Discover `<table>_version_<n>` names from `<table>/version=<n>/_delta_log/` dirs."""
        table_names = []
        for p in abs_path.glob('*.*/version=*/_delta_log/'):
            parts = p.relative_to(abs_path).parts
            table_names.append(f"{parts[0]}_version_{parts[1].split('version=')[1]}")
        return table_names

    def _filter_tables_by_prefixes(
            self,
            table_names: typing.Iterable[str],
            prefixes: typing.Iterable[str]
    ) -> typing.Iterable[str]:
        """Keep table names starting with any prefix.

        NOTE: a table matching several prefixes appears once per matching prefix.
        """
        return [
            table_name
            for prefix in prefixes
            for table_name in table_names
            if table_name.startswith(prefix)
        ]
def restore_delta_tables(
    dbfs_path: str,
    table_names: typing.Iterable[str] = None,
    prefixes: typing.Iterable[str] = None
):
    """Module-level convenience wrapper around Crawler.restore_delta_tables."""
    crawler = Crawler()
    crawler.restore_delta_tables(dbfs_path, table_names=table_names, prefixes=prefixes)
def relocate_delta_tables(
    dbfs_path: str,
    table_names: typing.Iterable[str] = None,
    prefixes: typing.Iterable[str] = None
):
    """Module-level convenience wrapper around Crawler.relocate_delta_tables."""
    crawler = Crawler()
    crawler.relocate_delta_tables(dbfs_path, table_names=table_names, prefixes=prefixes)
|
{"/bricklayer/catalog/crawler.py": ["/bricklayer/catalog/__init__.py"], "/bricklayer/api/__init__.py": ["/bricklayer/__init__.py"]}
|
13,928
|
mao-liu/bricklayer
|
refs/heads/master
|
/bricklayer/__init__.py
|
import json
import logging
import sys
from logging import NullHandler
# Set default logging handler to avoid "No handler found" warnings.
logging.getLogger(__name__).addHandler(NullHandler())
# py4j (Spark's JVM bridge) is very chatty; surface only its errors.
logging.getLogger("py4j").setLevel(logging.ERROR)
# Route INFO+ records to stdout so they appear in notebook cell output.
# NOTE(review): calling basicConfig at import time configures the root logger
# for the whole process — confirm this is intended for library consumers.
logging.basicConfig(
    level='INFO',
    stream=sys.stdout,
    format='[{levelname}] [{asctime}] [{name}] [{module}.{funcName}] {message}',
    style='{'
)
def get_dbutils(spark):
    """Return a DBUtils handle bound to *spark* (import deferred to runtime)."""
    from pyspark.dbutils import DBUtils
    dbutils = DBUtils(spark)
    return dbutils
def get_spark():
    """Return the active SparkSession, creating one if none exists."""
    from pyspark.sql import SparkSession
    session = SparkSession.builder.getOrCreate()
    return session
class NotebookContext():
    """Convenience accessor for the Databricks notebook execution context."""

    def __init__(self):
        spark = get_spark()
        dbutils = get_dbutils(spark)
        context = dbutils.notebook.entry_point.getDbutils().notebook().getContext()
        self._context = context
        # context tags carry host/cluster metadata as a plain dict
        self._context_tags = json.loads(context.toJson()).get('tags')

    def get_run_id(self):
        """Return the current run id."""
        return self._context.currentRunId().toString()

    def get_api_token(self):
        """Return the API token of the current context."""
        return self._context.apiToken().value()

    def get_browser_host_name(self):
        """Return the notebook host name."""
        return self._context_tags.get('browserHostName')

    def get_browser_host_name_url(self):
        """Return the notebook host name as an https URL."""
        host = self.get_browser_host_name()
        return f'https://{host}'

    def get_notebook_path(self):
        """Return the workspace path of the current notebook."""
        return self._context.notebookPath().value()

    def get_notebook_cluster_id(self):
        """Return the id of the cluster running the current notebook."""
        return self._context.clusterId().value()
def get_notebook_context():
    """Build and return a fresh NotebookContext."""
    context = NotebookContext()
    return context
|
{"/bricklayer/catalog/crawler.py": ["/bricklayer/catalog/__init__.py"], "/bricklayer/api/__init__.py": ["/bricklayer/__init__.py"]}
|
13,929
|
mao-liu/bricklayer
|
refs/heads/master
|
/bricklayer/api/__init__.py
|
"""
Wrappers for databricks_cli api and bring some sanity back with namespaces.
Usage:
```
from bricklayer.api import DBSApi
# export notebook
db = DBSApi()
db.export_notebook(
source_path='/Repos/deploy/dac-dbs-volume-projection-validation/02_validation_notebooks/90_run_vp_6',
target_path= '/dbfs/mnt/external/tmp/90_run_vp_6'
)
# To save the current notebook to the runs folder
db.export_current_notebook_run()
```
"""
import pathlib
import random
import datetime
import json
import requests
from databricks_cli.workspace.api import WorkspaceApi
from databricks_cli.jobs.api import JobsApi
from databricks_cli.sdk import ApiClient
from databricks_cli.sdk import JobsService
from databricks_cli.clusters.api import ClusterApi
from databricks_cli.runs.api import RunsApi
from .. import get_notebook_context
class DBJobRun(object):
    '''Wrapper for a single run of a Databricks job.'''

    def __init__(self, job, run_id, client):
        self.job = job
        self.run_id = run_id
        self._client = client

    @property
    def data(self):
        '''Raw run payload from the Runs API.'''
        runs_api = RunsApi(self._client)
        return runs_api.get_run(self.run_id)

    @property
    def result_state(self):
        '''Result state of the run, if any.'''
        return self.data['state'].get('result_state')

    @property
    def life_cycle_state(self):
        """Can be PENDING, RUNNING or TERMINATED."""
        return self.data['state'].get('life_cycle_state')

    @property
    def state_message(self):
        '''Human-readable state message of the run, if any.'''
        return self.data['state'].get('state_message')

    @property
    def run_page_url(self):
        '''URL of the run page in the Databricks UI.'''
        return self.data['run_page_url']

    @property
    def attempt_number(self):
        '''Attempt number of this run.'''
        return self.data['attempt_number']

    def get_run_output(self):
        '''Return the output of the job as defined in the job notebook
        with a call to the `dbutils.notebook.exit` function.'''
        payload = RunsApi(self._client).get_run_output(self.run_id)
        return payload.get('notebook_output')
class DBJob(object):
    '''Wrapper for a Databricks job and the runs launched from it.'''

    def __init__(self, job_id, client):
        self.job_id = job_id
        self._client = client
        # runs launched through this wrapper, in launch order
        self.runs = []

    def run_now(self, jar_params=None, notebook_params=None, python_params=None,
                spark_submit_params=None):
        """Launch this job and return a DBJobRun tracking it.

        :param jar_params: list of jars to be included
        :param notebook_params: map (dict) with the params to be passed to the job
        :param python_params: passed to the notebook as if they were command-line parameters
        :param spark_submit_params: list of parameters for jobs with a spark submit task,
            as command-line parameters.
        """
        api = JobsApi(self._client)
        data = api.run_now(
            self.job_id,
            jar_params=jar_params,
            notebook_params=notebook_params,
            python_params=python_params,
            spark_submit_params=spark_submit_params
        )
        run = DBJobRun(self, data['run_id'], self._client)
        self.runs.append(run)
        return run

    def stop(self):
        """Cancel every run launched from this wrapper."""
        for run in self.runs:
            JobsService(self._client).client.perform_query(
                'POST', '/jobs/runs/cancel', data={
                    "run_id": run.run_id
                }
            )
class DBSApi(object):
    """Wrapper around the databricks_cli APIs (workspace, jobs, clusters, runs).

    Token and host default to the current notebook context, so inside a
    Databricks notebook `DBSApi()` works with no arguments.
    """

    def __init__(
        self,
        token=None,
        host=None,
        apiVersion='2.0',
    ):
        if token is None:
            token = get_notebook_context().get_api_token()
        if host is None:
            host = get_notebook_context().get_browser_host_name_url()
        self._client = ApiClient(
            host=host,
            apiVersion=apiVersion,
            token=token
        )

    def export_notebook(self, source_path, target_path, fmt='DBC', is_overwrite=False):
        """Export a workspace notebook to a local file."""
        (
            WorkspaceApi(self._client)
            .export_workspace(
                source_path,
                target_path,
                fmt,
                is_overwrite
            )
        )

    def import_notebook(self, source_path, target_path, language='PYTHON', fmt='DBC', is_overwrite=False):
        """Import a local file into the workspace as a notebook."""
        (
            WorkspaceApi(self._client)
            .import_workspace(
                source_path,
                target_path,
                language,
                fmt,
                is_overwrite
            )
        )

    def mkdir(self, dir_path):
        """Create a directory in the workspace."""
        (
            WorkspaceApi(self._client)
            .mkdirs(
                dir_path
            )
        )

    def backup_notebook(self, source_path, target_path, fmt="DBC"):
        """Backup a notebook to another place in the workspace via a temp file."""
        tmp_dir = '/dbfs/tmp/'
        tmp_name = 'backup'
        intermediate_location = pathlib.Path(tmp_dir).joinpath(tmp_name)
        self.export_notebook(source_path, intermediate_location.as_posix(), fmt)
        try:
            # BUG FIX: fmt was previously passed positionally into the
            # `language` parameter of import_notebook; pass it by keyword.
            self.import_notebook(intermediate_location.as_posix(), target_path, fmt=fmt)
        finally:
            # always remove the temp copy, even when the import fails
            intermediate_location.unlink()

    def export_current_notebook_run(self, target_path, fmt="DBC"):
        """Save the current notebook to a given location in the required format
        (default DBC), preserving the path and adding a timestamp.

        Formats allowed:
            SOURCE : The notebook will be imported/exported as source code.
            HTML   : The notebook will be imported/exported as an HTML file.
            JUPYTER: The notebook will be imported/exported as a Jupyter/IPython Notebook file.
            DBC    : The notebook will be imported/exported as Databricks archive format.
        """
        current_path = get_notebook_context().get_notebook_path()
        timestamp = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
        target_path = (
            pathlib.Path(target_path)
            .joinpath(current_path[1:])
            .joinpath(timestamp)
        )
        try:
            self.backup_notebook(current_path, target_path.as_posix(), fmt)
        except requests.exceptions.HTTPError as _e:
            error_code = _e.response.json()['error_code']
            if error_code == 'RESOURCE_DOES_NOT_EXIST':
                # target folder missing: create it and retry once
                self.mkdir(target_path.parent.as_posix())
                self.backup_notebook(current_path, target_path.as_posix(), fmt)
            else:
                raise

    def create_job(self, notebook_path, job_name=None, cluster_name=None,
                   cluster_id=None, notifications_email=None):
        """Create a databricks job.

        :param notebook_path: The path of the notebook to be run in the job, can be relative
        :param job_name: Name of the job to be run, if missing it will use the notebook_path
        :param cluster_name: If provided the job will run in the cluster with this name
        :param cluster_id: If provided the job will run in the cluster with this id (should not
            be provided at the same time as cluster_name)
        :param notifications_email: If provided, notifications on success or failure of the
            job run will be sent to this email address.

        Examples
        --------
        ```
        job = DBSApi().create_job('./dummy_job')
        job.run_now()
        #
        job = DBSApi().create_job('./dummy_job',cluster_name='Shared Writer')
        run = job.run_now(notebook_params={'PARAM':'PARAM_VALUE'})
        #
        # Example on how to run jobs with a max number of concurrent runs
        # this can help when we have capacity limits in cpu in the infrastructure side
        import time
        NUM_JOBS_TO_RUN = 6
        MAX_CONCURRENT_JOBS = 3
        jobs_to_run = [
            DBSApi().create_job('./dummy_job') for x in range(NUM_JOBS_TO_RUN)
        ]
        runs = []
        while True:
            running_runs = list(filter(lambda r:r.life_cycle_state !='TERMINATED', runs))
            print(f'running runs:{len(running_runs)}')
            if len(running_runs) < MAX_CONCURRENT_JOBS:
                if not jobs_to_run:
                    break
                job_to_run = jobs_to_run.pop()
                new_run = job_to_run.run_now()
                runs.append(new_run)
            else:
                time.sleep(2)
        ```
        """
        if cluster_name:
            assert cluster_id is None
            _cluster_id = ClusterApi(self._client).get_cluster_id_for_name(cluster_name)
        elif cluster_id:
            _cluster_id = cluster_id
        else:
            # default to the cluster running the current notebook
            _cluster_id = get_notebook_context().get_notebook_cluster_id()
        if job_name:
            _job_name = job_name
        else:
            _job_name = notebook_path
        if not pathlib.Path(notebook_path).is_absolute():
            # resolve relative paths against the current notebook's folder
            notebook_path = (
                pathlib
                .Path(get_notebook_context().get_notebook_path())
                .parent
                .joinpath(notebook_path)
                .as_posix()
            )
        _json = {
            "name": _job_name,
            "existing_cluster_id": _cluster_id,
            "notebook_task": {
                "notebook_path": notebook_path
            },
        }
        if notifications_email:
            # BUG FIX: previously `[None]` was sent when no email was provided
            _json["email_notifications"] = {
                "on_success": [notifications_email],
                "on_failure": [notifications_email],
            }
        jobdata = JobsApi(self._client).create_job(_json)
        return DBJob(
            jobdata['job_id'],
            self._client
        )

    def list_jobs(self, job_name='', job_id=''):
        """List jobs, optionally filtered by name substring or exact job id.

        BUG FIX: `result` was previously unassigned when neither filter was
        given (NameError); it now defaults to all jobs. `job_id` is compared
        as a string because the API returns integer ids.
        """
        _jobs = JobsApi(self._client).list_jobs()['jobs']
        result = _jobs
        if job_name:
            result = [job for job in _jobs if job_name in job['settings']['name']]
        if job_id:
            # exact id match takes precedence, mirroring the original behaviour
            result = [job for job in _jobs if str(job['job_id']) == str(job_id)]
        return [
            DBJob(jobdata['job_id'], self._client)
            for jobdata in result
        ]
|
{"/bricklayer/catalog/crawler.py": ["/bricklayer/catalog/__init__.py"], "/bricklayer/api/__init__.py": ["/bricklayer/__init__.py"]}
|
13,930
|
mao-liu/bricklayer
|
refs/heads/master
|
/bricklayer/catalog/dbricks_catalog.py
|
"""Module to access the databricks catalog"""
from typing import Iterator
from pyspark.sql.utils import AnalysisException
from pyspark.sql import SparkSession
class DbricksTable:
    """A table found in the databricks catalog."""

    def __init__(self, database_name, table_name, table_version, info, spark):
        self.database_name = database_name
        self.table_name = table_name
        self.table_version = table_version
        self.spark = spark
        # dict of relevant fields parsed from the extended table info
        self.info = info

    @property
    def table_created_time(self):
        """Creation time reported by the extended table info."""
        return self.info.get('Created Time')

    @property
    def table_type(self):
        """Table type string from the extended info ('VIEW' for views)."""
        return self.info.get('Type')

    @property
    def table_provider(self):
        """Storage provider string from the extended info."""
        return self.info.get('Provider')

    @property
    def table_location(self):
        """Storage location from the extended info."""
        return self.info.get('Location')

    @property
    def is_view(self):
        """True when the catalog entry is a view rather than a table."""
        return self.info.get('Type') == 'VIEW'

    @property
    def sql_name(self):
        """Name of the table as used in SQL."""
        return f"{self.database_name}.{self.table_name}_version_{self.table_version}"
class DbricksDatabase:
    """Database found in the databricks catalog."""

    # keys kept when parsing the extended table info blob
    RELEVANT_TABLE_INFO = {'Created Time', 'Type', 'Provider', 'Location'}

    def __init__(self, name, spark):
        self.name = name
        self.spark = spark

    def get_tables(self) -> Iterator[DbricksTable]:
        """Yield a DbricksTable for every table in this database."""
        rows = self.spark.sql(f"SHOW TABLE EXTENDED IN {self.name} LIKE '*'").collect()
        for table_row in rows:
            raw_name = table_row.tableName
            parsed = self._parse_extended_info(table_row.information)
            yield DbricksTable(
                self.name,
                # table names are encoded as '<table>_version_<n>'
                raw_name.split('_version_')[0],
                raw_name.split('_version_')[-1],
                info=parsed,
                spark=self.spark
            )

    def _parse_extended_info(self, info):
        """Parse the newline-delimited `key: value` info blob, keeping relevant keys."""
        parsed = {}
        for line in info.split('\n'):
            key, sep, value = line.partition(':')
            if sep and key in self.RELEVANT_TABLE_INFO:
                parsed[key] = value.strip()
        return parsed

    def __repr__(self):
        return f"{self.__class__.__name__}:{self.name}"
class DbricksCatalog:
    """Entry point for iterating over the databricks catalog."""

    def __init__(self, spark=None):
        # fall back to the active session when none is supplied
        self.spark = spark if spark is not None else SparkSession.builder.getOrCreate()

    def get_databases(self) -> Iterator[DbricksDatabase]:
        """Yield a DbricksDatabase for every database in the catalog."""
        for db_row in self.spark.sql('SHOW DATABASES').collect():
            yield DbricksDatabase(db_row.databaseName, spark=self.spark)
|
{"/bricklayer/catalog/crawler.py": ["/bricklayer/catalog/__init__.py"], "/bricklayer/api/__init__.py": ["/bricklayer/__init__.py"]}
|
13,931
|
mao-liu/bricklayer
|
refs/heads/master
|
/setup.py
|
import setuptools

# Single-source the version from bricklayer/__version__.py
# (expects a line of the form: __version__ = 'x.y.z')
with open('bricklayer/__version__.py') as version_file:
    version = version_file.read().split('=')[1].strip().strip("'")

# Long description shown on PyPI comes straight from the README.
with open("README.md", "r", encoding="utf-8") as readme_file:
    long_description = readme_file.read()

setuptools.setup(
    name="bricklayer",
    version=version,
    author="Intelematics",
    description="Internal Databricks utils",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/intelematics/bricklayer",
    packages=setuptools.find_packages(),
    classifiers=[
        "Programming Language :: Python :: 3",
        "Operating System :: OS Independent",
    ],
    python_requires='>=3.6',
    install_requires=[
        'databricks_cli', 'shapely', 'folium'
    ]
)
|
{"/bricklayer/catalog/crawler.py": ["/bricklayer/catalog/__init__.py"], "/bricklayer/api/__init__.py": ["/bricklayer/__init__.py"]}
|
13,932
|
mao-liu/bricklayer
|
refs/heads/master
|
/bricklayer/display/map/__init__.py
|
''' Module to display a folium map in databricks notebooks'''
import math
import pyspark
from pyspark.sql import SparkSession
import pandas as pd
import folium
import shapely.wkt as wkt
import shapely.geometry
import shapely.geometry.base
class Layer():
    ''' Layer to be rendered in the map '''

    def __init__(self, data, geometry_col=None, popup_attrs=False, color='red',
                 weight=None, radius=1):
        """
        Args:
            data (*): pandas dataframe, geodataframe, spark dataframe or a databricks SQL query.
            geometry_col (str): name of the geometry column; auto-detected when omitted.
            popup_attrs (list): the attributes used to populate a pop up; if False there
                will be no popup, if True all non-geometry attrs are used.
            color (str): Color to render the layer. Color name or RGB. (i.e. '#3388ff')
            weight (int): Width of the stroke when rendering lines or points.
            radius (int): Radius of the circles used for points, default is 1.
        Raises:
            ValueError: if the data is empty or no geometry column can be determined.
        """
        dataframe = self.get_dataframe(data)
        if dataframe.empty:
            raise ValueError('No data to display')
        self.geometry_col = self.get_geometry_col(geometry_col, dataframe)
        self.dataframe = self.get_dataframe_with_geom(dataframe, self.geometry_col)
        self.centroid = self.get_centroid(self.dataframe, self.geometry_col)
        self.popup_attrs = popup_attrs
        self.color = color
        self.weight = weight
        self.radius = radius

    def get_geometry_col(self, geometry_col: str, dataframe: pd.DataFrame):
        '''Return the name of the geometry column, auto-detecting it when not given.'''
        if geometry_col is not None:
            if geometry_col not in dataframe.columns:
                raise ValueError(f"Column {geometry_col} not found in data columns")
            return geometry_col
        candidates = []
        for column in dataframe.columns:
            # heuristics: common naming patterns for geometry columns
            if 'geom' in column or 'geography' in column or 'wkt' in column:
                candidates.append(column)
        if not candidates:
            # BUG FIX: previously fell through to candidates[0] and raised a bare IndexError
            raise ValueError("Could not detect a geometry column; specify the geometry_col argument")
        if len(candidates) > 1:
            raise ValueError("Specify the geometry_col argument for the data")
        return candidates[0]

    def get_dataframe(self, data) -> pd.DataFrame:
        '''Get the data in a pandas DataFrame.'''
        if isinstance(data, pd.DataFrame):
            return data.copy()
        if isinstance(data, pyspark.sql.dataframe.DataFrame):
            return data.toPandas()
        if isinstance(data, str):
            # treat strings as SQL queries against the active session
            spark = SparkSession.builder.getOrCreate()
            return spark.sql(data).toPandas()
        raise NotImplementedError(f"Can't interpret data with type {type(data)}")

    def get_dataframe_with_geom(self, dataframe: pd.DataFrame, geometry_col: str):
        '''Convert the geometry column to shapely geometries (from WKT when needed).'''
        geom = dataframe.iloc[0][geometry_col]
        if isinstance(geom, str):
            dataframe[geometry_col] = dataframe[geometry_col].apply(wkt.loads)
            return dataframe
        if isinstance(geom, shapely.geometry.base.BaseGeometry):
            return dataframe
        # typo fixed in the message below ("Invalida ... geometry_colum")
        raise ValueError(f"Invalid type for geometry_col in the data ({type(geom)})")

    def get_centroid(self, dataframe: pd.DataFrame, geometry_col: str):
        '''Get the centroid of all the geometries in the layer.'''
        centroids = [r.centroid for _, r in dataframe[geometry_col].items()]
        multipoint = shapely.geometry.MultiPoint(centroids)
        return multipoint.centroid

    def get_popup(self, row: pd.Series):
        '''Get a folium pop-up with the requested attributes.'''
        if isinstance(self.popup_attrs, list):
            non_geom_cols = self.popup_attrs
        else:
            # popup_attrs is truthy but not a list: show every non-geometry column
            non_geom_cols = list(self.dataframe.columns)
            non_geom_cols.remove(self.geometry_col)
        return folium.Popup((
            row
            [non_geom_cols]
            .to_frame()
            .to_html()
        ))

    def get_map_geom(self, row: pd.Series):
        '''Get the folium geometry for one row from its shapely geom.'''
        sgeom = row[self.geometry_col]
        kwargs = {'color': self.color}
        if self.popup_attrs:
            html_popup = self.get_popup(row)
        else:
            html_popup = None
        if self.weight is not None:
            kwargs['weight'] = self.weight
        if isinstance(sgeom, shapely.geometry.LineString):
            # shapely is (x, y) i.e. (lon, lat); folium expects (lat, lon)
            coords = [(y, x) for x, y in sgeom.coords]
            fgeom = folium.PolyLine(
                coords,
                **kwargs
            )
        elif isinstance(sgeom, shapely.geometry.Point):
            kwargs['radius'] = self.radius
            coords = [(y, x) for x, y in sgeom.coords]
            fgeom = folium.CircleMarker(
                coords[0],
                **kwargs
            )
        else:
            raise NotImplementedError(f'Geometry Type not Supported {type(sgeom)}')
        if html_popup:
            fgeom.add_child(html_popup)
        return fgeom

    def get_bounds(self):
        '''Get the bounds (minx, miny, maxx, maxy) for all the geometries.'''
        minx, miny, maxx, maxy = None, None, None, None
        geoms_bounds = self.dataframe[self.geometry_col].apply(lambda g: g.bounds)
        for _minx, _miny, _maxx, _maxy in geoms_bounds:
            if minx is None:
                minx, miny, maxx, maxy = _minx, _miny, _maxx, _maxy
            else:
                minx = min(minx, _minx)
                miny = min(miny, _miny)
                maxx = max(maxx, _maxx)
                maxy = max(maxy, _maxy)
        return minx, miny, maxx, maxy

    def render_to_map(self, folium_map):
        '''Render every row of the layer into the given folium map.'''
        for _, row in self.dataframe.iterrows():
            map_geom = self.get_map_geom(row)
            map_geom.add_to(folium_map)
class Map():
    '''Map that can render layers'''

    def __init__(self, layers: list, **map_args):
        self.layers = layers
        self.map_args = dict(map_args)
        # default zoom level unless the caller supplied one
        self.map_args.setdefault('zoom_start', 13)

    def get_centroid(self):
        '''Get the centroid of all the layers.'''
        layer_centroids = [layer.centroid for layer in self.layers]
        return shapely.geometry.MultiPoint(layer_centroids).centroid

    def get_bounds(self):
        '''Get the bounds (minx, miny, maxx, maxy) of all the layers.'''
        minx, miny, maxx, maxy = None, None, None, None
        for layer in self.layers:
            lminx, lminy, lmaxx, lmaxy = layer.get_bounds()
            if minx is None:
                minx, miny, maxx, maxy = lminx, lminy, lmaxx, lmaxy
                continue
            minx = min(minx, lminx)
            miny = min(miny, lminy)
            maxx = max(maxx, lmaxx)
            maxy = max(maxy, lmaxy)
        return minx, miny, maxx, maxy

    def render(self):
        '''Build the folium map centered on the layers and draw each layer onto it.'''
        center = self.get_centroid()
        folium_map = folium.Map(
            [center.y, center.x],
            **self.map_args
        )
        for layer in self.layers:
            layer.render_to_map(folium_map)
        return folium_map
|
{"/bricklayer/catalog/crawler.py": ["/bricklayer/catalog/__init__.py"], "/bricklayer/api/__init__.py": ["/bricklayer/__init__.py"]}
|
13,933
|
mao-liu/bricklayer
|
refs/heads/master
|
/bricklayer/catalog/__init__.py
|
from . import dbricks_catalog
from . import crawler
|
{"/bricklayer/catalog/crawler.py": ["/bricklayer/catalog/__init__.py"], "/bricklayer/api/__init__.py": ["/bricklayer/__init__.py"]}
|
13,935
|
LuisDiego19FV/Graficas-SR6
|
refs/heads/master
|
/bmp_processor.py
|
#bmp_processor
#Por Luis Diego Fernandez
#V_A
import sys
import math
import struct
import random
import numpy as np
class bmpImage:
# Define init(). Attributes Initializer
def __init__(self, new_width, new_height):
    """Initialize a blank BMP canvas of new_width x new_height pixels."""
    # raw BMP bytes (header + pixel data), rebuilt by constructImage
    self.image_data = bytes()
    # image attributes
    self.width = 0
    self.height = 0
    self.bits_per_pixel = 0
    self.row_bytes = 0
    self.row_padding = 0
    # viewport (set elsewhere; starts as an empty region)
    self.vp_x = 0
    self.vp_y = 0
    self.vp_width = 0
    self.vp_height = 0
    # clear colors (0-255 channels used by glClear)
    self.clearRgbRed = 0
    self.clearRgbGreen = 0
    self.clearRgbBlue = 0
    # paint colors (0-255 channels used by the point/line painters)
    self.paintRgbRed = 0
    self.paintRgbGreen = 0
    self.paintRgbBlue = 0
    # texture image (rows of packed pixel bytes loaded by glLoadTextureImage)
    self.textureImg = []
    self.texture_width = 0
    self.texture_height = 0
    self.texture_width_ratio = 0
    self.texture_height_ratio = 0
    # build the header and z-buffer for the requested size
    self.constructImage(new_width, new_height)
# Define constructImage(int, int). Creates the header for the BMP image
# returns: 0 on success
def constructImage(self, new_width, new_height):
    """Build the 26-byte BMP header and reset the z-buffer for the given size."""
    self.width = new_width
    self.height = new_height
    # 32 bits per pixel -> 4 bytes per pixel per row
    self.row_bytes = new_width * 4
    # NOTE(review): row_bytes is always a multiple of 4 here, so this padding
    # always evaluates to 0 — presumably kept for generality; confirm intent.
    self.row_padding = int(math.ceil(int(self.row_bytes / 4.0))) * 4 - self.row_bytes
    data = bytes('BM', 'utf-8')                                   # magic number
    data += struct.pack('i', 26 + 4 * self.width * self.height)   # total file size
    data += struct.pack('h', 0)    # reserved
    data += struct.pack('h', 0)    # reserved
    data += struct.pack('i', 26)   # offset to pixel data (header is 26 bytes)
    data += struct.pack('i', 12)   # DIB header size
    data += struct.pack('h', self.width)
    data += struct.pack('h', self.height)
    data += struct.pack('h', 1)    # color planes
    data += struct.pack('h', 32)   # bits per pixel
    self.image_data = data
    # depth buffer: one -inf entry per pixel, updated by the polygon filler
    self.z_buffer = [
        [-float('inf') for x in range(self.width)]
        for y in range(self.height)
    ]
    return 0
# Define glAbsolutePointPaint(int, int). Paints an individual pixel
# returns: 0 on success
def glAbsolutePoint(self, x, y):
    """Paint pixel (x, y) with the current paint color by splicing 4 bytes in place."""
    # byte offset = 26-byte header + 4 bytes per pixel; coordinates are 1-based.
    # NOTE(review): the stride adds row_padding (bytes) to width (pixels) —
    # harmless only while row_padding is 0; confirm before changing bit depth.
    data = self.image_data[:26 + ((y - 1) * (self.width + self.row_padding) + (x - 1)) * 4]
    data += self.rgbToByte(self.paintRgbRed, self.paintRgbGreen, self.paintRgbBlue)
    data += self.image_data[30 + ((y - 1) * (self.width + self.row_padding) + (x - 1)) * 4:]
    self.image_data = data
    return 0
# Define glAbsolutePointPaint(int, int). Paints an individual pixel
# returns: 0 on success
def glAbsolutePointWithColor(self, x, y, color):
    """Paint pixel (x, y) with `color` (4 packed bytes) by splicing it in place."""
    # same offset arithmetic as glAbsolutePoint, but with a caller-supplied
    # pre-packed pixel instead of the current paint color
    data = self.image_data[:26 + ((y - 1) * (self.width + self.row_padding) + (x - 1)) * 4]
    data += color
    data += self.image_data[30 + ((y - 1) * (self.width + self.row_padding) + (x - 1)) * 4:]
    self.image_data = data
    return 0
# Define glLine(). Paints a line from point (xi,yi) to (xf,yf)
# returns: 0 on success
def glAbsoluteLine(self, xi, yi, xf, yf):
    """Paint a straight line from (xi, yi) to (xf, yf) with the current color."""
    dy = yf - yi
    dx = xf - xi
    # vertical line: no slope; paint straight up from yi.
    # NOTE(review): assumes yf >= yi here — a vertical line given top-to-bottom
    # (dy < 0) paints nothing; confirm whether callers guarantee the order.
    if (dx == 0):
        for y in range(dy + 1):
            self.glAbsolutePoint(xi, y + yi)
        return 0
    m = dy/dx
    # gentle slope (0 <= m <= 1): step along x; otherwise step along y
    grad = m <= 1 and m >= 0
    # normalize endpoint order so iteration always moves in the +x / +y direction
    if grad and xi > xf:
        xi, xf = xf, xi
        yi, yf = yf, yi
        dy = yf - yi
        dx = xf - xi
        m = dy/dx
        grad = m <= 1 and m >= 0
    elif yi > yf:
        xi, xf = xf, xi
        yi, yf = yf, yi
        dy = yf - yi
        dx = xf - xi
        m = dy/dx
        grad = m <= 1 and m >= 0
    if (grad):
        # rasterize y = m*x + yi, one pixel per x step
        for x in range(dx + 1):
            y = round(m*x + yi)
            self.glAbsolutePoint(x+xi, y)
    else:
        # steep slope: invert and rasterize x = (1/m)*y + xi, one pixel per y step
        m = 1/m
        for y in range(dy + 1):
            x = round(m*y + xi)
            self.glAbsolutePoint(x, y + yi)
    return 0
# Define glClear(). It paints the whole image in a specific rgb color.
# returns: 0 on success
def glClear(self):
    """Fill every pixel of the image with the current clear color."""
    first = True
    # pack the clear color once and tile it across every row
    pixel = self.rgbToByte(self.clearRgbRed, self.clearRgbGreen, self.clearRgbBlue)
    for y in range(self.height):
        if (first):
            data = pixel * self.width
            first = False
        else:
            data += pixel * self.width
        # padding for each line (row_padding is 0 for 32bpp, so this is a no-op)
        for x in range(self.row_padding):
            data += bytes('\x00', 'utf-8')
    # NOTE(review): constructImage builds a 26-byte header, but this keeps 27
    # bytes before the pixel data while glAbsolutePoint splices at offset 26 —
    # looks like an off-by-one; confirm against actual output before changing.
    self.image_data = self.image_data[:27] + data
    return 0
# Define glClearColor(float, float, float). It changes the colors used for glClear
# returns: 0 on success
def glClearColor(self, r, g, b):
    """Set the clear color from float channels in [0, 1]."""
    # the rgb data for glClear is stored after converting the rgb numbers from
    # float to integers on a scale from 0 to 255 (ceil rounds channels up)
    self.clearRgbRed = int(math.ceil(float(r/1)*255))
    self.clearRgbGreen = int(math.ceil(float(g/1)*255))
    self.clearRgbBlue = int(math.ceil(float(b/1)*255))
    return 0
# Define glColor(float, float, float). It changes the colors used for painting a specific pixel
# returns: 0 on success
def glColor(self, r, g, b):
    """Set the paint color from float channels in [0, 1]."""
    # the rgb data for the pixel painting is stored after converting the rgb
    # numbers from float to integers on a scale from 0 to 255 (ceil rounds up)
    self.paintRgbRed = int(math.ceil(float(r/1)*255))
    self.paintRgbGreen = int(math.ceil(float(g/1)*255))
    self.paintRgbBlue = int(math.ceil(float(b/1)*255))
    return 0
def glLoadTextureImage(self, texture, scale_X, scale_Y):
    """Load `<texture>.bmp` into self.textureImg as rows of packed pixel bytes."""
    image = open(texture + '.bmp', "rb")
    # offset 10: start-of-pixel-data field of the BMP file header
    image.seek(10)
    header_size = struct.unpack("=l", image.read(4))[0]
    # offsets 18 and 22: width and height fields of the DIB header
    image.seek(18)
    self.texture_width = struct.unpack("=l", image.read(4))[0]
    self.texture_height = struct.unpack("=l", image.read(4))[0]
    # texel-per-pixel ratios used when mapping canvas coords onto the texture
    self.texture_width_ratio = (self.texture_width/self.width)/scale_X
    self.texture_height_ratio = (self.texture_height/self.height)/scale_Y
    self.textureImg = []
    image.seek(header_size)
    # read pixels row by row; BMP stores channels in B, G, R order.
    # NOTE(review): assumes a 24bpp source with no row padding — widths whose
    # row size is not a multiple of 4 would read misaligned; confirm inputs.
    for y in range(self.texture_height):
        self.textureImg.append([])
        for x in range(self.texture_width):
            b = ord(image.read(1))
            g = ord(image.read(1))
            r = ord(image.read(1))
            self.textureImg[y].append(self.rgbToByte(r, g, b))
    image.close()
    return 0
def glObjMover(self, vertices, scale, translateX, translateY):
    """Return `vertices` scaled by `scale`, then translated by (translateX, translateY)."""
    new_vertices = []
    # homogeneous-style 3x3 scaling matrix (z left untouched)
    scale_it = np.matrix([
        [scale, 0, 0],
        [0, scale, 0],
        [0, 0, 1]
    ])
    for vertice in vertices:
        # scale first, then translate (translation leaves z unchanged)
        vertice = np.matmul(scale_it, vertice)
        vertice = np.sum([vertice, [translateX, translateY, 0]], axis=0)
        new_vertices.append([vertice.item(0), vertice.item(1), vertice.item(2)])
    return new_vertices
def glObjRotate(self, vertices, angle):
    """Return `vertices` rotated by `angle` (radians) about the z axis."""
    new_vertices = []
    # rotation matrix about z.
    # NOTE(review): the third row [0.01, 0.001, 1] is not a pure rotation —
    # it mixes a little x/y into z; presumably a deliberate tweak for the
    # z-buffer, but confirm before relying on the z values.
    rotate_it = np.matrix([
        [np.cos(angle), -np.sin(angle), 0],
        [np.sin(angle), np.cos(angle), 0],
        [0.01, 0.001, 1]
    ])
    for vertice in vertices:
        vertice = np.matmul(rotate_it, vertice)
        new_vertices.append([vertice.item(0), vertice.item(1), vertice.item(2)])
    return new_vertices
def glObjReader(self, objectName):
    """Parse `<objectName>.obj` and return [vertices, faces, textures]."""
    # opens obj file
    file = open(objectName + '.obj')
    lines = file.read().splitlines()
    # vertices and faces
    vertices = []
    textures = []
    faces = []
    # reads each line and stores each vertice and face
    for line in lines:
        # gets the prefix and the values of either a vertice or a face
        try:
            prefix, value = line.split(' ', 1)
        except ValueError:
            # blank or malformed line: skip it
            continue
        # reads and stores geometric vertices ("v x y z")
        if prefix == 'v':
            try:
                vertices.append(list(map(float, value.split(' '))))
            except ValueError:
                # NOTE(review): `break` aborts the whole parse on one bad
                # vertex line rather than skipping it — confirm intent.
                break
        # reads and stores texture coordinates ("vt u v")
        if prefix == 'vt':
            try:
                textures.append(list(map(float, value.split(' '))))
            except ValueError:
                break
        # reads and stores faces ("f v/vt/vn ..." index groups)
        elif prefix == 'f':
            section = []
            for face in value.split(' '):
                try:
                    section.append(list(map(int, face.split('/'))))
                except ValueError:
                    # retry with the "v//vn" (no texture index) form
                    try:
                        section.append(list(map(int, face.split('//'))))
                    except ValueError:
                        break
            faces.append(section)
    # 2D list to return with the vertices, faces and texture coordinates
    object_skeleton = [vertices, faces, textures]
    return object_skeleton
# Define glObjWriter(). Makes BMP out of a flat .obj
# Return 0 on success
def glObjWriter(self, object_skeleton, scale, translate_x, translate_y, angle=0):
    """Rasterize a parsed .obj (from glObjReader) onto the canvas, face by face."""
    # vertices and faces; note the scale is inverted (1/scale) before moving
    vertices = self.glObjMover(object_skeleton[0], 1/scale, translate_x, translate_y)
    if angle != 0:
        vertices = self.glObjRotate(vertices, angle)
    faces = object_skeleton[1]
    textures = object_skeleton[2]
    # counter for the progress report
    counter = 0
    # draws each face of the object
    for face in faces:
        counter += 1
        if counter%50 == 0:
            # overwrite the same console line with percentage progress
            sys.stdout.write('\r' + str(counter/len(faces)*100)[0:4] + "% complete")
        pollygon = []
        texturesToPaint = []
        z_avg = 0
        paint_pol = True
        # gets all the vertices in a face (obj indices are 1-based)
        for i in range(len(face)):
            x = int((vertices[face[i][0]-1][0])*self.width)
            y = int((vertices[face[i][0]-1][1])*self.height)
            z = int(vertices[face[i][0]-1][2])
            tex_X = textures[face[i][1]-1][0]
            tex_Y = textures[face[i][1]-1][1]
            z_avg += z
            texturesToPaint.append([tex_X, tex_Y])
            pollygon.append([x, y])
            # skip faces with any vertex outside the canvas
            if x >= self.width or y >= self.height:
                paint_pol = False
            if x < 0 or y < 0:
                paint_pol = False
        # average z coordinate of the face, used for depth ordering
        z_avg = z_avg/len(face)
        # paints the face
        if paint_pol:
            self.glPolygonMaker(pollygon, texturesToPaint, z_avg)
    sys.stdout.write('\r' + "100% complete ")
    return 0
    # Define glPolygonMaker(). Paints a figure given the vertices in a list.
    # returns: 0 on success
    def glPolygonMaker(self, vertices, textures, z_coordinate):
        """Rasterise the outline of one polygon, then scan-fill it with
        texture pixels, honouring the z-buffer (larger z wins).

        :param vertices: list of [x, y] pixel coordinates of the polygon
        :param textures: list of [u, v] texture coordinates (only textures[0]
                         is read below — see NOTE in the fill loop)
        :param z_coordinate: face depth written into self.z_buffer
        :return: 0 on success
        """
        # list collecting every outline point of the figure
        figurePoints = []
        # the line-drawing code is reused here, except that every painted
        # point is also recorded in figurePoints
        for i in range(len(vertices)):
            xi = vertices[i][0]
            yi = vertices[i][1]
            # the last vertex closes the polygon back onto the first one
            if i == len(vertices)-1:
                xf = vertices[0][0]
                yf = vertices[0][1]
            else:
                xf = vertices[i+1][0]
                yf = vertices[i+1][1]
            dy = yf - yi
            dx = xf - xi
            # vertical edge: walk straight along y
            if (dx == 0):
                if dy > 0:
                    for y in range(dy + 1):
                        figurePoints.append([xi,y + yi])
                        if z_coordinate >= self.z_buffer[xi][y+yi]:
                            self.z_buffer[xi][y+yi] = z_coordinate
                else:
                    for y in range(abs(dy) + 1):
                        figurePoints.append([xi,y + yf])
                        if z_coordinate >= self.z_buffer[xi][y+yf]:
                            self.z_buffer[xi][y+yf] = z_coordinate
            else:
                m = dy/dx
                grad = m <= 1 and m >= 0
                # normalise the direction so the edge is always walked
                # left-to-right (shallow slope) or upward (steep slope)
                if grad and xi > xf:
                    xi, xf = xf, xi
                    yi, yf = yf, yi
                    dy = yf - yi
                    dx = xf - xi
                    m = dy/dx
                    grad = m <= 1 and m >= 0
                elif yi > yf:
                    xi, xf = xf, xi
                    yi, yf = yf, yi
                    dy = yf - yi
                    dx = xf - xi
                    m = dy/dx
                    grad = m <= 1 and m >= 0
                if (grad):
                    # shallow slope: step along x, derive y
                    for x in range(dx + 1):
                        y = round(m*x + yi)
                        figurePoints.append([x+xi,y])
                        if z_coordinate >= self.z_buffer[x+xi][y]:
                            self.z_buffer[x+xi][y] = z_coordinate
                else:
                    # steep slope: step along y, derive x from the inverse slope
                    m = 1/m
                    for y in range(dy + 1):
                        x = round(m*y + xi)
                        figurePoints.append([x,y + yi])
                        if z_coordinate >= self.z_buffer[x][y+yi]:
                            self.z_buffer[x][y+yi] = z_coordinate
        # avoids processing the same point (scanline row) twice.
        avoidPoints = []
        counter_for_tex_Y = 0
        for point in figurePoints:
            # wrap the texture row index back inside the texture image
            if (int(textures[0][1]*self.texture_height)-1 + counter_for_tex_Y) > self.texture_height:
                counter_for_tex_Y -= self.texture_height_ratio
            if point[1] not in avoidPoints:
                # finds which points are in the same y coordinate in the figure.
                pointsToPaint = []
                for i in range(len(figurePoints)):
                    if figurePoints[i][1] == point[1]:
                        pointsToPaint.append(figurePoints[i][0])
                # order the points
                pointsToPaint.sort()
                pointsLen = len(pointsToPaint)
                counter_for_tex_X = 0
                if pointsLen != 0:
                    # scan-fill between the leftmost and rightmost outline
                    # points of this row
                    for xToDraw in range(pointsToPaint[0],pointsToPaint[pointsLen-1]+1):
                        if z_coordinate >= self.z_buffer[xToDraw][point[1]]:
                            # NOTE(review): textures[0][1] (the v coordinate)
                            # is used for the x-axis wrap test here —
                            # presumably textures[0][0] was intended; confirm.
                            if (int(textures[0][1]*self.texture_width)-1 + counter_for_tex_X) > self.texture_width:
                                counter_for_tex_X -= self.texture_width_ratio
                            self.glAbsolutePointWithColor(xToDraw,point[1], \
                                self.textureImg[int(textures[0][1]*self.texture_height + counter_for_tex_Y)-1][int(textures[0][0]*self.texture_width + counter_for_tex_X)])
                            self.z_buffer[xToDraw][point[1]] = z_coordinate
                        counter_for_tex_X += self.texture_width_ratio
                avoidPoints.append(point[1])
            counter_for_tex_Y += self.texture_height_ratio
        return 0
# Define glVertex(int, int). Paints an individual pixel
# returns: 0 on success
def glVertex(self,x, y):
# painting cordinates
pcx = self.vp_x + x
pcy = self.vp_y + y
# changes the data of an individual pixel
data = self.image_data[:26 + ((pcy - 1) * (self.width + self.row_padding) + (pcx - 1)) * 4]
data += self.rgbToByte(self.paintRgbRed, self.paintRgbGreen, self.paintRgbBlue)
data += self.image_data[30 + ((pcy - 1) * (self.width + self.row_padding) + (pcx - 1)) * 4:]
self.image_data = data
return 0
# Define glColor(). Paint the whole viewport
# returns: 0 on success
def glVertexPaintVp(self):
for y in range(self.vp_height):
for x in range(self.vp_width):
self.glVertex(x,y)
return 0
# Define glViewPort(int, int, int, int). Establish an area of work for the painting process
# returns: 0 on success
def glViewPort(self, viewport_x, viewport_y, viewport_width, viewport_height):
self.vp_x = viewport_x
self.vp_y = viewport_y
self.vp_width = viewport_width
self.vp_height = viewport_height
return 0
# Define rgbToByte(int, int, int). Converts RGB to bytes
# returns: 4 bytes indicating the RGB of a pixel
def rgbToByte(self, r,g,b):
data = struct.pack('B', b)
data += struct.pack('B', g)
data += struct.pack('B', r)
data += struct.pack('B', 0)
return data
# Define finish(). Takes the image_data and makes a file out of it with
# a specif name
# returns: 0 on success
def writeImage(self, fileName):
# Makes the image file
img = open(fileName + ".bmp", 'wb')
img.write(self.image_data)
return 0
def get_bmp_processor_info(self):
return "bmp_processor Version B"
def get_header_info(self):
return [self.width, self.height,self.bits_per_pixel, self.row_bytes, self.row_padding]
def get_viewport_info(self):
return [slef.viewport_x, self.viewport_y, self.viewport_width, self.viewport_height]
def get_clearColors_info(self):
return [self.clearRgbRed, self.clearRgbGreen, self.clearRgbBlue]
def get_paintColors_info(self):
return [self.paintRgbRed, self.paintRgbGreen, self.paintRgbBlue]
|
{"/SR6.py": ["/bmp_processor.py"]}
|
13,936
|
LuisDiego19FV/Graficas-SR6
|
refs/heads/master
|
/SR6.py
|
#SR5
#Luis Diego Fernandez
import sys
import bmp_processor
import numpy as np
# Shared 600x600 canvas; only used here to load the model and textures
# (each shot below renders onto its own 400x400 canvas).
image = bmp_processor.bmpImage(600,600)
print(image.get_bmp_processor_info())
# Black clear/paint colours.
image.glClearColor(0,0,0)
image.glColor(0,0,0)
image.glClear()
# Parse the earth model once; reused by every shot function below.
image_skeleton = image.glObjReader("obj/earth")
image.glLoadTextureImage('obj/earth',2,1)
#Load model
# pipeline: readImage -> loadTextures -> writeObject
def _render(label, out_name, scale, translate_y):
    """Render the shared earth model at the given scale/offset and save it.

    Helper extracted from three near-identical shot functions; each shot
    only differed in its label, output name, scale and vertical offset.

    :param label: progress label printed before rendering
    :param out_name: output file name (without the .bmp suffix)
    :param scale: divisor passed to glObjWriter (bigger -> smaller model)
    :param translate_y: vertical offset in normalised image space
    """
    print(label)
    canvas = bmp_processor.bmpImage(400, 400)
    canvas.glLoadTextureImage('obj/earth', 2, 1)
    canvas.glClearColor(0, 0, 0)
    canvas.glColor(0, 0, 0)
    canvas.glClear()
    canvas.glObjWriter(image_skeleton, scale, 0.5, translate_y, 0)
    canvas.writeImage(out_name)
    print("\n")

def lowangle():
    """Low-angle shot: large model, shifted towards the top."""
    _render("lowangle", "earth-lowangle", 700, 0.65)

def mediumshot():
    """Medium shot: centred model at mid scale."""
    _render("mediumshot", "earth-mediumshot", 600, 0.5)

def highangle():
    """High-angle shot: small model, shifted towards the bottom."""
    _render("highangle", "earth-highangle", 1200, 0.25)
# Command-line dispatch: exactly one argument selects which shot(s) to render.
if len(sys.argv) == 2:
    choice = sys.argv[1].lower()
    if choice == "low":
        lowangle()
    elif choice == "mid":
        mediumshot()
    elif choice == "high":
        highangle()
    elif choice == "all":
        # Render every shot in sequence.
        lowangle()
        mediumshot()
        highangle()
    else:
        print("Es necesario un argumento valido, elija: low, mid, high o all")
else:
    print("Es necesario uno de los siguientes argumentos: low, mid, high o all")
|
{"/SR6.py": ["/bmp_processor.py"]}
|
13,959
|
vinnn/FSND_Capstone
|
refs/heads/main
|
/src/api.py
|
#########################################################
#I# IMPORTS
#########################################################
# print('__file__={0:<35} | __name__={1:<25} | __package__={2:<25}'.format(__file__,__name__,str(__package__)))
import os
from flask import Flask, request, jsonify, abort
from sqlalchemy import exc
import json
from flask_cors import CORS
#I### Imports from models and auth
#I# Note: written previously as '.database.models' and '.auth.auth'
from .database.models import db_drop_and_create_all, setup_db, Actor #, Movie
from .auth.auth import AuthError, requires_auth
#########################################################
#I# INITIALISATION
#########################################################
def create_app(test_config=None):
    """Application factory: create and configure the Flask app.

    Registers the /actors CRUD routes (each guarded by an Auth0 RBAC
    permission via ``requires_auth``) and the JSON error handlers.

    Fix: the ``AuthError`` handler existed but its
    ``@app.errorhandler(AuthError)`` decorator was commented out (the
    file's own TODO), so authorization failures surfaced as generic 500s.
    It is now registered.

    :param test_config: unused hook for test-time configuration overrides
    :return: the configured Flask application
    """
    app = Flask(__name__)
    setup_db(app)
    CORS(app)

    # NOTE(review): the success responses below are built as
    # jsonify({...}, 200). Flask serialises the two positional arguments
    # into a JSON *array* [{...}, 200] (the HTTP status stays the default
    # 200). The test suite indexes data[0], so this shape is preserved.

    '''
    endpoint GET /actors
    required permissions:
        'get: actors'
    returns
        status code 200 and json {"success": True, "actors": actors}
        or error status code with reason for failure
    '''
    @app.route('/actors', methods=['GET'])
    @requires_auth(permission='get:actors')
    def get_actors(payload):
        """List all actors ordered by id."""
        try:
            actors = Actor.query.order_by(Actor.id).all()
            actors_array = [actor.todictionary() for actor in actors]
            print(actors_array)
        except Exception:
            abort(422)
        return jsonify({
            'success': True,
            'actors': actors_array
        }, 200)

    '''
    endpoint POST /actors
    should create a new row in the Actors table
    required permissions:
        'post: actors'
    returns
        status code 200 and json {"success": True, "actor": actor} where
        actor is the newly created actor
        or error status code with reason for failure
    '''
    @app.route('/actors', methods=['POST'])
    @requires_auth(permission='post:actors')
    def post_actors(payload):
        """Create a new actor from the JSON request body."""
        try:
            body = request.get_json()
            new_name = body.get("name", None)
            new_age = body.get("age", None)
            new_gender = body.get("gender", None)
            new_actor = Actor(
                name=new_name,
                age=new_age,
                gender=new_gender
            )
            new_actor.insert()
            return jsonify({
                'success': True,
                'actor': new_actor.todictionary()
            }, 200)
        except Exception:
            abort(422)

    '''
    endpoint PATCH /actors/<id>
    responds with 404 if <id> is not found, otherwise updates the row
    required permissions:
        'patch: actors'
    returns
        status code 200 and json {"success": True, "actor": actor} with the
        updated actor, or error status code with reason for failure
    '''
    @app.route('/actors/<int:id>', methods=['PATCH'])
    @requires_auth(permission='patch:actors')
    def patch_actors(payload, id):
        """Partially update an existing actor."""
        actor_to_patch = Actor.query.filter(Actor.id == id).one_or_none()
        if actor_to_patch is None:
            abort(404)
        try:
            body = request.get_json()
            new_name = body.get("name", None)
            new_age = body.get("age", None)
            new_gender = body.get("gender", None)
            print(new_name)
            # NOTE(review): fields are compared against the string "null"
            # (not Python None) — clients sending JSON null therefore set
            # the column to None. Preserved; confirm the intended contract.
            if new_name != "null":
                actor_to_patch.name = new_name
            if new_age != "null":
                actor_to_patch.age = new_age
            if new_gender != "null":
                actor_to_patch.gender = new_gender
            actor_to_patch.update()
            return jsonify({
                'success': True,
                'actor': actor_to_patch.todictionary()
            }, 200)
        except Exception:
            abort(422, "bad request etc error description")

    '''
    endpoint DELETE /actors/<id>
    responds with 404 if <id> is not found, otherwise deletes the row
    required permissions:
        'delete: actors'
    returns
        status code 200 and json {"success": True, "actor": actor} with the
        deleted actor, or error status code with reason for failure
    '''
    @app.route('/actors/<int:id>', methods=['DELETE'])
    @requires_auth(permission='delete:actors')
    def delete_actors(payload, id):
        """Delete an existing actor."""
        actor_to_delete = Actor.query.filter(Actor.id == id).one_or_none()
        if actor_to_delete is None:
            abort(404)
        try:
            actor_to_delete.delete()
            return jsonify({
                'success': True,
                'actor': actor_to_delete.todictionary()
            }, 200)
        except Exception:
            abort(422, "bad request etc error description")

    #########################################################
    ## Error Handling
    #########################################################
    @app.errorhandler(422)
    def unprocessable(error):
        return jsonify({
            "success": False,
            "error": 422,
            "message": "unprocessable"
        }), 422

    @app.errorhandler(404)
    def not_found(error):
        return jsonify({
            "success": False,
            "error": 404,
            "message": "resource not found"
        }), 404

    @app.errorhandler(400)
    def bad_request(error):
        return jsonify({
            'success': False,
            'error': 400,
            'message': 'bad request'
        }), 400

    @app.errorhandler(405)
    def method_not_allowed(error):
        return jsonify({
            'success': False,
            'error': 405,
            'message': 'method not allowed'
        }), 405

    @app.errorhandler(AuthError)
    def handle_auth_error(ex):
        '''
        Receive the raised authorization error and include it in the response.
        '''
        response = jsonify(ex.error)
        response.status_code = ex.status_code
        return response

    return app
# Module-level application instance, used by WSGI servers (e.g. gunicorn).
app = create_app()
if __name__ == '__main__':
    #app.run(host='0.0.0.0', port=8080, debug=True)
    # Flask development server with default host/port.
    app.run()
|
{"/src/api.py": ["/src/database/models.py"], "/src/test_api.py": ["/src/api.py", "/src/database/models.py"]}
|
13,960
|
vinnn/FSND_Capstone
|
refs/heads/main
|
/src/test_api.py
|
# ----------------------------------------------------------------------------#
# Imports.
# ----------------------------------------------------------------------------#
# print('__file__={0:<35} | __name__={1:<25} | __package__={2:<25}'.format(__file__,__name__,str(__package__)))
import os
import unittest
import json
from flask_sqlalchemy import SQLAlchemy
from .api import create_app # import the app=Flask(__name__) from api.py
from .database.models import setup_db, Actor # import funtions and models form models.py
# ----------------------------------------------------------------------------#
# Test Class.
# ----------------------------------------------------------------------------#
class CastingTestCase(unittest.TestCase):
    """This class represents the Casting test case.

    Exercises the /actors endpoints with two Auth0 role tokens:
    a casting assistant (read-only permissions) and a producer
    (full CRUD permissions). NOTE(review): the hard-coded JWTs below
    expire; tests depending on them will fail once they do.
    """
    # Setup.
    # ----------------------------------------#
    def setUp(self):
        """Executed before each test.
        Define test variables and initialize app."""
        # MODIFIED START
        # self.app = create_app()
        # self.client = self.app.test_client
        self.app = create_app()
        self.client = self.app.test_client
        # self.database_name = "casting_test"
        # self.database_path = "postgres://{}/{}".format('localhost:5432', self.database_name)
        # Local sqlite file next to this module (currently unused — see the
        # commented-out setup_db call below, which ignores database_path).
        self.database_filename = "database11_test.db"
        self.project_dir = os.path.dirname(os.path.abspath(__file__))
        self.database_path = "sqlite:///{}".format(os.path.join(self.project_dir, self.database_filename))
        # setup_db(self.app, self.database_path)
        setup_db(self.app) #, self.database_path)
        # Role tokens: assistant = get:actors/get:movies only;
        # producer = full CRUD on actors and movies.
        self.castassistant_jwt = 'eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCIsImtpZCI6IkJUdEl4MFNJZVNLRlpXMkFpbFMxMiJ9.eyJpc3MiOiJodHRwczovL2ZzbmQtY2Fwc3RvbmUtdGVuYW50LmV1LmF1dGgwLmNvbS8iLCJzdWIiOiJhdXRoMHw2MTVkYTAzZWFmMGM3NTAwNjkxNTZiOGIiLCJhdWQiOiJmc25kLWNhcHN0b25lLWFwaS1pZGVudGlmaWVyIiwiaWF0IjoxNjM0MjA0MDg4LCJleHAiOjE2MzQyOTA0ODgsImF6cCI6IjVEeFBsQ2tPYUdDSWJmUHg2bFlVMXpuaEZqaUVpRnNDIiwic2NvcGUiOiIiLCJwZXJtaXNzaW9ucyI6WyJnZXQ6YWN0b3JzIiwiZ2V0Om1vdmllcyJdfQ.ua1CjrluoPgZDlPKK2UJokTfSeCnGOv5L-4UjZ_fWIp1PclvBZrdTzQdpEIcIpVqOjFgg3AFdIkhUiFcJjoLxDNn77RZMqEOJ2xURG6c-KO-oiTzT_ZJkzUOgw4pB5Bxv_wc60GSEtDUdTXRQ_z4UdmzPdfO1Ire5zGBNM2esodq3lh8bdAsJgV7QGst9t0qyP1xyxJjn2RdYClGIGiIVc_GwMoHwmb0IaSHZWyXBpXYRJ6OuzfLVkQYGUZKE79NmXbq2BXN5MrkK_sNkr2zgrpmJQjKN-9EOPGBdtGVj72lk4tYfZRrWV_rP7_v2cvT4FN9aq9oVHW4BRurrGnk9w'
        self.prodexec_jwt = 'eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCIsImtpZCI6IkJUdEl4MFNJZVNLRlpXMkFpbFMxMiJ9.eyJpc3MiOiJodHRwczovL2ZzbmQtY2Fwc3RvbmUtdGVuYW50LmV1LmF1dGgwLmNvbS8iLCJzdWIiOiJhdXRoMHw2MTYwMWEyZjVjZDk2YjAwNzAyZDA5NGUiLCJhdWQiOiJmc25kLWNhcHN0b25lLWFwaS1pZGVudGlmaWVyIiwiaWF0IjoxNjM0MjA0ODE0LCJleHAiOjE2MzQyOTEyMTQsImF6cCI6IjVEeFBsQ2tPYUdDSWJmUHg2bFlVMXpuaEZqaUVpRnNDIiwic2NvcGUiOiIiLCJwZXJtaXNzaW9ucyI6WyJkZWxldGU6YWN0b3JzIiwiZGVsZXRlOm1vdmllcyIsImdldDphY3RvcnMiLCJnZXQ6bW92aWVzIiwicGF0Y2g6YWN0b3JzIiwicGF0Y2g6bW92aWVzIiwicG9zdDphY3RvcnMiLCJwb3N0Om1vdmllcyJdfQ.5VDTMXjy7oc_EqXi5ImwygbCVfvb-iEF6fyWxJG2HBwfbjiOZZyRDU431wqsMvy8MElX1Yy79mm3LzRJPa2mA4Mluq3_aMdjwXT4Nz95KzIhlzgrkD32KYUK-NtlIOue-4AEczVQuZPXvFduEe3RvhzKwna4f9G7QMevV1phglE39IkZcNzcZ4cyNLmDYn3aSPIwfMN7r3Ij4_sslcJSM7gTsz_FUVZa9NSxxitc5i5CFimMItZaFUMrxXwP4Xc6V6-67jFj2hENXq_XUtdPKHs23GFeVnK9G-f7NFiFZdo3EiU4gsddQSU8B0HfzS8EmLYlWWCida0FBHxwacawWw'
        # MODIFIED END
        # binds the app to the current context
        with self.app.app_context():
            self.db = SQLAlchemy()
            self.db.init_app(self.app)
            # create all tables
            self.db.create_all()
        # creates a new actor payload, to be used
        # in the POST actor tests:
        self.new_actor = {
            'name': 'Titi?',
            'age': 40,
            'gender': 'Male',
        }
        self.update_actor = {
            'name': 'actor3newname',
            'age': 100,
            'gender': 'Male',
        }
        self.new_quiz = {
            'quiz_category': {'type': 'Geography', 'id': '3'},
            'previous_questions': []
        }
    # Teardown.
    # ----------------------------------------#
    def tearDown(self):
        """Executed after reach test"""
        pass
    # Test. [GET NON-EXISTENT URL => ERROR ]
    # ----------------------------------------#
    def test_404_nonexistent_url(self):
        # Get response by making client make the GET request:
        # NOTE(review): 'Bearer'+ lacks the space other tests use; harmless
        # here because /actors2 404s before auth, but inconsistent.
        res = self.client().get('/actors2',
            headers={'Authorization':'Bearer'+ self.castassistant_jwt}
            )
        # Load the data using json.loads:
        data = json.loads(res.data)
        # check responses:
        self.assertEqual(res.status_code, 404)
        self.assertEqual(data['success'], False)
        self.assertEqual(data['message'], 'resource not found')
    # Test. [GET ACTORS => OK ]
    # ----------------------------------------#
    def test_200_get_categories(self):
        # Get response by making client make the GET request:
        res = self.client().get('/actors',
            headers={'Authorization':'Bearer '+ self.castassistant_jwt}
            )
        # Load the data using json.loads:
        data = json.loads(res.data)
        # check responses (the API returns a [body, 200] JSON array,
        # hence the data[0] indexing):
        self.assertEqual(res.status_code, 200)
        self.assertEqual(data[0]['success'], True)
        self.assertTrue(data[0]['actors']) # check the result contains 'actors' dictionary
    # Test. [DELETE ACTOR id => OK ]
    # ----------------------------------------#
    # def test_200_delete_actor(self):
    # Get response by making client make the DELETE request:
    #     res = self.client().delete('/actors/8',
    #         headers={'Authorization':'Bearer '+ self.prodexec_jwt}
    #         )
    #     # Load the data using json.loads:
    #     data = json.loads(res.data)
    #     # check responses:
    #     self.assertEqual(res.status_code, 200)
    #     self.assertEqual(data[0]['success'], True)
    # Test. [DELETE NON-EXISTENT ACTOR => ERROR ]
    # ----------------------------------------#
    def test_404_delete_nonexistent_actor(self):
        # Get response by making client make the DELETE request:
        res = self.client().delete('/actors/2000',
            headers={'Authorization':'Bearer '+ self.prodexec_jwt}
            )
        # Load the data using json.loads:
        data = json.loads(res.data)
        print("DATA : ")
        print(data)
        # check responses:
        self.assertEqual(res.status_code, 404)
        self.assertEqual(data['success'], False)
        self.assertEqual(data['message'], 'resource not found')
    # Test. [POST ACTOR id => OK ]
    # ----------------------------------------#
    # def test_200_post_actor(self):
    #     # Get response by making client make the
    #     # POST request (new_actor is defined above):
    #     res = self.client().post('/actors',
    #         json=self.new_actor,
    #         headers={'Authorization':'Bearer '+ self.prodexec_jwt}
    #         )
    #     # Load the data using json.loads:
    #     data = json.loads(res.data)
    #     # check responses:
    #     self.assertEqual(res.status_code, 200)
    #     self.assertEqual(data[0]['success'], True)
    # Test. [POST ACTOR WITH NO INFO => ERROR ]
    # ----------------------------------------#
    def test_422_post_wrong_actor_info(self):
        # Get response by making client make the
        # POST request, without json input info:
        res = self.client().post('/actors',
            json='wrongactor',
            headers={'Authorization':'Bearer '+ self.prodexec_jwt}
            )
        # Load the data using json.loads:
        data = json.loads(res.data)
        # check responses:
        self.assertEqual(res.status_code, 422)
        self.assertEqual(data['success'], False)
        self.assertEqual(data['message'], 'unprocessable')
    # Test. [PATCH ACTOR id => OK ]
    # ----------------------------------------#
    def test_200_patch_actor(self):
        # Get response by making client make the
        # PATCH request (update_actor is defined above):
        res = self.client().patch('/actors/3',
            json=self.update_actor,
            headers={'Authorization':'Bearer '+ self.prodexec_jwt}
            )
        # Load the data using json.loads:
        data = json.loads(res.data)
        # check responses:
        self.assertEqual(res.status_code, 200)
        self.assertEqual(data[0]['success'], True)
    # Test. [PATCH ACTOR WITH NO INFO => ERROR ]
    # ----------------------------------------#
    def test_422_patch_no_patchdata(self):
        # Get response by making client make the
        # PATCH request, without json input info:
        res = self.client().patch('/actors/3',
            json='wrongpatch',
            headers={'Authorization':'Bearer '+ self.prodexec_jwt}
            )
        # Load the data using json.loads:
        data = json.loads(res.data)
        # check responses:
        self.assertEqual(res.status_code, 422)
        self.assertEqual(data['success'], False)
        self.assertEqual(data['message'], 'unprocessable')
# Make the tests conveniently executable with "python -m src.test_api".
if __name__ == "__main__":
    unittest.main()
|
{"/src/api.py": ["/src/database/models.py"], "/src/test_api.py": ["/src/api.py", "/src/database/models.py"]}
|
13,961
|
vinnn/FSND_Capstone
|
refs/heads/main
|
/src/database/models.py
|
#########################################################
#I# IMPORTS
#########################################################
import os
from sqlalchemy import Column, String, Integer
from flask_sqlalchemy import SQLAlchemy
import json
#########################################################
#I# DATABASE CONFIGURATION
#########################################################
# SQLITE SETUP ###################################
# database_filename = "database11.db"
# project_dir = os.path.dirname(os.path.abspath(__file__))
# database_path = "sqlite:///{}".format(os.path.join(project_dir, database_filename))
# '''
# use db = SQLAlchemy() + db.init_app(app), instead of db = SQLAlchemy(app)
# https://flask.palletsprojects.com/en/1.1.x/patterns/appfactories/factories-extensions
# '''
# db = SQLAlchemy()
# '''
# setup_db(app)
# binds a flask application and a SQLAlchemy service
# '''
# def setup_db(app):
# app.config["SQLALCHEMY_DATABASE_URI"] = database_path
# app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
# db.app = app
# db.init_app(app)
# POSTGRES SETUP ###################################
database_name = "db_capstone"
database_path = "postgresql://{}/{}".format('localhost:5432', database_name)
# postgresql://localhost:5432/db_capstone
# NOTE(review): the local URI above is immediately overwritten by the
# hard-coded remote URI below. SECURITY: database credentials are committed
# to source control here — they should be read from an environment variable
# (e.g. os.environ["DATABASE_URL"]) and the exposed password rotated.
database_path = "postgres://nfkphcncctfhsr:28a0b6b1e059768d27a4f75e8034b9d8dfa36395ca7011c1614f28503974b6ac@ec2-54-195-246-55.eu-west-1.compute.amazonaws.com:5432/d69ah6men0oka"
db = SQLAlchemy()
# Startup banner so the effective database URI is visible in the logs.
print("%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%")
print("%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%")
print("%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%")
print("%%%%%%%%%%%%%%%%%%%%%% DATABASE_URL : %%%%%%%%%%%%%%%%%%%%%%%%%%%")
print(database_path)
print("%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%")
print("%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%")
print("%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%")
'''
setup_db(app)
binds a flask application and a SQLAlchemy service
'''
def setup_db(app, database_path=database_path):
    """Bind a Flask application to the SQLAlchemy service and create tables.

    :param app: the Flask application to configure
    :param database_path: database URI; defaults to the module-level path
    """
    app.config.update(
        SQLALCHEMY_DATABASE_URI=database_path,
        SQLALCHEMY_TRACK_MODIFICATIONS=False,
    )
    db.app = app
    db.init_app(app)
    db.create_all()
#########################################################
#########################################################
'''
db_drop_and_create_all()
drops the database tables and starts fresh
can be used to initialize a clean database
!!NOTE you can change the database_filename variable to have multiple versions of a database
'''
def db_drop_and_create_all():
    """Drop every table and recreate the schema — a clean database."""
    db.drop_all()
    db.create_all()
#########################################################
#########################################################
'''
Actor
an actor entity, extends the base SQLAlchemy Model
'''
class Actor(db.Model):
    """An actor entity, extends the base SQLAlchemy Model."""
    __tablename__ = 'actors' # table name to be plural, non-capitalized
    # Autoincrementing, unique primary key
    id = Column(Integer().with_variant(Integer, "sqlite"), primary_key=True)
    # Actor's name (unique, required)
    name = Column(String(80), unique=True, nullable=False)
    # Actor's age in years (required)
    age = Column(Integer, unique=False, nullable=False)
    # Actor's gender (required)
    gender = Column(String(80), unique=False, nullable=False)
    '''
    todictionary()
        dictionary representation of the model
    '''
    def todictionary(self):
        """Return the actor as a plain dict (note: the id is not included)."""
        return {
            'name': self.name,
            'age': self.age,
            'gender': self.gender
        }
    '''
    insert()
        inserts a new model into a database
        the model must have a unique name
        the model must have a unique id or null id
        EXAMPLE
            actor = Actor(name=req_name, age=req_age, gender=req_gender)
            actor.insert()
    '''
    def insert(self):
        """Add this actor to the session and commit."""
        db.session.add(self)
        db.session.commit()
    '''
    delete()
        deletes a new model into a database
        the model must exist in the database
        EXAMPLE
            actor = Actor(name=req_name, age=req_age)
            actor.delete()
    '''
    def delete(self):
        """Remove this actor from the session and commit."""
        db.session.delete(self)
        db.session.commit()
    '''
    update()
        updates a new model into a database
        the model must exist in the database
        EXAMPLE
            actor = Actor.query.filter(Actor.id == id).one_or_none()
            actor.name = 'James'
            actor.update()
    '''
    def update(self):
        """Commit any attribute changes already made on this instance."""
        db.session.commit()
    #########################################################
    #I# method to give a readable string representation (for debugging and testing)
    #########################################################
    def __repr__(self):
        return '<name %r>' % self.name
|
{"/src/api.py": ["/src/database/models.py"], "/src/test_api.py": ["/src/api.py", "/src/database/models.py"]}
|
13,962
|
TurnA-Lab/auto-rename-pic
|
refs/heads/master
|
/pick_stu_number.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'iskye'
import os
import re
import string
from shutil import copyfile
from typing import List
import beeprint
import cv2
import numpy
from cnocr import CnOcr
from cnstd import CnStd
from tqdm import tqdm
class PickStuNumber:
    """Locate and OCR 12-digit student numbers in photos, then sort the
    photos into success / duplicate / failure folders by the result."""
    def __init__(self, path: str, show_img: bool = False):
        """
        :param path: a single image file or a directory containing images
        :param show_img: show intermediate OpenCV windows for debugging
        """
        # accepted image extensions
        self.__ext = {'jpg', 'jpeg'}
        # OCR restricted to digit characters only
        self.__ocr = CnOcr(model_name='densenet-lite-gru', cand_alphabet=string.digits, name=path)
        self.__std = CnStd(name=path)
        # per-file result: path -> {name, suffix, legal, dup}
        self.__info_dict = {}
        # recognised number -> list of file paths sharing that number
        self.__dup_name_dict = {}
        # normalise the path first
        path = self.__format_path(path)
        # decide what to process based on the kind of path given
        if os.path.isdir(path) or os.path.isfile(path):
            # directory: every image file inside; single file: just itself
            files = [self.__format_path(os.path.join(path, f)) for f in os.listdir(path) if
                     (os.path.isfile(os.path.join(path, f)) and self.__is_image(f))] \
                if os.path.isdir(path) \
                else [path]
            # pipeline per file: crop -> detect text boxes -> OCR -> record
            for file in tqdm(files):
                self.__handle_info(file,
                                   self.__ocr_number(self.__std_number(self.__cutter(file, show_img))))
        else:
            print(f'获取数据错误,“{path}”既不是文件也不是文件夹')
    @staticmethod
    def __format_path(path: str):
        """Return an absolute path with forward slashes only."""
        return os.path.abspath(path).replace('\\', '/')
    @staticmethod
    def __get_suffix(path: str) -> str:
        """
        Get the file extension.
        :param path: image path
        :return: the extension (text after the last dot)
        """
        return path.split('.')[-1]
    def __is_image(self, path: str) -> bool:
        # an image is anything whose extension is in self.__ext
        return self.__get_suffix(path) in self.__ext
    @staticmethod
    def __cutter(path: str, show_img: bool = False) -> numpy.ndarray:
        """
        Crop the image down to the region containing the student number.
        :param path: image path
        :param show_img: whether to display intermediate images
        :return: the cropped image as an ndarray
        """
        print(path)
        # read the image in grayscale mode
        origin_img = cv2.imread(path, 0)
        if show_img:
            # resizable window
            # cv2.namedWindow('bin img', 0)
            cv2.imshow('origin img', origin_img)
        # keep only the top half (empirically chosen crop)
        origin_img = origin_img[:origin_img.shape[0] // 2]
        # binarise (Otsu threshold)
        _, origin_img = cv2.threshold(origin_img, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
        if show_img:
            # resizable window
            # cv2.namedWindow('bin img', 0)
            cv2.imshow('bin img', origin_img)
        # morphological dilation, mainly to make the red banner detectable
        kernel = numpy.ones((15, 15), dtype=numpy.uint8)
        # img = cv2.erode(img, kernel=kernel, iterations=1)
        img = cv2.dilate(origin_img, kernel=kernel, iterations=2)
        # edge/contour detection
        contours, _ = cv2.findContours(img, 1, 2)
        # the second-largest contour is assumed to be the red banner
        contours = sorted(contours, key=cv2.contourArea, reverse=True)
        if len(contours) > 1:
            # bounding rectangle of the banner
            x, y, w, h = cv2.boundingRect(contours[1])
            # all numeric thresholds below are empirical values
            if w * h > 250000:
                # the student-number strip sits just below the banner
                # top-left corner
                left_top_x = x
                left_top_y = y + h + 20
                # bottom-right corner
                right_down_x = x + w
                right_down_y = y + h + 190
                img = origin_img[left_top_y:right_down_y, left_top_x:right_down_x]
            else:
                img = origin_img[120:]
        else:
            img = origin_img[120:]
        # post-process the crop so it OCRs better
        kernel = numpy.ones((2, 2), dtype=numpy.uint8)
        # erode to thicken the digit strokes
        img = cv2.erode(img, kernel=kernel, iterations=1)
        # map back to RGB (the OCR models expect 3 channels)
        img = cv2.cvtColor(img, cv2.COLOR_GRAY2RGB)
        if show_img:
            # resizable window
            # cv2.namedWindow('final img', 0)
            cv2.imshow('final img', img)
            cv2.waitKey(0)
            cv2.destroyAllWindows()
        return img
    def __ocr_number(self, img_list: List[numpy.ndarray]):
        """
        Recognise the digits in each detected text box.
        :param img_list: cropped text-box images
        :return: per-box recognition results
        """
        return self.__ocr.ocr_for_single_lines(img_list)
    def __std_number(self, img: numpy.ndarray):
        """
        Locate the digit text boxes.
        :param img: the cropped image
        :return: list of cropped text-box images
        """
        return [i['cropped_img'] for i in self.__std.detect(img)]
    @staticmethod
    def __handle_result_list(result_list: List[List[str]]) -> [str, bool]:
        """
        Reduce the per-box OCR results to a single candidate number.
        :param result_list: OCR results, one character list per text box
        :return: (number string, whether it is a valid 12-digit number)
        """
        result = result_list[0]
        # prefer the first box with at least 12 characters
        if len(result) < 12 and len(result_list) > 1:
            for i in result_list:
                if len(i) >= 12:
                    result = i
        result = ''.join(result[:12] if len(result) >= 12 else result)
        print(result, re.match(r'\d{12}', result) is not None)
        return result, re.match(r'\d{12}', result) is not None
    def __handle_dup_name(self, name, path):
        # returns True when this name was already seen for another file
        dup_keys = self.__dup_name_dict.get(name)
        # already recorded: this name is a duplicate
        if dup_keys:
            # mark the first occurrence as duplicated too (only needed once)
            if 1 == len(dup_keys):
                self.__info_dict[dup_keys[0]]['dup'] = True
            # record this path as well
            self.__dup_name_dict[name].append(path)
            return True
        else:
            self.__dup_name_dict[name] = [path]
            return False
    def __handle_info(self, key, value):
        """
        Record the recognition result for one file.
        :param key: file path
        :param value: raw OCR result list
        """
        name, is_legal = self.__handle_result_list(value)
        self.__info_dict[key] = {
            'name': name,
            'suffix': self.__get_suffix(key),
            'legal': is_legal,
            'dup': self.__handle_dup_name(name, key)
        }
    def print_info(self):
        """
        Pretty-print the per-file recognition info.
        :return: self (fluent)
        """
        beeprint.pp(self.__info_dict)
        return self
    def print_dup(self):
        """
        Pretty-print the duplicate-number map.
        :return: self (fluent)
        """
        beeprint.pp(self.__dup_name_dict)
        return self
    def write_out(self,
                  path: str = '.',
                  out_path_suc: str = 'output_suc',
                  out_path_dup: str = 'output_dup',
                  out_path_fail: str = 'output_fail'):
        """
        Copy the renamed images into result folders.
        :param path: base output directory
        :param out_path_suc: folder for valid, non-duplicated numbers
        :param out_path_dup: folder for valid but duplicated numbers
        :param out_path_fail: folder for everything else
        :return: self (fluent)
        """
        # normalise the path
        path = self.__format_path(path)
        if os.path.isdir(path):
            # build the three output folder paths
            suc = os.path.join(path, out_path_suc)
            fail = os.path.join(path, out_path_fail)
            dup = os.path.join(path, out_path_dup)
            # create the result folders if they do not exist yet
            not os.path.exists(suc) and os.makedirs(suc)
            not os.path.exists(fail) and os.makedirs(fail)
            not os.path.exists(dup) and os.makedirs(dup)
            # copy every image to the folder matching its result
            for key, value in self.__info_dict.items():
                # valid and unique
                if value.get('legal') is True and value.get('dup') is False:
                    copyfile(key, os.path.join(suc, f'{value.get("name")}.{value.get("suffix")}'))
                # valid but duplicated: disambiguate with its index
                elif value.get('legal') is True and value.get('dup') is True:
                    index = self.__dup_name_dict[value.get("name")].index(key)
                    copyfile(key,
                             os.path.join(dup, f'{value.get("name")}.{index}.{value.get("suffix")}'))
                else:
                    copyfile(key,
                             os.path.join(fail, f'{value.get("name")}.{value.get("suffix")}' or os.path.split(key)[1]))
        else:
            print(f'“{path}” 并非一个合法的路径!')
        return self
def main():
    """Manual smoke-test entry point; supply your own test data."""
    # Example invocations, kept for reference:
    # PickStuNumber("./pics", show_img=False).print_info().write_out()
    # PickStuNumber("./pics/test.jpeg", show_img=True).print_info()
    # PickStuNumber("./pics/IMG.jpg", show_img=True).print_info()
    # PickStuNumber("./pics/IMG_023.jpg", show_img=True).print_info()
    # PickStuNumber("./pics/F6D35171-ECCF-4D28-BFF5-69B31453A2FB_big.jpg", show_img=True).write_out()


if __name__ == '__main__':
    main()
|
{"/desktop.py": ["/pick_stu_number.py"], "/console.py": ["/pick_stu_number.py"]}
|
13,963
|
TurnA-Lab/auto-rename-pic
|
refs/heads/master
|
/desktop.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'iskye'
import ctypes
import platform
import subprocess
import PySimpleGUI as sg
from pick_stu_number import PickStuNumber
def exe_cmd_subprocess(command, *args):
    """Run *command* with *args* and print its stdout/stderr.

    Best-effort helper for the GUI: any failure (e.g. missing
    executable) is swallowed so the window never crashes.

    :param command: executable to run
    :param args: extra command-line arguments
    """
    try:
        # BUG FIX: with a list argv, shell=True passes the arguments to the
        # shell instead of the command on POSIX; shell=False runs the
        # executable directly with its argument list.
        sp = subprocess.Popen([command, *args], shell=False,
                              stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = sp.communicate()
        if out:
            print(out.decode("utf-8"))
        if err:
            print(err.decode("utf-8"))
    except Exception:
        # Deliberate best-effort: ignore failures silently.
        pass
def make_dpi_aware():
    """
    Enable DPI awareness on Windows 8 and later for crisp rendering.

    Source: https://github.com/PySimpleGUI/PySimpleGUI/issues/1179#issuecomment-475899050
    """
    # BUG FIX: int(platform.release()) raises ValueError on non-Windows
    # systems (e.g. Linux release "6.5.0-..."), and ctypes.windll only
    # exists on Windows — guard by OS and tolerate odd release strings.
    if platform.system() != 'Windows':
        return
    try:
        if int(platform.release()) >= 8:
            ctypes.windll.shcore.SetProcessDpiAwareness(True)
    except (ValueError, AttributeError, OSError):
        # Non-numeric release string or missing shcore DLL: skip silently.
        pass
def main():
    """Build and run the screenshot-renaming GUI (PySimpleGUI)."""
    make_dpi_aware()
    # Theme.
    sg.theme('Light Grey 4')
    # Each settings entry below pairs a persisted-settings key with its
    # default value.
    # Image folder path.
    img_folder = {
        'key': '-ImgFolder-',
        'default': '.'
    }
    # Whether to show each image while processing.
    show_img = {
        'key': '-ShowImg-',
        'keyT': '-ShowImgT-',
        'keyF': '-ShowImgF-',
        'default': False
    }
    # Output path.
    output = {
        'key': '-Output-',
        'default': '.'
    }
    # Legal and non-duplicate images.
    output_suc = {
        'key': '-OutputSuc-',
        'default': 'output_suc'
    }
    # Legal but duplicated images.
    output_dup = {
        'key': '-OutputDup-',
        'default': 'output_dup'
    }
    # Everything else.
    output_fail = {
        'key': '-OutputFail-',
        'default': 'output_fail'
    }
    # "Start" tab: folder picker, run button, log output.
    basic = [[sg.Text('请选择图片所在文件夹路径')],
             [sg.Input(sg.user_settings_get_entry(img_folder.get('key'),
                                                  img_folder.get('default')),
                       key=img_folder.get('key'), size=(25, 1)),
              sg.FolderBrowse(button_text='浏览')],
             [sg.OK(button_text='立即开始')],
             [sg.Output(size=(30, 10))]]
    # "Config" tab: processing options and output folder names.
    config = [[sg.Frame(title='处理配置',
                        layout=
                        [[sg.Column(
                            size=(320, 60),
                            layout=
                            [[sg.Text('显示处理图片过程', size=(18, 1)),
                              sg.Radio('是', 'show_img',
                                       default=sg.user_settings_get_entry(
                                           show_img.get('key'), show_img.get('default')) is True,
                                       key=show_img.get('key')),
                              sg.Radio('否', 'show_img',
                                       default=sg.user_settings_get_entry(
                                           show_img.get('key'), show_img.get('default')) is False)]]
                        )]])
               ],
              [sg.Frame(title='输出配置',
                        layout=
                        [[sg.Column(
                            size=(320, 160),
                            layout=
                            [[sg.Text('输出路径', size=(18, 1)),
                              sg.Input(sg.user_settings_get_entry(output.get('key'), output.get('default')),
                                       key=output.get('key'), size=(6, 1)),
                              sg.FolderBrowse(button_text='浏览')],
                             [sg.Text('合规图片文件夹名', size=(18, 1)),
                              sg.Input(sg.user_settings_get_entry(output_suc.get('key'), output_suc.get('default')),
                                       key=output_suc.get('key'), size=(15, 1))],
                             [sg.Text('重复图片文件夹名', size=(18, 1)),
                              sg.Input(sg.user_settings_get_entry(output_dup.get('key'), output_dup.get('default')),
                                       key=output_dup.get('key'), size=(15, 1))],
                             [sg.Text('其它图片文件夹名', size=(18, 1)),
                              sg.Input(sg.user_settings_get_entry(output_fail.get('key'), output_fail.get('default')),
                                       key=output_fail.get('key'), size=(15, 1))]]
                        )]])
               ],
              [sg.OK(button_text='保存')]]
    # Tab group combining both tabs.
    layout = [[sg.TabGroup(layout=[[sg.Tab('开始', basic), sg.Tab('配置', config)]])]]
    # Main window.
    # NOTE(review): finalize=True plus the explicit .finalize() call is
    # redundant — one of the two would suffice.
    window = sg.Window(title='青年大学习截图重命名',
                       margins=(30, 30),
                       font=('Microsoft YaHei', 10),
                       finalize=True,
                       layout=layout).finalize()
    # Event loop.
    while True:
        event, values = window.read()
        # print(event, values)
        if event == sg.WIN_CLOSED:
            break
        elif event == '立即开始':
            # Disable closing while the pipeline runs.
            window.DisableClose = True
            print('即将开始处理图片')
            print('请在处理完毕后再关闭本窗口\n')
            print('-' * 30)
            # Run the rename pipeline with the persisted settings.
            PickStuNumber(
                values.get(img_folder.get('key')),
                sg.user_settings_get_entry(show_img.get('key'), show_img.get('default'))) \
                .write_out(
                sg.user_settings_get_entry(output.get('key'), output.get('default')),
                sg.user_settings_get_entry(output_suc.get('key'), output_suc.get('default')),
                sg.user_settings_get_entry(output_dup.get('key'), output_dup.get('default')),
                sg.user_settings_get_entry(output_fail.get('key'), output_fail.get('default')))
            print()
            print('处理完毕')
            print('-' * 30)
            # Re-enable closing.
            window.DisableClose = False
        elif event == '保存':
            # Persist each setting; the show-img radio stores under its own
            # settings key rather than the widget key.
            for key in {img_folder.get('key'), show_img.get('keyT'), output.get('key'),
                        output_suc.get('key'), output_dup.get('key'), output_fail.get('key')}:
                if key is show_img.get('keyT'):
                    sg.user_settings_set_entry(show_img.get('key'), values.get(show_img.get('key')))
                else:
                    sg.user_settings_set_entry(key, values.get(key))
    # Tear down the window.
    window.close()
    pass


if __name__ == '__main__':
    main()
|
{"/desktop.py": ["/pick_stu_number.py"], "/console.py": ["/pick_stu_number.py"]}
|
13,964
|
TurnA-Lab/auto-rename-pic
|
refs/heads/master
|
/console.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'iskye'
import click
from pick_stu_number import PickStuNumber
@click.command()
@click.option('--path', default='.', help='图片或者图片所在文件夹的路径')
@click.option('--show-img', default=False, help='是否展示处理图片过程')
@click.option('--show-info', default=False, help='是否展示处理图片结果')
@click.option('--output', default='.', help='图片输出到的文件夹路径')
@click.option('--output-suc', default='output_suc', help='合规且不重复图片所在的文件夹名')
@click.option('--output-dup', default='output_dup', help='合规但是重复图片所在的文件夹名')
@click.option('--output-fail', default='output_fail', help='其它图片所在的文件夹名')
@click.help_option(help='显示帮助')
# CLI entry point: run the rename pipeline, then optionally print the
# per-image info. The docstring doubles as click's command help text and is
# therefore kept verbatim.
def main(path,
         show_img,
         show_info,
         output,
         output_suc,
         output_dup,
         output_fail):
    """自动重命名图片为学号"""
    # Process and write out; write_out returns self, enabling the chained
    # print_info() below.
    psn = PickStuNumber(path, show_img).write_out(output, output_suc, output_dup, output_fail)
    if show_info:
        psn.print_info()
    pass


if __name__ == '__main__':
    main()
|
{"/desktop.py": ["/pick_stu_number.py"], "/console.py": ["/pick_stu_number.py"]}
|
13,989
|
luozhouyang/datas
|
refs/heads/master
|
/bajiuwang/html_downloader.py
|
import time
import argparse
import os
from urllib import request
class Downloader:
    """Sequentially download pages id-by-id from a base URL, checkpointing
    progress to a record file so interrupted runs can resume."""

    def __init__(self, base_url, record_file, save_folder, ranges):
        """
        :param base_url: URL prefix; the numeric id is appended to it
        :param record_file: file holding the last checkpointed id (optional)
        :param save_folder: directory for downloaded pages (optional)
        :param ranges: (start, end) pair of ids; invalid/missing falls back
            to 700000..800000
        :raises TypeError: when base_url is falsy
        """
        cur_path = os.path.dirname(__file__)
        self.record_file = record_file
        if not self.record_file:
            self.record_file = os.path.join(cur_path, "record.txt")
        self.save_folder = save_folder
        if not save_folder:
            self.save_folder = os.path.join(cur_path, "download")
        if not base_url:
            raise TypeError("base url is NoneType")
        self.base_url = base_url
        if ranges and len(ranges) == 2:
            self.start_id = int(ranges[0])
            self.end_id = int(ranges[1])
        else:
            # Fallback id window when no/invalid range is supplied.
            self.start_id = 700000
            self.end_id = 800000

    def download(self):
        """Download pages from the checkpointed id up to end_id.

        Pages are decoded as GBK and written as <id>.txt; the checkpoint is
        updated every 100 successful downloads.
        """
        start_id = self._read_start_id()
        print("Start id: %d" % start_id)
        success_count = 0
        prev_err_id = start_id
        continuous_err_count = 0
        for id in range(start_id, self.end_id):  # NOTE: `id` shadows the builtin
            try:
                time.sleep(1)  # throttle: one request per second
                resp = request.urlopen(self.base_url + str(id), timeout=2.0)
                if resp.getcode() != 200:
                    continue
                page = resp.read().decode('gbk')
                if not os.path.exists(self.save_folder):
                    os.makedirs(self.save_folder)
                file = self.save_folder + "/" + str(id) + '.txt'
                with open(file, mode='wt', encoding='utf-8', buffering=8192) as f:
                    f.write(page)
            except Exception as e:
                print("Exception occurs in %d" % id)
                # Track consecutive failing ids; after 50 in a row the site
                # is assumed to be blocking us: back off one hour and retry.
                if id == prev_err_id + 1:
                    continuous_err_count += 1
                else:
                    continuous_err_count = 0
                if continuous_err_count == 50:
                    print("Continuous error count: %d" % continuous_err_count)
                    # print("Stop program")
                    # break
                    time.sleep(60 * 60)
                    # NOTE(review): recursing here grows the call stack on
                    # every back-off, and the outer loop resumes afterwards,
                    # re-fetching ids — a loop-based retry would be safer.
                    self.download()
                prev_err_id = id
                continue
            else:
                # Success path: log the id and checkpoint every 100 pages.
                print(str(id))
                success_count += 1
                if success_count % 100 == 0:
                    self._save_start_id(str(id))

    def _read_start_id(self):
        """Return the checkpointed id from record_file, or start_id when no
        record exists."""
        id = self.start_id
        if not os.path.exists(self.record_file):
            return id
        with open(self.record_file, mode="rt", encoding="utf8") as fin:
            id = fin.readline()
            # NOTE(review): an existing-but-empty record file makes this
            # return "" (a string), which would break range() in download().
            if id:
                id = int(id)
            return id

    def _save_start_id(self, id):
        """Persist *id* (already a string) as the new checkpoint."""
        with open(self.record_file, mode="wt", encoding="utf8") as fout:
            fout.write(id)
if __name__ == "__main__":
    # CLI entry point: parse the download window and run the crawler.
    parser = argparse.ArgumentParser()
    parser.add_argument("--base_url", type=str, default="http://www.89yn.com/member.asp?id=")
    parser.add_argument("--record_file", type=str, default="/home/allen/PycharmProjects/datas/bajiuwang/record.txt",
                        help="A file to save latest download id.")
    parser.add_argument("--save_folder", type=str, default="/home/allen/PycharmProjects/datas/www89yn_data",
                        help="A folder to save download files.")
    parser.add_argument("--range", type=str, default="700000,800000", help="Comma-separated list of ids.")
    args, _ = parser.parse_known_args()
    downloader = Downloader(base_url=args.base_url, record_file=args.record_file, save_folder=args.save_folder,
                            ranges=args.range.split(","))
    downloader.download()
|
{"/bajiuwang/html_parser.py": ["/bajiuwang/patterns.py", "/bajiuwang/item.py"], "/bajiuwang/csv_analyzer.py": ["/bajiuwang/csv_image_generator.py", "/bajiuwang/csv_table_printer.py", "/bajiuwang/csv_xlsx_saver.py"]}
|
13,990
|
luozhouyang/datas
|
refs/heads/master
|
/zuyouw/zuyouw_html_download.py
|
import time
from urllib import request
# Browser-like request headers — presumably needed so the site does not
# reject bare urllib requests; TODO confirm.
headers = {
    'Connection': 'keep-alive',
    'Cache-Control': 'max-age=0',
    'Accept': 'text/html',
    'X-Requested-With': 'XMLHttpRequest',
    'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.89 Safari/537.36',
    'DNT': '1',
}
# Walk the profile-id range, fetch each page and dump it as <id>.html.
for id in range(162315, 167900):  # NOTE: `id` shadows the builtin
    try:
        time.sleep(1)  # throttle: one request per second
        url = 'http://www.zuyouw.com/home/' + str(id)
        print(url)
        req = request.Request(url=url, headers=headers)
        resp = request.urlopen(req, timeout=2.0)
        if resp.getcode() != 200:
            continue
        page = resp.read().decode('utf-8')
        file = '/home/allen/PycharmProjects/datas/zuyouw_data/' + str(id) + '.html'
        with open(file, mode='wt', encoding='utf-8', buffering=8192) as f:
            f.write(page)
    except Exception as e:
        # Best-effort crawl: log the failure and move to the next id.
        print(e)
        continue
    else:
        # Success: log the id that was saved.
        print(str(id))
|
{"/bajiuwang/html_parser.py": ["/bajiuwang/patterns.py", "/bajiuwang/item.py"], "/bajiuwang/csv_analyzer.py": ["/bajiuwang/csv_image_generator.py", "/bajiuwang/csv_table_printer.py", "/bajiuwang/csv_xlsx_saver.py"]}
|
13,991
|
luozhouyang/datas
|
refs/heads/master
|
/bajiuwang/csv_xlsx_saver.py
|
from openpyxl import Workbook
import os
class XLSXSaver:
    """Write the age/service/education/origin/city statistics into one
    .xlsx workbook, with a horizontal and a vertical sheet per metric."""

    def __init__(self, filename, ages, services, educations, origins, cities):
        # Each metric argument is an (all, men, women) triple of counter dicts.
        self.filename = os.path.join(os.path.dirname(__file__), "xlsx", filename)
        self.ages_all_dict, self.ages_men_dict, self.ages_women_dict = ages[0], ages[1], ages[2]
        self.services_all_dict, self.services_men_dict, self.services_women_dict = services[0], services[1], services[2]
        self.edus_all_dict, self.edus_men_dict, self.edus_women_dict = educations[0], educations[1], educations[2]
        self.origins_all_dict, self.origins_men_dict, self.origins_women_dict = origins[0], origins[1], origins[2]
        self.cities_all_dict, self.cities_men_dict, self.cities_women_dict = cities[0], cities[1], cities[2]

    @staticmethod
    def _save_xlsx_horizontal(wb, title, index, c0, c1, c2):
        """Add a sheet with categories as columns and men/women as rows.

        c0 supplies the category order; missing categories in c1/c2 count 0.
        """
        sheet = wb.create_sheet(title=title, index=index)
        header = ['']
        men_row = ['男']
        women_row = ['女']
        for category in c0:
            header.append(category)
            men_row.append(int(c1[category]) if category in c1 else 0)
            women_row.append(int(c2[category]) if category in c2 else 0)
        for row in (header, men_row, women_row):
            sheet.append(row)

    @staticmethod
    def _save_xlsx_vertical(wb, title, index, c0, c1, c2):
        """Add a sheet with categories as rows and men/women as columns."""
        sheet = wb.create_sheet(title, index)
        sheet.append(['', '男', '女'])
        for category in c0:
            men = int(c1[category]) if category in c1 else 0
            women = int(c2[category]) if category in c2 else 0
            sheet.append([category, men, women])

    def _save_xlsx(self, wb, title, index, c0, c1, c2):
        """Write both orientations for one metric; vertical sheets are
        offset by 5 so each group of horizontal sheets comes first."""
        self._save_xlsx_horizontal(wb, title + "_1", index, c0, c1, c2)
        self._save_xlsx_vertical(wb, title + "_2", index + 5, c0, c1, c2)

    def _save_age_xlsx(self, wb):
        """Age distribution sheets."""
        self._save_xlsx(wb, 'age', 0,
                        self.ages_all_dict, self.ages_men_dict, self.ages_women_dict)

    def _save_service_type_xlsx(self, wb):
        """Service-type distribution sheets."""
        self._save_xlsx(wb, 'service_type', 1,
                        self.services_all_dict, self.services_men_dict, self.services_women_dict)

    def _save_edu_xlsx(self, wb):
        """Education distribution sheets."""
        self._save_xlsx(wb, 'education', 2,
                        self.edus_all_dict, self.edus_men_dict, self.edus_women_dict)

    def _save_origin_xlsx(self, wb):
        """Origin distribution sheets."""
        self._save_xlsx(wb, 'origin', 3,
                        self.origins_all_dict, self.origins_men_dict, self.origins_women_dict)

    def _save_lives_xlsx(self, wb):
        """Current-city distribution sheets."""
        self._save_xlsx(wb, 'lives_in', 4,
                        self.cities_all_dict, self.cities_men_dict, self.cities_women_dict)

    def save_to_xlsx(self):
        """Build the workbook with every metric and write it to disk."""
        wb = Workbook()
        for writer in (self._save_age_xlsx, self._save_service_type_xlsx,
                       self._save_edu_xlsx, self._save_origin_xlsx,
                       self._save_lives_xlsx):
            writer(wb)
        wb.save(self.filename)
|
{"/bajiuwang/html_parser.py": ["/bajiuwang/patterns.py", "/bajiuwang/item.py"], "/bajiuwang/csv_analyzer.py": ["/bajiuwang/csv_image_generator.py", "/bajiuwang/csv_table_printer.py", "/bajiuwang/csv_xlsx_saver.py"]}
|
13,992
|
luozhouyang/datas
|
refs/heads/master
|
/zuyouw/zuyouw_html_parser.py
|
import os
# Intended CSV header row for the parsed zuyouw profile dump (Chinese
# column labels, full-width spacing kept as on the site).
headline = "身 份,用 户 名,用户 ID,性 别,婚姻状态,年 龄,学 历,身 高,月薪收入,星 座,职 业,所在地区,自我介绍," \
           "个性描述,相貌自评,体 重,体 型,魅力部位,发 型,发 色,脸 型,租友类型,方便联系时间," \
           "提供的线上服务,收 费,提供的线下租友服务,收 费"
class Item:
    """Plain record of one zuyouw.com profile page."""

    # All constructor arguments except `self_intro` are required; note the
    # historical quirk that `self_intro` is stored as the attribute `intro`.
    def __init__(self, identify, name, id, gender, marriage, age, education, height, incoming, constellation,
                 occupational, area, charecter, look, weight, charm, hair, hair_color, face, rent_type,
                 time, service_online, pay_online, service_offline, pay_offline, self_intro=""):
        arguments = locals()
        for field in ('identify', 'name', 'id', 'gender', 'marriage', 'age',
                      'education', 'height', 'incoming', 'constellation',
                      'occupational', 'area', 'charecter', 'look', 'weight',
                      'charm', 'hair', 'hair_color', 'face', 'rent_type',
                      'time', 'service_online', 'pay_online',
                      'service_offline', 'pay_offline'):
            setattr(self, field, arguments[field])
        self.intro = self_intro

    def to_csv_line(self):
        """Serialise to a (partial) CSV line; only `identify` is emitted."""
        return self.identify + ","
# Walk the downloaded zuyouw pages and extract the "infolist" fields.
filedir = "/home/allen/PycharmProjects/datas/zuyouw_data/"
files = os.listdir(filedir)
for f in files:
    if not f.endswith(".html"):
        continue
    with open(filedir + f, mode="rt", encoding="utf-8", buffering=8192) as fin:
        # NOTE(review): Item.__init__ requires 25 positional arguments, so
        # Item() raises TypeError — this script cannot run as written.
        item = Item()
        while True:
            # NOTE(review): `position` is reset to 0 on every line, so the
            # position == 1 / == 2 branches below are unreachable.
            position = 0
            line = fin.readline()
            if not line:
                break
            if "infolist" in line:
                if position == 0:
                    # Extract the value between the full-width colon and the
                    # next HTML tag.
                    def parse(line):
                        line = line.split(":")[1]
                        return line[:line.index("<")]
                    position += 1
                    # The 12 lines after the marker hold the profile fields,
                    # in a fixed order.
                    lines = []
                    for _ in range(12):
                        lines.append(fin.readline())
                    item.identify = parse(lines[0])
                    item.name = parse(lines[1])
                    item.id = parse(lines[2])
                    item.gender = parse(lines[3])
                    item.marriage = parse(lines[4])
                    item.age = parse(lines[5])
                    item.education = parse(lines[6])
                    item.height = parse(lines[7])
                    item.incoming = parse(lines[8])
                    item.constellation = parse(lines[9])
                    item.occupational = parse(lines[10])
                    item.area = parse(lines[11])
                    continue
                elif position == 1:
                    position += 1
                    continue
                elif position == 2:
                    position += 1
                    continue
                else:
                    pass
|
{"/bajiuwang/html_parser.py": ["/bajiuwang/patterns.py", "/bajiuwang/item.py"], "/bajiuwang/csv_analyzer.py": ["/bajiuwang/csv_image_generator.py", "/bajiuwang/csv_table_printer.py", "/bajiuwang/csv_xlsx_saver.py"]}
|
13,993
|
luozhouyang/datas
|
refs/heads/master
|
/zuyouw/zuyou77.py
|
import bs4
from urllib import request
# Browser-like request headers — presumably needed so the site does not
# reject bare urllib requests; TODO confirm.
headers = {
    'Connection': 'keep-alive',
    'Cache-Control': 'max-age=0',
    'Accept': 'text/html',
    'X-Requested-With': 'XMLHttpRequest',
    'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.89 Safari/537.36',
    'DNT': '1',
}
# Fetch the landing page and dump the children of its first <div>.
url = "http://wxapp.zuyou77.com/web"
req = request.Request(url=url, headers=headers)
resp = request.urlopen(req, timeout=2)
html = resp.read().decode('utf8')
print(html)
# NOTE(review): BeautifulSoup without an explicit parser argument picks the
# "best available" parser (and warns), which can vary between machines.
bs = bs4.BeautifulSoup(html)
for e in bs.div:
    print(e)
|
{"/bajiuwang/html_parser.py": ["/bajiuwang/patterns.py", "/bajiuwang/item.py"], "/bajiuwang/csv_analyzer.py": ["/bajiuwang/csv_image_generator.py", "/bajiuwang/csv_table_printer.py", "/bajiuwang/csv_xlsx_saver.py"]}
|
13,994
|
luozhouyang/datas
|
refs/heads/master
|
/bajiuwang/item.py
|
class Item:
    """Record of one www.89yn.com member profile; every field defaults to ''."""

    def __init__(self, name="", id="", gender="", age="", birth="", constellation="",
                 height="", weight="", size="", degree="", marriage="", occupational="",
                 lives="", origin="", area="", payment="", serve_time="", language="",
                 serve_type="", hobbits="", characteristic="", message=""):
        arguments = locals()
        for field in ("name", "id", "gender", "age", "birth", "constellation",
                      "height", "weight", "size", "degree", "marriage",
                      "occupational", "lives", "origin", "area", "payment",
                      "serve_time", "language", "serve_type", "hobbits",
                      "message"):
            setattr(self, field, arguments[field])
        # Historical quirk: the `characteristic` argument is stored under
        # the attribute name `character`.
        self.character = characteristic

    def __str__(self):
        # Comma-joined label=value pairs; the label for self.character is
        # still "characteristic" to match the original output format.
        pairs = (
            ("name", self.name), ("id", self.id), ("gender", self.gender),
            ("age", self.age), ("birth", self.birth),
            ("constellation", self.constellation), ("height", self.height),
            ("weight", self.weight), ("size", self.size),
            ("degree", self.degree), ("marriage", self.marriage),
            ("occupational", self.occupational), ("lives", self.lives),
            ("origin", self.origin), ("area", self.area),
            ("payment", self.payment), ("serve_time", self.serve_time),
            ("language", self.language), ("serve_type", self.serve_type),
            ("hobbits", self.hobbits), ("characteristic", self.character),
            ("message", self.message),
        )
        return ",".join("{}={}".format(label, value) for label, value in pairs)
|
{"/bajiuwang/html_parser.py": ["/bajiuwang/patterns.py", "/bajiuwang/item.py"], "/bajiuwang/csv_analyzer.py": ["/bajiuwang/csv_image_generator.py", "/bajiuwang/csv_table_printer.py", "/bajiuwang/csv_xlsx_saver.py"]}
|
13,995
|
luozhouyang/datas
|
refs/heads/master
|
/bajiuwang/csv_table_printer.py
|
from prettytable import PrettyTable
class TablePrinter:
    """Print the age/service/education/origin/city distributions as ASCII
    tables (PrettyTable), each in all/men/women variants."""

    def __init__(self, ages, services, educations, origins, cities):
        # Each metric argument is an (all, men, women) triple of counter dicts.
        self.ages_all_dict, self.ages_men_dict, self.ages_women_dict = ages[0], ages[1], ages[2]
        self.services_all_dict, self.services_men_dict, self.services_women_dict = services[0], services[1], services[2]
        self.edus_all_dict, self.edus_men_dict, self.edus_women_dict = educations[0], educations[1], educations[2]
        self.origins_all_dict, self.origins_men_dict, self.origins_women_dict = origins[0], origins[1], origins[2]
        self.cities_all_dict, self.cities_men_dict, self.cities_women_dict = cities[0], cities[1], cities[2]

    @staticmethod
    def _total_value(values):
        """Sum counts (strings or ints) into a single int."""
        return sum(int(v) for v in values)

    def _print_table(self, columns, header, collection):
        """Render one counter dict as a table with count and percent columns."""
        print("=====" + header)
        table = PrettyTable(columns)
        table.align[columns[0]] = "l"
        table.padding_width = 1
        total = self._total_value(collection.values())
        for label, count in collection.items():
            percent = "%.2f" % (int(count) / total * 100)
            table.add_row([label, count, percent])
        print(table)
        print("Total: %d" % total)

    def print_age(self):
        """Overall age distribution."""
        self._print_table(["Age", "Count", "Percent"], "Age distribution table", self.ages_all_dict)

    def print_age_men(self):
        """Male age distribution."""
        self._print_table(["Age", "Count", "Percent"], "Male age distribution table", self.ages_men_dict)

    def print_age_women(self):
        """Female age distribution."""
        self._print_table(["Age", "Count", "Percent"], "Female age distribution table", self.ages_women_dict)

    def print_service_types(self):
        """Overall service-type distribution."""
        self._print_table(["Type", "Count", "Percent"], "Service types distribution table", self.services_all_dict)

    def print_service_types_men(self):
        """Male service-type distribution."""
        self._print_table(["Type", "Count", "Percent"], "Male service types distribution table",
                          self.services_men_dict)

    def print_service_types_women(self):
        """Female service-type distribution."""
        self._print_table(["Type", "Count", "Percent"], "Female service types distribution table",
                          self.services_women_dict)

    def print_edu(self):
        """Overall education distribution."""
        self._print_table(["Education", "Count", "Percent"], "Education distribution table", self.edus_all_dict)

    def print_edu_men(self):
        """Male education distribution."""
        self._print_table(["Education", "Count", "Percent"], "Male education distribution table",
                          self.edus_men_dict)

    def print_edu_women(self):
        """Female education distribution."""
        self._print_table(["Education", "Count", "Percent"], "Female education distribution table",
                          self.edus_women_dict)

    def print_origin(self):
        """Overall origin distribution."""
        self._print_table(["Origin", "Count", "Percent"], "Origin distribution table", self.origins_all_dict)

    def print_origin_men(self):
        """Male origin distribution."""
        self._print_table(["Origin", "Count", "Percent"], "Male origin distribution table", self.origins_men_dict)

    def print_origin_women(self):
        """Female origin distribution."""
        self._print_table(["Origin", "Count", "Percent"], "Female origin distribution table", self.origins_women_dict)

    def print_cities(self):
        """Overall current-city distribution."""
        self._print_table(["City", "Count", "Percent"], "Cities distribution table", self.cities_all_dict)

    def print_cities_men(self):
        """Male current-city distribution."""
        self._print_table(["City", "Count", "Percent"], "Male cities distribution table", self.cities_men_dict)

    def print_cities_women(self):
        """Female current-city distribution."""
        self._print_table(["City", "Count", "Percent"], "Female cities distribution table", self.cities_women_dict)

    def print_tables(self):
        """Print every distribution, in the fixed metric/gender order."""
        for printer in (self.print_age, self.print_age_men, self.print_age_women,
                        self.print_service_types, self.print_service_types_men,
                        self.print_service_types_women,
                        self.print_edu, self.print_edu_men, self.print_edu_women,
                        self.print_origin, self.print_origin_men, self.print_origin_women,
                        self.print_cities, self.print_cities_men, self.print_cities_women):
            printer()
|
{"/bajiuwang/html_parser.py": ["/bajiuwang/patterns.py", "/bajiuwang/item.py"], "/bajiuwang/csv_analyzer.py": ["/bajiuwang/csv_image_generator.py", "/bajiuwang/csv_table_printer.py", "/bajiuwang/csv_xlsx_saver.py"]}
|
13,996
|
luozhouyang/datas
|
refs/heads/master
|
/bajiuwang/html_parser.py
|
import os
import re

from bajiuwang.patterns import Patterns
from bajiuwang.item import Item
# Files to parse and the accumulator for the parsed Item records.
files = os.listdir("/home/allen/PycharmProjects/datas/www89yn_data")
infos = []
def _parse_line(line, type=1):
contents = line.split(":")
if len(contents) != 2:
return ""
if type == 1:
result = Patterns.PATTERN_TYPE_1.sub("", contents[1])
return result.replace(",", ";").replace("\s+", " ").strip()
elif type == 2:
result = Patterns.PATTERN_TYPE_ID.sub("", contents[1])
return result.replace(",", ";").replace("\s+", " ").strip()
elif type == 3:
result = Patterns.PATTERN_TYPE_PAYMENT.sub("", contents[1])
return result.replace(",", ";").replace("\s+", " ").strip()
return ""
# Table-driven field extraction: for each line, the first rule whose
# pattern(s) match decides which Item attribute is filled and which
# _parse_line cleanup type applies. Order matters: it mirrors the original
# first-match-wins if-chain.
_FIELD_RULES = [
    ((Patterns.PATTERN_NAME,), "name", 1),
    ((Patterns.PATTERN_ID,), "id", 2),
    ((Patterns.PATTERN_GENDER,), "gender", 1),
    ((Patterns.PATTERN_AGE,), "age", 1),
    ((Patterns.PATTERN_BIRTH,), "birth", 1),
    ((Patterns.PATTERN_CONSTELLATION,), "constellation", 1),
    ((Patterns.PATTERN_HEIGHT,), "height", 1),
    ((Patterns.PATTERN_WEIGHT,), "weight", 1),
    ((Patterns.PATTERN_SIZE,), "size", 1),
    ((Patterns.PATTERN_DEGREE,), "degree", 1),
    ((Patterns.PATTERN_MARRIAGE,), "marriage", 1),
    ((Patterns.PATTERN_OCCUPATIONAL,), "occupational", 1),
    ((Patterns.PATTERN_LIVES,), "lives", 1),
    ((Patterns.PATTERN_ORIGIN,), "origin", 1),
    ((Patterns.PATTERN_AREA,), "area", 1),
    ((Patterns.PATTERN_PAYMENT,), "payment", 3),
    ((Patterns.PATTERN_SERVE_TIME,), "serve_time", 1),
    ((Patterns.PATTERN_LANGUAGE,), "language", 1),
    ((Patterns.PATTERN_SERVICE_TYPE_PROVIDED,
      Patterns.PATTERN_SERVICE_TYPE_NEEDED), "serve_type", 1),
    ((Patterns.PATTERN_HOBBITS,), "hobbits", 1),
    ((Patterns.PATTERN_CHARACTERISTIC,), "character", 1),
    ((Patterns.PATTERN_MESSAGE,), "message", 1),
]

# Parse every downloaded page into an Item; keep only those with an id.
for f in files:
    if not f.endswith(".txt"):
        continue
    p = "/home/allen/PycharmProjects/datas/www89yn_data/" + f
    if not os.path.exists(p):
        continue
    with open(p, mode="rt", encoding="utf-8") as fin:
        item = Item()
        for l in fin:
            if not l.strip():
                continue
            for patterns, attr, parse_type in _FIELD_RULES:
                if any(pat.findall(l) for pat in patterns):
                    setattr(item, attr, _parse_line(l, type=parse_type))
                    break
    if item.id:
        infos.append(item)

# Dump all parsed records into one CSV.
outdir = "/home/allen/PycharmProjects/datas/www89yn_data/"
if not os.path.exists(outdir):
    os.mkdir(outdir)
outfile = outdir + '0_info_20180609.csv'
with open(outfile, mode="wt", encoding="utf-8", buffering=8192) as f:
    f.write("姓名, Id, 性别, 年龄, 生日, 星座, 身高, 体重, 体型, 学位, 婚姻," +
            "职业, 居住城市, 籍贯, 可去地区, 是否收费, 服务时间, 使用语种, 提供服务," +
            "兴趣爱好, 性格类型, 心情留言\n")
    count = 0
    for item in infos:
        count += 1
        print(count)
        fields = (item.name, item.id, item.gender, item.age, item.birth,
                  item.constellation, item.height, item.weight, item.size,
                  item.degree, item.marriage, item.occupational, item.lives,
                  item.origin, item.area, item.payment, item.serve_time,
                  item.language, item.serve_type, item.hobbits,
                  item.character, item.message)
        f.write(",".join(fields) + "\n")
|
{"/bajiuwang/html_parser.py": ["/bajiuwang/patterns.py", "/bajiuwang/item.py"], "/bajiuwang/csv_analyzer.py": ["/bajiuwang/csv_image_generator.py", "/bajiuwang/csv_table_printer.py", "/bajiuwang/csv_xlsx_saver.py"]}
|
13,997
|
luozhouyang/datas
|
refs/heads/master
|
/bajiuwang/csv_analyzer.py
|
import csv
from collections import Counter, OrderedDict
import jieba
from .csv_image_generator import ImageGenerator
from .csv_table_printer import TablePrinter
from .csv_xlsx_saver import XLSXSaver
# Custom dictionary so jieba can segment the city names that appear in the
# scraped data.
jieba.load_userdict("/home/allen/PycharmProjects/datas/jieba_dict.txt")


class Analyzer:
    """Parse the scraped-profile CSV and report age/service/education/
    origin/city distributions as tables, images and an xlsx workbook."""

    def __init__(self, csv_file):
        # :param csv_file: path of the CSV produced by html_parser.py
        self.csv_file = csv_file
        # Set by parse_csv_file() once the counters below are populated.
        self.has_parse_file = False
        # Ordered counter dicts, one overall / male / female triple per
        # metric; all filled by parse_csv_file().
        self.ages_dict = None
        self.ages_men_dict = None
        self.ages_women_dict = None
        self.genders_dict = None
        self.educations_dict = None
        self.educations_men_dict = None
        self.educations_women_dict = None
        self.service_types_dict = None
        self.service_types_men_dict = None
        self.service_types_women_dict = None
        self.origin_dict = None
        self.origin_men_dict = None
        self.origin_women_dict = None
        self.lives_dict = None
        self.lives_men_dict = None
        self.lives_women_dict = None

    def parse_csv_file(self):
        """Read every CSV row once and build all the ordered counters."""
        ages = []
        ages_men = []
        ages_women = []
        genders = []
        educations = []
        educations_men = []
        educations_women = []
        origins = []
        origins_men = []
        origins_women = []
        service_types = []
        service_types_men = []
        service_types_women = []
        lives = []
        lives_men = []
        lives_women = []

        def callback(row):
            # Column layout (matches the header written by html_parser.py):
            # 2=gender, 3=age, 9=education, 12=city, 13=origin, 18=services.
            age = str(row[3].replace('岁', ''))
            gender = row[2].strip()
            education = row[9].strip()
            live_cities = []
            # cut_all=True yields every possible segment; only the first
            # segment is used as the city candidate.
            for r in jieba.cut(row[12], cut_all=True):
                live_cities.append(r)
            if len(live_cities) == 0:
                live_cities.append('其它')
            first = live_cities[0].strip()
            # City names shorter than 2 chars (or empty) are bucketed as
            # "other"; "马来" is normalised to "马来西亚".
            if first:
                if len(first) >= 2:
                    live = live_cities[0].strip()
                    if live == '马来':
                        live = '马来西亚'
                else:
                    live = '其他'
            else:
                live = '其他'
            origin = row[13].strip()
            # Services come semicolon-separated; drop empty fragments.
            services_tmp = row[18].strip().split(";")
            services = []
            for v in services_tmp:
                if v.strip():
                    services.append(v.strip())
            # Overall tallies, then gender-specific ones.
            ages.append(age)
            educations.append(education)
            origins.append(origin)
            service_types.extend(services)
            lives.append(live)
            if gender == "男":
                ages_men.append(age)
                educations_men.append(education)
                origins_men.append(origin)
                service_types_men.extend(services)
                lives_men.append(live)
            elif gender == "女":
                ages_women.append(age)
                educations_women.append(education)
                origins_women.append(origin)
                service_types_women.extend(services)
                lives_women.append(live)
            genders.append(gender)

        self._read_csv_file(callback)
        # Freeze each tally as an ordered (sorted-by-key) counter dict.
        self.ages_dict = OrderedDict(sorted(Counter(ages).items()))
        self.ages_men_dict = OrderedDict(sorted(Counter(ages_men).items()))
        self.ages_women_dict = OrderedDict(sorted(Counter(ages_women).items()))
        self.genders_dict = OrderedDict(sorted(Counter(genders).items()))
        self.educations_dict = OrderedDict(sorted(Counter(educations).items()))
        self.educations_men_dict = OrderedDict(sorted(Counter(educations_men).items()))
        self.educations_women_dict = OrderedDict(sorted(Counter(educations_women).items()))
        self.service_types_dict = OrderedDict(sorted(Counter(service_types).items()))
        self.service_types_men_dict = OrderedDict(sorted(Counter(service_types_men).items()))
        self.service_types_women_dict = OrderedDict(sorted(Counter(service_types_women).items()))
        self.origin_dict = OrderedDict(sorted(Counter(origins).items()))
        self.origin_men_dict = OrderedDict(sorted(Counter(origins_men).items()))
        self.origin_women_dict = OrderedDict(sorted(Counter(origins_women).items()))
        self.lives_dict = OrderedDict(sorted(Counter(lives).items()))
        self.lives_men_dict = OrderedDict(sorted(Counter(lives_men).items()))
        self.lives_women_dict = OrderedDict(sorted(Counter(lives_women).items()))
        self.has_parse_file = True

    def _read_csv_file(self, parse_line_callback):
        """Iterate the CSV, skipping the header, invoking the callback per row."""
        with open(self.csv_file, mode="rt", encoding="utf8", buffering=8192) as f:
            reader = csv.reader(f)
            header = next(reader)
            for row in reader:
                parse_line_callback(row)

    def analyze(self):
        """Parse (if needed) and emit tables, images and the xlsx workbook."""
        if not self.has_parse_file:
            self.parse_csv_file()
        self.print_tables()
        self.gen_images()
        self.save_to_xlsx()

    def print_tables(self):
        """Print every distribution as an ASCII table."""
        ages = [self.ages_dict, self.ages_men_dict, self.ages_women_dict]
        services = [self.service_types_dict, self.service_types_men_dict, self.service_types_women_dict]
        edus = [self.educations_dict, self.educations_men_dict, self.educations_women_dict]
        origins = [self.origin_dict, self.origin_men_dict, self.origin_women_dict]
        cities = [self.lives_dict, self.lives_men_dict, self.lives_women_dict]
        printer = TablePrinter(ages, services, edus, origins, cities)
        printer.print_tables()

    def save_to_xlsx(self):
        """Write every distribution into one xlsx workbook."""
        ages = [self.ages_dict, self.ages_men_dict, self.ages_women_dict]
        services = [self.service_types_dict, self.service_types_men_dict, self.service_types_women_dict]
        edus = [self.educations_dict, self.educations_men_dict, self.educations_women_dict]
        origins = [self.origin_dict, self.origin_men_dict, self.origin_women_dict]
        cities = [self.lives_dict, self.lives_men_dict, self.lives_women_dict]
        saver = XLSXSaver(filename="datas.xlsx", ages=ages, services=services,
                          educations=edus, origins=origins, cities=cities)
        saver.save_to_xlsx()

    def gen_images(self):
        """Render every distribution as chart images."""
        ages = [self.ages_dict, self.ages_men_dict, self.ages_women_dict]
        services = [self.service_types_dict, self.service_types_men_dict, self.service_types_women_dict]
        edus = [self.educations_dict, self.educations_men_dict, self.educations_women_dict]
        origins = [self.origin_dict, self.origin_men_dict, self.origin_women_dict]
        cities = [self.lives_dict, self.lives_men_dict, self.lives_women_dict]
        generator = ImageGenerator(ages, services, edus, origins, cities)
        generator.gen_images()


if __name__ == "__main__":
    # Run the full analysis against the CSV produced by html_parser.py.
    v = Analyzer("/home/allen/PycharmProjects/datas/www89yn_data/0_info_20180609.csv")
    v.analyze()
|
{"/bajiuwang/html_parser.py": ["/bajiuwang/patterns.py", "/bajiuwang/item.py"], "/bajiuwang/csv_analyzer.py": ["/bajiuwang/csv_image_generator.py", "/bajiuwang/csv_table_printer.py", "/bajiuwang/csv_xlsx_saver.py"]}
|
13,998
|
luozhouyang/datas
|
refs/heads/master
|
/bajiuwang/patterns.py
|
import re
class Patterns:
    """Compiled regexes for scraping profile pages from a Chinese dating site.

    Each PATTERN_* below matches the Chinese field label preceding a value in
    the page markup; the commented-out HTML fragments are verbatim samples of
    the markup being parsed (kept as-is for reference).
    """
    # <li>可去地区:<span>待议 </span></li>
    # <li>是否收费:<span>收费<FONT COLOR=#888888>..</FONT></span></li>
    # <li>服务时间:<span>待议 </span></li>
    # <li>使用语种:<span>普通话 </span></li>
    # <li>提供服务:<span>待议</span></li>
    # <li>兴趣爱好:<span>聊天, 赚钱 </span></li>
    # <li>性格类型:<span>阳光, 活泼可爱 </span></li>
    # <li>心情留言:<span>找工作 </span></li>
    PATTERN_AREA = re.compile("可去地区")                  # "areas willing to travel to"
    PATTERN_PAYMENT = re.compile("是否收费")               # "charges a fee?"
    PATTERN_SERVE_TIME = re.compile("服务时间")            # "service hours"
    PATTERN_LANGUAGE = re.compile("使用语种")              # "languages spoken"
    PATTERN_SERVICE_TYPE_PROVIDED = re.compile("提供服务")  # "services provided"
    PATTERN_SERVICE_TYPE_NEEDED = re.compile("所需服务")    # "services needed"
    # NOTE(review): "HOBBITS" is a misspelling of "hobbies"; renaming would
    # break external references, so it is only flagged here.
    PATTERN_HOBBITS = re.compile("兴趣爱好")               # "hobbies"
    PATTERN_CHARACTERISTIC = re.compile("性格类型")         # "personality type"
    PATTERN_MESSAGE = re.compile("心情留言")               # "mood message"
    # <li class="li539"><span>昵 称:鑫大宝</span> </li>
    # <li class="li539"><SPAN>I D:</SPAN>700002
    # <!--诚意 登陆时间-->
    # 诚意:22<IMG alt="" src="imageszny/images/cy2.gif" align="absMiddle">
    #
    # </li>
    # </li>
    # <li class="li265"><SPAN>性 别:</SPAN>女</li>
    # <li class="li265"><SPAN>年 龄:</SPAN>24岁</li>
    # <li class="li265"><SPAN>出生年月:</SPAN>1987-8-19</li>
    # <li class="li265"><SPAN>星 座:</SPAN>狮子</li>
    # <li class="li265"><SPAN>身 高:</SPAN>162CM</li>
    # <li class="li265"><SPAN>体 重:</SPAN>55KG</li>
    # <li class="li265"><SPAN>体 形:</SPAN>匀称</li>
    # <li class="li265"><SPAN>学 历:</SPAN>中专</li>
    # <li class="li265"><SPAN>婚 姻:</SPAN>未婚</li>
    # <li class="li265"><SPAN>职 业:</SPAN>医生</li>
    # <li class="li265"><SPAN>居住城市:</SPAN>黑龙江 哈尔滨
    # </li>
    # <li class="li265"><SPAN>籍 贯:</SPAN>山东</li>
    # <li class="li265"><SPAN>注册日期:</SPAN>VIP会员可见</li>
    # <li class="li265"><SPAN>登陆日期:</SPAN>VIP会员可见</li>
    # </ul>
    PATTERN_NAME = re.compile("昵 称")                    # "nickname"
    PATTERN_ID = re.compile("I D")                        # user id label
    PATTERN_GENDER = re.compile("性 别")                  # "gender"
    PATTERN_AGE = re.compile("年 龄")                     # "age"
    PATTERN_BIRTH = re.compile("出生年月")                 # "date of birth"
    PATTERN_CONSTELLATION = re.compile("星 座")           # "zodiac sign"
    PATTERN_HEIGHT = re.compile("身 高")                  # "height"
    PATTERN_WEIGHT = re.compile("体 重")                  # "weight"
    PATTERN_SIZE = re.compile("体 形")                    # "body shape"
    PATTERN_DEGREE = re.compile("学 历")                  # "education level"
    PATTERN_MARRIAGE = re.compile("婚 姻")                # "marital status"
    PATTERN_OCCUPATIONAL = re.compile("职 业")            # "occupation"
    PATTERN_LIVES = re.compile("居住城市")                 # "city of residence"
    PATTERN_ORIGIN = re.compile("籍 贯")                  # "place of origin"
    # Character classes presumably used to strip leftover markup characters
    # from extracted values — TODO confirm against the parser's usage.
    PATTERN_TYPE_1 = re.compile("[/<>SPANlispan;&b\"]")
    PATTERN_TYPE_ID = re.compile("[</>a-zA-Z&;]")
    PATTERN_TYPE_PAYMENT = re.compile("[</>a-zA-Z0-9=.#]")
|
{"/bajiuwang/html_parser.py": ["/bajiuwang/patterns.py", "/bajiuwang/item.py"], "/bajiuwang/csv_analyzer.py": ["/bajiuwang/csv_image_generator.py", "/bajiuwang/csv_table_printer.py", "/bajiuwang/csv_xlsx_saver.py"]}
|
13,999
|
luozhouyang/datas
|
refs/heads/master
|
/bajiuwang/csv_image_generator.py
|
import os
import matplotlib.pyplot as plt
class ImageGenerator:
    """Renders pie/line charts for the scraped profile statistics.

    Each constructor argument is a 3-item list ``[all, men, women]`` of dicts
    mapping category -> count (counts may be ints or numeric strings).
    Charts are written under an ``images`` directory next to this module
    (``plot_age_line`` writes to a CWD-relative ``images/`` — kept for
    backward compatibility).
    """

    def __init__(self, ages, services, educations, origins, cities):
        self.ages_all_dict = ages[0]
        self.ages_men_dict = ages[1]
        self.ages_women_dict = ages[2]
        self.services_all_dict = services[0]
        self.services_men_dict = services[1]
        self.services_women_dict = services[2]
        self.edus_all_dict = educations[0]
        self.edus_men_dict = educations[1]
        self.edus_women_dict = educations[2]
        self.origins_all_dict = origins[0]
        self.origins_men_dict = origins[1]
        self.origins_women_dict = origins[2]
        self.cities_all_dict = cities[0]
        self.cities_men_dict = cities[1]
        self.cities_women_dict = cities[2]

    @staticmethod
    def _total_value(values):
        """Sum an iterable of int-convertible counts; 0 for an empty iterable."""
        return sum(int(v) for v in values)

    def plot_age_line(self):
        """Plot the overall age distribution as a line chart."""
        plt.figure(figsize=(10, 7))
        plt.title('Age distribution line chart')
        # list() conversion: dict views are not reliably accepted by
        # matplotlib's plot() under Python 3.
        plt.plot(list(self.ages_all_dict.keys()), list(self.ages_all_dict.values()))
        plt.xlabel('Age')
        plt.savefig("images/age_line.png")

    def plot_age_pie(self):
        self._plot_pie("Age distribution pie chart", self.ages_all_dict, "age_pie.png")

    def plot_age_men_pie(self):
        self._plot_pie("Male age distribution pie chart", self.ages_men_dict, "age_men_pie.png")

    def plot_age_women_pie(self):
        self._plot_pie("Female age distribution pie chart", self.ages_women_dict, "age_women_pie.png")

    def _plot_pie(self, title, collection, filename, rotatelabels=False):
        """Render ``collection`` (category -> count) as a pie chart.

        Labels show each category with its percentage share; the image is
        saved as ``images/<filename>`` relative to this module.
        """
        total = self._total_value(collection.values())
        plt.figure(figsize=(7, 7))
        plt.title(title)
        labels = []
        for k, v in collection.items():
            v = "%s - %.2f" % (k, int(v) / total * 100)
            labels.append(v + "%")
        plt.pie(collection.values(), labels=labels, rotatelabels=rotatelabels)
        plt.savefig(os.path.join(os.path.dirname(__file__), "images", filename))

    def plot_service_types_pie(self):
        self._plot_pie("Service types distribution pie chart", self.services_all_dict, "service_types_pie.png")

    def plot_service_types_men_pie(self):
        self._plot_pie("Male service types distribution pie chart", self.services_men_dict,
                       "service_types_men_pie.png")

    def plot_service_types_women_pie(self):
        self._plot_pie("Female service types distribution pie chart", self.services_women_dict,
                       "service_types_women_pie.png")

    def plot_edus_pie(self):
        self._plot_pie("Educations distribution pie chart", self.edus_all_dict, "educations_pie.png")

    def plot_edus_men_pie(self):
        self._plot_pie("Male educations distribution pie chart", self.edus_men_dict, "educations_men_pie.png")

    def plot_edus_women_pie(self):
        self._plot_pie("Female educations distribution pie chart", self.edus_women_dict, "educations_women.png")

    def plot_origins_pie(self):
        self._plot_pie("Origins distribution pie chart", self.origins_all_dict, "origins_pie.png")

    def plot_origins_men_pie(self):
        self._plot_pie("Male origins distribution pie chart", self.origins_men_dict, "origins_men_pie.png")

    def plot_origins_women_pie(self):
        self._plot_pie("Female origins distribution pie chart", self.origins_women_dict, "origins_women.png")

    def plot_cities_pie(self):
        self._plot_pie("Cities distribution pie chart", self.cities_all_dict, "cities_pie.png")

    def plot_cities_men_pie(self):
        self._plot_pie("Male cities distribution pie chart", self.cities_men_dict, "cities_men_pie.png")

    def plot_cities_women_pie(self):
        self._plot_pie("Female cities distribution pie chart", self.cities_women_dict, "cities_women_pie.png")

    def gen_images(self):
        """Render every pie chart (plot_age_line must be called separately)."""
        self.plot_age_pie()
        self.plot_age_men_pie()
        self.plot_age_women_pie()
        self.plot_service_types_pie()
        self.plot_service_types_men_pie()
        self.plot_service_types_women_pie()
        self.plot_edus_pie()
        self.plot_edus_men_pie()
        self.plot_edus_women_pie()
        self.plot_origins_pie()
        self.plot_origins_men_pie()
        # BUG FIX: the original called plot_edus_women_pie() here a second
        # time, so the female origins chart was never generated.
        self.plot_origins_women_pie()
        self.plot_cities_pie()
        self.plot_cities_men_pie()
        self.plot_cities_women_pie()
|
{"/bajiuwang/html_parser.py": ["/bajiuwang/patterns.py", "/bajiuwang/item.py"], "/bajiuwang/csv_analyzer.py": ["/bajiuwang/csv_image_generator.py", "/bajiuwang/csv_table_printer.py", "/bajiuwang/csv_xlsx_saver.py"]}
|
14,018
|
ClaudioDavi/dagscience
|
refs/heads/master
|
/tests/test_steps.py
|
from dagscience.step_manager import Step
from .mock_workflow import MockGetData, MockPreprocess, MockTrain, MockSaveModel
import os
import configparser
class TestStep():
    """pytest tests for Step.step_writer's `.steps` bookkeeping file."""

    # Shared Step wired with no-op mock tasks; step_writer only touches the
    # filesystem, so the mock tasks are never exercised.
    step = Step(MockGetData(), MockPreprocess(),
                MockTrain(), MockSaveModel())

    def test_step_writer(self):
        # step_writer must create a `.steps` file in the current directory.
        self.step.step_writer()
        assert os.path.exists('.steps')
        os.remove('.steps')

    def test_step_writer_sections(self):
        # The generated file must be INI-parseable and contain a STEPS section.
        self.step.step_writer()
        sections = configparser.ConfigParser()
        sections.read('.steps')
        assert 'STEPS' in sections.sections()
        os.remove('.steps')
|
{"/tests/test_steps.py": ["/dagscience/step_manager.py", "/tests/mock_workflow.py"], "/dagscience/workflow.py": ["/dagscience/step_manager.py"]}
|
14,019
|
ClaudioDavi/dagscience
|
refs/heads/master
|
/dagscience/workflow.py
|
from abc import ABC, abstractmethod
import logging
from .step_manager import Step
class DagflowCycle:
    """
    Every cycle on a DAG workflow begins with run.
    To implement a workflow you should build all the
    Task classes according to your needs and then pass
    them as parameters to the DagflowCycle object.
    After that, the workflow will take care of the process
    """
    logger = logging.getLogger(__name__)

    def __init__(self, task_get_data, task_preprocess, task_train, task_model_saver):
        """
        Creates workflow.
        params:
            task_get_data -- Implementation of the TaskGetData class
            task_preprocess -- Implementation of the TaskPreprocess class
            task_train -- Implementation of the TaskTrain class
            task_model_saver -- Implementation of the TaskSaveModel class
        raises:
            TypeError -- if any task does not subclass its required base
                class.  (BUG FIX: the original silently skipped setting
                ``self.step`` on a type mismatch, which surfaced later as a
                confusing AttributeError in ``run()``.)
        returns:
            The workflow Cycle
        """
        # isinstance(x, T) is equivalent to issubclass(x.__class__, T).
        if not (
            isinstance(task_get_data, TaskGetData)
            and isinstance(task_preprocess, TaskPreprocess)
            and isinstance(task_train, TaskTrain)
            and isinstance(task_model_saver, TaskSaveModel)
        ):
            raise TypeError(
                'DagflowCycle tasks must subclass TaskGetData, TaskPreprocess, '
                'TaskTrain and TaskSaveModel respectively'
            )
        self.step = Step(task_get_data, task_preprocess, task_train, task_model_saver)

    def run(self, step_1=True, step_2=True, step_3=True):
        """
        Runs the workflow cycle; individual steps can be disabled.
        Will return the Machine Learning Model
        params:
            step_1 -- Default(True) Enables Loading the data from external sources.
                If false will load from disk, or as defined in load_from_filesystem
            step_2 -- Default(True) Enables the preprocessing of the data.
                If false will return the original data.
            step_3 -- Default(True) Enables the creation and training of the model.
                If false will only load model from file system
        returns:
            Machine learning model.
        """
        return self.step.execute_steps(step_1, step_2, step_3)

    def describe(self):
        # Not implemented yet.
        pass
class TaskGetData(ABC):
    """Abstract task: load raw data from an external source or the filesystem."""

    def __init__(self):
        pass

    @abstractmethod
    def load_from_source(self, *args, **kwargs):
        """
        Loads data from the original (external) source
        """

    @abstractmethod
    def load_from_filesystem(self, *args, **kwargs):
        """
        Loads previously saved data from the filesystem
        """

    @abstractmethod
    def save(self, data, *args, **kwargs):
        """
        Saves ``data`` to the repository
        """
class TaskPreprocess(ABC):
    """Abstract task: preprocess the loaded data."""

    def __init__(self, *args, **kwargs):
        pass

    @abstractmethod
    def run(self, data, *args, **kwargs):
        """
        Does preprocessing on ``data``; returns a dataframe
        """
class TaskTrain(ABC):
    """Abstract task: build and train the machine learning model."""

    def __init__(self):
        pass

    @abstractmethod
    def build_model(self):
        """
        Builds your machine learning algorithm.
        Use this for hyperparameter tuning so the run method stays lean.
        """

    @abstractmethod
    def run(self, model, data):
        """
        Runs the training job on ``model`` with ``data``
        """
class TaskSaveModel(ABC):
    """Abstract task: persist and reload the trained model."""

    def __init__(self):
        pass

    @abstractmethod
    def save(self, model):
        """
        Saves the model to the destination output
        """

    @abstractmethod
    def load(self):
        """
        Loads the model from the target destination
        """
|
{"/tests/test_steps.py": ["/dagscience/step_manager.py", "/tests/mock_workflow.py"], "/dagscience/workflow.py": ["/dagscience/step_manager.py"]}
|
14,020
|
ClaudioDavi/dagscience
|
refs/heads/master
|
/tests/mock_workflow.py
|
from dagscience import workflow
class MockGetData(workflow.TaskGetData):
def __init__(self):
pass
def load_from_source(self, *args, **kwargs):
pass
def load_from_filesystem(self, *args, **kwargs):
pass
def save(self, data, *args, **kwargs):
pass
class MockPreprocess(workflow.TaskPreprocess):
    """No-op preprocess task used by the Step tests.

    BUG FIX: the original defined ``__init`` (typo — not a constructor) and
    ``run(self)`` without the ``data`` parameter, so it did not satisfy the
    abstract signature ``TaskPreprocess.run(self, data, *args, **kwargs)``
    and ``Step.step_preprocess`` would crash calling ``run(data)``.
    ``data`` defaults to None so any existing no-arg call keeps working.
    """

    def __init__(self):
        pass

    def run(self, data=None, *args, **kwargs):
        pass
class MockTrain(workflow.TaskTrain):
    """No-op training task used by the Step tests."""

    def __init__(self):
        pass

    def build_model(self):
        pass

    def run(self, model, data):
        pass
class MockSaveModel(workflow.TaskSaveModel):
    """No-op model-persistence task used by the Step tests."""

    def __init__(self):
        pass

    def save(self, model):
        pass

    def load(self):
        pass
|
{"/tests/test_steps.py": ["/dagscience/step_manager.py", "/tests/mock_workflow.py"], "/dagscience/workflow.py": ["/dagscience/step_manager.py"]}
|
14,021
|
ClaudioDavi/dagscience
|
refs/heads/master
|
/setup.py
|
from setuptools import setup, find_packages
# Package metadata for dagscience; the tests package is excluded from the
# built distribution.
setup(name='dagscience',
      version='0.1.0beta',
      description='Machine Learning Engineering Workflow Simplified',
      url='http://github.com/claudiodavi/dagscience',
      author='Claudio Davi',
      author_email='cdavisouza@gmail.com',
      license='MIT',
      packages=find_packages(exclude=["tests"]),
      zip_safe=False)
|
{"/tests/test_steps.py": ["/dagscience/step_manager.py", "/tests/mock_workflow.py"], "/dagscience/workflow.py": ["/dagscience/step_manager.py"]}
|
14,022
|
ClaudioDavi/dagscience
|
refs/heads/master
|
/dagscience/step_manager.py
|
import logging
import os
import configparser
import traceback
class Step():
"""
Steps to be executed to create the machine learning model
"""
logger = logging.getLogger(__name__)
default = {
"STEPS": {
"STEP_1": 'ready',
"STEP_2": 'ready',
"STEP_3": 'ready'
}
}
def __init__(self, get_data, preprocess, train, model_saver):
self.data_class = get_data
self.preprocess_class = preprocess
self.train_class = train
self.model_class = model_saver
def execute_steps(self, step_1=True, step_2=True, step_3=True):
data = self.step_get_data(step_1)
data = self.step_preprocess(data, step_2)
model = self.step_train(data, step_3)
return model
def step_get_data(self, execute):
self.logger.info('=============================================')
self.logger.info('Step 1: Loading Data')
if not execute:
self.logger.info(
'Step 1: Not getting new data, loading from file system directly')
return self.data_class.load_from_filesystem()
else:
self.logger.info(
'Step 1: Loading data from original source and saving to filesystem')
data = self.data_class.load_from_source()
self.data_class.save(data)
return data
def step_preprocess(self, data, execute):
self.logger.info('=============================================')
self.logger.info('Step 2: Preprocess')
if not execute:
self.logger.info('Step 2: Not preprocessing')
return data
else:
self.logger.info("Step 2: Starting preprocessing step")
return self.preprocess_class.run(data)
def step_train(self, data, execute):
self.logger.info('=============================================')
self.logger.info('Step 3: Training Model')
if not execute:
self.logger.info(
'Step 3: Not using training, loading model from file system')
return self.model_class.load()
else:
self.logger.info('Step 3: Building and training model')
model = self.train_class.build_model()
model = self.train_class.run(model, data)
self.model_class.save(model)
return model
def step_writer(self):
config = configparser.ConfigParser()
if os.path.exists('.steps'):
config.read('.steps')
print(config.sections)
else:
with open('.steps', 'w') as configfile:
try:
config.read_dict(self.default)
config.write(configfile)
except Exception as ex:
traceback.print_stack()
|
{"/tests/test_steps.py": ["/dagscience/step_manager.py", "/tests/mock_workflow.py"], "/dagscience/workflow.py": ["/dagscience/step_manager.py"]}
|
14,023
|
meswapnilwagh/remote-tools
|
refs/heads/master
|
/scp_r2r.py
|
#!/usr/bin/env python
"""Use scp to copy files bewteen two remote hosts directly.
Copies the ssh key needed to get from host1 to host2.
Requires ~/.ssh/config file
"""
import os
from optparse import OptionParser
from paramiko import SSHConfig
def main():
    """Copy a file between two remote hosts with scp.

    Works by first copying the ssh key needed to reach host2 onto host1,
    then running scp from host1.  Python 2 code (uses dict.iteritems below).
    """
    USAGE = "usage: %prog [options] host1:path1 host2:path2"
    parser = OptionParser(usage=USAGE)
    parser.add_option("-F", "--config-file",
                      action="store",
                      dest="config_file",
                      default="%s/.ssh/config" % os.environ['HOME'],
                      help="SSH config file (default: ~/.ssh/config)",)
    parser.add_option("--scp-options",
                      action="store",
                      dest="scp_options",
                      default="",
                      help="string of options (in quotes) passed directy to the scp command",)
    (options, args) = parser.parse_args()
    # Positional args are host:path pairs; split only on the first ':' so
    # paths containing ':' survive.
    host1, path1 = args[0].split(':', 1)
    host2, path2 = args[1].split(':', 1)
    # ssh config file
    config = SSHConfig()
    config.parse(open(options.config_file))
    o = config.lookup(host2)
    # copy keyfile
    # NOTE(review): newer paramiko returns 'identityfile' as a list — this
    # assumes a single string; confirm against the paramiko version in use.
    keyfile_remote = '/tmp/%s' % os.path.basename(o['identityfile'])
    run('scp %s %s:%s' % (o['identityfile'], host1, keyfile_remote))
    # copy actual file
    # Forward the remaining ssh-config options (minus hostname/identityfile)
    # as -o options on the remote scp invocation.
    ssh_options = ' -o'.join(['='.join([k, v]) for k, v in o.iteritems()
                              if k != 'hostname' and k != 'identityfile'])
    if ssh_options:
        ssh_options = '-o' + ssh_options
    run('ssh %s scp %s -i %s -oStrictHostKeyChecking=no %s %s %s:%s' % (
        host1, options.scp_options, keyfile_remote, ssh_options, path1,
        o['hostname'], path2))
def run(cmd):
    """Echo then execute ``cmd`` through the shell (Python 2 print statement)."""
    print cmd
    os.system(cmd)
# Script entry point.
if __name__ == '__main__':
    main()
|
{"/rml_cat.py": ["/multilog.py"]}
|
14,024
|
meswapnilwagh/remote-tools
|
refs/heads/master
|
/multilog.py
|
"""Assumes SSH config file at ~/.ssh/config
"""
import fnmatch
import gzip
import os
import re
import sys
from paramiko import SSHClient, SSHConfig
def rml_cat(host, glob, skip_files=0):
    """Print every line of the remote files matching ``glob`` on ``host``.

    Python 2 code (print statement).  Opens an SSH/SFTP session, streams
    each matching (possibly gzipped) file, prints its lines, then closes.
    """
    rml = RemoteMultiLog()
    rml.connect(host)
    lines = rml.get_lines(glob, skip_files)
    for line in lines:
        print line.rstrip()
    rml.close()
class RemoteMultiLog(object):
    """Streams rotated (optionally gzipped) log files from a remote host via SFTP."""

    def connect(self, host):
        """Open SSH and SFTP sessions to ``host`` using ~/.ssh/config."""
        # ssh config file
        config = SSHConfig()
        config.parse(open('%s/.ssh/config' % os.environ['HOME']))
        o = config.lookup(host)
        # ssh client
        self.ssh_client = ssh = SSHClient()
        ssh.load_system_host_keys()
        ssh.connect(o['hostname'], username=o['user'], key_filename=o['identityfile'])
        self.sftp_client = ssh.open_sftp()

    def get_lines(self, glob, skip_files=0):
        """wildcards only allowed in filename (not path)

        Generator over the lines of every matching remote file, ordered by
        sort_by_integer_suffix; the first ``skip_files`` files are skipped.
        """
        (dirname, filepattern) = os.path.split(glob)
        filelist = self.sftp_client.listdir(dirname)
        filelist = fnmatch.filter(filelist, filepattern)
        filelist = [os.path.join(dirname, filename) for filename in filelist]
        # Python 2 cmp-style sorted(); not valid under Python 3.
        filelist = sorted(filelist, self.sort_by_integer_suffix)
        for filepath in filelist[skip_files:]:
            sys.stderr.write("Processing %s...\n" % filepath)
            sftp_file = self.sftp_client.open(filepath)
            # Transparently decompress rotated .gz logs.
            if filepath.endswith('.gz'):
                fh = gzip.GzipFile(fileobj=sftp_file)
            else:
                fh = sftp_file
            for line in fh:
                yield line
            sftp_file.close()

    def close(self):
        """Tear down the SFTP and SSH sessions."""
        self.sftp_client.close()
        self.ssh_client.close()

    def sort_by_integer_suffix(self, a, b):
        """Files are sorted by the integer in the suffix of the log filename.
        Suffix may be one of the following:
        .X (where X is an integer)
        .X.gz (where X is an integer)
        If the filename does not end in either suffix, it is treated as if X=0

        The comparator returns get_suffix(b) - get_suffix(a), so files with
        larger numeric suffixes sort first (oldest rotated log first).
        """
        def get_suffix(fname):
            m = re.search(r'.(?:\.(\d+))?(?:\.gz)?$', fname)
            # lastindex is None when the numeric group did not participate.
            if m.lastindex:
                suf = int(m.group(1))
            else:
                suf = 0
            return suf
        return get_suffix(b) - get_suffix(a)
|
{"/rml_cat.py": ["/multilog.py"]}
|
14,025
|
meswapnilwagh/remote-tools
|
refs/heads/master
|
/rml_cat.py
|
#!/usr/bin/env python
"""Print lines for all files matching glob pattern on a remote host
"""
from optparse import OptionParser
from multilog import rml_cat
def main():
USAGE = "usage: %prog [options] host:glob"
parser = OptionParser(usage=USAGE)
parser.add_option("--skip-files",
action="store",
dest="skip_files",
type="int",
default=0,
help="number of files to skip (default=0)",)
(options, args) = parser.parse_args()
host, glob = args[0].split(':', 1)
rml_cat(host, glob, options.skip_files)
if __name__ == '__main__':
main()
|
{"/rml_cat.py": ["/multilog.py"]}
|
14,028
|
royaflash/amira
|
refs/heads/master
|
/tests/s3_test.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
import boto
import pytest
from boto.s3.key import Key
from mock import ANY
from mock import MagicMock
from mock import patch
from amira.results_uploader import FileMetaInfo
from amira.s3 import S3Handler
from amira.s3 import S3ResultsUploader
class TestS3Handler(object):
    """Tests ``amira.s3.S3Handler`` class."""

    @pytest.fixture
    def s3_handler(self):
        # Replace boto's connection factory so S3Handler() never touches the
        # network.  NOTE(review): this patches the boto module globally, not
        # just for this fixture's scope.
        boto.connect_s3 = MagicMock()
        return S3Handler()

    def test_get_contents_as_string(self, s3_handler):
        # Walk the mock chain connection -> bucket -> key and stub the
        # key's contents.
        s3_connection_mock = boto.connect_s3.return_value
        bucket_mock = s3_connection_mock.get_bucket.return_value
        key_mock = bucket_mock.get_key.return_value
        key_mock.get_contents_as_string.return_value = 'test key contents'
        contents = s3_handler.get_contents_as_string(
            'amira-test', 'MALWARE-1564-2016_01_11-10_55_12.tar.gz',
        )
        assert 'test key contents' == contents
        # The bucket must be fetched with validate=False (no extra HTTP
        # round-trip) and the right key requested exactly once.
        s3_connection_mock.get_bucket.assert_called_once_with(
            'amira-test', validate=False,
        )
        bucket_mock.get_key.assert_called_once_with(
            'MALWARE-1564-2016_01_11-10_55_12.tar.gz',
        )
        key_mock.get_contents_as_string.assert_called_once_with()
class TestS3ResultsUploader():
    """Tests ``amira.s3.S3ResultsUploader`` class."""

    @pytest.fixture
    def s3_results_uploader(self):
        # Patch boto's connection factory so the uploader constructor does
        # not hit the network.
        boto.connect_s3 = MagicMock()
        return S3ResultsUploader('lorem-ipsum')

    def test_upload_results(self, s3_results_uploader):
        s3_connection_mock = boto.connect_s3.return_value
        fileobj_mock1 = MagicMock()
        fileobj_mock2 = MagicMock()
        results = [
            FileMetaInfo('etaoin', fileobj_mock1, 'text/html; charset=UTF-8'),
            FileMetaInfo('shrdlu', fileobj_mock2, 'application/json'),
        ]
        # autospec=True makes the mock receive the Key instance as the first
        # positional argument, matched loosely with ANY below.
        with patch.object(Key, 'set_contents_from_file', autospec=True) \
                as patched_set_contents_from_file:
            s3_results_uploader.upload_results(results)
        s3_connection_mock.get_bucket.assert_called_once_with(
            'lorem-ipsum', validate=False,
        )
        # One upload per result, each with its own Content-Type header.
        assert [
            (
                (ANY, fileobj_mock1), {
                    'headers': {
                        'Content-Type': 'text/html; charset=UTF-8',
                    },
                },
            ),
            (
                (ANY, fileobj_mock2), {
                    'headers': {
                        'Content-Type': 'application/json',
                    },
                },
            ),
        ] == patched_set_contents_from_file.call_args_list
|
{"/tests/s3_test.py": ["/amira/s3.py"], "/tests/sqs_test.py": ["/amira/sqs.py"]}
|
14,029
|
royaflash/amira
|
refs/heads/master
|
/amira/sqs.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
import logging
from collections import namedtuple
import boto.sqs
import simplejson
from boto.sqs.message import RawMessage
# 10 is the maximum number of messages to read at once:
# http://docs.aws.amazon.com/AWSSimpleQueueService/latest/APIReference/API_ReceiveMessage.html
MAX_NUMBER_MESSAGES = 10
CreatedObject = namedtuple('ObjectCreated', ['bucket_name', 'key_name'])
class SqsHandler(object):
    """Retrieves the S3 event notifications about the objects created
    in the bucket for which the notifications were configured.
    :param region_name: The AWS region name where the SQS queue
                        containing the S3 event notifications is
                        configured.
    :type region_name: string
    :param queue_name: The name of the SQS queue containing the S3
                       event notifications.
    :type queue_name: string
    """

    def __init__(self, region_name, queue_name):
        self._setup_sqs_queue(region_name, queue_name)

    def _setup_sqs_queue(self, region_name, queue_name):
        """Connects to the SQS queue in a given AWS region.
        :param region_name: The AWS region name.
        :type region_name: string
        :param queue_name: The SQS queue name.
        :type queue_name: string
        :raises SqsQueueNotFoundException: if the queue does not exist.
        """
        sqs_connection = boto.sqs.connect_to_region(region_name)
        self.sqs_queue = sqs_connection.get_queue(queue_name)
        # get_queue returns None for a missing queue; fail loudly instead.
        if not self.sqs_queue:
            raise SqsQueueNotFoundException(queue_name)
        logging.info(
            'Successfully connected to {0} SQS queue'.format(
                queue_name,
            ),
        )
        # RawMessage: bodies are delivered verbatim (no base64 decoding),
        # as required for the JSON S3 notifications parsed below.
        self.sqs_queue.set_message_class(RawMessage)

    def get_created_objects(self):
        """Retrieves the S3 event notifications about the objects
        created in the OSXCollector output bucket yields the (bucket
        name, key name) pairs describing these objects.

        Note: this is a generator; the messages are deleted from the
        queue only after all the yielded objects have been consumed.
        """
        messages = self.sqs_queue.get_messages(MAX_NUMBER_MESSAGES)
        logging.info(
            'Received {0} message(s) from the SQS queue'.format(
                len(messages),
            ),
        )
        if messages:
            for message in messages:
                objects_created = self._retrieve_created_objects_from_message(
                    message,
                )
                for object_created in objects_created:
                    yield object_created
            self.sqs_queue.delete_message_batch(messages)

    def _retrieve_created_objects_from_message(self, message):
        """Retrieves the bucket name and the key name, describing the
        created object, from the `Records` array in the SQS message.
        Yields each (bucket name, key name) pair as an `CreatedObject`
        named tuple.
        :param message: The SQS message. It should be in the JSON
                        format.
        :type message: string
        """
        json_body = message.get_body()
        body = simplejson.loads(json_body)
        # Notifications without a Records array (e.g. test events) are
        # logged and skipped rather than treated as errors.
        if 'Records' not in body:
            logging.warning(
                '"Records" field not found in the SQS message. '
                'Message body: {0}'.format(body),
            )
            return []
        records = body['Records']
        return self._extract_created_objects_from_records(records)

    def _extract_created_objects_from_records(self, records):
        """Yield a CreatedObject for each S3 record in ``records``."""
        logging.info(
            'Found {0} record(s) in the SQS message'.format(len(records)),
        )
        for record in records:
            bucket_name = record['s3']['bucket']['name']
            key_name = record['s3']['object']['key']
            yield CreatedObject(bucket_name=bucket_name, key_name=key_name)
class SqsQueueNotFoundException(Exception):
    """Raised when the requested SQS queue cannot be found."""

    _MESSAGE_TEMPLATE = 'SQS queue {0} not found.'

    def __init__(self, queue_name):
        # Keep the queue name accessible so callers can inspect it.
        self.queue_name = queue_name

    def __str__(self):
        return self._MESSAGE_TEMPLATE.format(self.queue_name)
|
{"/tests/s3_test.py": ["/amira/s3.py"], "/tests/sqs_test.py": ["/amira/sqs.py"]}
|
14,030
|
royaflash/amira
|
refs/heads/master
|
/amira/s3.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
import logging
import boto
from boto.s3.key import Key
from amira.results_uploader import ResultsUploader
class S3Handler(object):
    """Thin wrapper around boto S3 for reading object contents.

    "Key" is S3 terminology for an object stored in a bucket; it is
    unrelated to the AWS access keys used for authentication.
    """

    def __init__(self):
        # A single shared connection is created eagerly and reused.
        self._s3_connection = boto.connect_s3()

    def get_contents_as_string(self, bucket_name, key_name):
        """Return the contents of key ``key_name`` in ``bucket_name``.

        :param bucket_name: The S3 bucket name.
        :type bucket_name: string
        :param key_name: The S3 key (object) name.
        :type key_name: string
        :returns: The key (object) contents as a string.
        :rtype: string
        """
        # validate=False skips the extra existence round-trip to S3.
        target_bucket = self._s3_connection.get_bucket(
            bucket_name, validate=False)
        target_key = target_bucket.get_key(key_name)
        return target_key.get_contents_as_string()
class S3ResultsUploader(ResultsUploader):
    """Uploads the analysis results to an S3 bucket.

    :param bucket_name: The name of the S3 bucket where the analysis
                        results will be uploaded.
    :type bucket_name: string
    """

    def __init__(self, bucket_name):
        self._bucket_name = bucket_name
        logging.info(
            'Connecting to S3 to obtain access to {0} bucket.'.format(
                bucket_name,
            ),
        )
        # validate=False avoids an extra existence check round-trip.
        self._bucket = boto.connect_s3().get_bucket(
            bucket_name, validate=False)
        logging.info(
            'S3 bucket {0} retrieved successfully.'.format(
                bucket_name,
            ),
        )

    def upload_results(self, results):
        """Uploads the analysis results to an S3 bucket.

        :param results: The list containing the meta info (name,
                        content and content-type) of the files which
                        needs to be uploaded.
        :type results: list of ``FileMetaInfo`` tuples
        """
        for meta in results:
            logging.info(
                'Uploading the analysis results in the file "{0}" to the S3 '
                'bucket "{1}"'.format(meta.name, self._bucket_name),
            )
            self._create_object_from_file(meta)

    def _create_object_from_file(self, file_meta_info):
        """Create a new key (object) in the bucket from one file's meta info."""
        new_key = Key(self._bucket)
        new_key.key = file_meta_info.name
        new_key.set_contents_from_file(
            file_meta_info.content,
            headers={'Content-Type': file_meta_info.content_type},
        )
|
{"/tests/s3_test.py": ["/amira/s3.py"], "/tests/sqs_test.py": ["/amira/sqs.py"]}
|
14,031
|
royaflash/amira
|
refs/heads/master
|
/tests/sqs_test.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
import boto
import pytest
import simplejson
from mock import MagicMock
from amira.sqs import SqsHandler
from amira.sqs import SqsQueueNotFoundException
TEST_DATA_DIR_PATH = 'tests/data'
@pytest.fixture
def mock_sqs_queue():
    # Patch boto's region connector module-wide so SqsHandler never talks to
    # AWS; returns the mocked queue object that get_queue() will hand back.
    boto.sqs.connect_to_region = MagicMock()
    sqs_connection_mock = boto.sqs.connect_to_region.return_value
    return sqs_connection_mock.get_queue.return_value
def read_s3_event_notifications_file(s3_event_notifications_file_path):
    """Load a JSON array of S3 event notifications and re-serialize each
    notification into its own JSON string (one per SQS message body)."""
    with open(s3_event_notifications_file_path) as fp:
        s3_event_notifications = simplejson.load(fp)
    json_s3_event_notifications = [
        simplejson.dumps(s3_event_notification)
        for s3_event_notification in s3_event_notifications
    ]
    return json_s3_event_notifications
def create_s3_event_notification_message_mocks(
    s3_event_notifications_file_name,
):
    """Creates SQS queue message mocks that will return the JSON content of
    `s3_event_notifications_file_path` JSON file as the body of the message.
    """
    s3_event_notifications_file_path = '{0}/{1}'.format(
        TEST_DATA_DIR_PATH, s3_event_notifications_file_name,
    )
    json_s3_event_notifications = read_s3_event_notifications_file(
        s3_event_notifications_file_path,
    )
    # One MagicMock per notification; get_body() returns the JSON string,
    # matching the RawMessage interface SqsHandler consumes.
    s3_event_notification_message_mocks = [
        MagicMock(**{'get_body.return_value': json_s3_event_notification})
        for json_s3_event_notification in json_s3_event_notifications
    ]
    return s3_event_notification_message_mocks
def mock_s3_event_notifications(
    mock_sqs_queue, s3_event_notifications_file_name,
):
    """`SqsHandler.get_created_objects()` is a generator, so we need to
    mock multiple values returned by `get_messages()` method.
    In this case only one as the test cases do not operate on more than
    one message.
    """
    s3_event_notification_message_mocks = \
        create_s3_event_notification_message_mocks(
            s3_event_notifications_file_name,
        )
    # side_effect with a one-item list: the first get_messages() call
    # returns the mocks; a second call would raise StopIteration.
    mock_sqs_queue.get_messages.side_effect = \
        [s3_event_notification_message_mocks]
    return s3_event_notification_message_mocks
class TestSqsHandler(object):
    """Tests ``amira.sqs.SqsHandler`` against mocked boto SQS queues."""

    def test_queue_not_found(self):
        # get_queue() returning None must raise SqsQueueNotFoundException.
        boto.sqs.connect_to_region = MagicMock()
        sqs_connection_mock = boto.sqs.connect_to_region.return_value
        sqs_connection_mock.get_queue.return_value = None
        with pytest.raises(SqsQueueNotFoundException) as e:
            SqsHandler('us-west-1', 'godzilla')
        assert 'SQS queue godzilla not found.' == str(e.value)
        boto.sqs.connect_to_region.assert_called_once_with('us-west-1')
        sqs_connection_mock.get_queue.assert_called_once_with('godzilla')

    def test_get_created_objects(self, mock_sqs_queue):
        # The fixture data file contains one notification per expected key.
        s3_event_notification_message_mocks = mock_s3_event_notifications(
            mock_sqs_queue, 's3_event_notifications.json',
        )
        sqs_handler = SqsHandler('us-west-1', 'godzilla')
        created_objects = sqs_handler.get_created_objects()
        actual_key_names = [
            created_object.key_name
            for created_object in created_objects
        ]
        expected_key_names = [
            'AMIRA-1561-2016_01_11-10_54_07.tar.gz',
            'AMIRA-1562-2016_01_11-10_54_47.tar.gz',
            'AMIRA-1563-2016_01_11-10_54_58.tar.gz',
            'AMIRA-1564-2016_01_11-10_55_12.tar.gz',
            'AMIRA-1565-2016_01_11-10_55_32.tar.gz',
            'AMIRA-1566-2016_01_11-10_55_49.tar.gz',
            'AMIRA-1567-2016_01_11-10_56_09.tar.gz',
        ]
        assert expected_key_names == actual_key_names
        # Messages are deleted only after the generator is fully consumed.
        mock_sqs_queue.delete_message_batch.assert_called_once_with(
            s3_event_notification_message_mocks,
        )

    def test_get_created_objects_no_created_objects(self, mock_sqs_queue):
        # An empty poll yields nothing and must not delete anything.
        mock_sqs_queue.get_messages.side_effect = [[]]
        sqs_handler = SqsHandler('us-west-1', 'godzilla')
        created_objects = sqs_handler.get_created_objects()
        assert 0 == len(list(created_objects))
        assert mock_sqs_queue.delete_message_batch.called is False

    def test_get_created_objects_no_records(self, mock_sqs_queue):
        """Tests the behavior of `get_created_objects()` method in case
        the message received from SQS does not contain the "Records"
        field in the message body.
        """
        mock_s3_event_notifications(
            mock_sqs_queue, 's3_test_event_notification.json',
        )
        sqs_handler = SqsHandler('us-west-2', 'godzilla')
        created_objects = sqs_handler.get_created_objects()
        created_objects = list(created_objects)
        assert [] == created_objects
|
{"/tests/s3_test.py": ["/amira/s3.py"], "/tests/sqs_test.py": ["/amira/sqs.py"]}
|
14,040
|
MotazBellah/currency-converter-exchangeratesAPI
|
refs/heads/master
|
/app.py
|
import os
import datetime
from flask import Flask, render_template, request, redirect, url_for, jsonify
import httplib2
import json
from wtform_fields import *
app = Flask(__name__)
# Random per-process secret key used for session signing.
app.secret_key = os.urandom(12).hex()
# Set secret key for cross-site request forgery protection,
# used to generate a token when the WTF form is submitted.
app.config['WTF_CSRF_SECRET_KEY'] = "b'f\xfa\x8b{X\x8b\x9eM\x83l\x19\xad\x84\x08\xaa"
# Main route: displays the form on a GET request and
# redirects to the convert route on a POST request
@app.route('/', methods=['GET', 'POST'])
def index():
    """Render the conversion form; on a valid submit redirect to /convert."""
    # get Currency Convert Form from WTF
    conv_form = CurrencyCovert()
    # Convert only when CSRF + field validation succeed.
    if conv_form.validate_on_submit():
        # Get the data from the form fields
        src_currency = conv_form.src_currency.data
        dest_currency = conv_form.dest_currency.data
        amount = conv_form.amount.data
        date = conv_form.date.data
        # Redirect to the convert route,
        # passing the form's data as query parameters.
        return redirect(url_for('convert',
                                src_currency=src_currency,
                                dest_currency=dest_currency,
                                amount=amount,
                                date=date))
    return render_template('index.html', form=conv_form)
# Convert `amount` from `src_currency` to `dest_currency` at the rate of
# `date` using the exchangeratesapi.io service. Accepts GET only.
# Returns JSON with `amount` and `currency`, or an empty JSON object when
# the input is invalid or no rate is available.
@app.route('/convert', methods=['GET'])
def convert():
    data = {}
    # Missing query parameters default to '' instead of raising a 500.
    src = request.args.get('src_currency', '').upper()
    dest = request.args.get('dest_currency', '').upper()
    date = request.args.get('date', '')
    # Reject a missing or non-numeric amount, consistent with the
    # empty-JSON handling of an invalid date below.
    try:
        amount = float(request.args.get('amount', ''))
    except ValueError:
        return jsonify(data)
    # Same currency: no lookup needed.
    if src == dest:
        data['amount'] = amount
        data['currency'] = dest
        return jsonify(data)
    # Make sure the date is in the format YYYY-MM-DD, else return empty JSON.
    # (The parsed value is not needed — strptime is used for validation only;
    # the original bound it to an unused local.)
    try:
        datetime.datetime.strptime(date, '%Y-%m-%d')
    except ValueError:
        return jsonify(data)
    url = "https://api.exchangeratesapi.io/{}?base={}&symbols={}".format(date, src, dest)
    # Use the httplib2 HTTP client to send the GET request,
    # then convert the JSON body to a Python dictionary.
    h = httplib2.Http()
    result = json.loads(h.request(url, 'GET')[1])
    # Only populate the response when the API actually returned a rate.
    if 'rates' in result:
        rate = result['rates'][dest]
        data['amount'] = amount * rate
        data['currency'] = dest
    return jsonify(data)
# Local development entry point; honours the PORT env var (default 3000).
if __name__ == '__main__':
    PORT = int(os.environ.get('PORT', 3000))
    # NOTE(review): debug mode must be disabled for production deployments.
    app.debug = True
    app.run(host='0.0.0.0', port=PORT)
|
{"/app.py": ["/wtform_fields.py"], "/convert_test.py": ["/app.py"]}
|
14,041
|
MotazBellah/currency-converter-exchangeratesAPI
|
refs/heads/master
|
/wtform_fields.py
|
from flask_wtf import FlaskForm
from wtforms import StringField, FloatField, SelectField
from wtforms.validators import InputRequired, Length, ValidationError
import datetime
# (value, label) pairs for the currency SelectFields — ISO 4217 codes,
# presumably the set supported by the exchange-rate API (verify there).
CURRENCY_TYPE = [("EUR", "EUR"), ("USD", "USD"), ("JPY", "JPY"),
                 ("BGN", "BGN"), ("CZK", "CZK"), ("GBP", "GBP"),
                 ("HUF", "HUF"), ("PLN", "PLN"), ("RON", "RON"),
                 ("SEK", "SEK"), ("CHF", "CHF"), ("ISK", "ISK"),
                 ("NOK", "NOK"), ("HRK", "HRK"), ("RUB", "RUB"),
                 ("TRY", "TRY"), ("AUD", "AUD"), ("BRL", "BRL"),
                 ("CAD", "CAD"), ("CNY", "CNY"), ("HKD", "HKD"),
                 ("IDR", "IDR"), ("ILS", "ILS"), ("INR", "INR"),
                 ("KRW", "KRW"), ("MXN", "MXN"), ("MYR", "MYR"),
                 ("NZD", "NZD"), ("PHP", "PHP"), ("SGD", "SGD"),
                 ("THB", "THB"), ("ZAR", "ZAR"), ("DKK", "DKK")]
# Custom WTForms validator: reject reference dates that are not valid
# calendar dates in YYYY-MM-DD format.
def date_validate(form, field):
    """Raise ``ValidationError`` unless ``field.data`` parses as YYYY-MM-DD."""
    try:
        # strptime validates both the format and the calendar date; the
        # parsed value itself is not needed (the original bound it to an
        # unused local).
        datetime.datetime.strptime(field.data, '%Y-%m-%d')
    except ValueError:
        raise ValidationError("Incorrect date format, should be YYYY-MM-DD")
class CurrencyCovert(FlaskForm):
    """Currency conversion form.

    Collects a source and destination currency (restricted to
    ``CURRENCY_TYPE``), an amount, and a reference date that must be in
    YYYY-MM-DD format (enforced by ``date_validate``).
    """
    src_currency = SelectField('source currency',
                               choices=CURRENCY_TYPE,
                               validators=[InputRequired(message="currency required")])
    dest_currency = SelectField('destination currency',
                                choices=CURRENCY_TYPE,
                                validators=[InputRequired(message="currency required")])
    amount = FloatField('amount',
                        validators=[InputRequired(message="amount required")])
    date = StringField('reference date',
                       validators=[InputRequired(message="Date required"),
                                   date_validate])
|
{"/app.py": ["/wtform_fields.py"], "/convert_test.py": ["/app.py"]}
|
14,042
|
MotazBellah/currency-converter-exchangeratesAPI
|
refs/heads/master
|
/convert_test.py
|
from flask import current_app
from app import app
import unittest
import json
class AppTestCase(unittest.TestCase):
    """End-to-end tests for the currency converter Flask app.

    NOTE(review): the /convert tests go through the live
    exchangeratesapi.io service, so the expected amounts are pinned to
    historical rates for hard-coded dates — they will fail offline or if
    the API's historical data changes.
    """
    # executed prior to each test
    def setUp(self):
        # Push an app context so `current_app` resolves inside tests.
        self.app = app
        app.config['TESTING'] = True
        # CSRF disabled so the form POST tests do not need a token.
        self.app.config['WTF_CSRF_ENABLED'] = False
        self.app_context = self.app.app_context()
        self.app_context.push()
        self.client = self.app.test_client()
    # executed after each test
    def tearDown(self):
        self.app_context.pop()
    def test_app_exists(self):
        """Test if the app exists """
        self.assertFalse(current_app is None)
    def test_home_page(self):
        """Test the home page"""
        response = self.client.get('/')
        self.assertEqual(response.status_code, 200)
    # test the convert action
    def test_convert(self):
        # 1st index is the src then dest then amount then date
        url_data = ['EUR', 'EUR', 10, '2019-10-11']
        response = self.client.get("/convert?src_currency={}&dest_currency={}&amount={}&date={}".format(*url_data))
        data = json.loads(response.get_data(as_text=True))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(bool(data), True)
        # Same source and destination: amount passes through unchanged.
        self.assertEqual(data['currency'], "EUR")
        self.assertEqual(data['amount'], 10)
    #
    def test_convert_date(self):
        url_data = ['EUR', 'JPY', 80, '2018-10-11']
        response = self.client.get("/convert?src_currency={}&dest_currency={}&amount={}&date={}".format(*url_data))
        data = json.loads(response.get_data(as_text=True))
        # Expected amount = 80 * historical EUR->JPY rate for 2018-10-11.
        self.assertEqual(response.status_code, 200)
        self.assertEqual(bool(data), True)
        self.assertEqual(data['currency'], "JPY")
        self.assertEqual(data['amount'], 10400.0)
    def test_convert_curr(self):
        url_data = ['AUD', 'RON', 1, '2019-09-17']
        response = self.client.get("/convert?src_currency={}&dest_currency={}&amount={}&date={}".format(*url_data))
        data = json.loads(response.get_data(as_text=True))
        self.assertEqual(response.status_code, 200)
        # self.assertEqual(bool(data), True)
        self.assertEqual(data['currency'], "RON")
        self.assertEqual(data['amount'], 2.9363602531)
    #
    def test_convert_curr2(self):
        url_data = ['BGN', 'NOK', 1, '2019-09-17']
        response = self.client.get("/convert?src_currency={}&dest_currency={}&amount={}&date={}".format(*url_data))
        data = json.loads(response.get_data(as_text=True))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(bool(data), True)
        self.assertEqual(data['currency'], "NOK")
        self.assertEqual(data['amount'], 5.0472952245)
    # Same conversion as above with a very large amount (float scaling).
    def test_convert_curr3(self):
        url_data = ['BGN', 'NOK', 894368950, '2019-09-17']
        response = self.client.get("/convert?src_currency={}&dest_currency={}&amount={}&date={}".format(*url_data))
        data = json.loads(response.get_data(as_text=True))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(bool(data), True)
        self.assertEqual(data['currency'], "NOK")
        self.assertEqual(data['amount'], 4514144130.276079)
    # POSTing the form on / should redirect to /convert and return JSON.
    def test_covert_form(self):
        response = self.client.post(
            '/', data={
                'src_currency': 'EUR',
                'dest_currency': 'EUR',
                'amount': 10,
                'date': '2019-10-11'
            }, follow_redirects=True)
        data = json.loads(response.get_data(as_text=True))
        self.assertEqual(bool(data), True)
        self.assertEqual(data['currency'], "EUR")
        self.assertEqual(data['amount'], 10)
    # An invalid date format should be rejected by the form validator.
    def test_covert_date_format(self):
        response = self.client.post(
            '/', data={
                'src_currency': 'EUR',
                'dest_currency': 'USD',
                'amount': 89,
                'date': '11-10-2019'
            }, follow_redirects=True)
        self.assertIn(b"Incorrect date format, should be YYYY-MM-DD", response.data)
    def test_covert_amount(self):
        response = self.client.post(
            '/', data={
                'src_currency': 'EUR',
                'dest_currency': 'USD',
                'amount': 'test',
                'date': '2019-10-11'
            }, follow_redirects=True)
        self.assertIn(b"Not a valid float value", response.data)
# Run the unittest CLI when executed directly.
if __name__ == '__main__':
    unittest.main()
|
{"/app.py": ["/wtform_fields.py"], "/convert_test.py": ["/app.py"]}
|
14,052
|
aleph-im/aleph-client
|
refs/heads/master
|
/src/aleph_client/commands/message.py
|
import json
import os.path
import subprocess
import tempfile
from pathlib import Path
from typing import Dict, List, Optional
import typer
from aleph.sdk import AlephClient, AuthenticatedAlephClient
from aleph.sdk.account import _load_account
from aleph.sdk.conf import settings as sdk_settings
from aleph.sdk.models import MessagesResponse
from aleph.sdk.types import AccountFromPrivateKey, StorageEnum
from aleph_message.models import AlephMessage, ItemHash, MessageType, ProgramMessage
from aleph_client.commands import help_strings
from aleph_client.commands.utils import (
colorful_json,
colorful_message_json,
input_multiline,
setup_logging,
str_to_datetime,
)
app = typer.Typer()
@app.command()
def get(
    item_hash: str,
):
    """Fetch a single message by its item hash and pretty-print it."""
    with AlephClient(api_server=sdk_settings.API_HOST) as client:
        fetched = client.get_message(item_hash=ItemHash(item_hash))
        typer.echo(colorful_message_json(fetched))
@app.command()
def find(
    pagination: int = 200,
    page: int = 1,
    message_type: Optional[str] = None,
    content_types: Optional[str] = None,
    content_keys: Optional[str] = None,
    refs: Optional[str] = None,
    addresses: Optional[str] = None,
    tags: Optional[str] = None,
    hashes: Optional[str] = None,
    channels: Optional[str] = None,
    chains: Optional[str] = None,
    start_date: Optional[str] = None,
    end_date: Optional[str] = None,
    ignore_invalid_messages: bool = True,
):
    """Search for messages matching the given filters and print them as JSON.

    Comma-separated option values (content_types, refs, addresses, tags,
    hashes, channels, chains) are split into lists before querying.
    """
    # Convert the CLI string into the MessageType enum exactly once
    # (the original converted it twice, the second time re-wrapping the
    # already-converted enum value).
    parsed_message_type = MessageType(message_type) if message_type else None

    def _split(csv: Optional[str]) -> Optional[List[str]]:
        # Turn "a,b,c" into ["a", "b", "c"]; keep None when unset.
        return csv.split(",") if csv else None

    start_time = str_to_datetime(start_date)
    end_time = str_to_datetime(end_date)
    with AlephClient(api_server=sdk_settings.API_HOST) as client:
        response: MessagesResponse = client.get_messages(
            pagination=pagination,
            page=page,
            message_type=parsed_message_type,
            content_types=_split(content_types),
            content_keys=_split(content_keys),
            refs=_split(refs),
            addresses=_split(addresses),
            tags=_split(tags),
            hashes=_split(hashes),
            channels=_split(channels),
            chains=_split(chains),
            start_date=start_time,
            end_date=end_time,
            ignore_invalid_messages=ignore_invalid_messages,
        )
    typer.echo(colorful_json(response.json(sort_keys=True, indent=4)))
@app.command()
def post(
    path: Optional[Path] = typer.Option(
        None,
        help="Path to the content you want to post. If omitted, you can input your content directly",
    ),
    type: str = typer.Option("test", help="Text representing the message object type"),
    ref: Optional[str] = typer.Option(None, help=help_strings.REF),
    channel: Optional[str] = typer.Option(default=None, help=help_strings.CHANNEL),
    private_key: Optional[str] = typer.Option(
        sdk_settings.PRIVATE_KEY_STRING, help=help_strings.PRIVATE_KEY
    ),
    private_key_file: Optional[Path] = typer.Option(
        sdk_settings.PRIVATE_KEY_FILE, help=help_strings.PRIVATE_KEY_FILE
    ),
    debug: bool = False,
):
    """Post a message on aleph.im.

    Content is read as JSON either from ``--path`` or interactively from
    stdin. Content larger than 4 MiB is stored via IPFS; smaller content
    uses the native storage engine.
    """
    setup_logging(debug)
    account: AccountFromPrivateKey = _load_account(private_key, private_key_file)
    storage_engine: StorageEnum
    content: Dict
    if path:
        if not path.is_file():
            typer.echo(f"Error: File not found: '{path}'")
            raise typer.Exit(code=1)
        # Pick the storage engine from the on-disk file size.
        file_size = os.path.getsize(path)
        storage_engine = (
            StorageEnum.ipfs if file_size > 4 * 1024 * 1024 else StorageEnum.storage
        )
        with open(path, "r") as fd:
            content = json.load(fd)
    else:
        # Interactive mode: read multi-line JSON from stdin.
        content_raw = input_multiline()
        storage_engine = (
            StorageEnum.ipfs
            if len(content_raw) > 4 * 1024 * 1024
            else StorageEnum.storage
        )
        try:
            content = json.loads(content_raw)
        except json.decoder.JSONDecodeError:
            typer.echo("Not valid JSON")
            raise typer.Exit(code=2)
    with AuthenticatedAlephClient(
        account=account, api_server=sdk_settings.API_HOST
    ) as client:
        result, status = client.create_post(
            post_content=content,
            post_type=type,
            ref=ref,
            channel=channel,
            inline=True,
            storage_engine=storage_engine,
        )
    typer.echo(json.dumps(result.dict(), indent=4))
@app.command()
def amend(
    item_hash: str = typer.Argument(..., help="Hash reference of the message to amend"),
    private_key: Optional[str] = typer.Option(
        sdk_settings.PRIVATE_KEY_STRING, help=help_strings.PRIVATE_KEY
    ),
    private_key_file: Optional[Path] = typer.Option(
        sdk_settings.PRIVATE_KEY_FILE, help=help_strings.PRIVATE_KEY_FILE
    ),
    debug: bool = False,
):
    """Amend an existing aleph.im message.

    Fetches the message, opens its content in $EDITOR (default: nano),
    then submits the edited content as a new message referencing the
    original.
    """
    setup_logging(debug)
    account: AccountFromPrivateKey = _load_account(private_key, private_key_file)
    with AlephClient(api_server=sdk_settings.API_HOST) as client:
        existing_message: AlephMessage = client.get_message(item_hash=item_hash)
    editor: str = os.getenv("EDITOR", default="nano")
    with tempfile.NamedTemporaryFile(suffix="json") as fd:
        # Fill in message template
        fd.write(existing_message.content.json(indent=4).encode())
        fd.seek(0)
        # Launch editor
        subprocess.run([editor, fd.name], check=True)
        # Read new message
        fd.seek(0)
        new_content_json = fd.read()
        # Resolve the content model class declared on the message type so
        # the edited JSON is re-validated through the same model.
        content_type = type(existing_message).__annotations__["content"]
        new_content_dict = json.loads(new_content_json)
        new_content = content_type(**new_content_dict)
        # Program amends use `replaces`; every other type links via `ref`.
        if isinstance(existing_message, ProgramMessage):
            new_content.replaces = existing_message.item_hash
        else:
            new_content.ref = existing_message.item_hash
        typer.echo(new_content)
    with AuthenticatedAlephClient(
        account=account, api_server=sdk_settings.API_HOST
    ) as client:
        message, _status = client.submit(
            content=new_content.dict(),
            message_type=existing_message.type,
            channel=existing_message.channel,
        )
    typer.echo(f"{message.json(indent=4)}")
@app.command()
def forget(
    hashes: str = typer.Argument(
        ..., help="Comma separated list of hash references of messages to forget"
    ),
    reason: Optional[str] = typer.Option(
        None, help="A description of why the messages are being forgotten."
    ),
    channel: Optional[str] = typer.Option(default=None, help=help_strings.CHANNEL),
    private_key: Optional[str] = typer.Option(
        sdk_settings.PRIVATE_KEY_STRING, help=help_strings.PRIVATE_KEY
    ),
    private_key_file: Optional[Path] = typer.Option(
        sdk_settings.PRIVATE_KEY_FILE, help=help_strings.PRIVATE_KEY_FILE
    ),
    debug: bool = False,
):
    """Forget an existing aleph.im message."""
    setup_logging(debug)
    account: AccountFromPrivateKey = _load_account(private_key, private_key_file)
    # Split the comma-separated CLI argument into individual item hashes.
    hashes_to_forget: List[str] = hashes.split(",")
    with AuthenticatedAlephClient(
        account=account, api_server=sdk_settings.API_HOST
    ) as client:
        client.forget(hashes=hashes_to_forget, reason=reason, channel=channel)
@app.command()
def watch(
    ref: str = typer.Argument(..., help="Hash reference of the message to watch"),
    indent: Optional[int] = typer.Option(None, help="Number of indents to use"),
    debug: bool = False,
):
    """Watch a hash for amends and print amend hashes"""
    setup_logging(debug)
    with AlephClient(api_server=sdk_settings.API_HOST) as client:
        # Resolve the watched message first so we can filter on its sender.
        original: AlephMessage = client.get_message(item_hash=ref)
        amendments = client.watch_messages(
            refs=[ref], addresses=[original.content.address]
        )
        for amend_message in amendments:
            typer.echo(f"{amend_message.json(indent=indent)}")
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,053
|
aleph-im/aleph-client
|
refs/heads/master
|
/tests/integration/itest_aggregates.py
|
import json
from typing import Dict
import pytest
from aleph.sdk import AuthenticatedAlephClient
from aleph.sdk.types import Account
from tests.integration.toolkit import try_until
from .config import REFERENCE_NODE, TARGET_NODE
async def create_aggregate_on_target(
    account: Account,
    key: str,
    content: Dict,
    emitter_node: str,
    receiver_node: str,
    channel="INTEGRATION_TESTS",
):
    """Create an aggregate on ``emitter_node`` and verify it can be read
    back from ``receiver_node``.

    Args:
        account: signing account for the aggregate message.
        key: aggregate key to write under.
        content: mapping stored in the aggregate.
        emitter_node: API URL of the node receiving the write.
        receiver_node: API URL of the node the aggregate must sync to.
        channel: aleph channel the aggregate is posted on.
    """
    async with AuthenticatedAlephClient(
        account=account, api_server=emitter_node
    ) as emitter_client:
        aggregate_message, message_status = await emitter_client.create_aggregate(
            key=key,
            content=content,
            # Fix: honour the `channel` parameter. It was hard-coded to
            # "INTEGRATION_TESTS", which broke the channel assertion below
            # for any caller passing a different channel.
            channel=channel,
        )
        assert aggregate_message.sender == account.get_address()
        assert aggregate_message.channel == channel
        # Note: lots of duplicates in the response
        item_content = json.loads(aggregate_message.item_content)
        assert item_content["key"] == key
        assert item_content["content"] == content
        assert item_content["address"] == account.get_address()
        assert aggregate_message.content.key == key
        assert aggregate_message.content.address == account.get_address()
        assert aggregate_message.content.content == content
    async with AuthenticatedAlephClient(
        account=account, api_server=receiver_node
    ) as receiver_client:
        aggregate_from_receiver = await try_until(
            receiver_client.fetch_aggregate,
            lambda aggregate: aggregate is not None,
            timeout=5,
            address=account.get_address(),
            key=key,
            api_server=receiver_node,
        )
    # Renamed loop variables: the original reused `key`, shadowing the
    # function parameter of the same name.
    for field_name, field_value in content.items():
        assert field_name in aggregate_from_receiver
        assert aggregate_from_receiver[field_name] == field_value
@pytest.mark.asyncio
async def test_create_aggregate_on_target(fixture_account):
    """
    Attempts to create an aggregate on the target node and validates that the aggregate can be fetched
    from the reference node.
    """
    # Write on the target node, read back from the reference node.
    await create_aggregate_on_target(
        fixture_account,
        key="test_target",
        content={"a": 1, "b": 2},
        emitter_node=TARGET_NODE,
        receiver_node=REFERENCE_NODE,
    )
@pytest.mark.asyncio
async def test_create_aggregate_on_reference(fixture_account):
    """
    Attempts to create an aggregate on the reference node and validates that the aggregate can be fetched
    from the target node.
    """
    # Mirror of the previous test: write on reference, read from target.
    await create_aggregate_on_target(
        fixture_account,
        key="test_reference",
        content={"c": 3, "d": 4},
        emitter_node=REFERENCE_NODE,
        receiver_node=TARGET_NODE,
    )
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,054
|
aleph-im/aleph-client
|
refs/heads/master
|
/src/aleph_client/vm/app.py
|
from dataclasses import dataclass
from typing import (
Any,
Awaitable,
Callable,
Dict,
List,
Mapping,
MutableMapping,
Optional,
)
# Type alias: any callable implementing the ASGI application signature.
AsgiApplication = Callable
@dataclass
class EventHandler:
    # Filter specifications the incoming event scope is matched against.
    filters: List[Dict]
    # Callable invoked when the event matches.
    handler: Callable
    def matches(self, scope: Mapping[str, Any]) -> bool:
        """Return True when `scope` matches one of the registered filters.

        NOTE: filter matching is not implemented yet (see TODO below) —
        currently any scope matches as soon as at least one filter is
        registered; an empty filter list never matches.
        """
        for filter in self.filters:
            # if [filter matches scope]: TODO
            if True:
                return True
        return False
class AlephApp:
    """ASGI compatible wrapper for apps running inside aleph.im Virtual Machines.
    The wrapper adds support to register functions to react to non-HTTP events.
    """
    # Wrapped ASGI app serving regular HTTP/websocket/lifespan traffic.
    http_app: Optional[AsgiApplication] = None
    # Handlers registered via the @app.event(...) decorator.
    event_handlers: List[EventHandler]
    def __init__(self, http_app: Optional[AsgiApplication] = None):
        self.http_app = http_app
        self.event_handlers = []
    def event(self, filters: List[Dict]):
        """Use this decorator to register event calls.
        ```python
        @app.event(filters=[...])
        def on_event(event):
            ...
        ```
        """
        def inner(func: Callable):
            # Register the event handler
            event_handler = EventHandler(filters=filters, handler=func)
            self.event_handlers.append(event_handler)
            return func
        return inner
    async def __call__(
        self,
        scope: MutableMapping[str, Any],
        receive: Optional[Callable[[], Awaitable[Any]]] = None,
        send: Optional[Callable[[Dict[Any, Any]], Awaitable[Any]]] = None,
    ):
        """ASGI entry point: HTTP-like scopes go to the wrapped app,
        'aleph.message' scopes to the first matching event handler.
        """
        if scope["type"] in ("http", "websocket", "lifespan"):
            if self.http_app:
                await self.http_app(scope=scope, receive=receive, send=send)
            else:
                raise ValueError("No HTTP app registered")
        elif scope["type"] == "aleph.message":
            for event_handler in self.event_handlers:
                if event_handler.matches(scope):
                    # event_handler.handler(scope=scope, receive=receive, send=send)
                    async def send_handler_result():
                        result = await event_handler.handler(event=scope)
                        if send:
                            await send(result)
                        else:
                            raise ValueError("No send method specified")
                    # NOTE(review): this returns the coroutine object without
                    # awaiting it — the handler only runs if the caller awaits
                    # the value returned by `await app(...)`. Confirm intended.
                    return send_handler_result()
        else:
            raise ValueError(f"Unknown scope type '{scope['type']}'")
    def __getattr__(self, name):
        # Default all calls to the HTTP handler
        return getattr(self.http_app, name)
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,055
|
aleph-im/aleph-client
|
refs/heads/master
|
/tests/integration/config.py
|
# Node under test (writes are emitted here in most tests).
TARGET_NODE = "http://163.172.70.92:4024"
# Known-good public node used to verify propagation.
REFERENCE_NODE = "https://api2.aleph.im"
# Channel used by all integration-test messages.
TEST_CHANNEL = "INTEGRATION_TESTS"
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,056
|
aleph-im/aleph-client
|
refs/heads/master
|
/examples/store.py
|
import asyncio
import click
from aleph.sdk.chains.common import get_fallback_private_key
from aleph.sdk.chains.ethereum import ETHAccount
from aleph.sdk.client import AuthenticatedAlephClient
from aleph_message.models import StoreMessage
from aleph_message.status import MessageStatus
DEFAULT_SERVER = "https://api2.aleph.im"
async def print_output_hash(message: StoreMessage, status: MessageStatus):
    """Pretty-print the key fields of a freshly created STORE message."""
    content = message.content
    print("Successfully created STORE message")
    print(f"File hash ({content.item_type}): {content.item_hash}")
    print("Sender: ", message.sender)
    print(f"Message hash: {message.item_hash}")
    explorer_url = (
        "https://explorer.aleph.im/address/"
        f"{message.chain.value}/{message.sender}/message/{message.item_hash}"
    )
    print(f"Explorer URL: {explorer_url}")
async def do_upload(account, engine, channel, filename=None, file_hash=None):
    """Store a file's content (or pin an existing hash) on aleph.im.

    Args:
        account: signing account for the STORE message.
        engine: "STORAGE" or "IPFS" (case preserved from the CLI).
        channel: aleph channel to write in.
        filename: path of a local file to upload, or None.
        file_hash: existing hash to pin instead of uploading, or None.

    Raises:
        ValueError: if neither `filename` nor `file_hash` is given.
        IOError: if `filename` cannot be read (re-raised after logging).
    """
    if not filename and not file_hash:
        # Fix: previously this case fell through to an UnboundLocalError
        # on `message` at the final print call.
        raise ValueError("Either filename or file_hash must be provided")
    async with AuthenticatedAlephClient(
        account=account, api_server=DEFAULT_SERVER
    ) as client:
        print(filename, account.get_address())
        if filename:
            try:
                with open(filename, "rb") as f:
                    content = f.read()
                # Native storage caps out at 4 MiB; refuse early rather
                # than fail server-side.
                if len(content) > 4 * 1024 * 1024 and engine == "STORAGE":
                    print("File too big for native STORAGE engine")
                    return
                message, status = await client.create_store(
                    account,
                    file_content=content,
                    channel=channel,
                    storage_engine=engine.lower(),
                )
            except IOError:
                print("File not accessible")
                raise
        else:
            # Pin an already-stored hash instead of uploading new content.
            message, status = await client.create_store(
                account,
                file_hash=file_hash,
                channel=channel,
                storage_engine=engine.lower(),
            )
        await print_output_hash(message, status)
@click.command()
@click.argument(
    "filename",
)
@click.option(
    "--pkey",
    envvar="PKEY",
    default=None,
    help="Account private key (optional, will default to device.key file)",
)
@click.option(
    "--storage-engine",
    default="IPFS",
    help="Storage engine to use (default: IPFS)",
    type=click.Choice(["STORAGE", "IPFS"], case_sensitive=False),
)
@click.option(
    "--channel",
    envvar="ALEPH_CHANNEL",
    default="TEST",
    help="Channel to write in (default: TEST)",
)
def main(filename, pkey=None, storage_engine="IPFS", channel="TEST"):
    """Uploads or store FILENAME.

    If FILENAME is an IPFS multihash and IPFS is selected as an engine (default), don't try to upload, just pin it to the network.
    Else, uploads the file to the network before pining it.
    """
    if pkey is None:
        pkey = get_fallback_private_key()
    account = ETHAccount(private_key=pkey)
    upload_filename = None
    upload_hash = None
    # Heuristic IPFS-multihash detection: 46-48 chars starting with "Q"
    # (CIDv0 hashes start with "Qm" — presumably the intended target;
    # TODO confirm this never misfires on real filenames).
    if (
        46 <= len(filename) <= 48
        and filename.startswith("Q")
        and storage_engine == "IPFS"
    ):
        upload_hash = filename
    else:
        upload_filename = filename
    # The SDK client is async; drive the coroutine from this sync CLI.
    asyncio.run(
        do_upload(
            account,
            storage_engine,
            channel,
            filename=upload_filename,
            file_hash=upload_hash,
        )
    )
if __name__ == "__main__":
    main()
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,057
|
aleph-im/aleph-client
|
refs/heads/master
|
/tests/integration/conftest.py
|
import asyncio
import pytest
from aleph.sdk.chains.common import get_fallback_private_key
from aleph.sdk.chains.ethereum import ETHAccount
@pytest.fixture
def fixture_account():
    """Ethereum test account backed by the fallback device key."""
    return ETHAccount(get_fallback_private_key())
# Fixes the "Event loop is closed" error that happens when running several tests in a row
@pytest.fixture(scope="session")
def event_loop(request):
    """Create an instance of the default event loop for each test case."""
    loop = asyncio.get_event_loop_policy().new_event_loop()
    yield loop
    # Close the loop once the whole session is done.
    loop.close()
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,058
|
aleph-im/aleph-client
|
refs/heads/master
|
/src/aleph_client/vm/cache.py
|
import abc
import fnmatch
import re
import threading
from functools import lru_cache
from typing import Any, Dict, List, NewType, Optional, Union
import aiohttp
from aiohttp import ClientSession
from ..conf import settings
# A cache key that has passed sanitize_cache_key() validation.
CacheKey = NewType("CacheKey", str)
@lru_cache()
def _get_fallback_session(thread_id: Optional[int]) -> ClientSession:
    """Create (and memoize) an aiohttp ClientSession.

    ``thread_id`` is intentionally unused in the body: it only serves as
    the ``lru_cache`` key so each thread gets its own session.
    """
    if settings.API_UNIX_SOCKET:
        # Talk to the host over its local unix socket when configured.
        connector = aiohttp.UnixConnector(path=settings.API_UNIX_SOCKET)
        return aiohttp.ClientSession(connector=connector)
    else:
        return aiohttp.ClientSession()
def get_fallback_session() -> ClientSession:
    """Return the shared aiohttp session for the current OS thread."""
    return _get_fallback_session(thread_id=threading.get_native_id())
def sanitize_cache_key(key: str) -> CacheKey:
    """Validate that `key` contains only word characters and tag it as a CacheKey."""
    if re.match(r"^\w+$", key) is None:
        raise ValueError("Key may only contain letters, numbers and underscore")
    return CacheKey(key)
class BaseVmCache(abc.ABC):
    """Virtual Machines can use this cache to store temporary data in memory on the host."""
    @abc.abstractmethod
    async def get(self, key: str) -> Optional[bytes]:
        """Get the value for a given key string; None when absent."""
        pass
    @abc.abstractmethod
    async def set(self, key: str, value: Union[str, bytes]) -> Any:
        """Set the value for a given key string (str values are encoded to bytes)."""
        pass
    @abc.abstractmethod
    async def delete(self, key: str) -> Any:
        """Delete the value for a given key string."""
        pass
    @abc.abstractmethod
    async def keys(self, pattern: str = "*") -> List[str]:
        """Get all keys matching a given glob pattern."""
        pass
class VmCache(BaseVmCache):
    """Virtual Machines can use this cache to store temporary data in memory on the host.

    All operations are forwarded to the host's HTTP cache API at
    ``{api_host}/cache/...``.
    """
    # aiohttp session used for all cache requests.
    session: ClientSession
    # NOTE(review): this dict is initialised but never read or written by
    # the methods below — it appears vestigial.
    cache: Dict[str, bytes]
    # Base URL of the host cache HTTP API.
    api_host: str
    def __init__(
        self, session: Optional[ClientSession] = None, api_host: Optional[str] = None
    ):
        self.session = session or get_fallback_session()
        self.cache = {}
        self.api_host = api_host if api_host else settings.API_HOST
    async def get(self, key: str) -> Optional[bytes]:
        """Fetch the raw bytes for `key`; None when the host returns 404."""
        sanitized_key = sanitize_cache_key(key)
        async with self.session.get(f"{self.api_host}/cache/{sanitized_key}") as resp:
            if resp.status == 404:
                return None
            resp.raise_for_status()
            return await resp.read()
    async def set(self, key: str, value: Union[str, bytes]) -> Any:
        """Store `value` under `key` (str payloads are encoded to bytes)."""
        sanitized_key = sanitize_cache_key(key)
        data = value if isinstance(value, bytes) else value.encode()
        async with self.session.put(
            f"{self.api_host}/cache/{sanitized_key}", data=data
        ) as resp:
            resp.raise_for_status()
            return await resp.json()
    async def delete(self, key: str) -> Any:
        """Remove `key` from the host cache."""
        sanitized_key = sanitize_cache_key(key)
        async with self.session.delete(
            f"{self.api_host}/cache/{sanitized_key}"
        ) as resp:
            resp.raise_for_status()
            return await resp.json()
    async def keys(self, pattern: str = "*") -> List[str]:
        """List cached keys matching a glob-style pattern."""
        # Validate the pattern client-side before passing it to the host.
        if not re.match(r"^[\w?*^\-]+$", pattern):
            raise ValueError(
                "Pattern may only contain letters, numbers, underscore, ?, *, ^, -"
            )
        async with self.session.get(
            f"{self.api_host}/cache/?pattern={pattern}"
        ) as resp:
            resp.raise_for_status()
            return await resp.json()
class TestVmCache(BaseVmCache):
    """This is a local, dict-based cache that can be used for testing purposes."""

    def __init__(self):
        # Backing store mapping sanitized keys to raw byte values.
        self._cache: Dict[str, bytes] = {}

    async def get(self, key: str) -> Optional[bytes]:
        """Look up a single key; None when absent."""
        return self._cache.get(sanitize_cache_key(key))

    async def set(self, key: str, value: Union[str, bytes]) -> None:
        """Store a value, encoding non-bytes payloads to bytes."""
        payload = value if isinstance(value, bytes) else value.encode()
        self._cache[sanitize_cache_key(key)] = payload

    async def delete(self, key: str) -> None:
        """Remove a key; raises KeyError when it does not exist."""
        del self._cache[sanitize_cache_key(key)]

    async def keys(self, pattern: str = "*") -> List[str]:
        """Return stored keys matching a glob-style pattern."""
        if not re.match(r"^[\w?*^\-]+$", pattern):
            raise ValueError(
                "Pattern may only contain letters, numbers, underscore, ?, *, ^, -"
            )
        return fnmatch.filter(list(self._cache.keys()), pattern)
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,059
|
aleph-im/aleph-client
|
refs/heads/master
|
/tests/unit/test_app/main.py
|
from fastapi import FastAPI
from aleph_client.vm.app import AlephApp
# Create a test app
http_app = FastAPI()
app = AlephApp(http_app=http_app)
# HTTP routes are delegated to the wrapped FastAPI app via __getattr__.
@app.get("/")
async def index():
    return {"index": "/"}
# Register an aleph event handler (empty filter list).
@app.event(filters=[])
async def aleph_event(event):
    print("aleph_event", event)
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,060
|
aleph-im/aleph-client
|
refs/heads/master
|
/examples/metrics.py
|
""" Server metrics upload.
"""
# -*- coding: utf-8 -*-
import os
import platform
import time
import psutil
from aleph.sdk import AuthenticatedAlephClient
from aleph.sdk.account import _load_account
def get_sysinfo():
    """Collect static host information plus the current uptime."""
    seconds_up = int(time.time() - psutil.boot_time())
    return {
        "uptime": seconds_up,
        "os": platform.platform(),
        "load_avg": os.getloadavg(),
        "num_cpus": psutil.cpu_count(),
    }
def get_memory():
    """Virtual-memory statistics as a plain mapping."""
    vm = psutil.virtual_memory()
    return vm._asdict()
def get_swap_space():
    """Swap usage summary with the sin/sout counters renamed for clarity."""
    swap_stats = psutil.swap_memory()
    return {
        "total": swap_stats.total,
        "free": swap_stats.free,
        "used": swap_stats.used,
        "percent": swap_stats.percent,
        "swapped_in": swap_stats.sin,
        "swapped_out": swap_stats.sout,
    }
def get_cpu():
    """Aggregate CPU time percentages as a mapping."""
    cpu_times = psutil.cpu_times_percent(0)
    return cpu_times._asdict()
def get_cpu_cores():
    """Per-core CPU time percentages as a list of mappings."""
    per_core = psutil.cpu_times_percent(0, percpu=True)
    return [core._asdict() for core in per_core]
def send_metrics(account, metrics):
    """Publish `metrics` as a "metrics" aggregate on the SYSINFO channel."""
    with AuthenticatedAlephClient(
        account=account, api_server="https://api2.aleph.im"
    ) as aleph:
        return aleph.create_aggregate("metrics", metrics, channel="SYSINFO")
def collect_metrics():
    """Gather one snapshot of every monitored metric group."""
    snapshot = {}
    snapshot["memory"] = get_memory()
    snapshot["swap"] = get_swap_space()
    snapshot["cpu"] = get_cpu()
    snapshot["cpu_cores"] = get_cpu_cores()
    return snapshot
def main():
    # Load the signing account (SDK falls back to the device key file).
    account = _load_account()
    # Upload a fresh metrics aggregate every 10 seconds, forever.
    while True:
        metrics = collect_metrics()
        message, status = send_metrics(account, metrics)
        print("sent", message.item_hash)
        time.sleep(10)
if __name__ == "__main__":
main()
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,061
|
aleph-im/aleph-client
|
refs/heads/master
|
/src/aleph_client/main.py
|
"""This module only exists for backward compatibility and will be removed in a future release.
"""
import warnings
warnings.warn(
"`aleph_client.main` is deprecated and will be removed. "
"Use `aleph_client.synchronous` instead.",
DeprecationWarning,
)
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,062
|
aleph-im/aleph-client
|
refs/heads/master
|
/examples/httpgateway.py
|
""" Server metrics upload.
"""
import click
from aiohttp import web
from aleph.sdk import AuthenticatedAlephClient
from aleph.sdk.chains.common import get_fallback_private_key
from aleph.sdk.chains.ethereum import ETHAccount
app = web.Application()
routes = web.RouteTableDef()
@routes.get("/")
async def hello(request):
return web.Response(text="Hello, world")
@routes.post("/p/{source}")
async def source_post(request):
# print(await request.text())
data = await request.post()
data = dict(data.copy().items())
secret = data.pop("secret", None)
data["source"] = request.match_info["source"]
if app["secret"] is not None:
if secret != app["secret"]:
return web.json_response(
{"status": "error", "message": "unauthorized secret"}
)
async with AuthenticatedAlephClient(
account=app["account"], api_server="https://api2.aleph.im"
) as client:
message, _status = await client.create_post(
data,
"event",
channel=app["channel"],
)
return web.json_response({"status": "success", "item_hash": message.item_hash})
@click.command()
@click.option("--host", default="localhost", help="http host")
@click.option("--port", default=80, help="http port")
@click.option("--channel", default="GATEWAY", help="Channel for data post")
@click.option(
"--pkey",
default=None,
help="Account private key (optionnal, will default to device.key file)",
)
@click.option("--secret", default=None, help="Needed secret to be allowed to post")
def main(host, port, channel, pkey=None, secret=None):
app.add_routes(routes)
app["secret"] = secret
app["channel"] = channel
if pkey is None:
pkey = get_fallback_private_key()
account = ETHAccount(private_key=pkey)
app["account"] = account
web.run_app(app, host=host, port=port)
if __name__ == "__main__":
main()
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,063
|
aleph-im/aleph-client
|
refs/heads/master
|
/src/aleph_client/vm/__init__.py
|
"""
Aleph helpers for apps running inside aleph.im Virtual Machines.
"""
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,064
|
aleph-im/aleph-client
|
refs/heads/master
|
/tests/unit/test_vm_cache.py
|
import pytest
from aleph_client.vm.cache import TestVmCache, sanitize_cache_key
@pytest.mark.asyncio
async def test_local_vm_cache():
cache = TestVmCache()
assert (await cache.get("doesnotexist")) is None
assert len(await (cache.keys())) == 0
key = "thisdoesexist"
value = "yay, I exist!"
await cache.set(key, value)
cached_value = await cache.get(key)
assert cached_value is not None
assert cached_value.decode() == value
assert (await cache.keys())[0] == key
assert (await cache.keys("*exist"))[0] == key
await cache.delete(key)
assert (await cache.get(key)) is None
assert len(await (cache.keys())) == 0
def test_sanitize_cache_keys():
    """Alphanumeric/underscore keys are accepted; punctuation and globs raise."""
    assert sanitize_cache_key("abc")
    assert sanitize_cache_key("abc123")
    assert sanitize_cache_key("abc_123")
    with pytest.raises(ValueError):
        sanitize_cache_key("abc-123")
    with pytest.raises(ValueError):
        sanitize_cache_key("abc!123")
    with pytest.raises(ValueError):
        assert sanitize_cache_key("*")
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,065
|
aleph-im/aleph-client
|
refs/heads/master
|
/tests/unit/test_chain_ethereum.py
|
from dataclasses import asdict, dataclass
from pathlib import Path
from tempfile import NamedTemporaryFile
import pytest
from aleph.sdk.chains.ethereum import get_fallback_account
@dataclass
class Message:
chain: str
sender: str
type: str
item_hash: str
def test_get_fallback_account():
    """A fallback ETH account can be created against an empty key file."""
    with NamedTemporaryFile() as private_key_file:
        account = get_fallback_account(path=Path(private_key_file.name))
        assert account.CHAIN == "ETH"
        assert account.CURVE == "secp256k1"
        # Reaches into the private _account to check an address was derived.
        assert account._account.address
@pytest.mark.asyncio
async def test_ETHAccount(ethereum_account):
account = ethereum_account
message = Message("ETH", account.get_address(), "SomeType", "ItemHash")
signed = await account.sign_message(asdict(message))
assert signed["signature"]
assert len(signed["signature"]) == 132
address = account.get_address()
assert address
assert type(address) == str
assert len(address) == 42
pubkey = account.get_public_key()
assert type(pubkey) == str
assert len(pubkey) == 68
@pytest.mark.asyncio
async def test_decrypt_secp256k1(ethereum_account):
account = ethereum_account
assert account.CURVE == "secp256k1"
content = b"SomeContent"
encrypted = await account.encrypt(content)
assert type(encrypted) == bytes
decrypted = await account.decrypt(encrypted)
assert type(decrypted) == bytes
assert content == decrypted
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,066
|
aleph-im/aleph-client
|
refs/heads/master
|
/tests/unit/test_vm_app.py
|
import asyncio
import pytest
from fastapi.testclient import TestClient
from tests.unit.test_app.main import app
# Note: for some reason, the test client must be declared at the same level as the import.
client = TestClient(app)
@pytest.mark.asyncio
async def test_app_event():
# Call the app with an ASGI context
scope = {
"type": "aleph.message",
}
async def receive():
return {"type": "aleph.message", "body": b"BODY", "more_body": False}
send_queue: asyncio.Queue = asyncio.Queue()
async def send(dico):
await send_queue.put(dico)
await app(scope, receive, send)
def test_app_http():
response = client.get("/")
assert response.status_code == 200
assert response.json() == {"index": "/"}
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,067
|
aleph-im/aleph-client
|
refs/heads/master
|
/tests/integration/toolkit.py
|
import asyncio
import time
from typing import Awaitable, Callable, TypeVar
T = TypeVar("T")
async def try_until(
    coroutine: Callable[..., Awaitable[T]],
    condition: Callable[[T], bool],
    timeout: float,
    time_between_attempts: float = 0.5,
    *args,
    **kwargs,
) -> T:
    """Repeatedly await *coroutine*(*args, **kwargs) until *condition*
    accepts its result, sleeping *time_between_attempts* between tries.

    Raises TimeoutError if no attempt succeeds within *timeout* seconds.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        attempt = await coroutine(*args, **kwargs)
        if condition(attempt):
            return attempt
        await asyncio.sleep(time_between_attempts)
    raise TimeoutError(f"No success in {timeout} seconds.")
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,068
|
aleph-im/aleph-client
|
refs/heads/master
|
/src/aleph_client/utils.py
|
import logging
import os
from pathlib import Path
from shutil import make_archive
from typing import Tuple, Type
from zipfile import BadZipFile, ZipFile
from aleph.sdk.types import GenericMessage
from aleph_message.models import MessageType
from aleph_message.models.program import Encoding
from aleph_client.conf import settings
logger = logging.getLogger(__name__)
try:
import magic
except ImportError:
logger.info("Could not import library 'magic', MIME type detection disabled")
magic = None # type:ignore
def try_open_zip(path: Path) -> None:
    """Validate that *path* is a readable, non-empty zip archive.

    Raises BadZipFile if the file is not a zip or contains no entries.
    """
    assert path.is_file()
    with open(path, "rb") as archive_stream, ZipFile(archive_stream, "r") as archive:
        if not archive.namelist():
            raise BadZipFile("No file in the archive.")
def create_archive(path: Path) -> Tuple[Path, Encoding]:
    """Create a zip archive from a directory"""
    # Directory input: prefer a squashfs image when mksquashfs is available,
    # otherwise build a zip archive next to the directory.
    if os.path.isdir(path):
        if settings.CODE_USES_SQUASHFS:
            logger.debug("Creating squashfs archive...")
            archive_path = Path(f"{path}.squashfs")
            # NOTE(review): path is interpolated into a shell command; a path
            # containing spaces or shell metacharacters would break (or abuse)
            # the command. subprocess.run with an argument list would be safer.
            os.system(f"mksquashfs {path} {archive_path} -noappend")
            assert archive_path.is_file()
            return archive_path, Encoding.squashfs
        else:
            logger.debug("Creating zip archive...")
            make_archive(str(path), "zip", path)
            archive_path = Path(f"{path}.zip")
            return archive_path, Encoding.zip
    # File input: accept squashfs images as-is (detected by suffix or MIME
    # sniffing if python-magic is installed); otherwise require a valid zip.
    elif os.path.isfile(path):
        if path.suffix == ".squashfs" or (
            magic and magic.from_file(path).startswith("Squashfs filesystem")
        ):
            return path, Encoding.squashfs
        else:
            try_open_zip(Path(path))
            return path, Encoding.zip
    else:
        raise FileNotFoundError("No file or directory to create the archive from")
def get_message_type_value(message_type: Type[GenericMessage]) -> MessageType:
    """Returns the value of the 'type' field of a message type class."""
    annotations = message_type.__annotations__
    # The "type" field is annotated as Literal[...]; its sole argument is
    # the concrete MessageType value.
    return annotations["type"].__args__[0]
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,069
|
aleph-im/aleph-client
|
refs/heads/master
|
/src/aleph_client/commands/aggregate.py
|
from pathlib import Path
from typing import Optional
import typer
from aleph.sdk.account import _load_account
from aleph.sdk.client import AuthenticatedAlephClient
from aleph.sdk.conf import settings as sdk_settings
from aleph.sdk.types import AccountFromPrivateKey
from aleph_message.models import MessageType
from aleph_client.commands import help_strings
from aleph_client.commands.utils import setup_logging
app = typer.Typer()
@app.command()
def forget(
    key: str = typer.Argument(..., help="Aggregate item hash to be removed."),
    reason: Optional[str] = typer.Option(
        None, help="A description of why the messages are being forgotten"
    ),
    channel: Optional[str] = typer.Option(default=None, help=help_strings.CHANNEL),
    private_key: Optional[str] = typer.Option(
        sdk_settings.PRIVATE_KEY_STRING, help=help_strings.PRIVATE_KEY
    ),
    private_key_file: Optional[Path] = typer.Option(
        sdk_settings.PRIVATE_KEY_FILE, help=help_strings.PRIVATE_KEY_FILE
    ),
    debug: bool = False,
):
    """Forget all the messages composing an aggregate.

    Fetches every AGGREGATE message authored by the account for the given
    content key, then issues a single forget covering all their hashes.
    """
    setup_logging(debug)
    account: AccountFromPrivateKey = _load_account(private_key, private_key_file)
    with AuthenticatedAlephClient(
        account=account, api_server=sdk_settings.API_HOST
    ) as client:
        # Collect the aggregate messages for this key sent from this address.
        message_response = client.get_messages(
            addresses=[account.get_address()],
            message_type=MessageType.aggregate.value,
            content_keys=[key],
        )
        hash_list = [message["item_hash"] for message in message_response.messages]
        # NOTE(review): the forget result is discarded; failures are silent.
        client.forget(hashes=hash_list, reason=reason, channel=channel)
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,070
|
aleph-im/aleph-client
|
refs/heads/master
|
/src/aleph_client/commands/utils.py
|
import logging
from typing import Dict, List, Optional, Union
from aleph.sdk.types import GenericMessage
from pygments import highlight
from pygments.formatters.terminal256 import Terminal256Formatter
from pygments.lexers import JsonLexer
from typer import echo
from datetime import datetime
def colorful_json(obj: str):
    """Render a JSON string with colors."""
    json_lexer = JsonLexer()
    terminal_formatter = Terminal256Formatter()
    return highlight(obj, lexer=json_lexer, formatter=terminal_formatter)
def colorful_message_json(message: GenericMessage):
    """Render a message in JSON with colors."""
    # Serialize with stable key order and indentation, then colorize.
    return colorful_json(message.json(sort_keys=True, indent=4))
def input_multiline() -> str:
    """Prompt the user for a multiline input."""
    echo("Enter/Paste your content. Ctrl-D or Ctrl-Z ( windows ) to save it.")
    lines = []
    while True:
        try:
            lines.append(input())
        except EOFError:
            # End of input: user pressed Ctrl-D (or Ctrl-Z on Windows).
            break
    return "".join(line + "\n" for line in lines)
def setup_logging(debug: bool = False):
    """Configure root logging: DEBUG when *debug* is set, WARNING otherwise."""
    logging.basicConfig(level=logging.DEBUG if debug else logging.WARNING)
def yes_no_input(text: str, default: Optional[bool] = None):
    """Ask a yes/no question on stdin until a valid answer is given.

    default=True renders "[Y/n]", default=False "[y/N]"; an empty answer
    returns the default when one is set.
    """
    while True:
        if default is True:
            response = input(f"{text} [Y/n] ")
        elif default is False:
            response = input(f"{text} [y/N] ")
        else:
            response = input(f"{text} ")
        if response.lower() in ("y", "yes"):
            return True
        elif response.lower() in ("n", "no"):
            return False
        elif response == "" and default is not None:
            return default
        else:
            # Invalid answer: explain the accepted forms and re-prompt.
            if default is None:
                echo("Please enter 'y', 'yes', 'n' or 'no'")
            else:
                echo("Please enter 'y', 'yes', 'n', 'no' or nothing")
            continue
def prompt_for_volumes():
    """Interactively build volume definitions, yielding one dict per volume.

    Keeps asking "Add volume ?" until the user declines. Persistent volumes
    carry a name and size; non-persistent ones reference an existing item.
    """
    while yes_no_input("Add volume ?", default=False):
        comment = input("Description: ") or None
        mount = input("Mount: ")
        persistent = yes_no_input("Persist on VM host ?", default=False)
        if persistent:
            name = input("Volume name: ")
            size_mib = int(input("Size in MiB: "))
            yield {
                "comment": comment,
                "mount": mount,
                "name": name,
                "persistence": "host",
                "size_mib": size_mib,
            }
        else:
            ref = input("Ref: ")
            use_latest = yes_no_input("Use latest version ?", default=True)
            yield {
                "comment": comment,
                "mount": mount,
                "ref": ref,
                "use_latest": use_latest,
            }
def volume_to_dict(volume: List[str]) -> Optional[Dict[str, Union[str, int]]]:
    """Parse ``key=value`` volume specifications into a dict.

    Each entry of *volume* is a comma-separated list of ``key=value`` pairs.
    Digit-only values become int, ``True``/``true``/``False``/``false``
    become bool, everything else stays a str. Returns None when *volume*
    is empty.
    """
    if not volume:
        return None
    dict_store: Dict[str, Union[str, int]] = {}
    for word in volume:
        split_values = word.split(",")
        for param in split_values:
            p = param.split("=")
            if p[1].isdigit():
                dict_store[p[0]] = int(p[1])
            elif p[1] in ["True", "true", "False", "false"]:
                # BUG FIX: the original used bool(p[1].capitalize()), which is
                # True for ANY non-empty string — so "False"/"false" parsed as
                # True. Compare against the truthy spellings explicitly.
                dict_store[p[0]] = p[1] in ("True", "true")
            else:
                dict_store[p[0]] = p[1]
    return dict_store
def str_to_datetime(date: Optional[str]) -> Optional[datetime]:
    """
    Converts a string representation of a date/time to a datetime object.

    Accepts either a numeric Unix timestamp or an ISO-8601 datetime string;
    returns None when *date* is None.
    """
    if date is None:
        return None
    try:
        timestamp = float(date)
    except ValueError:
        # Not a number: fall through to ISO-8601 parsing.
        return datetime.fromisoformat(date)
    return datetime.fromtimestamp(timestamp)
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,071
|
aleph-im/aleph-client
|
refs/heads/master
|
/tests/unit/test_commands.py
|
import subprocess
from pathlib import Path
from tempfile import NamedTemporaryFile
import pytest
from aleph.sdk.chains.common import generate_key
from typer.testing import CliRunner
from aleph_client.__main__ import app
from typing import Generator
runner = CliRunner()
@pytest.fixture
def empty_account_file() -> Generator[Path, None, None]:
with NamedTemporaryFile() as key_file:
yield Path(key_file.name)
@pytest.fixture
def account_file(empty_account_file: Path) -> Path:
private_key = generate_key()
empty_account_file.write_bytes(private_key)
return empty_account_file
def test_account_create(account_file: Path):
old_key = account_file.read_bytes()
result = runner.invoke(
app, ["account", "create", "--replace", "--private-key-file", str(account_file)]
)
assert result.exit_code == 0, result.stdout
new_key = account_file.read_bytes()
assert new_key != old_key
def test_account_address(account_file: Path):
result = runner.invoke(
app, ["account", "address", "--private-key-file", str(account_file)]
)
assert result.exit_code == 0
assert result.stdout.startswith("0x")
assert len(result.stdout.strip()) == 42
def test_account_export_private_key(account_file: Path):
result = runner.invoke(
app, ["account", "export-private-key", "--private-key-file", str(account_file)]
)
assert result.exit_code == 0
assert result.stdout.startswith("0x")
assert len(result.stdout.strip()) == 66
def test_message_get():
# Use subprocess to avoid border effects between tests caused by the initialisation
# of the aiohttp client session out of an async context in the SDK. This avoids
# a "no running event loop" error when running several tests back to back.
result = subprocess.run(
[
"aleph",
"message",
"get",
"bd79839bf96e595a06da5ac0b6ba51dea6f7e2591bb913deccded04d831d29f4",
],
capture_output=True,
)
assert result.returncode == 0
assert b"0x101d8D16372dBf5f1614adaE95Ee5CCE61998Fc9" in result.stdout
def test_message_find():
result = subprocess.run(
[
"aleph",
"message",
"find",
"--pagination=1",
"--page=1",
"--start-date=1234",
"--chains=ETH",
"--hashes=bd79839bf96e595a06da5ac0b6ba51dea6f7e2591bb913deccded04d831d29f4",
],
capture_output=True,
)
assert result.returncode == 0
assert b"0x101d8D16372dBf5f1614adaE95Ee5CCE61998Fc9" in result.stdout
assert (
b"bd79839bf96e595a06da5ac0b6ba51dea6f7e2591bb913deccded04d831d29f4"
in result.stdout
)
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,072
|
aleph-im/aleph-client
|
refs/heads/master
|
/src/aleph_client/conf.py
|
import os
from pathlib import Path
from shutil import which
from typing import Optional
from pydantic import BaseSettings, Field
class Settings(BaseSettings):
CONFIG_HOME: Optional[str] = None
# In case the user does not want to bother with handling private keys himself,
# do an ugly and insecure write and read from disk to this file.
PRIVATE_KEY_FILE: Path = Field(
default=Path("ethereum.key"),
description="Path to the private key used to sign messages",
)
PRIVATE_KEY_STRING: Optional[str] = None
API_HOST: str = "https://api2.aleph.im"
MAX_INLINE_SIZE: int = 50000
API_UNIX_SOCKET: Optional[str] = None
REMOTE_CRYPTO_HOST: Optional[str] = None
REMOTE_CRYPTO_UNIX_SOCKET: Optional[str] = None
ADDRESS_TO_USE: Optional[str] = None
DEFAULT_CHANNEL: str = "TEST"
DEFAULT_RUNTIME_ID: str = (
"bd79839bf96e595a06da5ac0b6ba51dea6f7e2591bb913deccded04d831d29f4"
)
DEFAULT_VM_MEMORY: int = 128
DEFAULT_VM_VCPUS: int = 1
DEFAULT_VM_TIMEOUT: float = 30.0
CODE_USES_SQUASHFS: bool = which("mksquashfs") is not None # True if command exists
VM_URL_PATH = "https://aleph.sh/vm/{hash}"
VM_URL_HOST = "https://{hash_base32}.aleph.sh"
class Config:
env_prefix = "ALEPH_"
case_sensitive = False
env_file = ".env"
# Settings singleton
settings = Settings()
if settings.CONFIG_HOME is None:
xdg_data_home = os.environ.get("XDG_DATA_HOME")
if xdg_data_home is not None:
os.environ["ALEPH_CONFIG_HOME"] = str(Path(xdg_data_home, ".aleph-im"))
else:
home = os.path.expanduser("~")
os.environ["ALEPH_CONFIG_HOME"] = str(Path(home, ".aleph-im"))
settings = Settings()
assert settings.CONFIG_HOME
if str(settings.PRIVATE_KEY_FILE) == "ethereum.key":
settings.PRIVATE_KEY_FILE = Path(
settings.CONFIG_HOME, "private-keys", "ethereum.key"
)
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,073
|
aleph-im/aleph-client
|
refs/heads/master
|
/src/aleph_client/commands/account.py
|
import base64
import logging
from pathlib import Path
from typing import Optional
import typer
from aleph.sdk.account import _load_account
from aleph.sdk.chains.common import generate_key
from aleph.sdk.chains.ethereum import ETHAccount
from aleph.sdk.conf import settings as sdk_settings
from aleph.sdk.types import AccountFromPrivateKey
from typer.colors import GREEN, RED
from aleph_client.commands import help_strings
from aleph_client.commands.utils import setup_logging
logger = logging.getLogger(__name__)
app = typer.Typer()
@app.command()
def create(
private_key: Optional[str] = typer.Option(None, help=help_strings.PRIVATE_KEY),
private_key_file: Optional[Path] = typer.Option(
..., help=help_strings.PRIVATE_KEY_FILE
),
replace: bool = False,
debug: bool = False,
):
"""Create or import a private key."""
setup_logging(debug)
if private_key_file is None:
private_key_file = Path(
typer.prompt(
"Enter file in which to save the key", sdk_settings.PRIVATE_KEY_FILE
)
)
if private_key_file.exists() and not replace:
typer.secho(f"Error: key already exists: '{private_key_file}'", fg=RED)
raise typer.Exit(1)
private_key_bytes: bytes
if private_key is not None:
# Validate the private key bytes by instantiating an account.
_load_account(private_key_str=private_key, account_type=ETHAccount)
private_key_bytes = private_key.encode()
else:
private_key_bytes = generate_key()
if not private_key_bytes:
typer.secho("An unexpected error occurred!", fg=RED)
raise typer.Exit(2)
private_key_file.parent.mkdir(parents=True, exist_ok=True)
private_key_file.write_bytes(private_key_bytes)
typer.secho(f"Private key stored in {private_key_file}", fg=RED)
@app.command()
def address(
private_key: Optional[str] = typer.Option(
sdk_settings.PRIVATE_KEY_STRING, help=help_strings.PRIVATE_KEY
),
private_key_file: Optional[Path] = typer.Option(
sdk_settings.PRIVATE_KEY_FILE, help=help_strings.PRIVATE_KEY_FILE
),
):
"""
Display your public address.
"""
if private_key is not None:
private_key_file = None
elif private_key_file and not private_key_file.exists():
typer.secho("No private key available", fg=RED)
raise typer.Exit(code=1)
account: AccountFromPrivateKey = _load_account(private_key, private_key_file)
typer.echo(account.get_address())
@app.command()
def export_private_key(
private_key: Optional[str] = typer.Option(
sdk_settings.PRIVATE_KEY_STRING, help=help_strings.PRIVATE_KEY
),
private_key_file: Optional[Path] = typer.Option(
sdk_settings.PRIVATE_KEY_FILE, help=help_strings.PRIVATE_KEY_FILE
),
):
"""
Display your private key.
"""
if private_key is not None:
private_key_file = None
elif private_key_file and not private_key_file.exists():
typer.secho("No private key available", fg=RED)
raise typer.Exit(code=1)
account: AccountFromPrivateKey = _load_account(private_key, private_key_file)
if hasattr(account, "private_key"):
private_key_hex: str = base64.b16encode(account.private_key).decode().lower()
typer.echo(f"0x{private_key_hex}")
else:
typer.secho(f"Private key cannot be read for {account}", fg=RED)
@app.command()
def path():
if sdk_settings.PRIVATE_KEY_FILE:
typer.echo(sdk_settings.PRIVATE_KEY_FILE)
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,074
|
aleph-im/aleph-client
|
refs/heads/master
|
/tests/integration/itest_forget.py
|
from typing import Callable, Dict
import pytest
from aleph.sdk import AlephClient, AuthenticatedAlephClient
from aleph.sdk.types import Account
from .config import REFERENCE_NODE, TARGET_NODE, TEST_CHANNEL
from .toolkit import try_until
from aleph.sdk import AlephClient
async def create_and_forget_post(
account: Account, emitter_node: str, receiver_node: str, channel=TEST_CHANNEL
) -> str:
async with AuthenticatedAlephClient(
account=account, api_server=receiver_node
) as receiver_client:
async def wait_matching_posts(
item_hash: str, condition: Callable[[Dict], bool], timeout: int = 5
):
return await try_until(
receiver_client.get_posts,
condition,
timeout=timeout,
hashes=[item_hash],
api_server=receiver_node,
)
async with AuthenticatedAlephClient(
account=account, api_server=receiver_node
) as emitter_client:
post_message, message_status = await emitter_client.create_post(
post_content="A considerate and politically correct post.",
post_type="POST",
channel="INTEGRATION_TESTS",
)
# Wait for the message to appear on the receiver. We don't check the values,
# they're checked in other integration tests.
get_post_response = await wait_matching_posts(
post_message.item_hash,
lambda response: len(response["posts"]) > 0,
)
print(get_post_response)
post_hash = post_message.item_hash
reason = "This well thought-out content offends me!"
forget_message, forget_status = await emitter_client.forget(
hashes=[post_hash],
reason=reason,
channel=channel,
)
assert forget_message.sender == account.get_address()
assert forget_message.content.reason == reason
assert forget_message.content.hashes == [post_hash]
print(forget_message)
# Wait until the message is forgotten
forgotten_posts = await wait_matching_posts(
post_hash,
lambda response: "forgotten_by" in response["posts"][0],
timeout=15,
)
assert len(forgotten_posts["posts"]) == 1
forgotten_post = forgotten_posts["posts"][0]
assert forgotten_post["forgotten_by"] == [forget_message.item_hash]
assert forgotten_post["item_content"] is None
print(forgotten_post)
return post_hash
@pytest.mark.asyncio
async def test_create_and_forget_post_on_target(fixture_account):
"""
Create a post on the target node, then forget it and check that the change is propagated
to the reference node.
"""
_ = await create_and_forget_post(fixture_account, TARGET_NODE, REFERENCE_NODE)
@pytest.mark.asyncio
async def test_create_and_forget_post_on_reference(fixture_account):
"""
Create a post on the reference node, then forget it and check that the change is propagated
to the target node.
"""
_ = await create_and_forget_post(fixture_account, REFERENCE_NODE, TARGET_NODE)
@pytest.mark.asyncio
async def test_forget_a_forget_message(fixture_account):
"""
Attempts to forget a forget message. This should fail.
"""
# TODO: this test should be moved to the PyAleph API tests, once a framework is in place.
async with AlephClient(api_server=TARGET_NODE) as client:
post_hash = await create_and_forget_post(
fixture_account, TARGET_NODE, TARGET_NODE
)
get_post_response = await client.get_posts(hashes=[post_hash])
assert len(get_post_response["posts"]) == 1
post = get_post_response["posts"][0]
forget_message_hash = post["forgotten_by"][0]
async with AuthenticatedAlephClient(account=fixture_account, api_server=TARGET_NODE) as my_client:
forget_message, forget_status = await my_client.forget(
hashes=[forget_message_hash],
reason="I want to remember this post. Maybe I can forget I forgot it?",
channel=TEST_CHANNEL,
)
print(forget_message)
get_forget_message_response = await client.get_messages(
hashes=[forget_message_hash], channels=[TEST_CHANNEL]
)
assert len(get_forget_message_response.messages) == 1
forget_message = get_forget_message_response.messages[0]
print(forget_message)
assert "forgotten_by" not in forget_message
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,075
|
aleph-im/aleph-client
|
refs/heads/master
|
/src/aleph_client/exceptions.py
|
from abc import ABC
class QueryError(ABC, ValueError):
"""The result of an API query is inconsistent."""
pass
class MessageNotFoundError(QueryError):
"""A message was expected but could not be found."""
pass
class MultipleMessagesError(QueryError):
"""Multiple messages were found when a single message is expected."""
pass
class BroadcastError(Exception):
"""
Data could not be broadcast to the aleph.im network.
"""
pass
class InvalidMessageError(BroadcastError):
    """Raised when a message cannot be broadcast because it does not follow
    the aleph.im message specification."""
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,076
|
aleph-im/aleph-client
|
refs/heads/master
|
/tests/unit/test_utils.py
|
from aleph_message.models import (
AggregateMessage,
ForgetMessage,
MessageType,
PostMessage,
ProgramMessage,
StoreMessage,
)
from aleph_client.utils import get_message_type_value
def test_get_message_type_value():
    """Each message model class must map to its MessageType enum member."""
    expected = {
        PostMessage: MessageType.post,
        AggregateMessage: MessageType.aggregate,
        StoreMessage: MessageType.store,
        ProgramMessage: MessageType.program,
        ForgetMessage: MessageType.forget,
    }
    for model, message_type in expected.items():
        assert get_message_type_value(model) == message_type
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,077
|
aleph-im/aleph-client
|
refs/heads/master
|
/tests/unit/conftest.py
|
# -*- coding: utf-8 -*-
"""
Dummy conftest.py for aleph_client.
If you don't know what this is for, just leave it empty.
Read more about conftest.py under:
https://pytest.org/latest/plugins.html
"""
from pathlib import Path
from tempfile import NamedTemporaryFile
import aleph.sdk.chains.ethereum as ethereum
import aleph.sdk.chains.sol as solana
import aleph.sdk.chains.tezos as tezos
import pytest
from aleph.sdk.chains.common import get_fallback_private_key
@pytest.fixture
def fallback_private_key() -> bytes:
    """Yield a freshly generated private key stored in a throwaway file."""
    with NamedTemporaryFile() as key_file:
        yield get_fallback_private_key(path=Path(key_file.name))
@pytest.fixture
def ethereum_account() -> ethereum.ETHAccount:
    """Yield a fallback Ethereum account backed by a temporary key file.

    The file is created with ``delete=False`` so that it can be re-opened
    by name on all platforms; the original fixture never deleted it, which
    leaked one temp file per test run. It is now removed after the test.
    """
    with NamedTemporaryFile(delete=False) as private_key_file:
        private_key_file.close()
        try:
            yield ethereum.get_fallback_account(path=Path(private_key_file.name))
        finally:
            Path(private_key_file.name).unlink(missing_ok=True)
@pytest.fixture
def solana_account() -> solana.SOLAccount:
    """Yield a fallback Solana account backed by a temporary key file.

    The key file is removed after the test; the original fixture used
    ``delete=False`` but never deleted the file (temp-file leak).
    """
    with NamedTemporaryFile(delete=False) as private_key_file:
        private_key_file.close()
        try:
            yield solana.get_fallback_account(path=Path(private_key_file.name))
        finally:
            Path(private_key_file.name).unlink(missing_ok=True)
@pytest.fixture
def tezos_account() -> tezos.TezosAccount:
    """Yield a fallback Tezos account backed by a temporary key file.

    The key file is removed after the test; the original fixture used
    ``delete=False`` but never deleted the file (temp-file leak).
    """
    with NamedTemporaryFile(delete=False) as private_key_file:
        private_key_file.close()
        try:
            yield tezos.get_fallback_account(path=Path(private_key_file.name))
        finally:
            Path(private_key_file.name).unlink(missing_ok=True)
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,078
|
aleph-im/aleph-client
|
refs/heads/master
|
/src/aleph_client/commands/files.py
|
import logging
from pathlib import Path
from typing import Optional
import typer
from aleph.sdk import AuthenticatedAlephClient
from aleph.sdk.account import _load_account
from aleph.sdk.conf import settings as sdk_settings
from aleph.sdk.types import AccountFromPrivateKey, StorageEnum
from aleph_message.models import StoreMessage
from aleph_message.status import MessageStatus
from aleph_client.commands import help_strings
from aleph_client.commands.utils import setup_logging
# Module-level logger for the file commands.
logger = logging.getLogger(__name__)
# Typer sub-application; mounted as the `file` command group by __main__.
app = typer.Typer()
@app.command()
def pin(
    item_hash: str = typer.Argument(..., help="IPFS hash to pin on aleph.im"),
    channel: Optional[str] = typer.Option(default=None, help=help_strings.CHANNEL),
    private_key: Optional[str] = typer.Option(
        sdk_settings.PRIVATE_KEY_STRING, help=help_strings.PRIVATE_KEY
    ),
    private_key_file: Optional[Path] = typer.Option(
        sdk_settings.PRIVATE_KEY_FILE, help=help_strings.PRIVATE_KEY_FILE
    ),
    ref: Optional[str] = typer.Option(None, help=help_strings.REF),
    debug: bool = False,
):
    """Persist a file from IPFS on aleph.im."""
    setup_logging(debug)
    # Resolve the signing account from either the inline key or the key file.
    account: AccountFromPrivateKey = _load_account(private_key, private_key_file)
    with AuthenticatedAlephClient(
        account=account, api_server=sdk_settings.API_HOST
    ) as session:
        # Send a STORE message referencing the existing IPFS content.
        message: StoreMessage
        status: MessageStatus
        message, status = session.create_store(
            file_hash=item_hash,
            storage_engine=StorageEnum.ipfs,
            channel=channel,
            ref=ref,
        )
        logger.debug("Upload finished")
        typer.echo(message.json(indent=4))
@app.command()
def upload(
    path: Path = typer.Argument(..., help="Path of the file to upload"),
    channel: Optional[str] = typer.Option(default=None, help=help_strings.CHANNEL),
    private_key: Optional[str] = typer.Option(
        sdk_settings.PRIVATE_KEY_STRING, help=help_strings.PRIVATE_KEY
    ),
    private_key_file: Optional[Path] = typer.Option(
        sdk_settings.PRIVATE_KEY_FILE, help=help_strings.PRIVATE_KEY_FILE
    ),
    ref: Optional[str] = typer.Option(None, help=help_strings.REF),
    debug: bool = False,
):
    """Upload and store a file on aleph.im."""
    setup_logging(debug)
    account: AccountFromPrivateKey = _load_account(private_key, private_key_file)
    with AuthenticatedAlephClient(
        account=account, api_server=sdk_settings.API_HOST
    ) as session:
        if not path.is_file():
            typer.echo(f"Error: File not found: '{path}'")
            raise typer.Exit(code=1)
        with open(path, "rb") as file_handle:
            logger.debug("Reading file")
            # TODO: Read in lazy mode instead of copying everything in memory
            file_content = file_handle.read()
        # Payloads above 4 MiB go to IPFS, smaller ones to native storage.
        if len(file_content) > 4 * 1024 * 1024:
            storage_engine = StorageEnum.ipfs
        else:
            storage_engine = StorageEnum.storage
        logger.debug("Uploading file")
        message: StoreMessage
        status: MessageStatus
        message, status = session.create_store(
            file_content=file_content,
            storage_engine=storage_engine,
            channel=channel,
            guess_mime_type=True,
            ref=ref,
        )
        logger.debug("Upload finished")
        typer.echo(message.json(indent=4))
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,079
|
aleph-im/aleph-client
|
refs/heads/master
|
/tests/unit/test_remote_account.py
|
from unittest.mock import patch
import pytest
from aleph.sdk.chains.ethereum import ETHAccount
from aleph.sdk.chains.remote import AccountProperties, RemoteAccount
@pytest.mark.asyncio
async def test_remote_storage():
    """A RemoteAccount backed by a mocked HTTP host must mirror the local
    ETH account: same address/public key, and remote signing must produce
    the same signed message as local signing."""
    host = "http://localhost:8888"
    # Fixed 32-byte test key so the expected signature below is deterministic.
    private_key = (
        b"xRR\xd4P\xdb9\x93(U\xa7\xd5\x81\xba\xc7\x9fiT"
        b"\xb8]\x12\x82 \xd1\x81\xc8\x94\xf0\xdav\xbb\xfb"
    )
    local_account = ETHAccount(private_key=private_key)
    with patch("aiohttp.client.ClientSession") as mock_session:
        # GET /properties returns the account metadata the remote host would expose.
        mock_session.get.return_value.__aenter__.return_value.json.return_value = (
            AccountProperties(
                chain="ETH",
                curve="secp256k1",
                address=local_account.get_address(),
                public_key=local_account.get_public_key(),
            ).dict()
        )
        remote_account = await RemoteAccount.from_crypto_host(
            host=host, session=mock_session
        )
        assert remote_account.get_address() == local_account.get_address()
        assert remote_account.get_public_key() == local_account.get_public_key()
        # --- Test remote signing ---
        # Pre-computed signature of `message` under `private_key`.
        expected_signature = (
            "0xa943de6c550ddf9cd1d3e58e77e9952b9f97e1bcb2c69"
            "a2f4ee56446dc8a38f02fb4a4e85c2d02efa26750456090"
            "3b983b4eef8b8030cc0d89550c18c69aef081c"
        )
        message = {
            "chain": "ETH",
            "sender": local_account.get_address(),
            "type": "POST",
            "item_hash": "HASH",
        }
        expected_signed_message = {
            "signature": expected_signature,
        }
        expected_signed_message.update(message)
        # POST /sign returns the signed message verbatim.
        mock_session.post.return_value.__aenter__.return_value.json.return_value = (
            expected_signed_message
        )
        signed_message = await remote_account.sign_message(message)
        assert set(signed_message.keys()) == set(message.keys()).union(["signature"])
        assert signed_message["signature"] == expected_signature
        # Remote signing must agree byte-for-byte with local signing.
        local_signed_message = await local_account.sign_message(message)
        assert signed_message == local_signed_message
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,080
|
aleph-im/aleph-client
|
refs/heads/master
|
/tests/unit/test_chain_nuls1_compat.py
|
"""The NULS1 implementation switched from lib `secp256k1` to `coincurve`.
This file tests that both implementations returns identical results.
"""
from pathlib import Path
from tempfile import NamedTemporaryFile
import pytest
import secp256k1
from aleph.sdk.chains.common import get_fallback_private_key
from aleph.sdk.chains.nuls1 import LOGGER, MESSAGE_TEMPLATE, NulsSignature, VarInt
from coincurve.keys import PrivateKey
# Fixed 32-byte private key shared by all tests in this module so that
# both signing backends can be compared deterministically.
SECRET = (
    b"\xc4\xfe\xe65\x96\x14\xb4:\r: \x05;\x12j\x9bJ"
    b"\x14\x0eY\xe3BY\x0f\xd6\xee\xfc\x9d\xfe\x8fv\xbc"
)
class NulsSignatureSecp256k1(NulsSignature):
    """NulsSignature variant re-implemented on top of the deprecated
    `secp256k1` library. Kept only to verify that the `coincurve` port
    produces byte-identical signatures (see module docstring)."""

    @classmethod
    def sign_data_deprecated(cls, pri_key: bytes, digest_bytes: bytes):
        """Sign a raw digest with the legacy secp256k1 backend.

        Returns a populated signature instance (pub_key, digest_bytes,
        sig_ser set).
        """
        # TODO: Test compatibility and remove
        privkey = secp256k1.PrivateKey(
            pri_key, raw=True
        )  # we expect to have a private key as bytes. unhexlify it before passing.
        item = cls()
        item.pub_key = privkey.pubkey.serialize()
        item.digest_bytes = digest_bytes
        # raw=True: sign the digest directly instead of hashing it again.
        sig_check = privkey.ecdsa_sign(digest_bytes, raw=True)
        print("sig_check", sig_check)
        item.sig_ser = privkey.ecdsa_serialize(sig_check)
        return item

    @classmethod
    def sign_message_deprecated(cls, pri_key: bytes, message):
        """Sign a message (length-prefixed, wrapped in MESSAGE_TEMPLATE)
        with the legacy secp256k1 backend."""
        # TODO: Test compatibility and remove
        # we expect to have a private key as bytes. unhexlify it before passing
        privkey = secp256k1.PrivateKey(pri_key, raw=True)
        item = cls()
        # NULS prefixes the payload with its VarInt-encoded length.
        message = VarInt(len(message)).encode() + message
        item.pub_key = privkey.pubkey.serialize()
        # item.digest_bytes = digest_bytes
        sig_check = privkey.ecdsa_sign(MESSAGE_TEMPLATE.format(message).encode())
        item.sig_ser = privkey.ecdsa_serialize(sig_check)
        return item

    def verify_deprecated(self, message):
        """Verify self.sig_ser over `message` with the legacy backend.

        Returns True on success, False on any verification error.
        """
        pub = secp256k1.PublicKey(self.pub_key, raw=True)
        # Same VarInt length prefix as in sign_message_deprecated.
        message = VarInt(len(message)).encode() + message
        print("message", message)
        # LOGGER.debug("Comparing with %r" % (MESSAGE_TEMPLATE.format(message).encode()))
        try:
            sig_raw = pub.ecdsa_deserialize(self.sig_ser)
            good = pub.ecdsa_verify(MESSAGE_TEMPLATE.format(message).encode(), sig_raw)
        except Exception:
            LOGGER.exception("Verification failed")
            good = False
        return good
def test_sign_data_deprecated():
    """The deprecated secp256k1-based signer still produces a signature."""
    signature = NulsSignature(data=None)
    with NamedTemporaryFile() as key_file:
        private_key = get_fallback_private_key(path=Path(key_file.name))
        assert signature
        digest = b"x" * (256 // 8)
        sign_deprecated = NulsSignatureSecp256k1.sign_data_deprecated(
            pri_key=private_key, digest_bytes=digest
        )
        assert sign_deprecated
@pytest.mark.asyncio
async def test_compare_sign_data():
    """coincurve and legacy secp256k1 must sign a digest identically."""
    private_key = PrivateKey(SECRET)
    digest = b"x" * (256 // 8)
    current: NulsSignature = NulsSignature.sign_data(
        pri_key=private_key.secret, digest_bytes=digest
    )
    legacy = NulsSignatureSecp256k1.sign_data_deprecated(
        pri_key=private_key.secret, digest_bytes=digest
    )
    assert current.sig_ser is not None
    assert legacy.sig_ser is not None
    assert len(current.sig_ser) == len(legacy.sig_ser)
    assert current.sig_ser == legacy.sig_ser
    assert current == legacy
@pytest.mark.asyncio
async def test_compare_sign_message():
    """coincurve and legacy secp256k1 must sign a message identically."""
    private_key = PrivateKey(SECRET)
    payload = b"GOOD"
    current: NulsSignature = await NulsSignature.sign_message(
        pri_key=private_key.secret, message=payload
    )
    legacy = NulsSignatureSecp256k1.sign_message_deprecated(
        pri_key=private_key.secret, message=payload
    )
    assert current.sig_ser is not None
    assert legacy.sig_ser is not None
    assert len(current.sig_ser) == len(legacy.sig_ser)
    assert current.sig_ser == legacy.sig_ser
    assert current == legacy
@pytest.mark.asyncio
async def test_verify():
    """Both the current and the deprecated verifier accept the signed
    message and reject a different one."""
    private_key = PrivateKey(SECRET)
    payload = b"GOOD"
    signature: NulsSignatureSecp256k1 = await NulsSignatureSecp256k1.sign_message(
        pri_key=private_key.secret, message=payload
    )
    assert signature.verify(message=payload)
    assert not signature.verify(message=b"BAD")
    assert signature.verify_deprecated(message=payload)
    assert not signature.verify_deprecated(message=b"BAD")
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,081
|
aleph-im/aleph-client
|
refs/heads/master
|
/tests/unit/test_chain_solana.py
|
import json
from dataclasses import asdict, dataclass
from pathlib import Path
from tempfile import NamedTemporaryFile
import base58
import pytest
from aleph.sdk.chains.common import get_verification_buffer
from aleph.sdk.chains.sol import SOLAccount, get_fallback_account
from nacl.signing import VerifyKey
@dataclass
class Message:
    # Minimal stand-in for an aleph.im message: only the fields read by
    # get_verification_buffer() in the tests below.
    chain: str
    sender: str
    type: str
    item_hash: str
def test_get_fallback_account():
    """A fallback SOL account uses a 32-byte curve25519 key stored in a
    temporary file."""
    with NamedTemporaryFile() as private_key_file:
        account: SOLAccount = get_fallback_account(path=Path(private_key_file.name))
        assert account.CHAIN == "SOL"
        assert account.CURVE == "curve25519"
        assert account._signing_key.verify_key
        # isinstance() is the idiomatic type check (PEP 8), replacing type() ==.
        assert isinstance(account.private_key, bytes)
        assert len(account.private_key) == 32
@pytest.mark.asyncio
async def test_SOLAccount(solana_account):
    """Signing must add a base58 ed25519 signature that verifies against
    the message's verification buffer and matches the sender key.

    Idiom fix: type() == comparisons replaced by isinstance() (PEP 8);
    behavior is otherwise unchanged.
    """
    message = asdict(
        Message("SOL", solana_account.get_address(), "SomeType", "ItemHash")
    )
    initial_message = message.copy()
    # sign_message mutates `message` in place, adding the "signature" key.
    await solana_account.sign_message(message)
    assert message["signature"]
    address = message["sender"]
    assert address
    assert isinstance(address, str)
    # assert len(address) == 44  # can also be 43?
    signature = json.loads(message["signature"])
    pubkey = base58.b58decode(signature["publicKey"])
    assert isinstance(pubkey, bytes)
    assert len(pubkey) == 32
    # modeled according to https://github.com/aleph-im/pyaleph/blob/master/src/aleph/chains/solana.py
    verify_key = VerifyKey(pubkey)
    verification_buffer = get_verification_buffer(message)
    # Signing must not alter the fields covered by the verification buffer.
    assert get_verification_buffer(initial_message) == verification_buffer
    verif = verify_key.verify(
        verification_buffer, signature=base58.b58decode(signature["signature"])
    )
    assert verif == verification_buffer
    assert message["sender"] == signature["publicKey"]
@pytest.mark.asyncio
async def test_decrypt_curve25516(solana_account):
    """A round-trip through encrypt()/decrypt() restores the plaintext.

    Idiom fix: type() == comparisons replaced by isinstance() (PEP 8).
    """
    assert solana_account.CURVE == "curve25519"
    content = b"SomeContent"
    encrypted = await solana_account.encrypt(content)
    assert isinstance(encrypted, bytes)
    decrypted = await solana_account.decrypt(encrypted)
    assert isinstance(decrypted, bytes)
    assert content == decrypted
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,082
|
aleph-im/aleph-client
|
refs/heads/master
|
/src/aleph_client/commands/help_strings.py
|
# Shared --help texts for the aleph CLI, referenced from the typer
# Option/Argument declarations in aleph_client.commands.*.
IPFS_HASH = "IPFS Content identifier (CID)"
CHANNEL = "Aleph.im network channel where the message is located"
PRIVATE_KEY = "Your private key. Cannot be used with --private-key-file"
PRIVATE_KEY_FILE = "Path to your private key file"
REF = "Checkout https://aleph-im.gitbook.io/aleph-js/api-resources-reference/posts"
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,083
|
aleph-im/aleph-client
|
refs/heads/master
|
/tests/unit/test_chain_tezos.py
|
from dataclasses import asdict, dataclass
from pathlib import Path
from tempfile import NamedTemporaryFile
import pytest
from aleph.sdk.chains.tezos import TezosAccount, get_fallback_account
@dataclass
class Message:
    # Minimal stand-in for an aleph.im message: only the fields needed by
    # sign_message() in the tests below.
    chain: str
    sender: str
    type: str
    item_hash: str
def test_get_fallback_account(tezos_account: TezosAccount):
    """A fallback Tezos account is created with a secp256k1 key."""
    with NamedTemporaryFile() as key_file:
        fallback: TezosAccount = get_fallback_account(path=Path(key_file.name))
        assert fallback.CHAIN == "TEZOS"
        assert fallback.CURVE == "secp256k1"
        assert fallback._account.public_key()
@pytest.mark.asyncio
async def test_tezos_account(tezos_account: TezosAccount):
    """Signing yields a well-formed signature, address and public key."""
    payload = asdict(
        Message("TEZOS", tezos_account.get_address(), "SomeType", "ItemHash")
    )
    signed = await tezos_account.sign_message(payload)
    assert signed["signature"]
    assert len(signed["signature"]) == 188
    address = tezos_account.get_address()
    assert address is not None
    assert isinstance(address, str)
    assert len(address) == 36
    pubkey = tezos_account.get_public_key()
    assert isinstance(pubkey, str)
    assert len(pubkey) == 55
@pytest.mark.asyncio
async def test_decrypt_secp256k1(tezos_account: TezosAccount):
    """A round-trip through encrypt()/decrypt() restores the plaintext."""
    assert tezos_account.CURVE == "secp256k1"
    plaintext = b"SomeContent"
    ciphertext = await tezos_account.encrypt(plaintext)
    assert isinstance(ciphertext, bytes)
    decrypted = await tezos_account.decrypt(ciphertext)
    assert isinstance(decrypted, bytes)
    assert plaintext == decrypted
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,084
|
aleph-im/aleph-client
|
refs/heads/master
|
/tests/integration/itest_posts.py
|
import pytest
from aleph_message.models import PostMessage
from aleph.sdk.exceptions import MessageNotFoundError
from tests.integration.toolkit import try_until
from .config import REFERENCE_NODE, TARGET_NODE
from aleph.sdk import AuthenticatedAlephClient
from aleph.sdk.conf import settings as sdk_settings
from aleph.sdk import AlephClient
from aleph_message.status import MessageStatus
async def get_message(item_hash: str):
    """Fetch a POST message by hash, or None when it is not (yet) indexed."""
    async with AlephClient(api_server=sdk_settings.API_HOST) as client:
        try:
            return await client.get_message(item_hash, message_type=PostMessage)
        except MessageNotFoundError:
            return None
async def create_message_on_target(
    fixture_account, emitter_node: str, receiver_node: str
):
    """
    Create a POST message on the target node, then fetch it from the reference node.
    """
    # NOTE(review): emitter_node and receiver_node are accepted but never
    # used — the client below always talks to sdk_settings.API_HOST.
    # Confirm whether the nodes were meant to be wired in here.
    data = {"content": "test"}
    async with AuthenticatedAlephClient(
        account=fixture_account, api_server=sdk_settings.API_HOST
    ) as client:
        # sync=True: wait for the node to process the message before returning.
        message, status = await client.create_post(
            post_content=data,
            post_type="POST",
            ref=None,
            channel="INTEGRATION_TESTS",
            inline=True,
            sync=True,
        )
    # Poll until the message is queryable (indexing is asynchronous).
    response = await try_until(
        get_message,
        lambda r: r is not None and r.content is not None,
        timeout=5,
        time_between_attempts=0.5,
        item_hash=message.item_hash,
    )
    assert status == MessageStatus.PROCESSED
    assert response.content == message.content
@pytest.mark.asyncio
async def test_create_message_on_target(fixture_account):
    """
    Create a message on the target node and check that it becomes
    fetchable from the reference node.
    """
    await create_message_on_target(
        fixture_account, emitter_node=REFERENCE_NODE, receiver_node=TARGET_NODE
    )
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,085
|
aleph-im/aleph-client
|
refs/heads/master
|
/src/aleph_client/commands/program.py
|
import json
import logging
from base64 import b16decode, b32encode
from pathlib import Path
from typing import Dict, List, Optional
from zipfile import BadZipFile
import typer
from aleph.sdk import AuthenticatedAlephClient
from aleph.sdk.account import _load_account
from aleph.sdk.conf import settings as sdk_settings
from aleph.sdk.types import AccountFromPrivateKey, StorageEnum
from aleph_message.models import (
ItemHash,
MessagesResponse,
ProgramContent,
ProgramMessage,
StoreMessage,
)
from aleph_message.status import MessageStatus
from aleph_client.commands import help_strings
from aleph_client.commands.utils import (
input_multiline,
prompt_for_volumes,
setup_logging,
volume_to_dict,
yes_no_input,
)
from aleph_client.conf import settings
from aleph_client.utils import create_archive
# Module-level logger for the program commands.
logger = logging.getLogger(__name__)
# Typer sub-application; mounted as the `program` command group by __main__.
app = typer.Typer()
@app.command()
def upload(
    path: Path = typer.Argument(..., help="Path to your source code"),
    entrypoint: str = typer.Argument(..., help="Your program entrypoint"),
    channel: Optional[str] = typer.Option(default=None, help=help_strings.CHANNEL),
    memory: int = typer.Option(
        sdk_settings.DEFAULT_VM_MEMORY, help="Maximum memory allocation on vm in MiB"
    ),
    vcpus: int = typer.Option(
        sdk_settings.DEFAULT_VM_VCPUS, help="Number of virtual cpus to allocate."
    ),
    timeout_seconds: float = typer.Option(
        sdk_settings.DEFAULT_VM_TIMEOUT,
        help="If vm is not called after [timeout_seconds] it will shutdown",
    ),
    private_key: Optional[str] = typer.Option(
        sdk_settings.PRIVATE_KEY_STRING, help=help_strings.PRIVATE_KEY
    ),
    private_key_file: Optional[Path] = typer.Option(
        sdk_settings.PRIVATE_KEY_FILE, help=help_strings.PRIVATE_KEY_FILE
    ),
    print_messages: bool = typer.Option(False),
    print_code_message: bool = typer.Option(False),
    print_program_message: bool = typer.Option(False),
    runtime: str = typer.Option(
        None,
        help="Hash of the runtime to use for your program. Defaults to aleph debian with Python3.8 and node. You can also create your own runtime and pin it",
    ),
    beta: bool = typer.Option(False),
    debug: bool = False,
    persistent: bool = False,
    persistent_volume: Optional[List[str]] = typer.Option(
        None,
        help="""Takes 3 parameters
        A persistent volume is allocated on the host machine at any time
        eg: Use , to seperate the parameters and no spaces
        --persistent_volume persistence=host,name=my-volume,size=100 ./my-program main:app
        """,
    ),
    ephemeral_volume: Optional[List[str]] = typer.Option(
        None,
        help="""Takes 1 parameter Only
        Ephemeral volumes can move and be removed by the host,Garbage collected basically, when the VM isn't running
        eg: Use , to seperate the parameters and no spaces
        --ephemeral-volume size_mib=100 ./my-program main:app """,
    ),
    immutable_volume: Optional[List[str]] = typer.Option(
        None,
        help="""Takes 3 parameters
        Immutable volume is one whose contents do not change
        eg: Use , to seperate the parameters and no spaces
        --immutable-volume ref=25a393222692c2f73489dc6710ae87605a96742ceef7b91de4d7ec34bb688d94,use_latest=true,mount=/mnt/volume ./my-program main:app
        """,
    ),
):
    """Register a program to run on aleph.im virtual machines from a zip archive."""
    setup_logging(debug)
    path = path.absolute()
    # Zip the source tree (or accept an existing archive).
    try:
        path_object, encoding = create_archive(path)
    except BadZipFile:
        typer.echo("Invalid zip archive")
        raise typer.Exit(3)
    except FileNotFoundError:
        typer.echo("No such file or directory")
        raise typer.Exit(4)
    account: AccountFromPrivateKey = _load_account(private_key, private_key_file)
    # Prompt interactively for the runtime hash when none was given.
    runtime = (
        runtime
        or input(f"Ref of runtime ? [{sdk_settings.DEFAULT_RUNTIME_ID}] ")
        or sdk_settings.DEFAULT_RUNTIME_ID
    )
    volumes = []
    # Check if the volumes are empty
    # NOTE(review): any volume kind left as None triggers the interactive
    # prompt, even when other volume kinds were passed on the CLI (the
    # condition uses `or`, not `and`) — confirm this is intended.
    if (
        persistent_volume is None
        or ephemeral_volume is None
        or immutable_volume is None
    ):
        for volume in prompt_for_volumes():
            volumes.append(volume)
            typer.echo("\n")
    # else Parse all the volumes that have passed as the cli parameters and put it into volume list
    else:
        if len(persistent_volume) > 0:
            persistent_volume_dict = volume_to_dict(volume=persistent_volume)
            volumes.append(persistent_volume_dict)
        if len(ephemeral_volume) > 0:
            ephemeral_volume_dict = volume_to_dict(volume=ephemeral_volume)
            volumes.append(ephemeral_volume_dict)
        if len(immutable_volume) > 0:
            immutable_volume_dict = volume_to_dict(volume=immutable_volume)
            volumes.append(immutable_volume_dict)
    # Beta feature: let the program subscribe to message patterns (JSON input).
    subscriptions: Optional[List[Dict]]
    if beta and yes_no_input("Subscribe to messages ?", default=False):
        content_raw = input_multiline()
        try:
            subscriptions = json.loads(content_raw)
        except json.decoder.JSONDecodeError:
            typer.echo("Not valid JSON")
            raise typer.Exit(code=2)
    else:
        subscriptions = None
    with AuthenticatedAlephClient(
        account=account, api_server=sdk_settings.API_HOST
    ) as client:
        # Upload the source code
        with open(path_object, "rb") as fd:
            logger.debug("Reading file")
            # TODO: Read in lazy mode instead of copying everything in memory
            file_content = fd.read()
            # Archives above 4 MiB go to IPFS, smaller ones to native storage.
            storage_engine = (
                StorageEnum.ipfs
                if len(file_content) > 4 * 1024 * 1024
                else StorageEnum.storage
            )
            logger.debug("Uploading file")
            user_code: StoreMessage
            status: MessageStatus
            user_code, status = client.create_store(
                file_content=file_content,
                storage_engine=storage_engine,
                channel=channel,
                guess_mime_type=True,
                ref=None,
            )
            logger.debug("Upload finished")
            if print_messages or print_code_message:
                typer.echo(f"{user_code.json(indent=4)}")
            program_ref = user_code.item_hash
        # Register the program
        message, status = client.create_program(
            program_ref=program_ref,
            entrypoint=entrypoint,
            runtime=runtime,
            storage_engine=StorageEnum.storage,
            channel=channel,
            memory=memory,
            vcpus=vcpus,
            timeout_seconds=timeout_seconds,
            persistent=persistent,
            encoding=encoding,
            volumes=volumes,
            subscriptions=subscriptions,
        )
        logger.debug("Upload finished")
        if print_messages or print_program_message:
            typer.echo(f"{message.json(indent=4)}")
        item_hash: ItemHash = message.item_hash
        # VM gateways address programs by the base32 form of the item hash.
        hash_base32 = (
            b32encode(b16decode(item_hash.upper())).strip(b"=").lower().decode()
        )
        typer.echo(
            f"Your program has been uploaded on aleph.im .\n\n"
            "Available on:\n"
            f" {settings.VM_URL_PATH.format(hash=item_hash)}\n"
            f" {settings.VM_URL_HOST.format(hash_base32=hash_base32)}\n"
            "Visualise on:\n https://explorer.aleph.im/address/"
            f"{message.chain}/{message.sender}/message/PROGRAM/{item_hash}\n"
        )
@app.command()
def update(
    item_hash: str,
    path: Path,
    private_key: Optional[str] = sdk_settings.PRIVATE_KEY_STRING,
    private_key_file: Optional[Path] = sdk_settings.PRIVATE_KEY_FILE,
    print_message: bool = True,
    debug: bool = False,
):
    """Update the code of an existing program"""
    setup_logging(debug)
    account = _load_account(private_key, private_key_file)
    path = path.absolute()
    with AuthenticatedAlephClient(
        account=account, api_server=sdk_settings.API_HOST
    ) as client:
        # Look up the program message, then the STORE message holding its code.
        program_message: ProgramMessage = client.get_message(
            item_hash=item_hash, message_type=ProgramMessage
        )
        code_ref = program_message.content.code.ref
        code_message: StoreMessage = client.get_message(
            item_hash=code_ref, message_type=StoreMessage
        )
        try:
            path, encoding = create_archive(path)
        except BadZipFile:
            typer.echo("Invalid zip archive")
            raise typer.Exit(3)
        except FileNotFoundError:
            typer.echo("No such file or directory")
            raise typer.Exit(4)
        # The VM expects the amended code in the encoding it was deployed with.
        if encoding != program_message.content.code.encoding:
            logger.error(
                "Code must be encoded with the same encoding as the previous version "
                # Fix: the error message was missing its closing parenthesis.
                f"('{encoding}' vs '{program_message.content.code.encoding}')"
            )
            raise typer.Exit(1)
        # Upload the source code
        with open(path, "rb") as fd:
            logger.debug("Reading file")
            # TODO: Read in lazy mode instead of copying everything in memory
            file_content = fd.read()
            logger.debug("Uploading file")
            # Posting with ref=code_message.item_hash amends the existing
            # STORE message instead of creating an unrelated one.
            message, status = client.create_store(
                file_content=file_content,
                storage_engine=code_message.content.item_type,
                channel=code_message.channel,
                guess_mime_type=True,
                ref=code_message.item_hash,
            )
            logger.debug("Upload finished")
            if print_message:
                typer.echo(f"{message.json(indent=4)}")
@app.command()
def unpersist(
    item_hash: str,
    private_key: Optional[str] = sdk_settings.PRIVATE_KEY_STRING,
    private_key_file: Optional[Path] = sdk_settings.PRIVATE_KEY_FILE,
    debug: bool = False,
):
    """Stop a persistent virtual machine by making it non-persistent"""
    setup_logging(debug)
    account = _load_account(private_key, private_key_file)
    with AuthenticatedAlephClient(
        account=account, api_server=sdk_settings.API_HOST
    ) as session:
        response: MessagesResponse = session.get_messages(hashes=[item_hash])
        original: ProgramMessage = response.messages[0]
        # Publish an amended copy of the program with persistence disabled.
        content: ProgramContent = original.content.copy()
        content.on.persistent = False
        content.replaces = original.item_hash
        amended, _status = session.submit(
            content=content.dict(exclude_none=True),
            message_type=original.type,
            channel=original.channel,
        )
        typer.echo(f"{amended.json(indent=4)}")
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,086
|
aleph-im/aleph-client
|
refs/heads/master
|
/tests/unit/test_chain_nuls1.py
|
import pytest
from aleph.sdk.chains.nuls1 import NulsSignature
from coincurve.keys import PrivateKey
# Fixed 32-byte private key shared by all tests in this module for
# deterministic signatures.
SECRET = (
    b"\xc4\xfe\xe65\x96\x14\xb4:\r: \x05;\x12j\x9bJ"
    b"\x14\x0eY\xe3BY\x0f\xd6\xee\xfc\x9d\xfe\x8fv\xbc"
)
@pytest.mark.asyncio
async def test_sign_data():
    """Signing a raw digest populates key, digest and serialized signature.

    Idiom fixes (PEP 8): isinstance() instead of type() ==, `is None`
    instead of `== None`; the checks themselves are unchanged.
    """
    private_key = PrivateKey(SECRET)
    sign: NulsSignature = NulsSignature.sign_data(
        pri_key=private_key.secret, digest_bytes=b"x" * (256 // 8)
    )
    assert sign
    assert isinstance(sign.pub_key, bytes)
    assert isinstance(sign.digest_bytes, bytes)
    assert isinstance(sign.sig_ser, bytes)
    assert sign.ecc_type is None
@pytest.mark.asyncio
async def test_sign_message():
    """Signing a message serializes a 70-byte signature; digest stays unset.

    Idiom fixes (PEP 8): isinstance() instead of type() ==, `is None`
    instead of `== None`; the checks themselves are unchanged.
    """
    private_key = PrivateKey(SECRET)
    message = b"GOOD"
    sign: NulsSignature = await NulsSignature.sign_message(
        pri_key=private_key.secret, message=message
    )
    assert sign.sig_ser is not None
    assert len(sign.sig_ser) == 70
    assert sign
    assert isinstance(sign.pub_key, bytes)
    assert sign.digest_bytes is None
    assert isinstance(sign.sig_ser, bytes)
    assert sign.ecc_type is None
@pytest.mark.asyncio
async def test_verify():
    """verify() accepts the signed message and rejects a different one."""
    private_key = PrivateKey(SECRET)
    payload = b"GOOD"
    signature: NulsSignature = await NulsSignature.sign_message(
        pri_key=private_key.secret, message=payload
    )
    assert signature.verify(message=payload)
    assert not signature.verify(message=b"BAD")
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,087
|
aleph-im/aleph-client
|
refs/heads/master
|
/src/aleph_client/__main__.py
|
"""
Aleph Client command-line interface.
"""
import typer
from .commands import account, aggregate, files, message, program
app = typer.Typer()
app.add_typer(account.app, name="account", help="Manage account")
app.add_typer(
aggregate.app, name="aggregate", help="Manage aggregate messages on aleph.im"
)
app.add_typer(
files.app, name="file", help="File uploading and pinning on IPFS and aleph.im"
)
app.add_typer(
message.app,
name="message",
help="Post, amend, watch and forget messages on aleph.im",
)
app.add_typer(
program.app, name="program", help="Upload and update programs on aleph.im VM"
)
if __name__ == "__main__":
app()
|
{"/tests/integration/itest_aggregates.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"], "/src/aleph_client/vm/cache.py": ["/src/aleph_client/conf.py"], "/tests/unit/test_vm_app.py": ["/tests/unit/test_app/main.py"], "/tests/integration/itest_forget.py": ["/tests/integration/config.py", "/tests/integration/toolkit.py"], "/tests/integration/itest_posts.py": ["/tests/integration/toolkit.py", "/tests/integration/config.py"]}
|
14,089
|
kritik123/Pavement-Damage-Detection
|
refs/heads/master
|
/project_gui_shi_main_right.py
|
import sys
try:
import Tkinter as tk
except ImportError:
import tkinter as tk
try:
import ttk
py3 = False
except ImportError:
import tkinter.ttk as ttk
py3 = True
import project_gui_shi_main_right_support
import os.path
from tkinter import filedialog
from PIL import ImageTk,Image
from prediction import pred
from tkinter import messagebox
import tkinter as tk
def vp_start_gui():
'''Starting point when module is the main routine.'''
global val, w, root
global prog_location
prog_call = sys.argv[0]
prog_location = os.path.split(prog_call)[0]
root = tk.Tk()
top = Toplevel1 (root)
project_gui_shi_main_right_support.init(root, top)
root.mainloop()
w = None
def create_Toplevel1(root, *args, **kwargs):
'''Starting point when module is imported by another program.'''
global w, w_win, rt
global prog_location
prog_call = sys.argv[0]
prog_location = os.path.split(prog_call)[0]
rt = root
w = tk.Toplevel (root)
top = Toplevel1 (w)
project_gui_shi_main_right_support.init(w, top, *args, **kwargs)
return (w, top)
def destroy_Toplevel1():
global w
w.destroy()
w = None
class Toplevel1:
def __init__(self, top=None):
'''This class configures and populates the toplevel window.
top is the toplevel containing window.'''
_bgcolor = '#d9d9d9' # X11 color: 'gray85'
_fgcolor = '#000000' # X11 color: 'black'
_compcolor = '#d9d9d9' # X11 color: 'gray85'
_ana1color = '#d9d9d9' # X11 color: 'gray85'
_ana2color = '#ececec' # Closest X11 color: 'gray92'
font12 = "-family Arimo -size 10 -weight bold -slant italic " \
"-underline 0 -overstrike 0"
font13 = "-family Arimo -size 10 -weight bold -slant roman " \
"-underline 0 -overstrike 0"
font9 = "-family Arimo -size 10 -weight normal -slant roman " \
"-underline 0 -overstrike 0"
top.geometry("996x638+360+52")
top.minsize(1, 1)
top.maxsize(1351, 738)
top.resizable(1, 1)
top.title("PAVEMENT CONDITION DETECTOR")
top.configure(background="#d6d8ab")
top.configure(highlightcolor="#5e5e5e")
self.menubar = tk.Menu(top,font=font9,bg='#cdd8d3',fg=_fgcolor)
top.configure(menu = self.menubar)
# import tkinter as tk
self.Canvas1 = tk.Canvas(top)
self.Canvas1.place(relx=0.0, rely=0.0, relheight=1.624, relwidth=2.268)
self.photo=Image.open('C:\\Users\\KRITIK SHIVANSHU\\Desktop\\Pavement_condition_assessment-master\\main1-0.jpg')
self.photo_=ImageTk.PhotoImage(self.photo)
self.Canvas1.create_image(0,0,image=self.photo_,anchor='nw')
# self.Canvas1=tk.Canvas(top)
# self.Canvas1.place(relx=0.0,rely=0.0,relheight=1.624,relwidth=2.268)
# self.photo=Image.open("C://Users//GOVINDA//Downloads//sih_main//main1-0.jpg")
# self.photo_=ImageTk.PhotoImage(self.photo)
# self.Canvas1.create_image(0,0,image=self.photo_,anchor=NW)
# """self.Label1 = tk.Label(top)
# self.Label1.place(relx=-0.01, rely=0.078, height=650, width=515)
# self.Label1.configure(background="#d6d8ab")
# photo_location = os.path.join(prog_location,"C:/Users/GOVINDA/Downloads/sih_main/main1-0.png")
# global _img0
# _img0 = tk.PhotoImage(file=photo_location)
# self.Label1.configure(image=_img0)"""
self.Label_insert_image = tk.Label(top)
self.Label_insert_image .place(relx=0.422, rely=0.204, height=250, width=475)
self.Label_insert_image .configure(activebackground="#ffffff")
self.Label_insert_image .configure(background="#ffffff")
self.Label_insert_image .configure(highlightbackground="#ffffff")
self.Button_ok = tk.Button(top)
self.Button_ok.place(relx=0.672, rely=0.643, height=30, width=96)
self.Button_ok.configure(background="#beb323")
self.Button_ok.configure(font=font12)
self.Button_ok.configure(text='''Ok''')
self.Button_add_image = tk.Button(top)
self.Button_add_image .place(relx=0.472, rely=0.643, height=30, width=96)
self.Button_add_image .configure(background="#beb323")
self.Button_add_image .configure(font=font12)
self.Button_add_image .configure(text='''Add Image''')
self.Button_add_image.configure(command=self.add_image)
self.Button_ok.configure(command=self.ok_button)
def add_image(self):
self.file_name=filedialog.askopenfilename(filetypes=(("JPG","*.jpg"),("All files","*.*")))
self.path=self.file_name
self.photo=Image.open(self.file_name).resize((475,250),Image.ANTIALIAS)
self.photo_image=ImageTk.PhotoImage(self.photo)
self.Label_insert_image.configure(image=self.photo_image)
def ok_button(self):
self.Label_predict=pred(self.path)
self.x=self.Label_predict
print(type(self.x))
messagebox.showinfo("Prediction",self.Label_predict)
if __name__ == '__main__':
vp_start_gui()
|
{"/project_gui_shi_main_right.py": ["/prediction.py"]}
|
14,090
|
kritik123/Pavement-Damage-Detection
|
refs/heads/master
|
/prediction.py
|
# from keras.preprocessing import image
import tensorflow as tf
from tensorflow.keras.preprocessing import image
import numpy as np
# from keras.models import load_model
# NOTE(review): the model loads at import time from a machine-specific
# absolute path; running on another machine (or moving the .h5) breaks this.
model=tf.keras.models.load_model('C:\\Users\\KRITIK SHIVANSHU\\Desktop\\Pavement_condition_assessment-master\\testing_model_first.h5')
def pred(image_path):
    """Classify a road photograph as normal or containing potholes.

    Args:
        image_path: path to an image file readable by Keras' image loader.

    Returns:
        A human-readable verdict string.
    """
    img = image.load_img(image_path, target_size=(250, 250))
    x = image.img_to_array(img)
    x = np.expand_dims(x, axis=0)  # model expects a leading batch dimension
    classes = model.predict(x, batch_size=6)
    # Single sigmoid output: >= 0.5 is the "pothole" class. The original
    # binarized to 0/1 and then re-compared, which this collapses.
    if classes[0][0] >= 0.5:
        return ("The road photograph contains potholes")
    return ("The road photograph is normal")
# def pred(image_path):
# path=image_path
# type(path)
# print("nihal")
# classes=path
# return classes
|
{"/project_gui_shi_main_right.py": ["/prediction.py"]}
|
14,143
|
mdrdatalab/pykraken
|
refs/heads/master
|
/connection.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jun 24 05:58:36 2017
@author: michael
"""
import http.client
import urllib.request
import urllib.parse
import urllib.error
class Connection(object):
    """Thin wrapper around a persistent HTTPS connection to the Kraken host."""

    def __init__(self, uri='api.kraken.com', timeout=30):
        # Default headers sent with every request.
        self.headers = {
            'User-Agent': 'pykraken'
        }
        self.conn = http.client.HTTPSConnection(uri, timeout=timeout)

    def __enter__(self):
        # Context-manager support so the socket is always closed.
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
        return False

    def close(self):
        """Close the underlying socket."""
        self.conn.close()

    def _request(self, url, req=None, headers=None):
        """POST `req` (form-encoded) to `url` and return the decoded body.

        Raises:
            http.client.HTTPException: on any non-2xx status.
        """
        if req is None:
            req = {}
        if headers is None:
            headers = {}
        data = urllib.parse.urlencode(req)
        headers.update(self.headers)
        self.conn.request('POST', url, data, headers)
        response = self.conn.getresponse()
        if response.status not in (200, 201, 202):
            # Drain the body so the keep-alive connection stays usable.
            response.read()
            raise http.client.HTTPException(response.status)
        return response.read().decode()
|
{"/priceHistory.py": ["/queries.py"], "/api.py": ["/connection.py"], "/portfolioHistory.py": ["/queries.py"], "/queries.py": ["/api.py"], "/simPortfolio-v0.py": ["/queries.py"]}
|
14,144
|
mdrdatalab/pykraken
|
refs/heads/master
|
/priceHistory.py
|
# -*- coding: utf-8 -*-
"""
Created on Sun Jul 2 20:05:15 2017
@author: michael
"""
import queries
import time
import csv
import os.path
import datetime
import numpy as np
def get_history(asset):
    """Download and cache to disk the full USD trade history for `asset`.

    Resumes from the cached `last` cursor when a history file already
    exists, then pages the Kraken Trades endpoint until an empty batch
    comes back.
    """
    pairs = queries.get_pairs()
    pairs = list(pairs.keys())
    last = 0
    trades = []
    #TODO: I don't like only having it in USD anymore... fix this?
    pair = asset+'ZUSD'
    # Resume from a previously saved history file when present.
    if os.path.isfile('data\\'+pair+'-history.csv'):
        history = hist_reader(pair)
        last = history['last']
        trades = history['trades']
    #why list doesn't come out flat?
    if pair in pairs:
        print('yes')
        # Seed `temp` so the while condition is true on the first pass.
        temp = {pair: ['x'], last: 0}
        while temp[pair] != []:
            try:
                print(last)
                temp = queries.get_trades(pair, last)
                last = temp['last']
                for t in temp[pair]:
                    trades.append(t)
                # Stay under Kraken's public API rate limit.
                time.sleep(1)
            except Exception as e:
                print('Error: ', e)
    history = {'trades': trades, 'last': last}
    hist_writer(pair, history)
    return history
def hist_writer(pair, history):
    """Persist trade history for `pair` to data/<pair>-history.csv.

    The first row holds the `last` cursor; remaining rows are trades.
    """
    # os.path.join keeps the path portable: the old 'data\...' literal only
    # named a real subdirectory on Windows (on POSIX it was one odd filename).
    with open(os.path.join('data', '%s-history.csv' % pair), 'w', newline='') as f:
        writer = csv.writer(f, quoting=csv.QUOTE_ALL)
        writer.writerow([history['last']])
        writer.writerows(history['trades'])
def hist_reader(pair):
    """Load trade history for `pair` from data/<pair>-history.csv.

    Returns:
        dict with 'last' (cursor string from the first row, as written by
        hist_writer) and 'trades' (list of row lists).
    """
    trades = []
    # os.path.join mirrors the portability fix intended for hist_writer.
    with open(os.path.join('data', '%s-history.csv' % pair), 'r') as f:
        reader = csv.reader(f)
        #TODO: make sure last gets written properly, then fix below on read
        last = next(reader)[0]
        for row in reader:
            trades.append(row)
    history = {'last': last, 'trades': trades}
    return history
def compute_ohlc(trades):
    """Summarise a window of trades.

    Returns a dict with open/close/high/low prices, total volume,
    volume-weighted average price and the trade count for the window.
    """
    data = np.array(trades)[:, 0:3].astype(float)
    prices = data[:, 0]
    volumes = data[:, 1]
    total_volume = volumes.sum()
    return {
        'open': prices[0],
        'close': prices[-1],
        'high': prices.max(),
        'low': prices.min(),
        'volume': total_volume,
        'vwap': (prices * volumes).sum() / total_volume,
        'count': len(data),
    }
def get_ohlc(pair, interval=1440, start=None, end=None):
    '''
    retrieves trade history for a pair between start and end
    and returns ohlc data for that window at specified interval
    '''
    # TODO(unimplemented) — planned steps:
    # get trade data
    # filter by window
    # break down into interval
    # iteratively retrieve ohlc per window
    pass
#asset or pair?
def groupHist(pair):
    """Group saved trades for `pair` into a year -> month -> day dict.

    Reads the on-disk history via hist_reader; each trade's timestamp
    (index 2) determines its calendar bucket.
    """
    history = {}
    # NOTE(review): reads the cached file only; call get_history first when
    # fresh data is needed.
    trades = hist_reader(pair)['trades']
    for trade in trades:
        date = datetime.datetime.fromtimestamp(float(trade[2]))
        # setdefault collapses the three nested key-existence checks.
        day_bucket = history.setdefault(date.year, {}) \
                            .setdefault(date.month, {}) \
                            .setdefault(date.day, [])
        day_bucket.append(trade)
    print('Done')
    return history
|
{"/priceHistory.py": ["/queries.py"], "/api.py": ["/connection.py"], "/portfolioHistory.py": ["/queries.py"], "/queries.py": ["/api.py"], "/simPortfolio-v0.py": ["/queries.py"]}
|
14,145
|
mdrdatalab/pykraken
|
refs/heads/master
|
/api.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jun 24 06:19:39 2017
@author: michael
"""
import json
import urllib.request
import urllib.parse
import urllib.error
import time
import hashlib
import hmac
import base64
import connection
class API(object):
    """Minimal Kraken REST client: public and HMAC-signed private endpoints."""
    def __init__(self, key='', secret='', conn=None):
        # `secret` is the base64-encoded API secret issued by Kraken.
        self.key = key
        self.secret = secret
        self.uri = 'https://api.kraken.com'
        self.apiversion = '0'
        # Optional persistent Connection; a throwaway one is created per
        # query while this stays None.
        self.conn = conn
        return
    def load_key(self, path):
        """Read the API key (line 1) and secret (line 2) from `path`."""
        with open(path, 'r') as f:
            self.key = f.readline().strip()
            self.secret = f.readline().strip()
        return
    def set_connection(self, conn):
        # Install a persistent Connection to reuse across queries.
        self.conn = conn
        return
    def _query(self, urlpath, req, conn=None, headers=None):
        """POST `req` to uri+urlpath and return the parsed JSON response."""
        url = self.uri + urlpath
        if conn is None:
            if self.conn is None:
                conn = connection.Connection()
            else:
                conn = self.conn
        if headers is None:
            headers = {}
        ret = conn._request(url, req, headers)
        return json.loads(ret)
    def query_public(self, method, req=None, conn=None):
        """Call an unauthenticated /public/ endpoint."""
        urlpath = '/' + self.apiversion + '/public/' + method
        if req is None:
            req = {}
        return self._query(urlpath, req, conn)
    def query_private(self, method, req=None, conn=None):
        """Call a signed /private/ endpoint (needs key/secret loaded)."""
        if req is None:
            req = {}
        # TODO: check if self.{key,secret} are set
        urlpath = '/' + self.apiversion + '/private/' + method
        # Kraken requires an ever-increasing nonce per key.
        req['nonce'] = int(1000*time.time())
        postdata = urllib.parse.urlencode(req)
        # Signature = HMAC-SHA512(path + SHA256(nonce + postdata),
        #                         base64-decoded secret).
        encoded = (str(req['nonce']) + postdata).encode()
        message = urlpath.encode() + hashlib.sha256(encoded).digest()
        signature = hmac.new(base64.b64decode(self.secret),
                             message, hashlib.sha512)
        sigdigest = base64.b64encode(signature.digest())
        headers = {
            'API-Key': self.key,
            'API-Sign': sigdigest.decode()
        }
        return self._query(urlpath, req, conn, headers)
|
{"/priceHistory.py": ["/queries.py"], "/api.py": ["/connection.py"], "/portfolioHistory.py": ["/queries.py"], "/queries.py": ["/api.py"], "/simPortfolio-v0.py": ["/queries.py"]}
|
14,146
|
mdrdatalab/pykraken
|
refs/heads/master
|
/portfolioHistory.py
|
# -*- coding: utf-8 -*-
"""
Created on Sun Jul 2 18:48:06 2017
@author: michael
"""
import queries
import datetime as dt
import pandas as pd
# Pull the full account ledger once at import time and order it by timestamp.
ledger = pd.DataFrame.from_dict(queries.get_ledger(),
                                orient='index').sort_values(by=['time'])
# Distinct asset codes that appear anywhere in the ledger.
assets = list(set(ledger['asset']))
|
{"/priceHistory.py": ["/queries.py"], "/api.py": ["/connection.py"], "/portfolioHistory.py": ["/queries.py"], "/queries.py": ["/api.py"], "/simPortfolio-v0.py": ["/queries.py"]}
|
14,147
|
mdrdatalab/pykraken
|
refs/heads/master
|
/queries.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jul 1 14:16:38 2017
@author: michael
"""
import api
# Module-level API client; 'kraken.key' (key + secret) must sit in the cwd.
k = api.API()
k.load_key('kraken.key')
###Public
def get_pairs():
    """Return the tradable asset pairs reported by Kraken."""
    # TODO: error handling
    # TODO: clean up returned data
    response = k.query_public('AssetPairs')
    return response['result']
def get_ticker(assetPairs):
    """Return ticker info for an iterable of asset-pair names.

    The pairs are joined into the comma-delimited form Kraken expects.
    """
    # TODO: error handling
    req = {'pair': ','.join(assetPairs)}
    return k.query_public('Ticker', req)['result']
def get_orders(pair):
    """Return the order-book depth for `pair`."""
    return k.query_public('Depth', {'pair': pair})['result']
def get_trades(pair, since):
    """Return trades for `pair` after cursor `since`, plus the next cursor.

    The result dict maps the pair name to a trade list and carries a
    'last' key for paging.
    """
    return k.query_public('Trades', {'pair': pair, 'since': since})['result']
###Private
def get_balance():
    """Return the account balance (signed private endpoint)."""
    response = k.query_private('Balance')
    return response['result']
def get_history():
    """Return the raw trade-history response (signed private endpoint)."""
    return k.query_private('TradesHistory')
def get_ledger():
    """Return the ledger-entries dict (signed private endpoint)."""
    response = k.query_private('Ledgers')
    return response['result']['ledger']
|
{"/priceHistory.py": ["/queries.py"], "/api.py": ["/connection.py"], "/portfolioHistory.py": ["/queries.py"], "/queries.py": ["/api.py"], "/simPortfolio-v0.py": ["/queries.py"]}
|
14,148
|
mdrdatalab/pykraken
|
refs/heads/master
|
/simPortfolio-v0.py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Jul 19 18:32:51 2017
@author: michael
"""
import queries
#TODO make this work for all pairs...
#works for XXBT, XETH
#TODO make one for historical prices...
class portfolio():
    """Paper-trading portfolio quoted in USD (Kraken asset code ZUSD)."""

    def __init__(self, initial_funds):
        # Balances keyed by Kraken asset code; cash starts in ZUSD.
        self.initial_funds = initial_funds
        self.balance = {'ZUSD': self.initial_funds}

    def buy(self, asset, qty):
        """Buy `qty` of `asset` at the current ask, if cash allows."""
        price = get_price('buy', asset)
        cost = price * qty
        if self.balance['ZUSD'] >= cost:
            self.balance['ZUSD'] -= cost
            # setdefault replaces the manual key-existence check.
            self.balance.setdefault(asset, 0)
            self.balance[asset] += qty
        else:
            print('Insufficient Funds')

    def sell(self, asset, qty):
        """Sell `qty` of `asset` at the current bid, if held."""
        if asset not in self.balance:
            # Fixed typo in the original message ('Insusfficient Funds').
            print('Insufficient Funds')
            return
        if self.balance[asset] < qty:
            print('Insufficient Funds')
            return
        price = get_price('sell', asset)
        self.balance['ZUSD'] += price * qty
        self.balance[asset] -= qty
def get_price(action, asset):
    """Current executable price for `asset` against ZUSD.

    'buy' reads the best ask; anything else reads the best bid. Quantity
    available at the top of book is ignored (simplification).
    """
    pair = asset + 'ZUSD'
    book = queries.get_orders(pair)[pair]
    side = 'asks' if action == 'buy' else 'bids'
    #TODO incorporate qty
    return float(book[side][0][0])
|
{"/priceHistory.py": ["/queries.py"], "/api.py": ["/connection.py"], "/portfolioHistory.py": ["/queries.py"], "/queries.py": ["/api.py"], "/simPortfolio-v0.py": ["/queries.py"]}
|
14,149
|
aths2041/myproject
|
refs/heads/main
|
/blog/admin.py
|
from django.contrib import admin
from .models import Post, Contact
# Register your models here.
# Commented alternative keeps a column layout for the Post changelist:
# @admin.register(Post)
# class PostModelAdmin(admin.ModelAdmin):
#     list_display = ['id', 'title', 'desc', 'title_tag', 'post_date', 'post_image']
# Plain registration: default ModelAdmin for both models.
admin.site.register(Post)
admin.site.register(Contact)
|
{"/blog/admin.py": ["/blog/models.py"], "/blog/views.py": ["/blog/models.py"]}
|
14,150
|
aths2041/myproject
|
refs/heads/main
|
/blog/migrations/0004_alter_post_post_date.py
|
# Generated by Django 3.2.4 on 2021-07-13 07:43
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated: makes Post.post_date populate automatically on creation.
    dependencies = [
        ('blog', '0003_auto_20210713_1047'),
    ]
    operations = [
        migrations.AlterField(
            model_name='post',
            name='post_date',
            field=models.DateTimeField(auto_now_add=True),
        ),
    ]
|
{"/blog/admin.py": ["/blog/models.py"], "/blog/views.py": ["/blog/models.py"]}
|
14,151
|
aths2041/myproject
|
refs/heads/main
|
/blog/views.py
|
from django.shortcuts import render, HttpResponseRedirect
from .forms import SignUpForm, LoginForm, PostForm
from django.contrib import messages
from django.contrib.auth import authenticate, login, logout
from .models import Post, Contact
from django.contrib.auth.models import Group
# home
def home(request):
    """Landing page listing every post."""
    return render(request, 'blog/home.html', {'posts': Post.objects.all()})
# All Post Contents
def allposts(request, slug=None):
    """List all posts.

    NOTE(review): `slug` is accepted but unused — presumably required by a
    URLconf pattern; confirm before removing it.
    """
    posts = Post.objects.all()
    return render(request, 'blog/allposts.html', {'posts':posts})
# About
def about(request):
    """Static about page."""
    return render(request, 'blog/about.html')
# Contact
def contact(request):
    """Render the contact page and store valid submitted messages."""
    if request.method == 'POST':
        # .get avoids a 500 (KeyError) when a field is missing from the POST.
        name = request.POST.get('name', '')
        email = request.POST.get('email', '')
        phone = request.POST.get('phone', '')
        message = request.POST.get('message', '')
        if len(name) < 3 or len(email) < 3 or len(phone) < 10 or len(message) < 5:
            messages.error(request, "Please fill correct data")
        else:
            contact = Contact(name=name, email=email, phone=phone, message=message)
            contact.save()
            # Fixed user-facing typo ('sucessfully').
            messages.success(request, "Your message has been successfully sent")
    return render(request, 'blog/contact.html')
# Dashboard
def dashboard(request):
    """Show the writer dashboard; unauthenticated users go to login."""
    if request.user.is_authenticated:
        posts = Post.objects.all()
        user = request.user
        full_name = user.get_full_name()
        gps = user.groups.all()
        return render(request, 'blog/dashboard.html',
                      {'posts': posts, 'full_name': full_name, 'groups': gps})
    # The original also had an unreachable render() after this branch
    # (both paths already return); it has been removed.
    return HttpResponseRedirect('/login/')
# Logout
def user_logout(request):
    """Log the current user out and return to the home page."""
    logout(request)
    return HttpResponseRedirect('/')
# Signup
def user_signup(request):
    """Register a new user and add them to the 'Writer' group.

    Invalid submissions fall through and re-render the bound form with
    its errors.
    """
    if request.method == "POST":
        form = SignUpForm(request.POST)
        if form.is_valid():
            messages.success(request, 'Congratulations!!! Now you are a Writer Login to continue.')
            user = form.save()
            # Bug fix: the group was looked up but never assigned to the user.
            group = Group.objects.get(name='Writer')
            user.groups.add(group)
            return HttpResponseRedirect('/login/')
    else:
        form = SignUpForm()
    return render(request, 'blog/signup.html', {'form': form})
# Login
def user_login(request):
    """Authenticate a user via the LoginForm (AuthenticationForm wrapper).

    Already-authenticated users are bounced straight to the dashboard;
    invalid submissions re-render the bound form with its errors.
    """
    if not request.user.is_authenticated:
        if request.method == "POST":
            form = LoginForm(request=request, data=request.POST)
            if form.is_valid():
                uname = form.cleaned_data['username']
                upass = form.cleaned_data['password']
                user = authenticate(username=uname, password=upass)
                if user is not None:
                    login(request, user)
                    messages.success(request, 'Logged in sucessfully!!!')
                    return HttpResponseRedirect('/dashboard/')
        else:
            form = LoginForm()
        return render(request, 'blog/login.html', {'form':form})
    else:
        return HttpResponseRedirect('/dashboard/')
# Add New Post
def add_post(request):
    """Create a new post from a submitted PostForm (login required)."""
    if request.user.is_authenticated:
        if request.method == 'POST':
            form = PostForm(request.POST, request.FILES)
            if form.is_valid():
                title = form.cleaned_data['title']
                desc = form.cleaned_data['desc']
                author = form.cleaned_data['author']
                title_tag = form.cleaned_data['title_tag']
                post_image = form.cleaned_data['post_image']
                # Bug fix: `author` was cleaned but never stored on the Post.
                pst = Post(title=title, desc=desc, author=author,
                           title_tag=title_tag, post_image=post_image)
                pst.save()
                form = PostForm()  # blank form for the next entry
        else:
            form = PostForm()
        return render(request, 'blog/addpost.html', {'form': form})
    return HttpResponseRedirect('/login/')
# Update/Edit Post
def update_post(request, id):
    """Edit an existing post (login required)."""
    if request.user.is_authenticated:
        pi = Post.objects.get(pk=id)
        if request.method == 'POST':
            # Bug fix: request.FILES was missing, so post_image could never
            # be replaced on edit (add_post already passed it).
            form = PostForm(request.POST, request.FILES, instance=pi)
            if form.is_valid():
                form.save()
        else:
            form = PostForm(instance=pi)
        return render(request, 'blog/updatepost.html', {'form': form})
    return HttpResponseRedirect('/login/')
# Delete Post
def delete_post(request, id):
    """Delete a post via POST, then return to the dashboard."""
    if request.user.is_authenticated:
        if request.method == 'POST':
            pi = Post.objects.get(pk=id)
            pi.delete()
        # Bug fix: a non-POST request used to fall through and return None,
        # which Django rejects with a 500; redirect in both cases instead.
        return HttpResponseRedirect('/dashboard/')
    return HttpResponseRedirect('/login/')
# Mobiles
def mobile(request, slug):
    """Render a single post looked up by slug."""
    post = Post.objects.filter(slug=slug).first()
    # Removed a no-op post.save() that re-wrote the row on every view and
    # raised AttributeError when the slug matched nothing (post is None).
    context = {'post': post, 'user': request.user}
    return render(request, 'blog/mobile.html', context)
|
{"/blog/admin.py": ["/blog/models.py"], "/blog/views.py": ["/blog/models.py"]}
|
14,152
|
aths2041/myproject
|
refs/heads/main
|
/blog/migrations/0008_alter_post_slug.py
|
# Generated by Django 3.2.4 on 2021-07-16 06:41
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated: relaxes Post.slug to a plain CharField.
    dependencies = [
        ('blog', '0007_post_author'),
    ]
    operations = [
        migrations.AlterField(
            model_name='post',
            name='slug',
            field=models.CharField(blank=True, max_length=130, null=True),
        ),
    ]
|
{"/blog/admin.py": ["/blog/models.py"], "/blog/views.py": ["/blog/models.py"]}
|
14,153
|
aths2041/myproject
|
refs/heads/main
|
/blog/models.py
|
from django.db import models
from django.db.models.signals import pre_save
from myblog.utils import unique_slug_generator
from ckeditor.fields import RichTextField
# Create your models here.
class Post(models.Model):
    """Blog entry; `slug` is filled by the pre_save signal defined below."""
    title = models.CharField(max_length=200)
    # Rich HTML body (ckeditor); replaced an earlier plain TextField.
    desc = RichTextField(null=True, blank=True)
    author = models.CharField(max_length=20)
    title_tag = models.CharField(max_length=200, null=True, blank=True)
    post_image = models.ImageField(null=True, blank=True, upload_to="images/")
    post_date = models.DateTimeField(auto_now_add=True, blank=True)
    slug = models.SlugField(max_length=130, null=True, blank=True)

    def __str__(self):
        # Bug fix: missing spaces produced "Titlebyauthor" in the admin.
        return self.title + ' by ' + self.author
def slug_generator(sender, instance, *args, **kwargs):
    # pre_save hook: fill Post.slug once, on first save, when it is empty.
    if not instance.slug:
        instance.slug = unique_slug_generator(instance)
pre_save.connect(slug_generator, sender=Post)
class Contact(models.Model):
    """Message submitted through the public contact form."""
    sno = models.AutoField(primary_key=True)  # explicit serial PK
    name = models.CharField(max_length=255)
    phone = models.CharField(max_length=20)
    email = models.CharField(max_length=100)
    message = models.TextField()
    timeStamp = models.DateTimeField(auto_now_add=True, blank=True)
    def __str__(self):
        return 'Message from '+ self.name+'-'+ self.email
class Key(models.Model):
    # NOTE(review): appears unused/unfinished — a bare FK to Post with a
    # non-conventional field name; confirm intent before relying on it.
    ID = models.ForeignKey(Post, on_delete=models.CASCADE)
|
{"/blog/admin.py": ["/blog/models.py"], "/blog/views.py": ["/blog/models.py"]}
|
14,165
|
jjisnow/gmail_response_bot
|
refs/heads/master
|
/canned_response_bot.py
|
from __future__ import print_function
import base64
import email
import os
import pprint
import time
from email.mime.text import MIMEText
from email.utils import parseaddr
import httplib2
from apiclient import discovery
from apiclient import errors
from oauth2client import client
from oauth2client import tools
from oauth2client.file import Storage
from .canned_reply_config import to, senders, user_id, message_text, canned_label, \
sender_credentials_file, client_secret_file
try:
    import argparse
    # oauth2client's tools.run_flow expects parsed argparser flags.
    flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
    flags = None
# If modifying these scopes, delete your previously saved credentials
# at ~/.credentials/<sender_credentials_file>.json
SCOPES = ['https://www.googleapis.com/auth/gmail.send',
          'https://www.googleapis.com/auth/gmail.readonly',
          'https://www.googleapis.com/auth/gmail.modify']
APPLICATION_NAME = 'Gmail API Python Quickstart'
seconds_between_checks = 15  # polling interval for the reply loop
def get_credentials():
    """Gets valid user credentials from storage.

    If nothing has been stored, or if the stored credentials are invalid,
    the OAuth2 flow is completed to obtain the new credentials.

    Returns:
        Credentials, the obtained credential.
    """
    # Credentials are cached under ~/.credentials/<sender_credentials_file>.
    home_dir = os.path.expanduser('~')
    credential_dir = os.path.join(home_dir, '.credentials')
    if not os.path.exists(credential_dir):
        os.makedirs(credential_dir)
    credential_path = os.path.join(credential_dir,
                                   sender_credentials_file)
    store = Storage(credential_path)
    credentials = store.get()
    if not credentials or credentials.invalid:
        # Run the browser-based OAuth2 consent flow and cache the result.
        flow = client.flow_from_clientsecrets(client_secret_file, SCOPES)
        flow.user_agent = APPLICATION_NAME
        if flags:
            credentials = tools.run_flow(flow, store, flags)
        else: # Needed only for compatibility with Python 2.6
            credentials = tools.run(flow, store)
        print('Storing credentials to ' + credential_path)
    return credentials
def create_message(origin=None, destination=to, subject=None, msg_txt=None, thr_id=None):
    """Build a Gmail API message payload.

    Args:
        origin: sender address.
        destination: recipient address (defaults to the configured `to`).
        subject: subject line.
        msg_txt: plain-text body.
        thr_id: threadId the reply should attach to.

    Returns:
        Dict with a base64url-encoded 'raw' message and its 'threadId'.
    """
    mime = MIMEText(msg_txt)
    mime['to'] = destination
    mime['from'] = origin
    mime['subject'] = subject
    raw = base64.urlsafe_b64encode(mime.as_bytes()).decode()
    return {'raw': raw, 'threadId': thr_id}
def send_message(service, user_id, message):
    """Send an email message through the Gmail API.

    Args:
        service: authorized Gmail API service instance.
        user_id: user's email address, or "me" for the authenticated user.
        message: payload produced by create_message.

    Returns:
        The sent message resource, or None (after printing) on HttpError.
    """
    try:
        sent = service.users().messages().send(userId=user_id, body=message).execute()
    except errors.HttpError as error:
        print('An error occurred: {}'.format(error))
        return None
    print('Message Id: {}'.format(sent['id']))
    return sent
def list_messages_matching_query(service, user_id, query='', label_ids=None,
                                 maxResults=None):
    """List message stubs in the user's mailbox matching `query`.

    Args:
        service: authorized Gmail API service instance.
        user_id: user's email address, or "me" for the authenticated user.
        query: Gmail search string, e.g. 'from:user@some_domain.com'.
        label_ids: label ids the messages must carry.
        maxResults: stop paging once at least this many stubs are collected
            (None collects every page).

    Returns:
        List of {'id': ..., 'threadId': ...} stubs, or None on HttpError.
    """
    if label_ids is None:
        # Avoid the shared mutable-default-argument pitfall of `label_ids=[]`.
        label_ids = []
    try:
        response = service.users().messages().list(userId=user_id, q=query,
                                                   labelIds=label_ids,
                                                   maxResults=maxResults).execute()
        messages = []
        if 'messages' in response:
            messages.extend(response['messages'])
        while 'nextPageToken' in response:
            # Bug fix: the old code evaluated len(messages) >= None when no
            # maxResults was given, raising TypeError on multi-page results.
            if maxResults is not None and len(messages) >= maxResults:
                break
            page_token = response['nextPageToken']
            response = service.users().messages().list(userId=user_id, q=query,
                                                       labelIds=label_ids,
                                                       pageToken=page_token).execute()
            messages.extend(response['messages'])
        return messages
    except errors.HttpError as error:
        print('An error occurred: %s' % error)
def list_labels(service, user_id):
    """Print and return every label in the user's mailbox.

    Returns None (after printing the error) on HttpError.
    """
    try:
        response = service.users().labels().list(userId=user_id).execute()
    except errors.HttpError as error:
        print('An error occurred: %s' % error)
        return None
    labels = response['labels']
    for label in labels:
        print('Label id: %s - Label name: %s' % (label['id'], label['name']))
    return labels
def simple_list_labels(service, user_id):
    """Return just the names of every label in the account.

    Prints a notice and returns an empty list when no labels exist.
    """
    results = service.users().labels().list(userId=user_id).execute()
    labels = results.get('labels', ())
    if not labels:
        print('No labels found.')
        return []
    return [label['name'] for label in labels]
def get_message(service, user_id, msg_id):
    """Fetch a full message resource by id.

    Returns None (after printing) on HttpError.
    """
    try:
        return service.users().messages().get(userId=user_id, id=msg_id).execute()
    except errors.HttpError as error:
        print('An error occurred: %s' % error)
def message_body_as_string(message):
    """Decode and return the first text/plain part of a Gmail message.

    Handles both multipart and single-part payloads; returns None when no
    text/plain part exists.
    """
    payload = message['payload']
    if 'multipart' in payload['mimeType']:
        candidates = payload['parts']
    else:
        # Single-part messages: treat the payload itself as the only part.
        candidates = [payload]
    for part in candidates:
        if part['mimeType'] == 'text/plain':
            return base64.urlsafe_b64decode(part['body']['data']).decode()
def get_mime_message(service, user_id, msg_id):
    """Fetch a message in raw format and parse it into a MIME object.

    Returns None (after printing) on HttpError.
    """
    try:
        message = service.users().messages().get(userId=user_id, id=msg_id,
                                                 format='raw').execute()
    except errors.HttpError as error:
        print('An error occurred: %s' % error)
        return None
    msg_str = base64.urlsafe_b64decode(message['raw']).decode()
    return email.message_from_string(msg_str)
def print_mime_message(message):
    """Print the common headers and every text/plain body of a MIME message."""
    for header in ('From', 'To', 'Subject', 'Date', 'Message-ID'):
        print('{}: {}'.format(header, message[header]))
    print('---')
    for part in message.walk():
        if part.get_content_type() == 'text/plain':
            print('---------')
            print(part.get_payload())
            print('---------')
def modify_message(service, user_id, msg_id, msg_labels):
    """Apply a label-change body to a message.

    Args:
        service: authorized Gmail API service instance.
        user_id: user's email address, or "me" for the authenticated user.
        msg_id: id of the message to modify.
        msg_labels: body from create_msg_labels.

    Returns:
        The updated message (with labelIds), or None on HttpError.
    """
    try:
        message = service.users().messages().modify(userId=user_id, id=msg_id,
                                                    body=msg_labels).execute()
    except errors.HttpError as error:
        print('An error occurred: {}'.format(error))
        return None
    print('Message ID: %s - With Label IDs %s' % (msg_id, message['labelIds']))
    return message
def field_from_message(message, field_name):
    """Return the value of a named header from a Gmail API message dict.

    The match against header names is case-insensitive and ignores
    surrounding whitespace.  Returns None when the header is absent.
    """
    wanted = field_name.strip().lower()
    headers = message['payload']['headers']
    # Generator + next() stops at the first matching header.
    return next(
        (h['value'] for h in headers
         if h['name'].strip().lower() == wanted),
        None,
    )
def create_msg_labels(service, addLabels=None, removeLabels=None):
    """Build the request body for a message/thread label modification.

    Any label in ``addLabels`` that the mailbox does not already have is
    first created through the Labels API.

    Args:
        service: Authorized Gmail API service instance.
        addLabels: Label ids to add (defaults to none).
        removeLabels: Label ids to remove (defaults to none).

    Returns:
        A dict usable as the body of users().messages()/threads().modify().
    """
    # Mutable default arguments ([]) are shared across calls; use None
    # sentinels instead.
    addLabels = [] if addLabels is None else addLabels
    removeLabels = [] if removeLabels is None else removeLabels
    for label in addLabels:
        # NOTE(review): relies on module-level `user_id` and
        # `simple_list_labels` defined elsewhere in this file.
        if label not in simple_list_labels(service, user_id):
            new_label = make_label(label)
            label_obj = create_label(service, user_id, new_label)
            # Original format string had one placeholder for two
            # arguments, silently dropping `label_obj`.
            print('Added label {}, label_id: {}'.format(label, label_obj))
    return {'removeLabelIds': removeLabels, 'addLabelIds': addLabels}
def create_label(service, user_id, label_object):
    """Create a new label in the user's mailbox and print its ID.

    Args:
        service: Authorized Gmail API service instance.
        user_id: Mailbox owner; "me" for the authenticated user.
        label_object: Label resource dict to be added.

    Returns:
        The created label resource, or None if the API call fails.
    """
    try:
        created = service.users().labels().create(
            userId=user_id, body=label_object).execute()
        print(
            'created label name: {}, label id: {}'.format(
                label_object["name"], created['id']))
        return created
    except errors.HttpError as error:
        print('An error occurred: %s' % error)
def make_label(label_name, mlv='show', llv='labelShow'):
    """Build a Gmail label resource dict.

    Args:
        label_name: The name of the Label.
        mlv: Message list visibility, show/hide.
        llv: Label list visibility, labelShow/labelHide.

    Returns:
        A dict ready to pass to the labels().create() API.
    """
    return {
        'messageListVisibility': mlv,
        'name': label_name,
        'labelListVisibility': llv,
    }
def get_thread(service, user_id, thread_id):
    """Fetch a thread and print how many messages it contains.

    Args:
        service: Authorized Gmail API service instance.
        user_id: Mailbox owner; "me" for the authenticated user.
        thread_id: The ID of the Thread required.

    Returns:
        The thread resource with matching ID, or None on API error.
    """
    try:
        thread = service.users().threads().get(
            userId=user_id, id=thread_id).execute()
        print('thread id: {} , messages in this thread: {}'.format(
            thread['id'], len(thread['messages'])))
        return thread
    except errors.HttpError as error:
        print('An error occurred: %s' % error)
def modify_thread(service, user_id, thread_id, msg_labels):
    """Apply a label change to every message in a thread.

    Args:
        service: Authorized Gmail API service instance.
        user_id: Mailbox owner; "me" for the authenticated user.
        thread_id: The id of the thread to be modified.
        msg_labels: Dict with 'addLabelIds'/'removeLabelIds' changes.

    Returns:
        The thread resource with modified labels, or None on API error.
    """
    try:
        thread = service.users().threads().modify(
            userId=user_id, id=thread_id, body=msg_labels).execute()
        # Labels live on messages; report the first message's label set.
        print('Thread ID: {} - With Label IDs: {}'.format(
            thread['id'], thread['messages'][0]['labelIds']))
        return thread
    except errors.HttpError as error:
        print('An error occurred: {}'.format(error))
def find_label_id(service, user_id, label_name):
    """Return the id of the label named ``label_name``, or None if the
    mailbox has no such label (or the API call fails)."""
    try:
        response = service.users().labels().list(userId=user_id).execute()
        return next(
            (lab['id'] for lab in response['labels']
             if lab['name'] == label_name),
            None,
        )
    except errors.HttpError as error:
        print('An error occurred: %s' % error)
def get_label_id(service, canned_label):
    """Look up the id of ``canned_label``, creating the label first if
    it does not exist yet.

    NOTE(review): depends on the module-level ``user_id`` defined in the
    configuration file.
    """
    existing = find_label_id(service, user_id, canned_label)
    if existing is not None:
        return existing
    return create_label(service, user_id, make_label(canned_label))['id']
def find_label_names(service, label_ids=None):
    """Resolve label ids to their display names.

    Args:
        service: Authorized Gmail API service instance.
        label_ids: List of label ids to resolve (defaults to none).

    Returns:
        List of label names in the same order as ``label_ids`` (unknown
        ids are silently skipped), or None if the API call fails.
    """
    # Mutable default argument ([]) replaced with a None sentinel.
    label_ids = [] if label_ids is None else label_ids
    try:
        # NOTE(review): relies on the module-level ``user_id`` from the
        # configuration file.
        response = service.users().labels().list(userId=user_id).execute()
        # Build an id -> name map once instead of scanning the label
        # list again for every requested id.
        by_id = {label['id']: label['name'] for label in response['labels']}
        return [i_d and by_id[i_d] for i_d in label_ids if i_d in by_id]
    except errors.HttpError as error:
        print('An error occurred: %s' % error)
def main():
    """Canned reply responder using the Gmail API.
    Creates a Gmail API service object and responds to a query with a standard response
    whilst giving it a label to ensure only 1 response per thread is sent

    Runs forever; each iteration polls the inbox for the newest message
    from one of the configured `senders` and, if it passes the checks
    below, replies with `message_text` and labels the thread.

    NOTE(review): reads many module-level names from the config file
    (`senders`, `user_id`, `to`, `message_text`, `canned_label`) and
    `seconds_between_checks`, which is not visible in this file — confirm
    it is defined at import time.
    """
    # start time in milliseconds to compare with last message time
    start_time = int(time.time()) * 1000
    # get credentials first and create gmail service object
    credentials = get_credentials()
    http = credentials.authorize(httplib2.Http())
    service = discovery.build('gmail', 'v1', http=http)
    while True:
        # receive email messages
        q_to_list = ['from:' + e_mail for e_mail in senders]
        q = 'in:inbox {}'.format(' OR '.join(q_to_list))
        # only the single most recent matching message is considered
        messages = list_messages_matching_query(service, user_id,
                                                query=q,
                                                maxResults=1)
        if not messages:
            print("No messages to show")
            time.sleep(seconds_between_checks)
            continue
        else:
            pprint.pprint('Messages to show: {}'.format(messages))
        # get thread of first document - so you can label the thread itself if need be
        thread_id = messages[0]['threadId']
        thread = get_thread(service, user_id, thread_id)
        msg_id = messages[0]['id']
        message = get_message(service, user_id, msg_id)
        msg_sender = field_from_message(message, 'From')
        # label id is created on first use if the label does not exist yet
        canned_label_id = get_label_id(service, canned_label)
        thread_label_ids = thread['messages'][0]["labelIds"]
        # check that the date is later than starting, and emails match list
        if int(message["internalDate"]) < start_time:
            print('internalDate earlier than start_time!')
            print("better luck next time")
        # check if it's already replied to
        elif canned_label_id in thread_label_ids:
            print("you replied already to this one, even if it is later than startup")
            print("better luck next time")
        else:
            # check cleaned sender email in list
            sender_email = parseaddr(msg_sender)[1]
            if sender_email not in senders:
                print("emails don't match!!")
            # after all tests passed, reply to message with same subject
            else:
                subject = 'Re: ' + field_from_message(message, 'Subject')
                msg = create_message(destination=msg_sender, origin=to,
                                     subject=subject,
                                     msg_txt=message_text, thr_id=thread_id)
                send_message(service, user_id, msg)
                print("Replied to message!")
                # reset the clock so the reply itself is not re-processed
                start_time = int(time.time()) * 1000
                # then label the thread
                labels = create_msg_labels(service, addLabels=[canned_label_id])
                modify_thread(service, user_id, thread_id, labels)
                print("Added a label: {} ".format(canned_label))
                print('done!')
        # always print blank line and wait a few seconds
        print('=====\n')
        time.sleep(seconds_between_checks)
if __name__ == '__main__':
main()
|
{"/canned_response_bot.py": ["/canned_reply_config.py"]}
|
14,166
|
jjisnow/gmail_response_bot
|
refs/heads/master
|
/canned_reply_config.py
|
# Your email - gets added in the "from" field when you write your reply.
# Placeholder: replace with your own address.
# (Fix: the original placeholder was missing the closing '>' of the
# angle-bracketed address form.)
to = 'your email <your@email.com>'
# a list of blacklisted senders whom this applies to. Emails must be surrounded by ''
# and separated by commas, and the list inside []
senders = ['boring@mail.com', 'spammer@mail.com']
# the standard reply text to reply with
message_text = """
Please respond to someone who cares
"""
# Gmail API special value meaning "the authenticated user".
user_id = 'me'
# the label for the responses
canned_label = 'Canned-reply'
# where the gmail api credentials and client_secret file is located
sender_credentials_file = 'email-sender-creds.json'
client_secret_file = 'your_client_secret.json'
|
{"/canned_response_bot.py": ["/canned_reply_config.py"]}
|
14,190
|
saisua/BeerProgramming
|
refs/heads/master
|
/run_server.py
|
#!./python3.7/python
import Server #, front
import logging
#from wx import App
from multiprocessing import Process, Manager
from re import finditer, sub
from random import randint
import time, datetime
# This function gets executed when you run
# python run_server.py and its use is to deploy
# the main server used in Beer_Programming
def main():
    """Entry point for `python run_server.py`: configure debug logging
    and start the game server with options parsed from sys.argv."""
    from sys import argv
    logging.basicConfig(
        format="%(asctime)s %(levelname)s | %(message)s",
        level=logging.DEBUG)
    Beer_programming(gui=False, **arg_parse(argv)).play()
# arg_parse does take a list of arguments and returns
# one dict with the parameters and values (str) determined
# by the keys and values of arg_dict
# if one key is found, the following argument is
# chosen as a value
def arg_parse(args:list, arg_dict:dict=
                {"--ip":"ip","--port":"port",
                "-i":"ip","-p":"port",
                "-pl":"player_num",
                "--players":"player_num",
                "-t":"compile_time",
                "--time":"compile_time",
                "-dpe":"drinks_per_error",
                "--drinks_per_error":"drinks_per_error"}) -> dict:
    """Translate a command line into keyword arguments.

    Walks ``args`` (skipping the program name); whenever an element is a
    key of ``arg_dict``, the NEXT element is stored under the mapped
    parameter name.  Unknown tokens and a trailing flag with no value
    are silently ignored.

    Args:
        args: Argument list, typically ``sys.argv``.
        arg_dict: Mapping from command-line flags to parameter names.

    Returns:
        Dict of parameter name -> value (values stay strings).
    """
    final = {}
    # Use None as the "no flag pending" sentinel instead of False, and
    # idiomatic `is not None` tests.
    pending = None
    for arg in args[1:]:
        if pending is not None:
            logging.debug(f"Found arg ({pending}) : {arg}")
            final[pending] = arg
            pending = None
        else:
            pending = arg_dict.get(arg)
    return final
"""
The class Beer_programming is the main class used to
play the game. It will create the server to which the
clients will connect to. Between its features, it will
mostly stay still until it is time for the users to
compile, and then it will send them the order to do so,
until someone finishes.
"""
class Beer_programming():
    """Server side of the game: owns the socket server, the shared
    (multiprocessing.Manager-backed) game state and the text protocol
    queue ``conn_step``/``conn_symbols`` used to decide whether the
    server listens to or sends to a client next."""
    def __init__(self, ip:str=None, port:int=12412, player_num:int=1,
                compile_time:int=240, gui:bool=True,
                drinks_per_error:"(float,function)"=(1,round)):
        """Create the Server, initialise shared state and block waiting
        for ``player_num`` client connections.

        NOTE(review): ``eval`` on ``drinks_per_error`` executes
        arbitrary input when it arrives as a string — acceptable only
        because it comes from the local command line.
        """
        self.serv = Server.Server(ip, int(port), order_dict=
                        {"--add_player":self.add_player,
                        "--send_players":self.send_players,
                        "--chat":self.add_chat,"--send_chat":self.send_chat,
                        "--play":self.play,
                        "--drink":self.drink})
        # Manager-backed containers so state is shared across processes.
        self.players = Manager().dict()
        self.players_drinks = Manager().dict()
        self.players_last_drink = Manager().dict()
        self.compile_time = float(compile_time)
        self.compile_at = Manager().list()
        if(type(drinks_per_error) is str): drinks_per_error = eval(drinks_per_error)
        if(not hasattr(drinks_per_error, '__iter__')):
            drinks_per_error = (float(drinks_per_error), round)
        self.drinks_per_error = drinks_per_error
        self.end = Manager().list([False])
        self.conn_step = [";;"]
        self.conn_symbols = {"Serv_to_Client":";;", "Client_to_Serv":"::",
                            "Serv_listen":"->", "Serv_send":"<_",
                            "Client_listen":"<-", "Client_send":"<_",
                            "Urgency":"!!", "Evaluate":"#"}
        self.chat = Manager().list()
        if(gui): self.gui()
        self.serv.listen_connections(int(player_num))
    # __play has debugging purposes, and it allows the
    # user to send the server direct orders
    def __play(self, addr:tuple):
        """Debug REPL: drain ``conn_step``, prompting for data to send
        or listening to the client at ``addr`` as each symbol dictates."""
        logging.debug(f"__play({addr})")
        while(len(self.conn_step)):
            step = self.conn_step[0]
            self.conn_step.pop(0)
            if(step == self.conn_symbols["Serv_to_Client"] or step == self.conn_symbols["Serv_send"]):
                decoded_data = input("> ")
                if(step == self.conn_symbols["Evaluate"]): decoded_data = eval(decoded_data)
                self.symbol_parse(decoded_data)
                self.serv.sendto(decoded_data,addr)
            elif(step == self.conn_symbols["Client_to_Serv"] or step == self.conn_symbols["Serv_listen"]):
                self.listen(addr)
            logging.debug(f"Conn_steps: {self.conn_step}")
    # play is the main function that should be run to
    # ensure the game is automatic and no problems arise.
    # It will first ask the users to open an instance of
    # the compiler
    def play(self, addr:tuple):
        """Main game loop for one client: tell it to start its compiler,
        then repeatedly sleep until compile time and order a compile,
        until ``self.end[0]`` becomes true.  Prints a drink summary at
        the end."""
        logging.debug(f"play({addr})")
        self.serv.sendto("--_start!!<-", addr)
        self.listen(addr)
        while(not self.end[0]):
            if(self.conn_step[0] == ";;" or self.conn_step[0] == "<_"): self.conn_step.pop(0)
            self.sleep(compile_after=True, addr=addr)
        print("\n\n")
        for pl,v in self.players_last_drink.items(): print(f"player {pl} must drink {v}")
        print("\n\n")
    # symbol_parse is used by the user and the client
    # to tell (into a queue) what the Server should do
    # next (listen/send something)
    def symbol_parse(self, command:str):
        """Scan ``command`` for protocol symbols and queue them into
        ``conn_step``; a preceding Urgency symbol makes the next symbol
        jump the queue (inserted at position ``num``)."""
        urgent = False
        num = 0
        for symbol in finditer('|'.join(self.conn_symbols.values()), command):
            if(symbol.group(0) == self.conn_symbols["Urgency"]):
                urgent = True
            else:
                if(urgent):
                    self.conn_step.insert(num, symbol.group(0))
                    urgent = False
                    num += 1
                else:
                    self.conn_step.append(symbol.group(0))
    # listen does wait for the client in addr to send data
    # which then is parsed and executed if it matches any
    # order_dict key
    def listen(self, addr:tuple, max_timeout:float=None):
        """Wait (up to ``max_timeout`` seconds, default ~30 days) for
        data from the client at ``addr``; strip protocol symbols and
        hand the payload to ``Server.parse_data``.

        NOTE(review): ``data.decode`` is called before the ``data is
        None`` check — if recv() ever returned None this would raise
        before the timeout branch runs; confirm Server's recv contract.
        """
        if(addr is None): return
        timeout = 0
        if(max_timeout is None):
            max_timeout = datetime.datetime.now() + datetime.timedelta(days=30)
        else:
            max_timeout = datetime.datetime.now() + datetime.timedelta(seconds=int(max_timeout),
                                milliseconds=int(max_timeout*1000%1000))
        while(datetime.datetime.now() < max_timeout):
            data = self.serv._client_from_addr[addr].recv(1024)
            decoded_data = data.decode("utf-8")
            if(data is None):
                timeout += 1
                logging.debug(f"Timeout of user {addr} increased to {timeout}")
                if(timeout > 9):
                    logging.warning(f"User {addr} has disconnected")
                    break
            elif(decoded_data != ''):
                timeout = 0
                logging.info(f"Recived data '{decoded_data}' from address {addr}")
                self.symbol_parse(decoded_data)
                decoded_data = sub('|'.join(self.conn_symbols.values()),'',decoded_data)
                self.serv.parse_data(decoded_data, addr)
                break
    # (translated from Catalan: an in-joke left by the author)
    #
    # Game related functions
    #
    # add_player adds to self.players the alias given
    # by the player with the key being the sender's address
    # if the player did not exist, it gets assigned
    # to 0 the record of drinks haven
    def add_player(self, addr:tuple, name:str):
        """Register (or rename) the player at ``addr``; initialise drink
        counters only for first-time players."""
        self.players[addr] = name
        if(self.players_drinks.get(addr, None) is None):
            self.players_drinks[addr] = 0
            self.players_last_drink[addr] = False
    # send_players is a mean for the clients to keep
    # the record of players updated. It will send to
    # the client the order to add/overwrite the name
    # of all players asked. Making use of python's dict
    # being ordered structures, it is possible to ask only
    # all players that came after number {last}, to reduce
    # the amount of data to send in cases where there's
    # a lot of players
    def send_players(self, addr:tuple, last:int=0):
        """Send the client at ``addr`` one --add_player order per player
        from index ``last`` on; only the final order omits the
        urgent-listen suffix."""
        players = list(self.players.items())
        if(last >= len(players) or last < 0): return
        for player in players[last:-1]:
            self.serv.sendto(f"--add_player;{player}{self.conn_symbols['Urgency']}{self.conn_symbols['Client_listen']}", addr)
        self.serv.sendto(f"--add_player;{players[-1]}", addr)
    # add_chat will add an entry into the self.chat list
    # with a tuple containing the address of the sender
    # and the message he/she sent
    def add_chat(self, addr:tuple, text:str):
        """Append a (sender address, message) pair to the shared chat."""
        self.chat.append((addr, text))
    # send_chat will send the client in addr all chat entries
    # from {last}
    def send_chat(self, addr:tuple, last:int=0):
        """Send the client at ``addr`` every chat entry from index
        ``last`` on, mirroring ``send_players``'s suffix scheme."""
        if(last >= len(self.chat) or last < 0): return
        for mssg in self.chat[last:-1]:
            self.serv.sendto(f"--add_chat;{mssg}{self.conn_symbols['Urgency']}{self.conn_symbols['Client_listen']}",addr)
        self.serv.sendto(f"--add_chat;{self.chat[-1]}",addr)
    # drink is a function executed by the client to add as a
    # record how many drink has an user had.
    # It also does send back the client to tell the user
    # what to drink
    def drink(self, addr:tuple, drinks:int=1):
        """Scale the reported error count by ``drinks_per_error``,
        record it for the player at ``addr`` and echo the amount back.

        NOTE(review): protocol arguments arrive as strings; ``drinks *
        self.drinks_per_error[0]`` assumes a numeric ``drinks`` — verify
        Server.parse_data converts it before dispatch.
        """
        drinks = self.drinks_per_error[1](drinks*
                        self.drinks_per_error[0])
        print(f"Player {addr} drinks {drinks}")
        self.players_drinks[addr] += int(drinks)
        self.players_last_drink[addr] = int(drinks)
        self.serv.sendto(f"--drink;{drinks}",addr)
    # sleep (kind of) overloads the time.sleep in order to
    # make the process sleep but until the time defined by
    # the server (stored in self.compile_at[0]) and to
    # tell the client to compile if {compile_after}
    def sleep(self, sleep_time:float=None, compile_after:bool=False, addr:tuple=None):
        """Sleep until the shared compile deadline (refreshing it with a
        random 0-20s reduction when stale), then optionally order the
        client at ``addr`` to compile and wait up to 5s for its answer."""
        if(sleep_time is None):
            if(not len(self.compile_at)):
                self.compile_at.append(datetime.datetime.now() + datetime.timedelta(seconds=self.compile_time))
            elif(self.compile_at[0] < datetime.datetime.now()):
                self.compile_at[0] = datetime.datetime.now() + datetime.timedelta(seconds=self.compile_time-randint(0,20))
            sleep_time = (self.compile_at[0] - datetime.datetime.now()).seconds
        logging.info(f"Sleeping for {sleep_time} seconds.")
        time.sleep(sleep_time)
        if(compile_after):
            self.serv.sendto(f"--compile{self.conn_symbols['Client_listen']}", addr)
            self.listen(addr, max_timeout=5)
    # gui is used for displaying the connected users if
    # the server device has a display attached to it
    def gui(self):
        """Placeholder GUI: the wx implementation below is commented out
        (kept as a string literal), so this only logs its call."""
        logging.debug("BeerProgramming.gui(self)")
        """
        app = App()
        frame = front.Frame(name="Beer Programming")
        panel = frame.new_panel(bgcolor=(50,50,50))
        player_panel = panel.new_panel("(%0.49,%1)")
        chat_panel = panel.new_panel("(%0.49,%1)","(%0.51,0)")
        self.text_list = {self.name:panel.add_text((0,0),"(%1,%0.2)", self.name)}
        app.MainLoop()
        """
if __name__ == "__main__":
main()
|
{"/run_server.py": ["/Server.py"], "/play.py": ["/Client.py", "/front.py"]}
|
14,191
|
saisua/BeerProgramming
|
refs/heads/master
|
/play.py
|
#!./python3.7/python
import Client , front
from wx import App
from multiprocessing import Process
import logging
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.common.exceptions import TimeoutException, StaleElementReferenceException, NoAlertPresentException
from selenium.webdriver.firefox.firefox_binary import FirefoxBinary
from selenium import webdriver
from re import finditer, sub
import time
# This function gets executed when you run
# python play.py and it serves as a way to run
# the Beer_programming client and parse the user's
# arguments
def main():
    """Entry point for `python play.py`: configure debug logging and
    start the game client (no GUI) with options parsed from sys.argv."""
    from sys import argv
    logging.basicConfig(
        format="%(asctime)s %(levelname)s | %(message)s",
        level=logging.DEBUG)
    Beer_programming(**arg_parse(argv)).play(False)
# arg_parse does take a list of arguments and returns
# one dict with the parameters and values (str) determined
# by the keys and values of arg_dict
# if one key is found, the following argument is
# chosen as a value
def arg_parse(args:list, arg_dict:dict=
                {"--ip":"ip","--port":"port",
                "-i":"ip","-p":"port"}) -> dict:
    """Translate a command line into keyword arguments.

    Walks ``args`` (skipping the program name); whenever an element is a
    key of ``arg_dict``, the NEXT element is stored under the mapped
    parameter name.  Unknown tokens and a trailing flag with no value
    are silently ignored.

    Args:
        args: Argument list, typically ``sys.argv``.
        arg_dict: Mapping from command-line flags to parameter names.

    Returns:
        Dict of parameter name -> value (values stay strings).
    """
    final = {}
    # Use None as the "no flag pending" sentinel instead of False, and
    # idiomatic `is not None` tests.
    pending = None
    for arg in args[1:]:
        if pending is not None:
            logging.debug(f"Found arg ({pending}) : {arg}")
            final[pending] = arg
            pending = None
        else:
            pending = arg_dict.get(arg)
    return final
"""
The class Beer_programming is the main class used to
play the game. It will create a Client, who will then
connect to the server, which will rule the game.
Between its features there is the possibility to
run and compile the chosen online compiler, and to
chat (unused)
"""
class Beer_programming():
    """Client side of the game: connects a Client socket to the server,
    drives a Selenium-controlled online compiler, and obeys the text
    protocol queue ``conn_step``/``conn_symbols`` shared with the
    server."""
    def __init__(self, ip:str=None, port:int=12412):
        """Set up the socket client, the order dispatch table and the
        protocol state, then connect (defaults to localhost)."""
        logging.debug(f"BeerProgramming.__init__(self,{ip},{port})")
        self.client = Client.Client()
        self.listener = None
        # Server orders (received as text) mapped to their handlers.
        self.order_dict = {"--compile":self.compile,
                        "--drink":self.drink,
                        "--add_player":self.add_player,
                        "--add_chat":self.add_chat,
                        "--_print":self.__print_gui,
                        "--_start":self.start_compiler}
        self.players = {}
        self.chat = []
        self.drinks = 0
        self.conn_step = [";;"]
        self.conn_symbols = {"Serv_to_Client":";;", "Client_to_Serv":"::",
                            "Serv_listen":"->", "Serv_send":"<_",
                            "Client_listen":"<-", "Client_send":"<_",
                            "Urgency":"!!", "Evaluate":"#"}
        if(not ip is None): self.connect(ip,port)
        else: self.connect("127.0.0.1", port)
    # The play function should be the first function to be
    # executed when the server starts to listen to the socket.
    # When executed, play will send the server the order to
    # add itself as a named user, will ask the server
    # what are the names of the other players and then it
    # will give the server the control.
    # play can open a gui(unused) and will then run
    # _play_process
    def play(self, gui:bool=True) -> None:
        """Prompt for a confirmed alias, register it with the server,
        request the game start, then hand control to _play_process
        (optionally alongside the unused GUI in a child process)."""
        logging.debug(f"BeerProgramming.play(self, {gui})")
        name = False
        while(not name):
            name = input("In-game alias: ")
            name = name if input("Confirm?[Y/n] ").lower() in ["y","ye","yes","s","si"] else False
        self.name = name
        self.client.send_to_server(f"--add_player;{name}{self.conn_symbols['Client_to_Serv']}")
        logging.debug("Sent alias to server")
        self.client.send_to_server(f"--play{self.conn_symbols['Serv_to_Client']}")
        logging.debug("Sent play petition to server")
        #self.start_compiler()
        if(gui):
            play_pro = Process(target=self._play_process)
            play_pro.start()
            self.gui()
            play_pro.join()
        else: self._play_process()
    # _play_process is the main function to interactuate
    # with the server. Based on the actual state of the
    # Beer_programming.conn_steps queue it will either
    # listen or ask what to send to the server.
    # It's the server's job to determine if it should or
    # should not need the user's input
    def _play_process(self) -> None:
        """Main protocol loop: drain ``conn_step``, either prompting the
        user for data to send or listening for a server order and
        dispatching it through ``data_parse``."""
        while(len(self.conn_step)):
            step = self.conn_step[0]
            self.conn_step.pop(0)
            if(step == self.conn_symbols["Client_to_Serv"] or step == self.conn_symbols["Client_send"]):
                decoded_data = input("> ")
                if(step == self.conn_symbols["Evaluate"]): decoded_data = eval(decoded_data)
                self.symbol_parse(decoded_data)
                self.client.send_to_server(decoded_data)
            elif(step == self.conn_symbols["Serv_to_Client"] or step == self.conn_symbols["Client_listen"]):
                decoded_data = self.listen()
                logging.info(f"Recived data '{decoded_data}' from server")
                self.symbol_parse(decoded_data)
                decoded_data = sub(f'|'.join(self.conn_symbols.values()),'',decoded_data)
                self.data_parse(decoded_data)
            logging.debug(f"Conn_steps: {self.conn_step}")
    # connect (kind of) overrides Client.connect. Even if
    # unnecessary, I think this function makes the code
    # cleaner
    def connect(self, ip:str, port:int=12412) -> None:
        """Connect the underlying Client and keep its listener
        generator for later use by ``listen``."""
        logging.debug(f"BeerProgramming.connect(self, {ip}, {port})")
        self.listener = self.client.connect(ip,port)
    # listen does make use of the Client's generator to
    # listen to the server and return a string
    def listen(self, listener:"generator"=None) -> str:
        """Pull the next message string from the (given or stored)
        listener generator; returns None when no listener exists."""
        #logging.debug(f"BeerProgramming.listen(self, {listener})")
        if(listener is None):
            if(self.listener is None): return
            listener = self.listener
        return(next(listener))
    # (unused) gui does open a gui for the user to see
    # all clients connected and chat with them
    def gui(self) -> None:
        """(unused) Open a wx window listing players and the chat."""
        logging.debug("BeerProgramming.gui(self)")
        app = App()
        frame = front.Frame(name="Beer Programming")
        panel = frame.new_panel(bgcolor=(50,50,50))
        player_panel = panel.new_panel("(%0.49,%1)")
        chat_panel = panel.new_panel("(%0.49,%1)","(%0.51,0)")
        self.text_list = {self.name:panel.add_text((0,0),"(%1,%0.2)", self.name)}
        app.MainLoop()
    # start_compiler does start a new selenium instance (gecko)
    # controlled by the game to make sure nobody can cheat
    # with (at least) one saved file
    def start_compiler(self) -> None:
        """Launch a game-controlled Firefox (geckodriver assumed next to
        this file), open the online Java compiler, collapse its side
        menu and acknowledge the server with "Done"."""
        logging.info("Configuration complete. Trying to run the drivers. This could take some time...")
        self.driver = webdriver.Firefox(executable_path=(
                                __file__).replace("play.py", "geckodriver"))
                                #options=options, firefox_profile=profile,# capabilities=firefox_capabilities,
                                #firefox_binary=FirefoxBinary((__file__).replace("play.py", "geckodriver")))
        logging.info("Drivers ran succesfully!")
        self.driver.get("https://www.onlinegdb.com/online_java_compiler")
        self.tab = self.driver.current_window_handle
        self.driver.find_element_by_xpath("//*[@class='glyphicon glyphicon-menu-left']").click()
        self.client.send_to_server("Done")
    # data_parse takes any message sent by the server
    # and it executes the function assigned as key
    # in the self.order_dict dictionary
    def data_parse(self, data:str) -> None:
        """Split ``data`` on ';' into orders and their arguments, then
        invoke each recognised order with the arguments collected up to
        the next order token."""
        #print(f"data_parse {data}")
        order = None
        args = ()
        for arg in data.split(';'):
            new_ord = self.order_dict.get(arg.strip(), None)
            print(f"arg:{arg}, new_ord:{new_ord}")
            if(not new_ord is None):
                if(not order is None):
                    print(f"{order}{args}")
                    try:
                        order(*args)
                    except Exception as err: print(f"ERROR: {err}")
                order = new_ord
                args = ()
            elif(arg.strip() != ''): args+=(arg.strip(),)
        if(not order is None):
            print(f"{order}{args}.")
            try:
                order(*args)
            except Exception as err:
                print(order)
                print(args)
                raise err
                # NOTE(review): unreachable — the raise above exits first.
                print(f"ERROR: {err}.")
    # symbol_parse is used by the user and the server
    # to tell (into a queue) what the Client should do
    # next (listen/send something)
    def symbol_parse(self, command:str):
        """Scan ``command`` for protocol symbols and queue them into
        ``conn_step``; an Urgency symbol promotes the following symbol
        to the front.

        NOTE(review): inserts at index 0 and still increments ``num``,
        unlike the server's symbol_parse which inserts at ``num`` —
        confirm whether this asymmetry is intentional.
        """
        urgent = False
        num = 0
        for symbol in finditer('|'.join(self.conn_symbols.values()), command):
            if(symbol.group(0) == self.conn_symbols["Urgency"]):
                urgent = True
            else:
                if(urgent):
                    self.conn_step.insert(0, symbol.group(0))
                    urgent = False
                    num += 1
                else:
                    self.conn_step.append(symbol.group(0))
    #
    # Game related functions
    #
    # compile will make use of the selenium instance to
    # try to compile the code. Any error in the code will
    # be sent to the server, who will answer how
    # many times will have the user to drink
    def compile(self) -> int:
        """Dismiss any alert, press the compiler's stop/play buttons
        until a run starts, then report the number of error lines to the
        server (0 when they cannot be counted)."""
        self.driver.switch_to.window(self.tab)
        try: self.driver.switch_to.alert.dismiss()
        except NoAlertPresentException: pass
        while(True):
            try:
                self.driver.switch_to.window(self.tab)
                self.driver.find_elements_by_xpath('''//*[@class='glyphicon glyphicon-stop']''').click()
            except: pass
            try:
                self.driver.find_element_by_xpath("//*[@class='glyphicon glyphicon-play']").click()
                break
            except: pass
        time.sleep(3)
        self.driver.switch_to.window(self.tab)
        try:
            self.client.send_to_server(f"--drink;{len(self.driver.find_elements_by_xpath('''//*[@class='error_line']'''))}"
                                        f"{self.conn_symbols['Urgency']}{self.conn_symbols['Client_listen']}")
        except:
            self.client.send_to_server(f"--drink;0{self.conn_symbols['Urgency']}{self.conn_symbols['Client_listen']}")
        self.driver.switch_to.window(self.tab)
    # drink will be executed by the server when the code is
    # compiled. It will then tell the user how many times
    # it should drink
    def drink(self, drinks:str='0'):
        """Show the server-mandated drink count in a browser alert."""
        logging.info(f"Recieved order to drink {drinks} times")
        self.driver.execute_script(f"alert('Drink {drinks} times');", [])
    # add_player will record a new player, when given by the
    # server. It is meant to have the utility of listing
    # all users avaliable to chat with
    def add_player(self, player:"str(addr, name)"):
        """Record a player sent by the server as a "(addr, name)" string.

        NOTE(review): ``eval`` on server-supplied text — trusted only
        because the server is run by the players themselves.
        """
        addr, name = eval(player)
        logging.info(f"Added new player {name} ({addr})")
        self.players[addr] = name
    # add_chat adds a new message to the chat list
    def add_chat(self, text:str):
        """Append a server-sent chat entry (evaluated from its repr)."""
        self.chat.append(eval(text))
    # __print_gui is a debugging function that prints all
    # server-recieved variables
    def __print_gui(self):
        """Debug dump of the chat, player list and drink counter."""
        print()
        print(f"Chat: {self.chat}")
        print()
        print(f"Players: {self.players}")
        print()
        print(f"Drinks:{self.drinks}")
        print()
if __name__ == "__main__":
main()
|
{"/run_server.py": ["/Server.py"], "/play.py": ["/Client.py", "/front.py"]}
|
14,192
|
saisua/BeerProgramming
|
refs/heads/master
|
/front.py
|
# Imports
import logging, wx
from re import split as multisplit
from cv2 import imread
# Main function for testing
def __main():
    """Manual smoke-test for the widgets in this module.

    NOTE(review): the bare ``return`` below short-circuits the function,
    so everything after it is currently dead demo code — delete the
    return to actually exercise the GUI.
    """
    # level = logging. + [CRITICAL, ERROR, WARNING, INFO, DEBUGGING]
    logging.basicConfig(format='%(asctime)s %(levelname)s | %(message)s', level=logging.DEBUG)
    return
    app = wx.App()
    frame = Frame(name="Un xicotet test", size=(1000,700))
    #panel = frame.new_panel(bgcolor=(50,50,50))
    winsize = frame._size
    #panel = frame.new_panel((winsize[0],40),bgcolor=(255,0,0))
    #panel.add_text("Un petit test", (0,0), (300,100),font=wx.Font(37,wx.ROMAN,wx.NORMAL,wx.NORMAL))
    #panel = frame.new_panel((300,400),(0,41),(170,170,170),(0,600))
    #panel.add_checkbox("Did i say it was a test?", (10, 50), (200,180), on_click=lambda ev: print(ev))
    #but = panel.add_button("Try it out", (20, 250),(160,50), on_click=lambda ev: print(ev))
    #but.Bind(wx.EVT_ENTER_WINDOW, (lambda ev: ev.GetEventObject().SetLabelText("Tested :P")))
    #but.Bind(wx.EVT_LEAVE_WINDOW, (lambda ev: ev.GetEventObject().SetLabelText("Try it out")))
    panel = frame.new_panel("(_size[0],@350)", "(0,@41)")
    panel.add_textbox(f"(@{panel.Size.x/2},0)", f"(@{panel.Size.x/2},@{panel.Size.y})", "Editable test", on_event=lambda e: print(f"User pressed enter. | {e.String} \n END |"))
    panel.add_image("Special.PNG",(0,0),f"(@{panel.Size.x/2},@{panel.Size.y})")
    panel = frame.new_panel("(_size[0]-@320,20+@20)", "(@300,@391)")
    panel.add_textbox((0,0), f"(@{panel.Size.x},{panel.Size.y/2}+@{panel.Size.y/2})", "Editable password", hidden = True, on_event=lambda e: print(f"User password | {e.String}"))
    app.MainLoop()
# Not typing hints 'cause is just a test funct
"""
The class Frame creates a new empty window. To add widgets such as
buttons you have to first run Frame.new_panel and use the object
it returns to draw those widgets. The first panel may not follow size arg
but you can stack panels on top of each other.
"""
class Frame(wx.Frame):
    """Top-level window wrapper that tracks its child windows and
    panels and propagates resize events to resizable panels."""
    def __init__(self, parent:wx.Frame=None, name:str="New frame", size:tuple=None, resizable:bool=True):
        """Create the window; an invalid or missing ``size`` falls back
        to the full display size."""
        logging.info("New Frame(wx.Frame) created.")
        logging.debug(f"Frame.__init__(self, {parent}, {name}, {size})")
        self._parent = parent
        if(not parent is None): self.__parent_size = parent._size
        # Check size format
        if(size is None or (type(size) != tuple and type(size) != list) or len(size) != 2):
            logging.warning("Looks like size was either not set or in a wrong format\n"
                            f"     size={size}")
            size = wx.DisplaySize()
        # Explicit base-class init (equivalent to super().__init__).
        self.__class__.__base__.__init__(self, parent, -1, name, (0,0), size)
        if(not resizable):
            pass
        else: self.Bind(wx.EVT_SIZE, lambda _: self.__resize__())
        self.windows = []
        self.panels = []
        self.Show(True)
    # Creates a new unique window, child of parent. It returns a Frame
    # object becaue all windows are Frames
    def new_window(self,size:tuple=None, resizable:bool=True, parent:object=None) -> 'Frame':
        """Create and show a child Frame (defaulting to this frame's
        size/parentage) and return it."""
        logging.debug(f"Frame.new_window(self, {size}, {parent})")
        if(parent is None): parent = self
        if(size is None): size = self._size
        self.windows.append(Frame(parent, size=size, resizable=resizable))
        self.windows[-1].Show()
        return self.windows[-1]
    # new_panel creates a canvas inside the Frame. It returns a Panel object
    # which has functions to create and draw widgets
    def new_panel(self, size:tuple=None, location:tuple=(0,0),
                bgcolor:tuple=(90,90,90), scrollbarsxy_size:tuple=False, scrollbarsxy_extend:tuple=0,
                style:"style1 | style2"=wx.BORDER_THEME, resizable:bool=True,
                name:str='', allow_files_drop:bool=False, parent:object=None) -> 'Panel':
        """Create a Panel inside this frame (or ``parent``), register it
        for resize propagation and return it."""
        logging.debug(f"Frame.new_panel(self, {size}, {location}, {bgcolor}, {scrollbarsxy_size},"
                      f" {scrollbarsxy_extend}, {style}, {resizable}, {name}, {allow_files_drop},"
                      f" {parent})")
        if(parent is None): parent = self
        if(size is None): size = self._size
        self.panels.append(Panel(parent, size, location, bgcolor, scrollbarsxy_size, scrollbarsxy_extend,
                            style, resizable, name, allow_files_drop))
        return self.panels[-1]
    @property
    def _size(self) -> tuple:
        # Current (width, height) as a plain tuple.
        return (self.Size.x, self.Size.y)
    # Function to resize all marked as resizable
    def __resize__(self) -> None:
        """Forward a resize event to every registered panel."""
        for panel in self.panels:
            panel.__resize__()
"""
The Panel object is the canvas of a Frame object and should
have a wx.Frame object as a parent. The functions here are
written to create locally (in self Panel) widgets.
"""
class Panel(wx.ScrolledWindow):
    def __init__(self, parent:wx.Frame, size:tuple, location:tuple=(0,0), bgcolor:tuple=(90,90,90),
                scrollbarsxy_size:tuple=False, scrollbarsxy_extend:tuple=0,
                style:"style1 | style2"=wx.BORDER_THEME,
                resizable:tuple=True, name:str='', allow_files_drop:bool=False):
        """Create the scrolled canvas, optionally with scrollbars,
        resize tracking and file drag-and-drop.

        NOTE(review): ``size``/``location`` may also be the string
        expressions understood by ``check_format`` (defined later in
        this class, outside this view) — confirm its mini-language.
        """
        logging.info("New Panel(wx.ScrolledWindow) created.")
        logging.debug(f"Panel.__init__(self, {parent}, {size}, {location}, {bgcolor}, {scrollbarsxy_size}, "
                      f"{scrollbarsxy_extend}, {style}, {resizable}, {name}, {allow_files_drop}")
        self._parent = parent
        self.name = name
        logging.debug(f"loc: {location} -> {self.check_format(location,False)} -> {self.check_format(location)}")
        logging.debug(f"siz: {size} -> {self.check_format(size,False)} -> {self.check_format(size)}")
        # Explicit base-class init (equivalent to super().__init__).
        self.__class__.__base__.__init__(self, parent, -1, self.check_format(location), self.check_format(size), style)
        self.SetBackgroundColour(bgcolor)
        if(scrollbarsxy_size):
            self.SetScrollbars(1,1,*scrollbarsxy_size)
            self.extend_scrollbar = scrollbarsxy_extend
            self.Bind(wx.EVT_SCROLL_BOTTOM, self.__extend_scrollbar__)
        self.resizable = resizable
        if(resizable):
            # Keep the unevaluated size/location specs so __resize__
            # can re-evaluate them against the new parent size.
            self.resize = self.check_format(size, False)
            self.relocate = self.check_format(location, False)
        self.Bind(wx.EVT_SCROLL, lambda _: self.Refresh())
        self.DragAcceptFiles(allow_files_drop)
        self.buttons = []
        self.text = []
        self.checkbox = []
        self.bitmaps = []
        self.textbox = []
        self.widgets_list = [self.buttons, self.text, self.checkbox,
                             self.bitmaps, self.textbox]
        self.panels = []
        # SetBackgroundColor won't work if this is not called
        self.Refresh()
# new_panel creates a canvas inside the Panel parent.
def new_panel(self, size:tuple=None, location:tuple=(0,0),
bgcolor:tuple=(90,90,90), scrollbarsxy_size:tuple=False, scrollbarsxy_extend:tuple=0,
style:"style1 | style2"=wx.BORDER_THEME, resizable:bool=True,
name:str='', allow_files_drop:bool=False, parent:object=None) -> 'Panel':
logging.debug(f"Parent.new_panel(self, {size}, {location}, {bgcolor}, {scrollbarsxy_size}, "
f"{scrollbarsxy_extend}, {style}, {resizable}, {name}, {allow_files_drop}, "
f"{parent}")
if(parent is None): parent = self
if(size is None): size = self._size
#Do not chech_format since it is done in __init__
self.panels.append(Panel(parent, size, location, bgcolor, scrollbarsxy_size, scrollbarsxy_extend,
style, resizable, name, allow_files_drop))
return self.panels[-1]
# Adds a button in the panel
def add_button(self, label:str, location:tuple, size:tuple, on_click:"function",
style:"style1 | style2"=0, parent:wx.Window=None) -> wx.Button:
logging.debug(f"Panel.add_button(self, {label}, {location}, {size}"
f", {on_click}, {style}, {parent})")
if(parent is None): parent = self
self.buttons.append([wx.Button(parent, label=label, pos=self.check_format(location),
size=self.check_format(size), style=style),
self.check_format(size,False), self.check_format(location,False)])
self._parent.Bind(wx.EVT_BUTTON, on_click, self.buttons[-1][0])
return self.buttons[-1][0]
# Adds a text box in the panel
def add_text(self, location:tuple, size:tuple, text:str, color:tuple=(0,0,0),
bgcolor:tuple=None, font:wx.Font=None,
style:"style1 | style2"=0, parent:wx.Window=None) -> wx.StaticText:
logging.debug(f"Panel.add_text(self, {text}, {location}, {size},"
f"{color}, {bgcolor}, {font}, {style}, {parent})")
if(parent is None): parent = self
self.text.append([wx.StaticText(parent, -1, text, self.check_format(location),
self.check_format(size), style),
self.check_format(size,False), self.check_format(location,False)])
self.text[-1][0].SetForegroundColour(color)
if(not bgcolor is None): self.text[-1][0].SetBackgroundColour(bgcolor)
if(not font is None): self.text[-1][0].SetFont(font)
return self.text[-1][0]
# Add a writable text box
def add_textbox(self, location:tuple, size:tuple, text:str='',
style:"style1 | style2"=0, on_event:"function"=None,
multiline:bool=True, hidden:bool=False, writable:bool=True,
event:"wx.EVT_TEXT"=wx.EVT_TEXT_ENTER,
parent:wx.Window=None) -> wx.TextCtrl:
logging.debug(f"Panel.add_textbox(self, {location}, {size}, {text}, {style},"
f"{multiline}, {hidden}, {writable}, {event}, {parent})")
if(parent is None): parent = self
# Looks like you can't have a multiline password
# (at least i can't nor care) maybe i'll retry it later
if(hidden):
style = style | wx.TE_PASSWORD
elif(multiline): style = style | wx.TE_MULTILINE
elif(not writable): style = style | wx.TE_READONLY
self.textbox.append([wx.TextCtrl(parent, -1, text, self.check_format(location),
self.check_format(size), style),
self.check_format(size,False), self.check_format(location,False)])
if(not on_event is None):
if(hidden and event == wx.EVT_TEXT_ENTER): event = wx.EVT_TEXT
self.textbox[-1][0].Bind(event, on_event)
return self.textbox[-1][0]
# Adds a checkbox in the panel
def add_checkbox(self, text:str, location:tuple, size:tuple, on_click:'function'=None,
style:'style1 | style2'=0, validator:wx.Validator=wx.Validator,
parent:wx.Window=None) -> wx.CheckBox:
logging.debug(f"Panel.add_checkbox(self, {text}, {location}, {size},"
f"{on_click}, {style}, {validator}, {parent})")
if(parent is None): parent = self
self.checkbox.append([wx.CheckBox(parent, -1, text, self.check_format(location),
self.check_format(size), style),
self.check_format(size,False), self.check_format(location,False)])
if(not on_click is None): self.checkbox[-1][0].Bind(wx.EVT_CHECKBOX, on_click)
return self.checkbox[-1][0]
# Adds an image as a StaticBitmap in the frame
def add_image(self, location:tuple, size:tuple, image_path:"str(path)"="None.jpg",
style:"style1 | style2"=wx.Center, allow_files_drop:bool=False,
multiimage:bool=True, parent:wx.Window=None, menu:bool=True,
bitmap_images:list=[]) -> wx.StaticBitmap:
logging.debug(f"Panel.add_image(self, {image_path}, {location}, {size}, {style}, {parent})")
if(parent is None): parent = self
if(menu): size = f"{str(size[:-1])}-@30)"
self.bitmaps.append([Image_container(self, location, size, style, image_path, menu, bitmap_images),
self.check_format(size,False), self.check_format(location,False)])
if(allow_files_drop): self.Bind(wx.EVT_DROP_FILES, self.bitmaps[-1][0].image_drop_event)
return self.bitmaps[-1][0]
# Runs instances times function(**args). This function is designed to create
# rows or columns
def create_multiple(self, instances:int, function:'function', args:dict={}) -> list:
logging.debug(f"Panel.create_multiple(self, {instances}, {function}, {args})")
returned = []
for num in range(instances):
checked_args = {}
for arg,value in args.items():
if(type(value) == str):
checked_args[arg] = value.replace("num", str(num))
continue
checked_args[arg] = value
returned.append(function(**checked_args))
return returned
def check_format(self, variable, do_eval:bool=True):
if(type(variable) == str):
_size = self._parent._size
if(variable.find('@') + 1 or variable.find('%') + 1): variable = self.__resize_formatting__(variable)
if(do_eval): return eval(variable)
return variable
# This will format any n (2 pref) dimensional tuple and calculate any number starting with
# @ so that it will be able to resize based on parent's size
# Example [size=(700,1000)] (@300, @500 + 10) -> (0.42857*size[0], 0.5*size[1] + 10) [(300/700*size[0], 500/1000*size[1] + 10)]
def __resize_formatting__(self, formula:"str(tuple)", characters:list=['+','-','*','/','(',')']) -> "str(tuple)":
logging.debug(f"Panel.__resize_formatting(self, {formula}, {characters})")
start = formula.find("(")
# end should be -= 1 but it's a wasted operation
end = len(formula)-formula[::-1].find(")")
before = formula[:start]
after = formula[end:]
formula = formula[start:end]
del start, end
final = ()
# Just in case not to mess up the eval
size = self._parent._size
for dim_num, dimension in enumerate(formula.replace(' ','')[1:][:-1].split(',')):
dimension_formatted = ""
# Maybe this was better but didn't work. I think my understanding of re was insufficient
# Idk, this is O(6*n) algorithm and the one below is only m, but usually m won't be bigger than 50
# Also the second one has way more assignments '-.-
#splitables = [not_empty for found in findall(characters, dimension) for not_empty in found if not_empty] + ['']
# Just get all +, -, *, /, ( and ) in order
splitables = []
splitable = ""
for num, char in enumerate(dimension):
if(not char in characters):
if(splitable):
# Check if there's any symbol before any number
if(num == len(splitable)):
dimension_formatted += splitable
splitable = ''
continue
splitables.append(splitable)
splitable = ''
continue
splitable += char
if(splitable): splitables.append(splitable)
else: splitables.append('')
del splitable
num = 0
for splitted in multisplit("\\"+"|\\".join(characters)+"+", dimension):
logging.debug(f"dim: {dim_num} || splitted: {splitted}")
if(not splitted): continue
if(splitted[0] == '@'): splitted = f"{splitted[1:]}/{size[dim_num]}*_size[{dim_num}]"
elif(splitted[0] == '%'): splitted = splitted[1:]+f"*_size[{dim_num}]"
dimension_formatted += splitted + splitables[num]
num += 1
final = final + (dimension_formatted,)
return before + str(final).replace('\'','') + after
def __resize__(self):
logging.debug("Panel.__resize__(self)")
if(not self.resizable): return
self.SetPosition(self.check_format(self.relocate))
self.SetSize(self.check_format(self.resize))
for panel in self.panels:
panel.__resize__()
# This can be done since list are immutable
for widgets in self.widgets_list:
for widget in widgets:
widget[0].SetSize(self.check_format(widget[1]))
widget[0].SetPosition(self.check_format(widget[2]))
self.Refresh()
def __extend_scrollbar__(self, event):
logging.debug(f"Panel.__extend_scrollbar__(self, {event})")
new_size = check_format(self.extend_scrollbar)
if(type(new_size) == int): new_size = (new_size, new_size)
event.GetParent().GetScroll(event.GetOrientation()).Range = (event.GetParent().GetScroll(event.GetOrientation()).Range +
new_size[event.GetOrientation()])
@property
def delete(self, step:int=-1) -> None:
logging.debug(f"Panel.delete(self)")
logging.info("Removed one panel")
if(step == 1 or step < 0):
self._parent.panels.remove(self)
if(step == 2 or step < 0):
self.Destroy()
@property
def _size(self) -> tuple:
return (self.Size.x, self.Size.y)
"""
A special class in order to make ALL StaticBitmap containers look better
that's the reason it is in front.py
"""
class Image_container(wx.StaticBitmap):
    """StaticBitmap wrapper that optionally adds a '<  n/m  >' browse menu."""
    def __init__(self, parent:Panel, location:tuple, size:tuple, style:'style1 | style2',
                 image_path:str="None.jpg", menu:bool=True, bitmap_images:list=None):
        self._parent = parent
        # Bug fix: a mutable default ([]) was shared by every
        # Image_container ever created without an explicit list.
        self.bitmap_images = [] if bitmap_images is None else bitmap_images
        self.__class__.__base__.__init__(self, parent, -1, self.image_from_path(image_path, parent),
                                         self._parent.check_format(location), self._parent.check_format(size), style)
        if(image_path != "None.jpg"):
            self.bitmap_images.append(image_path)
        self.image_num = 0
        if(menu):
            # NOTE(review): these slices treat `size`/`location` as format
            # strings like "(@700, @1000)" — confirm callers always pass strings.
            self.menu = parent._parent.new_panel(f"{str(size)[:str(size).find(',')]}, @30)",
                                                 f"{str(location)[:str(size).find(',')]}{str(size)[str(size).find(',')+1:]}",
                                                 (255,255,255))
            # in case of adding anything here, the indexed access in
            # self.set_image must be updated to match
            self.menu_sub = [
                self.menu.add_button('<', (0,0), "(self.Size.y, self.Size.y)", lambda _: self.set_image(self.image_num-1)),
                self.menu.add_textbox("(self.Size.x/2-self.Size.y,0)", "(self.Size.y*2,self.Size.y)",
                                      f"{self.image_num}/{len(self.bitmap_images)}", multiline=False,
                                      on_event=lambda event: self.set_image(event.String), event=wx.EVT_TEXT),
                self.menu.add_button('>', "(self.Size.x-self.Size.y,0)", "(self.Size.y, self.Size.y)",
                                     lambda _: self.set_image(self.image_num+1))
            ]
        else:
            self.menu = None
            self.menu_sub = None
        # Bug fix: without this guard, menu=False crashed on None.Refresh()
        if(self.menu is not None): self.menu.Refresh()
        self.set_image(len(self.bitmap_images))
    # Saves all images dropped on event
    def image_drop_event(self, event:wx.EVT_DROP_FILES, filetypes:list=["jpg","jpeg","png"]):
        """Accept dropped files with a known image extension and show the last one."""
        logging.debug(f"Image_container.image_drop_event(self, {event}, {filetypes})")
        logging.info(f"The user has dropped {event.GetNumberOfFiles()} file"
                     f"{'s' if event.GetNumberOfFiles() != 1 else ''}")
        for image in event.GetFiles():
            # Extension check via the reversed-find idiom used elsewhere in this file
            if(image[len(image)-image[::-1].find("."):].lower() in filetypes):
                self.bitmap_images.append(self.image_from_path(image, self._parent))
        if(len(self.bitmap_images)):
            self.set_image(len(self.bitmap_images))
            self.CenterOnParent()
            self.Refresh()
    # Returns a bitmap from a image_path
    @staticmethod
    def image_from_path(image_path:str, panel:wx.Window, keep_ratio:bool=True, scale_to:"fit/fill"='fit') -> wx.Bitmap:
        """Load image_path and scale it to the panel ('fit' or 'fill')."""
        logging.debug(f"Image_container.image_from_path({image_path}, {panel}, {scale_to})")
        if(keep_ratio or scale_to == "fit"):
            # imread gives (h, w); reversed to (w, h)
            img_size = imread(image_path).shape[:2][::-1]
            logging.debug(f"Non-formatted image has size {img_size}")
            if(scale_to == "fill"):
                if(img_size[0] < img_size[1]):
                    img_size = (panel.Size.x, img_size[1]*panel.Size.x/img_size[0])
                elif(img_size[0] > img_size[1]):
                    img_size = (img_size[0]*panel.Size.y/img_size[1], panel.Size.y)
                else: img_size = (min(panel._size),)*2
            else: # Done so that a ValueError for scale_to need not be raised
                if(img_size[0] < img_size[1]):
                    img_size = (img_size[0]*panel.Size.y/img_size[1], panel.Size.y)
                elif(img_size[0] > img_size[1]):
                    img_size = (panel.Size.x, img_size[1]*panel.Size.x/img_size[0])
                else: img_size = (min(panel._size),)*2
        else:
            img_size = panel.Size
        logging.debug(f"Formatted image has size {img_size}")
        return wx.Image(image_path, wx.BITMAP_TYPE_ANY).Scale(img_size[0], img_size[1],
                                                              wx.IMAGE_QUALITY_HIGH).ConvertToBitmap()
    def set_image(self, image_num:int):
        """Display image #image_num (1-based); also accepts 'n/m' strings.

        NOTE(review): bitmap_images may hold paths (from __init__) or
        wx.Bitmap objects (from drops); SetBitmap expects a bitmap — confirm.
        """
        logging.debug(f"Image_container.set_image(self, {image_num})")
        # The textbox sends "n/m" strings; keep only the "n" part
        if(str(image_num).find('/')+1): image_num = image_num[:image_num.find('/')]
        try:
            if(int(image_num) < 1 or int(image_num) > len(self.bitmap_images) or int(image_num) == self.image_num): return
        except: return
        self.image_num = int(image_num)
        # Updating the textbox will re-trigger set_image, which then
        # short-circuits on the image_num equality check above
        if(not self.menu is None): self.menu_sub[1].SetValue(f"{image_num}/{len(self.bitmap_images)}")
        self.SetBitmap(self.bitmap_images[self.image_num-1])
        self.CenterOnParent()
        self.Refresh()
    def SetSize(self, size:tuple) -> None:
        """Resize and keep the bitmap centered on its parent."""
        logging.debug(f"Image_container.SetSize(self, {size})")
        self.__class__.__base__.SetSize(self, size)
        self.CenterOnParent()
    def SetPosition(self, location:tuple) -> None:
        logging.debug(f"Image_container.SetPosition(self, {location})")
        self.__class__.__base__.SetPosition(self, location)
        # self.CenterOnParent here looks unnecessary
# Testing: run the module's demo entry point when executed directly
if __name__ == "__main__":
    __main()
|
{"/run_server.py": ["/Server.py"], "/play.py": ["/Client.py", "/front.py"]}
|
14,193
|
saisua/BeerProgramming
|
refs/heads/master
|
/Client.py
|
import socket
import logging
import time
# This function gets executed when you run
# python Client.py and its use is to test the code
# so it will usually be empty
def main():
    # Early return: the self-test below is intentionally disabled.
    # Everything after this line is unreachable dead code kept by the
    # author for quick manual testing (connect to sys.argv[1]:12412).
    return
    import sys
    #multiprocessing_logging.install_mp_handler()
    logging.basicConfig(format="%(asctime)s %(levelname)s | %(message)s", level=logging.DEBUG)
    cl = Client()
    cl.connect(sys.argv[1], 12412)
"""
The Client class is the main socket client class.
It is programmed to connect to an instance of
Server.Server. It mostly listens and sends data
from and to the server
"""
class Client():
    """
    Main socket client.  Connects to an instance of Server.Server;
    it mostly listens for data from the server and sends data to it.
    """
    def __init__(self):
        logging.debug(f"Client.__init__(self)")
        logging.info("Created new client")
        self.listener = None
        self.server = None
    # connect will connect to the server in ip:port .
    # if given a password, the client will try to send it to
    # the server (not working)
    def connect(self, ip:str, port:int=12412, password:str=None):
        """Block until the server accepts, then return the listen generator."""
        logging.debug(f"Client.connect(self, {ip}, {port}, {password})")
        server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        while(True):
            try:
                server.connect((ip, int(port)))
                break
            except ConnectionRefusedError:
                # Server not up yet: back off briefly instead of
                # busy-spinning at 100% CPU (`time` was already imported
                # but unused — this looks like the intended use).
                time.sleep(0.5)
        self.server = server
        logging.info(f"Connected to personal port in {ip}:{port}")
        self.listener = self.listen(server)
        return self.listener
    # listen takes one connected instance of socket.socket
    # and returns one generator. Each time that the
    # generator executes its .next() , the function will
    # be resumed, and it will return any data collected
    # from the server
    def listen(self, server:socket.socket) -> "generator":
        """Yield UTF-8 decoded chunks until the server disconnects."""
        while(True):
            data = server.recv(1024)
            # Bug fix: recv() returns b'' (never None) once the peer
            # closes the connection, so the old `data is None` check made
            # this loop spin forever yielding '' after a disconnect.
            if(not data): break
            yield data.decode("utf-8")
    # send_to_server turns {data} into utf-8 formatted
    # bytes and sends them to the server
    def send_to_server(self, data:str):
        if(not self.server is None):
            self.server.sendall(bytes(data, "utf-8"))
if __name__ == "__main__":
main()
|
{"/run_server.py": ["/Server.py"], "/play.py": ["/Client.py", "/front.py"]}
|
14,194
|
saisua/BeerProgramming
|
refs/heads/master
|
/Server.py
|
from multiprocessing import Process, Manager
import socket, logging
import typing
# This function gets executed when you run
# python Server.py and its use is to test the code
# so it will usually be empty
def main():
    # Running this module directly is unsupported; use run_server.py instead
    logging.error("Run run_server instead")
"""
The Server class is the main builder for the Sockets
over TCP. It will by default allow an unlimited number
of clients, but it will accept only 1.
When told so, it can accept multiple clients at once.
When one client gets connected, the Server assigns
one process to that client, and that process divides
into two, one that operates from the point of view of the
server, and a second one, as a daemon, who listens and
runs the functions given in order_dict
"""
class Server():
    """
    Main TCP socket server.  By default it allows an unlimited number of
    clients but accepts only one at a time; listen_connections can accept
    several.  Each accepted client gets a listener that decodes incoming
    text and dispatches orders found in order_dict.
    """
    def __init__(self, ip:str=None, port:int=12412, password:str=None, max_connections:int=-1,
                 order_dict:dict=None):
        # Bug fix: avoid the shared mutable default for the dispatch table
        if(order_dict is None): order_dict = {}
        # [accept-loop threaded?, per-client listener threaded?]
        self.threaded = [False, False]
        logging.debug(f"Server.__init__(self, {ip}, {port}, {password}, {max_connections})")
        # Manager-backed dicts so per-client processes share state
        self.__manager = Manager()
        self._client_from_addr = self.__manager.dict()
        self._process_from_addr = {}
        self.open = self.__manager.dict()
        self.order_dict = order_dict
        if(ip is None):
            ip = socket.gethostbyname_ex(socket.gethostname())[-1]
            if(type(ip) is list or type(ip) is tuple): ip = ip[-1]
            logging.warning(f"Ip set automatically to {ip}")
            # NOTE(review): the detected address is immediately overridden
            # below — looks like leftover debug code; confirm before removing.
            ip = "127.0.0.1"
            logging.warning(f"Ip set automatically to {ip}")
        self.ip = ip
        self.port = int(port)
        self.password = password
        # -1 (or anything below it) means "unlimited"
        self.max_connections = int(max_connections) if max_connections >= -1 else -1
        self._connection = socket.socket(socket.AF_INET,
                                         socket.SOCK_STREAM)
        self._connection.bind((ip, port))
        logging.info("Created new server")
    # listen_connections sets up {connections} connections,
    # that when connected by a client, will assign one new
    # process to that client
    def listen_connections(self, connections:int=1, ip:str=None, port:int=None) -> None:
        """Accept `connections` clients, one process each when threaded[0]."""
        logging.debug(f"Server.listen_connections(self, {connections}, {ip}, {port})")
        if(ip is None): ip = self.ip
        if(port is None): port = self.port
        else: self.port = int(port)
        if(self.threaded[0]):
            process = []
            for _ in range(connections):
                process.append(Process(target=self.new_connection, args=(ip, port)))
                process[-1].start()
            for conn in process: conn.join()
        else: self.new_connection(ip, port)
    # new_connection waits for one client, registers it and
    # starts listening for its orders (the keys of order_dict)
    def new_connection(self, ip:str=None, port:int=None) -> None:
        logging.debug(f"Server.new_connection(self, {ip}, {port})")
        # max_connections == -1 means unlimited (the +1 turns -1 falsy)
        if(self.max_connections + 1 and len(self._client_from_addr) >= self.max_connections): return
        if(ip is None): ip = self.ip
        if(port is None): port = self.port
        self._connection.listen()
        listener, addr = self._connection.accept()
        logging.info(f"Connected new user: {addr}")
        self._client_from_addr[addr] = listener
        self.open[addr] = True
        if(self.threaded[1]):
            self._process_from_addr[addr] = Process(target=self.listen, args=(addr, listener))
            self._process_from_addr[addr].start()
        else: self.listen(addr,listener)
    # sendto (kind of) overloads socket.socket.sendto .
    # Given a message and an address, the server will
    # turn message into utf-8 formatted bytes, and it
    # will send it (if possible) to the client with the
    # given address
    def sendto(self, message:str, addr:tuple) -> "iterable":
        self._client_from_addr[addr].sendto(bytes(str(message), "utf-8"), addr)
    # sendall (kind of) overloads socket.socket.sendall .
    # Even if it is not tested, it theoretically turns message
    # into utf-8 formatted bytes and sends it to all clients
    # in the socket server.
    def sendall(self, message:str):
        self._connection.sendall(bytes(str(message), "utf-8"))
    # listen will make use of listener to (if given one)
    # ask for a password, and then dispatch incoming orders
    def listen(self, addr:tuple, listener:"socket.socket") -> "generator[str]":
        """Handle one client: optional password check, then order dispatch."""
        logging.debug("Client.listen(self)")
        if(not self.open[addr]): return
        with listener:
            if(not self.password is None):
                wrong_att = 0
                accepted = False
                while(not accepted):
                    password = listener.recv(1024)
                    # Bug fix: recv() returns b'' (never None) when the peer
                    # disconnects; the old `is None` check could never fire.
                    if(not password):
                        self.open[addr] = False
                        break
                    if(password.decode("utf-8") == self.password):
                        accepted = True
                    else:
                        wrong_att += 1
                        if(wrong_att > 3):
                            self.open[addr] = False
                            break
            while(self.open[addr]):
                data = listener.recv(1024)
                # Empty read == client disconnected (see note above)
                if(not data):
                    logging.warning(f"User {addr} has disconnected")
                    break
                decoded_data = data.decode("utf-8")
                if(decoded_data != ''):
                    logging.info(f"Recived data '{decoded_data}' from address {addr}")
                    self.parse_data(decoded_data, addr)
        # Bug fix: use pop(..., None) — the process entry only exists when
        # threaded[1] is True, so a plain `del` raised KeyError otherwise.
        self._process_from_addr.pop(addr, None)
        self._client_from_addr.pop(addr, None)
        self.open.pop(addr, None)
    # parse_data takes one string received from one client
    # and its address and executes (if found) any matches
    # separated by ';' in the string as keys in order_dict.
    # The functions in the values of the dict must take
    # addr as the first parameter even if unnecessary
    def parse_data(self, data:str, addr:str) -> None:
        order = None
        args = (addr,)
        for arg in data.split(';'):
            new_ord = self.order_dict.get(arg.strip(), None)
            print(f"arg:{arg}, new_ord:{new_ord}")
            if(not new_ord is None):
                # A new order closes out the previous one with its args
                if(not order is None):
                    print(f"{order}{args}")
                    try:
                        order(*args)
                    # Bug fix: the message was missing its f-prefix, so it
                    # printed the literal text "{err}"
                    except Exception as err: print(f"ERROR: {err}")
                order = new_ord
                args = (addr,)
            elif(arg.strip() != ''): args+=(arg.strip(),)
        if(not order is None):
            print(f"{order}{args}.")
            try:
                order(*args)
            except Exception as err: print(f"ERROR: {err}")
|
{"/run_server.py": ["/Server.py"], "/play.py": ["/Client.py", "/front.py"]}
|
14,198
|
fumpen/marbar
|
refs/heads/master
|
/marbar/score_board/migrations/0001_initial.py
|
# Generated by Django 2.1.1 on 2018-10-26 15:02
from django.db import migrations, models
import django.db.models.deletion
# Auto-generated Django migration: creates the ScoreUnit model, a scored
# entry (title/points/placement) linked to a management.MarBar.
class Migration(migrations.Migration):
    initial = True
    dependencies = [
        ('management', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='ScoreUnit',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(default='', max_length=200)),
                ('points', models.IntegerField(default=0)),
                ('placement', models.IntegerField(default=0)),
                ('marbar', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='management.MarBar')),
            ],
        ),
    ]
|
{"/marbar/score_board/views.py": ["/marbar/score_board/models.py"], "/marbar/management/admin.py": ["/marbar/management/models.py"], "/marbar/management/views.py": ["/marbar/management/models.py", "/marbar/management/forms.py"]}
|
14,199
|
fumpen/marbar
|
refs/heads/master
|
/marbar/management/urls.py
|
from django.urls import path
from . import views
# URL routes for the management app; the `name` values are referenced by
# templates and redirect() calls in the views.
urlpatterns = [
    path('', views.general_management, name='management_view'),
    path('login/', views.crude_login_view, name='login_view'),
    path('login_post/', views.crude_login, name='login'),
    path('logout/', views.logout_user, name='logout'),
    path('create_marbar/', views.create_marbar, name='create_marbar'),
    path('create_user/', views.create_user, name='create_user'),
    path('update_marbar/', views.update_marbar, name='update_marbar'),
    path('activate_marbar/', views.activate_marbar, name='activate_marbar'),
    path('events/', views.events_view, name='events'),
    path('delete_event/', views.delete_event, name='delete_event'),
]
|
{"/marbar/score_board/views.py": ["/marbar/score_board/models.py"], "/marbar/management/admin.py": ["/marbar/management/models.py"], "/marbar/management/views.py": ["/marbar/management/models.py", "/marbar/management/forms.py"]}
|
14,200
|
fumpen/marbar
|
refs/heads/master
|
/marbar/management/forms.py
|
from django import forms
from django.contrib.auth.models import User
from management.models import MarBar, Event
from score_board.models import ScoreUnit
from django.db import transaction
from django.core.exceptions import ObjectDoesNotExist
from django.contrib import messages
# Default score-board entries created for a new MarBar:
# (title, placement-group) pairs; group 0 holds the non-class entries.
STD_FORM = [('1A', 1), ('2A', 1), ('3A', 1), ('4A', 1), ('5A', 1), ('6A', 1), ('7A', 1),
            ('1B', 2), ('2B', 2), ('3B', 2), ('4B', 2), ('5B', 2), ('6B', 2), ('7B', 2),
            ('1C', 3), ('2C', 3), ('3C', 3), ('4C', 3), ('5C', 3), ('6C', 3), ('7C', 3),
            ('1D', 4), ('2D', 4), ('3D', 4), ('4D', 4), ('5D', 4), ('6D', 4), ('7D', 4),
            ('Aspirants', 0), ('Crew', 0), ('MarBar Committee', 0)]
class NewUser(forms.Form):
    """User-creation form; validates that both password fields match."""
    title = forms.CharField(max_length=100)
    mail = forms.CharField(max_length=100)
    password1 = forms.CharField(max_length=100)
    password2 = forms.CharField(max_length=100)
    # NOTE(review): Django only auto-calls clean_<field> for declared
    # fields, and there is no field named 'user' — this method must be
    # invoked explicitly (or be renamed to clean()) to run; confirm callers.
    def clean_user(self):
        cleaned_data = super().clean()
        pass1 = cleaned_data.get('password1')
        pass2 = cleaned_data.get('password2')
        if pass1 == pass2:
            return cleaned_data
        # Bug fix: the ValidationError was instantiated but never raised,
        # so mismatched passwords passed validation silently.
        raise forms.ValidationError('The provided passwords must be the same')
class MarBarForm(forms.Form):
    # Form for creating or updating a MarBar.
    # NOTE(review): this is a plain forms.Form, so the inner Meta class is
    # ignored by Django (Meta is only honoured by ModelForm) — confirm intent.
    title = forms.CharField()
    #banner = forms.ImageField(required=False)
    # queryset is filled in __init__ with all non-superuser accounts
    users = forms.ModelMultipleChoiceField(queryset=None, to_field_name='username',
                                           widget=forms.CheckboxSelectMultiple, required=False)
    end_date = forms.DateTimeField(widget=forms.DateTimeInput, input_formats=['%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M',
                                                                              '%m/%d/%Y %H:%M:%S', '%m/%d/%Y %H:%M'])
    create_standard_fields = forms.BooleanField(required=False)
    # pk of the MarBar to update; hidden, only used on the update path
    intended_pk = forms.IntegerField(widget=forms.HiddenInput, required=False)
    class Meta:
        model = MarBar
        fields = ['title', 'end_date', 'is_active', 'create_standard_fields']
    def __init__(self, *args, **kwargs):
        # Superusers are excluded from the selectable user list
        user_choices = User.objects.all().exclude(is_superuser=True)
        super().__init__(*args, **kwargs)
        self.fields['users'].queryset = user_choices
    def save(self, new_instance=False, update_instance=False):
        # Create a new MarBar (inactive by default) with the selected users.
        # NOTE(review): `&` is bitwise-and on bools here; it works for
        # bool operands but `and` would be the idiomatic choice — confirm
        # no caller passes truthy non-bool flags.
        if self.is_valid() & new_instance:
            with transaction.atomic():
                new_marbar = MarBar.objects.create(title=self.cleaned_data['title'],
                                                   end_date=self.cleaned_data['end_date'], is_active=False)
                for u in self.cleaned_data['users']:
                    new_marbar.users.add(u)
                new_marbar.save()
                # Optionally pre-populate the score board with STD_FORM entries
                if self.cleaned_data['create_standard_fields']:
                    for n, p in STD_FORM:
                        ScoreUnit.objects.create(title=n, points=0, placement=p, marbar=new_marbar)
        # Update an existing MarBar, replacing its user set entirely
        if self.is_valid() & update_instance:
            marbar_update = MarBar.objects.get(pk=self.cleaned_data['intended_pk'])
            with transaction.atomic():
                marbar_update.title = self.cleaned_data['title']
                marbar_update.end_date = self.cleaned_data['end_date']
                marbar_update.users.clear()
                for u in self.cleaned_data['users']:
                    marbar_update.users.add(u)
                marbar_update.save()
                # NOTE(review): on update this *adds* another full set of
                # STD_FORM entries (possible duplicates) — confirm intended.
                if self.cleaned_data['create_standard_fields']:
                    for n, p in STD_FORM:
                        ScoreUnit.objects.create(title=n, points=0, placement=p, marbar=marbar_update)
class EventForm(forms.Form):
    # Form for creating an Event attached to the currently active MarBar.
    # NOTE(review): plain forms.Form — the inner Meta class is ignored by
    # Django (only ModelForm honours it); confirm intent.
    title = forms.CharField()
    info = forms.CharField(required=False)
    start_date = forms.DateTimeField(widget=forms.DateTimeInput, input_formats=['%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M',
                                                                                '%m/%d/%Y %H:%M:%S', '%m/%d/%Y %H:%M'])
    end_date = forms.DateTimeField(widget=forms.DateTimeInput, input_formats=['%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M',
                                                                              '%m/%d/%Y %H:%M:%S', '%m/%d/%Y %H:%M'])
    class Meta:
        model = MarBar
        fields = ['title', 'info', 'start_date', 'end_date']
    def save(self, active_marbar):
        # Persist a new Event only if the submitted data validates;
        # invalid data is silently ignored (no exception raised).
        if self.is_valid():
            with transaction.atomic():
                new_event = Event(marbar=active_marbar, title=self.cleaned_data['title'],
                                  info=self.cleaned_data['info'], start_date=self.cleaned_data['start_date'],
                                  end_date=self.cleaned_data['end_date'])
                new_event.save()
|
{"/marbar/score_board/views.py": ["/marbar/score_board/models.py"], "/marbar/management/admin.py": ["/marbar/management/models.py"], "/marbar/management/views.py": ["/marbar/management/models.py", "/marbar/management/forms.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.