index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
31,852,766
|
kfinn/elizabeth-pipeline
|
refs/heads/main
|
/models/image_name_dictionaries/image_filename_LSM.py
|
import re
import logging

# Root logger handle; not referenced elsewhere in this module.
LOGGER = logging.getLogger()

# Parses LSM image paths shaped like:
#   <experiment>/<well>_<YYYY_MM_DD__HH_MM_SS>/p<field>/ch<channel>/z<zslice><suffix>.<extension>
# The placeholders XXX (field) and XX (channel / z-slice) stand for "unknown";
# parse() below maps them to None.
IMAGE_FILE_RE = re.compile(
    "(?P<experiment>.+)" +
    "/" +
    "(?P<well>[A-Za-z0-9]+)" +
    "_(?P<timestamp>\\d{4}_\\d{2}_\\d{2}__\\d{2}_\\d{2}_\\d{2})/" +
    "p(?P<f>\\d{1,3}|XXX)/" +
    "ch(?P<c>\\d{1}|XX)/" +
    "z(?P<z>\\d{2}|XX)" +
    "(?P<suffix>.*)" +
    "\\." +
    "(?P<extension>.+)"
)
class LSMImageFilename:
    """Value object for LSM image paths of the form
    <experiment>/<well>_<timestamp>/p<f>/ch<c>/z<z><suffix>.<extension>.

    f, z and c are ints, or None when the filename carried the XXX/XX
    placeholders.
    """

    @classmethod
    def parse(cls, image_filename_str):
        """Parse a path string into an LSMImageFilename.

        Raises Exception when the string does not match IMAGE_FILE_RE.
        """
        match = IMAGE_FILE_RE.match(image_filename_str)
        if not match:
            raise Exception("invalid image filename: %s" % image_filename_str)
        return cls(
            experiment=match["experiment"],
            well=match["well"],
            timestamp=match["timestamp"],
            # Placeholder components are stored as None, not 0.
            f=(None if match["f"] == "XXX" else int(match["f"])),
            z=(None if match["z"] == "XX" else int(match["z"])),
            c=(None if match["c"] == "XX" else int(match["c"])),
            suffix=match["suffix"],
            extension=match["extension"],
        )

    def __init__(self, experiment, well, timestamp, f, z, c, suffix, extension):
        self.experiment = experiment
        self.well = well
        self.timestamp = timestamp
        self.f = f  # field index (int) or None for the XXX placeholder
        self.z = z  # z-slice index (int) or None for the XX placeholder
        self.c = c  # channel index (int) or None for the XX placeholder
        self.suffix = suffix
        self.extension = extension

    def __str__(self):
        # Re-renders the canonical path; placeholders come back as XXX/XX.
        return "%s/%s_%s/p%s/ch%s/z%s%s.%s" % (
            self.experiment,
            self.well,
            self.timestamp,
            self.f_str,
            self.c_str,
            self.z_str,
            self.suffix,
            self.extension
        )

    def __copy__(self):
        return LSMImageFilename(
            experiment=self.experiment,
            well=self.well,
            timestamp=self.timestamp,
            f=self.f,
            z=self.z,
            c=self.c,
            suffix=self.suffix,
            extension=self.extension
        )

    @property
    def f_str(self):
        # BUG FIX: previously `not self.f`, which rendered a legitimate
        # field index of 0 as the "XXX" placeholder. Only None means unknown.
        return "XXX" if self.f is None else ("%i" % self.f)

    @property
    def z_str(self):
        # BUG FIX: `not self.z` turned z == 0 into "XX"; test None explicitly.
        return "XX" if self.z is None else ("%02i" % self.z)

    @property
    def c_str(self):
        # BUG FIX: `not self.c` turned channel 0 into "XX"; test None explicitly.
        return "XX" if self.c is None else ("%i" % self.c)
|
{"/scripts/generate_all_maximum_projections.py": ["/generate_maximum_projection.py"], "/generate_all_maximum_projections.py": ["/generate_maximum_projection.py", "/models/image_filename.py", "/models/image_filename_glob.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_all_spot_positions.py": ["/generate_spot_positions.py", "/models/paths.py", "/models/swarm_job.py", "/models/image_filename.py", "/models/image_filename_glob.py"], "/generate_all_cropped_cell_images.py": ["/generate_cropped_cell_image.py", "/models/image_filename.py", "/models/image_filename_glob.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_all_nuclear_masks.py": ["/generate_nuclear_masks.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_maximum_projection.py": ["/models/paths.py", "/models/z_sliced_image.py"], "/generate_spot_positions.py": ["/models/generate_spot_positions_config.py", "/models/image_filename.py", "/models/paths.py"], "/generate_cropped_cell_image.py": ["/models/image_filename.py", "/models/nuclear_mask.py", "/models/paths.py"], "/generate_all_distance_transforms.py": ["/generate_distance_transform.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_distance_transform.py": ["/models/paths.py"], "/generate_all_nuclear_segmentations.py": ["/generate_nuclear_segmentation.py", "/models/paths.py", "/models/swarm_job.py", "/models/image_filename_glob.py"], "/models/z_sliced_image.py": ["/models/image_filename.py"], "/models/image_filename.py": ["/models/image_name_dictionaries/image_filename_CV.py", "/models/image_name_dictionaries/image_filename_LSM.py"], "/generate_spot_result_line.py": ["/models/image_filename.py", "/models/paths.py"], "/generate_nuclear_segmentation.py": ["/models/image_filename.py", "/models/paths.py"], "/generate_nuclear_masks.py": ["/models/nuclear_mask.py", "/models/paths.py"], "/models/image_filename_glob.py": ["/models/image_name_dictionaries/image_filename_glob_CV.py", 
"/models/image_name_dictionaries/image_filename_glob_LSM.py"], "/generate_all_spot_result_lines.py": ["/generate_spot_result_line.py", "/models/paths.py", "/models/image_filename.py", "/models/image_filename_glob.py", "/models/swarm_job.py"], "/generate_spot_results_file.py": ["/models/image_filename.py", "/models/image_filename_glob.py", "/models/paths.py"]}
|
31,852,767
|
kfinn/elizabeth-pipeline
|
refs/heads/main
|
/models/nuclear_mask.py
|
import skimage
class NuclearMask:
    """A boolean mask for one nucleus plus its (row, col) offset within the
    full segmentation image."""

    def __init__(self, mask, offset):
        self.mask = mask
        self.offset = offset

    @classmethod
    def build(cls, masks, regionprops):
        """Crop the labeled image down to one region's bounding box and
        binarize it against that region's label."""
        min_row, min_col, max_row, max_col = regionprops.bbox
        cropped = masks[min_row:max_row, min_col:max_col] == regionprops.label
        return cls(cropped, (min_row, min_col))
|
{"/scripts/generate_all_maximum_projections.py": ["/generate_maximum_projection.py"], "/generate_all_maximum_projections.py": ["/generate_maximum_projection.py", "/models/image_filename.py", "/models/image_filename_glob.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_all_spot_positions.py": ["/generate_spot_positions.py", "/models/paths.py", "/models/swarm_job.py", "/models/image_filename.py", "/models/image_filename_glob.py"], "/generate_all_cropped_cell_images.py": ["/generate_cropped_cell_image.py", "/models/image_filename.py", "/models/image_filename_glob.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_all_nuclear_masks.py": ["/generate_nuclear_masks.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_maximum_projection.py": ["/models/paths.py", "/models/z_sliced_image.py"], "/generate_spot_positions.py": ["/models/generate_spot_positions_config.py", "/models/image_filename.py", "/models/paths.py"], "/generate_cropped_cell_image.py": ["/models/image_filename.py", "/models/nuclear_mask.py", "/models/paths.py"], "/generate_all_distance_transforms.py": ["/generate_distance_transform.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_distance_transform.py": ["/models/paths.py"], "/generate_all_nuclear_segmentations.py": ["/generate_nuclear_segmentation.py", "/models/paths.py", "/models/swarm_job.py", "/models/image_filename_glob.py"], "/models/z_sliced_image.py": ["/models/image_filename.py"], "/models/image_filename.py": ["/models/image_name_dictionaries/image_filename_CV.py", "/models/image_name_dictionaries/image_filename_LSM.py"], "/generate_spot_result_line.py": ["/models/image_filename.py", "/models/paths.py"], "/generate_nuclear_segmentation.py": ["/models/image_filename.py", "/models/paths.py"], "/generate_nuclear_masks.py": ["/models/nuclear_mask.py", "/models/paths.py"], "/models/image_filename_glob.py": ["/models/image_name_dictionaries/image_filename_glob_CV.py", 
"/models/image_name_dictionaries/image_filename_glob_LSM.py"], "/generate_all_spot_result_lines.py": ["/generate_spot_result_line.py", "/models/paths.py", "/models/image_filename.py", "/models/image_filename_glob.py", "/models/swarm_job.py"], "/generate_spot_results_file.py": ["/models/image_filename.py", "/models/image_filename_glob.py", "/models/paths.py"]}
|
31,852,768
|
kfinn/elizabeth-pipeline
|
refs/heads/main
|
/models/image_filename_glob.py
|
import os
from models.image_name_dictionaries.image_filename_glob_CV import CVImageFilenameGlob
from models.image_name_dictionaries.image_filename_glob_LSM import LSMImageFilenameGlob

# Microscope naming scheme, selected once at import time from the FILE_TYPE
# environment variable ('CV' or 'LSM').
IMAGE_FILETYPE = os.environ.get('FILE_TYPE')

# Attribute names ImageFilenameGlob.from_image_filename copies off an
# image-filename object for the active scheme.
# NOTE(review): if FILE_TYPE is unset or any other value, neither branch runs
# and IMAGE_FILENAME_KEYS is never defined, so from_image_filename would fail
# with NameError — confirm FILE_TYPE is always set in deployment.
if IMAGE_FILETYPE == 'CV':
    IMAGE_FILENAME_KEYS = set([
        "experiment",
        "well",
        "t",
        "f",
        "l",
        "a",
        "z",
        "c",
        "suffix",
        "extension"
    ])
elif IMAGE_FILETYPE == 'LSM':
    IMAGE_FILENAME_KEYS = set([
        "experiment",
        "well",
        "timestamp",
        "f",
        "z",
        "c",
        "suffix",
        "extension"
    ])
class ImageFilenameGlob:
    """Scheme-agnostic wrapper that builds either a CV or an LSM filename
    glob, chosen by the FILE_TYPE environment variable, and delegates
    formatting/hashing/equality to the wrapped glob."""

    image_filetype = os.environ.get('FILE_TYPE')

    @classmethod
    def from_image_filename(cls, image_filename, excluding_keys=()):
        """Build a glob by copying the active scheme's IMAGE_FILENAME_KEYS
        (minus excluding_keys) off image_filename.

        excluding_keys default changed from a mutable [] to an immutable ()
        — it is only read, but a shared mutable default is an anti-pattern.
        """
        keys = IMAGE_FILENAME_KEYS - set(excluding_keys)
        return cls(**{key: getattr(image_filename, key) for key in keys})

    def __init__(self, experiment=None, well=None, timestamp=None, t=None, f=None, l=None, a=None, z=None, c=None, suffix=None, extension=None):
        # Only the parameters relevant to the active scheme are forwarded.
        if IMAGE_FILETYPE == 'CV':
            self.glob = CVImageFilenameGlob(experiment, well, t, f, l, a, z, c, suffix, extension)
        elif IMAGE_FILETYPE == 'LSM':
            self.glob = LSMImageFilenameGlob(experiment, well, timestamp, f, z, c, suffix, extension)
        # NOTE(review): any other FILE_TYPE leaves self.glob unset, so later
        # use raises AttributeError — confirm FILE_TYPE is validated upstream.

    def __str__(self):
        # Both branches of the original were identical; delegate directly.
        return str(self.glob)

    def __hash__(self):
        # Original duplicated this per filetype and could fall through to
        # return None (a TypeError from hash()); delegate unconditionally.
        return hash(self.glob)

    def __eq__(self, other):
        # Original could return None (not False) for an unknown FILE_TYPE;
        # the wrapped globs already perform their own isinstance checks.
        return isinstance(other, ImageFilenameGlob) and self.glob == other.glob
|
{"/scripts/generate_all_maximum_projections.py": ["/generate_maximum_projection.py"], "/generate_all_maximum_projections.py": ["/generate_maximum_projection.py", "/models/image_filename.py", "/models/image_filename_glob.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_all_spot_positions.py": ["/generate_spot_positions.py", "/models/paths.py", "/models/swarm_job.py", "/models/image_filename.py", "/models/image_filename_glob.py"], "/generate_all_cropped_cell_images.py": ["/generate_cropped_cell_image.py", "/models/image_filename.py", "/models/image_filename_glob.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_all_nuclear_masks.py": ["/generate_nuclear_masks.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_maximum_projection.py": ["/models/paths.py", "/models/z_sliced_image.py"], "/generate_spot_positions.py": ["/models/generate_spot_positions_config.py", "/models/image_filename.py", "/models/paths.py"], "/generate_cropped_cell_image.py": ["/models/image_filename.py", "/models/nuclear_mask.py", "/models/paths.py"], "/generate_all_distance_transforms.py": ["/generate_distance_transform.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_distance_transform.py": ["/models/paths.py"], "/generate_all_nuclear_segmentations.py": ["/generate_nuclear_segmentation.py", "/models/paths.py", "/models/swarm_job.py", "/models/image_filename_glob.py"], "/models/z_sliced_image.py": ["/models/image_filename.py"], "/models/image_filename.py": ["/models/image_name_dictionaries/image_filename_CV.py", "/models/image_name_dictionaries/image_filename_LSM.py"], "/generate_spot_result_line.py": ["/models/image_filename.py", "/models/paths.py"], "/generate_nuclear_segmentation.py": ["/models/image_filename.py", "/models/paths.py"], "/generate_nuclear_masks.py": ["/models/nuclear_mask.py", "/models/paths.py"], "/models/image_filename_glob.py": ["/models/image_name_dictionaries/image_filename_glob_CV.py", 
"/models/image_name_dictionaries/image_filename_glob_LSM.py"], "/generate_all_spot_result_lines.py": ["/generate_spot_result_line.py", "/models/paths.py", "/models/image_filename.py", "/models/image_filename_glob.py", "/models/swarm_job.py"], "/generate_spot_results_file.py": ["/models/image_filename.py", "/models/image_filename_glob.py", "/models/paths.py"]}
|
31,852,769
|
kfinn/elizabeth-pipeline
|
refs/heads/main
|
/generate_all_spot_result_lines.py
|
import traceback
from datetime import datetime
import cli.log
import logging
from generate_spot_result_line import generate_spot_result_line_cli_str
from models.paths import *
from models.image_filename import ImageFilename
from models.image_filename_glob import ImageFilenameGlob
from models.swarm_job import SwarmJob, shard_job_params
FILES_PER_CALL_COUNT = 20000
MEMORY = 2
class GenerateAllSpotResultLinesJob:
    """Fans generate_spot_result_line calls out over a SwarmJob, one shard
    of FILES_PER_CALL_COUNT spot files per call."""

    def __init__(
        self,
        spots_source_directory,
        z_centers_source_directory,
        distance_transforms_source_directory,
        nuclear_masks_source_directory_path,
        destination,
        log=None
    ):
        # BUG FIX: `log` was a required positional parameter, but the CLI
        # entry point constructs this job with only the first five arguments,
        # which raised TypeError. Defaulting to None keeps both call shapes
        # working; SwarmJob receives it as the log directory.
        self.spots_source_directory = spots_source_directory
        self.z_centers_source_directory = z_centers_source_directory
        self.distance_transforms_source_directory = distance_transforms_source_directory
        self.nuclear_masks_source_directory_path = nuclear_masks_source_directory_path
        self.destination = destination
        self.logdir = log
        self.logger = logging.getLogger()

    def run(self):
        """Submit all sharded command lines via SwarmJob."""
        SwarmJob(
            self.spots_source_directory,
            self.destination_path,
            self.job_name,
            self.jobs,
            self.logdir,
            MEMORY,
            FILES_PER_CALL_COUNT
        ).run()

    @property
    def jobs(self):
        # CLI command strings, one per shard of spot source paths (cached).
        if not hasattr(self, "_jobs"):
            spot_source_paths_shards = shard_job_params(self.spot_source_paths, FILES_PER_CALL_COUNT)
            self._jobs = [
                generate_spot_result_line_cli_str(
                    spot_source_paths_shard,
                    self.spots_source_directory,
                    self.z_centers_source_directory,
                    self.distance_transforms_source_directory,
                    self.nuclear_masks_source_directory_path,
                    self.destination
                )
                for spot_source_paths_shard in spot_source_paths_shards
            ]
        return self._jobs

    @property
    def job_name(self):
        # Timestamped name so repeated submissions do not collide (cached).
        if not hasattr(self, "_job_name"):
            self._job_name = "generate_all_spot_result_lines_%s" % datetime.now().strftime("%Y%m%d%H%M%S")
        return self._job_name

    @property
    def spots_source_directory_path(self):
        # Resolved source directory; validated to exist on first access.
        if not hasattr(self, "_spots_source_directory_path"):
            self._spots_source_directory_path = source_path(self.spots_source_directory)
            if not self._spots_source_directory_path.is_dir():
                raise Exception("spots source directory does not exist")
        return self._spots_source_directory_path

    @property
    def spot_source_paths(self):
        # Per-spot .npy files anywhere under the spots source directory.
        return self.spots_source_directory_path.rglob(str(ImageFilenameGlob(suffix="_nucleus_???_spot_*", extension="npy")))

    @property
    def destination_path(self):
        if not hasattr(self, "_destination_path"):
            self._destination_path = destination_path(self.destination)
        return self._destination_path
@cli.log.LoggingApp
def generate_all_spot_result_lines_cli(app):
    """CLI entry point: build the job from parsed params and run it,
    printing (not propagating) any failure."""
    # NOTE(review): GenerateAllSpotResultLinesJob.__init__ as written declares
    # a sixth parameter (log) that is not supplied here — confirm a log
    # argument is passed or defaulted, otherwise this call fails.
    try:
        GenerateAllSpotResultLinesJob(
            app.params.spots_source_directory,
            app.params.z_centers_source_directory,
            app.params.distance_transforms_source_directory,
            app.params.nuclear_masks_source_directory_path,
            app.params.destination,
        ).run()
    except Exception as exception:
        traceback.print_exc()

# All params are optional positionals with placeholder defaults.
generate_all_spot_result_lines_cli.add_param("spots_source_directory", default="todo", nargs="?")
generate_all_spot_result_lines_cli.add_param("z_centers_source_directory", default="todo", nargs="?")
generate_all_spot_result_lines_cli.add_param("distance_transforms_source_directory", default="todo", nargs="?")
generate_all_spot_result_lines_cli.add_param("nuclear_masks_source_directory_path", default="todo", nargs="?")
generate_all_spot_result_lines_cli.add_param("destination", default="C:\\\\Users\\finne\\Documents\\python\\spot_result_lines\\", nargs="?")

if __name__ == "__main__":
    generate_all_spot_result_lines_cli.run()
|
{"/scripts/generate_all_maximum_projections.py": ["/generate_maximum_projection.py"], "/generate_all_maximum_projections.py": ["/generate_maximum_projection.py", "/models/image_filename.py", "/models/image_filename_glob.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_all_spot_positions.py": ["/generate_spot_positions.py", "/models/paths.py", "/models/swarm_job.py", "/models/image_filename.py", "/models/image_filename_glob.py"], "/generate_all_cropped_cell_images.py": ["/generate_cropped_cell_image.py", "/models/image_filename.py", "/models/image_filename_glob.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_all_nuclear_masks.py": ["/generate_nuclear_masks.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_maximum_projection.py": ["/models/paths.py", "/models/z_sliced_image.py"], "/generate_spot_positions.py": ["/models/generate_spot_positions_config.py", "/models/image_filename.py", "/models/paths.py"], "/generate_cropped_cell_image.py": ["/models/image_filename.py", "/models/nuclear_mask.py", "/models/paths.py"], "/generate_all_distance_transforms.py": ["/generate_distance_transform.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_distance_transform.py": ["/models/paths.py"], "/generate_all_nuclear_segmentations.py": ["/generate_nuclear_segmentation.py", "/models/paths.py", "/models/swarm_job.py", "/models/image_filename_glob.py"], "/models/z_sliced_image.py": ["/models/image_filename.py"], "/models/image_filename.py": ["/models/image_name_dictionaries/image_filename_CV.py", "/models/image_name_dictionaries/image_filename_LSM.py"], "/generate_spot_result_line.py": ["/models/image_filename.py", "/models/paths.py"], "/generate_nuclear_segmentation.py": ["/models/image_filename.py", "/models/paths.py"], "/generate_nuclear_masks.py": ["/models/nuclear_mask.py", "/models/paths.py"], "/models/image_filename_glob.py": ["/models/image_name_dictionaries/image_filename_glob_CV.py", 
"/models/image_name_dictionaries/image_filename_glob_LSM.py"], "/generate_all_spot_result_lines.py": ["/generate_spot_result_line.py", "/models/paths.py", "/models/image_filename.py", "/models/image_filename_glob.py", "/models/swarm_job.py"], "/generate_spot_results_file.py": ["/models/image_filename.py", "/models/image_filename_glob.py", "/models/paths.py"]}
|
31,852,770
|
kfinn/elizabeth-pipeline
|
refs/heads/main
|
/models/image_name_dictionaries/image_filename_glob_CV.py
|
# Attribute names that CVImageFilenameGlob.from_image_filename copies off an
# image-filename object.
IMAGE_FILENAME_KEYS = set([
    "experiment",
    "well",
    "t",
    "f",
    "l",
    "a",
    "z",
    "c",
    "suffix",
    "extension"
])
class CVImageFilenameGlob:
    """Builds a filesystem glob for CV-scheme image filenames of the form
    <experiment>_<well>_T####F###L##A##Z##C##<suffix>.<extension>.

    Any component left as None is rendered as a wildcard of the correct
    width in __str__.
    """

    @classmethod
    def from_image_filename(cls, image_filename, excluding_keys=()):
        """Copy IMAGE_FILENAME_KEYS (minus excluding_keys) off image_filename.

        excluding_keys default changed from a mutable [] to an immutable ().
        """
        keys = IMAGE_FILENAME_KEYS - set(excluding_keys)
        return cls(**{key: getattr(image_filename, key) for key in keys})

    def __init__(self, experiment=None, well=None, t=None, f=None, l=None, a=None, z=None, c=None, suffix=None, extension=None):
        self.experiment = experiment
        self.well = well
        self.t = t
        self.f = f
        self.l = l
        self.a = a
        self.z = z
        self.c = c
        self.suffix = suffix
        self.extension = extension

    def __str__(self):
        return ("%s_%s_T%sF%sL%sA%sZ%sC%s%s.%s" % (
            self.experiment_glob,
            self.well_glob,
            self.t_glob,
            self.f_glob,
            self.l_glob,
            self.a_glob,
            self.z_glob,
            self.c_glob,
            self.suffix_glob,
            self.extension_glob
        ))

    def __hash__(self):
        return hash((self.experiment, self.well, self.t, self.f, self.l, self.a, self.z, self.c, self.suffix, self.extension))

    def __eq__(self, other):
        return (
            isinstance(other, CVImageFilenameGlob) and
            self.experiment == other.experiment and
            self.well == other.well and
            self.t == other.t and
            self.f == other.f and
            self.l == other.l and
            self.a == other.a and
            self.z == other.z and
            self.c == other.c and
            self.suffix == other.suffix and
            self.extension == other.extension
        )

    # Each *_glob property renders the concrete value when set, otherwise a
    # wildcard of matching width. IDIOM FIX throughout: `is not None` replaces
    # the original `!= None` comparisons.

    @property
    def experiment_glob(self):
        return self.experiment if self.experiment is not None else "*"

    @property
    def well_glob(self):
        return self.well if self.well is not None else "?" * 3

    @property
    def t_glob(self):
        return "%04i" % self.t if self.t is not None else "?" * 4

    @property
    def f_glob(self):
        return "%03i" % self.f if self.f is not None else "?" * 3

    @property
    def l_glob(self):
        return "%02i" % self.l if self.l is not None else "?" * 2

    @property
    def a_glob(self):
        return "%02i" % self.a if self.a is not None else "?" * 2

    @property
    def z_glob(self):
        return "%02i" % self.z if self.z is not None else "?" * 2

    @property
    def c_glob(self):
        return "%02i" % self.c if self.c is not None else "?" * 2

    @property
    def suffix_glob(self):
        return self.suffix if self.suffix is not None else "*"

    @property
    def extension_glob(self):
        return self.extension if self.extension is not None else "*"
|
{"/scripts/generate_all_maximum_projections.py": ["/generate_maximum_projection.py"], "/generate_all_maximum_projections.py": ["/generate_maximum_projection.py", "/models/image_filename.py", "/models/image_filename_glob.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_all_spot_positions.py": ["/generate_spot_positions.py", "/models/paths.py", "/models/swarm_job.py", "/models/image_filename.py", "/models/image_filename_glob.py"], "/generate_all_cropped_cell_images.py": ["/generate_cropped_cell_image.py", "/models/image_filename.py", "/models/image_filename_glob.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_all_nuclear_masks.py": ["/generate_nuclear_masks.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_maximum_projection.py": ["/models/paths.py", "/models/z_sliced_image.py"], "/generate_spot_positions.py": ["/models/generate_spot_positions_config.py", "/models/image_filename.py", "/models/paths.py"], "/generate_cropped_cell_image.py": ["/models/image_filename.py", "/models/nuclear_mask.py", "/models/paths.py"], "/generate_all_distance_transforms.py": ["/generate_distance_transform.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_distance_transform.py": ["/models/paths.py"], "/generate_all_nuclear_segmentations.py": ["/generate_nuclear_segmentation.py", "/models/paths.py", "/models/swarm_job.py", "/models/image_filename_glob.py"], "/models/z_sliced_image.py": ["/models/image_filename.py"], "/models/image_filename.py": ["/models/image_name_dictionaries/image_filename_CV.py", "/models/image_name_dictionaries/image_filename_LSM.py"], "/generate_spot_result_line.py": ["/models/image_filename.py", "/models/paths.py"], "/generate_nuclear_segmentation.py": ["/models/image_filename.py", "/models/paths.py"], "/generate_nuclear_masks.py": ["/models/nuclear_mask.py", "/models/paths.py"], "/models/image_filename_glob.py": ["/models/image_name_dictionaries/image_filename_glob_CV.py", 
"/models/image_name_dictionaries/image_filename_glob_LSM.py"], "/generate_all_spot_result_lines.py": ["/generate_spot_result_line.py", "/models/paths.py", "/models/image_filename.py", "/models/image_filename_glob.py", "/models/swarm_job.py"], "/generate_spot_results_file.py": ["/models/image_filename.py", "/models/image_filename_glob.py", "/models/paths.py"]}
|
31,852,771
|
kfinn/elizabeth-pipeline
|
refs/heads/main
|
/models/image_name_dictionaries/image_filename_glob_LSM.py
|
# Attribute names that LSMImageFilenameGlob.from_image_filename copies off an
# image-filename object.
IMAGE_FILENAME_KEYS = set([
    "experiment",
    "well",
    "timestamp",
    "f",
    "z",
    "c",
    "suffix",
    "extension"
])
class LSMImageFilenameGlob:
    """Builds a filesystem glob for LSM-scheme image paths of the form
    <experiment>/<well>_<timestamp>/p<f>/ch<c>/z<z><suffix>.<extension>.

    Any component left as None is rendered as a wildcard in __str__.
    """

    @classmethod
    def from_image_filename(cls, image_filename, excluding_keys=()):
        """Copy IMAGE_FILENAME_KEYS (minus excluding_keys) off image_filename.

        excluding_keys default changed from a mutable [] to an immutable ().
        """
        keys = IMAGE_FILENAME_KEYS - set(excluding_keys)
        return cls(**{key: getattr(image_filename, key) for key in keys})

    def __init__(self, experiment=None, well=None, timestamp=None, f=None, z=None, c=None, suffix=None, extension=None):
        self.experiment = experiment
        self.well = well
        self.timestamp = timestamp
        self.f = f
        self.z = z
        self.c = c
        self.suffix = suffix
        self.extension = extension

    def __str__(self):
        return ("%s/%s_%s/p%s/ch%s/z%s%s.%s" % (
            self.experiment_glob,
            self.well_glob,
            self.timestamp_glob,
            self.f_glob,
            self.c_glob,
            self.z_glob,
            self.suffix_glob,
            self.extension_glob
        ))

    def __hash__(self):
        return hash((self.experiment, self.well, self.timestamp, self.f, self.z, self.c, self.suffix, self.extension))

    def __eq__(self, other):
        return (
            isinstance(other, LSMImageFilenameGlob) and
            self.experiment == other.experiment and
            self.well == other.well and
            self.timestamp == other.timestamp and
            self.f == other.f and
            self.z == other.z and
            self.c == other.c and
            self.suffix == other.suffix and
            self.extension == other.extension
        )

    # Each *_glob property renders the concrete value when set, otherwise a
    # wildcard. IDIOM FIX throughout: `is not None` replaces the original
    # `!= None` comparisons.

    @property
    def experiment_glob(self):
        return self.experiment if self.experiment is not None else "*"

    @property
    def well_glob(self):
        return self.well if self.well is not None else "*"

    @property
    def timestamp_glob(self):
        # Fixed-width wildcard matching YYYY_MM_DD__HH_MM_SS.
        return self.timestamp if self.timestamp is not None else "????_??_??__??_??_??"

    @property
    def f_glob(self):
        return "%i" % self.f if self.f is not None else "*"

    @property
    def z_glob(self):
        return "%02i" % self.z if self.z is not None else "?" * 2

    @property
    def c_glob(self):
        # NOTE(review): the fallback "?*" (one-or-more chars) differs from
        # z_glob's fixed-width "??" — preserved as-is, but confirm it is not
        # a typo for "?".
        return "%i" % self.c if self.c is not None else "?*"

    @property
    def suffix_glob(self):
        return self.suffix if self.suffix is not None else "*"

    @property
    def extension_glob(self):
        return self.extension if self.extension is not None else "*"
|
{"/scripts/generate_all_maximum_projections.py": ["/generate_maximum_projection.py"], "/generate_all_maximum_projections.py": ["/generate_maximum_projection.py", "/models/image_filename.py", "/models/image_filename_glob.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_all_spot_positions.py": ["/generate_spot_positions.py", "/models/paths.py", "/models/swarm_job.py", "/models/image_filename.py", "/models/image_filename_glob.py"], "/generate_all_cropped_cell_images.py": ["/generate_cropped_cell_image.py", "/models/image_filename.py", "/models/image_filename_glob.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_all_nuclear_masks.py": ["/generate_nuclear_masks.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_maximum_projection.py": ["/models/paths.py", "/models/z_sliced_image.py"], "/generate_spot_positions.py": ["/models/generate_spot_positions_config.py", "/models/image_filename.py", "/models/paths.py"], "/generate_cropped_cell_image.py": ["/models/image_filename.py", "/models/nuclear_mask.py", "/models/paths.py"], "/generate_all_distance_transforms.py": ["/generate_distance_transform.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_distance_transform.py": ["/models/paths.py"], "/generate_all_nuclear_segmentations.py": ["/generate_nuclear_segmentation.py", "/models/paths.py", "/models/swarm_job.py", "/models/image_filename_glob.py"], "/models/z_sliced_image.py": ["/models/image_filename.py"], "/models/image_filename.py": ["/models/image_name_dictionaries/image_filename_CV.py", "/models/image_name_dictionaries/image_filename_LSM.py"], "/generate_spot_result_line.py": ["/models/image_filename.py", "/models/paths.py"], "/generate_nuclear_segmentation.py": ["/models/image_filename.py", "/models/paths.py"], "/generate_nuclear_masks.py": ["/models/nuclear_mask.py", "/models/paths.py"], "/models/image_filename_glob.py": ["/models/image_name_dictionaries/image_filename_glob_CV.py", 
"/models/image_name_dictionaries/image_filename_glob_LSM.py"], "/generate_all_spot_result_lines.py": ["/generate_spot_result_line.py", "/models/paths.py", "/models/image_filename.py", "/models/image_filename_glob.py", "/models/swarm_job.py"], "/generate_spot_results_file.py": ["/models/image_filename.py", "/models/image_filename_glob.py", "/models/paths.py"]}
|
31,852,772
|
kfinn/elizabeth-pipeline
|
refs/heads/main
|
/generate_spot_results_file.py
|
import traceback
import cli.log
from models.image_filename import ImageFilename
from models.image_filename_glob import ImageFilenameGlob
from models.paths import *
class GenerateSpotResultsFileJob:
    """Concatenates every per-spot result-line CSV found under `source` into
    a single spot-positions CSV in `destination`, keeping exactly one header
    row (taken from an arbitrary input file)."""

    def __init__(self, source, destination):
        self.source = source
        self.destination = destination

    def run(self):
        """Write the shared header, then append every non-blank data row."""
        with open(self.destination_filename, 'w') as output:
            output.write(self.headers)
            for line_path in self.result_line_paths:
                with open(line_path) as line_file:
                    next(line_file)  # each input repeats the header; drop it
                    for row in line_file:
                        if row.isspace():
                            continue
                        output.write(row)

    @property
    def source_path(self):
        # Resolved source directory, validated to exist on first access.
        if not hasattr(self, "_source_path"):
            self._source_path = source_path(self.source)
            if not self._source_path.is_dir():
                raise Exception("spots source directory does not exist")
        return self._source_path

    @property
    def result_line_paths(self):
        # Fresh generator of matching CSV paths on every access.
        pattern = ImageFilenameGlob(suffix="_nucleus_???_spot_*", extension="csv")
        return self.source_path.rglob(str(pattern))

    @property
    def arbitrary_result_line_path(self):
        # Any one input file; used to derive the header and experiment name.
        if not hasattr(self, "_arbitrary_result_line_path"):
            self._arbitrary_result_line_path = next(self.result_line_paths)
        return self._arbitrary_result_line_path

    @property
    def arbitrary_result_line_image_filename(self):
        if not hasattr(self, "_arbitrary_result_line_image_filename"):
            relative = self.arbitrary_result_line_path.relative_to(self.source_path)
            self._arbitrary_result_line_image_filename = ImageFilename.parse(str(relative))
        return self._arbitrary_result_line_image_filename

    @property
    def destination_path(self):
        if not hasattr(self, "_destination_path"):
            self._destination_path = destination_path(self.destination)
        return self._destination_path

    @property
    def destination_filename(self):
        # Output file named after the experiment of the sampled input.
        if not hasattr(self, "_destination_filename"):
            experiment = self.arbitrary_result_line_image_filename.experiment
            self._destination_filename = self.destination_path / ("%s_spot_positions.csv" % experiment)
        return self._destination_filename

    @property
    def headers(self):
        # First line of the sampled input file, cached.
        if not hasattr(self, "_headers"):
            with open(self.arbitrary_result_line_path) as sample_file:
                self._headers = next(sample_file)
        return self._headers
@cli.log.LoggingApp
def generate_spot_results_file_cli(app):
    """CLI entry point: merge all spot result-line CSVs under `source` into
    one results file in `destination`; failures are printed, not raised."""
    try:
        GenerateSpotResultsFileJob(
            app.params.source,
            app.params.destination,
        ).run()
    except Exception as exception:
        traceback.print_exc()

generate_spot_results_file_cli.add_param("source")
generate_spot_results_file_cli.add_param("destination")

if __name__ == "__main__":
    generate_spot_results_file_cli.run()
|
{"/scripts/generate_all_maximum_projections.py": ["/generate_maximum_projection.py"], "/generate_all_maximum_projections.py": ["/generate_maximum_projection.py", "/models/image_filename.py", "/models/image_filename_glob.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_all_spot_positions.py": ["/generate_spot_positions.py", "/models/paths.py", "/models/swarm_job.py", "/models/image_filename.py", "/models/image_filename_glob.py"], "/generate_all_cropped_cell_images.py": ["/generate_cropped_cell_image.py", "/models/image_filename.py", "/models/image_filename_glob.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_all_nuclear_masks.py": ["/generate_nuclear_masks.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_maximum_projection.py": ["/models/paths.py", "/models/z_sliced_image.py"], "/generate_spot_positions.py": ["/models/generate_spot_positions_config.py", "/models/image_filename.py", "/models/paths.py"], "/generate_cropped_cell_image.py": ["/models/image_filename.py", "/models/nuclear_mask.py", "/models/paths.py"], "/generate_all_distance_transforms.py": ["/generate_distance_transform.py", "/models/paths.py", "/models/swarm_job.py"], "/generate_distance_transform.py": ["/models/paths.py"], "/generate_all_nuclear_segmentations.py": ["/generate_nuclear_segmentation.py", "/models/paths.py", "/models/swarm_job.py", "/models/image_filename_glob.py"], "/models/z_sliced_image.py": ["/models/image_filename.py"], "/models/image_filename.py": ["/models/image_name_dictionaries/image_filename_CV.py", "/models/image_name_dictionaries/image_filename_LSM.py"], "/generate_spot_result_line.py": ["/models/image_filename.py", "/models/paths.py"], "/generate_nuclear_segmentation.py": ["/models/image_filename.py", "/models/paths.py"], "/generate_nuclear_masks.py": ["/models/nuclear_mask.py", "/models/paths.py"], "/models/image_filename_glob.py": ["/models/image_name_dictionaries/image_filename_glob_CV.py", 
"/models/image_name_dictionaries/image_filename_glob_LSM.py"], "/generate_all_spot_result_lines.py": ["/generate_spot_result_line.py", "/models/paths.py", "/models/image_filename.py", "/models/image_filename_glob.py", "/models/swarm_job.py"], "/generate_spot_results_file.py": ["/models/image_filename.py", "/models/image_filename_glob.py", "/models/paths.py"]}
|
31,860,523
|
rodrigues-daniel/kerberos
|
refs/heads/master
|
/kerberosadm/apps.py
|
from django.apps import AppConfig


class KerberosadmConfig(AppConfig):
    """Django application configuration for the kerberosadm app."""
    name = 'kerberosadm'
    # Human-readable app name shown in the Django admin (Portuguese: "Views").
    verbose_name = 'Visualizações'
|
{"/src/permissoes/admin.py": ["/src/permissoes/filters.py", "/src/permissoes/models.py", "/kerberosadm/admin.py"], "/src/usuarios/admin.py": ["/kerberosadm/admin.py"]}
|
31,860,524
|
rodrigues-daniel/kerberos
|
refs/heads/master
|
/kerberosadm/admin.py
|
from django.contrib import admin
from django import *
from .models import *
from .filters import *
# Register your models here.
# NOTE(review): the wildcard imports above hide where names like Grupo and
# DropdownFilter come from — consider explicit imports.


class KerberosAdminSite(admin.AdminSite):
    """Custom admin site; all ModelAdmins in this module register on it."""
    # Title shown in the admin header.
    site_header = 'Monty Python administration'


admin_site = KerberosAdminSite(name="dcadmin")
class GrupoAdmin(admin.ModelAdmin):
    """Admin configuration for Grupo: form fields, list columns, filters,
    and text search on the group name."""
    fields = ("nomegrupo","ativo")
    list_display = ("nomegrupo","ativo",)
    list_filter = ('nomegrupo','ativo',)
    search_fields = ['nomegrupo']


admin_site.register(Grupo,GrupoAdmin)
#readonly_fields = ["datainclusao","usuarioinclusao"]
class ProdutoAdmin(admin.ModelAdmin):
    """Default admin configuration for Produto (no customization)."""
    pass


#readonly_fields = ["datainclusao","usuarioinclusao"]
admin_site.register(Produto,ProdutoAdmin)
class ProdutoGrupoAdmin(admin.ModelAdmin):
    """Admin configuration for the Produtogrupo link table; searches follow
    the FK into the product name."""
    list_display = ('idproduto', 'idgrupo',)
    list_filter = ('idproduto','idgrupo',)
    search_fields = ['idproduto__nomeproduto']


admin_site.register(Produtogrupo, ProdutoGrupoAdmin)
#class PrudutoProjetoAdmin(admin.ModelAdmin):
# list_display = ('idproduto', 'idprojeto',)
# list_filter = ('idproduto','idprojeto')
# search_fields = ['idproduto']
#readonly_fields = ["datainclusao","usuarioinclusao"]
class ProdutoSysDatabaseAdmin(admin.ModelAdmin):
list_display = ('idproduto', 'database_name',)
list_filter = ('idproduto','database_name')
search_fields = ['idproduto__nomeproduto']
admin_site.register(Produtosysdatabase,ProdutoSysDatabaseAdmin)
#@admin.register(Projeto)
#class ProjetoAdmin(admin.ModelAdmin):
# pass
#readonly_fields = ["datainclusao","usuarioinclusao"]
class UsuarioAdmin(admin.ModelAdmin):
list_display = ('nomeusuario', 'loginusuario')
search_fields = ['loginusuario','nomeusuario']
#readonly_fields = ["datainclusao","usuarioinclusao"]
admin_site.register(Usuario,UsuarioAdmin)
class UsuariogrupoAdmin(admin.ModelAdmin):
list_display = ('idusuario','idgrupo',)
list_filter = ('idusuario','idgrupo',)
search_fields = ['idusuario__nomeusuario']
#readonly_fields = ["datainclusao","usuarioinclusao"]
admin_site.register(Usuariogrupo,UsuariogrupoAdmin)
class UsuarioprodutoAdmin(admin.ModelAdmin):
list_display = ('idusuario','idproduto',)
list_filter = ('idusuario','idproduto',)
search_fields = ['idusuario__nomeusuario']
admin_site.register(Usuarioproduto,UsuarioprodutoAdmin)
class PermissoeslistAdmin(admin.ModelAdmin):
list_display = ('ambiente','permissionlevel','dbname','grupo','produto','usuario',)
list_filter = (
('ambiente',DropdownFilter)
, ('permissionlevel',DropdownFilter)
, ('dbname',DropdownFilter)
, ('grupo',DropdownFilter)
, ('produto',DropdownFilter)
, ('usuario',DropdownFilter),)
search_fields = ['ambiente','permissionlevel','dbname','grupo','produto','usuario']
readonly_fields = ('lider','grupo','usuario', 'produto', 'dbname', 'typeoflogin', 'typeofrole', 'permissionlevel',)
list_display_links = None
fieldsets = (
('Perfil',{'fields':('usuario','typeofrole',)}),
('Opções Avançadas',{'classes':('collapse',),'fields':('dbname',)}),
)
def get_actions(self, request):
actions = super().get_actions(request)
if 'delete_selected' in actions:
del actions['delete_selected']
return actions
def has_add_permission(self, request):
return False
#def has_change_permission(self, request):
#return False
def has_delete_permission(self, request,obj=None):
return False
admin_site.register(Permissoeslist, PermissoeslistAdmin)
|
{"/src/permissoes/admin.py": ["/src/permissoes/filters.py", "/src/permissoes/models.py", "/kerberosadm/admin.py"], "/src/usuarios/admin.py": ["/kerberosadm/admin.py"]}
|
31,860,525
|
rodrigues-daniel/kerberos
|
refs/heads/master
|
/kerberos/ldap.py
|
# BUG FIX: the import path was ``django._python3_ldap`` (no such module in
# Django); the package used by settings.py (INSTALLED_APPS and
# LDAP_AUTH_CLEAN_USER_DATA) is ``django_python3_ldap``.
from django_python3_ldap import utils


def clean_user_data(model_fields):
    """
    Transforms the user data loaded from
    LDAP into a form suitable for creating a user.
    """
    # Call the default handler.
    model_fields = utils.clean_user_data(model_fields)
    # Add our own data in: LDAP users get staff access but never superuser.
    model_fields["is_staff"] = True
    model_fields["is_superuser"] = False
    return model_fields


def menssagem():
    """Debug helper: print a hello-world message.

    The misspelled name ('menssagem') is kept for backward compatibility.
    """
    print("ola mundo")
|
{"/src/permissoes/admin.py": ["/src/permissoes/filters.py", "/src/permissoes/models.py", "/kerberosadm/admin.py"], "/src/usuarios/admin.py": ["/kerberosadm/admin.py"]}
|
31,860,526
|
rodrigues-daniel/kerberos
|
refs/heads/master
|
/kerberosadm/views.py
|
from django.shortcuts import render
from django.http import HttpResponse
from django.db import connection
def first_view(request):
    """Execute the [dbo].[permissions_list] stored procedure and return a
    placeholder response.

    The result set is fetched but not yet used in the rendered response.
    """
    result_set = None
    # ``with`` guarantees the cursor is closed even if execute() raises,
    # replacing the manual try/finally of the original.
    with connection.cursor() as cursor:
        #cursor.callproc('[dbo].[permissions_list]', ['', '', ''])
        cursor.execute("EXEC [dbo].[permissions_list] '%', '%', '%' ")
        # NOTE(review): ``return_value`` is backend-specific (pyodbc); plain
        # DB-API cursors do not expose it — confirm with the sql_server
        # backend configured in settings.
        if cursor.return_value == 1:
            result_set = cursor.fetchall()
    return HttpResponse("primeira view")
|
{"/src/permissoes/admin.py": ["/src/permissoes/filters.py", "/src/permissoes/models.py", "/kerberosadm/admin.py"], "/src/usuarios/admin.py": ["/kerberosadm/admin.py"]}
|
31,860,527
|
rodrigues-daniel/kerberos
|
refs/heads/master
|
/kerberosadm/urls.py
|
from django.urls import path
from django.contrib import admin
from . import views
# Branding for the stock admin site.
admin.site.site_header = 'Data Control Admin'
admin.site.site_title = 'Data Control Admin'
admin.site.index_title = 'Data Control Admin'

# URL routes of the kerberosadm app; the root maps to the demo view.
urlpatterns = [
    # ex: /polls/
    path('', views.first_view, name='first_view'),
    # ex: /polls/5/
]
|
{"/src/permissoes/admin.py": ["/src/permissoes/filters.py", "/src/permissoes/models.py", "/kerberosadm/admin.py"], "/src/usuarios/admin.py": ["/kerberosadm/admin.py"]}
|
31,860,528
|
rodrigues-daniel/kerberos
|
refs/heads/master
|
/kerberos/settings.py
|
"""
Django settings for kerberos project.
Generated by 'django-admin startproject' using Django 2.0.7.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, seeim
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'hq4#1e&7@6ql1b$649r3$!t8efzjg31=ua#dt3z!1brajk3bsk'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['10.24.0.222','127.0.0.1']
AUTH_LDAP_SERVER_URI = "ldaps://prd-ad01.tce.govrn:636"
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'kerberosadm.apps.KerberosadmConfig',
'django_python3_ldap',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.contrib.auth.middleware.RemoteUserMiddleware',
]
ROOT_URLCONF = 'kerberos.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'kerberos.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
try:
from kerberos import local_settings
except ImportError:
pass
DATABASES = {
'default': {
'ENGINE': 'sql_server.pyodbc',
'NAME': local_settings.NAME,
'HOST': local_settings.HOST,
'PORT': local_settings.PORT,
'USER': local_settings.USER,
'PASSWORD': local_settings.PASSWORD,
'OPTIONS': {
'driver': 'ODBC Driver 17 for SQL Server',
},
}
}
'''
import ldap
from django_auth_ldap.config import LDAPSearch, GroupOfNamesType
'''
AUTHENTICATION_BACKENDS = (
'django_python3_ldap.auth.LDAPBackend',
'django.contrib.auth.backends.ModelBackend',
'django.contrib.auth.backends.RemoteUserBackend',
)
# --- django-python3-ldap configuration --------------------------------------
# The URL of the LDAP server.
LDAP_AUTH_URL = "ldap://prd-ad01.tce.govrn:389"
# Initiate TLS on connection.
LDAP_AUTH_USE_TLS = False
# The LDAP search base for looking up users.
LDAP_AUTH_SEARCH_BASE = "dc=tce,dc=govrn"
# BUG FIX: LDAP_AUTH_FORMAT_USERNAME and LDAP_AUTH_ACTIVE_DIRECTORY_DOMAIN
# were assigned several times; only the last assignment of each was ever
# effective, so the earlier dead assignments (the plain AD username format
# and the "TCE" NetBIOS domain) have been removed.
LDAP_AUTH_FORMAT_USERNAME = "django_python3_ldap.utils.format_username_active_directory_principal"
LDAP_AUTH_ACTIVE_DIRECTORY_DOMAIN = "tce.govrn"
# Anonymous bind for the lookup connection.
LDAP_AUTH_CONNECTION_USERNAME = ""
LDAP_AUTH_CONNECTION_PASSWORD = ""
# Mapping of Django user fields to LDAP attributes.
LDAP_AUTH_USER_FIELDS = {
    "username": "sAMAccountName",
    "first_name": "givenName",
    "last_name": "sn",
    "email": "mail",
}
LDAP_AUTH_OBJECT_CLASS = "user"
# Post-processing hook applied to data pulled from LDAP.
LDAP_AUTH_CLEAN_USER_DATA = "django_python3_ldap.utils.clean_user_data"
# Send django_python3_ldap log records to the console at INFO level.
LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    "handlers": {
        "console": {
            "class": "logging.StreamHandler",
        },
    },
    "loggers": {
        "django_python3_ldap": {
            "handlers": ["console"],
            "level": "INFO",
        },
    },
}

# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'pt-br'

TIME_ZONE = 'America/Recife'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
|
{"/src/permissoes/admin.py": ["/src/permissoes/filters.py", "/src/permissoes/models.py", "/kerberosadm/admin.py"], "/src/usuarios/admin.py": ["/kerberosadm/admin.py"]}
|
31,876,001
|
alexbara2000/CodeJam
|
refs/heads/main
|
/app.py
|
from selenium import webdriver
# Open a YouTube tutorial video in Chrome via Selenium.
# NOTE(review): passing the driver path positionally is deprecated in
# Selenium 4 (use a Service object), and the driver is never quit.
PATH = "chromedriver.exe"
driver = webdriver.Chrome(PATH)
driver.get("https://www.youtube.com/watch?v=Xjv1sY630Uc&ab_channel=TechWithTim")
|
{"/app.py": ["/_init_.py", "/testGettingProce.py", "/server.py"], "/server.py": ["/_init_.py", "/testGettingProce.py", "/emailsend.py"], "/testGettingProce.py": ["/_init_.py"], "/objects/item.py": ["/_init_.py", "/testGettingProce.py"]}
|
31,887,794
|
MilanaShhanukova/users_recommendation-hackaton
|
refs/heads/main
|
/project_info.py
|
import pymongo
import numpy as np
import csv

# Module-level handles to the local MongoDB collections shared by this module.
client = pymongo.MongoClient("localhost", 27017)
db = client.mongo_bd
users = db.users_data
projects = db.projects
# class for every project
class Project:
    """An idea/project created by a user.

    ``label`` distinguishes ideas (0) from promoted projects (1); ``r`` is the
    accumulated rating from likes.
    """

    def __init__(self, name: str, sphere: str, main_creator, r=0, other_creators=None):
        self.name = name
        self.sphere = sphere
        self.r = r  # save r (accumulated rating)
        # BUG FIX: the default was a shared mutable list (``other_creators=list()``
        # is evaluated once at def time), so every Project created without the
        # argument appended to the SAME list. Use a None sentinel instead.
        self.creators = [] if other_creators is None else other_creators
        self.label = 0  # idea - 0, project - 1
        self.main_creator = main_creator  # object user

    # add calculated creators
    def add_creators(self, update_creators: list):
        """Append each new creator, skipping ones already registered."""
        for user in update_creators:
            if user not in self.creators:
                self.creators.append(user)

    # after like - update idea r
    def update_r(self, v_user: float):
        """Increase the rating by the liking user's weight ``v_user``."""
        self.r += v_user

    def make_idea_project(self):
        """Promote this idea to a project when its weighted rating is at least
        the mean of other projects in the same sphere.

        NOTE(review): ``projects.find()`` yields pymongo dicts, but the loop
        reads ``project.spheres`` / ``project.r`` via attribute access —
        confirm against the stored schema.
        """
        same_projects_rs = [project.r ** (-project.main_creator.v)
                            for project in projects.find() if project.spheres == self.sphere]
        if self.r ** (-self.main_creator.v) >= np.mean(same_projects_rs):
            self.label = 1
|
{"/interface.py": ["/users_info_prepare.py", "/project_info.py"], "/dataset.py": ["/users_info_prepare.py"], "/recommend_project.py": ["/project_info.py"]}
|
31,887,795
|
MilanaShhanukova/users_recommendation-hackaton
|
refs/heads/main
|
/random_data.py
|
from pymorphy2 import MorphAnalyzer

# Quick morphology check: normalize one inflected Russian word
# ("разработчиком" -> its lemma) and print the result.
morph = MorphAnalyzer()
word = morph.parse("разработчиком")[0].normal_form
print(word)
|
{"/interface.py": ["/users_info_prepare.py", "/project_info.py"], "/dataset.py": ["/users_info_prepare.py"], "/recommend_project.py": ["/project_info.py"]}
|
31,887,796
|
MilanaShhanukova/users_recommendation-hackaton
|
refs/heads/main
|
/dataset.py
|
from IPython.display import display
import pymongo
from users_info_prepare import Users_info
import random

# Local MongoDB: user documents live in mongo_bd.users_data.
client = pymongo.MongoClient("localhost", 27017)
db = client.mongo_bd
users = db.users_data
projects = db.projects
# Start from a clean users collection on every run.
users.drop()
def append_user(user_info: dict):
    """Insert ``user_info`` unless an identical document is already stored.

    Returns 1 when the document was inserted, 0 when it was already present.
    """
    if user_info in users.find():
        return 0
    users.insert_one(user_info)
    return 1
def get_user(user_name: str):
    """Look up a user document by first name; return 0 when not found.

    BUG FIX: pymongo cursors yield plain dicts, so the original attribute
    access (``user.name_first``) raised AttributeError on the first document;
    the field must be read with subscription.
    """
    for user in users.find():
        if user["name_first"] == user_name.lower():
            return user
    return 0
# Load the three line-aligned seed files (name[i] / profession[i] / spheres[i]).
with open("names.txt", encoding="utf-8") as file:
    users_names = file.read().split('\n')
with open("professions.txt", encoding="utf-8") as file:
    professions = file.read().split('\n')
with open("spheres.txt", encoding="utf-8") as spheres:
    areas = spheres.read().split('\n')

# Build and persist one user per name.
# NOTE(review): assumes the three files have the same number of lines —
# confirm; otherwise professions[i]/areas[i] raises IndexError.
for i in range(len(users_names)):
    name = users_names[i]
    prof = professions[i]
    sphere = areas[i].split()
    # A single random initial weight in [0, 1) for the user's spheres.
    user = Users_info(name, prof, sphere, [random.uniform(0, 1)])
    user.main_info()
    append_user(user.main_info_dict)

# Echo the stored documents for a quick sanity check.
for entry in users.find():
    print(entry)
|
{"/interface.py": ["/users_info_prepare.py", "/project_info.py"], "/dataset.py": ["/users_info_prepare.py"], "/recommend_project.py": ["/project_info.py"]}
|
31,887,797
|
MilanaShhanukova/users_recommendation-hackaton
|
refs/heads/main
|
/interface.py
|
from users_info_prepare import Users_info
from project_info import Project
class User_interface:
    """Thin wrapper exposing a user's actions: creating and liking projects."""

    def __init__(self, user):
        self.user = user  # user_info one user

    def make_project(self, name: str, sphere: str):
        """Create a project owned by the wrapped user and raise their v."""
        created = Project(name, sphere, self.user)
        self.user.projects.append(created)
        self.user.update_users_v(0.1, sphere)  # for creating a project v increase

    # save interests in projects
    def like_other_project(self, project):
        """Record a like: +0.02 interest in the liked project's sphere."""
        liked_sphere = project.sphere
        try:
            position = self.user.spheres.index(liked_sphere)
        except (ValueError, KeyError):
            # Sphere unknown to this user: register it with the base increment.
            self.user.spheres.append(liked_sphere)
            self.user.interests.append(0.02)
        else:
            self.user.interests[position] += 0.02
# testing: one user likes two projects and we watch their interests change.
random_user = Users_info("Иванова Милана", "программист", ["кулинария"], [0.05])
second_random = Users_info("Пирожкова Милана", "повар", ["кулинария"], [0.04])
random_project = Project("готовка_с_пирожковой", "программирование", second_random)
random_second_project = Project("готовка_с_пирожковой", "кулинария", second_random)
interface = User_interface(random_user)
interface.make_project("готовка", "кулинария")
# check before likes
print("Check what interest one user has:")
# BUG FIX: the loop variable was named ``int``, shadowing the builtin.
for idx in range(len(random_user.interests)):
    print(random_user.spheres[idx], random_user.interests[idx])
print("\nHere one user likes two projects in кулинария and программирование spheres \n")
interface.like_other_project(random_project)
interface.like_other_project(random_second_project)
# after likes
print("Check what interest one user has now")
for idx in range(len(random_user.interests)):
    print(random_user.spheres[idx], random_user.interests[idx])
|
{"/interface.py": ["/users_info_prepare.py", "/project_info.py"], "/dataset.py": ["/users_info_prepare.py"], "/recommend_project.py": ["/project_info.py"]}
|
31,887,798
|
MilanaShhanukova/users_recommendation-hackaton
|
refs/heads/main
|
/predictions.py
|
import pymongo
import pandas as pd
import numpy as np
from sklearn.neighbors import NearestNeighbors
from scipy.sparse import csr_matrix

# Known sphere vocabulary, one sphere per line.
with open("spheres.txt", encoding="utf-8") as file:
    known_spheres = file.read().split("\n")

client = pymongo.MongoClient("localhost", 27017)
db = client.mongo_bd
users = db.users_data
projects = db.projects

# Materialize all user documents; ``heading`` ends up holding the keys of
# the last document seen.
all_users = []
heading = []
for entry in users.find():
    all_users.append(entry)
    heading = entry.keys()
def matrix():
    """Build a users-by-spheres interest DataFrame.

    One row per user in ``all_users``, one column per sphere in
    ``known_spheres``; cells hold the user's stored interest value, 0 when
    the user has no entry for that sphere.
    """
    rows = [
        [user['spheres'].get(sphere_name, 0) for sphere_name in known_spheres]
        for user in all_users
    ]
    return pd.DataFrame(np.array(rows), columns=known_spheres)
# Fit a cosine k-NN model over the sparse interest matrix to find similar users.
partner_reccom = matrix()
partner_reccom_matrix = csr_matrix(partner_reccom.values)
# read about algorithm and try different
model_knn = NearestNeighbors(metric='cosine', algorithm='brute', n_neighbors=3)
model_knn.fit(partner_reccom_matrix)
# we will randomly choose one user
user_ind = 4
distances, indices = model_knn.kneighbors(partner_reccom.iloc[user_ind, :].values.reshape(1, -1),
                                          n_neighbors=7)
# check for random user, result are not that obvious cause there is not so much data
random_user = all_users[4]
for i in range(0, len(distances.flatten())):
    if i == 0:
        print(f"Recommendations for {all_users[user_ind]['name_first'] + ' ' + all_users[user_ind]['name_second']}")
    else:
        ind_person = partner_reccom.index[indices.flatten()[i]]
        # BUG FIX: the surname was read from the *query* user
        # (all_users[user_ind]) instead of the recommended user (ind_person),
        # printing mismatched names.
        user_found = all_users[ind_person]["name_first"] + " " + all_users[ind_person]["name_second"]
        print(f"{i} : {user_found} with distance {distances.flatten()[i]}")
|
{"/interface.py": ["/users_info_prepare.py", "/project_info.py"], "/dataset.py": ["/users_info_prepare.py"], "/recommend_project.py": ["/project_info.py"]}
|
31,887,799
|
MilanaShhanukova/users_recommendation-hackaton
|
refs/heads/main
|
/users_info_prepare.py
|
from yargy import Parser, rule, and_, not_
from yargy.interpretation import fact
from yargy.predicates import gram
from yargy.relations import gnc_relation
from yargy.pipelines import morph_pipeline
from pymorphy2 import MorphAnalyzer
# Shared morphological analyzer used to lemmatize professions and spheres.
morph = MorphAnalyzer()

#from system
# Seed vocabulary of interest spheres; extended at runtime by
# Users_info.get_users_sphere() when users bring new spheres.
known_spheres = ["IT",
                 "лингвистика",
                 "робототехника",
                 "инжинерия"]
class Users_info:
    """Parses and stores one user's name, profession and spheres of interest.

    Parsing uses yargy grammar rules with Russian morphology (pymorphy2);
    ``main_info()`` assembles the flat ``main_info_dict`` that is persisted
    to MongoDB by the callers.
    """

    def __init__(self, name: str, profession: str, sphere: list, v: list):
        self.name = name
        self.profession = profession  # profession of the user
        self.spheres = sphere
        self.spheres_v = {}  # the spheres in which user is interested
        self.interests = [0] * len(self.spheres)  # interests of the user in all spheres
        self.interests_spheres = {}
        # Seed professions; extended in get_users_professions().
        self.known_professions = [
            'управляющий директор',
            'вице-мэр',
            'повар',
            "разработчик"
        ]
        self.projects = []
        #self.known_spheres =
        self.main_info_dict = {}
        self.v = v
        self.activity = 0
        # Gender/number/case agreement relation shared by the name rules.
        self.gnc = gnc_relation()

    def prepare_name(self):
        """Build and return a yargy Parser for "Surname First-name" input."""
        Name = fact(
            'Name',
            ['first', 'last'],
        )
        LAST = and_(
            gram('Surn'),
            not_(gram('Abbr')),
        )
        FIRST = and_(
            gram('Name'),
            not_(gram('Abbr')),
        )
        # NOTE(review): this first NAME rule is immediately overwritten by the
        # second assignment below, so the gnc-matched variant is dead code.
        NAME = rule(LAST.interpretation(
            Name.last
        ).match(self.gnc),
            FIRST.interpretation(
                Name.first
            ).match(self.gnc)
        ).interpretation(
            Name
        )
        NAME = rule(gram('Surn').interpretation(
            Name.last.inflected()
        ),
            gram('Name').interpretation(
                Name.first.inflected()
            )
        ).interpretation(
            Name
        )
        name_parser = Parser(NAME)
        return name_parser

    def prepare_profession(self):
        """Build and return a Parser matching any known profession."""
        Profession = fact(
            "Profession",
            ["profession"],
        )
        PROFESSION = morph_pipeline(self.known_professions)
        PROFESSION = rule(
            PROFESSION.interpretation(
                Profession.profession.inflected()
            ),
        ).interpretation(
            Profession
        )
        position_parser = Parser(PROFESSION)
        return position_parser

    def prepare_sphere(self):
        """Build and return a Parser matching any globally known sphere."""
        Sphere = fact(
            "Sphere",
            ["domen"]
        )
        SPHERE = morph_pipeline(known_spheres)
        SPHERE = rule(
            SPHERE.interpretation(
                Sphere.domen.inflected()
            ),
        ).interpretation(
            Sphere
        )
        sphere_parser = Parser(SPHERE)
        return sphere_parser

    def get_users_professions(self):
        """Lemmatize the profession, learn it if new, and return the parsed list."""
        word = morph.parse(self.profession)[0].normal_form
        if word not in self.known_professions:
            self.known_professions.append(word)
        # update everything with new profession
        name_parser = self.prepare_profession()
        matched_profession = name_parser.findall(self.profession)
        professions = [match.fact.profession for match in matched_profession]
        return professions

    def get_users_sphere(self):
        """Lemmatize each sphere, learn new ones globally, and fill spheres_v."""
        # append new spheres in spheres_v
        for s in self.spheres:
            # clean form of s
            clean_s = morph.parse(s)[0].normal_form
            if clean_s not in known_spheres:
                known_spheres.append(clean_s)
            try:
                self.spheres_v[clean_s] = self.v[self.spheres.index(s)]
            except KeyError:
                # NOTE(review): list.index raises ValueError and self.v[...]
                # raises IndexError — KeyError never occurs here, so this
                # fallback to 0 is dead; presumably it was meant to cover a
                # shorter ``v`` list. Verify.
                self.spheres_v[clean_s] = 0

    def update_users_v(self, update_num: float, sphere: str):
        """Add ``update_num`` to the user's weight for ``sphere`` (create if new)."""
        try:
            self.spheres_v[sphere] += update_num
        except KeyError:
            self.spheres_v[sphere] = update_num

    def calculate_activity(self, update_num: float):
        """Accumulate an activity score increment."""
        self.activity += update_num

    def main_info(self):
        """Populate ``main_info_dict`` with spheres, profession, name parts,
        activity and projects; fall back to naive name splitting when the
        grammar fails."""
        self.get_users_sphere()
        self.main_info_dict["spheres"] = self.spheres_v
        self.main_info_dict["profession"] = self.get_users_professions()
        # findall() results are one-shot iterables, hence two separate calls.
        matched_name = self.prepare_name().findall(self.name)
        matched_name_ = self.prepare_name().findall(self.name)
        self.main_info_dict["name_first"] = "".join([match.fact.first for match in matched_name])
        self.main_info_dict["name_second"] = "".join([match.fact.last for match in matched_name_])
        # Fallback when the grammar did not recognize the name.
        if not self.main_info_dict["name_first"]:
            self.main_info_dict["name_first"] = self.name.split()[0].lower()
            self.main_info_dict["name_second"] = self.name.split()[1].lower()
        self.main_info_dict["activity"] = self.activity
        self.main_info_dict["projects"] = self.projects
|
{"/interface.py": ["/users_info_prepare.py", "/project_info.py"], "/dataset.py": ["/users_info_prepare.py"], "/recommend_project.py": ["/project_info.py"]}
|
31,887,800
|
MilanaShhanukova/users_recommendation-hackaton
|
refs/heads/main
|
/users.py
|
from yargy import Parser, rule, and_, not_
from yargy.interpretation import fact
from yargy.predicates import gram
from yargy.relations import gnc_relation
from yargy.pipelines import morph_pipeline
from IPython.display import display
# yargy facts and grammar fragments for parsing a demo user's name,
# position and sphere of interest.
Name = fact(
    'Name',
    ['first', 'last'],
)
LAST = and_(
    gram('Surn'),
    not_(gram('Abbr')),
)
FIRST = and_(
    gram('Name'),
    not_(gram('Abbr')),
)
Position = fact(
    "Position",
    ["position"],
)
Sphere = fact(
    "Sphere",
    ["domen"]
)
known_positions = [
    'управляющий директор',
    'вице-мэр',
    'повар'
]
known_spheres = [
    "IT",
    "лингвистика",
    "робототехника",
    "инжинерия",
]
POSITION = morph_pipeline(known_positions)
SPHERE = morph_pipeline(known_spheres)
gnc = gnc_relation()
# NOTE(review): this NAME rule is overwritten by the second NAME assignment
# further below, so the gnc-matched variant is dead code.
NAME = rule(
    FIRST.interpretation(
        Name.first
    ).match(gnc),
    LAST.interpretation(
        Name.last
    ).match(gnc)
).interpretation(
    Name
)
SPHERE = rule(
    SPHERE.interpretation(
        Sphere.domen.inflected()
    ),
).interpretation(
    Sphere
)
POSITION = rule(
    POSITION.interpretation(
        Position.position.inflected()
    ),
).interpretation(
    Position
)
NAME = rule(
    gram('Name').interpretation(
        Name.first.inflected()
    ),
    gram('Surn').interpretation(
        Name.last.inflected()
    )
).interpretation(
    Name
)
# Demo input.
users_name = "Егор Иванов"
users_profession = "управляющий директор"
users_sphere = "кулинария"
name_parser = Parser(NAME)
position_parser = Parser(POSITION)
sphere_parser = Parser(SPHERE)
matched_name = name_parser.findall(users_name)
matched_position = position_parser.findall(users_profession)
matched_sphere = sphere_parser.findall(users_sphere)
# Sphere not in the vocabulary: learn it and rebuild the sphere parser.
if not list(matched_sphere):
    known_spheres.append(users_sphere)
    SPHERE = morph_pipeline(known_spheres)
    SPHERE = rule(
        SPHERE.interpretation(
            Sphere.domen.inflected()
        ),
    ).interpretation(
        Sphere
    )
    sphere_parser = Parser(SPHERE)
    matched_sphere = sphere_parser.findall(users_sphere)
for match in matched_sphere:
    print(match.fact.domen)
|
{"/interface.py": ["/users_info_prepare.py", "/project_info.py"], "/dataset.py": ["/users_info_prepare.py"], "/recommend_project.py": ["/project_info.py"]}
|
31,887,801
|
MilanaShhanukova/users_recommendation-hackaton
|
refs/heads/main
|
/recommend_project.py
|
import random
import pymongo
import pandas as pd
from project_info import Project
from sklearn.neighbors import NearestNeighbors
from scipy.sparse import csr_matrix
# Local MongoDB collections.
client = pymongo.MongoClient("localhost", 27017)
db = client.mongo_bd
users = db.users_data
projects = db.projects

# Materialize user documents; ``heading`` keeps the keys of the last one.
all_users = []
heading = []
for entry in users.find():
    all_users.append(entry)
    heading = entry.keys()

# Hard-coded demo data: projects_names[i] belongs to spheres[i].
projects_names = ["приготовление пироженных",
                  "разработка чат-бота",
                  "создание робота",
                  "разработка алгоритма",
                  "разработка рекламы",
                  "анализ рынка",
                  "разработка занятия для старшеклассников",
                  "рисование",
                  "анализ английской морфологии",
                  "разработка голосового помощника",
                  "приготовление торта"]
spheres = ["кулинария",
           "питон",
           "робототехника",
           "программирование",
           "маркетинг",
           "экономика",
           "преподавание",
           "творчество",
           "лингвистика",
           "питон",
           "кулинария"]

# Build Project objects with random ratings; ``heading`` is re-bound to the
# attribute names so the DataFrame columns match Project.__dict__.
# NOTE(review): assumes at least len(projects_names) users exist in the
# collection — confirm, otherwise all_users[id_] raises IndexError.
all_projects = []
for id_ in range(len(projects_names)):
    pr = Project(projects_names[id_], spheres[id_], main_creator=all_users[id_], r=random.randint(0, 10))
    all_projects.append(pr.__dict__.values())
    heading = pr.__dict__.keys()

df = pd.DataFrame(all_projects, columns=heading)
# name x sphere matrix of ratings; missing combinations become 0.
projects_rec = df.pivot_table(index='name', columns='sphere', values='r').fillna(0)
partner_reccom_matrix = csr_matrix(projects_rec.values)
model_knn = NearestNeighbors(metric = 'cosine', algorithm = 'brute', n_neighbors=3)
model_knn.fit(partner_reccom_matrix)
# we will randomly choose one user
project_ind = 4
distances, indices = model_knn.kneighbors(projects_rec.iloc[project_ind, :].values.reshape(1, -1),
                                          n_neighbors=7)
# check for random user, result are not that obvious cause there is not so much data
random_project = projects_names[4]
for i in range(0, len(distances.flatten())):
    if i == 0:
        print(f"Recommendations for {random_project}")
    else:
        ind_project = projects_rec.index[indices.flatten()[i]]
        print(f"{i} : {ind_project} with distance {distances.flatten()[i]}")
|
{"/interface.py": ["/users_info_prepare.py", "/project_info.py"], "/dataset.py": ["/users_info_prepare.py"], "/recommend_project.py": ["/project_info.py"]}
|
31,887,802
|
MilanaShhanukova/users_recommendation-hackaton
|
refs/heads/main
|
/database.py
|
import pymongo
class DataBase_for_users:
    """Thin convenience wrapper around the ``users`` MongoDB collection."""

    def __init__(self):
        # Connect to the local MongoDB server; only the collection handle is
        # kept on the instance.
        connection = pymongo.MongoClient("localhost", 27017)
        database = connection.mongo_bd
        self.users = database.users
        #self.projects = db.projects

    def inserting(self, data):
        """Insert a single user document."""
        self.users.insert_one(data)

    def finding(self):
        """Return a cursor over all user documents."""
        return self.users.find()

    def replacing(self, one_dict, new_dict):
        """Replace the first document matching ``one_dict`` with ``new_dict``."""
        return self.users.replace_one(one_dict, new_dict)

    def dropping(self):
        """Drop the whole users collection."""
        return self.users.drop()
|
{"/interface.py": ["/users_info_prepare.py", "/project_info.py"], "/dataset.py": ["/users_info_prepare.py"], "/recommend_project.py": ["/project_info.py"]}
|
31,939,135
|
vitroid/genice-rdf
|
refs/heads/main
|
/genice2_rdf/formats/_RDF.py
|
# coding: utf-8
"""
A GenIce2 format plugin to calculate radial distribution functions.
Usage:
% genice 1c -r 3 3 3 -w tip4p -f _RDF > 1c.rdf
% genice 1c -r 3 3 3 -w tip4p -f _RDF[OW:H=HW1=HW2] > 1c.rdf
% analice data.gro -O OW -H HW[12] -w tip3p -f _RDF[OW:HW1=HW2] > data.rdf
Options:
Atom name
Atom name and aliases chained with "=".
json Output in JSON format.
range=x Range of interest (0.9 nm)
binw=x Bin width (0.003 nm)
Options must be separated with colons.
You can specify the list of atom types to be calculated.
For example, in the following case, TIP4P water has four different atom
types (OW, HW1, HW2, and MW), so all the possible 10 combinations of
atom types will be examined.
% genice 1c -r 3 3 3 -w tip4p -f _RDF > 1c.rdf
If you just want the RDF of OW and H, and HW1 and HW2 should be
abbreviated by H, specify the option string like following.
% genice 1c -r 3 3 3 -w tip4p -f _RDF[OW:H=HW1=HW2] > 1c.rdf
"""
# Plugin metadata consumed by GenIce2 (brief description + usage text taken
# from the module docstring).
desc = { "ref": {},
         "brief": "Radial Distribution Functions.",
         "usage": __doc__,
       }
import itertools as it
import numpy as np
import pairlist as pl
from collections import defaultdict
import json
from logging import getLogger
def hist2rdf(hist, vol, natoms, binw, nbin):
    """Convert a raw pair-distance histogram into a radial distribution function.

    hist   : dict mapping bin index (float) -> pair count
    vol    : cell volume
    natoms : 1-tuple (same species) or 2-tuple (two species) of atom counts
    binw   : bin width
    nbin   : number of bins in the returned array
    """
    rdf = np.zeros(nbin)
    # Scatter the counts into the fixed-size array, dropping distances
    # beyond the range of interest.
    for bin_index, count in hist.items():
        if bin_index < nbin:
            rdf[int(bin_index)] = count
    # Pair multiplicity: N^2/2 within one species, Na*Nb across two.
    mult = natoms[0] ** 2 / 2 if len(natoms) == 1 else natoms[0] * natoms[1]
    # Normalize each bin by its spherical-shell volume and the multiplicity.
    shell_radii = np.arange(nbin) * binw + binw / 2
    shell_volumes = 4 * np.pi * shell_radii ** 2 * binw
    return rdf * vol / (shell_volumes * mult)
import genice2.formats
class Format(genice2.formats.Format):
    """GenIce2 output plugin computing radial distribution functions.

    Options parsed in __init__: atom-type alias groups, a ``json`` output
    flag, ``range`` (nm) and ``binw`` (nm). The result (text table or JSON)
    is stored in ``self.output`` by hook7.
    """

    def __init__(self, **kwargs):
        """Parse plugin options.

        Recognized keys: "json"/"JSON" (flag), "range" (float, nm), "binw"
        (float, nm); any other key declares an alias group ("H=HW1=HW2"
        style) whose members all map to the first alias.
        """
        logger = getLogger()
        logger.info("Hook0: Preprocess.")
        self.options = {"atomtypes": {}, "json": False, "range": 0.9, "binw": 0.003}
        for key, value in kwargs.items():
            if key in ["JSON", "json"]:
                self.options["json"] = True
                logger.info(" JSON")
            else:
                if key == "range":
                    self.options["range"] = float(value)
                    logger.info(" Range/nm: {0}".format(self.options["range"]))
                elif key == "binw":
                    self.options["binw"] = float(value)
                    # BUG FIX: this log line said "Range/nm" (copy-paste from
                    # the branch above); it reports the bin width.
                    logger.info(" Binw/nm: {0}".format(self.options["binw"]))
                else:
                    aliases = value.split("=")
                    aliases.append(key)
                    for alias in aliases:
                        self.options["atomtypes"][alias] = aliases[0]
                        logger.info(" {0} is an alias of {1}.".format(alias, aliases[0]))
        logger.info(self.options["atomtypes"])
        logger.info("Hook0: end.")

    def hooks(self):
        """Register the stage-7 (post-lattice) hook."""
        return {7: self.hook7}

    def hook7(self, lattice):
        """Compute all same- and cross-species RDFs and fill self.output."""
        logger = getLogger()
        atomtypes = self.options["atomtypes"]
        logger.info("Hook7: Output radial distribution functions.")
        logger.info(" Total number of atoms: {0}".format(len(lattice.atoms)))
        binw = self.options["binw"]
        nbin = int(self.options["range"]/binw)
        cellmat = lattice.repcell.mat
        # Group fractional coordinates by (aliased) atom name; when an alias
        # table was given, atoms absent from it are skipped.
        rpos = defaultdict(list)
        for atom in lattice.atoms:
            resno, resname, atomname, position, order = atom
            alias = atomname
            if len(atomtypes):
                if atomname in atomtypes:
                    alias = atomtypes[atomname]
                else:
                    continue
            rpos[alias].append(lattice.repcell.abs2rel(position))
        rdf = []
        rdfname = []
        volume = np.linalg.det(lattice.repcell.mat)
        # grid = pl.determine_grid(cellmat,binw*nbin)
        logger.info(f" {rpos.keys()}")
        # Convert each coordinate list to an (n, 3) array.
        for atomname in rpos:
            n = len(rpos[atomname])
            ra = np.zeros([n, 3])
            ra[:] = rpos[atomname]
            rpos[atomname] = ra
        # Same-species pair histograms.
        for atomname in rpos:
            # rpos[atomname] = np.array(rpos[atomname])
            logger.debug(rpos[atomname].shape)
            ra = rpos[atomname]
            na = ra.shape[0]
            logger.info(f" Pair {atomname}-{atomname}")
            i, j, delta = pl.pairs_iter(ra,
                                        binw*nbin,
                                        cellmat,
                                        #grid=grid,
                                        distance=True,
                                        raw=True)
            delta = np.floor(delta/binw)
            hist = dict(zip(*np.unique(delta, return_counts=True)))
            rdfname.append((atomname, atomname))
            rdf.append(hist2rdf(hist, volume, (na,), binw, nbin))
        # Cross-species pair histograms.
        for a, b in it.combinations(rpos, 2):
            ra = rpos[a]
            rb = rpos[b]
            na = ra.shape[0]
            nb = rb.shape[0]
            logger.info(" Pair {0}-{1}".format(a, b))
            i, j, delta = pl.pairs_iter(ra,
                                        binw*nbin,
                                        cellmat,
                                        pos2=rb,
                                        # grid=grid,
                                        distance=True,
                                        raw=True)
            delta = np.floor(delta/binw)
            hist = dict(zip(*np.unique(delta, return_counts=True)))
            rdfname.append((a, b))
            rdf.append(hist2rdf(hist, volume, (na, nb), binw, nbin))
        if self.options["json"]:
            D = dict()
            # NOTE(review): "r" has nbin-1 entries while each g(r) list below
            # has nbin entries (bin 0 included) — confirm whether consumers
            # expect this off-by-one before changing it.
            D["r"] = [i*binw for i in range(1, nbin)]
            for i, pair in enumerate(rdfname):
                name = "{0}--{1}".format(*pair)
                D[name] = [x for x in rdf[i]]
            self.output = json.dumps(D, indent=2, sort_keys=True)
        else:
            s = ""
            s += "# r/nm " + "\t".join(["{0}-{1}".format(*name) for name in rdfname]) + "\n"
            for i in range(1, nbin):
                values = [i*binw]+[r[i] for r in rdf]
                s += "\t".join(["{0:.3f}".format(v) for v in values]) + "\n"
            self.output = s
        logger.info("Hook7: end.")
|
{"/replacer.py": ["/genice2_rdf/formats/_RDF.py"]}
|
31,939,136
|
vitroid/genice-rdf
|
refs/heads/main
|
/replacer.py
|
#!/usr/bin/env python
# Regenerate README-style text: substitute this plugin's docstring and the
# setup.py metadata into the template read from stdin.
from genice2_dev import template
import sys
from genice2_rdf.formats._RDF import __doc__ as doc
# NOTE(review): distutils is deprecated (removed in Python 3.12); consider
# migrating to setuptools or reading the metadata directly.
import distutils.core

setup = distutils.core.run_setup("setup.py")
print(template(sys.stdin.read(), doc, setup))
|
{"/replacer.py": ["/genice2_rdf/formats/_RDF.py"]}
|
32,089,754
|
FabulousErin/Web-scraping-challenge
|
refs/heads/main
|
/mars.py
|
from bs4 import BeautifulSoup
import pandas as pd
from splinter import Browser
def init_browser():
    """Return a non-headless splinter Chrome browser.

    NOTE(review): the chromedriver path is hard-coded to one developer's
    machine — move it to configuration or PATH lookup.
    """
    executable_path = {"executable_path": "C:\\Users\\erinn\\Documents\\WashU\\chromedriver.exe"}
    return Browser("chrome", **executable_path, headless=False)
def scrape():
    """Fetch the NASA Mars news page and return the parsed soup.

    BUG FIX: the browser was never closed (resource leak per call) and the
    parsed page was discarded. The browser is now always quit, and the soup
    is returned (backward compatible: previous callers ignored the implicit
    None return).
    """
    browser = init_browser()
    url = "https://mars.nasa.gov/news/"
    try:
        browser.visit(url)
        html = browser.html
        soup = BeautifulSoup(html, "html.parser")
    finally:
        # splinter's Browser wraps a webdriver; quit() releases the process.
        browser.quit()
    return soup
def scrapeNew():
    """Scrape article titles from the NASA Mars news page.

    Returns a list of ``{"title": <text>}`` dicts (the first matched element,
    the page header, is dropped).

    BUG FIX: the browser was leaked on every call; it is now always quit.
    The unused ``listings`` local was removed.
    """
    browser = init_browser()
    try:
        url = "https://mars.nasa.gov/news/"
        browser.visit(url)
        html = browser.html
        soup = BeautifulSoup(html, "html.parser")
        news_title = soup.find_all("div", {"class": "content_title"})
        print(news_title[1].get_text())
        # The first entry is the page header, not an article title.
        news_title.pop(0)
        clean_titles = []
        for title in news_title:
            title_dictionary = {
                "title": title.get_text()
            }
            clean_titles.append(title_dictionary)
        print(clean_titles)
        return clean_titles
    finally:
        browser.quit()
def table():
    """Fetch and return all HTML tables from the Mars facts page."""
    mars_facts_webpage = 'https://space-facts.com/mars/'
    # Renamed local so it no longer shadows the function's own name.
    facts_tables = pd.read_html(mars_facts_webpage)
    print(facts_tables)
    return facts_tables
#!jupyter nbconvert --to python mission_to_mars.ipynb
|
{"/app.py": ["/mars.py"]}
|
32,089,755
|
FabulousErin/Web-scraping-challenge
|
refs/heads/main
|
/app.py
|
from flask import Flask, render_template, redirect
from flask_pymongo import PyMongo
import mars
app = Flask(__name__)

# Use flask_pymongo to set up mongo connection
# (local MongoDB, database "mars_app"; titles live in mars_titles).
app.config["MONGO_URI"] = "mongodb://localhost:27017/mars_app"
mongo = PyMongo(app)
@app.route("/")
def index():
# listings = mongo.df
single_title = mongo.db.mars_titles.find_one()
return render_template("index.html", dbperson=single_title)
@app.route("/scrape")
def scraper():
    """Scrape fresh Mars titles, upsert the first one, redirect home."""
    db = mongo.db.mars_titles
    all_titles = mars.scrapeNew()
    # NOTE(review): Collection.update() is deprecated/removed in newer
    # pymongo releases (use update_one/replace_one) -- verify the pinned
    # pymongo version still supports it.
    db.update({}, all_titles[0], upsert=True)
    return redirect("/", code=302)
# Run the Flask development server when executed directly.
if __name__ == "__main__":
    app.run(debug=True)
|
{"/app.py": ["/mars.py"]}
|
32,215,653
|
isabellaaquino/teste-tecnico
|
refs/heads/master
|
/byne/clients/views.py
|
from django.shortcuts import render, redirect
from django.template import RequestContext
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth import authenticate, login
from .models import Profile
from django.contrib.auth.models import User
import random
import datetime
import numpy
import time
# Module-level parity tables: even and odd integers in [0, 98],
# sampled by loop() when incrementing a user's counter.
global impares  # no-op at module scope; kept from the original
global pares
pares = [n for n in range(99) if n % 2 == 0]
impares = [n for n in range(99) if n % 2 != 0]
# Create your views here.
def index(request):
    """Render the landing page."""
    return render(request, 'clients/index.html')
def login(request):
    """Render the login page.

    NOTE(review): this definition shadows ``django.contrib.auth.login``
    imported at the top of the module, so the imported function is
    unreachable here -- confirm it is not needed in this file.
    """
    return render(request, 'registration/login.html')
def register(request):
    """Create a user via Django's UserCreationForm and attach a Profile.

    GET renders an empty form. A valid POST saves the user, creates a
    Profile with number=0 and redirects to the index; an invalid POST
    falls through and re-renders the bound form with its errors.
    """
    if request.method == 'POST':
        form = UserCreationForm(request.POST)
        if form.is_valid():
            form.save()
            username = form.cleaned_data['username']
            password = form.cleaned_data['password1']
            # NOTE(review): authenticate() can return None (backend
            # rejection); Profile.objects.create would then get
            # user=None -- confirm this path cannot happen, and note that
            # auth login() is never called (also shadowed above).
            user = authenticate(username=username, password=password)
            Profile.objects.create(user=user,number=0)
            return redirect('/')
    else:
        form = UserCreationForm()
    context = { 'form' : form }
    return render(request, 'registration/register.html', context)
def NumberLoop(request):
    """View that repeatedly increments the user's counter.

    First run (profile.number == 0): increments by 1 every 0.5 s, logging
    each step to profile.loglist, until a randomly chosen digit of the
    elapsed-time string matches, then renders the profile page.
    Subsequent runs delegate to loop() in an endless while-loop.
    """
    user = request.user
    profile = Profile.objects.get(user=user)
    hora1 = datetime.datetime.now()
    if profile.number == 0:
        context = {
            'number': 0
        }
        while True:
            # NOTE(review): numpy.arange(0.5) yields a single element
            # ([0.0]), so this inner for-loop runs exactly once per
            # pass -- confirm that is intended.
            for i in numpy.arange(0.5):
                profile.number+=1
                time.sleep(0.5)
                hora = str(datetime.datetime.now())
                profile.log['time'] = hora[:16]
                profile.log['valorAtual'] = profile.number
                profile.log['incremento'] = 1
                profile.log['type'] = 'None'
                hora2 = datetime.datetime.now()
                diferenca = str(hora2 - hora1)
                # copy() so later log mutations don't rewrite history
                profile.loglist.append(profile.log.copy())
                profile.save()
            randomNumb = random.randint(3,6)
            # NOTE(review): diferenca[6] indexes a fixed character of the
            # "H:MM:SS.ffffff" timedelta string -- fragile; verify which
            # digit this is meant to compare against.
            if int(diferenca[6])==randomNumb:
                break
        return render(request, 'clients/profile.html', context)
    while True:
        loop(request)
    # NOTE(review): everything below is unreachable -- the while-loop
    # above never breaks or returns.
    context = {
        'profile': profile,
        'number': profile.number
    }
    return render(request, 'clients/profile.html', context)
def loop(request):
    """Increment the user's counter by a random even or odd step.

    Picks PAR (even) or ÍMPAR (odd), draws a matching increment, then
    applies it randomNumb*2 times at 0.5 s intervals, logging each step.
    """
    global impares
    global pares
    user = request.user
    profile = Profile.objects.get(user=user)
    options = ["PAR","ÍMPAR"]
    which = random.choice(options)
    randomNumb = random.randint(3,6)
    # NOTE(review): only the *last* draw of newNumber in this loop is
    # used below; a single random.choice would be equivalent -- confirm.
    for c in range(randomNumb):
        if which == "PAR":
            newNumber = random.choice(pares)
            profile.log['incremento'] = newNumber
            profile.log['type'] = which
        elif which == "ÍMPAR":
            newNumber = random.choice(impares)
            profile.log['incremento'] = newNumber
            profile.log['type'] = which
    for c in range(randomNumb*2):
        profile.number+=newNumber
        profile.log['valorAtual'] = profile.number
        hora = str(datetime.datetime.now())
        profile.log['time'] = hora[:19]
        time.sleep(0.5)
        # copy() so later log mutations don't rewrite history
        profile.loglist.append(profile.log.copy())
        profile.save()
    time.sleep(1)
def logView(request):
    """Render the activity log page for the logged-in user."""
    current_profile = Profile.objects.get(user=request.user)
    return render(request, 'clients/log.html', {'profile': current_profile})
|
{"/byne/clients/views.py": ["/byne/clients/models.py"]}
|
32,289,896
|
nileshnagarwal/djangorest_course_sarda
|
refs/heads/master
|
/watchlist_app/urls.py
|
from django.urls import path
from .views import *
# URL routes for the watchlist app: the movie list and a per-movie detail.
urlpatterns = [
    path('list/', movie_list, name='movie-list'),
    path('<int:pk>', movie_detail, name='movie-detail')
]
|
{"/watchlist_app/api/urls.py": ["/watchlist_app/api/views.py"], "/watchlist_app/api/views.py": ["/watchlist_app/api/serializers.py"]}
|
32,543,828
|
rodrigocesarb/PBN_env
|
refs/heads/main
|
/__init__.py
|
#__init__.py
from .PBN import PBN
|
{"/Node.py": ["/utils.py"], "/__init__.py": ["/PBN.py"], "/PBN.py": ["/Node.py"]}
|
32,543,829
|
rodrigocesarb/PBN_env
|
refs/heads/main
|
/Node.py
|
"""Node.py
Represents nodes in a PBN.
"""
import random
import numpy as np
import copy
class Node():
    """A single gene/node in a Probabilistic Boolean Network (PBN).

    Holds the node's probability table (``function``), its index ``i`` in
    the network, references to its input nodes, and its current boolean
    value. Next values are sampled in two phases (compute, then apply) so
    the whole network updates synchronously.
    """

    def __init__(self, function, i, name=None):
        """Represents a node in a PBN.

        Args:
            function: numpy array; ``function.item(input_state)`` gives the
                probability of this node being True for that input state.
            i (int): index of this node within the PBN.
            name (str): name of the gene; defaults to ``"G{i}"``.
        """
        self.input_nodes = None       # list of Node objects feeding this node
        self.function = function
        self.i = i
        # Idiom fix: `name is None` instead of `type(name) == type(None)`.
        self.name = "G{0}".format(i) if name is None else name
        self.state = None
        self.input_weights = None
        self.value = None
        # Bug fix: initialise so apply_next_value() can detect a missing
        # compute_next_value() call instead of raising AttributeError.
        self.potential_value = None

    def compute_next_value(self):
        """Sample own next value from the input nodes' current values."""
        input_state = tuple(int(node.value) for node in self.input_nodes)
        prob_true = self.function.item(input_state)
        u = random.uniform(0, 1)  # sample
        self.potential_value = u < prob_true

    def get_probs(self, state):
        """Return P(this node is True) given a full network state array."""
        input_indices = [node.i for node in self.input_nodes]
        input_state = tuple(state[input_indices].astype(int))
        return self.function.item(input_state)

    def apply_next_value(self):
        """Commit the value sampled by compute_next_value()."""
        # Bug fix: the original compared the value against the NoneType
        # *type* object (`== type(None)`), which is never true for a bool,
        # so the guard could never fire.
        if self.potential_value is None:
            raise Exception('Finishing transaction without computing next value.')
        self.value = self.potential_value
        self.potential_value = None

    def __str__(self):
        return "{0}: {1}".format(self.name, self.value)
|
{"/Node.py": ["/utils.py"], "/__init__.py": ["/PBN.py"], "/PBN.py": ["/Node.py"]}
|
32,543,830
|
rodrigocesarb/PBN_env
|
refs/heads/main
|
/PBN.py
|
"""PBN.py
The environment that runs PBNs.
"""
import numpy as np
import networkx as nx
import matplotlib.pyplot as plt
import copy
import time
from .Node import Node
class PBN():
    """A Probabilistic Boolean Network: Node objects plus helpers to
    evolve the network, inspect connectivity, and build its state
    transition graph."""

    def __init__(self, PBN_data = None):
        """Construct a PBN from given PBN data.
        Args:
            PBN_data (list): data representing the PBN; one
                (mask, function) pair per node, where mask indexes the
                input nodes and function is the node's probability table.
        returns:
            PBN
        """
        self.PBN_size = len(PBN_data)
        self.nodes = np.empty((self.PBN_size), dtype=object)
        self.PBN = None  # cached networkx connectivity graph (print_PBN)
        self.STG = None  # cached state transition graph (gen_STG)
        # Two passes: create every node first, then wire inputs, since a
        # node's mask may reference nodes with higher indices.
        for i in range(self.PBN_size):
            _, function = PBN_data[i]
            self.nodes[i] = Node(function, i)
        for i in range(self.PBN_size):
            mask, _ = PBN_data[i]
            input_nodes = self.nodes[mask]
            self.nodes[i].input_nodes = input_nodes

    def reset(self, state = None):
        """Set the state of the PBN to a particular one.
        args:
            state [bool]: The state to be set to. If left empty, defaults to a random state.
        """
        if type(state) == type(None):
            # No state given: randomise every node independently.
            for node in self.nodes:
                node.value = np.random.rand() > 0.5
        else:
            if state.shape[0] != self.PBN_size:
                raise Exception('The length of the state given ({0}) is different from the PBN size ({1}).'.format(state.shape[0], self.PBN_size))
            for i in range(self.PBN_size):
                self.nodes[i].value = state[i].astype(bool)

    def name_nodes(self, names):
        """Assign human-readable gene names, one per node, in index order."""
        for i in range(self.PBN_size):
            self.nodes[i].name = names[i]

    def print_PBN(self):
        """Construct a networkx graph representing the connetcivities of the PBN.
        returns: networkx di-graph.
        """
        if type(self.PBN) == type(None):  # build lazily, then cache
            G = nx.DiGraph()
            for i in range(self.PBN_size):
                G.add_node(self.nodes[i].name)
            for i in range(self.PBN_size):
                #For each target node
                node = self.nodes[i] #Current node object
                input_nodes = self.nodes[i].input_nodes
                inps = [] #List of names of input nodes
                inps_i = [] #List of indexes of input nodes
                weights = node.input_weights
                for inp in input_nodes:
                    inps += [inp.name]
                    inps_i += [inp.i]
                if type(weights) == type(None):
                    # No weights applied yet: plain edges.
                    for inp in input_nodes:
                        G.add_edge(inp.name,node.name)
                else:
                    for inp in input_nodes:
                        G.add_edge(inp.name,node.name, weight = weights[inp.i])
            self.PBN = G
        return self.PBN

    def flip(self, index):
        """Flip the value of a gene at index.
        args:
            index (int): gene index to flip.

        NOTE(review): ``self.state`` is never assigned anywhere in this
        class; this likely should toggle ``self.nodes[index].value`` --
        confirm before use.
        """
        self.state[index] = not self.state[index]

    def get_funcs(self):
        """Print the functions of the PBN to inspect visually.
        """
        for i in range(self.PBN_size):
            print(self.nodes[i].function)

    def step(self):
        """Perform a step of natural evolution.
        """
        # Two phases so every node samples from the *old* input values.
        for node in self.nodes:
            node.compute_next_value()
        for node in self.nodes:
            node.apply_next_value()

    def get_state(self):
        """Get a state from the values of all the nodes
        """
        state = np.empty(self.PBN_size, dtype=bool)
        for i in range(self.PBN_size):
            state[i] = self.nodes[i].value
        return state

    def gen_STG(self):
        """Generate the State Transition Graph (STG) of the PBN.
        Go through each possible state.
        Compute the probabilities of going to next states.
        returns:
            networkx DiGraph.

        NOTE(review): ``booleanize`` is not defined or imported in this
        module -- presumably provided by a utils module; confirm the
        import, otherwise this raises NameError.
        """
        if type(self.STG) == type(None):  # build lazily, then cache
            N_states = 2**(self.PBN_size)
            print("Total number of states: {0}".format(N_states))
            G = nx.DiGraph()
            start = time.time()
            for state_index in range(N_states):
                state = booleanize(state_index, self.PBN_size)
                G.add_node(str(state.astype(int)))
                next_states = self._compute_next_states(state)
                G.add_weighted_edges_from(next_states)
                end = time.time()
                # Rough ETA extrapolated from average time per state so far.
                est = N_states*(end-start)/(state_index+1)
                print("\rComputing STG: At index {4} {0}%. Est duration: {1}s, OR {2} mins, OR {3} hrs".format(state_index*100 / N_states, est, est/60, est/3600, state_index), end="")
            self.STG = G
        return self.STG

    def get_node_by_name(self, nodename):
        """Get the appropriate node object given the name of the node.
        """
        for node in self.nodes:
            if node.name == nodename:
                return node
        raise Exception(f'Node with name \'{nodename}\' not found.')

    def generate_weights(self):
        """Compute weights

        NOTE(review): Node (as defined in Node.py) has neither a ``mask``
        attribute nor a ``compute_input_weights()`` method -- this will
        raise at runtime unless a different Node implementation is used;
        confirm.
        """
        for node_i in range(self.PBN_size):
            node = self.nodes[node_i]
            function = node.function
            mask = node.mask
            node.compute_input_weights()

    def apply_weights(self, weights):
        """Apply the weights provided (save them)
        """
        for i in range(self.PBN_size):
            # Round to 4 decimals; stored per node as input_weights.
            node_weights = np.around(weights[i,:], 4)
            relevant_node = self.nodes[i]
            relevant_node.input_weights = node_weights
            self.nodes[i] = relevant_node

    def _compute_next_states(self, state):
        """Compute the probabilities of going to all next possible states from current state.
        Go through each gene. Compute the probability of each gene being True after current state.
        Convert those probabilities to next possible states.
        args:
            state [bool]: State to calculate probabilities from.
        returns:
            list of triplets. (Current State, next-possible-state, probability.)
        """
        # Row 0: P(gene False), row 1: P(gene True), one column per gene.
        probabilities = np.zeros((2, self.PBN_size), dtype=float)
        N_states = 2**(self.PBN_size)
        output = []
        for i in range(self.PBN_size):
            prob_true = self.nodes[i].get_probs(state)
            probs = np.array([1-prob_true, prob_true])
            probabilities[:,i] = probs
        protostates = self._probs_to_states(probabilities)
        for prostate, proprob in protostates:
            output += [(str(state.astype(int)), str(prostate.astype(int)), proprob)]
        return output

    def _probs_to_states(self, probs):
        """Compute the next possible states to go to, and their probabilities, given a set of probabilities of being true for each gene.
        Set the next states as a list of 0.5 with probability 1.
        A gene can not be at value 0.5, so it is used to signify an uncomputed value.
        Go through each gene.
        If probability is 1 or 0
        set the value of all next states at that index to the particular value. Leave probability unaffected.
        Else,
        Make two copies of all next states - one for each state of the gene. Compute probabilities accordingly.
        args:
            probs [float]: Probabilities of each gene being true in the next state.
        returns:
            List of tuples. Each tuple is a possible next state with according probability.
        """
        _, n_genes = probs.shape
        protostate = np.ones(n_genes, dtype=float) * 0.5
        protoprob = 1
        prostate = [(protostate, protoprob)]
        for gene_i in range(n_genes):
            p = probs[:,gene_i]
            if p[0] == 1 or p[0] == 0:
                #Deterministic. Mainly for optimisation.
                for pro in prostate:
                    protostate, protoprob = pro #Go through each next-state already computed, unpack them
                    protostate[gene_i] = p[1] #Set the value of the gene to the corresponding value.
            else:
                # Stochastic gene: fork every partial state into the
                # gene=0 and gene=1 branches with scaled probabilities.
                prostate_copy = []
                for pro in prostate:
                    pro_1, prob_1 = copy.deepcopy(pro)
                    pro_2, prob_2 = copy.deepcopy(pro)
                    pro_1[gene_i] = 0 #Set value to 0
                    pro_2[gene_i] = 1 #Set value to 1
                    prob_1 *= p[0] #Set probability to that value being 0
                    prob_2 *= p[1] #^^^
                    #Put them back in.
                    protostate_1 = (pro_1, prob_1)
                    protostate_2 = (pro_2, prob_2)
                    prostate_copy += [protostate_1]
                    prostate_copy += [protostate_2]
                prostate = prostate_copy
        return prostate
|
{"/Node.py": ["/utils.py"], "/__init__.py": ["/PBN.py"], "/PBN.py": ["/Node.py"]}
|
32,599,135
|
mmingyeong/lvmnps
|
refs/heads/master
|
/python/lvmnps/actor/commands/onoff.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# @Author: Mingyeong YANG (mingyeong@khu.ac.kr)
# @Date: 2021-03-22
# @Filename: onoff.py
# @License: BSD 3-clause (http://www.opensource.org/licenses/BSD-3-Clause)
from __future__ import annotations
import click
from clu.command import Command
from lvmnps.actor.commands import parser
from lvmnps.exceptions import NpsActorError
# from lvmnps.switch.dli.powerswitch import PowerSwitch
async def switch_control(switches: list, on: bool, name: str, portnum: int):
    """Set outlet state on every switch and return the merged status.

    Args:
        switches: power switch objects exposing setState/statusAsJson.
        on: desired outlet state (True = on).
        name: switch or outlet name ("" selects by port only).
        portnum: outlet number (0 selects all/used outlets).

    Returns:
        dict merging each switch's status JSON.
    """
    status = {}
    for switch in switches:
        try:
            await switch.setState(on, name, portnum)
            # status |= await switch.statusAsJson(name, portnum) works only with python 3.9
            status = dict(list(status.items()) +
                          list((await switch.statusAsJson(name, portnum)).items()))
        except NpsActorError as err:
            # NOTE(review): this returns a *set* literal, not a dict like
            # the success path, so callers emitting STATUS see an
            # inconsistent shape; probably meant {"error": str(err)} --
            # confirm before changing.
            return {str(err)}
    return status
@parser.command()
@click.argument("NAME", type=str, default="")
@click.argument("PORTNUM", type=int, default=0)
async def on(command: Command, switches: [], name: str, portnum: int):
    """Turn on the Outlet"""
    # Switch the selected outlet(s) on and report the merged status.
    result = await switch_control(switches, True, name, portnum)
    command.info(STATUS=result)
    return command.finish(text="done")
@parser.command()
@click.argument("NAME", type=str, default="")
@click.argument("PORTNUM", type=int, default=0)
async def off(command: Command, switches: [], name: str, portnum: int):
    """Turn off the Outlet"""
    # Switch the selected outlet(s) off and report the merged status.
    result = await switch_control(switches, False, name, portnum)
    command.info(STATUS=result)
    return command.finish(text="done")
@parser.command()
@click.argument("NAME", type=str, default="")
async def onall(command: Command, switches: [], name: str):
    """Turn on all Outlet"""
    # Bug fix: arguments were passed as (..., 0, name), binding 0 to
    # switch_control's `name` and the name string to `portnum`.
    # switch_control expects (switches, on, name, portnum); portnum 0
    # selects all (used) outlets of the named switch.
    command.info(STATUS=await switch_control(switches, True, name, 0))
    return command.finish(text="done")
@parser.command()
@click.argument("NAME", type=str, default="")
async def offall(command: Command, switches: [], name: str):
    """Turn off all Outlet"""
    # Bug fix: arguments were passed as (..., 0, name), binding 0 to
    # switch_control's `name` and the name string to `portnum`.
    # switch_control expects (switches, on, name, portnum); portnum 0
    # selects all (used) outlets of the named switch.
    command.info(STATUS=await switch_control(switches, False, name, 0))
    return command.finish(text="done")
|
{"/python/lvmnps/switch/factory.py": ["/python/lvmnps/switch/dli/powerswitch.py", "/python/lvmnps/switch/exceptions.py"]}
|
32,599,136
|
mmingyeong/lvmnps
|
refs/heads/master
|
/python/lvmnps/actor/commands/status.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# @Author: Mingyeong YANG (mingyeong@khu.ac.kr)
# @Date: 2021-03-22
# @Filename: status.py
# @License: BSD 3-clause (http://www.opensource.org/licenses/BSD-3-Clause)
from __future__ import annotations
import click
from clu.command import Command
from lvmnps.actor.commands import parser
from lvmnps.switch.exceptions import PowerException
@parser.command()
@click.argument("NAME", type=str, default="")
@click.argument("PORTNUM", type=int, default=0)
async def status(command: Command, switches: [], name: str, portnum: int):
    """print the status of the NPS."""
    merged = {}
    for switch in switches:
        try:
            # dict-union (|=) needs python 3.9; merge the items manually.
            part = await switch.statusAsJson(name, portnum)
            merged = dict(list(merged.items()) + list(part.items()))
        except PowerException as ex:
            return command.fail(error=str(ex))
    command.info(STATUS=merged)
    return command.finish("done")
|
{"/python/lvmnps/switch/factory.py": ["/python/lvmnps/switch/dli/powerswitch.py", "/python/lvmnps/switch/exceptions.py"]}
|
32,599,137
|
mmingyeong/lvmnps
|
refs/heads/master
|
/tests/test_02_amqp.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# @Author: José Sánchez-Gallego (gallegoj@uw.edu)
# @Date: 2020-08-26
# @Filename: test_actor.py
# @License: BSD 3-clause (http://www.opensource.org/licenses/BSD-3-Clause)
'''
import asyncio
import logging
import sys
import pytest
from clu import REPLY, AMQPActor, CluError, CommandError
from clu.client import AMQPReply
from clu.model import Model
from asynctest import CoroutineMock
pytestmark = [pytest.mark.asyncio]
@pytest.fixture
def message_maker(mocker):
def _make_message(headers=None, body=None):
headers = headers or {"command_id": 1, "message_code": "i", "sender": "me"}
message = mocker.MagicMock()
message.correlation_id = headers["command_id"]
message.info.return_value = {"headers": headers}
message.body = b"{}"
return message
yield _make_message
def test_actor(amqp_actor):
assert amqp_actor.name == "amqp_actor"
async def test_client_send_command(amqp_client, amqp_actor):
cmd = await amqp_client.send_command("amqp_actor", "ping")
await cmd
assert len(cmd.replies) == 2
assert cmd.replies[-1].message_code == ":"
assert cmd.replies[-1].body["text"] == "Pong."
async def test_client_send_command_args(amqp_client, amqp_actor):
cmd = await amqp_client.send_command("amqp_actor", "ping", "--help")
await cmd
assert len(cmd.replies) == 2
assert cmd.replies[-1].message_code == ":"
assert "help" in cmd.replies[-1].body
async def test_get_version(amqp_client, amqp_actor):
cmd = await amqp_client.send_command("amqp_actor", "version")
await cmd
assert len(cmd.replies) == 2
assert cmd.replies[-1].message_code == ":"
assert cmd.replies[-1].body["version"] == "?"
async def test_bad_command(amqp_client, amqp_actor):
cmd = await amqp_client.send_command("amqp_actor", "bad_command")
await cmd
assert "Command 'bad_command' failed." in cmd.replies[-1].body["error"]
async def test_send_command_actor_not_connected(amqp_client, amqp_actor):
cmd = await amqp_client.send_command("amqp_actor_2", "ping")
await cmd
assert cmd.status.did_fail
assert "Failed routing message" in cmd.replies[-1].body["error"]
async def test_queue_locked(amqp_actor):
with pytest.raises(CluError) as error:
actor2 = AMQPActor(name="amqp_actor", port=amqp_actor.connection.port)
await actor2.start()
assert "This may indicate that another instance" in str(error)
await actor2.stop()
async def test_model_callback(amqp_client, amqp_actor, mocker):
def callback(model, kw):
pass
callback_mock = mocker.create_autospec(callback)
amqp_client.models["amqp_actor"].register_callback(callback_mock)
kw = amqp_client.models["amqp_actor"]["text"]
assert kw.value is None
cmd = await amqp_client.send_command("amqp_actor", "ping")
await cmd
callback_mock.assert_called()
assert len(callback_mock.call_args) == 2
assert kw.value == "Pong."
assert kw.flatten() == {"text": "Pong."}
assert amqp_client.models["amqp_actor"].flatten() == {
"text": "Pong.",
"error": None,
"schema": None,
"fwhm": None,
"help": None,
"version": None,
"UserInfo": None,
"yourUserID": None,
"num_users": None,
}
json = (
'{"fwhm": null, "text": "Pong.", "schema": null, '
'"version": null, "help": null, "error": null, '
'"yourUserID": null, "UserInfo": null, "num_users": null}'
)
assert amqp_client.models["amqp_actor"].jsonify() == json
async def test_client_get_schema_fails(amqp_actor, amqp_client, caplog):
# Remove actor knowledge of its own model
amqp_actor.model = None
# Restart models.
del amqp_client.models[amqp_actor.name]
await amqp_client.models.load_schemas()
assert amqp_client.models == {}
log_msg = caplog.record_tuples[-1]
assert "Cannot load model" in log_msg[2]
async def test_bad_keyword(amqp_actor, caplog):
schema = """{
"type": "object",
"properties": {
"text": {"type": "string"}
},
"additionalProperties": false
}"""
# Replace actor schema
amqp_actor.model = Model(amqp_actor.name, schema)
with caplog.at_level(REPLY, logger=f"clu:{amqp_actor.name}"):
amqp_actor.write("i", {"bad_keyword": "blah"}, broadcast=True)
await asyncio.sleep(0.01)
assert "Failed validating the reply" in caplog.record_tuples[-1][2]
async def test_write_update_model_fails(amqp_actor, mocker):
mocker.patch.object(
amqp_actor.model,
"update_model",
return_value=(False, "failed updating model."),
)
mocker.patch.object(
amqp_actor.connection.exchange,
"publish",
new_callable=CoroutineMock,
)
apika_message = mocker.patch("aio_pika.Message")
amqp_actor.write("i", {"text": "Some message"})
await asyncio.sleep(0.01)
assert b"failed updating model" in apika_message.call_args[0][0]
async def test_write_no_validate(amqp_actor, mocker):
mock_func = mocker.patch.object(amqp_actor.model, "update_model")
amqp_actor.write("i", {"text": "Some message"}, validate=False)
mock_func.assert_not_called()
async def test_write_silent(amqp_actor, mocker):
mock_func = mocker.patch.object(amqp_actor, "_write_internal")
amqp_actor.write("i", {"text": "Some message"}, silent=True)
mock_func.assert_not_called()
async def test_new_command_fails(amqp_actor, mocker):
# Use CoroutineMock for Python 3.7-3.8 compatibility.
message = CoroutineMock()
mocker.patch("clu.actor.Command", side_effect=CommandError)
mocker.patch("json.loads")
actor_write = mocker.patch.object(
amqp_actor,
"_write_internal",
new_callable=CoroutineMock,
)
await amqp_actor.new_command(message)
actor_write.assert_called()
assert (
"Could not parse the following" in actor_write.call_args[0][0].message["error"]
)
class TestHandleReply:
async def test_client_handle_reply_bad_message(
self, amqp_client, message_maker, caplog
):
message = message_maker()
message.correlation_id = 100
await amqp_client.handle_reply(message)
assert "mismatch between message" in caplog.record_tuples[-2][2]
assert caplog.record_tuples[-1][2] == "Invalid message received."
@pytest.mark.parametrize("log", [False, logging.getLogger()])
async def test_reply_no_message_code(self, message_maker, log, caplog):
message = message_maker(headers={"command_id": 1, "sender": "me"})
reply = AMQPReply(message, log=log)
assert reply.is_valid is False
if log:
assert "message without message_code" in caplog.record_tuples[-1][2]
@pytest.mark.parametrize("log", [False, logging.getLogger()])
async def test_reply_no_sender(self, message_maker, log, caplog):
message = message_maker(headers={"command_id": 1, "message_code": "i"})
reply = AMQPReply(message, log=log)
assert reply.is_valid is True
if log:
assert "message without sender" in caplog.record_tuples[-1][2]
@pytest.mark.skipif(sys.version_info < (3, 8), reason="Test fails in PY37")
async def test_client_send_command_callback(amqp_client, amqp_actor, mocker):
callback_mock = mocker.MagicMock()
cmd = await amqp_client.send_command("amqp_actor", "ping", callback=callback_mock)
await cmd
callback_mock.assert_called()
assert isinstance(callback_mock.mock_calls[0].args[0], AMQPReply)
'''
|
{"/python/lvmnps/switch/factory.py": ["/python/lvmnps/switch/dli/powerswitch.py", "/python/lvmnps/switch/exceptions.py"]}
|
32,599,138
|
mmingyeong/lvmnps
|
refs/heads/master
|
/python/lvmnps/switch/powerswitchbase.py
|
# -*- coding: utf-8 -*-
#
# @Author: Florian Briegel (briegel@mpia.de)
# @Date: 2021-06-24
# @Filename: lvmnps/switch/powerswitchbase.py
# @License: BSD 3-clause (http://www.opensource.org/licenses/BSD-3-Clause)
from abc import abstractmethod
from sdsstools.logger import SDSSLogger
from lvmnps.switch.outlet import Outlet
__all__ = ['PowerSwitchBase']
class PowerSwitchBase(object):
    """ Powerswitch class to manage the Digital Loggers Web power switch """

    def __init__(self, name: str, config: dict, log: SDSSLogger):
        """Create the base switch with one Outlet per configured port.

        Args:
            name: switch name used in status keys and name lookups.
            config: nested configuration dict (navigated by config_get).
            log: logger instance.
        """
        self.name = name
        self.log = log
        self.config = config
        # Number of ports on this switch (default 8).
        self.numports = self.config_get("ports.num", 8)
        # One Outlet per port; last arg -1 is the initial (unknown) state.
        self.outlets = [Outlet(name,
                        self.config_get(f"ports.{portnum}.name"),
                        portnum,
                        self.config_get(f"ports.{portnum}.desc"),
                        -1,
                        ) for portnum in range(1, self.numports + 1)
                        ]
        self.log.debug(f"{self.outlets}")
        # "ouo" = only-used-ones: restrict bulk operations to outlets in use.
        self.onlyusedones = self.config_get("ouo", True)
        self.log.debug(f"Only used ones: {self.onlyusedones}")

    def config_get(self, key, default=None):
        """ DOESNT work for keys with dots !!!

        Dotted-path lookup into the nested config dict; numeric path
        segments are converted to int so numeric dict keys resolve.
        Returns `default` when any segment is missing.
        """
        def g(config, key, d=None):
            k = key.split('.', maxsplit=1)
            c = config.get(k[0] if not k[0].isnumeric() else int(k[0])) # keys can be numeric
            # Stop on a miss; recurse while segments remain and child is a dict.
            return d if c is None else c if len(k) < 2 else g(c, k[1], d) if type(c) is dict else d
        return g(self.config, key, default)

    def findOutletByName(self, name: str):
        """Return the first outlet with the given name, or None."""
        for o in self.outlets:
            if o.name == name:
                return o

    def collectOutletsByNameAndPort(self, name: str, portnum: int = 0):
        """Select outlets by switch/outlet name and optional port number.

        With no name (or this switch's name): portnum picks one outlet,
        portnum 0 returns all outlets (or only in-use ones when
        onlyusedones is set). Otherwise name is looked up as an outlet name.
        """
        if not name or name == self.name:
            if portnum:
                if portnum > self.numports:
                    return []
                # Ports are 1-based; the outlets list is 0-based.
                return [self.outlets[portnum - 1]]
            else:
                outlets = []
                self.log.debug(str(self.onlyusedones))
                for o in self.outlets:
                    if o.inuse or not self.onlyusedones:
                        outlets.append(o)
                return outlets
        else:
            o = self.findOutletByName(name)
            if o:
                return [o]
        return []

    async def setState(self, state, name: str = "", portnum: int = 0):
        """Switch the selected outlets to `state` (parsed by Outlet.parse)."""
        if portnum > self.numports:
            return []
        return await self.switch(Outlet.parse(state),
                                 self.collectOutletsByNameAndPort(name, portnum))

    async def statusAsJson(self, name: str = "", portnum: int = 0):
        # name: can be a switch or an outlet name
        outlets = self.collectOutletsByNameAndPort(name, portnum)
        await self.update(outlets)
        status = {}
        for o in outlets:
            status[f'{o.name}'] = o.toJson()
        return status

    @abstractmethod
    async def start(self):
        pass

    @abstractmethod
    async def stop(self):
        pass

    @abstractmethod
    async def isReachable(self):
        """ Verify we can reach the switch, returns true if ok """
        pass

    @abstractmethod
    async def update(self, outlets):
        # Refresh the cached state of the given outlets from the device.
        pass

    @abstractmethod
    async def switch(self, state, outlets):
        # Physically switch the given outlets to `state`.
        pass
|
{"/python/lvmnps/switch/factory.py": ["/python/lvmnps/switch/dli/powerswitch.py", "/python/lvmnps/switch/exceptions.py"]}
|
32,599,139
|
mmingyeong/lvmnps
|
refs/heads/master
|
/tests/test_01_switch.py
|
import os
import pytest
from clu import JSONActor
from clu.testing import setup_test_actor
from sdsstools.logger import get_logger
from lvmnps.actor.commands import parser as nps_command_parser
from lvmnps.switch.factory import powerSwitchFactory
@pytest.fixture
def switches():
    """Build power switch objects from tests/lvmnps.yml for each test."""
    default_config_file = os.path.join(os.path.dirname(__file__), "lvmnps.yml")
    default_config = JSONActor._parse_config(default_config_file)
    assert("switches" in default_config)
    switches = []
    for (name, config) in default_config["switches"].items():
        print(f"Switch {name}: {config}")
        try:
            switches.append(powerSwitchFactory(name, config, get_logger("test")))
        except Exception as ex:
            # Best-effort: a misconfigured switch is reported, not fatal.
            print(f"Error in power switch factory {type(ex)}: {ex}")
    return switches
async def send_command(actor, command_string):
    """Run a mock command, assert it finished cleanly, return its STATUS reply."""
    cmd = actor.invoke_mock_command(command_string)
    await cmd
    assert cmd.status.is_done
    assert actor.mock_replies[-1]['text'] == 'done'
    # The STATUS payload is emitted just before the final "done" reply.
    return actor.mock_replies[-2]["STATUS"]
@pytest.mark.asyncio
async def test_actor(switches):
    """End-to-end: drive status/on/off commands through a mock JSON actor."""
    test_actor = await setup_test_actor(JSONActor('lvmnp', host='localhost', port=9999))
    test_actor.parser = nps_command_parser
    test_actor.parser_args = [switches]
    status = await send_command(test_actor, 'status')
    assert len(status) == 5
    # -1: outlets are created with state -1 before any device poll.
    assert status['nps_dummy_1.port1']['STATE'] == -1
    # switch nps_dummy_1 port1 'on'
    status = await send_command(test_actor, 'on nps_dummy_1.port1')
    assert status['nps_dummy_1.port1']['STATE'] == 1
    # switch all ports on nps_dummy_1 on
    status = await send_command(test_actor, 'on nps_dummy_1')
    assert status['nps_dummy_1.port1']['STATE'] == 1
    assert status['skye.what.ever']['STATE'] == 1
    assert status['skyw.what.ever']['STATE'] == 1
    # switch off port 4 on nps_dummy_1
    status = await send_command(test_actor, 'off nps_dummy_1 4')
    status = await send_command(test_actor, 'status')
    assert status['nps_dummy_1.port1']['STATE'] == 1
    assert status['skye.what.ever']['STATE'] == 1
    assert status['skyw.what.ever']['STATE'] == 0
    assert status['skye.pwi']['STATE'] == -1
    assert status['skyw.pwi']['STATE'] == -1
    # switch off everything - same as command offall
    status = await send_command(test_actor, 'off')
    assert status['nps_dummy_1.port1']['STATE'] == 0
    assert status['skye.what.ever']['STATE'] == 0
    assert status['skyw.what.ever']['STATE'] == 0
    assert status['skye.pwi']['STATE'] == 0
    assert status['skyw.pwi']['STATE'] == 0
|
{"/python/lvmnps/switch/factory.py": ["/python/lvmnps/switch/dli/powerswitch.py", "/python/lvmnps/switch/exceptions.py"]}
|
32,599,140
|
mmingyeong/lvmnps
|
refs/heads/master
|
/python/lvmnps/actor/actor.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# @Author: Mingyeong YANG (mingyeong@khu.ac.kr), Florian Briegel (briegel@mpia.de)
# @Date: 2021-03-22
# @Filename: lvmnps/actor/actor.py
# @License: BSD 3-clause (http://www.opensource.org/licenses/BSD-3-Clause)
from __future__ import annotations
import asyncio
from contextlib import suppress
from clu.actor import AMQPActor
from lvmnps.actor.commands import parser as nps_command_parser
from lvmnps.switch.factory import powerSwitchFactory
__all__ = ["lvmnps"]
class lvmnps(AMQPActor):
    """NPS actor.
    In addition to the normal arguments and keyword parameters for
    `~clu.actor.AMQPActor`, the class accepts the following parameters.
    Parameters (TBD)
    """
    parser = nps_command_parser # commands register..CK 20210402

    def __init__(
        self,
        *args,
        **kwargs
    ):
        super().__init__(*args, **kwargs)

    async def start(self):
        """Start the actor, then start every configured power switch."""
        await super().start()
        connect_timeout = self.config["timeouts"]["switch_connect"]
        # parser_args is set by from_config to exactly [switches].
        assert len(self.parser_args) == 1
        for switch in self.parser_args[0]:
            try:
                self.log.debug(f"Start {switch.name} ...")
                await asyncio.wait_for(switch.start(), timeout=connect_timeout)
            except Exception as ex:
                # Best-effort: a switch that fails to start is logged, not fatal.
                self.log.error(f"Unexpected exception {type(ex)}: {ex}")
        self.log.debug("Start done")

    async def stop(self):
        """Cancel background jobs and stop the actor.

        NOTE(review): ``self._fetch_log_jobs`` is never assigned in this
        class -- unless a parent class defines it, this raises
        AttributeError. Also ``return super().stop()`` is not awaited;
        confirm callers await the returned coroutine.
        """
        with suppress(asyncio.CancelledError):
            for task in self._fetch_log_jobs:
                task.cancel()
                await task
        return super().stop()

    @classmethod
    def from_config(cls, config, *args, **kwargs):
        """Creates an actor from a configuration file."""
        instance = super(lvmnps, cls).from_config(config, *args, **kwargs)
        assert isinstance(instance, lvmnps)
        assert isinstance(instance.config, dict)
        if "switches" in instance.config:
            switches = []
            for (name, config) in instance.config["switches"].items():
                instance.log.info(f"Instance {name}: {config}")
                try:
                    switches.append(powerSwitchFactory(name, config, instance.log))
                except Exception as ex:
                    # A bad switch config is logged; the actor still starts.
                    instance.log.error(f"Error in power switch factory {type(ex)}: {ex}")
            # Commands receive the switch list as their first parser argument.
            instance.parser_args = [switches]
        return instance
|
{"/python/lvmnps/switch/factory.py": ["/python/lvmnps/switch/dli/powerswitch.py", "/python/lvmnps/switch/exceptions.py"]}
|
32,599,141
|
mmingyeong/lvmnps
|
refs/heads/master
|
/python/lvmnps/switch/exceptions.py
|
class PowerException(Exception):
    """Raised when communication with the DLI power switch fails."""
|
{"/python/lvmnps/switch/factory.py": ["/python/lvmnps/switch/dli/powerswitch.py", "/python/lvmnps/switch/exceptions.py"]}
|
32,599,142
|
mmingyeong/lvmnps
|
refs/heads/master
|
/python/lvmnps/switch/factory.py
|
# -*- coding: utf-8 -*-
#
# @Author: Florian Briegel (briegel@mpia.de)
# @Date: 2021-06-22
# @Filename: lvmnps/switch/__init__.py
# @License: BSD 3-clause (http://www.opensource.org/licenses/BSD-3-Clause)
from sdsstools.logger import SDSSLogger
from .dli.powerswitch import PowerSwitch as DliPowerSwitch
from .dummy.powerswitch import PowerSwitch as DummyPowerSwitch
from .exceptions import PowerException
from .iboot.powerswitch import PowerSwitch as IBootPowerSwitch
def powerSwitchFactory(name: str, config: dict, log: SDSSLogger):
    """Instantiate the power switch class matching config["type"].

    Raises PowerException for an unknown type.
    """
    known_types = {"dli": DliPowerSwitch,
                   "iboot": IBootPowerSwitch,
                   "dummy": DummyPowerSwitch}
    switch_cls = known_types.get(config["type"])
    if switch_cls is None:
        raise PowerException(f"Power switch {name} with type {config['type']} not defined")
    return switch_cls(name, config, log)
|
{"/python/lvmnps/switch/factory.py": ["/python/lvmnps/switch/dli/powerswitch.py", "/python/lvmnps/switch/exceptions.py"]}
|
32,599,143
|
mmingyeong/lvmnps
|
refs/heads/master
|
/tests/conftest.py
|
# encoding: utf-8
#
# conftest.py
"""
Here you can add fixtures that will be used for all the tests in this
directory. You can also add conftest.py files in underlying subdirectories.
Those conftest.py will only be applies to the tests in that subdirectory and
underlying directories. See https://docs.pytest.org/en/2.7.3/plugins.html for
more information.
"""
# import os
# import shutil
import pytest
from clu import AMQPActor, AMQPClient
# from pytest_rabbitmq import factories
# rabbitmq_local_proc = shutil.which('rabbitmq-server', path="/usr/local/sbin:/usr/sbin")
# rabbitmq_local_plugindir = '/usr/lib64/rabbitmq/lib/rabbitmq_server-3.8.11/plugins/'
# rabbitmq_proc = factories.rabbitmq_proc(host='127.0.0.1',
# port=None,
# node="test",
# logsdir='/tmp/rabbitmq/logs',
# plugindir=rabbitmq_local_plugindir,
# server=rabbitmq_local_proc,
# ctl=f"{os.path.dirname(rabbitmq_local_proc)}/rabbitmqctl")
@pytest.fixture
async def amqp_actor(rabbitmq, event_loop):
    """Yield a started AMQPActor wired to the test RabbitMQ broker."""
    actor = AMQPActor(name="amqp_actor", port=rabbitmq.args["port"])
    await actor.start()
    yield actor
    await actor.stop()
@pytest.fixture
async def amqp_client(rabbitmq, amqp_actor, event_loop):
    """Yield a started AMQPClient that models the amqp_actor fixture."""
    test_client = AMQPClient(
        name="amqp_client", models=["amqp_actor"], port=rabbitmq.args["port"]
    )
    await test_client.start()
    yield test_client
    await test_client.stop()
|
{"/python/lvmnps/switch/factory.py": ["/python/lvmnps/switch/dli/powerswitch.py", "/python/lvmnps/switch/exceptions.py"]}
|
32,599,144
|
mmingyeong/lvmnps
|
refs/heads/master
|
/python/lvmnps/switch/dli/powerswitch.py
|
# -*- coding: utf-8 -*-
#
# @Author: Florian Briegel (briegel@mpia.de)
# @Date: 2021-06-24
# @Filename: lvmnps/switch/iboot/powerswitch.py
# @License: BSD 3-clause (http://www.opensource.org/licenses/BSD-3-Clause)
from sdsstools.logger import SDSSLogger
from lvmnps.switch.dli.dlipower import PowerSwitch as DliPowerSwitch
from lvmnps.switch.powerswitchbase import PowerSwitchBase
# Todo: Dont inherit clu.Device in lvmnps.switch.dli.dlipower.PowerSwitch if you are not using it.
__all__ = ['PowerSwitch']
class PowerSwitch(PowerSwitchBase):
    """Power switch backed by a Digital Loggers (DLI) web power switch."""

    def __init__(self, name: str, config: dict, log: SDSSLogger):
        # Bug fix: the annotation was `config: []` (an empty-list literal);
        # the value actually used is a configuration mapping.
        super().__init__(name, config, log)
        self.hostname = self.config_get('hostname')
        self.username = self.config_get('user', 'admin')
        self.password = self.config_get('password', 'admin')
        self.use_https = self.config_get('use_https', False)
        # DLI client is created lazily on the first reachability check.
        self.dli = None

    async def start(self):
        """Probe the switch once, then refresh the state of all outlets."""
        if not await self.isReachable():
            self.log.warning(f"{self.name} not reachable on start up")
        await self.update(self.outlets)

    async def stop(self):
        """Shut the handler down (nothing to release for the DLI client).

        The original wrapped a bare ``pass`` in try/except — dead code that
        could never raise; removed.
        """
        self.log.debug("So Long, and Thanks for All the Fish ...")

    async def isReachable(self):
        """Return True if the switch answers; (re)create the client as needed."""
        try:
            if not self.dli:
                self.dli = DliPowerSwitch(userid=self.username, password=self.password,
                                          hostname=self.hostname, use_https=self.use_https)
            reachable = self.dli.verify()
            if not reachable:
                # Drop the client so the next call reconnects from scratch.
                self.dli = None
            return reachable
        except Exception as ex:
            self.log.error(f"Unexpected exception {type(ex)}: {ex}")
            self.dli = None
            return False

    async def update(self, outlets):
        """Refresh the cached state of *outlets*; unreachable ports become -1."""
        self.log.debug(f"{outlets}")
        try:
            if await self.isReachable():
                for o in outlets:
                    o.setState(self.dli.status(o.portnum))
            else:
                for o in outlets:
                    o.setState(-1)
        except Exception as ex:
            self.log.error(f"Unexpected exception {type(ex)}: {ex}")

    async def switch(self, state, outlets):
        """Turn *outlets* on (truthy *state*) or off, then refresh them."""
        self.log.debug(f"{outlets} = {state}")
        try:
            if await self.isReachable():
                for o in outlets:
                    self.dli.on(o.portnum) if state else self.dli.off(o.portnum)
                await self.update(outlets)
        except Exception as ex:
            self.log.error(f"Unexpected exception {type(ex)}: {ex}")
|
{"/python/lvmnps/switch/factory.py": ["/python/lvmnps/switch/dli/powerswitch.py", "/python/lvmnps/switch/exceptions.py"]}
|
32,599,145
|
mmingyeong/lvmnps
|
refs/heads/master
|
/python/lvmnps/__main__.py
|
import os
import click
from click_default_group import DefaultGroup
from clu.tools import cli_coro
from sdsstools.daemonizer import DaemonGroup
from lvmnps.actor.actor import lvmnps as NpsActorInstance
# CLI entry point: with no subcommand, `lvmnps` falls through to `actor`.
@click.group(cls=DefaultGroup, default="actor", default_if_no_args=True)
@click.option(
    "-c",
    "--config",
    "config_file",
    type=click.Path(exists=True, dir_okay=False),
    help="Path to the user configuration file.",
)
@click.option(
    "-v",
    "--verbose",
    count=True,
    help="Debug mode. Use additional v for more details.",
)
@click.pass_context
def lvmnps(ctx, config_file, verbose):
    """Nps controller"""
    # Stash shared CLI options so subcommands can read them from ctx.obj.
    ctx.obj = {"verbose": verbose, "config_file": config_file}


@lvmnps.group(cls=DaemonGroup, prog="nps_actor", workdir=os.getcwd())
@click.pass_context
@cli_coro
async def actor(ctx):
    """Runs the actor."""
    # Fall back to the packaged default config when none was given on the CLI.
    default_config_file = os.path.join(os.path.dirname(__file__), "etc/lvmnps.yml")
    config_file = ctx.obj["config_file"] or default_config_file
    lvmnps_obj = NpsActorInstance.from_config(config_file)
    if ctx.obj["verbose"]:
        # Level 0 lets every record through both file and stream handlers.
        lvmnps_obj.log.fh.setLevel(0)
        lvmnps_obj.log.sh.setLevel(0)
    await lvmnps_obj.start()
    await lvmnps_obj.run_forever()


if __name__ == "__main__":
    lvmnps()
|
{"/python/lvmnps/switch/factory.py": ["/python/lvmnps/switch/dli/powerswitch.py", "/python/lvmnps/switch/exceptions.py"]}
|
32,599,146
|
mmingyeong/lvmnps
|
refs/heads/master
|
/python/lvmnps/switch/outlet.py
|
# -*- coding: utf-8 -*-
#
# @Author: Florian Briegel (briegel@mpia.de)
# @Date: 2021-06-22
# @Filename: lvmnps/switch/outlet.py
# @License: BSD 3-clause (http://www.opensource.org/licenses/BSD-3-Clause)
class Outlet(object):
    """A single power outlet (port) on a network power switch.

    State is an int: 1 = on, 0 = off, -1 = unknown/unreachable.
    """

    def __init__(self, swname, name, portnum, description, state):
        self.swname = swname
        # Fall back to generated name/description when none was configured;
        # an outlet counts as "in use" only if either was given explicitly.
        self.name = name if name else f"{swname}.port{portnum}"
        self.portnum = portnum
        self.description = description if description else f"{swname} Port {portnum}"
        self.inuse = bool(name) or bool(description)
        self.state = state

    def __str__(self):
        return f"#{self.portnum}:{self.name}={self.state}"

    def __repr__(self):
        return self.__str__()

    @staticmethod
    def parse(value):
        """Normalize *value* to 1 (on), 0 (off) or -1 (unrecognized).

        Generalized: string inputs are now matched case-insensitively and
        with surrounding whitespace stripped ("On", " OFF " etc.), while the
        original ON/OFF/on/off/0/1/bool inputs map exactly as before.
        """
        if isinstance(value, str):
            value = value.strip().lower()
        if value in ('off', '0', 0, False):
            return 0
        if value in ('on', '1', 1, True):
            return 1
        return -1

    def setState(self, value):
        self.state = Outlet.parse(value)

    def isOn(self):
        return self.state == 1

    def isOff(self):
        return self.state == 0

    def isValid(self):
        # NOTE(review): returns True only for the *unknown* state (-1), which
        # contradicts the name and looks inverted — but callers may rely on
        # the current semantics, so behavior is kept. TODO: confirm intent.
        return self.state == -1

    def toJson(self):
        return {
            'STATE': self.state,
            'DESCR': self.description,
            'SWITCH': self.swname,
            'PORT': self.portnum,
        }
|
{"/python/lvmnps/switch/factory.py": ["/python/lvmnps/switch/dli/powerswitch.py", "/python/lvmnps/switch/exceptions.py"]}
|
32,612,379
|
melival/todo_service
|
refs/heads/master
|
/service/users/views.py
|
from rest_framework.viewsets import ModelViewSet
from .models import ServiceUser
from .serializers import ServiceUserModelSerializer
# Create your views here.
class UserModelViewSet(ModelViewSet):
    """CRUD REST endpoints for ServiceUser (list/retrieve/create/update/destroy)."""
    queryset = ServiceUser.objects.all()
    serializer_class = ServiceUserModelSerializer
|
{"/service/users/views.py": ["/service/users/models.py"]}
|
32,652,716
|
conorsch/septapy
|
HEAD
|
/setup.py
|
"""Packaging script for septapy, a SEPTA public-transit API client."""
try:
    # Bug fix: `install_requires` is a setuptools-only keyword; under
    # distutils.core.setup it is silently ignored, so the `requests`
    # dependency was never installed. (distutils is also removed in 3.12.)
    from setuptools import setup
except ImportError:
    from distutils.core import setup

setup(
    name='septapy',
    packages=['septapy'],
    version='0.0.1',
    description='Library for querying SEPTA public transit API',
    author='Conor Schaefer',
    author_email='conor.schaefer@gmail.com',
    url='https://github.com/ronocdh/septapy',
    download_url='https://github.com/ronocdh/septapy/tarball/0.0.1',
    install_requires=[
        'requests>=2.2.1',
    ],
    keywords=['transit', 'API', 'Philadelphia', 'SEPTA'],
    classifiers=[],
)
|
{"/runtests.py": ["/tests/all_tests.py"], "/tests/test_route.py": ["/septapy/route.py"]}
|
32,652,717
|
conorsch/septapy
|
HEAD
|
/tests/test_stop.py
|
import sys
import os
import unittest
sys.path.insert(0, os.path.abspath( os.path.join(os.path.dirname(__file__), '../septapy/') ))
import septapy
class TestStop(unittest.TestCase):
    """Tests for stop lookup via septapy routes."""

    def setUp(self):
        # Print out the name of the running test method.
        # Parenthesized form works on both Python 2 and 3.
        print("\nRunning test '%s'..." % self._testMethodName)
        self.mockRoute = 34

    def testGetStopsByRoute(self):
        r = septapy.route.Route(self.mockRoute)
        # Bug fix: the original `r.identifier == self.mockRoute` was a bare
        # comparison whose result was discarded — the test could never fail.
        self.assertEqual(r.identifier, self.mockRoute)
if __name__ == '__main__':
unittest.main()
|
{"/runtests.py": ["/tests/all_tests.py"], "/tests/test_route.py": ["/septapy/route.py"]}
|
32,737,814
|
vishnusangli/autonomous_cars
|
refs/heads/master
|
/trial2.py
|
import pyglet
from pyglet import shapes
from pyglet.window import mouse
import math
import numpy as np
from pyglet.gl import *
import importlib
from calcmath import *
# 960x540 window plus a shared batch so all shapes render in one draw call.
window = pyglet.window.Window(960, 540)
batch = pyglet.graphics.Batch()
line = shapes.Line(100, 100, 100, 200, batch=batch)
arc = shapes.Arc(100, 400, 34, segments=25, angle=np.pi, color=(255, 255, 0), batch=batch)
arc2 = shapes.Arc(100, 0, 100, segments=200, angle=np.pi/2, color=(255, 255, 0), batch=batch)
arc2.rotation = -90
line2 = shapes.Line(100, 100, 200, 100, batch=batch)
frame = 0  # animation frame counter advanced by update_frame
def update_frame(x, y):
    """Advance the global frame counter (wrapping after 20) and draw the batch.

    Scheduled via pyglet.clock.schedule, so *x* receives the elapsed dt and
    *y* the extra argument passed at schedule time; neither is used here.
    """
    global frame
    # Idiom fix: identity test `is None` instead of `== None`.
    if frame is None or frame == 20:
        frame = 0
    else:
        frame += 1
    batch.draw()
# Toggle: while True, the preview follows the mouse; a click freezes it.
move = True


@window.event
def on_mouse_press(x, y, button, modifiers):
    global line
    global move
    if move:
        # Freeze the current preview at the click position.
        line = shapes.Line(100, 100, x, y, batch=batch)
        do_thing(100, 100, x, y, batch)
        do_thing2(100, 100, x, y, batch)
    move = not move


@window.event
def on_mouse_motion(x, y, dx, dy):
    global line
    global move
    if (move):
        # Live preview: straight segment plus both track renderings.
        line = shapes.Line(100, 100, x, y, batch=batch)
        do_thing(100, 100, x, y, batch)
        do_thing2(100, 100, x, y, batch)


#@window.event
def on_mouse_drag(x, y, dx, dy, buttons, modifiers):
    # Disabled handler (decorator commented out); kept for experimentation.
    global line
    line = shapes.Line(100, 100, x, y, width=19, batch=batch)


@window.event
def on_draw():
    window.clear()
    batch.draw()
line7 = None  # back edge of the straight segment; kept alive as a global


def do_thing(x1, y1, x2, y2, batch):
    """Draw a straight track segment of half-width 10 from (x1, y1) to (x2, y2).

    Renders the back edge and both side rails as lines. The shapes are stored
    in module globals so pyglet keeps them alive between frames.
    """
    global line5
    global line6
    global line7
    start = Point(x1, y1)
    end = Point(x2, y2)
    halfpi = np.pi/2
    width = 10
    #print(start.angle(end))
    # Offset vector perpendicular to the segment direction, length `width`.
    perp = start.angle(end) - halfpi
    diff = Point(width * np.cos(perp), width * np.sin(perp))
    #print(perp)
    lower = Point(start.xPos + diff.xPos, start.yPos + diff.yPos)
    upper = Point(start.xPos - diff.xPos, start.yPos - diff.yPos)
    # Back wall connecting the two rails at the start.
    line7 = shapes.Line(lower.xPos, lower.yPos, upper.xPos, upper.yPos, batch=batch)
    upper_pair = Point(end.xPos - diff.xPos, end.yPos - diff.yPos)
    lower_pair = Point(end.xPos + diff.xPos, end.yPos + diff.yPos)
    line5 = shapes.Line(lower.xPos, lower.yPos, lower_pair.xPos, lower_pair.yPos, batch=batch)
    line6 = shapes.Line(upper.xPos, upper.yPos, upper_pair.xPos, upper_pair.yPos, batch=batch)
arc5 = None
arc6 = None
arc7 = None

# Radian/degree helpers (duplicates of the ones in calcmath).
rad_deg = lambda x: x * (180./np.pi)
deg_rad = lambda x: x * (np.pi/180.)


def do_thing2(x1, y1, x2, y2, batch):
    """Draw a curved track (three concentric arcs) from (x1, y1) to (x2, y2).

    turnCalc supplies the arc centre, radius, sweep angle and rotation; when
    the target is not reachable with a valid turn it returns None and nothing
    is drawn. The arcs are stored in module globals so pyglet keeps them alive.
    """
    global arc5
    global arc6
    global arc7
    start = Point(x1, y1, np.pi/2)
    end = Point(x2, y2)
    halfpi = np.pi/2
    width = 10
    try_val = turnCalc(start, end)
    # Idiom fix: identity test `is not None` instead of `!= None`.
    if try_val is not None:
        anchor, radius, phi, rotate = try_val
        # Centre line plus inner/outer rails offset by the track width.
        arc5 = shapes.Arc(anchor.xPos, anchor.yPos, radius, segments=25, angle=phi, color=(255, 255, 255), batch=batch)
        arc5.rotation = rotate
        arc6 = shapes.Arc(anchor.xPos, anchor.yPos, radius - width, segments=25, angle=phi, color=(255, 255, 255), batch=batch)
        arc6.rotation = rotate
        arc7 = shapes.Arc(anchor.xPos, anchor.yPos, radius + width, segments=25, angle=phi, color=(255, 255, 255), batch=batch)
        arc7.rotation = rotate
# NOTE(review): schedule() calls update_frame(dt, 1/10.0) every frame — the
# extra 1/10.0 suggests schedule_interval was intended; confirm before changing.
pyglet.clock.schedule(update_frame, 1/10.0)
pyglet.app.run()
|
{"/trial8.py": ["/env.py"], "/trial6.py": ["/calcmath.py", "/car.py"], "/trial2.py": ["/calcmath.py"], "/trial7.py": ["/env.py"], "/car.py": ["/calcmath.py"], "/worldcreation.py": ["/env.py"], "/env.py": ["/calcmath.py"]}
|
32,737,815
|
vishnusangli/autonomous_cars
|
refs/heads/master
|
/env.py
|
import numpy as np
import calcmath
'''
World and track width are fixed, should not be changed
'''
# Offset applied on each side of the centreline when building wireframes
# (see LineElement.wireFrame), so the full track is 2 * trackWidth wide.
trackWidth = 50
class Track:
    '''
    Track objects represent the entire world
    '''
    # Is the area outside the track traversable?
    openArea = True
    # Class-wide toggle: render elements as wireframes. Also read by the
    # track-element classes at construction time.
    wireFrame = True

    def __init__(self, track_elems, height, width) -> None:
        '''
        Track would've already been created and verified, this is only placeholder
        '''
        self.track_elements = track_elems
        # Relative dimensions of the world, later scaled for the display.
        self.HEIGHT = height
        self.WIDTH = width
        # TODO: run the Track Engine and create the spatial data structure.

    def render(self, batch):
        """Draw the track into *batch* (placeholder).

        Bug fix: ``self`` was missing, so any ``track.render(batch)`` call
        raised TypeError before reaching this stub.
        """
        pass

    def checkCollision(self, thing):
        """Check *thing* (a car/Thing) against the track (placeholder).

        Bug fix: ``self`` was missing here as well.
        """
        pass
class TrackElement:
    '''
    A General data type for a track element
    '''
    nextElem = None  # set when the following element is attached

    def __init__(self, prev, end, startPoint = None) -> None:
        """Link this element after *prev*, or anchor it at *startPoint*.

        Exactly one of *prev*/*startPoint* is expected: ordinary elements
        chain off the previous element's end point, while the StartingStrip
        supplies an explicit start point instead.
        """
        # Idiom fix: identity tests (`is None`) instead of `== None`.
        if startPoint is None:
            self.startPoint = prev.endPoint  # does not work with the very first LineElement
        elif prev is None:
            self.startPoint = startPoint  # startPoint is a Point, for the StartingStrip
        else:
            # Both supplied: ambiguous construction. Message added to the
            # original bare Exception (same type, so existing handlers hold).
            raise Exception("TrackElement takes either prev or startPoint, not both")
        self.endPoint = end
        self.prevElem = prev
class LineElement(TrackElement):
    '''
    Represents one straight line in the track as an object
    '''
    def __init__(self, prev, end) -> None:
        super().__init__(prev, end)
        self.set_endDir()
        # NOTE: Track.wireFrame is the class-level boolean toggle, while
        # self.wireFrame below is this class's *method* — same name, two
        # different attributes.
        if Track.wireFrame:
            self.points = self.wireFrame()

    def set_endDir(self):
        '''
        Useful in the wireframe config & future stages
        The startPoint's directional configuration should've been done by the previous track element
        '''
        angle = self.startPoint.angle(self.endPoint)
        if self.startPoint.dirVec == None: #Treats cases of StartingStrip
            self.startPoint.dirVec = angle
        #assert angle == self.startPoint.dirVec, 'Directions from a line eleme should essentially be the same'
        self.endPoint.dirVec = angle

    def wireFrame(self):
        '''
        Returns a list of two lists - each (startX, startX, endX, endY)
        For each side of the track
        Start points for both lists will correlate with start Point of chosen startPoint
        (For simplicity when defining and drawing the StartingStrip and FinishLine)
        '''
        # Offset each rail perpendicular to the travel direction by trackWidth.
        perp_angle = self.startPoint.dirVec - np.pi/2
        x_off = np.cos(perp_angle) * trackWidth
        y_off = np.sin(perp_angle) * trackWidth
        right = [self.startPoint.xPos + x_off, self.startPoint.yPos + y_off, self.endPoint.xPos + x_off, self.endPoint.yPos + y_off]
        left = [self.startPoint.xPos - x_off, self.startPoint.yPos - y_off, self.endPoint.xPos - x_off, self.endPoint.yPos - y_off]
        to_return = [[left], [right]]
        return to_return

    def render(batch, color):
        # NOTE(review): `self` is missing, so instance.render(batch, color)
        # binds batch=self — confirm intended call style before fixing.
        pass
class StartingStrip(LineElement):
    """First element of a track, anchored at an explicit start Point.

    NOTE(review): super().__init__(None, end, start) passes three arguments
    to LineElement.__init__, which only accepts (prev, end) — and
    LineElement.__init__ calls self.wireFrame() (dispatching to the override
    below) before self.points exists. Construction looks broken as written;
    verify before use.
    """
    def __init__(self, start, end) -> None:
        super().__init__(None, end, start)
        if Track.wireFrame:
            self.wireFrame()

    def wireFrame(self):
        # Appends the back wall (the two start-side points) to self.points.
        third = [self.points[0][0:2], self.points[1][0:2]] #Back wall
        self.points.append([third]) #bottom two points
class FinishLine(TrackElement):
    """Closing element of a track.

    NOTE(review): wireFrame reads self.points, but neither this class nor
    TrackElement ever initializes it — presumably it should reuse the
    previous element's points; confirm before use.
    """
    def __init__(self, prev, end) -> None:
        super().__init__(prev, end)
        if Track.wireFrame:
            self.wireFrame()

    def wireFrame(self):
        '''
        Uses the final two points of previous element
        Ne
        '''
        third = [self.points[0][0:2], self.points[1][0:2]] #Back wall
        self.points.append([third]) #bottom two points
class TurnElement(TrackElement):
    '''
    Represents a curve in the track as an object
    '''
    def __init__(self, prev, end) -> None:
        super().__init__(prev, end)
        #Where are point directions settled?
        self.set_endDir()
        # NOTE(review): wireFrame currently has no body besides comments, so
        # self.points becomes None — and it is invoked a second time below.
        self.points = self.wireFrame()
        if Track.wireFrame:
            self.wireFrame()

    def wireFrame(self):
        '''
        This is always run as the points generated here are important and neede regardless
        '''
        #Find inner side, get the perp direction towards centre
        #Get a difference vector of start and end
        #90 - (vector angle - perp direction) gives the arc angle
        # difference vector magnitude / cos(vector angle - perp direction) gives radius
        #go in perp direction to find anchor

    def render(self, batch):
        pass
class TrackEngine:
    """Spatial index for track elements: a width x height grid of buckets."""

    def __init__(self, width, height) -> None:
        self.height = height
        self.width = width
        self.worldGrid = self.writeGrid(width, height)

    def writeGrid(self, width, height):
        """Return a width x height grid of independent empty element lists.

        Bug fix: the original definition lacked ``self``, so the call in
        ``__init__`` raised TypeError (``self`` was bound to ``width``).
        """
        world_grid = [[[] for y in range(height)] for x in range(width)]
        return world_grid

    def fillGrid(self, anchor, track):
        '''
        Adds a track to the block that contains *anchor*.
        '''
        self.findBlock(anchor).append(track)

    def findBlock(self, anchor):
        '''
        Returns the list of TrackElements stored at the (x, y) anchor block.
        '''
        assert anchor[0] < self.width, 'Invalid coordinates'
        assert anchor[1] < self.height, 'Invalid coordinates'
        x = int(anchor[0])
        y = int(anchor[1])
        return self.worldGrid[x][y]

    def findWall(self, start_anchor, angle):
        '''
        Searching the closest LineElement (point on the wall) from a point and angle
        returns the point in that line and the corresponding Track Element
        '''
        pass

    def shapeCollide(self, start, angle, shape):
        """Placeholder for shape-vs-track collision queries."""
        pass
|
{"/trial8.py": ["/env.py"], "/trial6.py": ["/calcmath.py", "/car.py"], "/trial2.py": ["/calcmath.py"], "/trial7.py": ["/env.py"], "/car.py": ["/calcmath.py"], "/worldcreation.py": ["/env.py"], "/env.py": ["/calcmath.py"]}
|
32,737,816
|
vishnusangli/autonomous_cars
|
refs/heads/master
|
/calcmath.py
|
import numpy as np
# PEP 8: named functions instead of lambda assignments (same call interface).
def rad_deg(x):
    """Convert radians to degrees."""
    return x * (180. / np.pi)


def deg_rad(x):
    """Convert degrees to radians."""
    return x * (np.pi / 180.)
class Point:
    '''
    API used for reference points in track elements
    Standard --- Angle Radian System
    '''
    def __init__(self, xPos, yPos, dir = None) -> None:
        self.xPos = xPos
        self.yPos = yPos
        self.dirVec = dir #Directional vector of movement & following track creation

    def angle(self, other):
        '''
        Angle from this point to *other*, in radians.

        arctan of the slope, then adjusted by quadrant so the result covers
        more than the (-pi/2, pi/2) range arctan alone provides.
        '''
        # Vertical line: slope treated as +inf (arctan gives pi/2).
        d = np.inf
        if other.xPos - self.xPos != 0:
            d = np.divide(other.yPos - self.yPos, other.xPos - self.xPos)
        angle = np.arctan(d)
        if (angle < 0 and self.yPos < other.yPos):
            angle += np.pi
        elif angle > 0 and self.yPos > other.yPos:
            angle = np.pi - angle
        elif angle == 0 and self.xPos > other.xPos:
            angle = np.pi
        # NOTE(review): np.arctan(np.inf) is pi/2, never inf, so this branch
        # is unreachable — straight-down directions fall through. Confirm.
        elif angle == np.inf and self.yPos > other.yPos:
            angle = -np.inf
        return angle

    def distance(self, other):
        '''
        Returns the Euclidean distance to another point
        '''
        delt_y = np.power(other.yPos - self.yPos, 2)
        delt_x = np.power(other.xPos - self.xPos, 2)
        to_return = np.sqrt(delt_x + delt_y)
        return to_return

    def __str__(self):
        return 'Point[{0:4.4f}, {1:4.4f}]|'.format(self.xPos, self.yPos) + str(self.dirVec)

    def __repr__(self):
        return 'Point[{0:4.4f}, {1:4.4f}]|'.format(self.xPos, self.yPos) + str(self.dirVec)
def rad_reduce(x):
    """Normalize angle *x* (radians) into the interval (-pi, pi].

    Bug fix: the original added 2*pi only once for negative inputs, so any
    x < -2*pi came back out of range (e.g. -5*pi returned -3*pi). The modulo
    form handles every magnitude in one step and matches the original for
    inputs in (-2*pi, +inf).
    """
    x = x % (2 * np.pi)  # now in [0, 2*pi)
    if x > np.pi:
        x -= 2 * np.pi
    return x


def frame_angle(lims, theta):
    """Shift *theta* by multiples of 2*pi into [lims[0], lims[1]].

    If a valid angle is given, only one while loop is used. If an invalid
    angle is given, the first loop runs until it overshoots, the second does
    nothing, and the final range check fails — returning None.
    """
    theta = rad_reduce(theta)
    while theta < lims[0]:
        theta += (2 * np.pi)
    while theta > lims[1]:
        theta -= (2 * np.pi)
    if lims[0] <= theta <= lims[1]:
        return theta
    else:
        print(lims, theta)  # debug trace kept from the original
        return None
def get_angle(start, end):
    """Angle (radians) of the vector from *start* to *end*, quadrant-corrected."""
    dy = end[1] - start[1]
    dx = end[0] - start[0]
    angle = np.arctan(np.divide(dy, dx))
    # arctan only covers (-pi/2, pi/2); shift into the correct half-plane.
    if angle < 0 and dy > 0:
        angle = angle + np.pi
    elif angle > 0 and dy < 0:
        angle = np.pi - angle
    return angle
def get_distance(start, end):
    """Euclidean distance between 2-D points *start* and *end*."""
    dx = start[0] - end[0]
    dy = start[1] - end[1]
    return np.sqrt(dx * dx + dy * dy)
def turnCalc(start, end):
    '''
    Compute the turning arc taking *start* (with direction start.dirVec) to *end*.

    Returns (anchor, radius, phi, rotate): the arc centre Point, the radius,
    the sweep angle in radians and the rotation in degrees (for pyglet Arcs),
    or None when *end* lies outside the half-pi cone of the travel direction.
    Next stage -- radius changing with track width
    '''
    halfpi = np.pi/2
    deriv = rad_reduce(start.dirVec)
    angle = start.angle(end)
    angle = frame_angle([deriv - halfpi, deriv + halfpi], angle)
    # Idiom fix: identity test `is None` instead of `== None`.
    if angle is None:
        return
    # Both angles are now in radians. The turn centre lies perpendicular to
    # the travel direction, on whichever side the target falls.
    standard = deriv - halfpi
    if deriv < angle:
        standard = deriv + halfpi
    distance = start.distance(end)
    theta = angle - standard
    radius = np.divide(distance, 2 * np.cos(theta))
    phi = np.pi - (2 * abs(rad_reduce(theta)))
    # Bug fix: the y coordinate previously reused start.xPos (copy-paste
    # error), placing the arc centre at the wrong height.
    anchor = Point(start.xPos + (radius * np.cos(standard)),
                   start.yPos + (radius * np.sin(standard)))
    rotate = min([anchor.angle(start), anchor.angle(end)])
    rotate = -rad_deg(rotate)
    return anchor, radius, phi, rotate
def funcsolve(f, g, lims, e = 1e-4, step = 0.2, min_step = 0.001):
    """Search [lims[0], lims[1]] for an x where f(x) ~= g(x).

    Scans with stride *step*; when the curves stop converging it recursively
    refines the last bracket with a 10x smaller step, down to *min_step*.
    Returns (x, True) on success or (0, False) when no crossing is found.
    """
    # Squared gap between the curves; <= e counts as a solution.
    diff = lambda x: np.power(f(x) - g(x), 2)
    deriv = lambda f, x: np.divide(f(x) - f(x - step), step) #Backward differentiation
    def is_converging(x):
        # True when the curves are approaching each other at x.
        f_val = f(x)
        f_deriv = deriv(f, x)
        g_val = g(x)
        g_deriv = deriv(g, x)
        #If two lines can converge
        if f_val > g_val:
            return g_deriv > f_deriv
        elif f_val < g_val:
            return f_deriv > g_deriv
        else: #Solution found
            return True
    def could_converge(lim):
        '''
        Does the inequality check
        Needn't worry about equality in either case as would've been checked already
        '''
        # XOR: a sign change of (f - g) across the bracket implies a crossing.
        start = f(lim[0]) < g(lim[0])
        end = f(lim[1]) < g(lim[1])
        return start ^ end
    x = lims[0]
    prevDiff = diff(x)  # NOTE(review): computed but never read afterwards
    prev = True #Will increase runtime for faulty ones, but setting initial True will ignore certain cases
    #Prioritizing correctness over efficiency
    while x <= lims[1]:
        '''
        Has converging switched from True to Negative
        '''
        if diff(x) <= e: #Base condition
            return x, True
        curr = is_converging(x)
        if prev and not curr: #Was converging and is not now
            pot_lim = [x - step, x]
            if could_converge(pot_lim) and step >= min_step: #Could it have converged; include a lower limit on the step
                return funcsolve(f, g, pot_lim, step = np.divide(step, 10))#Recursive call with smaller range and step
        prev = curr
        x += step
    return 0, False
|
{"/trial8.py": ["/env.py"], "/trial6.py": ["/calcmath.py", "/car.py"], "/trial2.py": ["/calcmath.py"], "/trial7.py": ["/env.py"], "/car.py": ["/calcmath.py"], "/worldcreation.py": ["/env.py"], "/env.py": ["/calcmath.py"]}
|
32,737,817
|
vishnusangli/autonomous_cars
|
refs/heads/master
|
/car.py
|
class Thing: #Most basic controllable car, a rectangle without wheels
    """Base class for anything controllable on the track (placeholder)."""
    def __init__(self) -> None:
        pass


class Car(Thing):
    """A car: a Thing that will eventually own Wheels (placeholder)."""
    def __init__(self) -> None:
        super().__init__()


class Wheel:
    """A single wheel of a Car (placeholder)."""
    def __init__(self) -> None:
        pass
|
{"/trial8.py": ["/env.py"], "/trial6.py": ["/calcmath.py", "/car.py"], "/trial2.py": ["/calcmath.py"], "/trial7.py": ["/env.py"], "/car.py": ["/calcmath.py"], "/worldcreation.py": ["/env.py"], "/env.py": ["/calcmath.py"]}
|
32,737,818
|
vishnusangli/autonomous_cars
|
refs/heads/master
|
/worldcreation.py
|
import pyglet
'''
This is the warm up assignment to the racetrack rendering
Should be completely standalone, including the render loop
'''
|
{"/trial8.py": ["/env.py"], "/trial6.py": ["/calcmath.py", "/car.py"], "/trial2.py": ["/calcmath.py"], "/trial7.py": ["/env.py"], "/car.py": ["/calcmath.py"], "/worldcreation.py": ["/env.py"], "/env.py": ["/calcmath.py"]}
|
32,762,347
|
EmersonsfDev/-Website-using-python-flask-and-bootstrap
|
refs/heads/main
|
/config.py
|
"""Flask application configuration.

SECURITY NOTE(review): real-looking credentials (SECRET_KEY, MAIL_PASSWORD)
were committed to source control. They now default from environment
variables for backward compatibility, but the committed values should be
rotated and removed.
"""
import os
import os.path

basedir = os.path.abspath(os.path.dirname(__file__))

# Bug fix: Flask's config.from_object() only loads UPPERCASE attributes, so
# the original `Debug = True` was silently ignored. The old name is kept for
# any code that read it directly.
DEBUG = True
Debug = True

SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'storage.db')
SQLALCHEMY_TRACK_MODIFICATIONS = True

SECRET_KEY = os.environ.get('SECRET_KEY', '1235789aAsd!')

MAIL_SERVER = "smtp.gmail.com"
MAIL_PORT = 465
MAIL_USE_SSL = True
MAIL_USERNAME = os.environ.get('MAIL_USERNAME', 'emersonsf.info@gmail.com')
MAIL_PASSWORD = os.environ.get('MAIL_PASSWORD', 'kezkxirfaqkrjiis')
|
{"/app/controllers/default.py": ["/app/__init__.py", "/app/models/forms.py"]}
|
32,762,348
|
EmersonsfDev/-Website-using-python-flask-and-bootstrap
|
refs/heads/main
|
/app/models/forms.py
|
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField
from wtforms.validators import DataRequired
from flask_wtf.form import Form
from wtforms import TextField, TextAreaField, SubmitField, validators, ValidationError
class LoginForm(FlaskForm):
    """Sign-in form: email + password plus a remember-me flag."""
    email = StringField("email", validators=[DataRequired()])
    password = PasswordField("password",validators=[DataRequired()])
    remember = BooleanField("remember")


class ResgisterForm(FlaskForm):
    """Account-registration form.

    NOTE(review): class name misspells "RegisterForm"; renaming would break
    imports in the controllers, so it is left as-is.
    """
    username = StringField("username")
    email = StringField("email", validators=[DataRequired()])
    password = PasswordField("password",validators=[DataRequired()])
    remember = BooleanField("remember")
class ContactForm(Form):
    """Contact-page form: name, email, subject and message.

    Modernized: ``TextField`` and ``validators.Required`` were deprecated and
    removed in WTForms 3; ``StringField`` and ``DataRequired`` (both already
    imported at the top of this module) are the drop-in replacements.
    """
    name = StringField("Name", [DataRequired("Please enter your name.")])
    # Bug fix: the Email validator reused the "enter your email" message;
    # it now says the address is invalid rather than missing.
    email = StringField("Email", [DataRequired("Please enter your email address."), validators.Email("Please enter a valid email address.")])
    subject = StringField("Subject", [DataRequired("Please enter a subject.")])
    message = TextAreaField("Message", [DataRequired("Please enter a message.")])
    submit = SubmitField("Send")
|
{"/app/controllers/default.py": ["/app/__init__.py", "/app/models/forms.py"]}
|
32,762,349
|
EmersonsfDev/-Website-using-python-flask-and-bootstrap
|
refs/heads/main
|
/app/__init__.py
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager
from flask_login import LoginManager
from flask import Flask, render_template, request, flash
from flask_mysqldb import MySQL
import MySQLdb.cursors
import re
import stripe
app = Flask(__name__)

# SECURITY NOTE(review): Stripe API keys are committed in source control;
# even test keys should come from environment/config. Rotate and externalize.
app.config['STRIPE_PUBLIC_KEY'] = 'pk_test_51HjRKQH7KlqUP8534T3z15yWA6P7ZRsaiUuNoidvGbbfVrZ1Pgjec44nWBI4CcrpK12q9m4PAn2OGMa27NwXDdOh00kmyM6vLa'
app.config['STRIPE_SECRET_KEY'] = 'sk_test_51HjRKQH7KlqUP853x0IoljLKMklujniwX4n9wCtVYX1qbNXxyXveIMRC4fVFVSVqMrc7fcaCFcmxO1qMXY9Crs2M00uPvPSBgO'
stripe.api_key = app.config['STRIPE_SECRET_KEY']
app.config.from_object('config')

# NOTE(review): db is a flask_mysqldb.MySQL handle, but Flask-Migrate expects
# a Flask-SQLAlchemy instance — `migrate` likely does not work as wired here.
db = MySQL(app)
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
login_manager = LoginManager(app)

# Imported last so routes and forms see the fully configured `app` and `db`.
from app.models import forms
from app.controllers import default
|
{"/app/controllers/default.py": ["/app/__init__.py", "/app/models/forms.py"]}
|
32,762,350
|
EmersonsfDev/-Website-using-python-flask-and-bootstrap
|
refs/heads/main
|
/app/controllers/default.py
|
from app import app,db
from flask import render_template, flash,redirect,url_for,request,flash,config,abort,session
from flask_login import login_user
from app.models.forms import LoginForm,ContactForm,ResgisterForm
from flask_mail import Message, Mail
from flask_mysqldb import MySQL
import MySQLdb.cursors
import re
import stripe
mail = Mail()
mail.init_app(app)


@app.route('/')
@app.route('/index', methods=['GET', 'POST'])
def index():
    """Landing page with the contact form; a valid POST emails the message."""
    form = ContactForm()
    if request.method == 'POST':
        # Idiom fix: `not form.validate()` instead of `== False`.
        if not form.validate():
            flash('Todos os campos são necessários.')
            return render_template('index.html', form=form)
        # Bug fix: the recipient address contained a trailing space.
        msg = Message(form.subject.data, sender='emersonsf.info@gmail.com',
                      recipients=['rualthof@gmail.com'])
        msg.body = """
      From: %s <%s>
      %s
      """ % (form.name.data, form.email.data, form.message.data)
        mail.send(msg)
        return render_template('index.html', success=True)
    # GET: just render the empty form (the route only allows GET and POST).
    return render_template('index.html', form=form)
# Static template routes: each just renders the matching template.
@app.route("/controleProducao")
def controleProducao():
    return render_template('controleProducao.html')


@app.route("/blog")
def blog():
    return render_template('blog.html')


@app.route("/blogpostbase")
def blogpostbase():
    return render_template('blogpostbase.html')


@app.route("/blogpost")
def blogpost():
    return render_template('blogpost.html')


@app.route("/blogpost2")
def blogpost2():
    return render_template('blogpost2.html')


@app.route("/blogpost3")
def blogpost3():
    return render_template('blogpost3.html')


@app.route("/blogpost4")
def blogpost4():
    return render_template('blogpost4.html')


@app.route("/blogpost5")
def blogpost5():
    return render_template('blogpost5.html')


@app.route("/blogpost6")
def blogpost6():
    return render_template('blogpost6.html')


@app.route("/blogpost7")
def blogpost7():
    return render_template('blogpost7.html')
'''
@app.route("/projeto")
def projeto():
return render_template('projeto.html')
@app.route("/compra")
def compra():
session = stripe.checkout.Session.create(
payment_method_types=['card'],
line_items=[{
'price':'price_1HjRbbH7KlqUP853FKmXS2CT',
'quantity' :1,
}],
mode='payment',
success_url=url_for('index', _external=True) + '?session_id={CHECKOUT_SESSION_ID}',
cancel_url=url_for('compra', _external=True),
)
return render_template('compra.html',checkout_session_id=session['id'],checkout_public_key=app.config['STRIPE_PUBLIC_KEY'])
@app.route('/stripe_webhook', methods=['POST'])
def stripe_webhook():
print('CHAMANDO WEBHOOK')
if request.content_length > 1024 * 1024:
print('PEDIDO MUITO GRANDE')
abort(400)
payload = request.get_data()
sig_header = request.environ.get('HTTP_STRIPE_SIGNATURE')
endpoint_secret = 'whsec_slJhJDzKOm0h9NUJFY300kLNcDXvrMZO'
event = None
try:
event = stripe.Webhook.construct_event(
payload, sig_header, endpoint_secret
)
except ValueError as e:
print('Erro no Pagamento')
return {}, 400
except stripe.error.SignatureVerificationError as e:
print('Assinatura Inválida')
return {}, 400
if event['type'] == 'checkout.session.completed':
session = event['data']['object']
print(session)
line_items = stripe.checkout.Session.list_line_items(session['id'], limit=1)
print(line_items['data'][0]['description'])
return {}
@app.route("/Planilha_20de_20Dashboard_20de_20Contas_20a_20Receber_202 ")
def Planilha_20de_20Dashboard_20de_20Contas_20a_20Receber_202 ():
return render_template('Planilha_20de_20Dashboard_20de_20Contas_20a_20Receber_202 .html')
@app.route("/ microsoft_powerapps_canvas_edi")
def microsoft_powerapps_canvas_edi ():
return render_template('microsoft_powerapps_canvas_edi.html')
'''
@app.route('/register', methods=['GET', 'POST'])
def register():
    """Create a new user account.

    SECURITY NOTE(review): passwords are stored in plain text; hash them
    (e.g. werkzeug.security.generate_password_hash) before going live.
    """
    if request.method == 'POST':
        username = request.form['username']
        email = request.form['email']
        pwd = request.form['password']
        cursor = db.connection.cursor(MySQLdb.cursors.DictCursor)
        cursor.execute('SELECT * FROM Usuario WHERE username = % s', (username, ))
        Usuario = cursor.fetchone()
        # Bug fix: the duplicate lookup above was fetched but never checked,
        # so repeated registrations inserted duplicate users.
        if not Usuario:
            cursor.execute('INSERT INTO Usuario VALUES (NULL, % s, % s, % s)', (username, pwd, email, ))
            db.connection.commit()
    return render_template('register.html')
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Authenticate against the Usuario table and populate the session.

    SECURITY NOTE(review): credentials are compared in plain text in SQL;
    passwords should be hashed and verified server-side instead.
    """
    msg = ''
    if request.method == 'POST':
        email = request.form['email']
        pwd = request.form['password']
        cursor = db.connection.cursor(MySQLdb.cursors.DictCursor)
        cursor.execute('SELECT * FROM Usuario WHERE email = % s AND password = % s', (email, pwd, ))
        Usuario = cursor.fetchone()
        if Usuario:
            # Minimal session payload: logged-in flag, user id and username.
            session['loggedin'] = True
            session['id'] = Usuario['codigo']
            session['username'] = Usuario['username']
            msg = 'Logado!'
        else:
            msg = 'Credenciais Inválidas!'
    return render_template('login.html', msg = msg)


@app.route('/logout')
def logout():
    """Drop the whole session and show the login page again."""
    session.clear()
    return render_template("login.html")
|
{"/app/controllers/default.py": ["/app/__init__.py", "/app/models/forms.py"]}
|
32,848,367
|
leonlbc/witz-api
|
refs/heads/master
|
/WitzAPI/endpoints/capm/capm.py
|
from flask import Response, request, Blueprint
import json
from WitzAPI.api.yahoo import Yahoo
from WitzAPI.endpoints.capm.capm_calcs import Capm
from WitzAPI.endpoints.capm.capm_calcs_2 import Capm2
from WitzAPI.endpoints.capm.utils.validate import validate_params
from WitzAPI.endpoints.markowitz.utils.add_ibov import add_ibov
from WitzAPI.models.portfolio import Portfolio
capm_page = Blueprint('capm', __name__)  # Flask Config


@capm_page.route('/capm', methods=["POST"])
def capm():
    """POST /capm: run CAPM expected-return calculations for a portfolio."""
    req_data = request.get_json()
    # Guard clause: reject malformed payloads up front.
    if not validate_params(req_data):
        return Response("Invalid Request", 400, mimetype='application/json')
    # The ^BVSP benchmark index is required for the CAPM beta calculation.
    req_data = add_ibov(req_data)
    portfolio = Portfolio(req_data)
    # Fetch the historic price series for every ticker in the portfolio.
    history = Yahoo(portfolio.stocks, portfolio.period)
    results = Capm(history.hist_data, portfolio.data['risk-free'])
    return Response(json.dumps(results.exp_return), mimetype='application/json')
|
{"/WitzAPI/models/portfolio.py": ["/WitzAPI/models/stock.py"], "/WitzAPI/api/stock_data.py": ["/WitzAPI/api/stock_data_api.py"], "/WitzAPI/endpoints/markowitz/markowitz.py": ["/WitzAPI/api/stock_data.py", "/WitzAPI/endpoints/markowitz/utils/validate.py", "/WitzAPI/models/portfolio.py", "/WitzAPI/api/yahoo.py", "/WitzAPI/endpoints/markowitz/markowitz_calcs.py"], "/WitzAPI/endpoints/capm/capm.py": ["/WitzAPI/api/yahoo.py", "/WitzAPI/endpoints/capm/capm_calcs.py", "/WitzAPI/endpoints/capm/utils/validate.py", "/WitzAPI/endpoints/markowitz/utils/add_ibov.py", "/WitzAPI/models/portfolio.py"], "/witz.py": ["/WitzAPI/endpoints/capm/capm.py", "/WitzAPI/endpoints/markowitz/markowitz.py"]}
|
32,848,368
|
leonlbc/witz-api
|
refs/heads/master
|
/WitzAPI/endpoints/markowitz/utils/add_ibov.py
|
# Function that adds Ibovespa to all requests (Needed to calculate CAPM)
def add_ibov(req_data):
    """Append the ^BVSP benchmark ticker to req_data['stocks'] (in place)."""
    benchmark = {"ticker": "^BVSP"}
    req_data["stocks"].append(benchmark)
    return req_data
|
{"/WitzAPI/models/portfolio.py": ["/WitzAPI/models/stock.py"], "/WitzAPI/api/stock_data.py": ["/WitzAPI/api/stock_data_api.py"], "/WitzAPI/endpoints/markowitz/markowitz.py": ["/WitzAPI/api/stock_data.py", "/WitzAPI/endpoints/markowitz/utils/validate.py", "/WitzAPI/models/portfolio.py", "/WitzAPI/api/yahoo.py", "/WitzAPI/endpoints/markowitz/markowitz_calcs.py"], "/WitzAPI/endpoints/capm/capm.py": ["/WitzAPI/api/yahoo.py", "/WitzAPI/endpoints/capm/capm_calcs.py", "/WitzAPI/endpoints/capm/utils/validate.py", "/WitzAPI/endpoints/markowitz/utils/add_ibov.py", "/WitzAPI/models/portfolio.py"], "/witz.py": ["/WitzAPI/endpoints/capm/capm.py", "/WitzAPI/endpoints/markowitz/markowitz.py"]}
|
32,848,369
|
leonlbc/witz-api
|
refs/heads/master
|
/WitzAPI/endpoints/markowitz/utils/validate.py
|
import jsonschema
from jsonschema import validate
# JSON Schema describing a valid Markowitz portfolio request.
schema = {
    "title": "Portfolio",
    "type": "object",
    "required": ["id", "stocks", "period"],
    "properties": {
        "id": {"type": "integer"},
        "n_simulations": {"type": "integer", "minimum": 1, "maximum": 1000},
        "stocks": {"type": "array"},
        "period": {"type": "string"},
    },
}


def validate_params(request_data):
    """Return True when request_data conforms to the portfolio schema."""
    try:
        validate(request_data, schema)
    except jsonschema.exceptions.ValidationError:
        return False
    return True
|
{"/WitzAPI/models/portfolio.py": ["/WitzAPI/models/stock.py"], "/WitzAPI/api/stock_data.py": ["/WitzAPI/api/stock_data_api.py"], "/WitzAPI/endpoints/markowitz/markowitz.py": ["/WitzAPI/api/stock_data.py", "/WitzAPI/endpoints/markowitz/utils/validate.py", "/WitzAPI/models/portfolio.py", "/WitzAPI/api/yahoo.py", "/WitzAPI/endpoints/markowitz/markowitz_calcs.py"], "/WitzAPI/endpoints/capm/capm.py": ["/WitzAPI/api/yahoo.py", "/WitzAPI/endpoints/capm/capm_calcs.py", "/WitzAPI/endpoints/capm/utils/validate.py", "/WitzAPI/endpoints/markowitz/utils/add_ibov.py", "/WitzAPI/models/portfolio.py"], "/witz.py": ["/WitzAPI/endpoints/capm/capm.py", "/WitzAPI/endpoints/markowitz/markowitz.py"]}
|
32,848,370
|
leonlbc/witz-api
|
refs/heads/master
|
/WitzAPI/models/stock.py
|
class Stock:
    """Model for a single stock entry from the request payload."""

    def __init__(self, stock):
        # `stock` is expected to be a mapping like {"ticker": "ITSA4"}.
        self.ticker = stock["ticker"]
|
{"/WitzAPI/models/portfolio.py": ["/WitzAPI/models/stock.py"], "/WitzAPI/api/stock_data.py": ["/WitzAPI/api/stock_data_api.py"], "/WitzAPI/endpoints/markowitz/markowitz.py": ["/WitzAPI/api/stock_data.py", "/WitzAPI/endpoints/markowitz/utils/validate.py", "/WitzAPI/models/portfolio.py", "/WitzAPI/api/yahoo.py", "/WitzAPI/endpoints/markowitz/markowitz_calcs.py"], "/WitzAPI/endpoints/capm/capm.py": ["/WitzAPI/api/yahoo.py", "/WitzAPI/endpoints/capm/capm_calcs.py", "/WitzAPI/endpoints/capm/utils/validate.py", "/WitzAPI/endpoints/markowitz/utils/add_ibov.py", "/WitzAPI/models/portfolio.py"], "/witz.py": ["/WitzAPI/endpoints/capm/capm.py", "/WitzAPI/endpoints/markowitz/markowitz.py"]}
|
32,848,371
|
leonlbc/witz-api
|
refs/heads/master
|
/witz.py
|
from flask import Flask
from WitzAPI.endpoints.capm.capm import capm_page
from WitzAPI.endpoints.markowitz.markowitz import markowitz_page
app = Flask(__name__)
app.register_blueprint(markowitz_page)
app.register_blueprint(capm_page)

if __name__ == "__main__":
    # Fix: only start the development server when this file is executed
    # directly. Previously `app.run` fired as a module-level side effect,
    # so merely importing `witz` (e.g. from a WSGI server or a test)
    # started a blocking dev server.
    app.run(port=5000)
|
{"/WitzAPI/models/portfolio.py": ["/WitzAPI/models/stock.py"], "/WitzAPI/api/stock_data.py": ["/WitzAPI/api/stock_data_api.py"], "/WitzAPI/endpoints/markowitz/markowitz.py": ["/WitzAPI/api/stock_data.py", "/WitzAPI/endpoints/markowitz/utils/validate.py", "/WitzAPI/models/portfolio.py", "/WitzAPI/api/yahoo.py", "/WitzAPI/endpoints/markowitz/markowitz_calcs.py"], "/WitzAPI/endpoints/capm/capm.py": ["/WitzAPI/api/yahoo.py", "/WitzAPI/endpoints/capm/capm_calcs.py", "/WitzAPI/endpoints/capm/utils/validate.py", "/WitzAPI/endpoints/markowitz/utils/add_ibov.py", "/WitzAPI/models/portfolio.py"], "/witz.py": ["/WitzAPI/endpoints/capm/capm.py", "/WitzAPI/endpoints/markowitz/markowitz.py"]}
|
32,848,372
|
leonlbc/witz-api
|
refs/heads/master
|
/WitzAPI/models/portfolio.py
|
import dateutil.parser
from WitzAPI.models.stock import Stock
class Portfolio:
    """Models a client portfolio request: normalized period plus Stock objects."""

    def __init__(self, data):
        self.data = data
        self.period = self.format_period()   # e.g. "12-01-2010"
        self.stocks = self.to_stocks()       # [Stock, Stock, ...]

    def format_period(self):
        """Normalize the raw period string to MM-DD-YYYY."""
        parsed = dateutil.parser.parse(self.data['period'], ignoretz=True)
        return parsed.strftime("%m-%d-%Y")

    def to_stocks(self):
        """Wrap every raw stock dict (e.g. {"ticker": "ITSA4"}) in a Stock model."""
        return [Stock(raw) for raw in self.data['stocks']]
|
{"/WitzAPI/models/portfolio.py": ["/WitzAPI/models/stock.py"], "/WitzAPI/api/stock_data.py": ["/WitzAPI/api/stock_data_api.py"], "/WitzAPI/endpoints/markowitz/markowitz.py": ["/WitzAPI/api/stock_data.py", "/WitzAPI/endpoints/markowitz/utils/validate.py", "/WitzAPI/models/portfolio.py", "/WitzAPI/api/yahoo.py", "/WitzAPI/endpoints/markowitz/markowitz_calcs.py"], "/WitzAPI/endpoints/capm/capm.py": ["/WitzAPI/api/yahoo.py", "/WitzAPI/endpoints/capm/capm_calcs.py", "/WitzAPI/endpoints/capm/utils/validate.py", "/WitzAPI/endpoints/markowitz/utils/add_ibov.py", "/WitzAPI/models/portfolio.py"], "/witz.py": ["/WitzAPI/endpoints/capm/capm.py", "/WitzAPI/endpoints/markowitz/markowitz.py"]}
|
32,848,373
|
leonlbc/witz-api
|
refs/heads/master
|
/WitzAPI/endpoints/capm/capm_calcs.py
|
import numpy as np
class Capm:
    """CAPM expected returns for every stock in a price-history DataFrame.

    Args:
        hist_data (pandas.DataFrame): adjusted-close prices; must contain a
            '^BVSP' column used as the market proxy.
        risk_free (float): risk-free rate plugged into the CAPM formula.

    After construction, `exp_return` maps ticker -> expected return in percent.
    """

    def __init__(self, hist_data, risk_free):
        self.hist_data = hist_data
        # Note: the original code assigned a method's result over the method's
        # own name (`self.stock_return = self.stock_return()`); the helper is
        # now private while the public attribute keeps its name.
        self.stock_return = self._log_returns()  # daily log-returns
        self.risk_free = risk_free
        self.mkt_cov = {}     # annualized Cov(stock, market), per stock
        self.mkt_var = {}     # annualized Var(market), per stock
        self.mkt_return = {}  # annualized mean market return, per stock
        self.beta = {}
        self.exp_return = {}  # CAPM expected return, rounded percent
        self.capm_results()

    def _log_returns(self):
        """Daily log-returns of the price history."""
        return np.log(self.hist_data / self.hist_data.shift(1))

    def capm_results(self):
        """Compute beta and expected return for every non-market column."""
        for stock in self.stock_return.columns:
            if stock != '^BVSP':
                self.mkt_values(stock)
                self.calc_beta(stock)
                self.calc_expected_return(stock)

    def mkt_values(self, stock):
        """Annualized (x250 trading days) covariance, market variance and market return."""
        market = self.stock_return['^BVSP']
        self.mkt_cov[stock] = self.stock_return[stock].cov(market) * 250
        # Bug fix: CAPM beta divides by the MARKET's variance; the original
        # used the stock's own variance here.
        self.mkt_var[stock] = market.var() * 250
        # Bug fix: the CAPM risk premium uses the MARKET's mean return; the
        # original used the stock's own mean return.
        self.mkt_return[stock] = market.mean() * 250

    def calc_beta(self, stock):
        """beta = Cov(R_stock, R_mkt) / Var(R_mkt)."""
        self.beta[stock] = self.mkt_cov[stock] / self.mkt_var[stock]

    def calc_expected_return(self, stock):
        """E[R] = rf + beta * (E[R_mkt] - rf), expressed as a rounded percentage."""
        self.exp_return[stock] = self.risk_free + self.beta[stock] * (self.mkt_return[stock] - self.risk_free)
        self.exp_return[stock] = round(self.exp_return[stock] * 100, 2)
|
{"/WitzAPI/models/portfolio.py": ["/WitzAPI/models/stock.py"], "/WitzAPI/api/stock_data.py": ["/WitzAPI/api/stock_data_api.py"], "/WitzAPI/endpoints/markowitz/markowitz.py": ["/WitzAPI/api/stock_data.py", "/WitzAPI/endpoints/markowitz/utils/validate.py", "/WitzAPI/models/portfolio.py", "/WitzAPI/api/yahoo.py", "/WitzAPI/endpoints/markowitz/markowitz_calcs.py"], "/WitzAPI/endpoints/capm/capm.py": ["/WitzAPI/api/yahoo.py", "/WitzAPI/endpoints/capm/capm_calcs.py", "/WitzAPI/endpoints/capm/utils/validate.py", "/WitzAPI/endpoints/markowitz/utils/add_ibov.py", "/WitzAPI/models/portfolio.py"], "/witz.py": ["/WitzAPI/endpoints/capm/capm.py", "/WitzAPI/endpoints/markowitz/markowitz.py"]}
|
32,848,374
|
leonlbc/witz-api
|
refs/heads/master
|
/WitzAPI/endpoints/markowitz/markowitz_calcs.py
|
import numpy as np
import pandas as pd
class StockCalcs:
    """Per-stock statistics derived from a price-history DataFrame."""

    def __init__(self, hist):
        self.historic_data = hist
        self.s_return = self.h_return()   # daily log-returns
        self.mean = self.s_mean()         # annualized mean return
        self.cov = self.s_cov()           # annualized covariance matrix

    def h_return(self):
        """Daily log-returns of the price history."""
        return np.log(self.historic_data / self.historic_data.shift(1))

    def s_mean(self):
        """Annualized (x250 trading days) mean return."""
        return self.s_return.mean() * 250

    def s_cov(self):
        """Annualized (x250 trading days) covariance matrix."""
        return self.s_return.cov() * 250
class PortCalcs:
    """Monte-Carlo simulation of random portfolio weights (Markowitz frontier)."""

    def __init__(self, portfolio, stock_calcs):
        self.p_returns = []                          # simulated portfolio returns
        self.p_vol = []                              # simulated portfolio volatilities
        self.sim = portfolio.data['n_simulations']   # number of random portfolios
        self.stocks = portfolio.stocks
        self.mean = stock_calcs.mean
        self.cov = stock_calcs.cov
        self.weights = self.simulate()
        self.weights_df = self.format_df()
        self.results = self.join_dfs().to_json()

    def simulate(self):
        """Draw random weight vectors; record return/volatility for each draw."""
        drawn = []
        for _ in range(self.sim):
            w = np.random.random(len(self.stocks))
            w /= np.sum(w)                           # normalize so weights sum to 1
            ret = np.sum(w * self.mean)
            vol = np.sqrt(np.dot(w.T, np.dot(self.cov, w)))
            self.p_returns.append(ret)
            self.p_vol.append(vol)
            drawn.append(w)
        return drawn

    def format_df(self):
        """Weights as a DataFrame with ticker names as column labels."""
        frame = pd.DataFrame(self.weights)
        renames = {idx: stock.ticker for idx, stock in enumerate(self.stocks)}
        return frame.rename(columns=renames)

    def join_dfs(self):
        """Concatenate the return/volatility columns with the weight columns."""
        stats = pd.DataFrame({'Return': self.p_returns, 'Volatility': self.p_vol})
        return pd.concat([stats, self.weights_df], axis=1, join='inner')
|
{"/WitzAPI/models/portfolio.py": ["/WitzAPI/models/stock.py"], "/WitzAPI/api/stock_data.py": ["/WitzAPI/api/stock_data_api.py"], "/WitzAPI/endpoints/markowitz/markowitz.py": ["/WitzAPI/api/stock_data.py", "/WitzAPI/endpoints/markowitz/utils/validate.py", "/WitzAPI/models/portfolio.py", "/WitzAPI/api/yahoo.py", "/WitzAPI/endpoints/markowitz/markowitz_calcs.py"], "/WitzAPI/endpoints/capm/capm.py": ["/WitzAPI/api/yahoo.py", "/WitzAPI/endpoints/capm/capm_calcs.py", "/WitzAPI/endpoints/capm/utils/validate.py", "/WitzAPI/endpoints/markowitz/utils/add_ibov.py", "/WitzAPI/models/portfolio.py"], "/witz.py": ["/WitzAPI/endpoints/capm/capm.py", "/WitzAPI/endpoints/markowitz/markowitz.py"]}
|
32,848,375
|
leonlbc/witz-api
|
refs/heads/master
|
/WitzAPI/endpoints/markowitz/markowitz.py
|
from flask import Response, request, Blueprint
from WitzAPI.api.yahoo import Yahoo
from WitzAPI.endpoints.markowitz.markowitz_calcs import StockCalcs, PortCalcs
from WitzAPI.endpoints.markowitz.utils.validate import validate_params
from WitzAPI.models.portfolio import Portfolio
markowitz_page = Blueprint('markowitz', __name__)  # Flask blueprint for the Markowitz endpoint


@markowitz_page.route('/markowitz', methods=["POST"])
def markowitz():
    """POST /markowitz: validate the payload, simulate random portfolio
    weights, and return the simulation results as JSON."""
    payload = request.get_json()
    if not validate_params(payload):
        return Response("Invalid Request", 400, mimetype='application/json')
    portfolio = Portfolio(payload)                            # Model the received data
    market_data = Yahoo(portfolio.stocks, portfolio.period)   # Historic prices
    per_stock = StockCalcs(market_data.hist_data)             # Per-stock statistics
    simulation = PortCalcs(portfolio, per_stock)              # Simulated weights, formatted
    return Response(simulation.results, mimetype='application/json')
|
{"/WitzAPI/models/portfolio.py": ["/WitzAPI/models/stock.py"], "/WitzAPI/api/stock_data.py": ["/WitzAPI/api/stock_data_api.py"], "/WitzAPI/endpoints/markowitz/markowitz.py": ["/WitzAPI/api/stock_data.py", "/WitzAPI/endpoints/markowitz/utils/validate.py", "/WitzAPI/models/portfolio.py", "/WitzAPI/api/yahoo.py", "/WitzAPI/endpoints/markowitz/markowitz_calcs.py"], "/WitzAPI/endpoints/capm/capm.py": ["/WitzAPI/api/yahoo.py", "/WitzAPI/endpoints/capm/capm_calcs.py", "/WitzAPI/endpoints/capm/utils/validate.py", "/WitzAPI/endpoints/markowitz/utils/add_ibov.py", "/WitzAPI/models/portfolio.py"], "/witz.py": ["/WitzAPI/endpoints/capm/capm.py", "/WitzAPI/endpoints/markowitz/markowitz.py"]}
|
32,848,376
|
leonlbc/witz-api
|
refs/heads/master
|
/WitzAPI/endpoints/capm/utils/validate.py
|
import jsonschema
from jsonschema import validate
# JSON Schema describing a valid CAPM request.
schema = {
    "title": "Capm",
    "type": "object",
    "required": ["stocks", "risk-free", "period"],
    "properties": {
        "stocks": {"type": "array"},
        "risk-free": {"type": "number"},
        "period": {"type": "string"},
    },
}


def validate_params(request_data):
    """Return True when request_data conforms to the CAPM schema."""
    try:
        validate(request_data, schema)
    except jsonschema.exceptions.ValidationError:
        return False
    return True
|
{"/WitzAPI/models/portfolio.py": ["/WitzAPI/models/stock.py"], "/WitzAPI/api/stock_data.py": ["/WitzAPI/api/stock_data_api.py"], "/WitzAPI/endpoints/markowitz/markowitz.py": ["/WitzAPI/api/stock_data.py", "/WitzAPI/endpoints/markowitz/utils/validate.py", "/WitzAPI/models/portfolio.py", "/WitzAPI/api/yahoo.py", "/WitzAPI/endpoints/markowitz/markowitz_calcs.py"], "/WitzAPI/endpoints/capm/capm.py": ["/WitzAPI/api/yahoo.py", "/WitzAPI/endpoints/capm/capm_calcs.py", "/WitzAPI/endpoints/capm/utils/validate.py", "/WitzAPI/endpoints/markowitz/utils/add_ibov.py", "/WitzAPI/models/portfolio.py"], "/witz.py": ["/WitzAPI/endpoints/capm/capm.py", "/WitzAPI/endpoints/markowitz/markowitz.py"]}
|
32,848,377
|
leonlbc/witz-api
|
refs/heads/master
|
/WitzAPI/api/yahoo.py
|
import pandas as pd
from pandas_datareader import data as wb
class Yahoo:
    """Fetches adjusted-close price history from Yahoo Finance."""

    def __init__(self, stocks, period):
        self.stocks = stocks    # iterable of objects exposing a .ticker
        self.period = period    # start date string, "MM-DD-YYYY"
        self.hist_data = self.historic_data()

    def historic_data(self):
        """Build a DataFrame with one adjusted-close column per stock."""
        frame = pd.DataFrame()
        for stock in self.stocks:
            frame[stock.ticker] = self.fetch(stock.ticker)
        return frame

    def fetch(self, ticker):
        """Download the 'Adj Close' series for one ticker."""
        symbol = self.ticker_format(ticker)
        return wb.DataReader(symbol, 'yahoo', self.period)['Adj Close']

    @staticmethod
    def ticker_format(ticker):
        """Append the B3 exchange suffix (.SA) to everything but the index itself."""
        if ticker.upper() == '^BVSP':
            return ticker
        return ticker + ".SA"
|
{"/WitzAPI/models/portfolio.py": ["/WitzAPI/models/stock.py"], "/WitzAPI/api/stock_data.py": ["/WitzAPI/api/stock_data_api.py"], "/WitzAPI/endpoints/markowitz/markowitz.py": ["/WitzAPI/api/stock_data.py", "/WitzAPI/endpoints/markowitz/utils/validate.py", "/WitzAPI/models/portfolio.py", "/WitzAPI/api/yahoo.py", "/WitzAPI/endpoints/markowitz/markowitz_calcs.py"], "/WitzAPI/endpoints/capm/capm.py": ["/WitzAPI/api/yahoo.py", "/WitzAPI/endpoints/capm/capm_calcs.py", "/WitzAPI/endpoints/capm/utils/validate.py", "/WitzAPI/endpoints/markowitz/utils/add_ibov.py", "/WitzAPI/models/portfolio.py"], "/witz.py": ["/WitzAPI/endpoints/capm/capm.py", "/WitzAPI/endpoints/markowitz/markowitz.py"]}
|
32,911,032
|
Its-me-David/Hangman-2.0
|
refs/heads/main
|
/hangman/gameplay.py
|
from hangman import *

# Module-level demo game. NOTE: `h` is re-exported by the package __init__,
# so this constructor and the blocking game loop below run on import.
h = hangman("Word")  # the answer word is hard-coded
h.go()               # starts the interactive game loop immediately
|
{"/hangman/gameplay.py": ["/hangman/__init__.py"], "/hangman/.ipynb_checkpoints/main-checkpoint.py": ["/hangman/__init__.py"], "/hangman/__init__.py": ["/hangman/gameplay.py", "/hangman/hangman.py"]}
|
32,911,033
|
Its-me-David/Hangman-2.0
|
refs/heads/main
|
/hangman/.ipynb_checkpoints/hangman-checkpoint.py
|
import sets
class hangman:
    """Interactive hangman game for a fixed answer word.

    Relies on a local `sets` module whose `sets.hangman` attribute is a
    sequence of gallows drawings, one per wrong-guess step — TODO confirm
    against the `sets` module (not visible here).
    """

    def __init__(self, word):
        self.word = word.upper()   # the answer, normalized to upper case
        self.shown = ""            # masked word revealed so far
        self.guessed = []          # letters guessed so far (kept sorted)
        self.step = 0              # number of wrong guesses
        # Build the initial mask: a dash per letter, spaces preserved.
        for i in word:
            if i != " ":
                self.shown += "-"
            else:
                self.shown += " "

    def trial(self, guess):
        """Process one guess.

        Returns True on a hit, False on a miss, and None when the guess is
        rejected (not a single letter, or already guessed).
        """
        if len(guess) != 1:
            print("Please guess one letter!")
        elif guess.upper() in self.guessed:
            print("You have already guessed that letter!")
        elif guess.upper() in self.word:
            # Reveal every occurrence of the guessed letter.
            s = list(self.shown)
            for i in range(len(self.word)):
                if self.word[i] == guess.upper():
                    s[i] = guess.upper()
            self.shown = "".join(s)
            self.guessed.append(guess.upper())
            self.guessed.sort()
            return True
        else:
            # Wrong guess: record it and advance the gallows drawing.
            self.guessed.append(guess.upper())
            self.guessed.sort()
            self.step += 1
            return False

    def print_shown(self):
        """Print the masked word."""
        print(self.shown)

    def print_hangman(self):
        """Print the gallows drawing for the current wrong-guess count."""
        for i in sets.hangman[self.step]:
            print(i)

    def print_guessed(self):
        """Print the letters guessed so far."""
        if len(self.guessed) == 0:
            print("No Letters Previously Guessed")
        else:
            toprint = "Letters Guessed: "
            for i in self.guessed:
                toprint += i
                toprint += " "
            print(toprint)

    def is_dead(self):
        """True when the last gallows drawing has been reached."""
        return self.step == len(sets.hangman) - 1

    def is_won(self):
        """True when every letter has been revealed."""
        return not "-" in self.shown

    def go(self):
        """Run the interactive game loop until win or loss."""
        while not self.is_won() and not self.is_dead():
            self.print_shown()
            self.print_hangman()
            self.print_guessed()
            print("What is your guess?")
            guess = input(">> ")
            self.trial(guess)
        # Final board state after the loop ends.
        self.print_shown()
        self.print_hangman()
        self.print_guessed()
        if self.is_won():
            print("Congratulations! You win!")
        elif self.is_dead():
            print("LOL YOU LOSE")
|
{"/hangman/gameplay.py": ["/hangman/__init__.py"], "/hangman/.ipynb_checkpoints/main-checkpoint.py": ["/hangman/__init__.py"], "/hangman/__init__.py": ["/hangman/gameplay.py", "/hangman/hangman.py"]}
|
32,911,034
|
Its-me-David/Hangman-2.0
|
refs/heads/main
|
/hangman/__init__.py
|
# NOTE(review): importing `h` executes gameplay.py at package import time,
# which constructs a game and starts its blocking input loop — confirm this
# side effect is intended.
from .gameplay import h
from .hangman import hangman
# NOTE(review): this import shadows the `hangman` class imported on the
# previous line with `output.hangman`; likely only one of the two is wanted.
from .output import hangman
|
{"/hangman/gameplay.py": ["/hangman/__init__.py"], "/hangman/.ipynb_checkpoints/main-checkpoint.py": ["/hangman/__init__.py"], "/hangman/__init__.py": ["/hangman/gameplay.py", "/hangman/hangman.py"]}
|
32,928,428
|
khakhalin/echo-networks
|
refs/heads/main
|
/esn/create_reservoir/weights_in.py
|
import numpy as np
def weights_in(n_nodes, mode='alternating'):
    """Create a vector of input weights.

    Parameters:
        n_nodes (int): number of nodes.
        mode (str): which values the input weights should take. Options:
            'alternating': alternating -1 and +1 (even indices get -1)
            'flat': all equal to +1

    Returns:
        numpy.ndarray of length n_nodes.

    Raises:
        ValueError: if `mode` is not recognized.
    """
    if mode == 'alternating':
        signs = [(i % 2) * 2.0 - 1 for i in range(n_nodes)]
        return np.array(signs)
    if mode == 'flat':
        return np.ones(n_nodes)
    raise ValueError('Unrecognized input weights type.')
|
{"/esn/__init__.py": ["/esn/basic_reservoir.py", "/esn/reservoir.py", "/esn/data.py", "/esn/utils.py"], "/esn/esn.py": ["/esn/reservoir.py"], "/notebooks/echo.py": ["/esn/__init__.py"], "/esn/reservoir.py": ["/esn/__init__.py"], "/esn/create_reservoir/__init__.py": ["/esn/create_reservoir/make_graph.py", "/esn/create_reservoir/graph_to_weights.py", "/esn/create_reservoir/weights_in.py", "/esn/create_reservoir/activation.py"], "/tests/test_data.py": ["/esn/__init__.py"], "/tests/test_reservoir.py": ["/esn/__init__.py"], "/tests/test_creator.py": ["/esn/__init__.py", "/esn/create_reservoir/graph_to_weights.py"], "/tests/test_utils.py": ["/esn/__init__.py"], "/esn/create_reservoir/graph_to_weights.py": ["/esn/utils.py"]}
|
32,928,429
|
khakhalin/echo-networks
|
refs/heads/main
|
/esn/esn.py
|
from .reservoir import Reservoir
|
{"/esn/__init__.py": ["/esn/basic_reservoir.py", "/esn/reservoir.py", "/esn/data.py", "/esn/utils.py"], "/esn/esn.py": ["/esn/reservoir.py"], "/notebooks/echo.py": ["/esn/__init__.py"], "/esn/reservoir.py": ["/esn/__init__.py"], "/esn/create_reservoir/__init__.py": ["/esn/create_reservoir/make_graph.py", "/esn/create_reservoir/graph_to_weights.py", "/esn/create_reservoir/weights_in.py", "/esn/create_reservoir/activation.py"], "/tests/test_data.py": ["/esn/__init__.py"], "/tests/test_reservoir.py": ["/esn/__init__.py"], "/tests/test_creator.py": ["/esn/__init__.py", "/esn/create_reservoir/graph_to_weights.py"], "/tests/test_utils.py": ["/esn/__init__.py"], "/esn/create_reservoir/graph_to_weights.py": ["/esn/utils.py"]}
|
32,928,430
|
khakhalin/echo-networks
|
refs/heads/main
|
/notebooks/echo.py
|
"""Technical file to circumvent importing rules for Jupyter notebooks.
It allows to import 'esn' in notebooks using:
from echo import esn"""
import sys
sys.path.append('..')
import esn
|
{"/esn/__init__.py": ["/esn/basic_reservoir.py", "/esn/reservoir.py", "/esn/data.py", "/esn/utils.py"], "/esn/esn.py": ["/esn/reservoir.py"], "/notebooks/echo.py": ["/esn/__init__.py"], "/esn/reservoir.py": ["/esn/__init__.py"], "/esn/create_reservoir/__init__.py": ["/esn/create_reservoir/make_graph.py", "/esn/create_reservoir/graph_to_weights.py", "/esn/create_reservoir/weights_in.py", "/esn/create_reservoir/activation.py"], "/tests/test_data.py": ["/esn/__init__.py"], "/tests/test_reservoir.py": ["/esn/__init__.py"], "/tests/test_creator.py": ["/esn/__init__.py", "/esn/create_reservoir/graph_to_weights.py"], "/tests/test_utils.py": ["/esn/__init__.py"], "/esn/create_reservoir/graph_to_weights.py": ["/esn/utils.py"]}
|
32,928,431
|
khakhalin/echo-networks
|
refs/heads/main
|
/esn/create_reservoir/make_graph.py
|
import numpy as np
def make_graph(n_nodes=20, n_edges=None, network_type='erdos'):
    """Create a graph as an adjacency dictionary {node: [out-neighbors]}.

    Parameters:
        n_nodes: number of nodes.
        n_edges: number of edges (optional; defaults to 2 * n_nodes).
        network_type: 'erdos' (default) or 'ws' (Watts-Strogatz).

    Raises:
        ValueError: for an unrecognized network_type.
    """
    if n_edges is None:
        n_edges = n_nodes * 2
    if network_type == 'erdos':
        return _make_erdos(n_nodes, n_edges)
    if network_type == 'ws':
        # WS also has a beta (rewiring) parameter; its default is used for now.
        return _make_ws(n_nodes, n_edges)
    raise ValueError('Unrecognized graph type.')
def _make_erdos(n, e):
"""Create Erdos graph with N nodes and E edges."""
edges = [(i,j) for j in range(n) for i in range(n) if i != j]
edges = [edges[i] for i in np.random.choice(len(edges), e, replace=False)]
g = {i:list(set([j for k,j in edges if k==i])) for i in range(n)}
return g
def _make_ws(n, e, beta=0.5):
    """Makes an oriented Watts-Strogatz network.

    Parameters:
        n: number of nodes.
        e: number of edges (clamped to the maximum n*(n-1)).
        beta: probability of rewiring each clockwise ring edge.

    Returns:
        dict {node: [out-neighbors]}.
    """
    if e > n*(n - 1):
        e = n*(n - 1)  # Max possible number of edges for a graph of N nodes
    degree = (e // n)  # Average out-degree, rounded down
    n_with_extra_edge = e - (degree * n)  # Number of elements with k+2 degree
    g = {i: [] for i in range(n)}  # Empty graph for now
    for i in range(n):  # First, create a ring
        edges_left = degree // 2
        edges_right = degree - edges_left
        # The first `n_with_extra_edge` nodes absorb the remainder edges.
        if i < n_with_extra_edge:
            jlist = range(i - edges_left, i + edges_right + 2)
        else:
            jlist = range(i - edges_left, i + edges_right + 1)
        for j in jlist:
            if j == i:  # Don't connect to itself
                continue
            jp = j % n  # Loop around the ring
            if jp not in g[i]: g[i].append(jp)
    # Now rewire edges:
    for i in range(n):  # For every node in the graph
        js = [j for j in g[i] if (j-i) % n < (n // 2)]  # Only those to the right are to be rewired
        for j in js:  # For every edge on the right
            if np.random.uniform() < beta:  # Toss a weighted coin if this edge needs to be rewired
                k = i  # New edge destination; set to a deliberately bad choice (self)
                while k == i or (k in g[i]):  # Draw while unhappy (self, or existing)
                    k = np.random.randint(n)
                    # Note that with high enough e, we'll get an infinite loop here,
                    # as rewiring will be impossible.
                g[i].remove(j)  # Unwire
                g[i].append(k)
    return g
|
{"/esn/__init__.py": ["/esn/basic_reservoir.py", "/esn/reservoir.py", "/esn/data.py", "/esn/utils.py"], "/esn/esn.py": ["/esn/reservoir.py"], "/notebooks/echo.py": ["/esn/__init__.py"], "/esn/reservoir.py": ["/esn/__init__.py"], "/esn/create_reservoir/__init__.py": ["/esn/create_reservoir/make_graph.py", "/esn/create_reservoir/graph_to_weights.py", "/esn/create_reservoir/weights_in.py", "/esn/create_reservoir/activation.py"], "/tests/test_data.py": ["/esn/__init__.py"], "/tests/test_reservoir.py": ["/esn/__init__.py"], "/tests/test_creator.py": ["/esn/__init__.py", "/esn/create_reservoir/graph_to_weights.py"], "/tests/test_utils.py": ["/esn/__init__.py"], "/esn/create_reservoir/graph_to_weights.py": ["/esn/utils.py"]}
|
32,928,432
|
khakhalin/echo-networks
|
refs/heads/main
|
/esn/reservoir.py
|
import numpy as np
import sklearn.linear_model as lm
from . import create_reservoir as creator
import warnings
from scipy.linalg import LinAlgWarning
class Reservoir(object):
    """Echo-state network: a fixed random reservoir plus a trained linear readout.

    Args:
        n_nodes (int): Number of processing neurons in the internal reservoir.
        n_edges (int): Number of edges. Default: heuristic based on n_nodes.
        network_type (str): ws, erdos
        leak (float): leak (aka alpha) for reservoir state update. Default 0.1.
        rho (float): target spectral radius. Default 0.8. None disables rescaling.
        l2 (float): ridge (L2) regularization for the readout; None selects
            plain pseudo-inverse regression.
        inhibition (str): alternating (default), distributed, none
        weights_in (str): alternating (default), flat
    """
    def __init__(self, n_nodes=20, n_edges=None, network_type='erdos',
                 leak=0.1, rho=0.8, l2=0.0001,
                 inhibition='alternating', weights_in='alternating'):
        self.n_nodes = n_nodes
        self.network_type = network_type
        self.leak = leak
        self.l2 = l2  # Ridge regression l2 regularization
        if n_edges is None:
            n_edges = max([2, 3*self.n_nodes,
                           int(round(self.n_nodes*(self.n_nodes-1)*0.2))])  # Reasonable heuristic
        # Creator is a stateless all-static-methods utility module
        self.meta = {}
        self.graph = creator.make_graph(n_nodes, n_edges=n_edges, network_type=network_type)
        self.weights, spectral_radius = creator.graph_to_weights(self.graph, n_nodes, inhibition=inhibition, rho=rho)
        self.meta['original_rho'] = spectral_radius
        self.weights_in = creator.weights_in(n_nodes, weights_in)
        self.norm_input = None   # Inputs normalization: [mean, std]
        self.weights_out = None  # Readout weights; None until fit() is called
        self.norm_out = None     # Output normalization: [intercept, std]
        self.activation = creator.activation('tanh')
        self.state = np.zeros(n_nodes)

    def __str__(self):
        return f"Reservoir of {self.n_nodes} nodes, `{self.network_type}` type."

    def _forward(self, drive=None):
        """Make 1 step forward, update reservoir state.

        If no input is provided, self-generate by feeding the readout back in.
        """
        # Bug fix: test for None explicitly. The old `if not drive:` treated a
        # legitimate input value of exactly 0.0 as "no input" and tried to
        # self-drive — which crashes during fit() (weights_out is still None)
        # whenever a normalized input sample happens to be zero.
        if drive is None:
            drive = self.state @ self.weights_out * self.norm_out[1] + self.norm_out[0]  # Self-drive
        self.state = (self.state * (1-self.leak) +
                      self.leak * self.activation((self.weights.T @ self.state) +
                                                  self.weights_in * drive))

    def run(self, input, n_steps=None):
        """Run the model several steps forward, driving it with an input signal.

        Arguments:
            input (1D numpy): input signal
            n_steps (int): how many steps to run (default: len(input)). Steps
                past the end of the input are self-generated from the readout.
        Returns:
            2D numpy array (n_steps, n_nodes) of reservoir states.
        """
        if n_steps is None:
            n_steps = len(input)
        history = np.zeros((n_steps, self.n_nodes))
        self.state = np.zeros(self.n_nodes)  # Reset the state before each run
        for i_step in range(n_steps):
            if i_step < input.shape[0]:
                self._forward(input[i_step])
            else:
                self._forward()  # Past the input: self-drive
            history[i_step, :] = self.state
        return history

    def fit(self, x, y, skip=None):
        """Fit readout weights mapping input series x to output series y.

        Args:
            x (1d numpy array): input series
            y (1d numpy array): output series
            skip (int): how many first points to ignore
                (default: min(len(y) // 4, n_nodes*4))
        Returns:
            self (scikit-learn style); weights_out is also stored on self.
        """
        if len(y.shape) == 1:  # Simple vectors need to be turned into column-vectors
            y = y[:, np.newaxis]
        if skip is None:
            skip = min(self.n_nodes*4, len(y) // 4)
        self.norm_input = [np.mean(x), np.std(x)]
        self.norm_out = [np.mean(y), np.std(y)]
        history = self.run((x - self.norm_input[0]) / self.norm_input[1])
        if self.l2 is None:  # Plain pseudo-inverse regression
            self.weights_out = (np.linalg.pinv(history[skip:, :].T @ history[skip:, :]) @
                                (history[skip:, :].T @ (y[skip:] - self.norm_out[0])/self.norm_out[1])
                                )
        else:  # Ridge regression
            # NOTE(review): unlike the pinv branch, this branch fits on the
            # full history without skipping the transient — confirm intended.
            y_norm = (y - self.norm_out[0])/self.norm_out[1]
            clf = lm.Ridge(alpha=self.l2, fit_intercept=False)
            warnings.filterwarnings(action='ignore', category=LinAlgWarning, module='sklearn')
            # Suppressing warnings is a dirty trick, but many of our random matrices
            # are poorly defined, so for WS graphs at least this regression spews
            # warnings way too often. If a matrix is bad, it is bad, that's ok.
            clf.fit(history, y_norm)
            warnings.filterwarnings(action='default', category=LinAlgWarning, module='sklearn')
            self.weights_out = clf.coef_.T
        return self  # In scikit-learn style, fit is supposed to return self

    def predict(self, x, length=None):
        """Predict the output series for input signal x.

        Args:
            x (numpy array): input signal
            length (int): for how long to run (default: len(x))
        Returns:
            y (numpy array): predicted output
        Raises:
            Exception: if the model has not been fit yet.
        """
        if length is None:
            length = len(x)
        if self.weights_out is None:
            raise Exception('The model needs to be fit first.')
        history = self.run((x - self.norm_input[0]) / self.norm_input[1], length)
        return (history @ self.weights_out * self.norm_out[1] + self.norm_out[0]).squeeze()
|
{"/esn/__init__.py": ["/esn/basic_reservoir.py", "/esn/reservoir.py", "/esn/data.py", "/esn/utils.py"], "/esn/esn.py": ["/esn/reservoir.py"], "/notebooks/echo.py": ["/esn/__init__.py"], "/esn/reservoir.py": ["/esn/__init__.py"], "/esn/create_reservoir/__init__.py": ["/esn/create_reservoir/make_graph.py", "/esn/create_reservoir/graph_to_weights.py", "/esn/create_reservoir/weights_in.py", "/esn/create_reservoir/activation.py"], "/tests/test_data.py": ["/esn/__init__.py"], "/tests/test_reservoir.py": ["/esn/__init__.py"], "/tests/test_creator.py": ["/esn/__init__.py", "/esn/create_reservoir/graph_to_weights.py"], "/tests/test_utils.py": ["/esn/__init__.py"], "/esn/create_reservoir/graph_to_weights.py": ["/esn/utils.py"]}
|
32,928,433
|
khakhalin/echo-networks
|
refs/heads/main
|
/tests/__init__.py
|
# Empty, but needed for correct importing
# (otherwise `from esn import some_class` doesn't work).
# To run the tests, run in the terminal (not in a Python console):
# pytest tests
|
{"/esn/__init__.py": ["/esn/basic_reservoir.py", "/esn/reservoir.py", "/esn/data.py", "/esn/utils.py"], "/esn/esn.py": ["/esn/reservoir.py"], "/notebooks/echo.py": ["/esn/__init__.py"], "/esn/reservoir.py": ["/esn/__init__.py"], "/esn/create_reservoir/__init__.py": ["/esn/create_reservoir/make_graph.py", "/esn/create_reservoir/graph_to_weights.py", "/esn/create_reservoir/weights_in.py", "/esn/create_reservoir/activation.py"], "/tests/test_data.py": ["/esn/__init__.py"], "/tests/test_reservoir.py": ["/esn/__init__.py"], "/tests/test_creator.py": ["/esn/__init__.py", "/esn/create_reservoir/graph_to_weights.py"], "/tests/test_utils.py": ["/esn/__init__.py"], "/esn/create_reservoir/graph_to_weights.py": ["/esn/utils.py"]}
|
32,928,434
|
khakhalin/echo-networks
|
refs/heads/main
|
/esn/data.py
|
import numpy as np
class Data():
    """Factory for chaotic time-series generators."""

    @classmethod
    def create_source(cls, process='lorenz', params=None, fileName=None):
        """
        Factory method that creates custom chaotic time series generators.

        Parameters:
            process: string, type of the dynamical system (optional, default 'lorenz')
            params: parameters for the system (optional)
            fileName: string, if the series should be loaded from a file (optional)
        """
        if process != 'lorenz':
            raise ValueError('Chaotic process name not recognized.')
        return cls.Lorenz(params)

    class _DataSource(object):
        """Abstract base class for data generators."""

        def _run(self, n_points, seed, integration_step):
            """Abstract: returns (time, xy) where xy is an (n, 2) array."""
            pass

        def generate(self, n_points, seed=None, integration_step=0.01, sampling_step=None):
            """Run the model, then (optionally) downsample to sampling_step."""
            if sampling_step is None:  # No downsampling requested
                _, traj = self._run(n_points, seed, integration_step)
                return traj[:, 0], traj[:, 1]
            # Simulate with ~10% excess points, just in case.
            n_raw = int(round(n_points * sampling_step / integration_step * 1.1))
            if sampling_step < integration_step:
                integration_step = sampling_step
                # A warning here ('integration step should be <= sampling step')
                # might be friendlier than silent clamping.
            # NOTE: steps > 0.03 may be numerically unstable, and n_raw > 1e6
            # will be slow; both are silently tolerated for now.
            time, traj = self._run(n_raw, seed, integration_step)
            # Keep the first sample of every sampling interval:
            bucket = np.floor(time / sampling_step)                       # interval index per sample
            keep = np.hstack(([0], bucket[1:] - bucket[:-1])).astype(bool)  # where the interval changes
            return traj[keep, 0][:n_points], traj[keep, 1][:n_points]

    class Lorenz(_DataSource):
        """Lorenz attractor, integrated with explicit Euler steps."""

        def __init__(self, params=None):
            if params is None:
                params = (10, 8 / 3, 28)  # Sigma, beta, rho
            self.sigma, self.beta, self.rho = params

        def _run(self, n_points=100, seed=None, integration_step=0.01):
            """Integrate the system; returns (time, [x, z]) arrays."""
            if seed is None:
                seed = (1.0, 0.0, 1.0)
            if np.isscalar(seed):
                seed = (1.0, 0.0, seed)  # Avoid the trivial fixed point at the origin
            (x, y, z) = seed
            history = np.zeros((n_points, 3))
            time = 0
            for i in range(n_points):
                x, y, z = (x + integration_step * self.sigma * (y - x),
                           y + integration_step * (x * (self.rho - z) - y),
                           z + integration_step * (x * y - self.beta * z))
                time += integration_step
                history[i, :] = (time, x, z)
            return history[:, 0], history[:, 1:]  # time, then x and z together
|
{"/esn/__init__.py": ["/esn/basic_reservoir.py", "/esn/reservoir.py", "/esn/data.py", "/esn/utils.py"], "/esn/esn.py": ["/esn/reservoir.py"], "/notebooks/echo.py": ["/esn/__init__.py"], "/esn/reservoir.py": ["/esn/__init__.py"], "/esn/create_reservoir/__init__.py": ["/esn/create_reservoir/make_graph.py", "/esn/create_reservoir/graph_to_weights.py", "/esn/create_reservoir/weights_in.py", "/esn/create_reservoir/activation.py"], "/tests/test_data.py": ["/esn/__init__.py"], "/tests/test_reservoir.py": ["/esn/__init__.py"], "/tests/test_creator.py": ["/esn/__init__.py", "/esn/create_reservoir/graph_to_weights.py"], "/tests/test_utils.py": ["/esn/__init__.py"], "/esn/create_reservoir/graph_to_weights.py": ["/esn/utils.py"]}
|
32,928,435
|
khakhalin/echo-networks
|
refs/heads/main
|
/esn/create_reservoir/activation.py
|
import numpy as np
def activation(name='tanh'):
    """Look up an activation function by name; only 'tanh' is supported so far."""
    if name == 'tanh':
        return np.tanh
    raise ValueError('Unknown activation function.')
|
{"/esn/__init__.py": ["/esn/basic_reservoir.py", "/esn/reservoir.py", "/esn/data.py", "/esn/utils.py"], "/esn/esn.py": ["/esn/reservoir.py"], "/notebooks/echo.py": ["/esn/__init__.py"], "/esn/reservoir.py": ["/esn/__init__.py"], "/esn/create_reservoir/__init__.py": ["/esn/create_reservoir/make_graph.py", "/esn/create_reservoir/graph_to_weights.py", "/esn/create_reservoir/weights_in.py", "/esn/create_reservoir/activation.py"], "/tests/test_data.py": ["/esn/__init__.py"], "/tests/test_reservoir.py": ["/esn/__init__.py"], "/tests/test_creator.py": ["/esn/__init__.py", "/esn/create_reservoir/graph_to_weights.py"], "/tests/test_utils.py": ["/esn/__init__.py"], "/esn/create_reservoir/graph_to_weights.py": ["/esn/utils.py"]}
|
32,928,436
|
khakhalin/echo-networks
|
refs/heads/main
|
/esn/create_reservoir/__init__.py
|
from .make_graph import make_graph
from .graph_to_weights import graph_to_weights
from .weights_in import weights_in
from .activation import activation
|
{"/esn/__init__.py": ["/esn/basic_reservoir.py", "/esn/reservoir.py", "/esn/data.py", "/esn/utils.py"], "/esn/esn.py": ["/esn/reservoir.py"], "/notebooks/echo.py": ["/esn/__init__.py"], "/esn/reservoir.py": ["/esn/__init__.py"], "/esn/create_reservoir/__init__.py": ["/esn/create_reservoir/make_graph.py", "/esn/create_reservoir/graph_to_weights.py", "/esn/create_reservoir/weights_in.py", "/esn/create_reservoir/activation.py"], "/tests/test_data.py": ["/esn/__init__.py"], "/tests/test_reservoir.py": ["/esn/__init__.py"], "/tests/test_creator.py": ["/esn/__init__.py", "/esn/create_reservoir/graph_to_weights.py"], "/tests/test_utils.py": ["/esn/__init__.py"], "/esn/create_reservoir/graph_to_weights.py": ["/esn/utils.py"]}
|
32,928,437
|
khakhalin/echo-networks
|
refs/heads/main
|
/tests/test_data.py
|
import pytest
from esn import Data, utils
import numpy as np
def test_lorenz_zero_seed():
    """With rho=0 and a zero seed, the trajectory must stay at the origin."""
    source = Data.Lorenz(params=(10, 8 / 3, 0))
    x, y = source.generate(10, (0, 0, 0), integration_step=0.01)
    for series in (x, y):
        assert isinstance(series, (np.ndarray, np.generic))
        assert series.size == 10
        assert np.zeros(10) == pytest.approx(series, rel=1e-3)
def test_lorenz_zero_attract():
    """With rho=-1 the origin attracts; check the first 10 integration steps."""
    source = Data.Lorenz(params=(10, 8 / 3, -1))
    x, y = source.generate(10, (0, 1, 1.05), integration_step=0.01)
    expected_x = np.array([0.1000, 0.18900, 0.26791, 0.33757, 0.39877,
                           0.45225, 0.49867, 0.53868, 0.57285, 0.60171])
    expected_y = np.array([1.02200, 0.99574, 0.97103, 0.94772, 0.92566,
                           0.90469, 0.88471, 0.86560, 0.84726, 0.82960])
    for series in (x, y):
        assert isinstance(series, (np.ndarray, np.generic))
        assert series.size == 10
    assert np.allclose(expected_x, x)
    assert expected_y == pytest.approx(y, rel=1e-4)
def test_lorenz_seed():
    """Different scalar seeds must produce different trajectories."""
    source = Data.Lorenz()
    x1, _ = source.generate(10, 0.0, integration_step=0.01)
    x2, _ = source.generate(10, 0.1, integration_step=0.01)
    assert (x1 != x2).any()
def test_data_integration():
    """Halving the step should land at (roughly) the same point at equal time."""
    source = Data.Lorenz()
    _, y_fine = source.generate(10, 0.0, integration_step=0.01)
    _, y_coarse = source.generate(10, 0.0, integration_step=0.02)
    # Index 8 at step 0.01 and index 4 at step 0.02 are the same model time
    assert np.allclose(y_fine[8], y_coarse[4], atol=0.01)
def test_data_sampling():
    """Coarser sampling of the same trajectory ends near the same point."""
    source = Data.Lorenz()
    finals = []
    for n, step in ((16, 0.02), (8, 0.04), (4, 0.08), (2, 0.16)):
        _, y = source.generate(n, 0.0, sampling_step=step)
        finals.append(y[-1])
    for coarse, coarser in zip(finals, finals[1:]):
        assert np.allclose(coarse, coarser, atol=0.01)
|
{"/esn/__init__.py": ["/esn/basic_reservoir.py", "/esn/reservoir.py", "/esn/data.py", "/esn/utils.py"], "/esn/esn.py": ["/esn/reservoir.py"], "/notebooks/echo.py": ["/esn/__init__.py"], "/esn/reservoir.py": ["/esn/__init__.py"], "/esn/create_reservoir/__init__.py": ["/esn/create_reservoir/make_graph.py", "/esn/create_reservoir/graph_to_weights.py", "/esn/create_reservoir/weights_in.py", "/esn/create_reservoir/activation.py"], "/tests/test_data.py": ["/esn/__init__.py"], "/tests/test_reservoir.py": ["/esn/__init__.py"], "/tests/test_creator.py": ["/esn/__init__.py", "/esn/create_reservoir/graph_to_weights.py"], "/tests/test_utils.py": ["/esn/__init__.py"], "/esn/create_reservoir/graph_to_weights.py": ["/esn/utils.py"]}
|
32,928,438
|
khakhalin/echo-networks
|
refs/heads/main
|
/tests/test_reservoir.py
|
import pytest
from esn import Data, Reservoir, utils
import numpy as np
def test_reservoir_integration():
    """End-to-end smoke test: fit a small reservoir on a short Lorenz run."""
    points_to_skip = 200
    data = Data.create_source('lorenz')
    x, y = data.generate(1000)  # Intentionally overfitting, so very short training
    model = Reservoir(100, l2=0)  # ... and no regularization
    model.fit(x, y, skip=points_to_skip)
    z = model.predict(x)
    loss = utils.loss(y[points_to_skip:], z[points_to_skip:])
    # A typical value with these settings is ~1e-4 (overfitted)
    assert 0 < loss < 1
def test_reservoir_predict():
    """Prediction length handling: default, truncated, and auto-generation."""
    source = Data.create_source('lorenz')
    x, y = source.generate(100)
    model = Reservoir(100)
    model.fit(x, y)
    assert len(model.predict(x)) == len(x)  # Default: one output per input point
    assert len(model.predict(x, 50)) == 50  # Truncate early (only generate a few)
    model.fit(x, x)  # Auto-generator mode (continuing the signal)
    # Load the 100 points of x, then generate 400 more
    assert len(model.predict(x, 500)) == 500
|
{"/esn/__init__.py": ["/esn/basic_reservoir.py", "/esn/reservoir.py", "/esn/data.py", "/esn/utils.py"], "/esn/esn.py": ["/esn/reservoir.py"], "/notebooks/echo.py": ["/esn/__init__.py"], "/esn/reservoir.py": ["/esn/__init__.py"], "/esn/create_reservoir/__init__.py": ["/esn/create_reservoir/make_graph.py", "/esn/create_reservoir/graph_to_weights.py", "/esn/create_reservoir/weights_in.py", "/esn/create_reservoir/activation.py"], "/tests/test_data.py": ["/esn/__init__.py"], "/tests/test_reservoir.py": ["/esn/__init__.py"], "/tests/test_creator.py": ["/esn/__init__.py", "/esn/create_reservoir/graph_to_weights.py"], "/tests/test_utils.py": ["/esn/__init__.py"], "/esn/create_reservoir/graph_to_weights.py": ["/esn/utils.py"]}
|
32,928,439
|
khakhalin/echo-networks
|
refs/heads/main
|
/tests/test_creator.py
|
import pytest
from esn import create_reservoir as creator
from esn.create_reservoir.graph_to_weights import _spectral_radius
from esn import utils
import numpy as np
def test_make_graphs():
    """Random graphs of both supported types must have the requested size."""
    for net_type in ('erdos', 'ws'):
        for _ in range(100):  # Generate a bunch of random graphs
            n_nodes = np.random.randint(4, 20)
            # Keep the edge count below the dense limit
            n_edges = np.random.randint(1, max(n_nodes, n_nodes * (n_nodes - 2)))
            g = creator.make_graph(n_nodes=n_nodes, n_edges=n_edges, network_type=net_type)
            assert isinstance(g, dict)
            assert len(g.keys()) <= 20  # Left-side nodes
            right_side = {j for targets in g.values() for j in targets}
            assert len(right_side) <= 20  # Right side nodes
            assert len(utils.edges(g)) == n_edges, f"graph: {g}"
def test_activation_functions():
    """'tanh' must resolve to numpy's tanh."""
    fun = creator.activation('tanh')
    assert fun(1) == np.tanh(1)
def test_spectral_radius():
    """Spectral radius of scalars and simple diagonal/zero matrices."""
    cases = (
        (1, 1),
        (np.array([[1, 0], [0, 1]]), 1),
        (np.array([[2, 0], [0, 1]]), 2),
        (np.array([[0, 0], [0, 0]]), 0),
    )
    for matrix, expected in cases:
        assert _spectral_radius(matrix) == expected
def test_weights_in():
    """Input-weight generators: flat, alternating, and default length."""
    assert (creator.weights_in(2, 'flat') == np.array([1, 1])).all()
    assert (creator.weights_in(2, 'alternating') == np.array([-1, 1])).all()
    assert creator.weights_in(99).shape[0] == 99
def test_graph_to_weights():
    """Small-matrix checks for every inhibition mode of graph_to_weights."""
    # Weights table is graph-style, not operator-style: w_ij = w(j-->i)
    w,_ = creator.graph_to_weights({0: [1], 1:[0]}, inhibition=None)
    assert (w == np.array([[0, 1], [1, 0]])).all()
    # Duplicate self-loop in the input must not change the result
    w, _ = creator.graph_to_weights({0: [0, 1], 1: [0]}, inhibition=None)
    assert (w == np.array([[0, 1], [1, 0]])).all()
    w,_ = creator.graph_to_weights({0: [1], 1: [0]}, inhibition=None, rho=0.9) # rho should scale this one
    assert (w == 0.9*np.array([[0, 1], [1, 0]])).all()
    w,_ = creator.graph_to_weights({0: [1,0], 1: [0]}, inhibition=None) # Loops should be removed
    assert (w == np.array([[0, 1], [1, 0]])).all()
    # Alternating: checkerboard sign pattern over existing edges
    w,_ = creator.graph_to_weights({0: [1,2], 1: [0]}, inhibition='alternating')
    assert (w == np.array([[0, 1, -1], [1, 0, 0], [0, 0, 0]])).all()
    # Distributed: absent edges become weakly inhibitory; whole matrix sums to 0
    w,_ = creator.graph_to_weights({0: [1], 1: []}, inhibition='distributed')
    assert (w == np.array([[0, 1], [-1, 0]])).all()
    assert (sum(sum(w)) == 0)
    w,_ = creator.graph_to_weights({0: [1, 2], 1: [0]}, inhibition='distributed')
    assert (w == np.array([[0, 1, 1], [1, 0, -1], [-1, -1, 0]])).all()
    assert (sum(sum(w)) == 0)
    # Balanced_in: inputs converging on each (qualified) neuron sum to zero
    w, _ = creator.graph_to_weights({0: [1, 2], 1: [0]}, inhibition='balanced_in')
    assert (w == np.array([[0, 1, 1], [1, 0, -1], [-1, -1, 0]])).all()
    w, _ = creator.graph_to_weights({0: [1, 3], 1: [2,3], 2: [0,1,3]}, inhibition='balanced_in')
    # I'll write a transposed matrix below, just cos it's easier. sum inputs ==0 for all qualified nodes
    assert (w == np.array([[0,-0.5,1,-0.5],[1,0,1,-2],[-0.5,1,0,-0.5],[1,1,1,0]]).T).all()
    # Balanced_out: outputs leaving each (qualified) neuron sum to zero
    w, _ = creator.graph_to_weights({0: [1, 2], 1: [0]}, inhibition='balanced_out')
    assert (w == np.array([[0, 1, 1], [1, 0, -1], [0, 0, 0]])).all()
    w, _ = creator.graph_to_weights({0: [1, 3], 1: [2, 3], 2: [0, 1, 3]}, inhibition='balanced_out')
    assert (w == np.array([[0, 1, -2, 1], [-2, 0, 1, 1], [1, 1, 0, 1], [0, 0, 0, 0]])).all()
|
{"/esn/__init__.py": ["/esn/basic_reservoir.py", "/esn/reservoir.py", "/esn/data.py", "/esn/utils.py"], "/esn/esn.py": ["/esn/reservoir.py"], "/notebooks/echo.py": ["/esn/__init__.py"], "/esn/reservoir.py": ["/esn/__init__.py"], "/esn/create_reservoir/__init__.py": ["/esn/create_reservoir/make_graph.py", "/esn/create_reservoir/graph_to_weights.py", "/esn/create_reservoir/weights_in.py", "/esn/create_reservoir/activation.py"], "/tests/test_data.py": ["/esn/__init__.py"], "/tests/test_reservoir.py": ["/esn/__init__.py"], "/tests/test_creator.py": ["/esn/__init__.py", "/esn/create_reservoir/graph_to_weights.py"], "/tests/test_utils.py": ["/esn/__init__.py"], "/esn/create_reservoir/graph_to_weights.py": ["/esn/utils.py"]}
|
32,928,440
|
khakhalin/echo-networks
|
refs/heads/main
|
/tests/test_utils.py
|
import pytest
from esn import utils
import numpy as np
def test_edges():
    """edges() flattens a graph dict into a set of (from, to) tuples."""
    graph = {0: [1, 2], 1: [0]}
    assert utils.edges(graph) == {(0, 1), (0, 2), (1, 0)}
def test_loss():
    """MSE loss on plain lists and on numpy arrays."""
    assert np.allclose(utils.loss([0, 0], [2, 1]), 2.5)  # (4 + 1) / 2
    assert np.allclose(utils.loss([0, 0], np.array([1, 1])), 1)  # (1 + 1) / 2
|
{"/esn/__init__.py": ["/esn/basic_reservoir.py", "/esn/reservoir.py", "/esn/data.py", "/esn/utils.py"], "/esn/esn.py": ["/esn/reservoir.py"], "/notebooks/echo.py": ["/esn/__init__.py"], "/esn/reservoir.py": ["/esn/__init__.py"], "/esn/create_reservoir/__init__.py": ["/esn/create_reservoir/make_graph.py", "/esn/create_reservoir/graph_to_weights.py", "/esn/create_reservoir/weights_in.py", "/esn/create_reservoir/activation.py"], "/tests/test_data.py": ["/esn/__init__.py"], "/tests/test_reservoir.py": ["/esn/__init__.py"], "/tests/test_creator.py": ["/esn/__init__.py", "/esn/create_reservoir/graph_to_weights.py"], "/tests/test_utils.py": ["/esn/__init__.py"], "/esn/create_reservoir/graph_to_weights.py": ["/esn/utils.py"]}
|
32,928,441
|
khakhalin/echo-networks
|
refs/heads/main
|
/esn/__init__.py
|
from . import create_reservoir as creator
from .reservoir import Reservoir
from .data import Data
from .utils import *
"""
Target usage (the spirit of it):
model = esn.reservoir(n_nodes=100, network_type='ws', inhibition='spread')
x,y = esn.data.lorenz(t=100, start=0.0)
out_weights = model.fit(x, y)
prediction = model.predict(esn.data.lorenz(t=100, start=0.5))
print(prediction.quality)
"""
|
{"/esn/__init__.py": ["/esn/basic_reservoir.py", "/esn/reservoir.py", "/esn/data.py", "/esn/utils.py"], "/esn/esn.py": ["/esn/reservoir.py"], "/notebooks/echo.py": ["/esn/__init__.py"], "/esn/reservoir.py": ["/esn/__init__.py"], "/esn/create_reservoir/__init__.py": ["/esn/create_reservoir/make_graph.py", "/esn/create_reservoir/graph_to_weights.py", "/esn/create_reservoir/weights_in.py", "/esn/create_reservoir/activation.py"], "/tests/test_data.py": ["/esn/__init__.py"], "/tests/test_reservoir.py": ["/esn/__init__.py"], "/tests/test_creator.py": ["/esn/__init__.py", "/esn/create_reservoir/graph_to_weights.py"], "/tests/test_utils.py": ["/esn/__init__.py"], "/esn/create_reservoir/graph_to_weights.py": ["/esn/utils.py"]}
|
32,928,442
|
khakhalin/echo-networks
|
refs/heads/main
|
/esn/create_reservoir/graph_to_weights.py
|
import numpy as np
import scipy.linalg as spla
from esn.utils import utils
def graph_to_weights(graph_dict, n_nodes=None, rho=None, inhibition='alternating'):
    """Creates a numpy weights matrix from a graph.

    parameters:
        graph_dict: a dictionary {node: [target nodes]} defining the graph
        n_nodes: matrix size. Default=None (inferred from the largest node id)
        rho: target spectral radius. Default=None (no adjustment)
        inhibition (string): how to introduce inhibition. Options include:
            None - no inhibition
            'alternating' - checkerboard pattern, with even edges excitatory
            'distributed' - all edges are excitatory, but all missing edges are weakly inhibitory
            'balanced_in' - inputs converging on each neuron sum to zero
            'balanced_out' - outputs leaving each neuron sum to zero

    returns:
        (weights, sr): the weight matrix (graph-style: w[i, j] = w(i-->j)) and
        its spectral radius as measured before any rho rescaling
    """
    if n_nodes is None:  # Try to guess n_nodes from the graph_dict itself
        keys, values = zip(*graph_dict.items())
        n_nodes = max(list(keys) + [i for edges in values for i in edges]) + 1
    weights = np.zeros((n_nodes, n_nodes))
    edges = [(i, j) for i, j in utils.edges(graph_dict) if i != j]  # Remove loops
    for (i, j) in edges:
        weights[i, j] = 1  # The matrix is not flipped!
    # (No diagonal cleanup needed here: self-loops were already filtered out)
    if inhibition == 'alternating':
        for i in range(n_nodes):
            for j in range(n_nodes):
                weights[i, j] *= ((i + j) % 2) * 2 - 1  # Checkerboard
    elif inhibition == 'distributed':
        # Sum across the entire matrix ==0: every absent edge turns weakly negative
        strength = len(edges) / (n_nodes * (n_nodes - 1) - len(edges))
        weights = weights * (1 + strength) - strength
    elif inhibition == 'balanced_in':
        # Sum converging on each neuron ==0, unless all edges are either 1 or 0
        total_input = np.sum(weights, axis=0)
        total_input = total_input / np.maximum((weights.shape[0] - 1 - total_input), 1)
        weights = weights * (1 + total_input[np.newaxis, :]) - total_input[np.newaxis, :]
    elif inhibition == 'balanced_out':
        # Sum of outputs of each neuron ==0 (unless out_edges are either all 1 or all 0)
        total_output = np.sum(weights, axis=1)
        total_output = total_output / np.maximum((weights.shape[0] - 1 - total_output), 1)
        weights = weights * (1 + total_output[:, np.newaxis]) - total_output[:, np.newaxis]
    elif inhibition is None:
        pass  # Explicitly do nothing
    else:
        raise ValueError(f'Unrecognized inhibition type: {inhibition}')
    for i in range(weights.shape[0]):
        weights[i, i] = 0  # Cleanup the diagonal (some methods above spoil it)
    sr = _spectral_radius(weights)
    if rho is not None:  # Need to correct spectral radius
        if sr != 0:  # A zero-radius matrix is hopeless as weights, but at least don't divide by 0
            weights = weights / sr * rho
    return weights, sr
def _spectral_radius(mat):
"""Calculates spectral radius of a matrix."""
if isinstance(mat, float) or isinstance(mat, int):
mat = np.array([[mat]])
n = mat.shape[0]
# r = spla.eigh(mat, eigvals_only=True, subset_by_index=(n-1)) # Once scipy is updated to 1.6.1
# r = spla.eigh(mat, eigvals_only=True, eigvals=(n-1, n-1))
r = max(np.linalg.eigvals(mat))
return np.real(r) # I'm not sure it's in the definition, but feels right in this context?
|
{"/esn/__init__.py": ["/esn/basic_reservoir.py", "/esn/reservoir.py", "/esn/data.py", "/esn/utils.py"], "/esn/esn.py": ["/esn/reservoir.py"], "/notebooks/echo.py": ["/esn/__init__.py"], "/esn/reservoir.py": ["/esn/__init__.py"], "/esn/create_reservoir/__init__.py": ["/esn/create_reservoir/make_graph.py", "/esn/create_reservoir/graph_to_weights.py", "/esn/create_reservoir/weights_in.py", "/esn/create_reservoir/activation.py"], "/tests/test_data.py": ["/esn/__init__.py"], "/tests/test_reservoir.py": ["/esn/__init__.py"], "/tests/test_creator.py": ["/esn/__init__.py", "/esn/create_reservoir/graph_to_weights.py"], "/tests/test_utils.py": ["/esn/__init__.py"], "/esn/create_reservoir/graph_to_weights.py": ["/esn/utils.py"]}
|
32,928,443
|
khakhalin/echo-networks
|
refs/heads/main
|
/esn/utils.py
|
import numpy as np
import networkx as nx
import matplotlib.pyplot as plt
# Static class, never initialized
class utils():
    """A collection of relevant utilities: graph edges, MSE loss, plotting."""
    @staticmethod
    def edges(g):
        """Returns a set of graph edges (as tuples)"""
        return {(i, j) for i, targets in g.items() for j in targets}

    @staticmethod
    def loss(y1, y2):
        """MSE, Mean Squared Error loss."""
        diff = np.array(y1) - np.array(y2)
        return np.sum(diff ** 2) / len(y1)

    @staticmethod
    def plot_graph(graph_dictionary):
        """Utility: plots a graph from a gdict."""
        G = nx.Graph()
        for node, targets in graph_dictionary.items():
            for other in targets:
                G.add_edge(node, other)  # Undirected graph, so the order doesn't matter
        nx.draw_kamada_kawai(G, node_size=30)
        return G

    @staticmethod
    def plot_data(x, y, title='Lorenz attractor'):
        """AKH NOTE: I have no idea why we would need this. But keeping for now."""
        if x.ndim != 1:
            raise ValueError("Argument x_array should be 1 dimensional. "
                             "It actually is {0} dimensional".format(x.ndim))
        if y.ndim != 1:
            raise ValueError("Argument y_array should be 1 dimensional. "
                             "It actually is {0} dimensional".format(y.ndim))
        fig, ax = plt.subplots()
        ax.plot(x, y, ".")
        ax.set(xlabel="X-comp", ylabel="Z-comp", title=title)
        return fig
|
{"/esn/__init__.py": ["/esn/basic_reservoir.py", "/esn/reservoir.py", "/esn/data.py", "/esn/utils.py"], "/esn/esn.py": ["/esn/reservoir.py"], "/notebooks/echo.py": ["/esn/__init__.py"], "/esn/reservoir.py": ["/esn/__init__.py"], "/esn/create_reservoir/__init__.py": ["/esn/create_reservoir/make_graph.py", "/esn/create_reservoir/graph_to_weights.py", "/esn/create_reservoir/weights_in.py", "/esn/create_reservoir/activation.py"], "/tests/test_data.py": ["/esn/__init__.py"], "/tests/test_reservoir.py": ["/esn/__init__.py"], "/tests/test_creator.py": ["/esn/__init__.py", "/esn/create_reservoir/graph_to_weights.py"], "/tests/test_utils.py": ["/esn/__init__.py"], "/esn/create_reservoir/graph_to_weights.py": ["/esn/utils.py"]}
|
32,941,100
|
Deniz-Jasa/stock_prediction_model
|
refs/heads/master
|
/data_visualization.py
|
import streamlit as st
import time
import numpy as np
import pandas as pd
import rsi_converted as dt

# Web app title:
st.write("""
# Simple Stock Predictor App
""")

# Description:
st.markdown("Our Group: Khuong Tran, Deniz Jasarbasic, Eric Karpovits, Raz Levi")
st.markdown("Language: Python")
st.markdown("Technologies:")
st.markdown("Method & Project Description: Our stock prediction model is using a technical analysis approach at predicting the daily price movement of a stock and uses this data to attempt to predict future price movements.")
st.markdown("Demo:")
st.markdown("Confusion Matrix Visualization:")

# Visualization of the Confusion Matrix:
df = pd.DataFrame(
    dt.prediction,
    columns=('col %d' % i for i in range(500)))
#st.dataframe(df)
#st.plotly_chart(df)
# Bug fix: the module is imported as `st`, so the bare name `streamlit`
# raised a NameError on this line.
# NOTE(review): `**df` expands the DataFrame's column labels as keyword
# arguments, which st.pyplot does not accept — st.dataframe(df) was
# probably the intent; confirm with the authors before changing the call.
st.pyplot(fig=None, clear_figure=None, **df)

# Reruns the web app:
st.button("Re-run")
|
{"/rsi_converted.py": ["/stock_lib.py"], "/data_visualization.py": ["/rsi_converted.py"]}
|
32,985,558
|
SilasKenneth/fast-food-fast-api-v1
|
refs/heads/develop
|
/run.py
|
import os
from app import create_app
import app_config as config

# Resolve the configuration name, falling back to development mode
# whenever APP_SETTINGS is unset or blank.
env_setting = os.getenv("APP_SETTINGS")
if env_setting is None or env_setting.strip() == "":
    env_setting = "development"
config_name = env_setting
app = create_app(config_name)

if __name__ == '__main__':
    # The debug flag comes from the selected configuration object
    app.run(debug=config.configurations[config_name].DEBUG)
|
{"/app/resources/products.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/app/resources/addresses.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/basetest.py": ["/app/db.py", "/app/models.py", "/app/__init__.py"], "/tests/test_orders.py": ["/app/models.py", "/tests/basetest.py", "/app/db.py"], "/app/__init__.py": ["/app_config.py", "/app/resources/products.py", "/app/resources/orders.py", "/app/resources/addresses.py", "/app/resources/users.py"], "/app/resources/orders.py": ["/app/db.py", "/app/models.py", "/app/utils.py"], "/app/resources/users.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/test_models.py": ["/app/models.py", "/app/db.py", "/tests/basetest.py"], "/run.py": ["/app/__init__.py", "/app_config.py"], "/app/models.py": ["/app/db.py"]}
|
32,985,559
|
SilasKenneth/fast-food-fast-api-v1
|
refs/heads/develop
|
/app/resources/products.py
|
from flask_restful import Resource, reqparse
from app.models import Product
from app.db import db
from app.utils import empty
class MenuResource(Resource):
    """REST resource for menu items: POST creates one, GET lists all or fetches one."""
    # Shared request parser: all three fields are mandatory on POST
    parser = reqparse.RequestParser()
    parser.add_argument("name", required=True,
                        help="Missing menu item name")
    parser.add_argument("description", required=True,
                        help="Missing menu item description")
    parser.add_argument("price", required=True, help="Missing prices")
    def post(self, product_id=None):
        """A method to create a new product or
        modify one if it a product_id is specified"""
        # NOTE(review): product_id is never used below, so the "modify"
        # path promised by the docstring is not actually implemented.
        args = MenuResource.parser.parse_args()
        product_name = args.get("name", "")
        product_description = args.get("description", "")
        unit_price = args.get("price", "")
        # Reject blank fields (NOTE(review): 403 is unusual here; 400 is conventional)
        if empty(product_name) or empty(product_description) or empty(unit_price):
            return {"ok" : False, "code": 403, "message": "Please provide"
                                                          "a price, a name and"
                                                          " a description"}, 403
        def valid(number):
            # reqparse delivers strings, so the price must be a numeric
            # string holding a strictly positive integer.
            if not isinstance(number, str):
                return False
            if not str(number).isnumeric():
                return False
            number = int(number)
            if number <= 0:
                return False
            return True
        if not valid(unit_price):
            return {"message": "The menu item should have a valid"
                               "price", "ok": False, "code": 403}, 403
        exists = Product.exists(product_name)
        if exists:
            return {"message": "The product with name %s already exists"% product_name}, 400
        pro = Product(product_name, product_description, unit_price)
        db.add_menu_item(pro)
        return {"ok": True, "message": "The menu item was saved successfully",
                "data": pro.json}, 200
    def get(self, product_id=None):
        """Get all products or a single product if
        product_id is not None
        """
        if product_id is not None:
            order = Product.get_by_id(product_id)
            if order is None:
                return {"message": "The menu item with the id %s does not exists"% product_id}
            # NOTE(review): other responses serialize with .json — confirm
            # Product.get_by_id returns something JSON-serializable here.
            return {"data": order}
        products = db.menu
        # NOTE(review): body carries code "404" but the HTTP status is the implicit 200
        if len(products) == 0:
            return {"ok": True, "code": "404",
                    "message": "No menu items are currently "
                               "available in our database"}
        res = []
        for product in products:
            res.append(products[product].json)
        return res
|
{"/app/resources/products.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/app/resources/addresses.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/basetest.py": ["/app/db.py", "/app/models.py", "/app/__init__.py"], "/tests/test_orders.py": ["/app/models.py", "/tests/basetest.py", "/app/db.py"], "/app/__init__.py": ["/app_config.py", "/app/resources/products.py", "/app/resources/orders.py", "/app/resources/addresses.py", "/app/resources/users.py"], "/app/resources/orders.py": ["/app/db.py", "/app/models.py", "/app/utils.py"], "/app/resources/users.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/test_models.py": ["/app/models.py", "/app/db.py", "/tests/basetest.py"], "/run.py": ["/app/__init__.py", "/app_config.py"], "/app/models.py": ["/app/db.py"]}
|
32,985,560
|
SilasKenneth/fast-food-fast-api-v1
|
refs/heads/develop
|
/tests/basetest.py
|
from unittest import TestCase
from app.db import db
from app import create_app
class BaseTest(TestCase):
    """Shared fixture base: builds sample users, products, addresses and
    request payloads against the app's db, for the concrete test cases."""
    def setUp(self):
        # Fresh app + test client in testing configuration for every test
        self.app = create_app("testing")
        with self.app.app_context():
            from app.models import (User, Address, Product)
            self.client = self.app.test_client()
            # Sample users: (username, email, password)
            self.gloria = User("gloria", "gloria@gmail.com", "gloria")
            self.silas = User("silas", "silaskenn@gmail.com", "Nyamwaro2012")
            self.daniel = User("danielotieno", "daniel@gmail.com", "dan")
            # Sample menu items: (name, description, price)
            self.product1 = Product("Wet Fry", "Some yummy food", 120)
            self.product2 = Product("Boiled fish", "Some yummy Fish", 200)
            self.product3 = Product("Sushi", "Hot sushi from Japan", 300)
            self.product4 = Product("Koria", "Hot Dog stuff cooked", 400)
            # Sample delivery addresses: (town, street, phone)
            self.silas_address = Address("Kisumu", "Kibuye", "0792333333")
            self.daniel_address = Address("Kisumu", "Kondele", "0700000000")
            self.gloria_address = Address("Kericho", "Kiserian", "0728828288")
            self.database = db
            # Order payload: several items at once
            self.multiple = {
                "user_id" : "1",
                "items": "1,2,3",
                "address": "1"
            }
            # Order payload: single valid item
            self.single_valid = {
                "user_id": "1",
                "items": "1",
                "address": "1"
            }
            # Order payload with a blank address (negative case)
            self.sing_with_missing_address = {
                "address": "",
                "items": "1",
                "user_id": "1"
            }
            # Address-creation payload
            self.test_address = {
                "town": "Kisumu",
                "phone": "0791350402",
                "street": "Kondele"
            }
            # Menu payload with a blank name (negative case)
            self.item_without_name = {
                "name": "",
                "description": "Sushi from Japan",
                "price": 300
            }
            # Signup payloads: new_user1 reuses new_user's email to test duplicates
            self.new_user = {
                "username":"jameskey",
                "email": "jameskey@gmail.com",
                "password": "SilasK@2019",
                "confirm_pass": "SilasK@2019"
            }
            self.new_user1 = {
                "username":"jameskeys",
                "email": "jameskey@gmail.com",
                "password": "SilasK@2019",
                "confirm_pass": "SilasK@2019"
            }
            self.user_test = {"username": "silaskenn", "password": "SilasK@2018"}
            # Endpoint URLs under test
            self.ORDER_URL = "/api/v1/orders"
            self.MENU_URL = "/api/v1/menu"
            self.test_admin = User("admin", "admin@admin.com", "admin")
            # Seed the shared db. NOTE(review): product1 is never added, and
            # silas_address is added twice — confirm both are intentional.
            self.database.add_menu_item(self.product2)
            self.database.add_menu_item(self.product3)
            self.database.add_menu_item(self.product4)
            self.silas.add_address(self.silas_address)
            self.database.add_user(self.silas)
            self.silas.add_address(self.silas_address)
            self.test_admin.is_admin = True
    def tearDown(self):
        # Wipe the shared db so tests stay independent
        self.database.drop()
|
{"/app/resources/products.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/app/resources/addresses.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/basetest.py": ["/app/db.py", "/app/models.py", "/app/__init__.py"], "/tests/test_orders.py": ["/app/models.py", "/tests/basetest.py", "/app/db.py"], "/app/__init__.py": ["/app_config.py", "/app/resources/products.py", "/app/resources/orders.py", "/app/resources/addresses.py", "/app/resources/users.py"], "/app/resources/orders.py": ["/app/db.py", "/app/models.py", "/app/utils.py"], "/app/resources/users.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/test_models.py": ["/app/models.py", "/app/db.py", "/tests/basetest.py"], "/run.py": ["/app/__init__.py", "/app_config.py"], "/app/models.py": ["/app/db.py"]}
|
32,985,561
|
SilasKenneth/fast-food-fast-api-v1
|
refs/heads/develop
|
/app/resources/users.py
|
import datetime
import os
import jwt
from flask_restful import Resource, reqparse
from app.db import db
from app.models import User
from app.utils import validate_username, validate_email, validate_password, empty
from werkzeug.security import check_password_hash
class SignUpResource(Resource):
    """The resource class for the User model"""
    # All four signup fields are required
    parser = reqparse.RequestParser()
    parser.add_argument("username", required=True, help="Provide a username")
    parser.add_argument("email", required=True, help="Missing required email")
    parser.add_argument("password", required=True, help="Missing password")
    parser.add_argument("confirm_pass", required=True, help="Missing password confirmation")
    def post(self):
        """Create a new user to the database"""
        args = self.parser.parse_args()
        username = args.get("username", "")
        email = args.get("email", "")
        password = args.get("password", "")
        confirm_pass = args.get("confirm_pass", "")
        # NOTE(review): body advertises code "400" but the HTTP status is 200 — confirm
        if empty(username) or empty(email) or empty(password) or empty(confirm_pass):
            return {"code": "400", "message": "All fields are required"}, 200
        if not validate_email(email):
            return {"message": "You have entered an invalid email address"}
        if not validate_username(username):
            return {"message": "Invalid username, a username should contain"
                               "and be between 6 to 12 characters long"}, 400
        if not validate_password(password):
            return {"message": "Please provide a valid password"
                               "a valid password contains the following"
                               "atleast one special character,"
                               "atleast one lowercase and atleast a number"
                               "and atleast should be between 6 to 12 characters"
                               "long"}
        # Uniqueness checks; presumably User.exists matches either username or
        # email — verify against the model before relying on both calls.
        exists = User.exists(username)
        exists1 = User.exists(email)
        if exists:
            return {"message": "The username already in user"}, 400
        if exists1:
            return {"message": "The email already in use"}, 400
        new_user = User(username, email, password)
        # The User model stores a hash, so the confirmation is compared against it
        if not check_password_hash(new_user.password, confirm_pass):
            return {"ok": False, "code": 403, "message": "The password and the confirmation doesn't match"}, 403
        db.add_user(new_user)
        # Keep the email -> username index in sync for login-by-email
        db.emails.update({new_user.email: new_user.username})
        return {"ok": True, "message": "User was successfully saved login to get started",
                "data": new_user.json}, 200
class LoginResource(Resource):
    """Login endpoint: issues a JWT for a valid username/email + password."""
    parser = reqparse.RequestParser()
    parser.add_argument("username", required=True, help="Please provide a username"
                                                        "or email address")
    parser.add_argument("password", required=True, help="Missing password")
    def post(self):
        """Get all users or a single user from the database if
        user_id is None the return all users otherwise
        just return the user with the specified user_id
        """
        # NOTE(review): the docstring above describes a different endpoint;
        # this method actually authenticates a user and returns a JWT.
        users = User.all()
        # print(users)
        user = None
        args = self.parser.parse_args()
        username = args.get("username", "")
        password = args.get("password", "")
        # Signing secret, with a hard-coded fallback when the env var is unset
        key = os.getenv("JWT_SECRET_KEY", "Hacker")
        if empty(username) or empty(password):
            return {"message": "Please provide a username or email"
                               "and a password"}
        if not users:
            return {"ok": False, "code": 403, "message": "Invalid login credentials"}, 403
        # Branch 1: the identifier is a username
        if username in db.users:
            user = db.users.get(username)
            # NOTE(review): "exp"/"iat" are stringified here; JWT libraries
            # expect numeric dates, so token expiry is likely not enforced — confirm.
            payload = {
                "exp": str(datetime.datetime.utcnow() + datetime.timedelta(minutes=10)),
                "iat": str(datetime.datetime.utcnow()),
                "data": user.json
            }
            if not check_password_hash(user.password, password):
                return {"message": "Invalid login credentials"}, 403
            # NOTE(review): .decode() assumes PyJWT 1.x (bytes); 2.x returns str
            token = jwt.encode(payload=payload, key=key).decode("utf-8")
            return {"message": "You are successfully logged in", "token": token}, 200
        # Branch 2: the identifier is an email, resolved via the email index
        if username in db.emails:
            user = db.users.get(db.emails.get(username, None), None)
            if user is None:
                return {"message": "Invalid login credentials"}, 403
            if not check_password_hash(user.password, password):
                return {"message": "Invalid login credentials"}, 403
            payload = {
                "exp": str(datetime.datetime.utcnow() + datetime.timedelta(minutes=10)),
                "iat": str(datetime.datetime.utcnow()),
                "data": user.json
            }
            token = jwt.encode(payload=payload, key=key).decode("utf-8")
            return {"message": "You are successfully logged in", "token": token}, 200
        # Identifier matched neither a username nor an email
        return {"message": "Invalid login credentials"}, 403
class UserProfileResource(Resource):
    """Read-only user profile endpoint."""
    def get(self, user_id=None):
        # NOTE(review): when user_id is given the method falls through and
        # returns None — a single-user branch appears to be missing.
        if user_id is None:
            return {"ok": True, "data": User.all()}
|
{"/app/resources/products.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/app/resources/addresses.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/basetest.py": ["/app/db.py", "/app/models.py", "/app/__init__.py"], "/tests/test_orders.py": ["/app/models.py", "/tests/basetest.py", "/app/db.py"], "/app/__init__.py": ["/app_config.py", "/app/resources/products.py", "/app/resources/orders.py", "/app/resources/addresses.py", "/app/resources/users.py"], "/app/resources/orders.py": ["/app/db.py", "/app/models.py", "/app/utils.py"], "/app/resources/users.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/test_models.py": ["/app/models.py", "/app/db.py", "/tests/basetest.py"], "/run.py": ["/app/__init__.py", "/app_config.py"], "/app/models.py": ["/app/db.py"]}
|
32,985,562
|
SilasKenneth/fast-food-fast-api-v1
|
refs/heads/develop
|
/app/__init__.py
|
from flask import Flask, jsonify, render_template
from flask_restful import Api
from flask_cors import CORS
import app_config as config
def create_app(config_name):
    """Build and configure the Flask application.

    Parameters:
        config_name: key into app_config.configurations, one of
            "development", "testing" or "production".

    Returns:
        A Flask app with the REST resources, CORS support, JSON error
        handlers and the home route registered.
    """
    app = Flask(__name__)
    api = Api(app)
    # Allow cross-origin requests from browser clients.
    CORS(app)
    app.config.from_object(config.configurations[config_name])
    """This ensures that the urls /login and /login/ are recognized as same
    without considering the trailing slash """
    app.url_map.strict_slashes = False
    with app.app_context():
        # Imported inside the app context (not at module level) so the
        # resource modules can import app internals without a cycle.
        from app.resources.products import MenuResource
        from app.resources.orders import OrderResource
        from app.resources.addresses import AddressResource
        from app.resources.users import LoginResource, SignUpResource
        api.add_resource(MenuResource, "/api/v1/menu", "/api/v1/menu/<int:product_id>")
        api.add_resource(OrderResource, "/api/v1/orders",
                         "/api/v1/orders/<int:order_id>")
        api.add_resource(AddressResource, "/api/v1/addresses",
                         "/api/v1/addresses/<int:address_id>")
        api.add_resource(LoginResource, "/api/v1/auth/login")
        api.add_resource(SignUpResource, "/api/v1/auth/signup")

    @app.errorhandler(404)
    def error_404(e):
        # NOTE(review): responds with HTTP 200 for a 404 condition, so
        # clients must inspect the body's "code" field — confirm intended.
        return jsonify({"code": "404", "message": "Not found"}), 200

    @app.errorhandler(500)
    def error_500(e):
        # NOTE(review): the body advertises code "503" while the HTTP
        # status is 500 — looks like a copy-paste slip; confirm.
        return jsonify(
            {"code": "503", "message": "We have some trouble"
                                       "processing your request"
                                       " please try again later"}), 500

    @app.errorhandler(405)
    def error_405(e):
        return jsonify({"code": "405", "message": "We dont allow"
                                                  " the request method",
                        "ok": False}), 200

    @app.route("/")
    def home():
        # Serve the static landing page.
        return render_template("index.html")
    return app
|
{"/app/resources/products.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/app/resources/addresses.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/basetest.py": ["/app/db.py", "/app/models.py", "/app/__init__.py"], "/tests/test_orders.py": ["/app/models.py", "/tests/basetest.py", "/app/db.py"], "/app/__init__.py": ["/app_config.py", "/app/resources/products.py", "/app/resources/orders.py", "/app/resources/addresses.py", "/app/resources/users.py"], "/app/resources/orders.py": ["/app/db.py", "/app/models.py", "/app/utils.py"], "/app/resources/users.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/test_models.py": ["/app/models.py", "/app/db.py", "/tests/basetest.py"], "/run.py": ["/app/__init__.py", "/app_config.py"], "/app/models.py": ["/app/db.py"]}
|
32,985,563
|
SilasKenneth/fast-food-fast-api-v1
|
refs/heads/develop
|
/tests/test_orders.py
|
from app.models import Order
import json
from app.db import db
from tests.basetest import BaseTest
class TestOrders(BaseTest):
    """End-to-end tests for the /api/v1/orders resource.

    Relies on fixtures declared on BaseTest (client, ORDER_URL,
    single_valid, multiple) — presumably reset per test; confirm in
    tests/basetest.py.
    """

    def test_user_can_order_successfully(self):
        # Happy path: a valid single-item order is accepted.
        res = self.client.post(self.ORDER_URL, data=json.dumps(self.single_valid), content_type="application/json")
        response = res.data
        response = json.loads(response)
        self.assertEqual(response.get("message", None), "You successfully placed the order thank you")
        self.assertEqual(res.status_code, 200)

    def test_user_can_order_multiple_items(self):
        # Orders containing several menu items are accepted too.
        result = self.client.post(self.ORDER_URL, data=json.dumps(self.multiple), content_type="application/json")
        response = json.loads(result.data)
        self.assertEqual(response.get("message", None), "You successfully placed the order thank you")

    def test_user_can_order_one_item(self):
        res = self.client.post(self.ORDER_URL, data=json.dumps(self.single_valid), content_type="application/json")
        response = res.data
        response_obj = json.loads(response)
        self.assertEqual(response_obj.get("message", None), "You successfully placed the order thank you")
        self.assertEqual(res.status_code, 200)

    def test_user_cannot_order_without_address(self):
        # A blank address must be rejected and nothing stored.
        response = self.client.post("/api/v1/orders", data=json.dumps({"user_id": 1, "items": "1,2,3", "address": ""}),
                                    content_type="application/json")
        last_order = max(db.order_maps) if len(db.order_maps) > 0 else None
        self.assertEqual(None, last_order)
        self.assertEqual(response.status_code, 400)

    def test_user_cannot_order_nothing(self):
        # An order with an empty item list must be rejected.
        response = self.client.post("/api/v1/orders", data=json.dumps({"user_id": 1, "items": "", "address": "1"}),
                                    content_type="application/json")
        last_order = max(db.order_maps) if len(db.order_maps) > 0 else None
        self.assertEqual(None, last_order)
        self.assertEqual(response.status_code, 400)

    def test_order_update_admin(self):
        # Placing then PUTting a new status flips pending -> complete.
        posted = self.client.post(self.ORDER_URL, data=json.dumps(self.single_valid), content_type="application/json")
        response = self.client.put(self.ORDER_URL+"/1", data=json.dumps({"status": "Complete"}), content_type="application/json")
        response_obj = json.loads(response.data)
        posted_obj = json.loads(posted.data)
        print(response_obj)
        self.assertEqual(posted.status_code, 200)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(posted_obj.get("data", {}).get("status", None), "pending")
        self.assertEqual(response_obj.get("data", {}).get("status", None), "complete")

    def test_can_delete_order(self):
        # Place, fetch, then delete an order; all three calls succeed.
        place = self.client.post("/api/v1/orders", data=json.dumps({"user_id": "1", "items": "1,2,3", "address": "1"}),
                                 content_type="application/json")
        data = self.client.get("/api/v1/orders/1", content_type="application/json")
        response = self.client.delete("/api/v1/orders/1", content_type="application/json")
        # print(response.data)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(data.status_code, 200)
        self.assertEqual(place.status_code, 200)
|
{"/app/resources/products.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/app/resources/addresses.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/basetest.py": ["/app/db.py", "/app/models.py", "/app/__init__.py"], "/tests/test_orders.py": ["/app/models.py", "/tests/basetest.py", "/app/db.py"], "/app/__init__.py": ["/app_config.py", "/app/resources/products.py", "/app/resources/orders.py", "/app/resources/addresses.py", "/app/resources/users.py"], "/app/resources/orders.py": ["/app/db.py", "/app/models.py", "/app/utils.py"], "/app/resources/users.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/test_models.py": ["/app/models.py", "/app/db.py", "/tests/basetest.py"], "/run.py": ["/app/__init__.py", "/app_config.py"], "/app/models.py": ["/app/db.py"]}
|
32,985,564
|
SilasKenneth/fast-food-fast-api-v1
|
refs/heads/develop
|
/app/utils.py
|
import re
def validate_email(email):
    """Return True when *email* looks like a valid address.

    Non-strings and very short strings (<= 7 characters) are rejected
    outright; the remainder must match a local@domain.tld shape.
    """
    if not isinstance(email, str):
        return False
    if len(email) <= 7:
        return False
    # Bug fix: the dot before the TLD group was unescaped (matched any
    # character), so dotless domains such as "abc@defghij" passed.
    ans = re.match("^.+@(\\[?)[a-zA-Z0-9-.]+\\.([a-zA-Z]{2,3}|[0-9]{1,3})(]?)$", email)
    if ans is None:
        return False
    return True
def validate_password(password):
    """Check that *password* contains an uppercase letter, a digit and
    a special character, with a total length of 6-12 characters."""
    if not isinstance(password, str):
        return False
    pattern = r"(?=.*[A-Za-z0-9])(?=.*[A-Z])(?=.*\d)(?=.*[#$^+=!*()@%&]).{6,12}"
    return re.match(pattern, password) is not None
def validate_username(username):
    """Return True for a 6-12 character username made of lowercase
    letters and whitespace only."""
    if not isinstance(username, str):
        return False
    if len(username) < 6 or len(username) > 12:
        return False
    # Bug fix: the character class was [a-z|\s], which also accepted a
    # literal '|' character inside usernames.
    return re.match(r'^[a-z\s]+$', username) is not None
def empty(x):
    """Return True when *x* contains nothing but whitespace."""
    return not x.strip()
|
{"/app/resources/products.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/app/resources/addresses.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/basetest.py": ["/app/db.py", "/app/models.py", "/app/__init__.py"], "/tests/test_orders.py": ["/app/models.py", "/tests/basetest.py", "/app/db.py"], "/app/__init__.py": ["/app_config.py", "/app/resources/products.py", "/app/resources/orders.py", "/app/resources/addresses.py", "/app/resources/users.py"], "/app/resources/orders.py": ["/app/db.py", "/app/models.py", "/app/utils.py"], "/app/resources/users.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/test_models.py": ["/app/models.py", "/app/db.py", "/tests/basetest.py"], "/run.py": ["/app/__init__.py", "/app_config.py"], "/app/models.py": ["/app/db.py"]}
|
32,985,565
|
SilasKenneth/fast-food-fast-api-v1
|
refs/heads/develop
|
/tests/test_models.py
|
import json
from unittest import TestCase
from tests.basetest import BaseTest
class TestModel(BaseTest, TestCase):
    """Tests covering signup/login, menu items and addresses.

    Fixtures (client, user_test, product1, MENU_URL, new_user, ...) are
    assumed to come from BaseTest — confirm in tests/basetest.py.
    """

    def test_can_add_user(self):
        # Sign a new user up, then log in with the same credentials.
        random_user = "jadedness"
        user_test = {"username": random_user, "email": random_user + "@gmail.com", "password": "SilasK@2018"}
        user_test.update({"confirm_pass": "SilasK@2018"})
        self.user_test.update({"username": random_user})
        try_post = self.client.post("/api/v1/auth/signup", data=json.dumps(user_test), content_type="application/json")
        try_login = self.client.post("/api/v1/auth/login", data=json.dumps(self.user_test),
                                     content_type="application/json")
        try_post_obj = json.loads(try_post.data)
        try_login_obj = json.loads(try_login.data)
        print(try_login_obj)
        self.assertEqual(try_post.status_code, 200)
        self.assertEqual(try_post_obj.get("message", None), "User was successfully saved login to get started")
        self.assertEqual(try_login_obj.get("message", None), "You are successfully logged in")

    def test_can_add_menu_item_admin(self):
        # A menu item posted with valid data is saved.
        response = self.client.post("/api/v1/menu", data=json.dumps(self.product1.json),
                                    content_type="application/json")
        response_obj = json.loads(response.data)
        self.assertEqual(response_obj.get("message", None), "The menu item was saved successfully")
        self.assertEqual(response.status_code, 200)

    def test_cannot_add_item_without_name(self):
        # A nameless item must be rejected with a 403.
        response = self.client.post(self.MENU_URL, data=json.dumps(self.item_without_name),
                                    content_type="application/json")
        response_obj = json.loads(response.data)
        self.assertEqual(response_obj.get("message", None), "Please providea price, a name and a description")
        self.assertEqual(response.status_code, 403)

    def test_can_add_address(self):
        response = self.client.post("/api/v1/addresses", data=json.dumps(self.silas_address.json),
                                    content_type="application/json")
        response_obj = json.loads(response.data)
        self.assertEqual(response_obj.get("message", None), "The address was successfully added")
        self.assertEqual(response.status_code, 200)

    def test_existing_email(self):
        # A duplicate email, then a duplicate username, are rejected.
        response = self.client.post("/api/v1/auth/signup", data=json.dumps(self.new_user),
                                    content_type="application/json")
        response1 = self.client.post("/api/v1/auth/signup", data=json.dumps(self.new_user1),
                                     content_type="application/json")
        response2 = self.client.post("/api/v1/auth/signup", data=json.dumps(self.new_user),
                                     content_type="application/json"
                                     )
        response_obj = json.loads(response.data)
        response_obj1 = json.loads(response1.data)
        response_obj2 = json.loads(response2.data)
        self.assertEqual(response_obj.get("message", None), "User was successfully saved login to get started")
        self.assertEqual(response_obj1.get("message", None), "The email already in use")
        self.assertEqual(response_obj2.get("message", None), "The username already in user")
        self.assertEqual(response.status_code, 200)

    def test_existing_menu_item(self):
        # Posting the same product twice yields a duplicate-name error.
        response = self.client.post("/api/v1/menu", data=json.dumps(self.product1.json),
                                    content_type="application/json")
        response1 = self.client.post("/api/v1/menu", data=json.dumps(self.product1.json),
                                     content_type="application/json")
        response_obj = json.loads(response.data)
        response_obj1 = json.loads(response1.data)
        self.assertEqual(response_obj.get("message", None), "The menu item was saved successfully")
        self.assertEqual(response_obj1.get("message", None), "The product with name Wet Fry already exists")
        self.assertEqual(response.status_code, 200)
|
{"/app/resources/products.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/app/resources/addresses.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/basetest.py": ["/app/db.py", "/app/models.py", "/app/__init__.py"], "/tests/test_orders.py": ["/app/models.py", "/tests/basetest.py", "/app/db.py"], "/app/__init__.py": ["/app_config.py", "/app/resources/products.py", "/app/resources/orders.py", "/app/resources/addresses.py", "/app/resources/users.py"], "/app/resources/orders.py": ["/app/db.py", "/app/models.py", "/app/utils.py"], "/app/resources/users.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/test_models.py": ["/app/models.py", "/app/db.py", "/tests/basetest.py"], "/run.py": ["/app/__init__.py", "/app_config.py"], "/app/models.py": ["/app/db.py"]}
|
32,985,566
|
SilasKenneth/fast-food-fast-api-v1
|
refs/heads/develop
|
/app/db.py
|
class Database(object):
    """In-memory data store backing the API (no persistence)."""

    def __init__(self):
        # username -> User object
        self.users = {}
        # order id -> username of the user who placed the order
        self.order_maps = {}
        # address id -> username of the owning user
        self.address_maps = {}
        # menu item id -> Product object
        self.menu = {}
        # email -> username (secondary index for login by email)
        self.emails = {}
        # user id -> username (secondary index for lookups by id)
        self.user_ids = {}

    def drop(self):
        """Method to clear the storage.

        Bug fix: the email and user-id indexes previously survived a
        drop(), so stale entries could shadow freshly created users
        between test runs.
        """
        self.users = {}
        self.order_maps = {}
        self.address_maps = {}
        self.menu = {}
        self.emails = {}
        self.user_ids = {}

    def add_user(self, user):
        """Method to add a user, keeping the secondary indexes in sync."""
        self.users.update({user.username: user})
        self.emails.update({user.email: user.username})
        self.user_ids.update({user.id: user.username})

    def add_menu_item(self, menu_item):
        """Method to add a product, assigning the next sequential id."""
        menu_item.id = len(self.menu) + 1
        # Key by the id just assigned so key and item id cannot diverge.
        self.menu.update({menu_item.id: menu_item})

    def get_item(self, table):
        """Return the named storage dict, or [] when it does not exist."""
        return getattr(self, table, [])


db = Database()
|
{"/app/resources/products.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/app/resources/addresses.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/basetest.py": ["/app/db.py", "/app/models.py", "/app/__init__.py"], "/tests/test_orders.py": ["/app/models.py", "/tests/basetest.py", "/app/db.py"], "/app/__init__.py": ["/app_config.py", "/app/resources/products.py", "/app/resources/orders.py", "/app/resources/addresses.py", "/app/resources/users.py"], "/app/resources/orders.py": ["/app/db.py", "/app/models.py", "/app/utils.py"], "/app/resources/users.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/test_models.py": ["/app/models.py", "/app/db.py", "/tests/basetest.py"], "/run.py": ["/app/__init__.py", "/app_config.py"], "/app/models.py": ["/app/db.py"]}
|
32,985,567
|
SilasKenneth/fast-food-fast-api-v1
|
refs/heads/develop
|
/app/models.py
|
import datetime
from app.db import db
from werkzeug.security import generate_password_hash
class Base(object):
    """Mixin providing a timestamp refresh for model classes."""

    def update(self):
        # Record the moment of the most recent modification (UTC, naive).
        self.date_updated = datetime.datetime.utcnow()
class User(Base):
    """User account model backed by the in-memory Database."""

    def __init__(self, username, email, password):
        """Create a user, reusing the stored id when the username exists."""
        self.username = username
        # Re-registering an existing username keeps its original id.
        self.id = db.users.get(username).id \
            if self.username in db.users else len(db.users) + 1
        # Only the hash is kept, never the clear-text password.
        self.password = generate_password_hash(password)
        self.email = email
        self.date_created = datetime.datetime.utcnow()
        self.last_update = datetime.datetime.utcnow()
        self.addresses = []
        self.is_admin = False
        self.orders = []

    def update_address(self, user_id, new_address):
        """Replace the address at 1-based position *user_id*.

        Returns True on success, False when the position is out of
        range.  Bug fix: the original indexed with the builtin ``id``
        instead of ``user_id``, so every successful path raised a
        TypeError.
        """
        if len(self.addresses) < user_id:
            # In this case the address Id doesn't exist
            return False
        self.addresses[user_id - 1] = new_address
        return True

    def add_address(self, address):
        """Attach *address* to this user and index it globally."""
        self.addresses.append(address)
        db.address_maps.update({address.id: self.username})

    @property
    def json(self):
        """Return a JSON serializable view of the user (no password)."""
        return {
            "id": self.id,
            "username": self.username,
            "email": self.email,
            "addresses": [address.json for address in self.addresses
                          if isinstance(address, Address)]
        }

    @property
    def json_without_id(self):
        """JSON view of the user without the ID field."""
        res = self.json
        del res['id']
        return res

    @classmethod
    def get_by_id(cls, user_id):
        """Return the username keyed under *user_id*, or None.

        NOTE(review): this returns the username (the dict key), not the
        User object — callers index db.users with the result.
        """
        users = db.users
        if users is None:
            return None
        for user in users:
            if users[user].id == user_id:
                return user
        return None

    @classmethod
    def get_by_email(cls, email):
        """Return the User registered under *email*, or None."""
        users = db.users
        if users is None:
            return None
        if email in db.emails:
            return db.users.get(db.emails.get(email, None), None)
        return None

    def delete(self):
        """Remove this user and its index entries from the database."""
        del db.user_ids[self.id]
        del db.emails[self.email]
        del db.users[self.username]

    @classmethod
    def all(cls):
        """Return the JSON views of every stored user."""
        users = db.get_item("users")
        if users == []:
            return []
        res = []
        for user in users:
            res.append(users[user].json)
        return res

    @classmethod
    def exists(cls, username):
        """True when *username* matches a stored username or email."""
        if username in db.users:
            return True
        if username in db.emails:
            return True
        return False
class Address(Base):
    """A delivery address attached to a user account."""

    def __init__(self, town, street, phone):
        """Store the address fields and assign the next sequential id."""
        count = len(db.address_maps)
        self.id = count + 1 if count > 0 else 1
        self.town = town
        self.street = street
        self.phone = phone

    @property
    def json(self):
        """Return a JSON serializable mapping with all fields as strings."""
        return {
            "id": str(self.id),
            "town": str(self.town),
            "street": str(self.street),
            "phone": str(self.phone)
        }

    @classmethod
    def find_by_id(cls, address_id):
        """Look an address up by id via its owning user, or None."""
        index = db.address_maps
        accounts = db.users
        if index is None:
            return None
        if address_id not in index:
            return None
        holder = index[address_id]
        if holder not in accounts:
            return None
        owned = accounts[holder].addresses
        if not owned:
            return None
        for entry in owned:
            if entry.id == address_id:
                return entry
        return None
class Order(object):
    """A food order placed by a user."""

    def __init__(self, order_by, address, items=None):
        """Create a pending order for *order_by*, shipped to *address*."""
        # Ids continue from the highest existing order id.
        self.id = max(db.order_maps) + 1 if len(db.order_maps) > 0 else 1
        self.ordered_by = order_by
        self.date_made = datetime.datetime.utcnow()
        self.last_update = datetime.datetime.utcnow()
        self.address = address
        # NOTE(review): the *items* argument is accepted but ignored;
        # callers assign .items explicitly after construction.
        self.items = []
        self.total = 0.00
        self.status = "pending"

    @property
    def json(self):
        """Full JSON serializable view of the order, including items."""
        return {
            "id": self.id,
            "order_by": self.ordered_by,
            "date_ordered": str(self.date_made),
            "address": self.address.json,
            "items": [item.json for item in self.items],
            "total": str(self.total),
            "status": str(self.status)
        }

    def json1(self):
        """Compact JSON view of the order without the item list.

        Bug fix: this was wrongly decorated @classmethod while reading
        instance attributes, so every call such as ``order.json1()``
        (see OrderResource.get) raised AttributeError.
        """
        return {
            "id": self.id,
            "order_by": self.ordered_by,
            "date_ordered": str(self.date_made).split(" ")[0],
            "total": str(self.total),
            "status": self.status
        }

    def place(self, user_id, address_id, products):
        """Attach *products* after validating the user and selection.

        Returns an error-code string on failure, None on success.
        """
        user = User.get_by_id(user_id)
        if user is None:
            return "user_not_exist"
        if products is None:
            return "no_products_selected"
        if len(products) == 0:
            return "no_products_selected"
        self.items = products

    @classmethod
    def get_by_id(cls, order_id):
        """A method to get an order based on it's id"""
        order_exist = db.order_maps.get(order_id)
        if order_exist is None:
            return None
        orders = db.users.get(order_exist, None)
        if orders is None:
            return None
        orders = orders.orders
        for order in orders:
            if order.id == order_id:
                return order
        return None

    @classmethod
    def all(cls):
        """Return every order from every user, or None when no users."""
        users = db.users
        if not users:
            return None
        res = []
        for user in users:
            res += users[user].orders
        return res
class Product(object):
    """A menu item that can be ordered."""

    def __init__(self, product_name, product_description, unit_price):
        """Create a product with the next sequential menu id."""
        self.id = len(db.menu) + 1
        self.product_name = product_name
        self.product_description = product_description
        self.last_update = datetime.datetime.utcnow()
        self.unit_price = unit_price

    @property
    def json(self):
        """Make the class json serializable"""
        return {
            "id": self.id,
            "name": self.product_name,
            "description": self.product_description,
            "price": self.unit_price
        }

    @classmethod
    def get_by_id(cls, product_id):
        """Return the product whose id is *product_id*, or None."""
        products = db.menu
        if len(products) == 0:
            return None
        for product in products:
            if products[product].id == product_id:
                return products[product]
        return None

    @classmethod
    def all(cls):
        """Return the JSON view of every menu item, or None when empty.

        Bug fix: the original iterated the dict's keys (ints) and read
        ``.json`` off them, raising AttributeError for any non-empty
        menu; index into the stored Product values instead.
        """
        products = db.menu
        if len(products) == 0:
            return None
        return [products[product].json for product in products]

    @classmethod
    def exists(cls, name):
        """True when a product named *name* is already on the menu."""
        items = db.menu
        if not items:
            return False
        for item in items:
            if items[item].product_name == name:
                return True
        return False
|
{"/app/resources/products.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/app/resources/addresses.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/basetest.py": ["/app/db.py", "/app/models.py", "/app/__init__.py"], "/tests/test_orders.py": ["/app/models.py", "/tests/basetest.py", "/app/db.py"], "/app/__init__.py": ["/app_config.py", "/app/resources/products.py", "/app/resources/orders.py", "/app/resources/addresses.py", "/app/resources/users.py"], "/app/resources/orders.py": ["/app/db.py", "/app/models.py", "/app/utils.py"], "/app/resources/users.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/test_models.py": ["/app/models.py", "/app/db.py", "/tests/basetest.py"], "/run.py": ["/app/__init__.py", "/app_config.py"], "/app/models.py": ["/app/db.py"]}
|
32,985,568
|
SilasKenneth/fast-food-fast-api-v1
|
refs/heads/develop
|
/app/resources/orders.py
|
from flask_restful import Resource, reqparse
from app.db import db
from app.models import Order, User, Product, Address
from app.utils import empty
class OrderResource(Resource):
    """REST resource for placing, reading, updating and deleting orders."""

    # Shared parser; arguments are added per-request inside post().
    parser = reqparse.RequestParser()

    def get(self, order_id=None):
        """The method to get orders from the database or a
        single order if order_id is specified"""
        if order_id is None:
            orders = Order.all()
            if not orders:
                return {"ok": False, "code": "404",
                        "message": "No orders are available yet"}, 200
            # NOTE(review): Order.json1 is declared a classmethod yet reads
            # instance attributes — this call likely raises; verify.
            return {"ok": True, "code": "200",
                    "data": [data.json1() for data in orders]}, 200
        who_bought = db.order_maps.get(order_id, None)
        user = db.users.get(who_bought, None)
        # If the order is not associated to any user then it doesn't exist
        if who_bought is None or user is None:
            return {"ok": False, "code": "404",
                    "message": "The order requested for "
                               "doesn't exist in our database"}, 200
        """Find where a given order belongs to in an index that
        maps orders to users and positions in the users order list"""
        order_batch = user.orders
        res = []
        # Deleted orders are left as None placeholders, hence the guard.
        for order in order_batch:
            if order is not None:
                if order.id == order_id:
                    res.append(order.json)
        return res, 200

    def post(self):
        """A method to create a new order or modify an existing order"""
        self.parser.add_argument("user_id", required=True,
                                 help="You cannot place an order "
                                      "anonymously")
        self.parser.add_argument("address", required=True,
                                 help="Please specify an address to ship "
                                      "your order to")
        self.parser.add_argument("items", required=True,
                                 help="You cannot order nothing "
                                      "please specify some items")
        args = self.parser.parse_args()
        user_id = args.get("user_id", "")
        address = args.get("address", "")
        items = args.get("items", "")
        if empty(user_id) or empty(address) or empty(items):
            return {"ok": False, "code": "400",
                    "message": "Please make sure you have no missing "
                               "requirements for an order"}, 400

        # Resolve the numeric user id to a stored username, or None.
        def checker0(y):
            if not isinstance(y, str) and not isinstance(y, int):
                return None
            y = str(y)
            if not y.isnumeric():
                return None
            return User.get_by_id(int(y))
        user = checker0(user_id)
        if user is None:
            return {"ok": False, "code": "400",
                    "message": "Could not recognize the person who's "
                               "trying to place the order"}, 400

        # Resolve the numeric address id to an Address, or None.
        def checker(y):
            if not isinstance(y, str) and not isinstance(y, int):
                return None
            y = str(y)
            if not y.isnumeric():
                return None
            return Address.find_by_id(int(y))
        add = checker(address)
        if add is None:
            return {"ok": False, "code": "404",
                    "message": "Could not find the specified address"
                               " or you specified an address with an"
                               " invalid format "
                               "first add an address"}, 400
        # Items arrive as a comma-separated string of menu ids.
        items = items.split(",")
        if not items:
            return {"ok": False, "code": 403, "message": "Please specify"
                                                         "items to complete the request"}, 403

        # Resolve one menu item id to a Product, or None.
        def found(y):
            if not isinstance(y, str) and not isinstance(y, int):
                return None
            if not str(y).isnumeric():
                return None
            return Product.get_by_id(int(y))
        products = [found(item) for item in items if item is not None]
        # NOTE(review): the list built above is immediately discarded by
        # the loop below, which also calls found() twice per item.
        products = []
        for item in items:
            if found(item) is None:
                return {"code": 404, "ok": False, "message": "The menu item with the id %s does not exist" % item}, 404
            products.append(found(item))
        products = [x for x in products if x is not None]
        prods = [product for product in products]
        if not prods:
            return {"code": "404", "ok": False,
                    "message": "The products you specified don't exist or "
                               "you just never specified any please "
                               "check again"}, 400
        order = Order(db.users[user].id, add, items)
        total = 0.00
        # The order total is the sum of unit prices of the chosen items.
        for p in prods:
            total += float(p.unit_price)
        order.items = prods
        order.total = total
        db.order_maps.update({order.id: user})
        db.users[user].orders.append(order)
        return {"code": 200, "ok": True,
                "message": "You successfully placed "
                           "the order thank you", "data": order.json}

    def delete(self, order_id):
        """Cancel an order with a delete request"""
        user = db.order_maps.get(order_id, None)
        who = db.users.get(user, None)
        if not user or not who:
            return {"code": "404", "ok": False,
                    "message": "Incorrect order id specified. Try again"}, 400
        # NOTE(review): unlike put(), this loop does not guard against
        # None placeholders left by earlier deletes — .id would raise.
        for i in range(len(who.orders)):
            if who.orders[i].id == order_id:
                copy = who.orders[i]
                # Replace with None so later order ids keep positions.
                who.orders[i] = None
                # NOTE(review): "succcessfully" typo below is part of the
                # API response body; fixing it needs a client/test change.
                return {"code": 200, "ok": True,
                        "message": "The order was succcessfully deleted",
                        "data": copy.json}, 200
        return {"code": 400, "ok": False,
                "message": "Could not find the order specified. "
                           "Please try another"}, 400

    def put(self, order_id):
        """Update the status of an existing order."""
        parser = reqparse.RequestParser()
        parser.add_argument("status", required=True,
                            help="Please specify whether to accept, "
                                 "reject or mark order as complete")
        args = parser.parse_args()
        status = args.get("status")
        if len(status.strip()) == 0:
            return {"code": 400, "ok": False,
                    "message": "Please specify a valid status"}, 400
        status = status.lower()
        # NOTE(review): the error message advertises different statuses
        # ("pending", "rejected", "accepted") than this whitelist allows.
        if status not in ["new", "processing", "canceled", "complete"]:
            return {"message": "Please specify a valid status statuses are complete, pending, rejected or accepted"}
        order = db.order_maps.get(order_id)
        if order is None:
            return {"code": "404", "ok": False,
                    "message": "Couldn't find the order specified "
                               "try another"}, 400
        for i in range(len(db.users[order].orders)):
            # Skip None placeholders left behind by deleted orders.
            if db.users[order].orders[i] is not None:
                if db.users[order].orders[i].id == order_id:
                    db.users[order].orders[i].status = status
                    return {"code": 200, "ok": True,
                            "message": "The order was successfully "
                                       "updated",
                            "data": db.users[order].orders[i].json}, 200
        return {"code": 400, "ok": False,
                "message": "Could not find the order specified"}, 400
|
{"/app/resources/products.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/app/resources/addresses.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/basetest.py": ["/app/db.py", "/app/models.py", "/app/__init__.py"], "/tests/test_orders.py": ["/app/models.py", "/tests/basetest.py", "/app/db.py"], "/app/__init__.py": ["/app_config.py", "/app/resources/products.py", "/app/resources/orders.py", "/app/resources/addresses.py", "/app/resources/users.py"], "/app/resources/orders.py": ["/app/db.py", "/app/models.py", "/app/utils.py"], "/app/resources/users.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/test_models.py": ["/app/models.py", "/app/db.py", "/tests/basetest.py"], "/run.py": ["/app/__init__.py", "/app_config.py"], "/app/models.py": ["/app/db.py"]}
|
32,985,569
|
SilasKenneth/fast-food-fast-api-v1
|
refs/heads/develop
|
/app_config.py
|
import os
class Config(object):
    """Shared base configuration for every environment."""

    DEBUG = False
    # Session secret comes from the environment; no default on purpose.
    SECRET_KEY = os.getenv("SECRET_KEY")
    # JWT signing key, with a development-only fallback value.
    JWT_SECRET_KEY = os.getenv("JWT_SECRET_KEY", "s01doutuutuutu")
class Development(Config):
    """Configuration for local development (debug on, testing off)."""
    DEBUG = True
    TESTING = False
    APP_SETTINGS = "development"
    ENV = "development"
class Testing(Config):
    """Configuration for the test suite (testing and debug both on)."""
    TESTING = True
    DEBUG = True
    APP_SETTINGS = "testing"
    ENV = "testing"
class Production(Config):
    """Configuration for the production environment."""
    DEBUG = False
    TESTING = False
    APP_SETTINGS = "production"
    ENV = "production"
# Map of environment name -> configuration class, consumed by create_app.
configurations = {
    "development": Development,
    "testing": Testing,
    "production": Production
}
|
{"/app/resources/products.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/app/resources/addresses.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/basetest.py": ["/app/db.py", "/app/models.py", "/app/__init__.py"], "/tests/test_orders.py": ["/app/models.py", "/tests/basetest.py", "/app/db.py"], "/app/__init__.py": ["/app_config.py", "/app/resources/products.py", "/app/resources/orders.py", "/app/resources/addresses.py", "/app/resources/users.py"], "/app/resources/orders.py": ["/app/db.py", "/app/models.py", "/app/utils.py"], "/app/resources/users.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/test_models.py": ["/app/models.py", "/app/db.py", "/tests/basetest.py"], "/run.py": ["/app/__init__.py", "/app_config.py"], "/app/models.py": ["/app/db.py"]}
|
32,985,570
|
SilasKenneth/fast-food-fast-api-v1
|
refs/heads/develop
|
/app/resources/addresses.py
|
from flask_restful import Resource, reqparse
from app.models import Address, User
from app.db import db
from app.utils import empty
class AddressResource(Resource):
    """REST resource for listing and creating delivery addresses."""

    parser = reqparse.RequestParser()
    parser.add_argument("town", required=True, help="Missing the town name")
    parser.add_argument("street", required=True,
                        help="Please specify a street its required")
    parser.add_argument("phone", required=True,
                        help="Please specify a phone number")

    def get(self, address_id=None):
        """Get all addresses or just a single one if
        the address_id is specified"""
        if address_id is None:
            users = db.users
        else:
            address = Address.find_by_id(address_id)
            if address is None:
                return {"code": "404", "message": "The address was not "
                                                  " found in our database"}, \
                       200
            return {"ok": True, "code": "200", "data": address.json}, 200
        # Only reached when address_id is None (the else branch returns),
        # so `users` is always bound here.
        res = []
        if not users:
            return {"code": "404", "message": "No addresses were "
                                              "found in the database"}, 200
        for user in users:
            res += users[user].addresses
        if len(res) == 0:
            return {"code": "404", "message": "No addresses were "
                                              "found in the database"}, 200
        return {"ok": True, "data": [address.json for address in res]}, 200

    def post(self, address_id=None):
        """Create a new address and attach it to a user."""
        args = self.parser.parse_args()
        town = args.get("town")
        street = args.get("street")
        phone = args.get("phone")
        if empty(town) or empty(street) or empty(phone):
            return {"code": "500", "message": "Please provide all the details "
                                              "needed don't leave blanks"}, 200
        address = Address(town, street, phone)
        # NOTE(review): hard-coded to user id 1 — presumably a stand-in
        # until authentication supplies the real user; confirm.
        user = User.get_by_id(1)
        if user is None:
            return {"code": "404", "message": "Please become a "
                                              "valid user", "ok": False}
        db.users[db.users[user].username].add_address(address)
        db.address_maps.update({address.id: db.users[user].username})
        return {"code": "231", "message": "The address was successfully "
                                          "added", "data": address.json}
|
{"/app/resources/products.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/app/resources/addresses.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/basetest.py": ["/app/db.py", "/app/models.py", "/app/__init__.py"], "/tests/test_orders.py": ["/app/models.py", "/tests/basetest.py", "/app/db.py"], "/app/__init__.py": ["/app_config.py", "/app/resources/products.py", "/app/resources/orders.py", "/app/resources/addresses.py", "/app/resources/users.py"], "/app/resources/orders.py": ["/app/db.py", "/app/models.py", "/app/utils.py"], "/app/resources/users.py": ["/app/models.py", "/app/db.py", "/app/utils.py"], "/tests/test_models.py": ["/app/models.py", "/app/db.py", "/tests/basetest.py"], "/run.py": ["/app/__init__.py", "/app_config.py"], "/app/models.py": ["/app/db.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.